-
Notifications
You must be signed in to change notification settings - Fork 327
/
run
executable file
·136 lines (107 loc) · 3.57 KB
/
run
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
#!/bin/bash
# This file is used to launch Shark on the master.
export SCALA_VERSION=2.10
SHARK_VERSION=0.9.2

# Figure out where the framework is installed (absolute path of this script's
# directory). Quoted $( ) form instead of backticks so paths with spaces work.
FWDIR="$(cd "$(dirname "$0")" && pwd)"
export SHARK_HOME="$FWDIR"

# Load environment variables from conf/shark-env.sh, if it exists.
if [ -e "$SHARK_HOME/conf/shark-env.sh" ] ; then
  . "$SHARK_HOME/conf/shark-env.sh"
fi
# Cluster mode (MASTER set) requires SPARK_HOME to be configured.
# Fix: quote $SPARK_HOME — the unquoted form misparses when the value
# contains whitespace.
if [ -n "$MASTER" ] ; then
  if [ -z "$SPARK_HOME" ] ; then
    echo "No SPARK_HOME specified. Please set SPARK_HOME for cluster mode."
    exit 1
  fi
fi

# Check for Shark-with-Spark-on-YARN params; both assembly jars are mandatory.
if [ "$SHARK_EXEC_MODE" = "yarn" ] ; then
  if [ -z "$SPARK_ASSEMBLY_JAR" ] ; then
    echo "No SPARK_ASSEMBLY_JAR specified. Please set SPARK_ASSEMBLY_JAR for spark on yarn mode."
    exit 1
  else
    export SPARK_JAR="$SPARK_ASSEMBLY_JAR"
  fi
  if [ -z "$SHARK_ASSEMBLY_JAR" ] ; then
    echo "No SHARK_ASSEMBLY_JAR specified. please set SHARK_ASSEMBLY_JAR for spark on yarn mode."
    exit 1
  else
    export SPARK_YARN_APP_JAR="$SHARK_ASSEMBLY_JAR"
  fi
  # Use yarn-client mode for the interactive shell.
  export MASTER=yarn-client
fi
# Check for optionally specified configuration file path; default to conf/.
if [ -z "$HIVE_CONF_DIR" ] ; then
  HIVE_CONF_DIR="$SHARK_HOME/conf"
fi
if [ -f "${HIVE_CONF_DIR}/hive-env.sh" ]; then
  . "${HIVE_CONF_DIR}/hive-env.sh"
fi

# Add Shark jars to the Spark classpath.
# Fix: iterating over `find` output with a for-loop word-splits jar paths
# that contain whitespace; read the paths line by line instead. The three
# identical copy-paste loops are folded into one over the search dirs.
for libdir in "$SHARK_HOME/lib" \
              "$SHARK_HOME/lib_managed/jars" \
              "$SHARK_HOME/lib_managed/bundles"; do
  while IFS= read -r jar; do
    SPARK_CLASSPATH+=":$jar"
  done < <(find "$libdir" -name '*jar')
done
SPARK_CLASSPATH+=":$HIVE_CONF_DIR"
# Build up Shark's jar or classes.
SHARK_CLASSES="$SHARK_HOME/target/scala-$SCALA_VERSION/classes"
SHARK_JAR="$SHARK_HOME/target/scala-$SCALA_VERSION/shark_$SCALA_VERSION-$SHARK_VERSION.jar"
# Prefer freshly compiled classes over the packaged jar; abort if neither
# build artifact exists.
# Fix: the original repeated this whole section verbatim (SHARK_JAR was
# recomputed to the same value and the jar/classes and test-classes entries
# were appended to the classpath a second time); the duplicate is removed.
if [ -d "$SHARK_CLASSES/shark" ] ; then
  SPARK_CLASSPATH+=":$SHARK_CLASSES"
elif [ -f "$SHARK_JAR" ] ; then
  SPARK_CLASSPATH+=":$SHARK_JAR"
else
  echo "Cannot find either compiled classes or compiled jar package for Shark."
  echo "Have you compiled Shark yet?"
  exit 1
fi
SPARK_CLASSPATH+=":$SHARK_HOME/target/scala-$SCALA_VERSION/test-classes"
# Without HADOOP_HOME, Shark falls back to local mode; otherwise pick up the
# Hadoop configuration directories (both 2.x etc/hadoop and 1.x conf layouts).
if [ -z "$HADOOP_HOME" ] ; then
  echo "No HADOOP_HOME specified. Shark will run in local-mode"
else
  SPARK_CLASSPATH+=":$HADOOP_HOME/etc/hadoop"
  SPARK_CLASSPATH+=":$HADOOP_HOME/conf"
fi

# TODO(rxin): Check aux classpath and aux java opts.
#CLASSPATH=${CLASSPATH}:${AUX_CLASSPATH}
export SPARK_CLASSPATH
export CLASSPATH+="$SPARK_CLASSPATH" # Needed for spark-shell
export SPARK_JAVA_OPTS+=" $TEST_JAVA_OPTS"

# Suppress the HADOOP_HOME warnings emitted by Hadoop 1.x.x.
export HADOOP_HOME_WARN_SUPPRESS=true

# Default heap size for the master JVM.
if [ -z "$SHARK_MASTER_MEM" ] ; then
  SHARK_MASTER_MEM="512m"
fi
# Set JAVA_OPTS to be able to load native libraries and to set heap size.
# Fix: append with a separating space — the original `+="$SPARK_JAVA_OPTS"`
# fused a pre-set JAVA_OPTS with the first Spark option into one malformed token.
JAVA_OPTS+=" $SPARK_JAVA_OPTS"
JAVA_OPTS+=" -Djava.library.path=$SPARK_LIBRARY_PATH"
JAVA_OPTS+=" -Xms$SHARK_MASTER_MEM -Xmx$SHARK_MASTER_MEM"
export JAVA_OPTS
# In case we are running Ant.
export ANT_OPTS="$JAVA_OPTS"
# Pick the JVM launcher: honor a user-supplied RUNNER, else JAVA_HOME, else
# whatever `java` is on PATH. EXTRA_ARGS is only populated when we choose the
# runner ourselves, matching the original behavior.
if [ -z "$RUNNER" ] ; then
  if [ -n "$JAVA_HOME" ]; then
    RUNNER="${JAVA_HOME}/bin/java"
  else
    RUNNER=java
  fi
  # The JVM doesn't read JAVA_OPTS by default so we need to pass it in.
  EXTRA_ARGS="$JAVA_OPTS"
fi
# Fix: quote $RUNNER so a JAVA_HOME containing spaces still launches.
# $EXTRA_ARGS is intentionally unquoted: it holds multiple JVM options.
exec "$RUNNER" $EXTRA_ARGS "$@"