forked from deanwampler/spark-scala-tutorial

start.sh — executable file, 54 lines (44 loc), 1.19 KB
#!/bin/bash
# start.sh — launch SBT for the Spark tutorial with configurable JVM memory.
# Sources find_cmds from the sibling scripts/ directory to get find_sbt.
dir=$(dirname "$0")   # quoted: script path may contain spaces
. "$dir/scripts/find_cmds"
# Print usage information for this launcher to stdout.
help() {
  cat <<EOF
usage: $0 [-h | --help] [--mem N] [options]
where:
-h | --help Print help and exit.
--mem N The default memory for Spark is 4096 (MB), which is also used
to run the non-Hadoop Spark examples. Use a larger integer value
N if you experience out of memory errors.
options Any additional options to pass to SBT.
EOF
}
# Compute the JVM options string for a given heap size.
# Arguments: $1 - heap size in MB
# Outputs:   JAVA_OPTS string on stdout; perm-gen size is heap/4,
#            floored at 512 MB.
# Uses local variables so the caller's globals (notably $mem) are
# not clobbered as a side effect.
java_opts() {
  local mem=$1
  local perm=$(( mem / 4 ))
  (( perm < 512 )) && perm=512
  echo "-Xms${mem}M -Xmx${mem}M -XX:+CMSClassUnloadingEnabled -XX:MaxPermSize=${perm}M"
}
mem=4096
tool=sbt

# Consume our own flags; the first unrecognized argument and everything
# after it is forwarded to SBT.
while [ $# -gt 0 ]
do
  case $1 in
    -h|--h*) help; exit 0 ;;
    --mem)   shift; mem=$1 ;;
    *)       break ;;
  esac
  shift
done
# BUG FIX: opts was never assigned, so the "[options]" advertised in the
# usage text were silently dropped. Capture the remaining args here.
opts=("$@")

dir=$(dirname "$0")
# NOTE(review): $ip is computed but not referenced below — confirm whether
# getip.sh is needed for a side effect or this is dead code.
ip=$("$dir/scripts/getip.sh")
act=$(find_sbt)
if [[ -z $act ]]
then
  echo "ERROR: Could not find $tool" >&2
  exit 1
fi

log="$dir/$tool.log"
JAVA_OPTS=$(java_opts "$mem")
echo "Running SBT. Logging to $log"
echo running: JAVA_OPTS=\"$JAVA_OPTS\" "$act" "${opts[@]}"
# NOOP set in the environment means "print the command but don't run it".
[[ -z $NOOP ]] && ( JAVA_OPTS="$JAVA_OPTS" "$act" "${opts[@]}" 2>&1 | tee "$log" )