Skip to content

Commit 2077e22

Browse files
committed
Configure JAVA_HOME and SCALA_HOME from environment
1 parent 1984be2 commit 2077e22

File tree

6 files changed

+20
-7
lines changed

6 files changed

+20
-7
lines changed

deploy_templates.py

+3-1
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,9 @@
4545
"hdfs_data_dirs": os.getenv("MESOS_HDFS_DATA_DIRS"),
4646
"mapred_local_dirs": os.getenv("MESOS_MAPRED_LOCAL_DIRS"),
4747
"spark_local_dirs": os.getenv("MESOS_SPARK_LOCAL_DIRS"),
48-
"default_spark_mem": "%dm" % spark_mb
48+
"default_spark_mem": "%dm" % spark_mb,
49+
"scala_home": os.getenv("SCALA_HOME"),
50+
"java_home": os.getenv("JAVA_HOME")
4951
}
5052

5153
template_dir="/root/spark-ec2/templates"

setup.sh

+13-2
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,9 @@
33
# Make sure we are in the spark-ec2 directory
44
cd /root/spark-ec2
55

6+
# Load the environment variables specific to this AMI
7+
source /root/.bash_profile
8+
69
# Load the cluster variables set by the deploy script
710
source ec2-variables.sh
811

@@ -29,10 +32,18 @@ MASTERS=`cat masters`
2932
NUM_MASTERS=`cat masters | wc -l`
3033
OTHER_MASTERS=`cat masters | sed '1d'`
3134
SLAVES=`cat slaves`
32-
33-
JAVA_HOME=/usr/lib/jvm/java-1.6.0-openjdk.x86_64
3435
SSH_OPTS="-o StrictHostKeyChecking=no -o ConnectTimeout=5"
3536

37+
if [[ "x$JAVA_HOME" == "x" ]] ; then
38+
echo "Expected JAVA_HOME to be set in .bash_profile!"
39+
exit 1
40+
fi
41+
42+
if [[ "x$SCALA_HOME" == "x" ]] ; then
43+
echo "Expected SCALA_HOME to be set in .bash_profile!"
44+
exit 1
45+
fi
46+
3647
if [[ `tty` == "not a tty" ]] ; then
3748
echo "Expecting a tty or pty! (use the ssh -t option)."
3849
exit 1

templates/root/ephemeral-hdfs/conf/hadoop-env.sh

+1-1
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
# remote nodes.
77

88
# The java implementation to use. Required.
9-
export JAVA_HOME=/usr/lib/jvm/java-1.6.0
9+
export JAVA_HOME={{java_home}}
1010

1111
# Extra Java CLASSPATH elements. Optional.
1212
# export HADOOP_CLASSPATH=

templates/root/persistent-hdfs/conf/hadoop-env.sh

+1-1
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
# remote nodes.
77

88
# The java implementation to use. Required.
9-
export JAVA_HOME=/usr/lib/jvm/java-1.6.0
9+
export JAVA_HOME={{java_home}}
1010

1111
# Extra Java CLASSPATH elements. Optional.
1212
# export HADOOP_CLASSPATH=

templates/root/spark-ec2/mesos/hadoop-framework-conf/hadoop-env.sh

+1-1
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@
66
# remote nodes.
77

88
# The java implementation to use. Required.
9-
export JAVA_HOME=/usr/lib/jvm/java-1.6.0-openjdk.x86_64
9+
export JAVA_HOME={{java_home}}
1010

1111
# Mesos build directory, useful for finding JARs and the native library.
1212
export MESOS_BUILD_DIR=/root/mesos/build

templates/root/spark/conf/spark-env.sh

+1-1
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
# be in the same format as the JVM's -Xmx option, e.g. 300m or 1g).
1111
# - SPARK_LIBRARY_PATH, to add extra search paths for native libraries.
1212

13-
export SCALA_HOME=/root/scala-2.9.2
13+
export SCALA_HOME={{scala_home}}
1414
export MESOS_NATIVE_LIBRARY=/usr/local/lib/libmesos.so
1515

1616
# Set Spark's memory per machine; note that you can also comment this out

0 commit comments

Comments (0)