Skip to content

Commit a564a63

Browse files
authored
Merge pull request #233 from fnothaft/issues/231-spark-162-java8
Move to Spark 1.6.2 and Java 8 (resolves #231)
2 parents 4e82a14 + 0006937 commit a564a63

File tree

2 files changed

+5
-5
lines changed

2 files changed

+5
-5
lines changed

spark/src/cgcloud/spark/spark_box.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@
2626
var_dir = '/var/lib/sparkbox'
2727
hdfs_replication = 1
2828
hadoop_version = '2.6.0'
29-
spark_version = '1.5.2'
29+
spark_version = '1.6.2'
3030
# The major version of Hadoop that the Spark binaries were built against
3131
spark_hadoop_version = '2.6'
3232

@@ -127,7 +127,7 @@ def _setup_package_repos( self ):
127127

128128
def _list_packages_to_install( self ):
129129
return super( SparkBox, self )._list_packages_to_install( ) + [
130-
'oracle-java7-set-default' ]
130+
'oracle-java8-set-default' ]
131131

132132
def _get_debconf_selections( self ):
133133
return super( SparkBox, self )._get_debconf_selections( ) + [
@@ -195,7 +195,7 @@ def __install_hadoop( self ):
195195
# Add environment variables to hadoop_env.sh
196196
hadoop_env = dict(
197197
HADOOP_LOG_DIR=self._lazy_mkdir( log_dir, "hadoop" ),
198-
JAVA_HOME='/usr/lib/jvm/java-7-oracle' )
198+
JAVA_HOME='/usr/lib/jvm/java-8-oracle' )
199199
hadoop_env_sh_path = fmt( "{install_dir}/hadoop/etc/hadoop/hadoop-env.sh" )
200200
with remote_open( hadoop_env_sh_path, use_sudo=True ) as hadoop_env_sh:
201201
hadoop_env_sh.write( '\n' )
@@ -270,7 +270,7 @@ def __install_spark( self ):
270270
SPARK_LOG_DIR=self._lazy_mkdir( log_dir, "spark" ),
271271
SPARK_WORKER_DIR=self._lazy_mkdir( spark_dir, "work" ),
272272
SPARK_LOCAL_DIRS=self._lazy_mkdir( spark_dir, "local" ),
273-
JAVA_HOME='/usr/lib/jvm/java-7-oracle',
273+
JAVA_HOME='/usr/lib/jvm/java-8-oracle',
274274
SPARK_MASTER_IP='spark-master',
275275
HADOOP_CONF_DIR=fmt( "{install_dir}/hadoop/etc/hadoop" ) )
276276
with remote_open( spark_env_sh_path, use_sudo=True ) as spark_env_sh:

spark/src/cgcloud/spark/test/test_spark.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -123,7 +123,7 @@ def word_count( ):
123123
body = dedent( '\n'.join( getsource( word_count ).split( '\n' )[ 1: ] ) )
124124
self._send_file( master, body, script )
125125

126-
self._ssh( master, 'spark-submit --driver-memory 64m --executor-memory 64m ' + script )
126+
self._ssh( master, 'spark-submit ' + script )
127127
self._ssh( master, 'hdfs dfs -get /test.txt.counts' )
128128
self._ssh( master, 'test -f test.txt.counts/_SUCCESS' )
129129
for i in xrange( num_slaves ):

0 commit comments

Comments (0)