@@ -26,7 +26,7 @@
 var_dir = '/var/lib/sparkbox'
 hdfs_replication = 1
 hadoop_version = '2.6.0'
-spark_version = '1.5.2'
+spark_version = '1.6.2'
 # The major version of Hadoop that the Spark binaries were built against
 spark_hadoop_version = '2.6'
|
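The spark_version and spark_hadoop_version settings above conventionally combine into the name of the prebuilt Spark tarball that gets downloaded. A minimal sketch of that naming, assuming the standard Apache convention spark-<version>-bin-hadoop<major>.tgz; the helper name is hypothetical, not from this file:

    # Hypothetical helper; assumes the standard Apache binary naming scheme.
    def spark_tarball_name( spark_version, spark_hadoop_version ):
        return 'spark-%s-bin-hadoop%s.tgz' % ( spark_version, spark_hadoop_version )

    # With the values from this commit:
    assert spark_tarball_name( '1.6.2', '2.6' ) == 'spark-1.6.2-bin-hadoop2.6.tgz'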
@@ -127,7 +127,7 @@ def _setup_package_repos( self ):
 
     def _list_packages_to_install( self ):
         return super( SparkBox, self )._list_packages_to_install( ) + [
-            'oracle-java7-set-default' ]
+            'oracle-java8-set-default' ]
 
     def _get_debconf_selections( self ):
         return super( SparkBox, self )._get_debconf_selections( ) + [
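The selections returned by _get_debconf_selections are cut off at the hunk boundary above. For an unattended Oracle Java install, the usual requirement is pre-accepting the license via debconf; the sketch below illustrates that pattern and is an assumption, not the file's actual contents:

    # Illustrative only -- the selections actually returned here are
    # truncated by the hunk. Pre-accepting the Oracle license is what
    # lets the java8 installer run unattended.
    def _get_debconf_selections( self ):
        return super( SparkBox, self )._get_debconf_selections( ) + [
            'oracle-java8-installer shared/accepted-oracle-license-v1-1 select true' ]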
@@ -195,7 +195,7 @@ def __install_hadoop( self ):
         # Add environment variables to hadoop_env.sh
         hadoop_env = dict(
             HADOOP_LOG_DIR=self._lazy_mkdir( log_dir, "hadoop" ),
-            JAVA_HOME='/usr/lib/jvm/java-7-oracle' )
+            JAVA_HOME='/usr/lib/jvm/java-8-oracle' )
         hadoop_env_sh_path = fmt( "{install_dir}/hadoop/etc/hadoop/hadoop-env.sh" )
         with remote_open( hadoop_env_sh_path, use_sudo=True ) as hadoop_env_sh:
             hadoop_env_sh.write( '\n' )
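The hadoop_env dict above is appended to hadoop-env.sh as shell export lines through the remote_open handle; the code after the write( '\n' ) call falls outside this hunk. A minimal sketch of that serialization step, with an assumed helper name:

    # Assumed helper name; turns a dict of environment variables into
    # 'export KEY="VALUE"' lines appended to an open env script.
    def write_env_exports( env, script_file ):
        for name, value in sorted( env.items( ) ):
            script_file.write( 'export %s="%s"\n' % ( name, value ) )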
@@ -270,7 +270,7 @@ def __install_spark( self ):
             SPARK_LOG_DIR=self._lazy_mkdir( log_dir, "spark" ),
             SPARK_WORKER_DIR=self._lazy_mkdir( spark_dir, "work" ),
             SPARK_LOCAL_DIRS=self._lazy_mkdir( spark_dir, "local" ),
-            JAVA_HOME='/usr/lib/jvm/java-7-oracle',
+            JAVA_HOME='/usr/lib/jvm/java-8-oracle',
             SPARK_MASTER_IP='spark-master',
             HADOOP_CONF_DIR=fmt( "{install_dir}/hadoop/etc/hadoop" ) )
         with remote_open( spark_env_sh_path, use_sudo=True ) as spark_env_sh:
|
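A note on the fmt( ) calls in these hunks: they appear to interpolate {name} placeholders from variables in the calling scope. A minimal sketch assuming that behavior; the project's actual fmt may be implemented differently:

    import inspect

    # Assumption about fmt( )'s behavior, not the project's implementation:
    # fill {name} placeholders from the caller's local variables.
    def fmt( template ):
        caller = inspect.currentframe( ).f_back
        return template.format( **caller.f_locals )

    install_dir = '/opt/sparkbox'  # hypothetical value
    print( fmt( "{install_dir}/hadoop/etc/hadoop" ) )  # /opt/sparkbox/hadoop/etc/hadoop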