-
Notifications
You must be signed in to change notification settings - Fork 65
Description
When I installed Spark 1.6 with CDH, a strange thing happened. During CSD initialization, the HADOOP_HOME path is set incorrectly — it is missing the `/lib/hadoop` suffix — which causes HDFS initialization to fail. As a result, the Spark history log directory resolves to `file:/user/spark/applicationHistory` instead of `hdfs:/user/spark/applicationHistory`, as shown in the trace below:
`++ BIGTOP_DEFAULTS_DIR=
++ #export HADOOP_HOME=/opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45/
++ HADOOP_HOME=/opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45/
++ export HDFS_BIN=/opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45//../../bin/hdfs
++ HDFS_BIN=/opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45//../../bin/hdfs
++ export HADOOP_CONF_DIR=/opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf
++ HADOOP_CONF_DIR=/opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf
++ '[' '!' -d /opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf ']'
++ DEFAULT_SPARK_HOME=/usr/lib/spark
+++ readlink -m /opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45/lib/spark
++ SPARK_HOME=/opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45/lib/spark
++ export SPARK_HOME=/opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45/lib/spark
++ SPARK_HOME=/opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45/lib/spark
++ export SPARK_CONF_DIR=/opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf
++ SPARK_CONF_DIR=/opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf
++ '[' '!' -d /opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf ']'
++ export SPARK_ENV=/opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh
++ SPARK_ENV=/opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-env.sh
++ export SPARK_DEFAULTS=/opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-defaults.conf
++ SPARK_DEFAULTS=/opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf/spark-defaults.conf
++ '[' -f /opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/log4j.properties ']'
++ cp /opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/log4j.properties /opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-conf
++ export 'SPARK_DAEMON_JAVA_OPTS= -Djava.net.preferIPv4Stack=true'
++ SPARK_DAEMON_JAVA_OPTS=' -Djava.net.preferIPv4Stack=true'
+++ readlink -m /opt/cloudera/parcels
++ export PARCELS_ROOT=/opt/cloudera/parcels
++ PARCELS_ROOT=/opt/cloudera/parcels
- case $1 in
- start_history_server
- log 'Starting Spark History Server'
++ date - timestamp='2017年 05月 18日 星期四 17:46:23 CST'
- echo '2017年 05月 18日 星期四 17:46:23 CST: Starting Spark History Server'
- echo '2017年 05月 18日 星期四 17:46:23 CST: Starting Spark History Server'
2017年 05月 18日 星期四 17:46:23 CST: Starting Spark History Server - local CONF_FILE=/opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/spark-history-server.conf
++ get_default_fs /opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf
++ /opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45//../../bin/hdfs --config /opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/yarn-conf getconf -confKey fs.defaultFS
#/opt/cm-5.7.0/run/cloudera-scm-agent/process/137-spark_on_yarn-SPARK_YARN_HISTORY_SERVER/scripts/common.sh: line 86: /opt/cloudera/parcels/CDH-5.7.0-1.cdh5.7.0.p0.45//../../bin/hdfs: 没有那个文件或目录 [translation: "No such file or directory"] - local LOG_DIR=/user/spark/applicationHistory`
I modified the HADOOP_HOME setting in common.sh inside SPARK_ON_YARN-5.7.0.jar, but the change had no effect.