-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy path: Spark_Weekly_Build_copy_dist.xml
109 lines (100 loc) · 4.63 KB
/
Spark_Weekly_Build_copy_dist.xml
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
<?xml version='1.0' encoding='UTF-8'?>
<!--
  Jenkins freestyle job: clones Apache Spark on a remote build machine over SSH,
  builds a binary distribution (optionally with Hive/JDBC support), and copies
  the resulting tarball back to this Jenkins master.
-->
<project>
  <actions/>
  <description></description>
  <keepDependencies>false</keepDependencies>
  <properties>
    <hudson.model.ParametersDefinitionProperty>
      <parameterDefinitions>
        <hudson.model.TextParameterDefinition>
          <name>BUILD_MACHINE</name>
          <description>Please enter hostname or IP of machine where Spark will be built and tested.</description>
          <defaultValue>9.3.126.18</defaultValue>
        </hudson.model.TextParameterDefinition>
        <hudson.model.TextParameterDefinition>
          <name>USER</name>
          <description>Enter the USER on the remote Build Machine under whom the build and testing will be done.</description>
          <defaultValue>test</defaultValue>
        </hudson.model.TextParameterDefinition>
        <hudson.model.PasswordParameterDefinition>
          <name>USER_PWD</name>
          <description>Enter the password for the USER of BUILD_MACHINE</description>
          <!-- Value below is Jenkins-encrypted, not plaintext. -->
          <defaultValue>{AQAAABAAAAAQ8GC6ZH27d0pOcOJRtHbxVUT/1rOOh1KSFwejGxgH7LE=}</defaultValue>
        </hudson.model.PasswordParameterDefinition>
        <hudson.model.TextParameterDefinition>
          <name>SPARK_BRANCH</name>
          <description>Enter the Spark branch to be cloned and built.</description>
          <defaultValue>2.0</defaultValue>
        </hudson.model.TextParameterDefinition>
        <hudson.model.TextParameterDefinition>
          <name>HADOOP_PROFILE</name>
          <description>Hadoop Profile you want for building cluster. Default is 2.7</description>
          <defaultValue>2.7</defaultValue>
        </hudson.model.TextParameterDefinition>
        <hudson.model.TextParameterDefinition>
          <name>BUILD_WITH_HIVE</name>
          <description>Flag to be set if you want hive setup with JDBC support with spark setup. Please select Y/N.</description>
          <defaultValue>N</defaultValue>
        </hudson.model.TextParameterDefinition>
        <hudson.model.TextParameterDefinition>
          <name>JDK_VAL</name>
          <description>Set OPENJDK or IBMJDK for building and testing Spark.</description>
          <defaultValue>OPENJDK</defaultValue>
        </hudson.model.TextParameterDefinition>
      </parameterDefinitions>
    </hudson.model.ParametersDefinitionProperty>
  </properties>
  <scm class="hudson.scm.NullSCM"/>
  <canRoam>true</canRoam>
  <disabled>false</disabled>
  <blockBuildWhenDownstreamBuilding>false</blockBuildWhenDownstreamBuilding>
  <blockBuildWhenUpstreamBuilding>false</blockBuildWhenUpstreamBuilding>
  <triggers/>
  <concurrentBuild>false</concurrentBuild>
  <builders>
    <hudson.tasks.Shell>
      <command>#!/bin/bash
# IP of this Jenkins master (eth0); the remote machine scp's the tarball back to it.
jenkins_ip=$(/sbin/ip -o -4 addr list eth0 | awk '{print $4}' | cut -d/ -f1)
echo "${jenkins_ip}"
echo ""

# Unquoted heredoc: Jenkins job parameters (${USER}, ${SPARK_BRANCH}, ...) and
# ${jenkins_ip} expand HERE on the master before the script is sent; anything
# prefixed with a backslash-dollar is escaped so it expands on BUILD_MACHINE.
ssh ${USER}@${BUILD_MACHINE} /bin/bash &lt;&lt;EOF
echo "These commands will be run on: \$(uname -a)"
echo "They are executed by: \$(whoami)"
rm -rf spark
git clone --recursive --depth 1 https://github.com/apache/spark.git -b branch-${SPARK_BRANCH}
cd spark || exit 1
if [ "${JDK_VAL}" = "OPENJDK" ]
then
  # Detect the remote distro; OpenJDK 8 install paths differ between Ubuntu and RHEL-style systems.
  if [ "\$(. /etc/os-release; echo \$NAME)" = "Ubuntu" ]; then
    echo "Setting OpenJDK path and JAVA_HOME"
    export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-ppc64el
    export PATH=\$JAVA_HOME/bin:\$JAVA_HOME/jre/bin:\$PATH
  else
    echo "Setting OpenJDK path and JAVA_HOME"
    export JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk
    export PATH=\$JAVA_HOME/bin:\$JAVA_HOME/jre/bin:\$PATH
  fi
elif [ "${JDK_VAL}" = "IBMJDK" ]
then
  # NOTE(review): JAVA_HOME is assumed to be preset on the remote machine for
  # the IBM JDK case; the original (disabled) lookup is kept for reference:
  #export JAVA_HOME=\$(grep -Po '(?&lt;=USER_INSTALL_DIR=).*' \${workDirR}/installer.properties)
  export PATH=\$JAVA_HOME/bin:\$JAVA_HOME/jre/bin:\$PATH
fi
java -version
if [[ "${BUILD_WITH_HIVE}" == "y" || "${BUILD_WITH_HIVE}" == "Y" ]]
then
  echo " Building with Hive and JDBC Support "
  ./dev/make-distribution.sh --name hadoop-${HADOOP_PROFILE} --tgz -Psparkr -Phadoop-${HADOOP_PROFILE} -Phive -Phive-thriftserver -Pyarn
else
  echo " Building without Hive and JDBC Support "
  ./dev/make-distribution.sh --name hadoop-${HADOOP_PROFILE} --tgz -Psparkr -Phadoop-${HADOOP_PROFILE} -Pyarn
fi
# Copy the built distribution back to the Jenkins master's home directory.
scp spark-*-bin-hadoop-${HADOOP_PROFILE}.tgz jenkins@${jenkins_ip}:
EOF
# Propagate the remote build's exit status instead of masking failures with an
# unconditional "exit 0", so the Jenkins build result reflects reality.
exit $?</command>
    </hudson.tasks.Shell>
  </builders>
  <publishers/>
  <buildWrappers/>
</project>