Enable spark 3.1 (Intel-bigdata#670)
* enable spark 3.1

Signed-off-by: minmingzhu <[email protected]>

* enable spark 3.1

Signed-off-by: minmingzhu <[email protected]>

* update

Signed-off-by: minmingzhu <[email protected]>

* Update .travis.yml

* update .travis.yml

Signed-off-by: minmingzhu <[email protected]>

* update .travis.yml

Signed-off-by: minmingzhu <[email protected]>

* update .travis.yml

Signed-off-by: minmingzhu <[email protected]>
minmingzhu authored May 12, 2021
1 parent af07288 commit 827c9f6
Showing 5 changed files with 74 additions and 1 deletion.
39 changes: 39 additions & 0 deletions .travis.yml
@@ -21,16 +21,19 @@ install:
- cd $HOME/downloads
- "[ -f spark-2.4.0-bin-hadoop2.7.tgz ] || wget https://archive.apache.org/dist/spark/spark-2.4.0/spark-2.4.0-bin-hadoop2.7.tgz"
- "[ -f spark-3.0.0-bin-hadoop3.2.tgz ] || wget https://archive.apache.org/dist/spark/spark-3.0.0/spark-3.0.0-bin-hadoop3.2.tgz"
- "[ -f spark-3.1.1-bin-hadoop3.2.tgz ] || wget https://archive.apache.org/dist/spark/spark-3.1.1/spark-3.1.1-bin-hadoop3.2.tgz"
- "[ -f hadoop-2.7.7.tar.gz ] || wget https://archive.apache.org/dist/hadoop/core/hadoop-2.7.7/hadoop-2.7.7.tar.gz"
- "[ -f hadoop-3.2.1.tar.gz ] || wget https://archive.apache.org/dist/hadoop/core/hadoop-3.2.1/hadoop-3.2.1.tar.gz"
- cd /opt/
- tar -xzf $HOME/downloads/spark-2.4.0-bin-hadoop2.7.tgz
- tar -xzf $HOME/downloads/spark-3.0.0-bin-hadoop3.2.tgz
- tar -xzf $HOME/downloads/spark-3.1.1-bin-hadoop3.2.tgz
- tar -xzf $HOME/downloads/hadoop-2.7.7.tar.gz
- tar -xzf $HOME/downloads/hadoop-3.2.1.tar.gz
- cd ${hibench}
- cp ./travis/spark-env.sh /opt/spark-2.4.0-bin-hadoop2.7/conf/
- cp ./travis/spark-env.sh /opt/spark-3.0.0-bin-hadoop3.2/conf/
- cp ./travis/spark-env.sh /opt/spark-3.1.1-bin-hadoop3.2/conf/
- cp ./travis/core-site.xml /opt/hadoop-2.7.7/etc/hadoop/
- cp ./travis/hdfs-site.xml /opt/hadoop-2.7.7/etc/hadoop/
- cp ./travis/mapred-site.xml /opt/hadoop-2.7.7/etc/hadoop/
@@ -49,6 +52,7 @@ jobs:
- mvn clean package -q -Dmaven.javadoc.skip=true -Dspark=2.4 -Dscala=2.11 -Dhadoop=3.0 -Dhive=3.0
- mvn clean package -q -Dmaven.javadoc.skip=true -Dspark=2.4 -Dscala=2.11 -Dhadoop=3.1 -Dhive=3.0
- mvn clean package -q -Dmaven.javadoc.skip=true -Dspark=3.0 -Dscala=2.12
- mvn clean package -q -Dmaven.javadoc.skip=true -Dspark=3.1 -Dscala=2.12

- name: spark_2.4_hadoop_2.7_micro
before_script:
@@ -119,3 +123,38 @@ jobs:
script:
- cp ./travis/benchmarks_graph.lst ./conf/benchmarks.lst
- . ./travis/build_and_run_all_spark_3.0_hadoop_3.2.sh

- name: spark_3.1_hadoop_3.2_micro
before_script:
- . ./travis/export_env_spark_3.1_hadoop_3.2.sh
script:
- cp ./travis/benchmarks_micro.lst ./conf/benchmarks.lst
- . ./travis/build_and_run_all_spark_3.1_hadoop_3.2.sh

- name: spark_3.1_hadoop_3.2_sql
before_script:
- . ./travis/export_env_spark_3.1_hadoop_3.2.sh
script:
- cp ./travis/benchmarks_sql.lst ./conf/benchmarks.lst
- . ./travis/build_and_run_all_spark_3.1_hadoop_3.2.sh

- name: spark_3.1_hadoop_3.2_websearch
before_script:
- . ./travis/export_env_spark_3.1_hadoop_3.2.sh
script:
- cp ./travis/benchmarks_websearch.lst ./conf/benchmarks.lst
- . ./travis/build_and_run_all_spark_3.1_hadoop_3.2.sh

- name: spark_3.1_hadoop_3.2_ml
before_script:
- . ./travis/export_env_spark_3.1_hadoop_3.2.sh
script:
- cp ./travis/benchmarks_ml_2.lst ./conf/benchmarks.lst
- . ./travis/build_and_run_all_spark_3.1_hadoop_3.2.sh

- name: spark_3.1_hadoop_3.2_graph
before_script:
- . ./travis/export_env_spark_3.1_hadoop_3.2.sh
script:
- cp ./travis/benchmarks_graph.lst ./conf/benchmarks.lst
- . ./travis/build_and_run_all_spark_3.1_hadoop_3.2.sh
2 changes: 1 addition & 1 deletion README.md
@@ -152,7 +152,7 @@ There are totally 29 workloads in HiBench. The workloads are divided into 6 cate
### Supported Hadoop/Spark/Flink/Storm/Gearpump releases: ###

- Hadoop: Apache Hadoop 3.0.x, 3.1.x, 3.2.x, 2.x, CDH5, HDP
- Spark: Spark 2.4.x, Spark 3.0.x
- Spark: Spark 2.4.x, Spark 3.0.x, Spark 3.1.x
- Flink: 1.0.3
- Storm: 1.0.1
- Gearpump: 0.8.1
14 changes: 14 additions & 0 deletions sparkbench/pom.xml
@@ -170,6 +170,20 @@
</activation>
</profile>

<profile>
<id>spark3.1</id>
<properties>
<spark.version>3.1.1</spark.version>
<spark.bin.version>3.1</spark.bin.version>
</properties>
<activation>
<property>
<name>spark</name>
<value>3.1</value>
</property>
</activation>
</profile>

<profile>
<id>defaultScalaVersion</id>
<properties>
14 changes: 14 additions & 0 deletions travis/build_and_run_all_spark_3.1_hadoop_3.2.sh
@@ -0,0 +1,14 @@
#!/usr/bin/env bash

mvn clean package -q -Dmaven.javadoc.skip=true -Dspark=3.1 -Dscala=2.12
cp ./travis/hibench.conf ./conf/
cp ./travis/spark.conf ./conf/
cp ./travis/hadoop.conf ./conf/
cp ./travis/hadoop-layout.sh /opt/hadoop-3.2.1/libexec
sed -i '1 i hibench.hadoop.home /opt/hadoop-3.2.1' ./conf/hadoop.conf
sed -i '1 i hibench.spark.home /opt/spark-3.1.1-bin-hadoop3.2\nhibench.spark.version spark3.1' ./conf/spark.conf
sed -i '1 i hibench.hadoop.examples.jar ${hibench.hadoop.home}/share/hadoop/mapreduce/hadoop-mapreduce-examples-3.2.1.jar\nhibench.hadoop.examples.test.jar ${hibench.hadoop.home}/share/hadoop/mapreduce/hadoop-mapreduce-client-jobclient-3.2.1-tests.jar\nhibench.hive.release apache-hive-3.0.0-bin' ./conf/hibench.conf
sudo -E ./travis/configssh.sh
sudo -E ./travis/restart_hadoop_spark.sh
${HADOOP_HOME}/bin/yarn node -list 2
sudo -E ./bin/run_all.sh
6 changes: 6 additions & 0 deletions travis/export_env_spark_3.1_hadoop_3.2.sh
@@ -0,0 +1,6 @@
#!/usr/bin/env bash
echo "export HADOOP_HOME=/opt/hadoop-3.2.1" >> ~/.bashrc
echo "export SPARK_HOME=/opt/spark-3.1.1-bin-hadoop3.2" >> ~/.bashrc
echo "source /opt/hadoop-3.2.1/libexec/hadoop-layout.sh" >> ~/.bashrc
echo "export JAVA_OPTS=-Xmx512m" >> ~/.bashrc
source ~/.bashrc
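
For local verification outside Travis, the same Spark 3.1 profile added in sparkbench/pom.xml can be selected with the Maven properties used by the CI scripts above. A minimal sketch, assuming Spark 3.1.1 is unpacked under /opt/spark-3.1.1-bin-hadoop3.2 and Hadoop 3.2.1 under /opt/hadoop-3.2.1, matching the layout these scripts set up:

#!/usr/bin/env bash
# Sketch only: the paths below assume the CI layout used in this commit.
export HADOOP_HOME=/opt/hadoop-3.2.1
export SPARK_HOME=/opt/spark-3.1.1-bin-hadoop3.2
# Build HiBench against the new spark3.1 profile (activated by -Dspark=3.1).
mvn clean package -q -Dmaven.javadoc.skip=true -Dspark=3.1 -Dscala=2.12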
