@@ -1390,7 +1390,7 @@ jobs:
13901390 ccache -s
13911391 "
13921392
1393- spark-test-spark40 :
1393+ spark-test-spark41 :
13941394 needs : build-native-lib-centos-7
13951395 runs-on : ubuntu-22.04
13961396 env :
@@ -1415,11 +1415,11 @@ jobs:
14151415 pip3 install setuptools==77.0.3 && \
14161416 pip3 install pyspark==3.5.5 cython && \
14171417 pip3 install pandas==2.2.3 pyarrow==20.0.0
1418- - name : Prepare Spark Resources for Spark 4.0.1 # TODO remove after image update
1418+ - name : Prepare Spark Resources for Spark 4.1.0 # TODO remove after image update
14191419 run : |
1420- rm -rf /opt/shims/spark40
1421- bash .github/workflows/util/install-spark-resources.sh 4.0
1422- mv /opt/shims/spark40/spark_home/assembly/target/scala-2.12 /opt/shims/spark40/spark_home/assembly/target/scala-2.13
1420+ rm -rf /opt/shims/spark41
1421+ bash .github/workflows/util/install-spark-resources.sh 4.1
1422+ mv /opt/shims/spark41/spark_home/assembly/target/scala-2.12 /opt/shims/spark41/spark_home/assembly/target/scala-2.13
14231423 - name : Build and Run unit test for Spark 4.0.0 with scala-2.13 (other tests)
14241424 run : |
14251425 cd $GITHUB_WORKSPACE/
@@ -1429,7 +1429,7 @@ jobs:
14291429 export PATH=$JAVA_HOME/bin:$PATH
14301430 java -version
14311431 $MVN_CMD clean test -Pspark-4.0 -Pscala-2.13 -Pjava-17 -Pbackends-velox \
1432- -Pspark-ut -DargLine="-Dspark.test.home=/opt/shims/spark40/spark_home/" \
1432+ -Pspark-ut -DargLine="-Dspark.test.home=/opt/shims/spark41/spark_home/ -Dspark.sql.unionOutputPartitioning=false" \
14331433 -DtagsToExclude=org.apache.spark.tags.ExtendedSQLTest,org.apache.gluten.tags.UDFTest,org.apache.gluten.tags.EnhancedFeaturesTest,org.apache.gluten.tags.SkipTest
14341434 - name : Upload test report
14351435 if : always()
@@ -1447,7 +1447,7 @@ jobs:
14471447 **/gluten-ut/**/hs_err_*.log
14481448 **/gluten-ut/**/core.*
14491449
1450- spark-test-spark40-slow :
1450+ spark-test-spark41-slow :
14511451 needs : build-native-lib-centos-7
14521452 runs-on : ubuntu-22.04
14531453 env :
@@ -1465,11 +1465,11 @@ jobs:
14651465 with :
14661466 name : arrow-jars-centos-7-${{github.sha}}
14671467 path : /root/.m2/repository/org/apache/arrow/
1468- - name : Prepare Spark Resources for Spark 4.0.1 # TODO remove after image update
1468+ - name : Prepare Spark Resources for Spark 4.1.0 # TODO remove after image update
14691469 run : |
1470- rm -rf /opt/shims/spark40
1471- bash .github/workflows/util/install-spark-resources.sh 4.0
1472- mv /opt/shims/spark40/spark_home/assembly/target/scala-2.12 /opt/shims/spark40/spark_home/assembly/target/scala-2.13
1470+ rm -rf /opt/shims/spark41
1471+ bash .github/workflows/util/install-spark-resources.sh 4.1
1472+ mv /opt/shims/spark41/spark_home/assembly/target/scala-2.12 /opt/shims/spark41/spark_home/assembly/target/scala-2.13
14731473 - name : Build and Run unit test for Spark 4.0 (slow tests)
14741474 run : |
14751475 cd $GITHUB_WORKSPACE/
@@ -1478,7 +1478,113 @@ jobs:
14781478 export PATH=$JAVA_HOME/bin:$PATH
14791479 java -version
14801480 $MVN_CMD clean test -Pspark-4.0 -Pscala-2.13 -Pjava-17 -Pbackends-velox -Pspark-ut \
1481- -DargLine="-Dspark.test.home=/opt/shims/spark40/spark_home/" \
1481+ -DargLine="-Dspark.test.home=/opt/shims/spark41/spark_home/ -Dspark.sql.unionOutputPartitioning=false" \
1482+ -DtagsToInclude=org.apache.spark.tags.ExtendedSQLTest
1483+ - name : Upload test report
1484+ if : always()
1485+ uses : actions/upload-artifact@v4
1486+ with :
1487+ name : ${{ github.job }}-report
1488+ path : '**/surefire-reports/TEST-*.xml'
1489+ - name : Upload unit tests log files
1490+ if : ${{ !success() }}
1491+ uses : actions/upload-artifact@v4
1492+ with :
1493+ name : ${{ github.job }}-test-log
1494+ path : |
1495+ **/target/*.log
1496+ **/gluten-ut/**/hs_err_*.log
1497+ **/gluten-ut/**/core.*
1498+
1499+ spark-test-spark41 :
1500+ needs : build-native-lib-centos-7
1501+ runs-on : ubuntu-22.04
1502+ env :
1503+ SPARK_TESTING : true
1504+ container : apache/gluten:centos-8-jdk17
1505+ steps :
1506+ - uses : actions/checkout@v2
1507+ - name : Download All Artifacts
1508+ uses : actions/download-artifact@v4
1509+ with :
1510+ name : velox-native-lib-centos-7-${{github.sha}}
1511+ path : ./cpp/build/releases
1512+ - name : Download Arrow Jars
1513+ uses : actions/download-artifact@v4
1514+ with :
1515+ name : arrow-jars-centos-7-${{github.sha}}
1516+ path : /root/.m2/repository/org/apache/arrow/
1517+ - name : Prepare
1518+ run : |
1519+ dnf module -y install python39 && \
1520+ alternatives --set python3 /usr/bin/python3.9 && \
1521+ pip3 install setuptools==77.0.3 && \
1522+ pip3 install pyspark==3.5.5 cython && \
1523+ pip3 install pandas==2.2.3 pyarrow==20.0.0
1524+ - name : Prepare Spark Resources for Spark 4.1.0 # TODO remove after image update
1525+ run : |
1526+ rm -rf /opt/shims/spark41
1527+ bash .github/workflows/util/install-spark-resources.sh 4.1
1528+ mv /opt/shims/spark41/spark_home/assembly/target/scala-2.12 /opt/shims/spark41/spark_home/assembly/target/scala-2.13
1529+ - name : Build and Run unit test for Spark 4.1.0 with scala-2.13 (other tests)
1530+ run : |
1531+ cd $GITHUB_WORKSPACE/
1532+ export SPARK_SCALA_VERSION=2.13
1533+ yum install -y java-17-openjdk-devel
1534+ export JAVA_HOME=/usr/lib/jvm/java-17-openjdk
1535+ export PATH=$JAVA_HOME/bin:$PATH
1536+ java -version
1537+ $MVN_CMD clean test -Pspark-4.1 -Pscala-2.13 -Pjava-17 -Pbackends-velox \
1538+ -Pspark-ut -DargLine="-Dspark.test.home=/opt/shims/spark41/spark_home/" \
1539+ -DtagsToExclude=org.apache.spark.tags.ExtendedSQLTest,org.apache.gluten.tags.UDFTest,org.apache.gluten.tags.EnhancedFeaturesTest,org.apache.gluten.tags.SkipTest
1540+ - name : Upload test report
1541+ if : always()
1542+ uses : actions/upload-artifact@v4
1543+ with :
1544+ name : ${{ github.job }}-report
1545+ path : '**/surefire-reports/TEST-*.xml'
1546+ - name : Upload unit tests log files
1547+ if : ${{ !success() }}
1548+ uses : actions/upload-artifact@v4
1549+ with :
1550+ name : ${{ github.job }}-test-log
1551+ path : |
1552+ **/target/*.log
1553+ **/gluten-ut/**/hs_err_*.log
1554+ **/gluten-ut/**/core.*
1555+
1556+ spark-test-spark41-slow :
1557+ needs : build-native-lib-centos-7
1558+ runs-on : ubuntu-22.04
1559+ env :
1560+ SPARK_TESTING : true
1561+ container : apache/gluten:centos-8-jdk17
1562+ steps :
1563+ - uses : actions/checkout@v2
1564+ - name : Download All Artifacts
1565+ uses : actions/download-artifact@v4
1566+ with :
1567+ name : velox-native-lib-centos-7-${{github.sha}}
1568+ path : ./cpp/build/releases
1569+ - name : Download Arrow Jars
1570+ uses : actions/download-artifact@v4
1571+ with :
1572+ name : arrow-jars-centos-7-${{github.sha}}
1573+ path : /root/.m2/repository/org/apache/arrow/
1574+ - name : Prepare Spark Resources for Spark 4.1.0 # TODO remove after image update
1575+ run : |
1576+ rm -rf /opt/shims/spark41
1577+ bash .github/workflows/util/install-spark-resources.sh 4.1
1578+ mv /opt/shims/spark41/spark_home/assembly/target/scala-2.12 /opt/shims/spark41/spark_home/assembly/target/scala-2.13
1579+ - name : Build and Run unit test for Spark 4.1 (slow tests)
1580+ run : |
1581+ cd $GITHUB_WORKSPACE/
1582+ yum install -y java-17-openjdk-devel
1583+ export JAVA_HOME=/usr/lib/jvm/java-17-openjdk
1584+ export PATH=$JAVA_HOME/bin:$PATH
1585+ java -version
1586+ $MVN_CMD clean test -Pspark-4.1 -Pscala-2.13 -Pjava-17 -Pbackends-velox -Pspark-ut \
1587+ -DargLine="-Dspark.test.home=/opt/shims/spark41/spark_home/" \
14821588 -DtagsToInclude=org.apache.spark.tags.ExtendedSQLTest
14831589 - name : Upload test report
14841590 if : always()
0 commit comments