Add sedonaDB worker to Sedona vectorized udfs #4172

Workflow file for this run

# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
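# CI workflow: builds the Sedona JVM modules with Maven against a matrix of
# Spark, Scala, and JDK versions and uploads the shaded jars as artifacts.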
name: Scala and Java build
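# Run on every push to master and on any pull request that touches the JVM
# modules, the top-level pom, or this workflow file.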
on:
  push:
    branches:
      - master
    paths:
      - 'common/**'
      - 'spark/**'
      - 'spark-shaded/**'
      - 'flink/**'
      - 'flink-shaded/**'
      - 'snowflake/**'
      - 'pom.xml'
      - '.github/workflows/java.yml'
  pull_request:
    branches:
      - '*'
    paths:
      - 'common/**'
      - 'spark/**'
      - 'spark-shaded/**'
      - 'flink/**'
      - 'flink-shaded/**'
      - 'snowflake/**'
      - 'pom.xml'
      - '.github/workflows/java.yml'
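# MAVEN_OPTS limits the lifetime of Maven's pooled HTTP connections (a common
# guard against stale-connection failures on CI); DO_NOT_TRACK opts out of
# telemetry for tools that honor that convention; SPARK_LOCAL_IP pins Spark to
# loopback so tests do not depend on the runner's network configuration.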
env:
  MAVEN_OPTS: -Dmaven.wagon.httpconnectionManager.ttlSeconds=60
  DO_NOT_TRACK: true
  SPARK_LOCAL_IP: 127.0.0.1
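# The workflow only needs read access to repository contents.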
permissions:
  contents: read
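# Cancel superseded runs for the same ref, but let every push to master build
# to completion.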
concurrency:
  group: ${{ github.workflow }}-${{ github.ref }}
  cancel-in-progress: ${{ github.ref != 'refs/heads/master' }}
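# One build job fanned out over the supported Spark/Scala/JDK combinations.
# skipTests is empty for every entry here; it is presumably a hook for passing
# flags such as -DskipTests to combinations that should only compile.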
jobs:
  build:
    runs-on: ubuntu-22.04
    strategy:
      fail-fast: true
      matrix:
        include:
          - spark: 4.0.0
            scala: 2.13.8
            jdk: '17'
          - spark: 3.5.4
            scala: 2.12.18
            jdk: '17'
          - spark: 3.5.0
            scala: 2.13.8
            jdk: '11'
            skipTests: ''
          - spark: 3.5.0
            scala: 2.12.15
            jdk: '11'
            skipTests: ''
          - spark: 3.4.0
            scala: 2.13.8
            jdk: '11'
            skipTests: ''
          - spark: 3.4.0
            scala: 2.12.15
            jdk: '11'
            skipTests: ''
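    # Check out the source, install the matrix JDK (Zulu) and Python 3.10, and
    # cache the local Maven repository keyed on the pom files.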
    steps:
      - uses: actions/checkout@v6
        with:
          persist-credentials: false
      - uses: actions/setup-java@v5
        with:
          distribution: 'zulu'
          java-version: ${{ matrix.jdk }}
      - uses: actions/setup-python@v6
        with:
          python-version: '3.10'
      - name: Cache Maven packages
        uses: actions/cache@v5
        with:
          path: ~/.m2
          key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
          restore-keys: ${{ runner.os }}-m2
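      # Main build. For Spark 3.5+ and 4.x, install a matching PySpark plus the
      # Python packages presumably needed by the vectorized UDF tests, and point
      # SPARK_HOME at the pip-installed PySpark. SPARK_COMPAT_VERSION is the
      # first three characters of the Spark version (e.g. 3.5.4 -> 3.5), and the
      # Scala profile uses the first four characters of the Scala version
      # (e.g. 2.12.18 -> 2.12).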
      - env:
          SPARK_VERSION: ${{ matrix.spark }}
          SCALA_VERSION: ${{ matrix.scala }}
          SKIP_TESTS: ${{ matrix.skipTests }}
        run: |
          SPARK_COMPAT_VERSION=${SPARK_VERSION:0:3}
          if [[ "${SPARK_VERSION}" == "3.5"* ]] || [[ "${SPARK_VERSION}" == "4."* ]]; then
            pip install pyspark==$SPARK_VERSION pandas shapely apache-sedona pyarrow
            export SPARK_HOME=$(python -c "import pyspark; print(pyspark.__path__[0])")
          fi
          mvn -q clean install -Dspark=${SPARK_COMPAT_VERSION} -Dscala=${SCALA_VERSION:0:4} -Dspark.version=${SPARK_VERSION} ${SKIP_TESTS}
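      # Stage the shaded jars (the Flink jar is copied only if that module was
      # built) and upload them as one artifact per matrix entry.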
      - run: mkdir staging
      - run: cp spark-shaded/target/sedona-*.jar staging
      - run: |
          [ -d "flink-shaded/target/" ] && cp flink-shaded/target/sedona-*.jar staging 2>/dev/null || true
      - uses: actions/upload-artifact@v6
        with:
          name: generated-jars_spark-${{ matrix.spark }}_scala-${{ matrix.scala }}_jdk-${{ matrix.jdk }}
          path: staging