# Workflow file for this run
# PR #65 — fix: Eliminate RDD usage across SynapseML for Spark 4.0 compatibility
---
# PR validation workflow: runs Python formatting, Scala style checks, and a
# compile pass on every pull request targeting master. Documentation-only
# changes (markdown, docs/, website/) are skipped via paths-ignore.
name: PR Validation

# NOTE: `on` is a YAML 1.1 boolean-looking key; GitHub's loader handles it,
# but generic linters may need `truthy` suppressed here.
on:
  pull_request:
    branches: [ "master" ]
    paths-ignore: [ "**.md", "docs/**", "website/**" ]

jobs:
  python-style:
    name: Python Style Check
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Set up Python
        uses: actions/setup-python@v5
        with:
          # Quoted so "3.12" stays a string (unquoted it would parse as float 3.12).
          python-version: "3.12"
      - name: Check formatting with black
        # First invocation prints a colored diff for the log; second fails the
        # job if any file is unformatted. docs/ is excluded from both.
        run: |
          pip install -q 'black[jupyter]==22.3.0'
          black --diff --color --extend-exclude 'docs/' . && black --check -q --extend-exclude 'docs/' .

  compile-and-lint:
    name: Compile & Style Check
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v4
      - name: Set up JDK 11
        uses: actions/setup-java@v4
        with:
          distribution: temurin
          # Quoted so the version is a string, not the integer 11.
          java-version: "11"
          # setup-java's built-in sbt dependency caching.
          cache: sbt
      - name: Install sbt
        # Adds the official scala-sbt apt repository and its signing key, then
        # installs sbt. NOTE(review): `apt-key add` is deprecated on newer
        # Ubuntu images — consider migrating to a keyring file in
        # /etc/apt/keyrings if the runner image drops apt-key.
        run: |
          echo "deb https://repo.scala-sbt.org/scalasbt/debian all main" | sudo tee /etc/apt/sources.list.d/sbt.list
          curl -sL "https://keyserver.ubuntu.com/pks/lookup?op=get&search=0x2EE0EA64E40A89B84B2DF73499E82A75642AC823" | sudo apt-key add
          sudo apt-get update -q
          sudo apt-get install -yq sbt
      - name: Scalastyle check
        # `test:scalastyle` runs the style check over test sources as well.
        run: sbt scalastyle test:scalastyle
      - name: Compile
        run: sbt compile test:compile