Skip to content

Commit 825fb9a

Browse files
Remove python dependencies from spark_master_yaml
1 parent e3f3e16 commit 825fb9a

File tree

2 files changed

+1
-27
lines changed

2 files changed

+1
-27
lines changed

.github/workflows/spark_master_python_test.yaml

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -39,6 +39,7 @@ jobs:
3939
# cache new stuff.
4040
key: delta-sbt-cache-spark-master-scala${{ matrix.scala }}
4141
- name: Install Job dependencies
42+
# TODO: update pyspark installation once Spark preview is formally released
4243
run: |
4344
sudo apt-get update
4445
sudo apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python3-openssl git

.github/workflows/spark_master_test.yaml

Lines changed: 0 additions & 27 deletions
Original file line number | Diff line number | Diff line change
@@ -43,37 +43,10 @@ jobs:
4343
# cache new stuff.
4444
key: delta-sbt-cache-spark-master-scala${{ matrix.scala }}
4545
- name: Install Job dependencies
46-
# TODO: update pyspark installation once Spark preview is formally released
4746
run: |
4847
sudo apt-get update
4948
sudo apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python3-openssl git
5049
sudo apt install libedit-dev
51-
curl -LO https://github.com/bufbuild/buf/releases/download/v1.28.1/buf-Linux-x86_64.tar.gz
52-
mkdir -p ~/buf
53-
tar -xvzf buf-Linux-x86_64.tar.gz -C ~/buf --strip-components 1
54-
rm buf-Linux-x86_64.tar.gz
55-
sudo apt install python3-pip --fix-missing
56-
sudo pip3 install pipenv==2021.5.29
57-
curl https://pyenv.run | bash
58-
export PATH="~/.pyenv/bin:$PATH"
59-
eval "$(pyenv init -)"
60-
eval "$(pyenv virtualenv-init -)"
61-
pyenv install 3.9
62-
pyenv global system 3.9
63-
pipenv --python 3.9 install
64-
pipenv run pip install flake8==3.9.0
65-
pipenv run pip install black==23.9.1
66-
pipenv run pip install mypy==1.8.0
67-
pipenv run pip install mypy-protobuf==3.3.0
68-
pipenv run pip install cryptography==37.0.4
69-
pipenv run pip install twine==4.0.1
70-
pipenv run pip install wheel==0.33.4
71-
pipenv run pip install setuptools==41.1.0
72-
pipenv run pip install pydocstyle==3.0.0
73-
pipenv run pip install pandas==1.4.4
74-
pipenv run pip install pyarrow==8.0.0
75-
pipenv run pip install numpy==1.21
76-
pipenv run pip install https://dist.apache.org/repos/dist/dev/spark/v4.0.0-rc4-bin//pyspark-4.0.0.dev1.tar.gz
7750
if: steps.git-diff.outputs.diff
7851
- name: Run Spark Master tests
7952
# when changing TEST_PARALLELISM_COUNT make sure to also change it in spark_test.yaml

0 commit comments

Comments
 (0)