@@ -43,37 +43,10 @@ jobs:
           # cache new stuff.
           key: delta-sbt-cache-spark-master-scala${{ matrix.scala }}
       - name: Install Job dependencies
-        # TODO: update pyspark installation once Spark preview is formally released
         run: |
           sudo apt-get update
           sudo apt-get install -y make build-essential libssl-dev zlib1g-dev libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev libncursesw5-dev xz-utils tk-dev libffi-dev liblzma-dev python3-openssl git
           sudo apt install libedit-dev
-          curl -LO https://github.com/bufbuild/buf/releases/download/v1.28.1/buf-Linux-x86_64.tar.gz
-          mkdir -p ~/buf
-          tar -xvzf buf-Linux-x86_64.tar.gz -C ~/buf --strip-components 1
-          rm buf-Linux-x86_64.tar.gz
-          sudo apt install python3-pip --fix-missing
-          sudo pip3 install pipenv==2021.5.29
-          curl https://pyenv.run | bash
-          export PATH="~/.pyenv/bin:$PATH"
-          eval "$(pyenv init -)"
-          eval "$(pyenv virtualenv-init -)"
-          pyenv install 3.9
-          pyenv global system 3.9
-          pipenv --python 3.9 install
-          pipenv run pip install flake8==3.9.0
-          pipenv run pip install black==23.9.1
-          pipenv run pip install mypy==1.8.0
-          pipenv run pip install mypy-protobuf==3.3.0
-          pipenv run pip install cryptography==37.0.4
-          pipenv run pip install twine==4.0.1
-          pipenv run pip install wheel==0.33.4
-          pipenv run pip install setuptools==41.1.0
-          pipenv run pip install pydocstyle==3.0.0
-          pipenv run pip install pandas==1.4.4
-          pipenv run pip install pyarrow==8.0.0
-          pipenv run pip install numpy==1.21
-          pipenv run pip install https://dist.apache.org/repos/dist/dev/spark/v4.0.0-rc4-bin//pyspark-4.0.0.dev1.tar.gz
         if: steps.git-diff.outputs.diff
       - name: Run Spark Master tests
         # when changing TEST_PARALLELISM_COUNT make sure to also change it in spark_test.yaml