Skip to content

Commit 775723f

Browse files
committed
Update PR-test.yml
1 parent 2ad61ef commit 775723f

File tree

1 file changed

+99
-37
lines changed

1 file changed

+99
-37
lines changed

.github/workflows/PR-test.yml

Lines changed: 99 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -1,46 +1,108 @@
11
name: PR Test
22

3-
on: push
3+
on:
4+
push:
5+
branches: [ main, master ]
6+
paths-ignore:
7+
- '**.md'
8+
- 'docs/**'
9+
pull_request:
10+
branches: [ main, master ]
11+
paths-ignore:
12+
- '**.md'
13+
- 'docs/**'
14+
15+
# Auto-cancel in-progress runs for the same branch/PR
16+
concurrency:
17+
group: ${{ github.workflow }}-${{ github.ref }}
18+
cancel-in-progress: true
19+
20+
# Principle of least privilege
21+
permissions:
22+
contents: read
423

524
jobs:
625
run-tests:
26+
name: Tests (${{ matrix.os }} • py${{ matrix.python }})
727
runs-on: ${{ matrix.os }}
28+
timeout-minutes: 60
829
strategy:
30+
fail-fast: false
931
matrix:
10-
os: [ubuntu-latest] #, macos-latest, windows-latest]
11-
env:
12-
OS: ${{ matrix.os }}
13-
PYTHON: '3.11.*'
32+
os: [ ubuntu-latest ] # add macos-latest, windows-latest as needed
33+
python: [ "3.11" ]
34+
1435
steps:
15-
- name: Cancel Workflow Action
16-
uses: styfle/cancel-workflow-action@0.9.1
17-
with:
18-
access_token: ${{ github.token }}
19-
- name: Checkout
20-
uses: actions/checkout@v2
21-
- name: Set up Python
22-
uses: actions/setup-python@v2
23-
with:
24-
python-version: '3.11.*'
25-
- name: Set up conda environment
26-
uses: conda-incubator/setup-miniconda@v2
27-
with:
28-
python-version: '3.11.*'
29-
miniforge-version: latest
30-
use-mamba: true
31-
channels: conda-forge,franklab,edeno
32-
channel-priority: true
33-
activate-environment: replay_trajectory_classification
34-
environment-file: environment.yml
35-
- name: Install replay_trajectory_classification
36-
shell: bash -l {0}
37-
run: |
38-
pip install -e .
39-
- name: Test notebooks
40-
shell: bash -l {0}
41-
run: |
42-
jupyter nbconvert --to notebook --ExecutePreprocessor.kernel_name=python3 --execute notebooks/tutorial/01-Introduction_and_Data_Format.ipynb
43-
jupyter nbconvert --to notebook --ExecutePreprocessor.kernel_name=python3 --execute notebooks/tutorial/02-Decoding_with_Sorted_Spikes.ipynb
44-
jupyter nbconvert --to notebook --ExecutePreprocessor.kernel_name=python3 --execute notebooks/tutorial/03-Decoding_with_Clusterless_Spikes.ipynb
45-
jupyter nbconvert --to notebook --ExecutePreprocessor.kernel_name=python3 --execute notebooks/tutorial/04-Classifying_with_Sorted_Spikes.ipynb
46-
jupyter nbconvert --to notebook --ExecutePreprocessor.kernel_name=python3 --execute notebooks/tutorial/05-Classifying_with_Clusterless_Spikes.ipynb
36+
- name: Checkout
37+
uses: actions/checkout@v4
38+
with:
39+
fetch-depth: 0
40+
41+
# Use Miniforge + mamba for faster, reproducible env solves
42+
- name: Set up Conda (Miniforge)
43+
uses: conda-incubator/setup-miniconda@v3
44+
with:
45+
miniforge-version: latest
46+
use-mamba: true
47+
auto-update-conda: false
48+
auto-activate-base: false
49+
channel-priority: strict
50+
channels: conda-forge,franklab,edeno
51+
activate-environment: replay_trajectory_classification
52+
environment-file: environment.yml
53+
python-version: ${{ matrix.python }}
54+
55+
# Cache conda packages to speed up solves
56+
- name: Cache conda pkgs
57+
uses: actions/cache@v4
58+
with:
59+
path: ~/.conda/pkgs
60+
key: ${{ runner.os }}-conda-${{ hashFiles('environment.yml') }}
61+
restore-keys: |
62+
${{ runner.os }}-conda-
63+
64+
- name: Show Conda info
65+
shell: bash -l {0}
66+
run: |
67+
conda info
68+
conda list
69+
70+
- name: Install package (editable)
71+
shell: bash -l {0}
72+
run: |
73+
python -V
74+
pip install --upgrade pip
75+
pip install -e .
76+
77+
# Execute tutorial notebooks to ensure they run end-to-end
78+
- name: Test notebooks
79+
shell: bash -l {0}
80+
env:
81+
NB_KERNEL: python3
82+
run: |
83+
set -euo pipefail
84+
for nb in \
85+
notebooks/tutorial/01-Introduction_and_Data_Format.ipynb \
86+
notebooks/tutorial/02-Decoding_with_Sorted_Spikes.ipynb \
87+
notebooks/tutorial/03-Decoding_with_Clusterless_Spikes.ipynb \
88+
notebooks/tutorial/04-Classifying_with_Sorted_Spikes.ipynb \
89+
notebooks/tutorial/05-Classifying_with_Clusterless_Spikes.ipynb
90+
do
91+
echo "Executing $nb"
92+
jupyter nbconvert \
93+
--to notebook \
94+
--inplace \
95+
--ExecutePreprocessor.kernel_name="$NB_KERNEL" \
96+
--ExecutePreprocessor.timeout=1800 \
97+
--execute "$nb"
98+
done
99+
100+
# Always upload executed notebooks to aid debugging
101+
- name: Upload executed notebooks
102+
if: always()
103+
uses: actions/upload-artifact@v4
104+
with:
105+
name: executed-notebooks-${{ matrix.os }}-py${{ matrix.python }}
106+
path: |
107+
notebooks/tutorial/*.ipynb
108+
if-no-files-found: warn

0 commit comments

Comments (0)