Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
123 changes: 22 additions & 101 deletions .github/ci.jl
Original file line number Diff line number Diff line change
@@ -1,17 +1,23 @@
using Distributed
using Tables
using MarkdownTables
using SHA
using JSON
using Literate

@everywhere begin
ENV["GKSwstype"] = "100"
using Literate, JSON
ENV["GKSwstype"] = "100"

# Entry point: execute the notebook named by the `NB` environment variable.
# Accepts either a Literate script (`.jl`) or a Jupyter notebook (`.ipynb`);
# the latter is converted to Literate format before execution.
# Keyword `dopostproc` controls whether the executed notebook is post-processed.
function main(; dopostproc=true)
    file = get(ENV, "NB", "test.ipynb")
    cachedir = get(ENV, "NBCACHE", ".cache")
    if endswith(file, ".jl")
        run_literate(file; cachedir, dopostproc)
    elseif endswith(file, ".ipynb")
        lit = to_literate(file)
        run_literate(lit; cachedir, dopostproc)
    else
        # Fail loudly: previously an unrecognized NB value fell through
        # silently, letting the CI job report success without running anything.
        error("Unsupported notebook type: $(file). Expected a .jl or .ipynb file.")
    end
end

# Post-process Jupyter notebook
@everywhere function postprocess(nbpath)
oldfilesize = filesize(nbpath)
nb = open(JSON.parse, nbpath, "r")
function postprocess(ipynb)
oldfilesize = filesize(ipynb)
nb = open(JSON.parse, ipynb, "r")
for cell in nb["cells"]
!haskey(cell, "outputs") && continue
for output in cell["outputs"]
Expand All @@ -31,34 +37,14 @@ end
end
end
end
rm(nbpath; force=true)
write(nbpath, JSON.json(nb, 2))
@info "$(nbpath) is processed. The original size is $(Base.format_bytes(oldfilesize)). The new size is $(Base.format_bytes(filesize(nbpath)))."
return nbpath
end

# Remove cached notebook and sha files if there is no corresponding notebook
function clean_cache(cachedir)
for (root, _, files) in walkdir(cachedir)
for file in files
fn, ext = splitext(file)
if ext == ".sha"
target = joinpath(joinpath(splitpath(root)[2:end]), fn)
nb = target * ".ipynb"
lit = target * ".jl"
if !isfile(nb) && !isfile(lit)
cachepath = joinpath(root, fn)
@info "Notebook $(nb) or $(lit) not found. Removing $(cachepath) SHA and notebook."
rm(cachepath * ".sha"; force=true)
rm(cachepath * ".ipynb"; force=true)
end
end
end
end
rm(ipynb; force=true)
write(ipynb, JSON.json(nb, 2))
@info "The original size is $(Base.format_bytes(oldfilesize)). The new size is $(Base.format_bytes(filesize(ipynb)))."
return ipynb
end

# Convert a Jupyter notebook into a Literate notebook. Adapted from https://github.com/JuliaInterop/NBInclude.jl.
function to_literate(nbpath; shell_or_help=r"^\s*[;?]")
function to_literate(nbpath; shell_or_help = r"^\s*[;?]")
nb = open(JSON.parse, nbpath, "r")
jlpath = splitext(nbpath)[1] * ".jl"
open(jlpath, "w") do io
Expand All @@ -80,39 +66,7 @@ function to_literate(nbpath; shell_or_help=r"^\s*[;?]")
return jlpath
end

# Walk `basedir` and collect notebooks whose SHA256 does not match the cached
# hash under `cachedir` (i.e. notebooks that need to be (re)executed).
# Jupyter notebooks are converted to Literate scripts before being listed;
# the cached hash file is refreshed for every cache miss.
function list_notebooks(basedir, cachedir)
    stale = String[]
    for (dir, _, filenames) in walkdir(basedir)
        for filename in filenames
            stem, ext = splitext(filename)
            # Only Jupyter notebooks and Literate scripts are tracked.
            (ext == ".ipynb" || ext == ".jl") || continue
            nb = joinpath(dir, filename)
            shaval = bytes2hex(sha256(read(nb, String)))
            @info "$(nb) SHA256 = $(shaval)"
            shafilename = joinpath(cachedir, dir, stem * ".sha")
            if isfile(shafilename) && read(shafilename, String) == shaval
                @info "$(nb) cache hits and will not be executed."
                continue
            end
            @info "$(nb) cache misses. Writing hash to $(shafilename)."
            mkpath(dirname(shafilename))
            write(shafilename, shaval)
            if ext == ".ipynb"
                litnb = to_literate(nb)
                rm(nb; force=true)
                push!(stale, litnb)
            else
                push!(stale, nb)
            end
        end
    end
    return stale
end

# Run a Literate notebook
@everywhere function run_literate(file, cachedir; dopostproc=true)
function run_literate(file; cachedir = ".cache", dopostproc=true)
outpath = joinpath(abspath(pwd()), cachedir, dirname(file))
mkpath(outpath)
ipynb = Literate.notebook(file, dirname(file); mdstrings=true, execute=true)
Expand All @@ -121,37 +75,4 @@ end
return ipynb
end

# Orchestrate the notebook build: clean stale cache entries, find notebooks
# whose cache is out of date, execute them in parallel worker processes, and
# print a markdown summary table of elapsed times.
# Keywords:
#   basedir    — directory tree to scan for notebooks (env DOCDIR, default "docs")
#   cachedir   — cache directory for executed notebooks (env NBCACHE, default ".cache")
#   dopostproc — forward to `run_literate` to post-process executed notebooks
# Throws an error if any notebook fails to execute.
function main(;
    basedir=get(ENV, "DOCDIR", "docs"),
    cachedir=get(ENV, "NBCACHE", ".cache"),
    dopostproc=true)

    mkpath(cachedir)
    clean_cache(cachedir)
    litnbs = list_notebooks(basedir, cachedir)

    if !isempty(litnbs)
        ## Execute literate notebooks in worker process(es)
        ts_lit = pmap(litnbs; on_error=identity) do nb
            @elapsed run_literate(nb, cachedir; dopostproc)
        end
        rmprocs(workers()) ## Remove worker processes to release some memory
        failed = false
        for (nb, t) in zip(litnbs, ts_lit)
            ## `on_error=identity` returns the thrown exception as the result, so
            ## match any `Exception` — the previous `ErrorException` check missed
            ## other exception types (LoadError, TaskFailedException, ...) and let
            ## failures slip into the summary table unreported.
            if t isa Exception
                ## Note the leading space: the message used to render as
                ## "docs/a.jlfailed with error".
                println("Notebook: ", nb, " failed with error:\n", sprint(showerror, t))
                failed = true
            end
        end

        if failed
            error("Please check error(s).")
        else
            ## Print execution result
            Tables.table([litnbs ts_lit]; header=["Notebook", "Elapsed (s)"]) |> markdown_table(String) |> print
        end
    end
end

# Run code
main()
194 changes: 127 additions & 67 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: Build notebooks and publish
name: CI with dynamic parallel matrix

on:
workflow_dispatch:
Expand All @@ -15,15 +15,19 @@ env:
JULIA_CI: 'true'
JULIA_CONDAPKG_BACKEND: 'Null'
JULIA_CONDAPKG_OFFLINE: 'true'
# JULIA_CPU_TARGET: 'generic;icelake-server,clone_all;znver3,clone_all'
JULIA_NUM_THREADS: '2'
JULIA_CPU_TARGET: 'generic;icelake-server,clone_all;znver3,clone_all'
JULIA_NUM_THREADS: 'auto'
NBCACHE: '.cache'
LITERATE_PROC: '6'
PY_VER: '3.14'


jobs:
CI:
runs-on: self-hosted
setup:
runs-on: ubuntu-latest
outputs:
matrix: ${{ steps.set-matrix.outputs.matrix }}
hash: ${{ steps.hash.outputs.value }}
ver: ${{ steps.julia-version.outputs.resolved }}
steps:
- name: Checkout repository
uses: actions/checkout@v6
Expand All @@ -38,89 +42,145 @@ jobs:
run: uv pip install --system -r requirements.txt
- name: Read Julia version
id: julia-version
run: python -c 'import tomllib; from pathlib import Path; print("value=", tomllib.loads(Path("Manifest.toml").read_text())["julia_version"], sep="")' >> "$GITHUB_OUTPUT"
run: python -c 'import tomllib; from pathlib import Path; print("resolved=", tomllib.loads(Path("Manifest.toml").read_text())["julia_version"], sep="")' >> "$GITHUB_OUTPUT"
- name: Get environment hash
id: hash
run: |
echo "value=${{ hashFiles('Project.toml', 'Manifest.toml', 'src/**') }}" >> "$GITHUB_OUTPUT"
- name: Cache executed notebooks
echo "ver=${{ runner.os }}-julia-${{ steps.julia-version.outputs.resolved }}" >> "$GITHUB_OUTPUT"
- name: Cache Julia packages
uses: actions/cache@v5
id: cache-nb
id: cache-julia
with:
path: |
${{ env.NBCACHE }}/**/*.ipynb
${{ env.NBCACHE }}/**/*.sha
key: notebook-${{ steps.hash.outputs.value }}-${{ hashFiles('docs/**') }}
path: ~/.julia
key: ${{ runner.os }}-julia-${{ steps.julia-version.outputs.resolved }}-${{ steps.hash.outputs.value }}
restore-keys: |
notebook-${{ steps.hash.outputs.value }}-
${{ runner.os }}-julia-${{ steps.julia-version.outputs.resolved }}-
- name: Setup Julia
uses: julia-actions/setup-julia@v2
if: ${{ steps.cache-nb.outputs.cache-hit != 'true' }}
if: ${{ steps.cache-julia.outputs.cache-hit != 'true' }}
with:
version: ${{ steps.julia-version.outputs.value }}
version: ${{ steps.julia-version.outputs.resolved }}
arch: ${{ runner.arch }}
- name: Restore Julia packages
uses: actions/cache/restore@v5
if: ${{ steps.cache-nb.outputs.cache-hit != 'true' && runner.environment == 'github-hosted'}}
id: cache-julia
with:
path: ~/.julia
key: ${{ runner.os }}-julia-${{ steps.julia-version.outputs.value }}-${{ steps.hash.outputs.value }}
restore-keys: |
${{ runner.os }}-julia-${{ steps.julia-version.outputs.value }}
- name: Install Julia packages
if: ${{ steps.cache-nb.outputs.cache-hit != 'true' && (runner.environment == 'self-hosted' || steps.cache-julia.outputs.cache-hit != 'true') }}
if: ${{ steps.cache-julia.outputs.cache-hit != 'true' }}
shell: julia --color=yes --project=@. {0}
run: |
using Pkg, Dates
Pkg.instantiate()
Pkg.precompile()
if ENV["RUNNER_ENVIRONMENT"] == "github-hosted"
Pkg.gc(;collect_delay=Day(0))
end
- name: Save Julia packages
uses: actions/cache/save@v5
if: ${{ steps.cache-nb.outputs.cache-hit != 'true' && runner.environment == 'github-hosted' && steps.cache-julia.outputs.cache-hit != 'true' }}
with:
path: ~/.julia
key: ${{ steps.cache-julia.outputs.cache-primary-key }}
- name: Run notebooks
if: ${{ steps.cache-nb.outputs.cache-hit != 'true' }}
run: julia --project=@. --color=yes -p ${{ env.LITERATE_PROC }} .github/ci.jl
- name: Copy back built notebooks
run: |
cp --verbose -rf ${{ env.NBCACHE }}/docs/* docs/
find docs/ -type f -name "*.jl" -delete
- name: Upload notebook
uses: actions/upload-artifact@v6
with:
name: notebooks
path: docs/**/*.ipynb
- name: Upload figures
uses: actions/upload-artifact@v6
with:
name: figures
path: |
docs/**/*.png
docs/**/*.pdf
- name: Setup Quarto
if: ${{ runner.environment == 'github-hosted' }}
uses: quarto-dev/quarto-actions/setup@v2
- name: Render Quarto Project
run: quarto render docs/
- name: Upload pages artifact
uses: actions/upload-pages-artifact@v4
if: ${{ github.ref == 'refs/heads/main' }}
with:
path: docs/_site/
Pkg.gc(;collect_delay=Day(0))
- name: List notebooks as a JSON array
id: set-matrix
run: echo "matrix=$(python -c 'import glob, json; print(json.dumps(glob.glob("**/*.ipynb", root_dir="docs", recursive=True) + glob.glob("**/*.jl", root_dir="docs", recursive=True)))')" >> "$GITHUB_OUTPUT"

execute:
needs: setup
strategy:
max-parallel: 10
fail-fast: false
matrix:
notebook: ${{ fromJSON(needs.setup.outputs.matrix) }}
runs-on: ubuntu-latest
env:
NB: docs/${{ matrix.notebook }}
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Cache notebook
uses: actions/cache@v5
id: nb-cache
with:
path: ${{ env.NBCACHE }}
key: notebook-${{ needs.setup.outputs.hash }}-${{ hashFiles(env.NB) }}
- name: Setup Python
uses: actions/setup-python@v6
if: ${{ steps.nb-cache.outputs.cache-hit != 'true' }}
id: setup-python
with:
python-version: ${{ env.PY_VER }}
- name: Install the latest version of uv
if: ${{ steps.nb-cache.outputs.cache-hit != 'true' }}
uses: astral-sh/setup-uv@v7
- name: Install Python dependencies
if: ${{ steps.nb-cache.outputs.cache-hit != 'true' }}
run: uv pip install --system -r requirements.txt
- name: Setup Julia
uses: julia-actions/setup-julia@v2
if: ${{ steps.nb-cache.outputs.cache-hit != 'true' }}
with:
version: ${{ needs.setup.outputs.ver }}
arch: ${{ runner.arch }}
- name: Restore Julia packages
uses: actions/cache/restore@v5
if: ${{ steps.nb-cache.outputs.cache-hit != 'true' }}
with:
path: ~/.julia
key: ${{ runner.os }}-julia-${{ needs.setup.outputs.ver }}-${{ needs.setup.outputs.hash }}
- name: Execute notebook
if: ${{ steps.nb-cache.outputs.cache-hit != 'true' }}
run: julia --project=@. .github/ci.jl
- name: Convert artifact Name
id: art
run: echo "name=$(echo ${{ env.NB }} | sed 's/\//-/g')" >> "$GITHUB_OUTPUT"
- name: Upload Notebook
uses: actions/upload-artifact@v6
with:
name: notebook-${{ steps.art.outputs.name }}-${{ needs.setup.outputs.hash }}-${{ hashFiles(env.NB) }}
path: ${{ env.NBCACHE }}
include-hidden-files: true
retention-days: 1

render:
needs: execute
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Download notebooks
uses: actions/download-artifact@v7
with:
path: ${{ env.NBCACHE }}/
pattern: notebook-*
merge-multiple: true
- name: Copy back built notebooks
run: |
cp --verbose -rf ${{ env.NBCACHE }}/docs/* docs/
find docs/ -type f -name "*.jl" -delete
- name: Setup Quarto
uses: quarto-dev/quarto-actions/setup@v2
- name: Render Quarto Project
run: quarto render docs --to html
- name: Upload artifact for GH pages
uses: actions/upload-pages-artifact@v4
if: ${{ github.ref == 'refs/heads/main' }}
with:
path: docs/_site/

# CI conclusion for GitHub status check
  # Adapted from https://brunoscheufler.com/blog/2022-04-09-the-required-github-status-check-that-wasnt
CI:
needs: render
if: always()
runs-on: ubuntu-slim
steps:
- run: |
if [[ ${{ needs.render.result }} == "success" ]]; then
echo "Tests passed"
exit 0
else
echo "Tests failed"
exit 1
fi

# Deployment job
deploy:
name: Deploy to GitHub pages
needs: CI
if: ${{ github.ref == 'refs/heads/main'}}
needs: render
if: ${{ github.ref == 'refs/heads/main' }}
# Grant GITHUB_TOKEN the permissions required to make a Pages deployment
permissions:
pages: write # to deploy to Pages
pages: write # to deploy to Pages
id-token: write # to verify the deployment originates from an appropriate source
environment:
name: github-pages
Expand Down
Loading