Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
111 changes: 85 additions & 26 deletions .github/ci.jl
Original file line number Diff line number Diff line change
@@ -1,30 +1,14 @@
using Tables
using MarkdownTables
using SHA
using IJulia
using JSON
using Pkg
ENV["GKSwstype"] = "100"

function main(;
basedir=get(ENV, "DOCDIR", "docs"),
cachedir=get(ENV, "NBCACHE", ".cache"),
rmsvg=true)
nb = get(ENV, "NB", "test.ipynb")
IJulia.installkernel("Julia", "--project=@.")
# nbconvert command options
kernelname = "--ExecutePreprocessor.kernel_name=julia-1.$(VERSION.minor)"
execute = ifelse(get(ENV, "ALLOWERRORS", "false") == "true", "--execute --allow-errors", "--execute")
timeout = "--ExecutePreprocessor.timeout=" * get(ENV, "TIMEOUT", "-1")
nbout = joinpath(abspath(pwd()), cachedir, nb)
mkpath(dirname(nbout))
cmd = `jupyter nbconvert --to notebook $(execute) $(timeout) $(kernelname) --output $(nbout) $(nb)`
run(cmd)
rmsvg && strip_svg(nbout)
return nothing
end

# Strip SVG output from a Jupyter notebook
function strip_svg(ipynb)
oldfilesize = filesize(ipynb)
nb = open(JSON.parse, ipynb, "r")
function strip_svg(nbpath)
oldfilesize = filesize(nbpath)
nb = open(JSON.parse, nbpath, "r")
for cell in nb["cells"]
!haskey(cell, "outputs") && continue
for output in cell["outputs"]
Expand All @@ -36,10 +20,85 @@ function strip_svg(ipynb)
end
end
end
rm(ipynb; force=true)
write(ipynb, JSON.json(nb, 1))
@info "Stripped SVG in $(ipynb). The original size is $(Base.format_bytes(oldfilesize)). The new size is $(Base.format_bytes(filesize(ipynb)))."
return ipynb
rm(nbpath; force=true)
write(nbpath, JSON.json(nb, 1))
@info "Stripped SVG in $(nbpath). The original size is $(oldfilesize). The new size is $(filesize(nbpath))."
return nbpath
end

# Remove cached notebook and sha files if there is no corresponding notebook
"""
    clean_cache(cachedir)

Walk `cachedir` and remove every cached `.sha`/`.ipynb` pair whose source
notebook no longer exists. The source notebook is located by stripping the
leading cache-directory component from the cached path and resolving the
remainder relative to the current working directory.
"""
function clean_cache(cachedir)
    for (dir, _, filenames) in walkdir(cachedir)
        # Relative location of the source notebooks for this cache folder:
        # the cached tree mirrors the source tree minus its top component.
        relparts = splitpath(dir)[2:end]
        for filename in filenames
            stem, extension = splitext(filename)
            extension == ".sha" || continue
            nb = joinpath(joinpath(relparts), stem) * ".ipynb"
            if !isfile(nb)
                cachepath = joinpath(dir, stem)
                @info "Notebook $(nb) not found. Removing SHA and notebook in $(cachepath)."
                rm(cachepath * ".sha"; force=true)
                rm(cachepath * ".ipynb"; force=true)
            end
        end
    end
    return nothing
end

# List notebooks without caches in a file tree
function list_notebooks(basedir, cachedir)
list = String[]
for (root, _, files) in walkdir(basedir)
for file in files
name, ext = splitext(file)
if ext == ".ipynb"
nb = joinpath(root, file)
shaval = read(nb, String) |> sha256 |> bytes2hex
@info "$(nb) SHA256 = $(shaval)"
shafilename = joinpath(cachedir, root, name * ".sha")
if isfile(shafilename) && read(shafilename, String) == shaval
@info "$(nb) cache hits and will not be executed."
else
@info "$(nb) cache misses. Writing hash to $(shafilename)."
mkpath(dirname(shafilename))
write(shafilename, shaval)
push!(list, nb)
end
end
end
end
return list
end

"""
    main(; basedir, cachedir, rmsvg=true)

Execute every cache-missing notebook under `basedir` with `jupyter
nbconvert`, store the executed copies under `cachedir`, and print a
Markdown table of per-notebook execution times.

Environment variables read: `DOCDIR` (notebook directory), `NBCACHE`
(cache directory), `NBCONVERT_JOBS` (parallel execution tasks),
`ALLOWERRORS` (`"true"` continues past cell errors), `TIMEOUT` (per-cell
timeout in seconds; `-1` disables it).
"""
function main(;
    basedir=get(ENV, "DOCDIR", "docs"),
    cachedir=get(ENV, "NBCACHE", ".cache"),
    rmsvg=true)

    mkpath(cachedir)
    # Drop cached outputs whose source notebook has been deleted.
    clean_cache(cachedir)
    # Notebooks whose SHA-256 misses the cache and must be (re)executed.
    nblist = list_notebooks(basedir, cachedir)

    if !isempty(nblist)
        # Register a project-scoped Julia kernel for nbconvert to use.
        IJulia.installkernel("Julia", "--project=@.")
        # nbconvert command options
        ntasks = parse(Int, get(ENV, "NBCONVERT_JOBS", "1"))
        kernelname = "--ExecutePreprocessor.kernel_name=julia-1.$(VERSION.minor)"
        execute = ifelse(get(ENV, "ALLOWERRORS", "false") == "true", "--execute --allow-errors", "--execute")
        timeout = "--ExecutePreprocessor.timeout=" * get(ENV, "TIMEOUT", "-1")
        # Run the nbconvert commands in parallel; each task records its
        # wall-clock time via @elapsed.
        ts_ipynb = asyncmap(nblist; ntasks) do nb
            @elapsed begin
                nbout = joinpath(abspath(pwd()), cachedir, nb)
                cmd = `jupyter nbconvert --to notebook $(execute) $(timeout) $(kernelname) --output $(nbout) $(nb)`
                run(cmd)
                # Optionally strip bulky SVG outputs from the executed copy.
                rmsvg && strip_svg(nbout)
            end
        end
        # Print execution result
        Tables.table([nblist ts_ipynb]; header=["Notebook", "Elapsed (s)"]) |> markdown_table(String) |> print
    end
end

# Script entry point: run the notebook pipeline with env-derived defaults.
main()
46 changes: 23 additions & 23 deletions .github/workflows/automerge.yml
Original file line number Diff line number Diff line change
@@ -1,23 +1,23 @@
# A variant of https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions
name: Dependabot auto-merge
on: pull_request
permissions:
contents: write
pull-requests: write
jobs:
dependabot:
runs-on: ubuntu-slim
if: github.event.pull_request.user.login == 'dependabot[bot]'
steps:
- name: Dependabot metadata
id: metadata
uses: dependabot/fetch-metadata@v2
with:
github-token: "${{ secrets.GITHUB_TOKEN }}"
- name: Enable auto-merge for Dependabot PRs
run: gh pr edit "$PR_URL" --add-label "automerge"
env:
PR_URL: ${{ github.event.pull_request.html_url }}
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# A variant of https://docs.github.com/en/code-security/dependabot/working-with-dependabot/automating-dependabot-with-github-actions
# NOTE(review): indentation reconstructed — YAML is whitespace-significant and
# the original span had lost all leading whitespace, making it invalid YAML.
name: Dependabot auto-merge
on: pull_request

permissions:
  contents: write
  pull-requests: write

jobs:
  dependabot:
    runs-on: ubuntu-slim
    if: github.event.pull_request.user.login == 'dependabot[bot]'
    steps:
      - name: Dependabot metadata
        id: metadata
        uses: dependabot/fetch-metadata@v2
        with:
          github-token: "${{ secrets.GITHUB_TOKEN }}"
      - name: Enable auto-merge for Dependabot PRs
        run: gh pr edit "$PR_URL" --add-label "automerge"
        env:
          PR_URL: ${{ github.event.pull_request.html_url }}
          GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
Loading
Loading