Skip to content

Commit 2ad3337

Browse files
authored
Merge pull request #266 from FluxML/a2/docs-fix
Move to Documenter 1.0 (and some miscellaneous fixes)
2 parents b2ec1d6 + 0aff9b8 commit 2ad3337

File tree

12 files changed

+24
-26
lines changed

12 files changed

+24
-26
lines changed

.github/workflows/CI.yml

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -113,14 +113,6 @@ jobs:
113113
using Pkg
114114
Pkg.develop(PackageSpec(path=pwd()))
115115
Pkg.instantiate()'
116-
- run: |
117-
julia --color=yes --project=docs/ -e '
118-
using Metalhead
119-
# using Pkg; Pkg.activate("docs")
120-
using Documenter
121-
using Documenter: doctest
122-
DocMeta.setdocmeta!(Metalhead, :DocTestSetup, :(using Metalhead); recursive=true)
123-
doctest(Metalhead)'
124116
- run: julia --project=docs docs/make.jl
125117
env:
126118
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

docs/Project.toml

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,4 +5,8 @@ Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4"
55
Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c"
66
Images = "916415d5-f1e6-5110-898d-aaa5f9f070e0"
77
LazyArtifacts = "4af54fe1-eca0-43a8-85a7-787d91b784e3"
8+
Metalhead = "dbeba491-748d-5e0e-a39e-b530a07fa0cc"
89
Optimisers = "3bd65402-5787-11e9-1adc-39752487f4e2"
10+
11+
[compat]
12+
Documenter = "1"

docs/make.jl

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,12 @@
1-
using Documenter, Metalhead, Artifacts, LazyArtifacts, Images, DataAugmentation, Flux
2-
3-
DocMeta.setdocmeta!(Metalhead, :DocTestSetup, :(using Metalhead); recursive = true)
1+
using Documenter, Metalhead
42

53
# copy readme into index.md
64
open(joinpath(@__DIR__, "src", "index.md"), "w") do io
75
write(io, read(joinpath(@__DIR__, "..", "README.md"), String))
86
end
97

10-
makedocs(; modules = [Metalhead, Artifacts, LazyArtifacts, Images, DataAugmentation, Flux],
8+
makedocs(; modules = [Metalhead],
119
sitename = "Metalhead.jl",
12-
doctest = false,
1310
pages = ["Home" => "index.md",
1411
"Tutorials" => [
1512
"tutorials/quickstart.md",
@@ -41,7 +38,8 @@ makedocs(; modules = [Metalhead, Artifacts, LazyArtifacts, Images, DataAugmentat
4138
"Model Utilities" => "api/utilities.md",
4239
],
4340
],
44-
format = Documenter.HTML(; canonical = "https://fluxml.ai/Metalhead.jl/stable/",
41+
warnonly = [:example_block, :missing_docs, :cross_references],
42+
format = Documenter.HTML(canonical = "https://fluxml.ai/Metalhead.jl/stable/",
4543
# analytics = "UA-36890222-9",
4644
assets = ["assets/flux.css"],
4745
prettyurls = get(ENV, "CI", nothing) == "true"))

src/Metalhead.jl

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,12 +13,12 @@ using Random
1313

1414
import Functors
1515

16-
# Utilities
17-
include("utilities.jl")
16+
# Model utilities
1817
include("core.jl")
1918

2019
# Custom Layers
2120
include("layers/Layers.jl")
21+
include("layers/utilities.jl") # layer utilities
2222
using .Layers
2323

2424
# CNN models

src/convnets/builders/resnet.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ Creates a generic ResNet-like model.
2727
- `block_repeats`: This is a `Vector` of integers that specifies the number of repeats of each
2828
block in each stage.
2929
- `connection`: This is a function that determines the residual connection in the model. For
30-
`resnets`, either of [`Metalhead.addact`](@ref) or [`Metalhead.actadd`](@ref) is recommended.
30+
`resnets`, either of [`Metalhead.Layers.addact`](@ref) or [`Metalhead.Layers.actadd`](@ref) is recommended.
3131
- `classifier_fn`: This is a function that takes in the number of feature maps and returns a
3232
classifier. This is usually built as a closure using a function like [`Metalhead.create_classifier`](@ref).
3333
For example, if the number of output classes is `nclasses`, then the function can be defined as

src/convnets/resnets/core.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -371,7 +371,7 @@ Wide ResNet, ResNeXt and Res2Net. For an _even_ more generic model API, see [`Me
371371
- `inplanes`: The number of input channels in the first convolutional layer.
372372
- `reduction_factor`: The reduction factor used in the model.
373373
- `connection`: This is a function that determines the residual connection in the model. For
374-
`resnets`, either of [`Metalhead.addact`](@ref) or [`Metalhead.actadd`](@ref) is recommended.
374+
`resnets`, either of [`Metalhead.Layers.addact`](@ref) or [`Metalhead.Layers.actadd`](@ref) is recommended.
375375
These decide whether the residual connection is added before or after the activation function.
376376
- `norm_layer`: The normalisation layer to be used in the model.
377377
- `revnorm`: set to `true` to place the normalisation layers before the convolutions

src/layers/Layers.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ using Random
1212

1313
import Flux.testmode!
1414

15-
include("../utilities.jl")
15+
include("utilities.jl")
1616

1717
include("attention.jl")
1818
export MultiHeadSelfAttention

src/layers/conv.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ Create a convolution + normalisation layer pair with activation.
2424
- `pad`: padding of the convolution kernel
2525
- `dilation`: dilation of the convolution kernel
2626
- `groups`: groups for the convolution kernel
27-
- `weight`, `init`: initialization for the convolution kernel (see [`Flux.Conv`](@ref))
27+
- `weight`, `init`: initialization for the convolution kernel (see `Flux.Conv`)
2828
"""
2929
function conv_norm(kernel_size::Dims{2}, inplanes::Integer, outplanes::Integer,
3030
activation = relu; norm_layer = BatchNorm, revnorm::Bool = false,

src/layers/mbconv.jl

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -26,7 +26,7 @@ See Fig. 3 in [reference](https://arxiv.org/abs/1704.04861v1).
2626
- `bias`: whether to use bias in the convolution layers.
2727
- `stride`: stride of the first convolution kernel
2828
- `pad`: padding of the first convolution kernel
29-
- `weight`, `init`: initialization for the convolution kernel (see [`Flux.Conv`](@ref))
29+
- `weight`, `init`: initialization for the convolution kernel (see `Flux.Conv`)
3030
"""
3131
function dwsep_conv_norm(kernel_size::Dims{2}, inplanes::Integer, outplanes::Integer,
3232
activation = relu; norm_layer = BatchNorm, stride::Integer = 1,

src/utilities.jl renamed to src/layers/utilities.jl

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -14,16 +14,16 @@ end
1414
1515
Convenience function for applying an activation function to the output after
1616
summing up the input arrays. Useful as the `connection` argument for the block
17-
function in [`Metalhead.resnet`](@ref).
17+
function in `Metalhead.resnet`.
1818
"""
1919
addact(activation = relu, xs...) = activation(sum(xs))
2020

2121
"""
2222
actadd(activation = relu, xs...)
2323
24-
Convenience function for adding input arrays after applying an activation
25-
function to them. Useful as the `connection` argument for the block function in
26-
[`Metalhead.resnet`](@ref).
24+
Convenience function for summing up the input arrays after applying an
25+
activation function to them. Useful as the `connection` argument for the block
26+
function in `Metalhead.resnet`.
2727
"""
2828
actadd(activation = relu, xs...) = sum(activation.(x) for x in xs)
2929

0 commit comments

Comments (0)