Revert "remove some duplicate include" #226

Open
wants to merge 1 commit into
base: master
3 changes: 0 additions & 3 deletions .gitignore
@@ -2,13 +2,10 @@
 *.jl.*.cov
 *.jl.mem
-*.bson
-.vscode/
-.DS_Store
 
 # manifests
 docs/Manifest.toml
 Manifest.toml
 
 # docs theme
 _flux-theme
 
4 changes: 4 additions & 0 deletions Project.toml
@@ -29,3 +29,7 @@ NNlibCUDA = "0.2"
 PartialFunctions = "1"
 julia = "1.6"
 
+[publish]
+ignore = ["^(gh-pages|juliamnt|julia.dmg)$"]
+theme = "_flux-theme"
+title = "Metalhead.jl"
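For readers wondering what the restored `[publish]` block is: it is plain TOML metadata inside Project.toml (by its keys, documentation-publishing settings: a page title, a theme directory, and a regex of refs/files to ignore). A minimal sketch of reading it back out with Julia's standard TOML parser; the key names come from the diff above, while the tooling that normally consumes them is not shown and is an assumption:

```julia
# Minimal sketch: read the [publish] metadata back out of Project.toml.
# Assumes only the TOML standard library; keys mirror the diff above.
using TOML

project = TOML.parsefile("Project.toml")
publish = get(project, "publish", Dict{String, Any}())

println(get(publish, "title", "<none>"))   # "Metalhead.jl"
println(get(publish, "theme", "<none>"))   # "_flux-theme"
println(get(publish, "ignore", String[]))  # ["^(gh-pages|juliamnt|julia.dmg)$"]
```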
2 changes: 2 additions & 0 deletions src/Metalhead.jl
@@ -54,6 +54,8 @@ include("convnets/mobilenets/mobilenetv2.jl")
 include("convnets/mobilenets/mobilenetv3.jl")
 include("convnets/mobilenets/mnasnet.jl")
 ## Others
+include("convnets/densenet.jl")
+include("convnets/squeezenet.jl")
 include("convnets/unet.jl")
 ## Hybrid models
 include("convnets/hybrid/convnext.jl")
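A note on why this revert matters: Julia's `include` textually evaluates a file into the enclosing module, so a model only exists in Metalhead if some `include` line loads its file. If the includes removed by the original commit were in fact the only ones for `densenet.jl` and `squeezenet.jl`, those models would have silently vanished from the package. A small self-contained sketch of the mechanism (hypothetical file and names, not part of Metalhead):

```julia
# Hypothetical sketch of include-based module assembly, mirroring how
# src/Metalhead.jl pulls in one file per model family.
module ToyModels

# stand-in for a model source file such as convnets/densenet.jl
write("toy_densenet.jl", "densenet_depths() = [121, 161, 169, 201]\n")

include("toy_densenet.jl")  # drop this line and densenet_depths ceases to exist

end # module

println(ToyModels.densenet_depths())  # [121, 161, 169, 201]
```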
34 changes: 17 additions & 17 deletions src/convnets/densenet.jl
@@ -1,5 +1,5 @@
 """
-    dense_bottleneck(inplanes, outplanes; expansion=4)
+    dense_bottleneck(inplanes, growth_rate)
 
 Create a Densenet bottleneck layer
 ([reference](https://arxiv.org/abs/1608.06993)).
@@ -10,7 +10,7 @@ Create a Densenet bottleneck layer
   - `outplanes`: number of output feature maps on bottleneck branch
     (and scaling factor for inner feature maps; see ref)
 """
-function dense_bottleneck(inplanes::Int, outplanes::Int; expansion::Int = 4)
+function dense_bottleneck(inplanes::Integer, outplanes::Integer; expansion::Integer = 4)
     return SkipConnection(Chain(conv_norm((1, 1), inplanes, expansion * outplanes;
                                           revnorm = true)...,
                                 conv_norm((3, 3), expansion * outplanes, outplanes;
@@ -28,7 +28,7 @@ Create a DenseNet transition sequence
   - `inplanes`: number of input feature maps
   - `outplanes`: number of output feature maps
 """
-function transition(inplanes::Int, outplanes::Int)
+function transition(inplanes::Integer, outplanes::Integer)
     return Chain(conv_norm((1, 1), inplanes, outplanes; revnorm = true)...,
                  MeanPool((2, 2)))
 end
@@ -46,15 +46,15 @@ the number of output feature maps by `growth_rates` with each block
   - `growth_rates`: the growth (additive) rates of output feature maps
     after each block (a vector of `k`s from the ref)
 """
-function dense_block(inplanes::Int, growth_rates)
+function dense_block(inplanes::Integer, growth_rates)
     return [dense_bottleneck(i, o)
             for (i, o) in zip(inplanes .+ cumsum([0, growth_rates[1:(end - 1)]...]),
                               growth_rates)]
 end
 
 """
     densenet(inplanes, growth_rates; reduction = 0.5, dropout_prob = nothing,
-             inchannels = 3, nclasses = 1000)
+             inchannels::Integer = 3, nclasses::Integer = 1000)
 
 Create a DenseNet model
 ([reference](https://arxiv.org/abs/1608.06993)).
@@ -68,9 +68,9 @@ Create a DenseNet model
   - `dropout_prob`: the dropout probability for the classifier head. Set to `nothing` to disable dropout.
   - `nclasses`: the number of output classes
 """
-function build_densenet(inplanes::Int, growth_rates; reduction = 0.5,
+function build_densenet(inplanes::Integer, growth_rates; reduction = 0.5,
                         dropout_prob = nothing,
-                        inchannels::Int = 3, nclasses::Int = 1000)
+                        inchannels::Integer = 3, nclasses::Integer = 1000)
     layers = []
     append!(layers,
             conv_norm((7, 7), inchannels, inplanes; stride = 2, pad = (3, 3)))
@@ -88,9 +88,9 @@ function build_densenet(inplanes::Int, growth_rates; reduction = 0.5,
 end
 
 """
-    densenet(nblocks::AbstractVector{Int}; growth_rate = 32,
-             reduction = 0.5, dropout_prob = nothing, inchannels = 3,
-             nclasses = 1000)
+    densenet(nblocks::AbstractVector{<:Integer}; growth_rate::Integer = 32,
+             reduction = 0.5, dropout_prob = nothing, inchannels::Integer = 3,
+             nclasses::Integer = 1000)
 
 Create a DenseNet model
 ([reference](https://arxiv.org/abs/1608.06993)).
@@ -104,9 +104,9 @@ Create a DenseNet model
   - `inchannels`: the number of input channels
   - `nclasses`: the number of output classes
 """
-function densenet(nblocks::AbstractVector{Int}; growth_rate::Int = 32,
-                  reduction = 0.5, dropout_prob = nothing, inchannels::Int = 3,
-                  nclasses::Int = 1000)
+function densenet(nblocks::AbstractVector{<:Integer}; growth_rate::Integer = 32,
+                  reduction = 0.5, dropout_prob = nothing, inchannels::Integer = 3,
+                  nclasses::Integer = 1000)
     return build_densenet(2 * growth_rate, [fill(growth_rate, n) for n in nblocks];
                           reduction, dropout_prob, inchannels, nclasses)
 end
@@ -117,8 +117,8 @@ const DENSENET_CONFIGS = Dict(121 => [6, 12, 24, 16],
                               201 => [6, 12, 48, 32])
 
 """
-    DenseNet(config::Int; pretrain = false, growth_rate = 32,
-             reduction = 0.5, inchannels = 3, nclasses = 1000)
+    DenseNet(config::Integer; pretrain::Bool = false, growth_rate::Integer = 32,
+             reduction = 0.5, inchannels::Integer = 3, nclasses::Integer = 1000)
 
 Create a DenseNet model with specified configuration. Currently supported values are (121, 161, 169, 201)
 ([reference](https://arxiv.org/abs/1608.06993)).
@@ -143,8 +143,8 @@ struct DenseNet
 end
 @functor DenseNet
 
-function DenseNet(config::Int; pretrain::Bool = false, growth_rate::Int = 32,
-                  reduction = 0.5, inchannels::Int = 3, nclasses::Int = 1000)
+function DenseNet(config::Integer; pretrain::Bool = false, growth_rate::Integer = 32,
+                  reduction = 0.5, inchannels::Integer = 3, nclasses::Integer = 1000)
     _checkconfig(config, keys(DENSENET_CONFIGS))
     layers = densenet(DENSENET_CONFIGS[config]; growth_rate, reduction, inchannels,
                       nclasses)
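To make the `Int` → `Integer` loosening concrete: the annotations above now accept any `Integer` subtype (e.g. `Int32`, `Int16`), not just the platform `Int`. A hedged usage sketch, assuming Metalhead is installed and exports `DenseNet` as its docstring indicates:

```julia
# Hedged usage sketch for the loosened signatures above.
using Metalhead

m  = DenseNet(121)                               # DenseNet-121 via DENSENET_CONFIGS
m2 = DenseNet(Int32(169); nclasses = Int16(10))  # any Integer subtype now dispatches

# Channel bookkeeping from `dense_block`: each block's input width is the
# initial width plus the cumulative sum of the preceding growth rates.
growth_rates = fill(32, 6)
println(64 .+ cumsum([0; growth_rates[1:(end - 1)]]))  # [64, 96, 128, 160, 192, 224]
```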
7 changes: 4 additions & 3 deletions src/layers/drop.jl
@@ -11,7 +11,7 @@ ChainRulesCore.@non_differentiable _dropblock_mask(rng, x, gamma, clipped_block_
 # TODO add experimental `DropBlock` options from timm such as gaussian noise and
 # more precise `DropBlock` to deal with edges (#188)
 """
-    dropblock([rng], x::AbstractArray{T, 4}, drop_block_prob, block_size,
+    dropblock([rng = default_rng_value(x)], x::AbstractArray{T, 4}, drop_block_prob, block_size,
               gamma_scale, active::Bool = true)
 
 The dropblock function. If `active` is `true`, for each input, it zeroes out contiguous
@@ -54,7 +54,8 @@ end
 dropblock_mask(rng, x, gamma, bs) = _dropblock_mask(rng, x, gamma, bs)
 
 """
-    DropBlock(drop_block_prob = 0.1, block_size = 7, gamma_scale = 1.0, [rng])
+    DropBlock(drop_block_prob = 0.1, block_size = 7, gamma_scale = 1.0,
+              rng = default_rng_value())
 
 The `DropBlock` layer. While training, it zeroes out contiguous regions of
 size `block_size` in the input. During inference, it simply returns the input `x`.
@@ -121,7 +122,7 @@ function Base.show(io::IO, d::DropBlock)
 end
 
 """
-    StochasticDepth(p, mode = :row; [rng])
+    StochasticDepth(p, mode = :row; rng = default_rng_value())
 
 Implements Stochastic Depth. This is a `Dropout` layer from Flux that drops values
 with probability `p`.
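Finally, a hedged sketch of what the newly documented `rng` arguments mean for callers: both layers can take an explicit RNG, which makes the random drop masks reproducible. The constructor shapes follow the docstrings above; that the layers are reachable under the `Metalhead.` prefix is an assumption:

```julia
# Hedged sketch: explicit RNGs for reproducible drop masks.
# DropBlock takes the rng positionally, StochasticDepth as a keyword,
# matching the two docstrings in the diff above.
using Metalhead, Random

rng = Random.MersenneTwister(0)

db = Metalhead.DropBlock(0.1, 7, 1.0, rng)
sd = Metalhead.StochasticDepth(0.2, :row; rng = rng)
```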