Commit 3cea17e (parent e502dba)
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Showing 47 changed files with 5,189 additions and 4,866 deletions.
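The changes below are purely stylistic: keyword arguments gain spaces around `=`, long argument lists are wrapped and aligned under the opening call, indentation is normalised to four spaces, and explicit `return`s are added inside `do` blocks. This is the kind of output an automatic formatter produces. As a hedged illustration only (the commit itself does not say which tool or options were used), a repository-wide reformat in this style could be run with JuliaFormatter:

    # Hypothetical invocation; the exact tool and options behind this commit are not shown here.
    using JuliaFormatter
    format("."; whitespace_in_kwargs = true,   # `cuda = true` rather than `cuda=true`
           always_use_return = true,           # insert `return` in do-block bodies
           margin = 92)                        # wrap long lines such as the `using` statement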
The first changed file is the Documenter build script for the Flux manual (`makedocs`/`deploydocs`):
@@ -1,62 +1,67 @@
-using Documenter, Flux, NNlib, Functors, MLUtils, BSON, Optimisers, OneHotArrays, Zygote, ChainRulesCore
+using Documenter, Flux, NNlib, Functors, MLUtils, BSON, Optimisers, OneHotArrays, Zygote,
+      ChainRulesCore
 
 DocMeta.setdocmeta!(Flux, :DocTestSetup, :(using Flux); recursive = true)
 
-makedocs(
-    modules = [Flux, NNlib, Functors, MLUtils, BSON, Optimisers, OneHotArrays, Zygote, ChainRulesCore, Base],
-    doctest = false,
-    sitename = "Flux",
-    # strict = [:cross_references,],
-    pages = [
-        "Getting Started" => [
-            "Welcome" => "index.md",
-            "Quick Start" => "models/quickstart.md",
-            "Fitting a Line" => "models/overview.md",
-            "Gradients and Layers" => "models/basics.md",
-        ],
-        "Building Models" => [
-            "Built-in Layers 📚" => "models/layers.md",
-            "Recurrence" => "models/recurrence.md",
-            "Activation Functions 📚" => "models/activation.md",
-            "NNlib.jl 📚 (`softmax`, `conv`, ...)" => "models/nnlib.md",
-        ],
-        "Handling Data" => [
-            "MLUtils.jl 📚 (`DataLoader`, ...)" => "data/mlutils.md",
-            "OneHotArrays.jl 📚 (`onehot`, ...)" => "data/onehot.md",
-        ],
-        "Training Models" => [
-            "Training" => "training/training.md",
-            "Regularisation" => "models/regularisation.md",
-            "Loss Functions 📚" => "models/losses.md",
-            "Optimisation Rules 📚" => "training/optimisers.md", # TODO move optimiser intro up to Training
-            "Callback Helpers 📚" => "training/callbacks.md",
-            "Zygote.jl 📚 (`gradient`, ...)" => "training/zygote.md",
-        ],
-        "Model Tools" => [
-            "GPU Support" => "gpu.md",
-            "Saving & Loading" => "saving.md",
-            "Shape Inference 📚" => "outputsize.md",
-            "Weight Initialisation 📚" => "utilities.md",
-            "Flat vs. Nested 📚" => "destructure.md",
-            "Functors.jl 📚 (`fmap`, ...)" => "models/functors.md",
-        ],
-        "Performance Tips" => "performance.md",
-        "Flux's Ecosystem" => "ecosystem.md",
-        "Tutorials" => [ # TODO, maybe
-            "Custom Layers" => "models/advanced.md", # TODO move freezing to Training
-        ],
-    ],
-    format = Documenter.HTML(
-        sidebar_sitename = false,
-        analytics = "UA-36890222-9",
-        assets = ["assets/flux.css"],
-        prettyurls = get(ENV, "CI", nothing) == "true"
-    ),
-)
+makedocs(modules = [
+             Flux,
+             NNlib,
+             Functors,
+             MLUtils,
+             BSON,
+             Optimisers,
+             OneHotArrays,
+             Zygote,
+             ChainRulesCore,
+             Base,
+         ],
+         doctest = false,
+         sitename = "Flux",
+         # strict = [:cross_references,],
+         pages = [
+             "Getting Started" => [
+                 "Welcome" => "index.md",
+                 "Quick Start" => "models/quickstart.md",
+                 "Fitting a Line" => "models/overview.md",
+                 "Gradients and Layers" => "models/basics.md",
+             ],
+             "Building Models" => [
+                 "Built-in Layers 📚" => "models/layers.md",
+                 "Recurrence" => "models/recurrence.md",
+                 "Activation Functions 📚" => "models/activation.md",
+                 "NNlib.jl 📚 (`softmax`, `conv`, ...)" => "models/nnlib.md",
+             ],
+             "Handling Data" => [
+                 "MLUtils.jl 📚 (`DataLoader`, ...)" => "data/mlutils.md",
+                 "OneHotArrays.jl 📚 (`onehot`, ...)" => "data/onehot.md",
+             ],
+             "Training Models" => [
+                 "Training" => "training/training.md",
+                 "Regularisation" => "models/regularisation.md",
+                 "Loss Functions 📚" => "models/losses.md",
+                 "Optimisation Rules 📚" => "training/optimisers.md", # TODO move optimiser intro up to Training
+                 "Callback Helpers 📚" => "training/callbacks.md",
+                 "Zygote.jl 📚 (`gradient`, ...)" => "training/zygote.md",
+             ],
+             "Model Tools" => [
+                 "GPU Support" => "gpu.md",
+                 "Saving & Loading" => "saving.md",
+                 "Shape Inference 📚" => "outputsize.md",
+                 "Weight Initialisation 📚" => "utilities.md",
+                 "Flat vs. Nested 📚" => "destructure.md",
+                 "Functors.jl 📚 (`fmap`, ...)" => "models/functors.md",
+             ],
+             "Performance Tips" => "performance.md",
+             "Flux's Ecosystem" => "ecosystem.md",
+             "Tutorials" => [ # TODO, maybe
+                 "Custom Layers" => "models/advanced.md", # TODO move freezing to Training
+             ],
+         ],
+         format = Documenter.HTML(sidebar_sitename = false,
+                                  analytics = "UA-36890222-9",
+                                  assets = ["assets/flux.css"],
+                                  prettyurls = get(ENV, "CI", nothing) == "true"))
 
-deploydocs(
-    repo = "github.com/FluxML/Flux.jl.git",
-    target = "build",
-    push_preview = true
-)
+deploydocs(repo = "github.com/FluxML/Flux.jl.git",
+           target = "build",
+           push_preview = true)
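Assuming the standard Documenter layout, where this build script lives at docs/make.jl with its own project environment (not confirmed by this page), the manual can still be rebuilt locally after the reformat with something like:

    # Run from the repository root; assumes docs/ carries its own Project.toml.
    using Pkg
    Pkg.activate("docs")
    Pkg.develop(path = ".")    # use the local Flux checkout instead of a registered release
    Pkg.instantiate()
    include("docs/make.jl")    # calls makedocs; deploydocs only deploys when running on CI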
The next changed file is a small convolution benchmark script:
@@ -1,8 +1,8 @@
 for ch in [1, 3, 16, 64]
     x = rand(Float32, 64, 64, ch, 64)
-    model = Conv((3,3), ch=>ch)
+    model = Conv((3, 3), ch => ch)
     println("CPU ch=$ch")
-    run_benchmark(model, x, cuda=false)
+    run_benchmark(model, x, cuda = false)
     println("CUDA ch=$ch")
-    run_benchmark(model, x, cuda=true)
+    run_benchmark(model, x, cuda = true)
 end
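The `run_benchmark` helper called here is defined elsewhere in the repository and is not part of this diff. A minimal sketch of what such a helper might look like, assuming it simply times a forward pass on CPU or GPU (the real version may also time gradients and use different tooling):

    # Hypothetical stand-in for the run_benchmark helper used by these scripts.
    using BenchmarkTools, Flux
    using CUDA

    function run_benchmark(model, x; cuda = false)
        if cuda
            model, x = gpu(model), gpu(x)         # move model and data to the GPU
            @btime CUDA.@sync $model($x)          # synchronise so kernel time is included
        else
            @btime $model($x)
        end
        return nothing
    end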
The next changed file is a benchmark script for the recurrent layers (RNN, GRU, LSTM):
@@ -1,62 +1,62 @@
 struct RNNWrapper{T}
-  rnn::T
+    rnn::T
 end
 Flux.@functor RNNWrapper
 
 # Need to specialize for RNNWrapper.
 fw(r::RNNWrapper, X::Vector{<:AbstractArray}) = begin
-  Flux.reset!(r.rnn)
-  [r.rnn(x) for x in X]
+    Flux.reset!(r.rnn)
+    [r.rnn(x) for x in X]
 end
 
 fw(r::RNNWrapper, X) = begin
-  Flux.reset!(r.rnn)
-  r.rnn(X)
+    Flux.reset!(r.rnn)
+    r.rnn(X)
 end
 
-fwbw(r::RNNWrapper, ps, X::Vector{<:AbstractArray}) = gradient(ps) do
-  y = fw(r, X)
-  sum(sum(y))
-end
+fwbw(r::RNNWrapper, ps, X::Vector{<:AbstractArray}) =
+    gradient(ps) do
+        y = fw(r, X)
+        return sum(sum(y))
+    end
 
-pb(r::RNNWrapper, ps, X::Vector{<:AbstractArray}) = pullback(ps) do
-  y = fw(r, X)
-  sum(sum(y))
-end
+pb(r::RNNWrapper, ps, X::Vector{<:AbstractArray}) =
+    pullback(ps) do
+        y = fw(r, X)
+        return sum(sum(y))
+    end
 
 function rnn_benchmark_sweep(data_creator::Function, rnn_type)
-  for n in [2, 20, 200, 1000], ts in [1, 4, 16, 64]
-    x, x_n = data_creator(n, ts)
-    model = RNNWrapper(rnn_type(n, n))
-
-    println("$rnn_type $x_n CPU n=$n, ts=$ts")
-    run_benchmark(model, x, cuda=false)
-
-    println("$rnn_type $x_n CUDA n=$n, ts=$ts")
-    try
-      run_benchmark(model, x, cuda=true)
-    catch ex
-      @show typeof(ex)
-      if ex isa OutOfGPUMemoryError
-        @warn "Not enough GPU memory to run test"
-      else
-        rethrow(ex)
-      end
-    end
-  end
+    for n in [2, 20, 200, 1000], ts in [1, 4, 16, 64]
+        x, x_n = data_creator(n, ts)
+        model = RNNWrapper(rnn_type(n, n))
+
+        println("$rnn_type $x_n CPU n=$n, ts=$ts")
+        run_benchmark(model, x, cuda = false)
+
+        println("$rnn_type $x_n CUDA n=$n, ts=$ts")
+        try
+            run_benchmark(model, x, cuda = true)
+        catch ex
+            @show typeof(ex)
+            if ex isa OutOfGPUMemoryError
+                @warn "Not enough GPU memory to run test"
+            else
+                rethrow(ex)
+            end
+        end
+    end
 end
 
 for rnn_type in [Flux.RNN, Flux.GRU, Flux.LSTM]
-  rnn_benchmark_sweep(rnn_type) do n, ts
-    [randn(Float32, n, n) for _ in 1:ts], "Vec"
-  end
+    rnn_benchmark_sweep(rnn_type) do n, ts
+        return [randn(Float32, n, n) for _ in 1:ts], "Vec"
+    end
 end
 
 for rnn_type in [Flux.RNN, Flux.GRU, Flux.LSTM]
-  rnn_benchmark_sweep(rnn_type) do n, ts
-    randn(Float32, n, n, ts), "Block"
-  end
+    rnn_benchmark_sweep(rnn_type) do n, ts
+        return randn(Float32, n, n, ts), "Block"
+    end
 end
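For reference, the `data_creator` passed to `rnn_benchmark_sweep` returns both the input and a label string ("Vec" for a vector of per-timestep matrices, "Block" for a single 3-D array), and `fw`/`fwbw` reset the recurrent state before each run. A small usage sketch of the definitions above (dimensions chosen arbitrarily):

    # Usage sketch; assumes the definitions from this file are loaded.
    r  = RNNWrapper(Flux.RNN(4, 4))
    xs = [randn(Float32, 4, 8) for _ in 1:3]     # 3 timesteps, batch size 8
    ys = fw(r, xs)                               # forward pass over the sequence
    gs = fwbw(r, Flux.params(r.rnn), xs)         # gradients w.r.t. the RNN's parameters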