From be7e88998f431fefd0c636a0b4308826e911a95d Mon Sep 17 00:00:00 2001
From: ho-oto
Date: Mon, 11 Jan 2021 02:58:45 +0900
Subject: [PATCH 001/490] fix broken link

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index d248793f6..8551bca87 100644
--- a/README.md
+++ b/README.md
@@ -3,7 +3,7 @@

-![CI Testing](https://github.com/FluxML/Zygote.jl/workflows/CI/badge.svg) +[![CI Testing](https://github.com/FluxML/Zygote.jl/workflows/CI/badge.svg)](https://github.com/FluxML/Zygote.jl/actions) [![Dev Docs](https://img.shields.io/badge/docs-dev-blue.svg)](https://fluxml.ai/Zygote.jl/dev) `] add Zygote` From 3e54503e29c6625be91f69962b826b86e84dfaa3 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Mon, 11 Jan 2021 17:45:26 +0530 Subject: [PATCH 002/490] add scalar method for vcat with number --- src/lib/broadcast.jl | 2 ++ test/cuda.jl | 7 +++++++ 2 files changed, 9 insertions(+) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index db43b163c..80e1f663f 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -239,4 +239,6 @@ end Base.convert(T, xs), Δ -> (nothing, Base.convert(Array, Δ),) end + + pull_block_vert(sz, Δ::CuArray, A::Number) = CUDA.@allowscalar Δ[sz:sz] end diff --git a/test/cuda.jl b/test/cuda.jl index 2820a776b..6fb83cd36 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -15,3 +15,10 @@ end log_grada = cu(Float32[1.0, 0.5, 0.33333334, 0.25, 0.2, 0.16666667, 0.14285715, 0.125, 0.11111111]) @test gradient(x -> w(x) |> sum, a) == (log_grada,) end + +@testset "vcat scalar indexing" begin + r = cu(rand(Float32, 3)) + grads = (cu(ones(Float32, 3)), nothing) + @test gradient((x,y) -> sum(vcat(x,y)), r, 5) == grads +end + From 8b9cc74f3313a58099c613bb28ee8b6a3cf6e32e Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Tue, 12 Jan 2021 21:19:55 +0530 Subject: [PATCH 003/490] Rm extra line --- src/lib/broadcast.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 80e1f663f..8ca5aa044 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -239,6 +239,5 @@ end Base.convert(T, xs), Δ -> (nothing, Base.convert(Array, Δ),) end - pull_block_vert(sz, Δ::CuArray, A::Number) = CUDA.@allowscalar Δ[sz:sz] end From bac62ef15150f1aae3d4be4cc62284fdffb97a93 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Tue, 12 Jan 2021 21:20:06 +0530 Subject: [PATCH 004/490] typo --- src/lib/broadcast.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 8ca5aa044..d90e03a04 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -239,5 +239,5 @@ end Base.convert(T, xs), Δ -> (nothing, Base.convert(Array, Δ),) end - pull_block_vert(sz, Δ::CuArray, A::Number) = CUDA.@allowscalar Δ[sz:sz] + pull_block_vert(sz, Δ::CuArray, A::Number) = CUDA.@allowscalar Δ[sz] end From 845685847afb56d06c88efdeda86e8ba904a05d4 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Wed, 13 Jan 2021 14:18:02 +0530 Subject: [PATCH 005/490] qualify CUDA --- src/lib/broadcast.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index d90e03a04..a3cde8094 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -239,5 +239,5 @@ end Base.convert(T, xs), Δ -> (nothing, Base.convert(Array, Δ),) end - pull_block_vert(sz, Δ::CuArray, A::Number) = CUDA.@allowscalar Δ[sz] + pull_block_vert(sz, Δ::CUDA.CuArray, A::Number) = CUDA.@allowscalar Δ[sz] end From af6fa3ef846a91dd2cd7e12481fb2fcee8a870c1 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Mon, 19 Apr 2021 21:57:08 +0200 Subject: [PATCH 006/490] Define custom adjoints for LogExpFunctions instead of StatsFuns --- Project.toml | 4 +- src/Zygote.jl | 2 +- src/lib/{statsfuns.jl => logexpfunctions.jl} | 3 +- test/gradcheck.jl | 76 ++++++++++---------- 4 files changed, 42 
insertions(+), 43 deletions(-) rename src/lib/{statsfuns.jl => logexpfunctions.jl} (97%) diff --git a/Project.toml b/Project.toml index 872df418e..80bef388f 100644 --- a/Project.toml +++ b/Project.toml @@ -41,8 +41,8 @@ CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" -StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c" +LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] -test = ["CUDA", "Distances", "FFTW", "FiniteDifferences", "StatsFuns", "Test"] +test = ["CUDA", "Distances", "FFTW", "FiniteDifferences", "LogExpFunctions", "Test"] diff --git a/src/Zygote.jl b/src/Zygote.jl index 614cd9a53..5c0d743fd 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -40,7 +40,7 @@ include("lib/forward.jl") include("lib/utils.jl") include("lib/range.jl") @init @require Distances="b4f34e82-e78d-54a5-968a-f98e89d6e8f7" include("lib/distances.jl") -@init @require StatsFuns="4c63d2b9-4356-54db-8cca-17b64c39e42c" include("lib/statsfuns.jl") +@init @require LogExpFunctions="2ab3a3ac-af41-5b50-aa03-7779005ae688" include("lib/logexpfunctions.jl") # we need to define this late, so that the genfuncs see lib.jl # Move using statements out of this file to help with sysimage building diff --git a/src/lib/statsfuns.jl b/src/lib/logexpfunctions.jl similarity index 97% rename from src/lib/statsfuns.jl rename to src/lib/logexpfunctions.jl index 85916cae8..1e5e4c0b6 100644 --- a/src/lib/statsfuns.jl +++ b/src/lib/logexpfunctions.jl @@ -1,5 +1,4 @@ -import .StatsFuns -using .StatsFuns: xlogx, xlogy, logistic, logit, log1psq, log1pexp, +using .LogExpFunctions: xlogx, xlogy, logistic, logit, log1psq, log1pexp, logsumexp, logaddexp, logsubexp using Base.Broadcast: broadcasted diff --git a/test/gradcheck.jl b/test/gradcheck.jl index b0619a194..89ff11fc4 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1196,44 +1196,44 @@ end @test gradcheck(x -> muladd(x[1], x[2], x[3]), [2.0, 3.0, 5.0]) end -import StatsFuns +import LogExpFunctions Zygote.refresh() @testset "xlogx" begin - @test gradcheck(x->2.5 * StatsFuns.xlogx(x[1]), [1.0]) - @test gradcheck(x->2.5 * StatsFuns.xlogx(x[1]), [2.45]) - @test gradtest(x -> StatsFuns.xlogx.(x), (3,3)) + @test gradcheck(x->2.5 * LogExpFunctions.xlogx(x[1]), [1.0]) + @test gradcheck(x->2.5 * LogExpFunctions.xlogx(x[1]), [2.45]) + @test gradtest(x -> LogExpFunctions.xlogx.(x), (3,3)) end @testset "xlogy" begin - @test gradcheck(x -> StatsFuns.xlogy(x[1], x[2]), [1.0, 2.0]) - @test gradcheck(x -> StatsFuns.xlogy(x[1], x[2]), [0.0, 2.0]) - @test gradtest((x,y) -> StatsFuns.xlogy.(x,y), (3,3), (3,3)) + @test gradcheck(x -> LogExpFunctions.xlogy(x[1], x[2]), [1.0, 2.0]) + @test gradcheck(x -> LogExpFunctions.xlogy(x[1], x[2]), [0.0, 2.0]) + @test gradtest((x,y) -> LogExpFunctions.xlogy.(x,y), (3,3), (3,3)) end @testset "logistic" begin - @test gradcheck(x->3.0 * StatsFuns.logistic(x[1]), [-5.0]) - @test gradcheck(x->3.0 * StatsFuns.logistic(x[1]), [-1.0]) - @test gradcheck(x->3.0 * StatsFuns.logistic(x[1]), [-eps()]) - @test gradcheck(x->3.0 * StatsFuns.logistic(x[1]), [0.0]) - @test gradcheck(x->3.0 * StatsFuns.logistic(x[1]), [eps()]) - @test gradcheck(x->3.0 * StatsFuns.logistic(x[1]), [1.0]) - @test gradcheck(x->3.0 * StatsFuns.logistic(x[1]), [5.0]) + @test gradcheck(x->3.0 * LogExpFunctions.logistic(x[1]), [-5.0]) + @test gradcheck(x->3.0 * LogExpFunctions.logistic(x[1]), 
[-1.0]) + @test gradcheck(x->3.0 * LogExpFunctions.logistic(x[1]), [-eps()]) + @test gradcheck(x->3.0 * LogExpFunctions.logistic(x[1]), [0.0]) + @test gradcheck(x->3.0 * LogExpFunctions.logistic(x[1]), [eps()]) + @test gradcheck(x->3.0 * LogExpFunctions.logistic(x[1]), [1.0]) + @test gradcheck(x->3.0 * LogExpFunctions.logistic(x[1]), [5.0]) end @testset "logit" begin - @test gradcheck(x->5.0 * StatsFuns.logit(x[1]), [0.1]) - @test gradcheck(x->5.0 * StatsFuns.logit(x[1]), [0.3]) - @test gradcheck(x->5.0 * StatsFuns.logit(x[1]), [0.5]) - @test gradcheck(x->5.0 * StatsFuns.logit(x[1]), [0.7]) - @test gradcheck(x->5.0 * StatsFuns.logit(x[1]), [0.9]) + @test gradcheck(x->5.0 * LogExpFunctions.logit(x[1]), [0.1]) + @test gradcheck(x->5.0 * LogExpFunctions.logit(x[1]), [0.3]) + @test gradcheck(x->5.0 * LogExpFunctions.logit(x[1]), [0.5]) + @test gradcheck(x->5.0 * LogExpFunctions.logit(x[1]), [0.7]) + @test gradcheck(x->5.0 * LogExpFunctions.logit(x[1]), [0.9]) end function test_log1pexp(T, xs) y = T(4.3) for x in xs - @test gradcheck(x->y * StatsFuns.log1pexp(x[1]), [x]) + @test gradcheck(x->y * LogExpFunctions.log1pexp(x[1]), [x]) end end @@ -1249,43 +1249,43 @@ end test_log1pexp(Float64, [33.3, 33.3 + eps(), 100.0]) end end - @test gradcheck(x->2.5 * StatsFuns.log1pexp(x[1]), [1.0]) - @test gradcheck(x->2.5 * StatsFuns.log1pexp(x[1]), [2.45]) - @test gradtest(x -> StatsFuns.log1pexp.(x), (3,3)) + @test gradcheck(x->2.5 * LogExpFunctions.log1pexp(x[1]), [1.0]) + @test gradcheck(x->2.5 * LogExpFunctions.log1pexp(x[1]), [2.45]) + @test gradtest(x -> LogExpFunctions.log1pexp.(x), (3,3)) end @testset "log1psq" begin rng = MersenneTwister(123456) @testset "Float64" begin for x in [-10.0, -5.0, -1.0, -eps(), 0.0, eps(), 1.0, 5.0, 10.0] - @test gradcheck(x->5.1 * StatsFuns.log1psq(x[1]), [x]) + @test gradcheck(x->5.1 * LogExpFunctions.log1psq(x[1]), [x]) end end end @testset "logaddexp" begin - @test gradcheck(x -> StatsFuns.logaddexp(x[1], x[2]), [1.0, 2.0]) - @test gradcheck(x -> StatsFuns.logaddexp(x[1], x[2]), [1.0, -1.0]) - @test gradcheck(x -> StatsFuns.logaddexp(x[1], x[2]), [-2.0, -3.0]) - @test gradcheck(x -> StatsFuns.logaddexp(x[1], x[2]), [5.0, 5.0]) - @test gradtest((x,y) -> StatsFuns.logaddexp.(x,y), (3,3), (3,3)) + @test gradcheck(x -> LogExpFunctions.logaddexp(x[1], x[2]), [1.0, 2.0]) + @test gradcheck(x -> LogExpFunctions.logaddexp(x[1], x[2]), [1.0, -1.0]) + @test gradcheck(x -> LogExpFunctions.logaddexp(x[1], x[2]), [-2.0, -3.0]) + @test gradcheck(x -> LogExpFunctions.logaddexp(x[1], x[2]), [5.0, 5.0]) + @test gradtest((x,y) -> LogExpFunctions.logaddexp.(x,y), (3,3), (3,3)) end @testset "logsubexp" begin - @test gradcheck(x -> StatsFuns.logsubexp(x[1], x[2]), [1.0, 2.0]) - @test gradcheck(x -> StatsFuns.logsubexp(x[1], x[2]), [1.0, -1.0]) - @test gradcheck(x -> StatsFuns.logsubexp(x[1], x[2]), [-2.0, -3.0]) - @test gradtest((x,y) -> StatsFuns.logsubexp.(x,y), (3,3), (3,3)) + @test gradcheck(x -> LogExpFunctions.logsubexp(x[1], x[2]), [1.0, 2.0]) + @test gradcheck(x -> LogExpFunctions.logsubexp(x[1], x[2]), [1.0, -1.0]) + @test gradcheck(x -> LogExpFunctions.logsubexp(x[1], x[2]), [-2.0, -3.0]) + @test gradtest((x,y) -> LogExpFunctions.logsubexp.(x,y), (3,3), (3,3)) end @testset "logsumexp" begin rng = MersenneTwister(123456) @testset "Float64" begin - @test gradtest(StatsFuns.logsumexp, randn(rng, 1)) - @test gradtest(StatsFuns.logsumexp, randn(rng, 1, 1)) - @test gradtest(StatsFuns.logsumexp, randn(rng, 3)) - @test gradtest(StatsFuns.logsumexp, randn(rng, 3, 4, 5)) - @test 
gradtest(x -> sum(StatsFuns.logsumexp(x; dims=1)), randn(rng, 4, 4)) + @test gradtest(LogExpFunctions.logsumexp, randn(rng, 1)) + @test gradtest(LogExpFunctions.logsumexp, randn(rng, 1, 1)) + @test gradtest(LogExpFunctions.logsumexp, randn(rng, 3)) + @test gradtest(LogExpFunctions.logsumexp, randn(rng, 3, 4, 5)) + @test gradtest(x -> sum(LogExpFunctions.logsumexp(x; dims=1)), randn(rng, 4, 4)) end end From 4f910f25f9362f7364b4fe2294f0021aab0f7e72 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Mon, 19 Apr 2021 21:57:47 +0200 Subject: [PATCH 007/490] Bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 80bef388f..93172abd7 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.9" +version = "0.6.10" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From a34553eeac36969d62d8672e5b2b810ec82a32d7 Mon Sep 17 00:00:00 2001 From: Dmitri Iouchtchenko Date: Wed, 21 Apr 2021 11:08:57 -0400 Subject: [PATCH 008/490] Handle nothing in map --- src/lib/array.jl | 9 ++++++--- test/gradcheck.jl | 35 +++++++++++++++++++++++++++++++++++ 2 files changed, 41 insertions(+), 3 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 3bfad52f6..344db8dcd 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -203,6 +203,7 @@ for (mapfunc,∇mapfunc) in [(:map,:∇map),(:pmap,:∇pmap)] else ys, backs = unzip(ys_and_backs) ys, function (Δ) + isnothing(Δ) && return nothing # Apply pullbacks in reverse order. Needed for correctness if `f` is stateful. Δf_and_args_zipped = $mapfunc((f, δ) -> f(δ), _tryreverse($mapfunc, backs, Δ)...) Δf_and_args = unzip(_tryreverse($mapfunc, Δf_and_args_zipped)) @@ -234,11 +235,13 @@ end @nograd workers function _pullback(cx::AContext, ::typeof(collect), g::Base.Generator) - y, back = ∇map(cx, g.f, g.iter) - y, function (ȳ) - f̄, x̄ = back(ȳ) + y, b = ∇map(cx, g.f, g.iter) + back(::Nothing) = nothing + function back(ȳ) + f̄, x̄ = b(ȳ) (nothing, (f = f̄, iter = x̄),) end + y, back end @adjoint iterate(r::UnitRange, i...) = iterate(r, i...), _ -> nothing diff --git a/test/gradcheck.jl b/test/gradcheck.jl index b0619a194..6f1e5a996 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1672,3 +1672,38 @@ end gradient(x->norm(x*[1im, 1]), 1.23) gradient(x->norm(x*[1im 1]), 1.23) end + +# https://github.com/FluxML/Zygote.jl/issues/804 +@testset "Unused comprehension" begin + # Comprehension is used. + io = IOBuffer() + s = 0.0 + gs = gradient([1.0, 2.0]) do xs + sum([(print(io, x); s += x; s * x) for x in xs]) + end + @test String(take!(io)) == "1.02.0" + @test s == 3.0 + @test gs == ([4.0, 5.0],) + + # Comprehension is not used. + io = IOBuffer() + s = 0.0 + gs = gradient([1.0, 2.0]) do xs + sum([(print(io, x); s += x; s * x) for x in xs]) + 0.0 + end + @test String(take!(io)) == "1.02.0" + @test s == 3.0 + @test gs == (nothing,) + + # Comprehension is empty and not used. 
+ io = IOBuffer() + s = 0.0 + gs = gradient([]) do xs + [(print(io, x); s += x; s * x) for x in xs] + 0.0 + end + @test String(take!(io)) == "" + @test s == 0.0 + @test gs == (nothing,) +end From 148ebeb6aec920ec33d1c1e1e0335a0d83dc3d7b Mon Sep 17 00:00:00 2001 From: Simeon Schaub Date: Mon, 26 Apr 2021 21:50:00 +0200 Subject: [PATCH 009/490] fix adjoint for sum addresses the second part of #897 --- src/lib/array.jl | 11 +++-------- test/lib/array.jl | 4 ++++ test/runtests.jl | 1 + 3 files changed, 8 insertions(+), 8 deletions(-) create mode 100644 test/lib/array.jl diff --git a/src/lib/array.jl b/src/lib/array.jl index 3bfad52f6..321753860 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -270,14 +270,9 @@ end sum(xs, dims = dims), Δ -> (nothing,) end -_normalize_kws(kws::NamedTuple) = kws -_normalize_kws(kws) = NamedTuple() - -function _pullback(cx::AContext, kwtype, kws, ::typeof(sum), f, xs::AbstractArray) - norm_kws = _normalize_kws(kws) - @assert !haskey(norm_kws, :init) # TODO add init support (julia 1.6) - y, back = pullback(cx, (f, xs) -> sum(f.(xs); norm_kws...), f, xs) - y, ȳ -> (nothing, nothing, nothing, back(ȳ)...) +@adjoint function sum(f, xs::AbstractArray; kws...) + @assert !haskey(kws, :init) # TODO add init support (julia 1.6) + return pullback(__context__, (f, xs) -> sum(f.(xs); kws...), f, xs) end @adjoint function sum(::typeof(abs2), X::AbstractArray; dims = :) diff --git a/test/lib/array.jl b/test/lib/array.jl new file mode 100644 index 000000000..380d1bb8f --- /dev/null +++ b/test/lib/array.jl @@ -0,0 +1,4 @@ +using LinearAlgebra + +# issue 897 +@test gradient(x -> sum(sin, Diagonal(x)), ones(2)) == ([0.5403023058681398, 0.5403023058681398],) diff --git a/test/runtests.jl b/test/runtests.jl index b6b7aab0b..f20b59a7e 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -25,6 +25,7 @@ end @testset "lib" begin include("lib/number.jl") include("lib/lib.jl") + include("lib/array.jl") end @testset "Features" begin From 3218e50551e5c525452185de2cf8bfd2fba6fd65 Mon Sep 17 00:00:00 2001 From: Simeon Schaub Date: Mon, 26 Apr 2021 22:02:56 +0200 Subject: [PATCH 010/490] fix differentiation of loopinfo exprs addresses the first part of #897 --- src/compiler/reverse.jl | 2 +- test/compiler.jl | 3 +++ test/features.jl | 2 +- 3 files changed, 5 insertions(+), 2 deletions(-) diff --git a/src/compiler/reverse.jl b/src/compiler/reverse.jl index e144be68d..e746684f7 100644 --- a/src/compiler/reverse.jl +++ b/src/compiler/reverse.jl @@ -275,7 +275,7 @@ function adjoint(pr::Primal) end elseif ex isa Core.PiNode grads[ex.val] = grads[v] - elseif isexpr(ex, GlobalRef, :call, :isdefined, :inbounds, :meta) + elseif isexpr(ex, GlobalRef, :call, :isdefined, :inbounds, :meta, :loopinfo) elseif isexpr(ex) push!(rb, stmt(xcall(Base, :error, "Can't differentiate $(ex.head) expression"), line = b[v].line)) diff --git a/test/compiler.jl b/test/compiler.jl index c97a50f61..af8e6ccb7 100644 --- a/test/compiler.jl +++ b/test/compiler.jl @@ -143,3 +143,6 @@ end ms = MyStruct(1, 2) @test Zygote.gradient(sumall, ms) == ((a = 2, b = 2),) end + +# issue 897 +@test gradient(x -> sum(norm, collect(eachcol(x))), ones(3, 400))[1] ≈ fill(0.5773502691896258, 3, 400) diff --git a/test/features.jl b/test/features.jl index 5531ebd0b..48df0c87c 100644 --- a/test/features.jl +++ b/test/features.jl @@ -402,7 +402,7 @@ function pow_simd(x, n) return r end -@test_broken gradient(pow_simd, 2, 3) == (12,nothing) +@test gradient(pow_simd, 2, 3) == (12,nothing) @testset "tuple getindex" begin @test gradient(x -> 
size(x)[2], ones(2,2,2)) == (nothing,) From 703e5bce12171b0e9fa4d4e29f5d30bba3775d4b Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Mon, 26 Apr 2021 23:19:47 -0400 Subject: [PATCH 011/490] add diagonal of Hessian function --- docs/src/utils.md | 1 + src/Zygote.jl | 2 +- src/lib/forward.jl | 30 ++++++++++++++++++++++++++++++ src/lib/grad.jl | 28 ++++++++++++++++++++++++++++ test/utils.jl | 21 +++++++++++++++++++++ 5 files changed, 81 insertions(+), 1 deletion(-) diff --git a/docs/src/utils.md b/docs/src/utils.md index c46b646ce..4d3063ee2 100644 --- a/docs/src/utils.md +++ b/docs/src/utils.md @@ -6,6 +6,7 @@ or a Hessian (by taking a second derivative). ```@docs Zygote.jacobian Zygote.hessian +Zygote.diaghessian ``` Zygote also provides a set of helpful utilities. These are all "user-level" tools – diff --git a/src/Zygote.jl b/src/Zygote.jl index 614cd9a53..dc5c785ed 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -12,7 +12,7 @@ using MacroTools, Requires using MacroTools: @forward import Distributed: pmap, CachingPool, workers -export Params, gradient, jacobian, hessian, pullback, pushforward, @code_adjoint +export Params, gradient, jacobian, hessian, diaghessian, pullback, pushforward, @code_adjoint const Numeric{T<:Number} = Union{T, AbstractArray{<:T}} diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 7a6e125ff..f40ff8f3e 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -46,6 +46,36 @@ vec_scalar(x::Real) = [x] reshape_scalar(x, y) = reshape(y, size(x)) reshape_scalar(x::Real, y) = y[] +function extract_diag(offset, xs::AbstractArray{ForwardDiff.Dual{T,V,N}}) where {T,V,N} + D = similar(xs, V, N) + for j in 1:min(N, length(xs)-offset) + D[j] = xs[offset+j].partials.values[j] + end + return map(x -> x.value, xs), D +end + +function forward_diag(f, x::AbstractArray, ::Val{N}) where N + y, _D = extract_diag(0, f(seed(x, Val(N)))) + D = similar(_D, size(x)...) + D[1:N] = _D + offset = 0 + while offset + N < length(x) + offset += N + _, _D = extract_diag(offset, f(seed(x, Val(N), offset))) + range = (1+offset):min(N+offset,length(x)) + D[range] = @view _D[range.-offset] + end + return y, D +end + +function forward_diag(f, x::AbstractArray) + if length(x) < ForwardDiff.DEFAULT_CHUNK_THRESHOLD + forward_diag(f, x, Val(length(x))) + else + forward_diag(f, x, Val(ForwardDiff.DEFAULT_CHUNK_THRESHOLD)) + end +end + """ forwarddiff(f, x) -> f(x) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index 7a1b0bdd8..77e0968dc 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -213,3 +213,31 @@ function jacobian(f, pars::Params) end Grads(out, pars) end + +""" + diaghessian(f, args...) + +Diagonal part of the Hessian, literally `diaghessian(f, x)[1] == diag(hessian(f,x))` +for one vector argument `x`. In general this returns a tuple, with an array the same shape +as each argument, `d[i] = ∂²y/∂x[i]∂x[i]`, where `y = f(args...)` must be a real number. + +Like [`hessian`](@ref) it uses ForwardDiff over Zygote. + +!!! warning + For arguments of any type except `Number` & `AbstractArray`, the result is `nothing`. +""" +function diaghessian(f, args...) 
+ ntuple(length(args)) do n + x = args[n] + if x isa AbstractArray + forward_diag(x -> gradient(f, _splice(x, args, Val(n))...)[n], x)[2] + elseif x isa Number + ForwardDiff.derivative(x -> gradient(f, _splice(x, args, Val(n))...)[n], x) + end + end +end + +# diaghessian(f, x::AbstractArray) = (forward_diag(x -> gradient(f, x)[1], x)[2],) + +_splice(x, args, ::Val{n}) where {n} = ntuple(i -> i==n ? x : args[i], length(args)) + diff --git a/test/utils.jl b/test/utils.jl index d09fc2dc2..7aef69807 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -18,6 +18,27 @@ using Zygote: hessian_dual, hessian_reverse @test_throws Exception hess(identity, randn(2)) end +@testset "diagonal hessian" begin + @test diaghessian(x -> x[1]*x[2]^2, [1, pi]) == ([0, 2],) + + xs, y = randn(2,3), rand() + f34(xs, y) = xs[1] * (sum(xs .^ (1:3)') + y^4) # non-diagonal Hessian, two arguments + dx, dy = diaghessian(f34, xs, y) + @test size(dx) == size(xs) + @test vec(dx) ≈ diag(hessian(x -> f34(x,y), xs)) + @test dy ≈ hessian(y -> f34(xs,y), y) + + zs = randn(7,13) # test chunk mode + @test length(zs) > ForwardDiff.DEFAULT_CHUNK_THRESHOLD + @test length(zs) % ForwardDiff.DEFAULT_CHUNK_THRESHOLD != 0 + f713(zs) = sum(vec(zs)' .* exp.(vec(zs))) + @test vec(diaghessian(f713, zs)[1]) ≈ diag(hessian(f713, zs)) + + @test_throws Exception diaghessian(sin, im*pi) + @test_throws Exception diaghessian(x -> x+im, pi) + @test_throws Exception diaghessian(identity, randn(2)) +end + @testset "jacobian(f, args...)" begin @test jacobian(identity, [1,2])[1] == [1 0; 0 1] From 887f9b34450aefb152c7e6df97e07673bd8c29db Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 27 Apr 2021 00:28:09 -0400 Subject: [PATCH 012/490] forward over ForwardDiff --- src/lib/forward.jl | 6 ++++++ test/utils.jl | 11 +++++++++++ 2 files changed, 17 insertions(+) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index f40ff8f3e..b6913dd4a 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -131,3 +131,9 @@ forwarddiff(f, x) = f(x) y, J = forward_jacobian(f, x) return y, ȳ -> (nothing, reshape_scalar(x, J*vec_scalar(ȳ))) end + +# Second derivatives +@adjoint ForwardDiff.derivative(f, x) = pullback(forwarddiff, x -> ForwardDiff.derivative(f, x), x) +@adjoint ForwardDiff.gradient(f, x) = pullback(forwarddiff, x -> ForwardDiff.gradient(f, x), x) +@adjoint ForwardDiff.jacobian(f, x) = pullback(forwarddiff, x -> ForwardDiff.jacobian(f, x), x) + diff --git a/test/utils.jl b/test/utils.jl index 7aef69807..d8ebae5de 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -82,3 +82,14 @@ end @test Jxy[ys] ≈ [1 0 0; 0 1 0] @test Jxy[xs] ≈ [2 6 4 8; 2 6 4 8] end + +@testset "adjoints of ForwardDiff functions" begin + f1(x) = ForwardDiff.gradient(x -> sum(exp.(x.+1)), x) + x1 = randn(3,7) + @test Zygote.jacobian(f1, x1)[1] ≈ ForwardDiff.jacobian(f1, x1) + + f2(x) = ForwardDiff.jacobian(x -> log.(x[1:3] .+ x[2:4]), x) + x2 = rand(5) .+ 1 + @test Zygote.jacobian(f2, x2)[1] ≈ ForwardDiff.jacobian(f2, x2) +end + From 0e52277388d049a3b023af5c4e81ec8ed725f9d9 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Tue, 27 Apr 2021 16:38:23 +0200 Subject: [PATCH 013/490] Improve performance of `Base.Fix1` and `Base.Fix2` --- src/lib/base.jl | 14 ++++++++++++++ test/gradcheck.jl | 15 +++++++++++++++ 2 files changed, 29 insertions(+) diff --git a/src/lib/base.jl b/src/lib/base.jl index d90998c2c..67f8b2c5e 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -140,3 +140,17 @@ end @adjoint Base.nameof(x::UnionAll) = nameof(x), _ -> 
(nothing,) @nograd typeintersect + +# Base.Fix1 and Base.Fix2: https://github.com/FluxML/Zygote.jl/issues/957 +@adjoint function (g::Base.Fix1)(y) + f = g.f + x = g.x + fallback_Fix1(y) = f(x, y) + return _pullback(__context__, fallback_Fix1, y) +end +@adjoint function (g::Base.Fix2)(y) + f = g.f + x = g.x + fallback_Fix2(y) = f(y, x) + return _pullback(__context__, fallback_Fix2, y) +end diff --git a/test/gradcheck.jl b/test/gradcheck.jl index b0619a194..a5be4fb98 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1672,3 +1672,18 @@ end gradient(x->norm(x*[1im, 1]), 1.23) gradient(x->norm(x*[1im 1]), 1.23) end + +@testset "Fix1 and Fix2" begin + @test gradcheck(x -> prod(Base.Fix1(+, 1), x), randn(100)) + @test gradcheck(x -> prod(Base.Fix2(+, 1), x), randn(100)) + + # compile once and check the execution times compared with a closure + # https://github.com/FluxML/Zygote.jl/issues/957 + x = randn(100) + gradient(x -> prod(y -> y + 1, x), x) + t = @elapsed(gradient(x -> prod(y -> y + 1, x), x)) + gradient(x -> prod(Base.Fix1(+, 1), x), x) + @test @elapsed(gradient(x -> prod(Base.Fix1(+, 1), x), x)) < 2 * t + gradient(x -> prod(Base.Fix1(+, 1), x), x) + @test @elapsed(gradient(x -> prod(Base.Fix2(+, 1), x), x)) < 2 * t +end From 419d9da62c52dc9329b92d844b6fd6270ca6bf02 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Tue, 27 Apr 2021 16:41:18 +0200 Subject: [PATCH 014/490] Bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 68d7366a9..3decc2e10 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.10" +version = "0.6.11" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 29a624789ca29bc319e230b832ead54d932db639 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Tue, 27 Apr 2021 16:45:24 +0200 Subject: [PATCH 015/490] Fix typo --- test/gradcheck.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index a5be4fb98..ddd0ebd3b 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1684,6 +1684,6 @@ end t = @elapsed(gradient(x -> prod(y -> y + 1, x), x)) gradient(x -> prod(Base.Fix1(+, 1), x), x) @test @elapsed(gradient(x -> prod(Base.Fix1(+, 1), x), x)) < 2 * t - gradient(x -> prod(Base.Fix1(+, 1), x), x) + gradient(x -> prod(Base.Fix2(+, 1), x), x) @test @elapsed(gradient(x -> prod(Base.Fix2(+, 1), x), x)) < 2 * t end From dad3e2067c27173c3fbe4606bf29af9224753dde Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 27 Apr 2021 13:13:02 -0400 Subject: [PATCH 016/490] docstring --- src/lib/grad.jl | 24 +++++++++++++++++++----- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index 77e0968dc..6146e2154 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -217,14 +217,30 @@ end """ diaghessian(f, args...) -Diagonal part of the Hessian, literally `diaghessian(f, x)[1] == diag(hessian(f,x))` -for one vector argument `x`. In general this returns a tuple, with an array the same shape -as each argument, `d[i] = ∂²y/∂x[i]∂x[i]`, where `y = f(args...)` must be a real number. +Diagonal part of the Hessian. Returns a tuple containing +an array `h` the same shape as each argument `x`, +with `Hᵢᵢ = h[i] = ∂²y/∂x[i]∂x[i]`. +The original evaluation `y = f(args...)` must give a real number `y`. +For one vector argument `x`, this is equivalent to `(diag(hessian(f,x)),)`. 
Like [`hessian`](@ref) it uses ForwardDiff over Zygote. !!! warning For arguments of any type except `Number` & `AbstractArray`, the result is `nothing`. + +# Examples +```jldoctest; setup=:(using Zygote, LinearAlgebra) +julia> diaghessian(x -> sum(x.^3), [1 2; 3 4])[1] +2×2 Matrix{$Int}: + 6 12 + 18 24 + +julia> Diagonal(vec(ans)) == hessian(x -> sum(x.^3), [1 2; 3 4]) +true + +julia> diaghessian((x,y) -> sum(x .* y .* y'), [1 22; 333 4], [0.5, 0.666]) +([0.0 0.0; 0.0 0.0], [2.0, 8.0]) +``` """ function diaghessian(f, args...) ntuple(length(args)) do n @@ -237,7 +253,5 @@ function diaghessian(f, args...) end end -# diaghessian(f, x::AbstractArray) = (forward_diag(x -> gradient(f, x)[1], x)[2],) - _splice(x, args, ::Val{n}) where {n} = ntuple(i -> i==n ? x : args[i], length(args)) From 052d5266064074e0a9a4526725b1e444a17dc559 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 27 Apr 2021 13:23:45 -0400 Subject: [PATCH 017/490] load ForwardDiff --- test/utils.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/test/utils.jl b/test/utils.jl index d8ebae5de..9b5171481 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -1,4 +1,5 @@ using LinearAlgebra +using ForwardDiff using Zygote: hessian_dual, hessian_reverse @testset "hessian: $hess" for hess in [hessian_dual, hessian_reverse] From 040e047feee36b195b869070fa414a986ea8aa41 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 27 Apr 2021 13:31:46 -0400 Subject: [PATCH 018/490] examples --- src/lib/grad.jl | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index 6146e2154..065c244ce 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -235,11 +235,19 @@ julia> diaghessian(x -> sum(x.^3), [1 2; 3 4])[1] 6 12 18 24 -julia> Diagonal(vec(ans)) == hessian(x -> sum(x.^3), [1 2; 3 4]) +julia> Diagonal(vec(ans)) == hessian(x -> sum(x.^3), [1 2; 3 4]) # full Hessian is diagonal true -julia> diaghessian((x,y) -> sum(x .* y .* y'), [1 22; 333 4], [0.5, 0.666]) +julia> diaghessian((x,y) -> sum(x .* y .* y'), [1 22; 333 4], [0.5, 0.666]) # two array arguments ([0.0 0.0; 0.0 0.0], [2.0, 8.0]) + +julia> diaghessian(atan, 1, 2) # scalar arguments +(-0.16, 0.16) + +julia> hessian(xy -> atan(xy[1], xy[2]), [1, 2]) # full Hessian is not diagonal +2×2 Matrix{Float64}: + -0.16 -0.12 + -0.12 0.16 ``` """ function diaghessian(f, args...) From 4098e7048149a1af7aa69223c2f101291358d915 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Fri, 30 Apr 2021 11:49:42 +0200 Subject: [PATCH 019/490] Add lower bound for StatsFuns --- Project.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Project.toml b/Project.toml index 93172abd7..1cb75371f 100644 --- a/Project.toml +++ b/Project.toml @@ -33,6 +33,7 @@ MacroTools = "0.5" NaNMath = "0.3" Requires = "1.1" SpecialFunctions = "0.10, 1.0" +StatsFuns = "0.9.8" ZygoteRules = "0.2.1" julia = "1.3" From 7ac4d8193c8ea7ae6a782cbf46482652dd2b23af Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 27 Apr 2021 14:58:32 -0400 Subject: [PATCH 020/490] speed improvement --- src/lib/grad.jl | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index 065c244ce..1375c9d63 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -217,9 +217,8 @@ end """ diaghessian(f, args...) -Diagonal part of the Hessian. 
Returns a tuple containing -an array `h` the same shape as each argument `x`, -with `Hᵢᵢ = h[i] = ∂²y/∂x[i]∂x[i]`. +Diagonal part of the Hessian. Returns a tuple containing, for each argument `x`, +`h` of the same shape with `h[i] = Hᵢᵢ = ∂²y/∂x[i]∂x[i]`. The original evaluation `y = f(args...)` must give a real number `y`. For one vector argument `x`, this is equivalent to `(diag(hessian(f,x)),)`. @@ -252,14 +251,14 @@ julia> hessian(xy -> atan(xy[1], xy[2]), [1, 2]) # full Hessian is not diagonal """ function diaghessian(f, args...) ntuple(length(args)) do n - x = args[n] - if x isa AbstractArray - forward_diag(x -> gradient(f, _splice(x, args, Val(n))...)[n], x)[2] - elseif x isa Number - ForwardDiff.derivative(x -> gradient(f, _splice(x, args, Val(n))...)[n], x) + let x = args[n], valn = Val(n) # let Val improves speed, sometimes + if x isa AbstractArray + forward_diag(x -> gradient(f, _splice(x, args, valn)...)[n], x)[2] + elseif x isa Number + ForwardDiff.derivative(x -> gradient(f, _splice(x, args, valn)...)[n], x) + end end end end _splice(x, args, ::Val{n}) where {n} = ntuple(i -> i==n ? x : args[i], length(args)) - From 125f4146764bcef4d93d7b0eeaaae939ed6d3432 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Fri, 30 Apr 2021 13:52:05 +0200 Subject: [PATCH 021/490] Add StatsFuns to `[extras]` (otherwise Julia complains) --- Project.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Project.toml b/Project.toml index 1cb75371f..32849f933 100644 --- a/Project.toml +++ b/Project.toml @@ -43,6 +43,7 @@ Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688" +StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] From b097a0dff57d539408389f12ef41a8fc7dea8a8a Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 30 Apr 2021 07:39:36 -0400 Subject: [PATCH 022/490] reduce allocations --- src/lib/forward.jl | 20 +++++++++++--------- src/lib/grad.jl | 4 +++- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index b6913dd4a..7e6b6e0fa 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -46,26 +46,28 @@ vec_scalar(x::Real) = [x] reshape_scalar(x, y) = reshape(y, size(x)) reshape_scalar(x::Real, y) = y[] -function extract_diag(offset, xs::AbstractArray{ForwardDiff.Dual{T,V,N}}) where {T,V,N} - D = similar(xs, V, N) +# very similar functions needed for diaghessian: + +function extract_diag!(_D, offset, xs::AbstractArray{ForwardDiff.Dual{T,V,N}}) where {T,V,N} for j in 1:min(N, length(xs)-offset) - D[j] = xs[offset+j].partials.values[j] + _D[j] = xs[offset+j].partials.values[j] end - return map(x -> x.value, xs), D end -function forward_diag(f, x::AbstractArray, ::Val{N}) where N - y, _D = extract_diag(0, f(seed(x, Val(N)))) - D = similar(_D, size(x)...) 
+function forward_diag(f, x::AbstractArray{T}, ::Val{N}) where {N,T} + fx = f(seed(x, Val(N))) + D = similar(x, ForwardDiff.valtype(eltype(fx))) + _D = similar(D, N) + extract_diag!(_D, 0, fx) D[1:N] = _D offset = 0 while offset + N < length(x) offset += N - _, _D = extract_diag(offset, f(seed(x, Val(N), offset))) + extract_diag!(_D, offset, f(seed(x, Val(N), offset))) range = (1+offset):min(N+offset,length(x)) D[range] = @view _D[range.-offset] end - return y, D + return map(y -> y.value, fx), D end function forward_diag(f, x::AbstractArray) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index 1375c9d63..ab09bffeb 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -50,6 +50,8 @@ is higher-dimensional. This uses forward over reverse, ForwardDiff over Zygote, calling `hessian_dual(f, x)`. See [`hessian_reverse`](@ref) for an all-Zygote alternative. +See also [`diaghessian`](@ref) to compute only the diagonal part. + # Examples ```jldoctest; setup=:(using Zygote) @@ -240,7 +242,7 @@ true julia> diaghessian((x,y) -> sum(x .* y .* y'), [1 22; 333 4], [0.5, 0.666]) # two array arguments ([0.0 0.0; 0.0 0.0], [2.0, 8.0]) -julia> diaghessian(atan, 1, 2) # scalar arguments +julia> diaghessian(atan, 1, 2) # two scalar arguments (-0.16, 0.16) julia> hessian(xy -> atan(xy[1], xy[2]), [1, 2]) # full Hessian is not diagonal From c4c77a0bf24774e4003e9756b3770cff04454a0a Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 30 Apr 2021 22:29:24 -0400 Subject: [PATCH 023/490] speed improvements --- src/lib/forward.jl | 25 ++++++++++++++----------- src/lib/grad.jl | 2 +- 2 files changed, 15 insertions(+), 12 deletions(-) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 7e6b6e0fa..1637df165 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -8,6 +8,12 @@ function seed(x, ::Val{N}, offset = 0) where N Dual(x, ntuple(j -> j+offset == i, Val(N))) end end +function seed!(xplus, x, ::Val{N}, offset) where N + @assert size(x) == size(xplus) + map!(xplus, x, reshape(1:length(x), size(x))) do x, i + Dual(x, ntuple(j -> j+offset == i, Val(N))) + end +end extract(x::ForwardDiff.Dual) = x.value, [x.partials...] @@ -48,26 +54,23 @@ reshape_scalar(x::Real, y) = y[] # very similar functions needed for diaghessian: -function extract_diag!(_D, offset, xs::AbstractArray{ForwardDiff.Dual{T,V,N}}) where {T,V,N} +function extract_diag!(out, offset, xs::AbstractArray{ForwardDiff.Dual{T,V,N}}) where {T,V,N} for j in 1:min(N, length(xs)-offset) - _D[j] = xs[offset+j].partials.values[j] + out[offset+j] = xs[offset+j].partials.values[j] end end function forward_diag(f, x::AbstractArray{T}, ::Val{N}) where {N,T} - fx = f(seed(x, Val(N))) - D = similar(x, ForwardDiff.valtype(eltype(fx))) - _D = similar(D, N) - extract_diag!(_D, 0, fx) - D[1:N] = _D + xplus = seed(x, Val(N)) + fx = f(xplus) + out = similar(x, ForwardDiff.valtype(eltype(fx))) + extract_diag!(out, 0, fx) offset = 0 while offset + N < length(x) offset += N - extract_diag!(_D, offset, f(seed(x, Val(N), offset))) - range = (1+offset):min(N+offset,length(x)) - D[range] = @view _D[range.-offset] + extract_diag!(out, offset, f(seed!(xplus, x, Val(N), offset))) end - return map(y -> y.value, fx), D + return map(y -> y.value, fx), out end function forward_diag(f, x::AbstractArray) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index ab09bffeb..f88aa4a72 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -217,7 +217,7 @@ function jacobian(f, pars::Params) end """ - diaghessian(f, args...) 
+ diaghessian(f, args...) -> Tuple Diagonal part of the Hessian. Returns a tuple containing, for each argument `x`, `h` of the same shape with `h[i] = Hᵢᵢ = ∂²y/∂x[i]∂x[i]`. From d0fa801be22fb109b196e8ace9812f2c89408469 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 30 Apr 2021 22:30:07 -0400 Subject: [PATCH 024/490] another approach, not faster? --- src/lib/forward.jl | 47 ++++++++++++++++++++++++++++++++++++++++++++++ src/lib/grad.jl | 14 ++++++++++++++ 2 files changed, 61 insertions(+) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 1637df165..06f2c62ff 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -81,6 +81,53 @@ function forward_diag(f, x::AbstractArray) end end +# and another approach: all forward, directly 2nd order + +seed_2nd(x::Real, ::Val) = Dual(Dual(x, true), true) + +function seed_2nd(xs, ::Val{N}, offset = 0) where N + map(xs, reshape(1:length(xs), size(xs))) do x, i + b = ntuple(j -> j+offset == i, Val(N)) + Dual(Dual(x, b), b) + end +end +function seed_2nd!(xplus, xs, ::Val{N}, offset) where N + map!(xplus, xs, reshape(1:length(xs), size(xs))) do x, i + b = ntuple(j -> j+offset == i, Val(N)) + Dual(Dual(x, b), b) + end +end + +function extract_2nd!(out, fx::ForwardDiff.Dual{T,V,N}, offset) where {T,V,N} + for j in 1:min(N, length(out)-offset) + out[j+offset] = fx.partials.values[j].partials.values[j] + end +end + +function forward_2nd(f, x::AbstractArray{T}, ::Val{N}) where {N,T} + xplus = seed_2nd(x, Val(N), 0) + fx = f(xplus) + out = similar(x, ForwardDiff.valtype(ForwardDiff.valtype(typeof(fx)))) + extract_2nd!(out, fx, 0) + offset = 0 + while offset + N < length(x) + offset += N + fx = f(seed_2nd!(xplus, x, Val(N), offset)) + extract_2nd!(out, fx, offset) + end + return fx.value.value, out +end + +function forward_2nd(f, x::AbstractArray) + # if length(x) < ForwardDiff.DEFAULT_CHUNK_THRESHOLD + # forward_2nd(f, x, Val(length(x))) + # else + # forward_2nd(f, x, Val(ForwardDiff.DEFAULT_CHUNK_THRESHOLD ÷ 2)) + forward_2nd(f, x, Val(3)) + # end +end + + """ forwarddiff(f, x) -> f(x) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index f88aa4a72..ab1815b71 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -264,3 +264,17 @@ function diaghessian(f, args...) end _splice(x, args, ::Val{n}) where {n} = ntuple(i -> i==n ? x : args[i], length(args)) + +function diaghessian_2nd(f, args...) + ntuple(length(args)) do n + let x = args[n], valn = Val(n) + if x isa AbstractArray + forward_2nd(x -> f(_splice(x, args, valn)...), x)[2] + elseif x isa Number + ForwardDiff.hessian(x -> f(_splice(x[1], args, valn)...), [x])[1] + end + end + end +end + +export diaghessian_2nd From 7b3e9c20dcd37806e566014418a443a488a12fb9 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 30 Apr 2021 22:45:41 -0400 Subject: [PATCH 025/490] Revert "another approach, not faster?" This reverts commit d0fa801be22fb109b196e8ace9812f2c89408469. 
--- src/lib/forward.jl | 47 ---------------------------------------------- src/lib/grad.jl | 14 -------------- 2 files changed, 61 deletions(-) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 06f2c62ff..1637df165 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -81,53 +81,6 @@ function forward_diag(f, x::AbstractArray) end end -# and another approach: all forward, directly 2nd order - -seed_2nd(x::Real, ::Val) = Dual(Dual(x, true), true) - -function seed_2nd(xs, ::Val{N}, offset = 0) where N - map(xs, reshape(1:length(xs), size(xs))) do x, i - b = ntuple(j -> j+offset == i, Val(N)) - Dual(Dual(x, b), b) - end -end -function seed_2nd!(xplus, xs, ::Val{N}, offset) where N - map!(xplus, xs, reshape(1:length(xs), size(xs))) do x, i - b = ntuple(j -> j+offset == i, Val(N)) - Dual(Dual(x, b), b) - end -end - -function extract_2nd!(out, fx::ForwardDiff.Dual{T,V,N}, offset) where {T,V,N} - for j in 1:min(N, length(out)-offset) - out[j+offset] = fx.partials.values[j].partials.values[j] - end -end - -function forward_2nd(f, x::AbstractArray{T}, ::Val{N}) where {N,T} - xplus = seed_2nd(x, Val(N), 0) - fx = f(xplus) - out = similar(x, ForwardDiff.valtype(ForwardDiff.valtype(typeof(fx)))) - extract_2nd!(out, fx, 0) - offset = 0 - while offset + N < length(x) - offset += N - fx = f(seed_2nd!(xplus, x, Val(N), offset)) - extract_2nd!(out, fx, offset) - end - return fx.value.value, out -end - -function forward_2nd(f, x::AbstractArray) - # if length(x) < ForwardDiff.DEFAULT_CHUNK_THRESHOLD - # forward_2nd(f, x, Val(length(x))) - # else - # forward_2nd(f, x, Val(ForwardDiff.DEFAULT_CHUNK_THRESHOLD ÷ 2)) - forward_2nd(f, x, Val(3)) - # end -end - - """ forwarddiff(f, x) -> f(x) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index ab1815b71..f88aa4a72 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -264,17 +264,3 @@ function diaghessian(f, args...) end _splice(x, args, ::Val{n}) where {n} = ntuple(i -> i==n ? x : args[i], length(args)) - -function diaghessian_2nd(f, args...) - ntuple(length(args)) do n - let x = args[n], valn = Val(n) - if x isa AbstractArray - forward_2nd(x -> f(_splice(x, args, valn)...), x)[2] - elseif x isa Number - ForwardDiff.hessian(x -> f(_splice(x[1], args, valn)...), [x])[1] - end - end - end -end - -export diaghessian_2nd From 30419f4c95e9e578dbb7449288b5a1e820be8fef Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 4 May 2021 13:14:49 -0400 Subject: [PATCH 026/490] sparse getindex gradient --- src/lib/array.jl | 16 ++++++++++++++-- 1 file changed, 14 insertions(+), 2 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 344db8dcd..2d9b5311c 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -32,8 +32,10 @@ end @adjoint view(x::AbstractArray, inds...) = view(x, inds...), ∇getindex(x, inds) -∇getindex(x::AbstractArray, inds) = dy -> begin - if inds isa NTuple{<:Any, Integer} +∇getindex(x::AbstractArray{T,N}, inds) where {T,N} = dy -> begin + if inds isa NTuple{N,Int} && T <: Number + dx = OneElement(dy, inds, axes(x)) + elseif inds isa NTuple{<:Any, Integer} dx = _zero(x, typeof(dy)) dx[inds...] = dy else @@ -44,6 +46,16 @@ end return (dx, map(_->nothing, inds)...) 
end +struct OneElement{T,N,I,A} <: AbstractArray{T,N} + val::T + index::I + axes::A + OneElement(x::T, i::I, a::A) where {T,I<:NTuple{N,Int},A} where {N} = new{T,N,I,A}(x, i, a) +end +Base.size(A::OneElement) = map(length, A.axes) +Base.axes(A::OneElement) = A.axes +Base.getindex(A::OneElement{T,N}, i::Vararg{Int,N}) where {T,N} = ifelse(i==A.index, A.val, zero(T)) + _zero(xs::AbstractArray{<:Number}, T::Type{Nothing}) = fill!(similar(xs), zero(eltype(xs))) _zero(xs::AbstractArray{<:Number}, T) = fill!(similar(xs, T), false) _zero(xs::AbstractArray, T) = fill!(similar(xs, Union{Nothing, T}), nothing) From 8fafdf040004d1e163edb99a0edef6050c875cb2 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 4 May 2021 13:15:12 -0400 Subject: [PATCH 027/490] a step towards in-place accumulation --- src/lib/array.jl | 2 ++ src/lib/lib.jl | 1 + 2 files changed, 3 insertions(+) diff --git a/src/lib/array.jl b/src/lib/array.jl index 2d9b5311c..c22b02d9a 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -56,6 +56,8 @@ Base.size(A::OneElement) = map(length, A.axes) Base.axes(A::OneElement) = A.axes Base.getindex(A::OneElement{T,N}, i::Vararg{Int,N}) where {T,N} = ifelse(i==A.index, A.val, zero(T)) +accum(x::Array, y::OneElement) = (@inbounds x[y.index...] = accum(x[y.index...], y.val); x) + _zero(xs::AbstractArray{<:Number}, T::Type{Nothing}) = fill!(similar(xs), zero(eltype(xs))) _zero(xs::AbstractArray{<:Number}, T) = fill!(similar(xs, T), false) _zero(xs::AbstractArray, T) = fill!(similar(xs, Union{Nothing, T}), nothing) diff --git a/src/lib/lib.jl b/src/lib/lib.jl index f4d321c29..3aaf87cde 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -14,6 +14,7 @@ accum(x, y, zs...) = accum(accum(x, y), zs...) accum(x::Tuple, y::Tuple) = accum.(x, y) accum(x::AbstractArray, y::AbstractArray) = accum.(x, y) +accum(x::DenseArray, y::AbstractArray) = x .= accum.(x, y) @generated function accum(x::NamedTuple, y::NamedTuple) # assumes that y has no keys apart from those also in x From 431d3d46400e1f7a693c198829cfb738bf75f234 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Wed, 5 May 2021 12:09:10 -0400 Subject: [PATCH 028/490] four seven seven four --- src/lib/lib.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/lib/lib.jl b/src/lib/lib.jl index 3aaf87cde..58a551cb6 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -15,6 +15,8 @@ accum(x, y, zs...) = accum(accum(x, y), zs...) accum(x::Tuple, y::Tuple) = accum.(x, y) accum(x::AbstractArray, y::AbstractArray) = accum.(x, y) accum(x::DenseArray, y::AbstractArray) = x .= accum.(x, y) +# work around bug fixed in https://github.com/JuliaLang/julia/pull/39859 +accum(x::DenseVector, y::AbstractArray) = x .= accum.(x, vec(y)) @generated function accum(x::NamedTuple, y::NamedTuple) # assumes that y has no keys apart from those also in x From 549671e4341436635bf1c65b4275fc6523ba3755 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Wed, 5 May 2021 17:49:31 -0400 Subject: [PATCH 029/490] change == to isapprox in some tests --- test/gradcheck.jl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 1b509d159..7e713f950 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -555,14 +555,14 @@ end @testset "Cholesky" begin # Check that the forwards pass computes the correct thing. 
f(X, Y) = cholesky(X * X' + I) \ Y - @test Zygote.pullback(X -> f(X, Y), X)[1] == cholesky(X * X' + I) \ Y + @test Zygote.pullback(X -> f(X, Y), X)[1] ≈ cholesky(X * X' + I) \ Y @test gradtest(X -> f(X, Y), X) @test gradtest(Y -> f(X, Y), Y) @test gradtest(X -> f(X, y), X) @test gradtest(y -> f(X, y), y) g(X) = cholesky(X * X' + I) - @test Zygote.pullback(g, X)[2]((factors=LowerTriangular(X),)) == - Zygote.pullback(g, X)[2]((factors=Matrix(LowerTriangular(X)),)) + @test Zygote.pullback(g, X)[2]((factors=LowerTriangular(X),))[1] ≈ + Zygote.pullback(g, X)[2]((factors=Matrix(LowerTriangular(X)),))[1] @test_throws PosDefException Zygote.pullback(X -> cholesky(X, check = false), X)[2]((factors=X,)) # https://github.com/FluxML/Zygote.jl/issues/932 @@ -689,8 +689,8 @@ end @test gradtest(Diagonal, d) y, back = Zygote.pullback(Diagonal, d) D̄ = randn(rng, P, P) - @test back(D̄) == back(Diagonal(D̄)) - @test back(D̄) == back((diag=diag(D̄),)) + @test back(D̄)[1] ≈ back(Diagonal(D̄))[1] + @test back(D̄)[1] ≈ back((diag=diag(D̄),))[1] end @testset "dense + UniformScaling" begin @@ -705,7 +705,7 @@ end @testset "cholesky - dense" begin rng, N = MersenneTwister(123456), 5 A = randn(rng, N, N) - @test cholesky(A' * A + I) == first(Zygote.pullback(A->cholesky(A' * A + I), A)) + @test cholesky(A' * A + I).U ≈ first(Zygote.pullback(A->cholesky(A' * A + I), A)).U @test gradtest(A->cholesky(A' * A + I).U, A) @test gradtest(A->logdet(cholesky(A' * A + I)), A) @test gradtest(B->cholesky(Symmetric(B)).U, A * A' + I) From 0811a8c1425babf8ed21bec89c601f76087f6f34 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Thu, 6 May 2021 18:31:34 +0200 Subject: [PATCH 030/490] Use BenchmarkTools --- Project.toml | 3 ++- test/gradcheck.jl | 14 +++++++------- 2 files changed, 9 insertions(+), 8 deletions(-) diff --git a/Project.toml b/Project.toml index 3decc2e10..ce17f0551 100644 --- a/Project.toml +++ b/Project.toml @@ -37,6 +37,7 @@ ZygoteRules = "0.2.1" julia = "1.3" [extras] +BenchmarkTools = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf" CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" @@ -45,4 +46,4 @@ StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] -test = ["CUDA", "Distances", "FFTW", "FiniteDifferences", "StatsFuns", "Test"] +test = ["BenchmarkTools", "CUDA", "Distances", "FFTW", "FiniteDifferences", "StatsFuns", "Test"] diff --git a/test/gradcheck.jl b/test/gradcheck.jl index a5be4fb98..c01a18ca1 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -4,6 +4,7 @@ using Zygote: gradient using Base.Broadcast: broadcast_shape using Distributed: pmap, CachingPool, workers import FiniteDifferences +using BenchmarkTools function ngradient(f, xs::AbstractArray...) 
grads = zero.(xs) @@ -1677,13 +1678,12 @@ end @test gradcheck(x -> prod(Base.Fix1(+, 1), x), randn(100)) @test gradcheck(x -> prod(Base.Fix2(+, 1), x), randn(100)) - # compile once and check the execution times compared with a closure + # check the execution times compared with a closure # https://github.com/FluxML/Zygote.jl/issues/957 x = randn(100) - gradient(x -> prod(y -> y + 1, x), x) - t = @elapsed(gradient(x -> prod(y -> y + 1, x), x)) - gradient(x -> prod(Base.Fix1(+, 1), x), x) - @test @elapsed(gradient(x -> prod(Base.Fix1(+, 1), x), x)) < 2 * t - gradient(x -> prod(Base.Fix1(+, 1), x), x) - @test @elapsed(gradient(x -> prod(Base.Fix2(+, 1), x), x)) < 2 * t + tclosure = @belapsed(gradient($(x -> prod(y -> y + 1, x)), $x)) + tfix1 = @belapsed(gradient($(x -> prod(Base.Fix1(+, 1), x)), $x)) + tfix2 = @belapsed(gradient($(x -> prod(Base.Fix2(+, 1), x)), $x)) + @test tfix1 < 2 * tclosure + @test tfix2 < 2 * tclosure end From 85fb41603193573032991cc3ee1b4817472bea5a Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 8 May 2021 09:02:01 -0400 Subject: [PATCH 031/490] rm forward forward --- src/lib/forward.jl | 5 ----- test/utils.jl | 11 ----------- 2 files changed, 16 deletions(-) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 1637df165..4e0bb9c82 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -137,8 +137,3 @@ forwarddiff(f, x) = f(x) return y, ȳ -> (nothing, reshape_scalar(x, J*vec_scalar(ȳ))) end -# Second derivatives -@adjoint ForwardDiff.derivative(f, x) = pullback(forwarddiff, x -> ForwardDiff.derivative(f, x), x) -@adjoint ForwardDiff.gradient(f, x) = pullback(forwarddiff, x -> ForwardDiff.gradient(f, x), x) -@adjoint ForwardDiff.jacobian(f, x) = pullback(forwarddiff, x -> ForwardDiff.jacobian(f, x), x) - diff --git a/test/utils.jl b/test/utils.jl index 9b5171481..73a7d65c4 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -83,14 +83,3 @@ end @test Jxy[ys] ≈ [1 0 0; 0 1 0] @test Jxy[xs] ≈ [2 6 4 8; 2 6 4 8] end - -@testset "adjoints of ForwardDiff functions" begin - f1(x) = ForwardDiff.gradient(x -> sum(exp.(x.+1)), x) - x1 = randn(3,7) - @test Zygote.jacobian(f1, x1)[1] ≈ ForwardDiff.jacobian(f1, x1) - - f2(x) = ForwardDiff.jacobian(x -> log.(x[1:3] .+ x[2:4]), x) - x2 = rand(5) .+ 1 - @test Zygote.jacobian(f2, x2)[1] ≈ ForwardDiff.jacobian(f2, x2) -end - From bc7823191d8c83df56e04b5c344673f790e54549 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 8 May 2021 09:17:03 -0400 Subject: [PATCH 032/490] adjoint for ForwardDiff.jacobian --- src/lib/forward.jl | 4 ++++ test/utils.jl | 10 ++++++++++ 2 files changed, 14 insertions(+) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 7a6e125ff..1de719c27 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -101,3 +101,7 @@ forwarddiff(f, x) = f(x) y, J = forward_jacobian(f, x) return y, ȳ -> (nothing, reshape_scalar(x, J*vec_scalar(ȳ))) end + +# Use this to allow second derivatives +@adjoint ForwardDiff.gradient(f, x) = pullback(forwarddiff, x -> ForwardDiff.gradient(f, x), x) +@adjoint ForwardDiff.jacobian(f, x) = pullback(forwarddiff, x -> ForwardDiff.jacobian(f, x), x) diff --git a/test/utils.jl b/test/utils.jl index d09fc2dc2..e86bbf338 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -61,3 +61,13 @@ end @test Jxy[ys] ≈ [1 0 0; 0 1 0] @test Jxy[xs] ≈ [2 6 4 8; 2 6 4 8] end + +@testset "adjoints of ForwardDiff functions" begin + f1(x) = ForwardDiff.gradient(x -> sum(exp.(x.+1)), x) + x1 = 
randn(3,7) + @test Zygote.jacobian(f1, x1)[1] ≈ ForwardDiff.jacobian(f1, x1) + + f2(x) = ForwardDiff.jacobian(x -> log.(x[1:3] .+ x[2:4]), x) + x2 = rand(5) .+ 1 + @test Zygote.jacobian(f2, x2)[1] ≈ ForwardDiff.jacobian(f2, x2) +end From 6bff8f8f2d5a36ce4ce894bc1e20299f1022da6d Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 8 May 2021 09:35:50 -0400 Subject: [PATCH 033/490] add tests from https://github.com/FluxML/Zygote.jl/issues/769 --- test/utils.jl | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/test/utils.jl b/test/utils.jl index e86bbf338..473277b5b 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -62,6 +62,8 @@ end @test Jxy[xs] ≈ [2 6 4 8; 2 6 4 8] end +using ForwardDiff + @testset "adjoints of ForwardDiff functions" begin f1(x) = ForwardDiff.gradient(x -> sum(exp.(x.+1)), x) x1 = randn(3,7) @@ -70,4 +72,21 @@ end f2(x) = ForwardDiff.jacobian(x -> log.(x[1:3] .+ x[2:4]), x) x2 = rand(5) .+ 1 @test Zygote.jacobian(f2, x2)[1] ≈ ForwardDiff.jacobian(f2, x2) + + # Tests from https://github.com/FluxML/Zygote.jl/issues/769 + f(x) = [2x[1]^2 + x[1],x[2]^2 * x[1]] + g1(x) = sum(ForwardDiff.jacobian(f,x)) + out,back = Zygote.pullback(g1,[2.0,3.2]) + stakehouse = back(1.0)[1] + @test typeof(stakehouse) <: Vector + @test size(stakehouse) == (2,) + @test stakehouse ≈ ForwardDiff.gradient(g1,[2.0,3.2]) + + g2(x) = prod(ForwardDiff.jacobian(f,x)) + out,back = Zygote.pullback(g2,[2.0,3.2]) + @test_skip back(1.0)[1] == ForwardDiff.gradient(g2,[2.0,3.2]) # contains NaN, @adjoint prod isn't careful + + g3(x) = sum(abs2,ForwardDiff.jacobian(f,x)) + out,back = Zygote.pullback(g3,[2.0,3.2]) + @test back(1.0)[1] == ForwardDiff.gradient(g3,[2.0,3.2]) end From e95ba74c34f1aaedc43522f13ce0fc3f8efd98bf Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 9 May 2021 18:07:28 -0400 Subject: [PATCH 034/490] avoid mutation --- src/lib/array.jl | 1 - src/lib/lib.jl | 7 ++----- test/features.jl | 19 +++++++++++++++++++ 3 files changed, 21 insertions(+), 6 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index c22b02d9a..8bda68bbf 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -56,7 +56,6 @@ Base.size(A::OneElement) = map(length, A.axes) Base.axes(A::OneElement) = A.axes Base.getindex(A::OneElement{T,N}, i::Vararg{Int,N}) where {T,N} = ifelse(i==A.index, A.val, zero(T)) -accum(x::Array, y::OneElement) = (@inbounds x[y.index...] = accum(x[y.index...], y.val); x) _zero(xs::AbstractArray{<:Number}, T::Type{Nothing}) = fill!(similar(xs), zero(eltype(xs))) _zero(xs::AbstractArray{<:Number}, T) = fill!(similar(xs, T), false) diff --git a/src/lib/lib.jl b/src/lib/lib.jl index 58a551cb6..045a494de 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -12,11 +12,8 @@ accum(x, y) = accum(x, y, zs...) = accum(accum(x, y), zs...) -accum(x::Tuple, y::Tuple) = accum.(x, y) -accum(x::AbstractArray, y::AbstractArray) = accum.(x, y) -accum(x::DenseArray, y::AbstractArray) = x .= accum.(x, y) -# work around bug fixed in https://github.com/JuliaLang/julia/pull/39859 -accum(x::DenseVector, y::AbstractArray) = x .= accum.(x, vec(y)) +accum(x::Tuple, ys::Tuple...) = accum.(x, ys...) +accum(x::AbstractArray, ys::AbstractArray...) = accum.(x, ys...) 
@generated function accum(x::NamedTuple, y::NamedTuple) # assumes that y has no keys apart from those also in x diff --git a/test/features.jl b/test/features.jl index 5531ebd0b..874e3916e 100644 --- a/test/features.jl +++ b/test/features.jl @@ -481,3 +481,22 @@ end Zygote.gradient(loss_adjoint,[1.0]) @test x[1] == x[2] end + +@testset "accumulation" begin + # from https://github.com/FluxML/Zygote.jl/issues/905 + function net(x1) + x2 = x1 + x3 = x1 + x2 + x4 = x1 + x2 + x3 + x5 = x1 + x2 + x3 + x4 + x6 = x1 + x2 + x3 + x4 + x5 + x7 = x1 + x2 + x3 + x4 + x5 + x6 + x8 = x1 + x2 + x3 + x4 + x5 + x6 + x7 + x9 = x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x10 = x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + end + loss(x) = sum(abs2, net(x)) + @test gradient(loss, ones(10,10))[1] == fill(131072, 10, 10) + @test 150_000_000 > @allocated gradient(loss, ones(1000,1000)) +end + From 4b4cdf2f75c6549ea1fab690f0174f5bbf9f2a03 Mon Sep 17 00:00:00 2001 From: CarloLucibello Date: Fri, 14 May 2021 17:29:14 +0200 Subject: [PATCH 035/490] fix gradient algebra on gpu --- src/compiler/interface.jl | 2 ++ test/cuda.jl | 44 ++++++++++++++++++++++++++++++++++++++- test/interface.jl | 2 +- 3 files changed, 46 insertions(+), 2 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 7d72f9537..f5336ac03 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -80,6 +80,8 @@ end Base.copy(ps::Params) = union!(Params(), ps) Base.union(ps::Params, itrs...) = union!(copy(ps), itrs...) +Base.issetequal(ps1::Params, ps2::Params) = issetequal(ps1.params, ps2.params) +# Base.issetequal(ps1::Params, x::AbstractSet) = issetequal(ps1.params, x) function Base.intersect!(ps::Params, itrs...) for itr in itrs diff --git a/test/cuda.jl b/test/cuda.jl index 0766ff986..f90bca6a6 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -1,4 +1,6 @@ using CUDA +using Zygote: Grads +using Random: randn! 
# Test GPU movement inside the call to `gradient` @testset "GPU movement" begin @@ -21,7 +23,6 @@ end g_gpu = gradient(x -> w(x), a_gpu)[1] @test g_gpu isa CuArray @test g_gpu |> collect ≈ g - end @testset "jacobian" begin @@ -37,3 +38,44 @@ end @test j2[v1] isa CuArray @test j2[v1] ≈ cu(res2) end + +@testset "gradient algebra" begin + w, b = rand(2) |> cu, rand(2) |> cu + x1, x2 = rand(2) |> cu, rand(2) |> cu + + gs1 = gradient(() -> sum(w .* x1), Params([w])) + gs2 = gradient(() -> sum(w .* x2), Params([w])) + + @test .- gs1 isa Grads + @test gs1 .- gs2 isa Grads + @test .+ gs1 isa Grads + @test gs1 .+ gs2 isa Grads + @test 2 .* gs1 isa Grads + @test (2 .* gs1)[w] ≈ 2 * gs1[w] + @test gs1 .* 2 isa Grads + @test gs1 ./ 2 isa Grads + @test (gs1 .+ gs2)[w] ≈ gs1[w] .+ gs2[w] + + gs12 = gs1 .+ gs2 + gs1 .+= gs2 + @test gs12[w] ≈ gs1[w] + + gs3 = gradient(() -> sum(w .* x1), Params([w, b])) # grad nothing with respect to b + gs4 = gradient(() -> sum(w .* x2 .+ b), Params([w, b])) + + @test .- gs3 isa Grads + @test gs3 .- gs4 isa Grads + @test .+ gs3 isa Grads + @test gs3 .+ gs4 isa Grads + @test 2 .* gs3 isa Grads + @test gs3 .* 2 isa Grads + @test gs3 ./ 2 isa Grads + @test (gs3 .+ gs4)[w] ≈ gs3[w] .+ gs4[w] + @test (gs3 .+ gs4)[b] ≈ gs4[b] + + @test gs3 .+ Dict(w => similar(w), b => similar(b)) isa Grads + gs3 .+= Dict(p => randn!(similar(p)) for p in keys(gs3)) + @test gs3 isa Grads + + @test_throws ArgumentError gs1 .+ gs4 +end \ No newline at end of file diff --git a/test/interface.jl b/test/interface.jl index 087da74f3..584228c84 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -120,7 +120,7 @@ end gs3 .+= Dict(p => randn(size(p)) for p in keys(gs3)) @test gs3 isa Grads - @test_throws ArgumentError gs1 .+ gs4 + @test_throws ArgumentError gs1 .+ gs4 end @testset "map and broadcast" begin From 85b93e6c4920f397fa878dccf7db9ffbce74eb1b Mon Sep 17 00:00:00 2001 From: CarloLucibello Date: Fri, 14 May 2021 17:45:45 +0200 Subject: [PATCH 036/490] mark one test as broken --- src/compiler/interface.jl | 1 - test/cuda.jl | 9 ++++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index f5336ac03..2e2725480 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -81,7 +81,6 @@ end Base.copy(ps::Params) = union!(Params(), ps) Base.union(ps::Params, itrs...) = union!(copy(ps), itrs...) Base.issetequal(ps1::Params, ps2::Params) = issetequal(ps1.params, ps2.params) -# Base.issetequal(ps1::Params, x::AbstractSet) = issetequal(ps1.params, x) function Base.intersect!(ps::Params, itrs...) for itr in itrs diff --git a/test/cuda.jl b/test/cuda.jl index f90bca6a6..e08f9a500 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -1,6 +1,7 @@ using CUDA using Zygote: Grads using Random: randn! 
+CUDA.allowscalar(false) # Test GPU movement inside the call to `gradient` @testset "GPU movement" begin @@ -73,9 +74,11 @@ end @test (gs3 .+ gs4)[w] ≈ gs3[w] .+ gs4[w] @test (gs3 .+ gs4)[b] ≈ gs4[b] - @test gs3 .+ Dict(w => similar(w), b => similar(b)) isa Grads - gs3 .+= Dict(p => randn!(similar(p)) for p in keys(gs3)) - @test gs3 isa Grads + @test_broken begin + gs3 .+ Dict(w => similar(w), b => similar(b)) isa Grads + gs3 .+= Dict(p => randn!(similar(p)) for p in keys(gs3)) + gs3 isa Grads + end @test_throws ArgumentError gs1 .+ gs4 end \ No newline at end of file From 478fa7d00afe8e4f6ace269c0111b4d4306c721f Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 14 May 2021 13:40:23 -0400 Subject: [PATCH 037/490] comment --- src/lib/forward.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 1de719c27..113b6aa6c 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -102,6 +102,7 @@ forwarddiff(f, x) = f(x) return y, ȳ -> (nothing, reshape_scalar(x, J*vec_scalar(ȳ))) end -# Use this to allow second derivatives +# Use this to allow second derivatives -- this is forward-over-forward, +# see https://github.com/FluxML/Zygote.jl/issues/769 for a forward-over-reverse proposal @adjoint ForwardDiff.gradient(f, x) = pullback(forwarddiff, x -> ForwardDiff.gradient(f, x), x) @adjoint ForwardDiff.jacobian(f, x) = pullback(forwarddiff, x -> ForwardDiff.jacobian(f, x), x) From 734a8d563281a782775f629ab6bbd8f4abc8e7b8 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 14 May 2021 17:57:25 -0400 Subject: [PATCH 038/490] treat derivative and hessian the same way --- src/lib/forward.jl | 5 +++++ test/utils.jl | 9 +++++++++ 2 files changed, 14 insertions(+) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 113b6aa6c..21f39e29c 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -106,3 +106,8 @@ end # see https://github.com/FluxML/Zygote.jl/issues/769 for a forward-over-reverse proposal @adjoint ForwardDiff.gradient(f, x) = pullback(forwarddiff, x -> ForwardDiff.gradient(f, x), x) @adjoint ForwardDiff.jacobian(f, x) = pullback(forwarddiff, x -> ForwardDiff.jacobian(f, x), x) + +@adjoint ForwardDiff.derivative(f, x) = pullback(forwarddiff, x -> ForwardDiff.derivative(f, x), x) +@adjoint ForwardDiff.hessian(f, x) = pullback(forwarddiff, x -> ForwardDiff.hessian(f, x), x) + + diff --git a/test/utils.jl b/test/utils.jl index 473277b5b..dcf877c7d 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -73,6 +73,15 @@ using ForwardDiff x2 = rand(5) .+ 1 @test Zygote.jacobian(f2, x2)[1] ≈ ForwardDiff.jacobian(f2, x2) + f3(x) = sum(ForwardDiff.hessian(x -> sum(x .^2 .* x'), x)[1:4:end]) + x3 = rand(3) + @test Zygote.gradient(f3, x3)[1] ≈ ForwardDiff.gradient(f3, x3) + + @test gradient(x -> ForwardDiff.derivative(x -> x^4, x), 7) == (4 * 3 * 7^2,) + + f4(x) = ForwardDiff.derivative(x -> [x,x^2,x^3], x) + @test Zygote.jacobian(f4, pi)[1] ≈ ForwardDiff.derivative(f4, pi) + # Tests from https://github.com/FluxML/Zygote.jl/issues/769 f(x) = [2x[1]^2 + x[1],x[2]^2 * x[1]] g1(x) = sum(ForwardDiff.jacobian(f,x)) From c12cfca39974855172bdcb9721348cf345fb0b44 Mon Sep 17 00:00:00 2001 From: CarloLucibello Date: Sat, 15 May 2021 10:20:18 +0200 Subject: [PATCH 039/490] use IdDict instead of Dict --- docs/src/utils.md | 4 ++-- src/compiler/interface.jl | 1 + test/cuda.jl | 10 ++++------ test/interface.jl | 4 ++-- 4 files changed, 9 insertions(+), 10 
deletions(-) diff --git a/docs/src/utils.md b/docs/src/utils.md index c46b646ce..92c8df03a 100644 --- a/docs/src/utils.md +++ b/docs/src/utils.md @@ -42,8 +42,8 @@ gs = gs1 .+ gs2 @test gs[w] ≈ gs1[w] + gs2[w] @test gs[b] ≈ gs1[b] + gs2[b] -# gradients and dictionaries interact nicely -gs .+= Dict(p => randn(size(p)) for p in keys(gs)) +# gradients and IdDict interact nicely +gs .+= IdDict(p => randn(size(p)) for p in keys(gs)) # clip gradients map(x -> clamp.(x, -0.1, 0.1), gs) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 2e2725480..1dbf73be6 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -81,6 +81,7 @@ end Base.copy(ps::Params) = union!(Params(), ps) Base.union(ps::Params, itrs...) = union!(copy(ps), itrs...) Base.issetequal(ps1::Params, ps2::Params) = issetequal(ps1.params, ps2.params) +Base.issetequal(ps1::Params, x::Base.AbstractSet) = issetequal(ps1.params, x) function Base.intersect!(ps::Params, itrs...) for itr in itrs diff --git a/test/cuda.jl b/test/cuda.jl index e08f9a500..a54402999 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -74,11 +74,9 @@ end @test (gs3 .+ gs4)[w] ≈ gs3[w] .+ gs4[w] @test (gs3 .+ gs4)[b] ≈ gs4[b] - @test_broken begin - gs3 .+ Dict(w => similar(w), b => similar(b)) isa Grads - gs3 .+= Dict(p => randn!(similar(p)) for p in keys(gs3)) - gs3 isa Grads - end + @test gs3 .+ IdDict(w => similar(w), b => similar(b)) isa Grads + gs3 .+= IdDict(p => randn!(similar(p)) for p in keys(gs3)) + @test gs3 isa Grads @test_throws ArgumentError gs1 .+ gs4 -end \ No newline at end of file +end diff --git a/test/interface.jl b/test/interface.jl index 584228c84..0ffb933f6 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -116,8 +116,8 @@ end @test (gs3 .+ gs4)[w] ≈ gs3[w] .+ gs4[w] @test (gs3 .+ gs4)[b] ≈ gs4[b] - @test gs3 .+ Dict(w => similar(w), b => similar(b)) isa Grads - gs3 .+= Dict(p => randn(size(p)) for p in keys(gs3)) + @test gs3 .+ IdDict(w => similar(w), b => similar(b)) isa Grads + gs3 .+= IdDict(p => randn(size(p)) for p in keys(gs3)) @test gs3 isa Grads @test_throws ArgumentError gs1 .+ gs4 From f0708406fb21743a07b83fb15be5fa3dc90d0008 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 15 May 2021 11:39:09 -0400 Subject: [PATCH 040/490] un-broadcast using mapreduce --- src/lib/broadcast.jl | 21 ++++++++++++++++++--- test/cuda.jl | 8 ++++++++ test/gradcheck.jl | 6 ++++++ 3 files changed, 32 insertions(+), 3 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 1219d7883..6f0774190 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -71,12 +71,27 @@ unbroadcast(x::AbstractArray, x̄::Nothing) = nothing @adjoint broadcasted(::typeof(-), x::Numeric, y::Numeric) = x .- y, Δ -> (nothing, unbroadcast(x, Δ), -unbroadcast(y, Δ)) -@adjoint broadcasted(::typeof(*), x::Numeric, y::Numeric) = x.*y, - z̄ -> (nothing, unbroadcast(x, z̄ .* conj.(y)), unbroadcast(y, z̄ .* conj.(x))) +@adjoint broadcasted(::typeof(*), x::Numeric, y::Numeric) = x .* y, + Δ -> (nothing, mul_unbroadcast(x, Δ, y), mul_unbroadcast(y, Δ, x)) + +mul_unbroadcast(x, Δ, y) = funbroadcast(x, (δ,y₁) -> δ * conj(y₁), Δ, y) + +# This optimisation is only safe when all args... have same size: +funbroadcast(::Number, f, args...) = mapreduce(f, +, args...) +funbroadcast(x, f, args...) 
= unbroadcast(x, f.(args...)) # fallback + +@adjoint function broadcasted(::typeof(/), x::AbstractArray{<:Number}, y::Number) + res = x ./ y + res, Δ -> begin + Δx = funbroadcast(x, δ -> δ / conj(y), Δ) + Δy = funbroadcast(y, (δ,r) -> -δ * conj(r / y), Δ, res) + (nothing, Δx, Δy) + end +end @adjoint function broadcasted(::typeof(/), x::Numeric, y::Numeric) res = x ./ y - res, Δ -> (nothing, unbroadcast(x, Δ ./ conj.(y)), unbroadcast(y, -Δ .* conj.(res ./ y))) + res, Δ -> (nothing, unbroadcast(x, Δ ./ conj.(y)), unbroadcast(y, .-Δ .* conj.(res ./ y))) end @adjoint function broadcasted(::typeof(Base.literal_pow), ::typeof(^), x::Numeric, exp::Val{p}) where p diff --git a/test/cuda.jl b/test/cuda.jl index 0766ff986..20cd0d14e 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -24,6 +24,14 @@ end end +@testset "un-broadcasting *, / with mapreduce" begin + cu12 = cu(Float32[1,2]) + @test gradient((x,y) -> sum(x .* y), cu12, 5) == ([5, 5], 3) + @test gradient((x,y) -> sum(x .* y), 5, cu12) == (3, [5, 5]) + @test gradient((x,y) -> sum(x .* y), cu12, [3 4 5]) == ([12, 12], [3 3 3]) + @test gradient((x,y) -> sum(x ./ y), cu12, 5) == ([0.2, 0.2], -0.12) +end + @testset "jacobian" begin v1 = cu(collect(1:3f0)) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 7e713f950..2d5499dcd 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1308,6 +1308,12 @@ end x1 = rand(3, 3) @test gradient(x -> sum(x .== 0.5), x1)[1] === nothing @test gradient(x -> sum(x .* (x .== maximum(x, dims=1))), x1)[1] == (x1 .== maximum(x1, dims=1)) + + # tests for un-broadcasting *, / with mapreduce + @test gradient((x,y) -> sum(x .* y), [1,2], 5) == ([5, 5], 3) + @test gradient((x,y) -> sum(x .* y), 5, [1,2]) == (3, [5, 5]) + @test gradient((x,y) -> sum(x .* y), [1,2], [3 4 5]) == ([12, 12], [3 3 3]) + @test gradient((x,y) -> sum(x ./ y), [1,2], 5) == ([0.2, 0.2], -0.12) end using Zygote: Buffer From f0511296cc55b6f69e0f2ead4dea3563ad48b003 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 15 May 2021 12:11:19 -0400 Subject: [PATCH 041/490] cu test --- test/cuda.jl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/test/cuda.jl b/test/cuda.jl index 20cd0d14e..1ca2debb9 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -28,8 +28,9 @@ end cu12 = cu(Float32[1,2]) @test gradient((x,y) -> sum(x .* y), cu12, 5) == ([5, 5], 3) @test gradient((x,y) -> sum(x .* y), 5, cu12) == (3, [5, 5]) - @test gradient((x,y) -> sum(x .* y), cu12, [3 4 5]) == ([12, 12], [3 3 3]) - @test gradient((x,y) -> sum(x ./ y), cu12, 5) == ([0.2, 0.2], -0.12) + cu345 = cu(Float32[3 4 5]) + @test all(gradient((x,y) -> sum(x .* y), cu12, cu345) .≈ ([12, 12], [3 3 3])) + @test all(gradient((x,y) -> sum(x ./ y), cu12, 5) .≈ ([0.2, 0.2], -0.12)) end @testset "jacobian" begin From 189a4bcb1d69fcedfaaf234a300a05e24d68c84b Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 15 May 2021 18:16:08 -0400 Subject: [PATCH 042/490] tests --- src/lib/grad.jl | 4 ++-- test/cuda.jl | 8 +++++--- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index 7a1b0bdd8..0bedb0793 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -124,7 +124,7 @@ julia> jacobian((a,d) -> prod(a, dims=d), [1 2; 3 4; 5 6], 2) !!! warning For arguments of any type except `Number` & `AbstractArray`, the result is `nothing`. 
-```jldoctest; setup=:(using Zygote) +``` julia> jacobian((a,s) -> a.^length(s), [1,2,3], "str") ([3 0 0; 0 12 0; 0 0 27], nothing) @@ -132,7 +132,7 @@ julia> jacobian((a,t) -> sum(a .* t[1]) + t[2], [1,2,3], (4,5)) ([4 4 4], nothing) julia> gradient((a,t) -> sum(a .* t[1]) + t[2], [1,2,3], (4,5)) # gradient undersands the tuple -([4, 4, 4], (6, 1)) +(Fill(4, 3), (6, 1)) ``` """ function jacobian(f, args...) diff --git a/test/cuda.jl b/test/cuda.jl index 1ca2debb9..55eb6b7ac 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -26,11 +26,13 @@ end @testset "un-broadcasting *, / with mapreduce" begin cu12 = cu(Float32[1,2]) - @test gradient((x,y) -> sum(x .* y), cu12, 5) == ([5, 5], 3) + @test gradient((x,y) -> sum(x .* y), cu12, 5) == ([5, 5]), 3) @test gradient((x,y) -> sum(x .* y), 5, cu12) == (3, [5, 5]) + @test gradient((x,y) -> sum(z -> z, x .* y), cu12, 5) == ([5, 5], 3) + @test gradient((x,y) -> sum(z -> z, x .* y), 5, cu12) == (3, [5, 5]) cu345 = cu(Float32[3 4 5]) - @test all(gradient((x,y) -> sum(x .* y), cu12, cu345) .≈ ([12, 12], [3 3 3])) - @test all(gradient((x,y) -> sum(x ./ y), cu12, 5) .≈ ([0.2, 0.2], -0.12)) + @test all(gradient((x,y) -> sum(x .* y), cu12, cu345) .≈ (cu([12, 12]), cu([3 3 3]))) + @test all(gradient((x,y) -> sum(x ./ y), cu12, 5) .≈ (cu([0.2, 0.2]), -0.12)) end @testset "jacobian" begin From 014c13122f5e16ed54152c11ede8f7f6a4ac19a9 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 16 May 2021 07:07:29 -0400 Subject: [PATCH 043/490] tests, III --- test/cuda.jl | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/test/cuda.jl b/test/cuda.jl index 55eb6b7ac..86d902670 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -26,12 +26,11 @@ end @testset "un-broadcasting *, / with mapreduce" begin cu12 = cu(Float32[1,2]) - @test gradient((x,y) -> sum(x .* y), cu12, 5) == ([5, 5]), 3) - @test gradient((x,y) -> sum(x .* y), 5, cu12) == (3, [5, 5]) - @test gradient((x,y) -> sum(z -> z, x .* y), cu12, 5) == ([5, 5], 3) - @test gradient((x,y) -> sum(z -> z, x .* y), 5, cu12) == (3, [5, 5]) + cu55 = cu(Float32[5,5]) + @test gradient((x,y) -> sum(x .* y), cu12, 5) == (cu55, 3) + @test gradient((x,y) -> sum(x .* y), 5, cu12) == (3, cu55) cu345 = cu(Float32[3 4 5]) - @test all(gradient((x,y) -> sum(x .* y), cu12, cu345) .≈ (cu([12, 12]), cu([3 3 3]))) + @test gradient((x,y) -> sum(x .* y), cu12, cu345) == (cu([12, 12]), cu([3 3 3])) @test all(gradient((x,y) -> sum(x ./ y), cu12, 5) .≈ (cu([0.2, 0.2]), -0.12)) end From d9509013f718d800a47dbdcfc51e1beb9565824d Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Sun, 16 May 2021 18:00:19 +0200 Subject: [PATCH 044/490] Update src/compiler/interface.jl --- src/compiler/interface.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 1dbf73be6..3c62d6586 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -82,6 +82,7 @@ Base.copy(ps::Params) = union!(Params(), ps) Base.union(ps::Params, itrs...) = union!(copy(ps), itrs...) Base.issetequal(ps1::Params, ps2::Params) = issetequal(ps1.params, ps2.params) Base.issetequal(ps1::Params, x::Base.AbstractSet) = issetequal(ps1.params, x) +Base.issetequal(x::Base.AbstractSet, ps1::Params) = issetequal(x, ps1.params) function Base.intersect!(ps::Params, itrs...) 
for itr in itrs From 75e2fb7942f8c5d84e62e548101ffd8730f5c497 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 16 May 2021 12:25:46 -0400 Subject: [PATCH 045/490] fill example --- src/lib/grad.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index 0bedb0793..7462ef4ed 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -132,7 +132,7 @@ julia> jacobian((a,t) -> sum(a .* t[1]) + t[2], [1,2,3], (4,5)) ([4 4 4], nothing) julia> gradient((a,t) -> sum(a .* t[1]) + t[2], [1,2,3], (4,5)) # gradient undersands the tuple -(Fill(4, 3), (6, 1)) +([4 4 4], (6, 1)) ``` """ function jacobian(f, args...) From 6dd98dd389066c0d4de92488da2a0a64873c034d Mon Sep 17 00:00:00 2001 From: Kyle Daruwalla Date: Sun, 16 May 2021 12:34:28 -0500 Subject: [PATCH 046/490] Update docs/src/utils.md --- docs/src/utils.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/src/utils.md b/docs/src/utils.md index 92c8df03a..84596357f 100644 --- a/docs/src/utils.md +++ b/docs/src/utils.md @@ -43,6 +43,7 @@ gs = gs1 .+ gs2 @test gs[b] ≈ gs1[b] + gs2[b] # gradients and IdDict interact nicely +# note that an IdDict must be used for gradient algebra on the GPU gs .+= IdDict(p => randn(size(p)) for p in keys(gs)) # clip gradients From 69a4ca021517b6b3d93d68cecb4d5b518774fce8 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 16 May 2021 14:17:28 -0400 Subject: [PATCH 047/490] change to use existing scalar * array rules --- src/lib/broadcast.jl | 29 +++++++++++++---------------- test/cuda.jl | 4 +++- 2 files changed, 16 insertions(+), 17 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 6f0774190..8694b394c 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -71,28 +71,25 @@ unbroadcast(x::AbstractArray, x̄::Nothing) = nothing @adjoint broadcasted(::typeof(-), x::Numeric, y::Numeric) = x .- y, Δ -> (nothing, unbroadcast(x, Δ), -unbroadcast(y, Δ)) -@adjoint broadcasted(::typeof(*), x::Numeric, y::Numeric) = x .* y, - Δ -> (nothing, mul_unbroadcast(x, Δ, y), mul_unbroadcast(y, Δ, x)) - -mul_unbroadcast(x, Δ, y) = funbroadcast(x, (δ,y₁) -> δ * conj(y₁), Δ, y) - -# This optimisation is only safe when all args... have same size: -funbroadcast(::Number, f, args...) = mapreduce(f, +, args...) -funbroadcast(x, f, args...) = unbroadcast(x, f.(args...)) # fallback - -@adjoint function broadcasted(::typeof(/), x::AbstractArray{<:Number}, y::Number) - res = x ./ y - res, Δ -> begin - Δx = funbroadcast(x, δ -> δ / conj(y), Δ) - Δy = funbroadcast(y, (δ,r) -> -δ * conj(r / y), Δ, res) - (nothing, Δx, Δy) - end +@adjoint broadcasted(::typeof(*), x::Numeric, y::Numeric) = x.*y, + Δ -> (nothing, unbroadcast(x, Δ .* conj.(y)), unbroadcast(y, Δ .* conj.(x))) +@adjoint function broadcasted(::typeof(*), x::Number, y::AbstractArray{<:Number}) + z, back = pullback(*, x, y) # this uses dot(y,Δ) instead of Δ .* conj.(y) + z, Δ -> (nothing, back(Δ)...) +end +@adjoint function broadcasted(::typeof(*), x::AbstractArray{<:Number}, y::Number) + z, back = pullback(*, x, y) + z, Δ -> (nothing, back(Δ)...) end @adjoint function broadcasted(::typeof(/), x::Numeric, y::Numeric) res = x ./ y res, Δ -> (nothing, unbroadcast(x, Δ ./ conj.(y)), unbroadcast(y, .-Δ .* conj.(res ./ y))) end +@adjoint function broadcasted(::typeof(/), x::AbstractArray{<:Number}, y::Number) + z, back = pullback(/, x, y) + z, Δ -> (nothing, back(Δ)...) 
+end @adjoint function broadcasted(::typeof(Base.literal_pow), ::typeof(^), x::Numeric, exp::Val{p}) where p y = Base.literal_pow.(^, x, exp) diff --git a/test/cuda.jl b/test/cuda.jl index 86d902670..b45d066c4 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -24,11 +24,13 @@ end end -@testset "un-broadcasting *, / with mapreduce" begin +@testset "un-broadcasting .*, ./ with scalars" begin cu12 = cu(Float32[1,2]) cu55 = cu(Float32[5,5]) @test gradient((x,y) -> sum(x .* y), cu12, 5) == (cu55, 3) @test gradient((x,y) -> sum(x .* y), 5, cu12) == (3, cu55) + # @test gradient((x,y) -> sum(z -> z, x .* y), cu12, 5) == (cu55, 3) + # @test gradient((x,y) -> sum(z -> z, x .* y), 5, cu12) == (3, cu55) cu345 = cu(Float32[3 4 5]) @test gradient((x,y) -> sum(x .* y), cu12, cu345) == (cu([12, 12]), cu([3 3 3])) @test all(gradient((x,y) -> sum(x ./ y), cu12, 5) .≈ (cu([0.2, 0.2]), -0.12)) From 615100fd6730e6e0fe8ec7426c625de28f77e71c Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 16 May 2021 14:31:37 -0400 Subject: [PATCH 048/490] tests --- test/cuda.jl | 12 ------------ test/gradcheck.jl | 10 +++++----- 2 files changed, 5 insertions(+), 17 deletions(-) diff --git a/test/cuda.jl b/test/cuda.jl index b45d066c4..0766ff986 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -24,18 +24,6 @@ end end -@testset "un-broadcasting .*, ./ with scalars" begin - cu12 = cu(Float32[1,2]) - cu55 = cu(Float32[5,5]) - @test gradient((x,y) -> sum(x .* y), cu12, 5) == (cu55, 3) - @test gradient((x,y) -> sum(x .* y), 5, cu12) == (3, cu55) - # @test gradient((x,y) -> sum(z -> z, x .* y), cu12, 5) == (cu55, 3) - # @test gradient((x,y) -> sum(z -> z, x .* y), 5, cu12) == (3, cu55) - cu345 = cu(Float32[3 4 5]) - @test gradient((x,y) -> sum(x .* y), cu12, cu345) == (cu([12, 12]), cu([3 3 3])) - @test all(gradient((x,y) -> sum(x ./ y), cu12, 5) .≈ (cu([0.2, 0.2]), -0.12)) -end - @testset "jacobian" begin v1 = cu(collect(1:3f0)) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 2d5499dcd..43f482d01 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1309,11 +1309,11 @@ end @test gradient(x -> sum(x .== 0.5), x1)[1] === nothing @test gradient(x -> sum(x .* (x .== maximum(x, dims=1))), x1)[1] == (x1 .== maximum(x1, dims=1)) - # tests for un-broadcasting *, / with mapreduce - @test gradient((x,y) -> sum(x .* y), [1,2], 5) == ([5, 5], 3) - @test gradient((x,y) -> sum(x .* y), 5, [1,2]) == (3, [5, 5]) - @test gradient((x,y) -> sum(x .* y), [1,2], [3 4 5]) == ([12, 12], [3 3 3]) - @test gradient((x,y) -> sum(x ./ y), [1,2], 5) == ([0.2, 0.2], -0.12) + # tests for un-broadcasting *, / via scalar rules + @test all(gradient((x,y) -> sum(x .* y), [1,2], 5) .≈ ([5, 5], 3)) + @test all(gradient((x,y) -> sum(x .* y), 5, [1,2]) .≈ (3, [5, 5])) + @test all(gradient((x,y) -> sum(x .* y), [1,2], [3 4 5]) .≈ ([12, 12], [3 3 3])) + @test all(gradient((x,y) -> sum(x ./ y), [1,2], 5) .≈ ([0.2, 0.2], -0.12)) end using Zygote: Buffer From 6dca3a96a66029bd41d0546897321cc3266f009f Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Wed, 19 May 2021 09:50:55 +0100 Subject: [PATCH 049/490] rename chain rules differential types --- src/compiler/chainrules.jl | 6 +++--- test/chainrules.jl | 4 ++-- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index c573d98f7..8392a27ce 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -47,7 +47,7 @@ for T_outer in (:Tuple, :NamedTuple) # we create separate methods rather 
than using a `Union` + an `if` so that we avoid a # branch that changes output type, because nested AD on that kinda thing makes Zygote less # than happy. - @eval @inline function wrap_chainrules_output(x::ChainRules.Composite{P, T}) where {P, T<:$T_outer} + @eval @inline function wrap_chainrules_output(x::ChainRules.Tangent{P, T}) where {P, T<:$T_outer} xp = map(wrap_chainrules_output, canonicalize(x)) convert($T_outer, xp) end @@ -59,10 +59,10 @@ end Convert `x` from the format Zygote uses internally to differentials types ChainRules uses. """ @inline wrap_chainrules_input(x) = x -@inline wrap_chainrules_input(::Nothing) = ChainRules.Zero() +@inline wrap_chainrules_input(::Nothing) = ChainRules.ZeroTangent() @inline function wrap_chainrules_input(xs::Union{Tuple, NamedTuple}) xp = map(wrap_chainrules_input, xs) - ChainRules.Composite{Any, typeof(xp)}(xp) + ChainRules.Tangent{Any, typeof(xp)}(xp) end """ diff --git a/test/chainrules.jl b/test/chainrules.jl index 7fd8c6be5..8b7034753 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -131,7 +131,7 @@ using Zygote, Test, ChainRules not_diff_eg(x, i) = [10, 20][i] function ChainRules.rrule(::typeof(not_diff_eg), x, i) function not_diff_eg_pullback(Δ) - return ChainRules.NO_FIELDS, ChainRules.Zero(), ChainRules.DoesNotExist() + return ChainRules.NO_FIELDS, ChainRules.ZeroTangent(), ChainRules.NoTangent() end return not_diff_eg(x, i), not_diff_eg_pullback end @@ -204,7 +204,7 @@ using Zygote, Test, ChainRules not_diff_kw_eg(x, i; kw=1.0) = [10, 20][i] function ChainRules.rrule(::typeof(not_diff_kw_eg), x, i; kwargs...) function not_diff_kw_eg_pullback(Δ) - return ChainRules.NO_FIELDS, ChainRules.Zero(), ChainRules.DoesNotExist() + return ChainRules.NO_FIELDS, ChainRules.ZeroTangent(), ChainRules.NoTangent() end return not_diff_kw_eg(x, i; kwargs...), not_diff_kw_eg_pullback end From f583d0dc2248f52f1a8fd8ef88cbe187bc6f3634 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Wed, 19 May 2021 09:51:11 +0100 Subject: [PATCH 050/490] version bump and compat bump --- Project.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Project.toml b/Project.toml index 32849f933..0bdee524c 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.10" +version = "0.6.11" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" @@ -24,7 +24,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" ChainRules = "0.7.55" -ChainRulesCore = "0.9.32" +ChainRulesCore = "0.9.44" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11" ForwardDiff = "0.10" From 07f1f94dac577e411932616c9afd8bca04fa692e Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Wed, 19 May 2021 13:55:05 -0400 Subject: [PATCH 051/490] tuple un-broadcast --- src/lib/broadcast.jl | 2 ++ test/features.jl | 11 +++++++++++ 2 files changed, 13 insertions(+) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 1219d7883..ddadfcbcd 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -46,6 +46,7 @@ function Base.reducedim_init(::typeof(identity), ::typeof(accum), A::AbstractArr end trim(x, Δ) = reshape(Δ, ntuple(i -> size(Δ, i), Val(ndims(x)))) +trim(x::Tuple, Δ) = ntuple(k -> Δ[k], length(x)) unbroadcast(x::AbstractArray, x̄) = size(x) == size(x̄) ? 
x̄ : @@ -55,6 +56,7 @@ unbroadcast(x::AbstractArray, x̄) = unbroadcast(x::Number, x̄) = accum_sum(x̄) unbroadcast(x::Tuple{<:Any}, x̄) = (accum_sum(x̄),) unbroadcast(x::Base.RefValue, x̄) = (x=accum_sum(x̄),) +unbroadcast(x::Tuple, x̄) = trim(x, accum_sum(x̄; dims=2:ndims(x̄))) # case length(x) > 1 unbroadcast(x::AbstractArray, x̄::Nothing) = nothing diff --git a/test/features.jl b/test/features.jl index 48df0c87c..254034202 100644 --- a/test/features.jl +++ b/test/features.jl @@ -481,3 +481,14 @@ end Zygote.gradient(loss_adjoint,[1.0]) @test x[1] == x[2] end + +@testset "tuples & broadcasting" begin + @test gradient(x -> sum(x .+ ones(2,2)), (1,2)) == ((2,2),) + @test gradient(x -> sum(x .+ ones(2,2)), (1,)) == ((4,),) + + # https://github.com/FluxML/Zygote.jl/issues/975 + gt = gradient((x,p) -> prod(x .^ p), [3,4], (1,2)) + gv = gradient((x,p) -> prod(x .^ p), [3,4], [1,2]) + @test gt[1] == gv[1] + @test collect(gt[2]) ≈ gv[2] +end From 2dc6a51c225634b4db8bb55aa02306ed2d8a7bd5 Mon Sep 17 00:00:00 2001 From: Marius Millea Date: Wed, 19 May 2021 13:38:24 -0700 Subject: [PATCH 052/490] minor typo fix Minorest of typos, makes it so the user wont confuse the first clause as declarative. --- src/compiler/interface.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 3c62d6586..b066b73f6 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -43,7 +43,7 @@ end sensitivity(y::Number) = one(y) sensitivity(y::Complex) = error("Output is complex, so the gradient is not defined.") -sensitivity(y::AbstractArray) = error("output an array, so the gradient is not defined. Perhaps you wanted jacobian.") +sensitivity(y::AbstractArray) = error("Output is an array, so the gradient is not defined. Perhaps you wanted jacobian.") sensitivity(y) = error("Output should be scalar; gradients are not defined for output $(repr(y))") """ From 33f1d6de9f1fddae3a0fc166f66e921b2e219d9b Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Thu, 20 May 2021 08:37:01 -0400 Subject: [PATCH 053/490] skip the sum, sometimes --- src/lib/broadcast.jl | 2 +- test/features.jl | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index ddadfcbcd..451d8794c 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -56,7 +56,7 @@ unbroadcast(x::AbstractArray, x̄) = unbroadcast(x::Number, x̄) = accum_sum(x̄) unbroadcast(x::Tuple{<:Any}, x̄) = (accum_sum(x̄),) unbroadcast(x::Base.RefValue, x̄) = (x=accum_sum(x̄),) -unbroadcast(x::Tuple, x̄) = trim(x, accum_sum(x̄; dims=2:ndims(x̄))) # case length(x) > 1 +unbroadcast(x::Tuple, x̄) = trim(x, length(x) == length(x̄) ? 
x̄ : accum_sum(x̄; dims=2:ndims(x̄))) # case length(x) > 1 unbroadcast(x::AbstractArray, x̄::Nothing) = nothing diff --git a/test/features.jl b/test/features.jl index 254034202..6843acbf6 100644 --- a/test/features.jl +++ b/test/features.jl @@ -485,6 +485,7 @@ end @testset "tuples & broadcasting" begin @test gradient(x -> sum(x .+ ones(2,2)), (1,2)) == ((2,2),) @test gradient(x -> sum(x .+ ones(2,2)), (1,)) == ((4,),) + @test gradient(x -> sum(x .+ ones(2,1)), (1,2)) == ((1,1),) # https://github.com/FluxML/Zygote.jl/issues/975 gt = gradient((x,p) -> prod(x .^ p), [3,4], (1,2)) From 892022ca20512ceef1342f6705ec8209a8acf078 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Thu, 20 May 2021 14:19:02 +0100 Subject: [PATCH 054/490] add dev to gitignore --- .gitignore | 1 + 1 file changed, 1 insertion(+) diff --git a/.gitignore b/.gitignore index 78756acf1..aa5ffbd93 100644 --- a/.gitignore +++ b/.gitignore @@ -3,3 +3,4 @@ *.jl.mem docs/build Manifest.toml +dev/ From 81c0aefedaf6b5a25ea092989f2a1b5fa55352f5 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Sat, 22 May 2021 07:06:24 +0530 Subject: [PATCH 055/490] cleanup --- src/compiler/interface.jl | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index b066b73f6..48d6146ac 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -139,8 +139,8 @@ function copy!(ps::Params, x::AbstractVector) @assert length(x) == sum(length(p) for p in ps) i = 0 for p in ps - p .= reshape(x[i+1:i+length(p)], size(p)) - i += length(p) + p .= reshape(x[i+1:i+length(p)], size(p)) + i += length(p) end ps end @@ -149,8 +149,8 @@ function copy!(x::AbstractVector, ps::Params) @assert length(x) == sum(length(p) for p in ps) i = 0 for p in ps - x[i+1:i+length(p)] .= vec(p) - i += length(p) + x[i+1:i+length(p)] .= vec(p) + i += length(p) end ps end @@ -196,8 +196,8 @@ length of `x` has to be equal to the sum of the lengths of all gradients. function copy!(gs::Grads, x::AbstractVector) i = 0 for p in gs.params - gs[p] .= reshape(x[i+1:i+length(p)], size(p)) - i += length(p) + gs[p] .= reshape(x[i+1:i+length(p)], size(p)) + i += length(p) end x end @@ -205,8 +205,8 @@ end function copy!(x::AbstractVector, gs::Grads) i = 0 for p in gs.params - x[i+1:i+length(p)] .= vec(gs[p]) - i += length(p) + x[i+1:i+length(p)] .= vec(gs[p]) + i += length(p) end x end From 1f492a7d6c3fc32c787cd96a95c1b90478fc615f Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 25 May 2021 10:34:00 -0400 Subject: [PATCH 056/490] add docstring for OneElement --- src/lib/array.jl | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index d7fedd4d5..9a8f94d4d 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -46,15 +46,20 @@ end return (dx, map(_->nothing, inds)...) end +""" + OneElement(val, ind, axes) <: AbstractArray + +Extremely simple `struct` used for the gradient of scalar `getindex`. 
+""" struct OneElement{T,N,I,A} <: AbstractArray{T,N} val::T - index::I + ind::I axes::A - OneElement(x::T, i::I, a::A) where {T,I<:NTuple{N,Int},A} where {N} = new{T,N,I,A}(x, i, a) + OneElement(val::T, ind::I, axes::A) where {T<:Number, I<:NTuple{N,Int}, A} where {N} = new{T,N,I,A}(val, ind, axes) end Base.size(A::OneElement) = map(length, A.axes) Base.axes(A::OneElement) = A.axes -Base.getindex(A::OneElement{T,N}, i::Vararg{Int,N}) where {T,N} = ifelse(i==A.index, A.val, zero(T)) +Base.getindex(A::OneElement{T,N}, i::Vararg{Int,N}) where {T,N} = ifelse(i==A.ind, A.val, zero(T)) _zero(xs::AbstractArray{<:Number}, T::Type{Nothing}) = fill!(similar(xs), zero(eltype(xs))) From 5f87aa7170c2b8157686c50247da0a86d0800a16 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Wed, 2 Jun 2021 17:13:43 +0100 Subject: [PATCH 057/490] =Update ChainRules and ChainRulesCore --- Project.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Project.toml b/Project.toml index 0bdee524c..a6c376d27 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.11" +version = "0.6.12" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" @@ -23,8 +23,8 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" -ChainRules = "0.7.55" -ChainRulesCore = "0.9.44" +ChainRules = "0.7.55, 0.8" +ChainRulesCore = "0.9.44, 0.10" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11" ForwardDiff = "0.10" From 1e1f60ede2097e226ff9a42cb264df389d4f23ee Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Tue, 8 Jun 2021 15:25:55 +0100 Subject: [PATCH 058/490] delete rule for prod to use ChainRules' --- src/lib/array.jl | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 9a8f94d4d..f8389d4e3 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -300,11 +300,6 @@ end return sum(abs2, X; dims=dims), Δ::Union{Number, AbstractArray}->(nothing, ((2Δ) .* X)) end -@adjoint function prod(xs::AbstractArray; dims = :) - p = prod(xs; dims = dims) - p, Δ -> (p ./ xs .* Δ,) -end - function _pullback(cx::AContext, ::typeof(prod), f, xs::AbstractArray) y, back = pullback(cx, ((f, xs) -> prod(f.(xs))), f, xs) y, ȳ -> (nothing, back(ȳ)...) From 67dbc72575525845c8c9684d77f245484220b18c Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Tue, 8 Jun 2021 20:32:03 +0100 Subject: [PATCH 059/490] bump chainrules version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index a6c376d27..2a89b4cd1 100644 --- a/Project.toml +++ b/Project.toml @@ -23,7 +23,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" -ChainRules = "0.7.55, 0.8" +ChainRules = "0.7.66, 0.8" ChainRulesCore = "0.9.44, 0.10" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11" From df762eb8236008d970c2c9fceba4df7993ec59ac Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Fri, 11 Jun 2021 18:18:23 +0530 Subject: [PATCH 060/490] fix #941 --- src/compiler/interface.jl | 126 ++++++++++---------------------------- 1 file changed, 32 insertions(+), 94 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 48d6146ac..5883f4269 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -1,8 +1,12 @@ using InteractiveUtils using InteractiveUtils: typesof using Core: Typeof -import Base: copy! -import Base.Broadcast: broadcasted, materialize! 
+ +@static if VERSION >= v"1.1" + import Base: copy! +else + import Future: copy! +end mutable struct Context <: AContext cache::Union{IdDict{Any,Any},Nothing} @@ -43,17 +47,8 @@ end sensitivity(y::Number) = one(y) sensitivity(y::Complex) = error("Output is complex, so the gradient is not defined.") -sensitivity(y::AbstractArray) = error("Output is an array, so the gradient is not defined. Perhaps you wanted jacobian.") sensitivity(y) = error("Output should be scalar; gradients are not defined for output $(repr(y))") -""" - gradient(f, args...) - -Returns a tuple containing `∂f/∂x` for each argument `x`, -the derivative (for scalar x) or the gradient. - -`f(args...)` must be a real number, see [`jacobian`](@ref) for array output. -""" function gradient(f, args...) y, back = pullback(f, args...) return back(sensitivity(y)) @@ -65,42 +60,35 @@ Base.adjoint(f::Function) = x -> gradient(f, x)[1] # TODO store ids only struct Params - order::Buffer{Any, Vector{Any}} + order::Buffer # {Any, Vector{Any}} params::IdSet{Any} - Params() = new(Buffer([], false), IdSet()) end +Params() = Params(Buffer([], false), IdSet()) +Params(xs) = Params(Buffer(xs, false), IdSet(xs)) + @forward Params.order Base.iterate, Base.length, Base.getindex -@forward Params.params Base.in -function Base.union!(ps::Params, itrs...) - foreach(itr -> foreach(x -> push!(ps, x), itr), itrs) +function Base.push!(ps::Params, x) + if !(x in ps.params) + push!(ps.order, x) + push!(ps.params, x) + end return ps end -Base.copy(ps::Params) = union!(Params(), ps) -Base.union(ps::Params, itrs...) = union!(copy(ps), itrs...) -Base.issetequal(ps1::Params, ps2::Params) = issetequal(ps1.params, ps2.params) -Base.issetequal(ps1::Params, x::Base.AbstractSet) = issetequal(ps1.params, x) -Base.issetequal(x::Base.AbstractSet, ps1::Params) = issetequal(x, ps1.params) - -function Base.intersect!(ps::Params, itrs...) - for itr in itrs - for x in collect(ps) - x ∉ itr && delete!(ps, x) - end +@adjoint! function Base.push!(xs::IdSet, x...) + l = length(x) + push!(xs, x...), Δ -> begin + (Δ, ntuple(_ -> nothing, l)...) end - return ps end -Base.intersect(ps::Params, itrs...) = intersect!(copy(ps), itrs...) - -function Base.push!(ps::Params, x) - if !(x in ps.params) - push!(ps.order, x) - push!(ps.params, x) +@adjoint! function Base.push!(xs::Params, x::AbstractArray{T}...) where T + sz_x = size.(x) + push!(xs, x...), Δ -> begin + (Δ, map(x -> Ones{T}(x...), sz_x)...) end - return ps end Base.push!(ps::Params, x...) = (foreach(x -> push!(ps, x), x); ps) @@ -139,8 +127,8 @@ function copy!(ps::Params, x::AbstractVector) @assert length(x) == sum(length(p) for p in ps) i = 0 for p in ps - p .= reshape(x[i+1:i+length(p)], size(p)) - i += length(p) + p .= reshape(x[i+1:i+length(p)], size(p)) + i += length(p) end ps end @@ -149,8 +137,8 @@ function copy!(x::AbstractVector, ps::Params) @assert length(x) == sum(length(p) for p in ps) i = 0 for p in ps - x[i+1:i+length(p)] .= vec(p) - i += length(p) + x[i+1:i+length(p)] .= vec(p) + i += length(p) end ps end @@ -163,23 +151,7 @@ end Base.show(io::IO, ps::Grads) = print(io, "Grads(...)") -@forward Grads.grads Base.setindex! -@forward Grads.params Base.length - -const ADictOrGrads = Union{AbstractDict, Grads} - -# Dictionary interface. -# Don't use the IdDict directly since it may contain some spurious pairs. -Base.haskey(gs::Grads, x) = x ∈ gs.params -Base.keys(gs::Grads) = gs.params -Base.values(gs::Grads) = (gs.grads[p] for p in gs.params) - -function Base.iterate(gs::Grads, state...) 
- res = iterate(gs.params, state...) - isnothing(res) && return nothing - p, next_state = res - return gs[p], next_state -end +@forward Grads.grads Base.getindex, Base.haskey function Base.getindex(gs::Grads, x) isbits(x) && error("Only reference types can be differentiated with `Params`.") @@ -196,8 +168,8 @@ length of `x` has to be equal to the sum of the lengths of all gradients. function copy!(gs::Grads, x::AbstractVector) i = 0 for p in gs.params - gs[p] .= reshape(x[i+1:i+length(p)], size(p)) - i += length(p) + gs[p] .= reshape(x[i+1:i+length(p)], size(p)) + i += length(p) end x end @@ -205,46 +177,12 @@ end function copy!(x::AbstractVector, gs::Grads) i = 0 for p in gs.params - x[i+1:i+length(p)] .= vec(gs[p]) - i += length(p) + x[i+1:i+length(p)] .= vec(gs[p]) + i += length(p) end x end -broadcasted(f, gs::Grads, gss::ADictOrGrads...) = map(f, gs, gss...) - -broadcasted(f, a::Numeric, gs::Grads) = map(x -> f(a, x), gs) -broadcasted(f, gs::Grads, a::Numeric) = map(x -> f(x, a), gs) - -function materialize!(gs1::Grads, gs2::Grads) - issetequal(gs1.params, gs2.params) || - throw(ArgumentError("Expected Grads objects with the same Params.")) - for p in gs1.params - gs1[p] = gs2[p] - end - return gs1 -end - - -function Base.map(f, gs1::Grads, gss::ADictOrGrads...) - gsout = Grads(IdDict{Any,Any}(), Params(gs1.params)) - return map!(f, gsout, gs1, gss...) -end - -function Base.map!(f, gsout::Grads, gss::ADictOrGrads...) - all(issetequal(gsout.params, keys(gs)) for gs in gss) || - throw(ArgumentError("map! expects Grads objects with the same Params.")) - for p in gsout.params - gsout[p] = f((_getformap(gs, p) for gs in gss)...) - end - return gsout -end - -function _getformap(gs, p) - g = gs[p] - isnothing(g) ? fill!(similar(p), 0) : g -end - function pullback(f, ps::Params) cx = Context() y, back = _pullback(cx, f) From e1b94cc7285ecb9bd08a7ec0e7470f623d97f227 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Fri, 11 Jun 2021 18:25:29 +0530 Subject: [PATCH 061/490] remove internal constructor --- src/compiler/interface.jl | 110 +++++++++++++++++++++++++++++++------- 1 file changed, 91 insertions(+), 19 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 5883f4269..baeba003e 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -1,12 +1,8 @@ using InteractiveUtils using InteractiveUtils: typesof using Core: Typeof - -@static if VERSION >= v"1.1" - import Base: copy! -else - import Future: copy! -end +import Base: copy! +import Base.Broadcast: broadcasted, materialize! mutable struct Context <: AContext cache::Union{IdDict{Any,Any},Nothing} @@ -47,8 +43,15 @@ end sensitivity(y::Number) = one(y) sensitivity(y::Complex) = error("Output is complex, so the gradient is not defined.") +sensitivity(y::AbstractArray) = error("Output is an array, so the gradient is not defined. Perhaps you wanted jacobian.") sensitivity(y) = error("Output should be scalar; gradients are not defined for output $(repr(y))") +""" + gradient(f, args...) +Returns a tuple containing `∂f/∂x` for each argument `x`, +the derivative (for scalar x) or the gradient. +`f(args...)` must be a real number, see [`jacobian`](@ref) for array output. +""" function gradient(f, args...) y, back = pullback(f, args...) 
return back(sensitivity(y)) @@ -68,6 +71,29 @@ Params() = Params(Buffer([], false), IdSet()) Params(xs) = Params(Buffer(xs, false), IdSet(xs)) @forward Params.order Base.iterate, Base.length, Base.getindex +@forward Params.params Base.in + +function Base.union!(ps::Params, itrs...) + foreach(itr -> foreach(x -> push!(ps, x), itr), itrs) + return ps +end + +Base.copy(ps::Params) = union!(Params(), ps) +Base.union(ps::Params, itrs...) = union!(copy(ps), itrs...) +Base.issetequal(ps1::Params, ps2::Params) = issetequal(ps1.params, ps2.params) +Base.issetequal(ps1::Params, x::Base.AbstractSet) = issetequal(ps1.params, x) +Base.issetequal(x::Base.AbstractSet, ps1::Params) = issetequal(x, ps1.params) + +function Base.intersect!(ps::Params, itrs...) + for itr in itrs + for x in collect(ps) + x ∉ itr && delete!(ps, x) + end + end + return ps +end + +Base.intersect(ps::Params, itrs...) = intersect!(copy(ps), itrs...) function Base.push!(ps::Params, x) if !(x in ps.params) @@ -102,8 +128,6 @@ function Base.delete!(ps::Params, x) return ps end -Params(xs) = push!(Params(), xs...) - Base.Broadcast.broadcasted(f, ps::Params) = broadcasted(f, ps.order) Base.:(==)(x::Params, y::Params) = x.order.data == y.order.data @@ -118,7 +142,6 @@ end """ copy!(ps::Params, x::AbstractVector) copy!(x::AbstractVector, ps::Params) - Copies the content of array `x` into the parameters `ps` or viceversa. The length of `x` has to be equal to the sum of the lengths of all parameters. @@ -127,8 +150,8 @@ function copy!(ps::Params, x::AbstractVector) @assert length(x) == sum(length(p) for p in ps) i = 0 for p in ps - p .= reshape(x[i+1:i+length(p)], size(p)) - i += length(p) + p .= reshape(x[i+1:i+length(p)], size(p)) + i += length(p) end ps end @@ -137,8 +160,8 @@ function copy!(x::AbstractVector, ps::Params) @assert length(x) == sum(length(p) for p in ps) i = 0 for p in ps - x[i+1:i+length(p)] .= vec(p) - i += length(p) + x[i+1:i+length(p)] .= vec(p) + i += length(p) end ps end @@ -151,7 +174,23 @@ end Base.show(io::IO, ps::Grads) = print(io, "Grads(...)") -@forward Grads.grads Base.getindex, Base.haskey +@forward Grads.grads Base.setindex! +@forward Grads.params Base.length + +const ADictOrGrads = Union{AbstractDict, Grads} + +# Dictionary interface. +# Don't use the IdDict directly since it may contain some spurious pairs. +Base.haskey(gs::Grads, x) = x ∈ gs.params +Base.keys(gs::Grads) = gs.params +Base.values(gs::Grads) = (gs.grads[p] for p in gs.params) + +function Base.iterate(gs::Grads, state...) + res = iterate(gs.params, state...) + isnothing(res) && return nothing + p, next_state = res + return gs[p], next_state +end function Base.getindex(gs::Grads, x) isbits(x) && error("Only reference types can be differentiated with `Params`.") @@ -161,15 +200,14 @@ end """ copy!(gs::Grads, x::AbstractVector) copy!(x::AbstractVector, gs::Grads) - Copies the content of array `x` into the gradient object `gs` or vice versa. The length of `x` has to be equal to the sum of the lengths of all gradients. """ function copy!(gs::Grads, x::AbstractVector) i = 0 for p in gs.params - gs[p] .= reshape(x[i+1:i+length(p)], size(p)) - i += length(p) + gs[p] .= reshape(x[i+1:i+length(p)], size(p)) + i += length(p) end x end @@ -177,12 +215,46 @@ end function copy!(x::AbstractVector, gs::Grads) i = 0 for p in gs.params - x[i+1:i+length(p)] .= vec(gs[p]) - i += length(p) + x[i+1:i+length(p)] .= vec(gs[p]) + i += length(p) end x end +broadcasted(f, gs::Grads, gss::ADictOrGrads...) = map(f, gs, gss...) 
+ +broadcasted(f, a::Numeric, gs::Grads) = map(x -> f(a, x), gs) +broadcasted(f, gs::Grads, a::Numeric) = map(x -> f(x, a), gs) + +function materialize!(gs1::Grads, gs2::Grads) + issetequal(gs1.params, gs2.params) || + throw(ArgumentError("Expected Grads objects with the same Params.")) + for p in gs1.params + gs1[p] = gs2[p] + end + return gs1 +end + + +function Base.map(f, gs1::Grads, gss::ADictOrGrads...) + gsout = Grads(IdDict{Any,Any}(), Params(gs1.params)) + return map!(f, gsout, gs1, gss...) +end + +function Base.map!(f, gsout::Grads, gss::ADictOrGrads...) + all(issetequal(gsout.params, keys(gs)) for gs in gss) || + throw(ArgumentError("map! expects Grads objects with the same Params.")) + for p in gsout.params + gsout[p] = f((_getformap(gs, p) for gs in gss)...) + end + return gsout +end + +function _getformap(gs, p) + g = gs[p] + isnothing(g) ? fill!(similar(p), 0) : g +end + function pullback(f, ps::Params) cx = Context() y, back = _pullback(cx, f) From 56c7c084ddd303999ca5504ba2f5e88a587946b9 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Mon, 14 Jun 2021 15:07:42 +0530 Subject: [PATCH 062/490] add tests --- test/interface.jl | 40 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/test/interface.jl b/test/interface.jl index 0ffb933f6..2c1af3c3d 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -163,4 +163,44 @@ end @test all(abs.(gs[w]) .<= 1e-5) @test all(abs.(gs[b]) .<= 1e-5) end + + @testset "Params nesting" begin + struct Dense{F,T,S} + W::T + b::S + σ::F + end + + (d::Dense)(x) = d.σ.(d.W * x .+ d.b) + d = Dense(ones(Float32, 3,3), zeros(Float32, 3), identity) + ps = Zygote.Params([d.W, d.b]) + r = ones(Float32, 3,3) + + gs = gradient(ps) do + p, pb = pullback(ps) do + sum(d(r)) + end + g = pb(p) + sum(g[d.W]) # + sum(g[d.b]) + end + + @test gs[d.W] ≈ fill(81f0, (3,3)) + + # Test L2 + l2g = gradient(ps) do + sum(sum(x .^ 2) for x in ps) + end + @test l2g[d.W] ≈ fill(2.f0, size(d.W)) + @test l2g[d.b] ≈ fill(0.f0, size(d.b)) + + # Can be safely removed - creating Params within + # gradient calls may break between releases. + sgs = gradient(ps) do + sum(sum(x) for x in Zygote.Params([d.W, d.b, b])) + end + @test sgs[d.W] ≈ fill(1.f0, size(d.W)) + @test sgs[d.b] ≈ fill(1.f0, size(d.b)) + end + + end From 0424158150bd727a147b6bcfb4663db0f83a2be4 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Mon, 14 Jun 2021 15:11:50 +0530 Subject: [PATCH 063/490] add Params(::Params) --- src/compiler/interface.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index baeba003e..75850fdbb 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -68,7 +68,8 @@ struct Params end Params() = Params(Buffer([], false), IdSet()) -Params(xs) = Params(Buffer(xs, false), IdSet(xs)) +Params(xs::Vector) = Params(Buffer(xs, false), IdSet(xs)) +Params(ps::Params) = ps @forward Params.order Base.iterate, Base.length, Base.getindex @forward Params.params Base.in From 7e6c2c9e0aa224a4451f535e333c358c1302e27d Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Mon, 14 Jun 2021 15:27:03 +0530 Subject: [PATCH 064/490] fixupmissing symbol --- test/interface.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/interface.jl b/test/interface.jl index 2c1af3c3d..159f4bce1 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -196,7 +196,7 @@ end # Can be safely removed - creating Params within # gradient calls may break between releases. 
sgs = gradient(ps) do - sum(sum(x) for x in Zygote.Params([d.W, d.b, b])) + sum(sum(x) for x in Zygote.Params([d.W, d.b])) end @test sgs[d.W] ≈ fill(1.f0, size(d.W)) @test sgs[d.b] ≈ fill(1.f0, size(d.b)) From d2785a6c5c3facf07605d89e72ed532140f55afd Mon Sep 17 00:00:00 2001 From: David Widmann Date: Mon, 14 Jun 2021 12:18:21 +0200 Subject: [PATCH 065/490] Disable regression tests --- test/gradcheck.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index c0aa8a665..53c29a047 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -4,7 +4,6 @@ using Zygote: gradient using Base.Broadcast: broadcast_shape using Distributed: pmap, CachingPool, workers import FiniteDifferences -using BenchmarkTools function ngradient(f, xs::AbstractArray...) grads = zero.(xs) @@ -1719,6 +1718,7 @@ end @test gradcheck(x -> prod(Base.Fix1(+, 1), x), randn(100)) @test gradcheck(x -> prod(Base.Fix2(+, 1), x), randn(100)) +#= regression tests are not included to reduce CI times # check the execution times compared with a closure # https://github.com/FluxML/Zygote.jl/issues/957 x = randn(100) @@ -1727,4 +1727,5 @@ end tfix2 = @belapsed(gradient($(x -> prod(Base.Fix2(+, 1), x)), $x)) @test tfix1 < 2 * tclosure @test tfix2 < 2 * tclosure +=# end From 5af6148496390344c96dc29662814883ae8f32db Mon Sep 17 00:00:00 2001 From: David Widmann Date: Mon, 14 Jun 2021 12:18:32 +0200 Subject: [PATCH 066/490] Clean test dependencies --- Project.toml | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/Project.toml b/Project.toml index b586ded24..fc02a9a3f 100644 --- a/Project.toml +++ b/Project.toml @@ -38,14 +38,12 @@ ZygoteRules = "0.2.1" julia = "1.3" [extras] -BenchmarkTools = "6e4b80f9-dd63-53aa-95a3-0cdb28fa8baf" CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688" -StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] -test = ["BenchmarkTools", "CUDA", "Distances", "FFTW", "FiniteDifferences", "LogExpFunctions", "Test"] +test = ["CUDA", "Distances", "FFTW", "FiniteDifferences", "LogExpFunctions", "Test"] From d7308036ab964c81c7a5fb797ee1b1dcbd30e9c9 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Mon, 14 Jun 2021 12:21:16 +0200 Subject: [PATCH 067/490] Re-add StatsFuns dependency --- Project.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/Project.toml b/Project.toml index fc02a9a3f..56d72758a 100644 --- a/Project.toml +++ b/Project.toml @@ -43,6 +43,7 @@ Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688" +StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c" # otherwise we can't add a compat bound Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] From 4ba326f7ddb4533d0443ef5ccb299eba0c8b3f8e Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Mon, 14 Jun 2021 20:59:47 +0530 Subject: [PATCH 068/490] add tuple constructor --- src/compiler/interface.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 75850fdbb..4c8eb0d53 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -68,8 +68,9 @@ struct Params end Params() = 
Params(Buffer([], false), IdSet()) -Params(xs::Vector) = Params(Buffer(xs, false), IdSet(xs)) +Params(xs) = Params(Buffer(xs, false), IdSet(xs)) Params(ps::Params) = ps +Params(xs::Tuple) = Params(collect(xs)) @forward Params.order Base.iterate, Base.length, Base.getindex @forward Params.params Base.in From ce8eb91a592cdd21a1e6610e03fd9e5245e40ae3 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Tue, 15 Jun 2021 11:11:46 +0530 Subject: [PATCH 069/490] whitespace --- src/compiler/interface.jl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 4c8eb0d53..86e847dc4 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -48,8 +48,10 @@ sensitivity(y) = error("Output should be scalar; gradients are not defined for o """ gradient(f, args...) + Returns a tuple containing `∂f/∂x` for each argument `x`, the derivative (for scalar x) or the gradient. + `f(args...)` must be a real number, see [`jacobian`](@ref) for array output. """ function gradient(f, args...) @@ -144,6 +146,7 @@ end """ copy!(ps::Params, x::AbstractVector) copy!(x::AbstractVector, ps::Params) + Copies the content of array `x` into the parameters `ps` or viceversa. The length of `x` has to be equal to the sum of the lengths of all parameters. @@ -202,6 +205,7 @@ end """ copy!(gs::Grads, x::AbstractVector) copy!(x::AbstractVector, gs::Grads) + Copies the content of array `x` into the gradient object `gs` or vice versa. The length of `x` has to be equal to the sum of the lengths of all gradients. """ From c4373ddcd0e5ae248ac4f2e3cefe627b4d8b112a Mon Sep 17 00:00:00 2001 From: Simeon Schaub Date: Wed, 16 Jun 2021 05:26:16 -0400 Subject: [PATCH 070/490] fix #996 --- src/lib/broadcast.jl | 1 + test/gradcheck.jl | 4 ++++ 2 files changed, 5 insertions(+) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index df9fdc9b5..879f62c1a 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -186,6 +186,7 @@ end y, ∂b = _broadcast((x...) -> _pullback(__context__, f, x...), args...) y, function (ȳ) dxs = ∂b(ȳ) + dxs === nothing && return nothing (nothing, dxs...) end end diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 43f482d01..aeea366cf 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1713,3 +1713,7 @@ end @test s == 0.0 @test gs == (nothing,) end + +# https://github.com/FluxML/Zygote.jl/issues/996 +a = rand(3) +@test Zygote.gradient(x->sum(x .+ rand.()), a) == (ones(3),) From b250d925b8d4db8d5f76ee506c0ba6a4e35fe689 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Fri, 18 Jun 2021 18:00:59 +0100 Subject: [PATCH 071/490] Use ChainRules RuleConfig (#990) * draft test_gradient function * draft * update draft * wrap input * polish zygote_ad_rrule * clean up chainrulestest utils * remove multizeros * rename zygote_ad_rrule to rrule_via_ad * rename export * add a real test example * take nothing seriously * skip all chainrules tests * refresh often * remove chainrules_fallback method * Revert "refresh often" This reverts commit 388bd0f060de9b36d89059ac534490a52123909e. 
* remove one level of nesting * add a test * use ChainRules RuleConfigs * Fix it so can use RuleConfig in Zygote * make tests easier to use * wip * Mark testing of rrule_via_ad on round as broken (others work) * debugging * Don't take nothing seriously * remove scratch file * stop taking nothing seriously again * fix typo * Fix use of test_rrule to test Zygote * renable ChainRules tests * fix ChainRulesTest to use new ChainRulesCore * bring back adjoint for sum on arrays of bools * clash with names less * Use old rule for sum on Arrays of Arrays * Apply suggestions from code review Co-authored-by: Miha Zgubic * import ZygoteRuleConfig into the tests * import ChainRules testing tools etc * import ChainRules testing tools etc Co-authored-by: Miha Zgubic Co-authored-by: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Co-authored-by: Miha Zgubic --- Project.toml | 6 +- src/Zygote.jl | 2 + src/compiler/chainrules.jl | 76 ++++++++++++++++++++----- src/compiler/interface2.jl | 18 ++++-- src/lib/array.jl | 11 ++-- test/chainrules.jl | 113 ++++++++++++++++++++++++++++--------- test/features.jl | 6 +- test/gradcheck.jl | 6 +- test/lib/array.jl | 6 +- test/runtests.jl | 2 +- 10 files changed, 183 insertions(+), 63 deletions(-) diff --git a/Project.toml b/Project.toml index 2a89b4cd1..3fe2ceb58 100644 --- a/Project.toml +++ b/Project.toml @@ -23,8 +23,9 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" -ChainRules = "0.7.66, 0.8" +ChainRules = "0.8.12" ChainRulesCore = "0.9.44, 0.10" +ChainRulesTestUtils = "0.7.1" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11" ForwardDiff = "0.10" @@ -39,6 +40,7 @@ julia = "1.3" [extras] CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" +ChainRulesTestUtils = "cdddcdb0-9152-4a09-a978-84456f9df70a" Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" @@ -47,4 +49,4 @@ StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] -test = ["CUDA", "Distances", "FFTW", "FiniteDifferences", "LogExpFunctions", "Test"] +test = ["ChainRulesTestUtils", "CUDA", "Distances", "FFTW", "FiniteDifferences", "LogExpFunctions", "Test"] diff --git a/src/Zygote.jl b/src/Zygote.jl index 873707fce..895e65f3c 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -6,6 +6,7 @@ using LinearAlgebra: copytri!, AbstractTriangular import ZygoteRules: @adjoint, @adjoint!, AContext, adjoint, _pullback, pullback, literal_getproperty, literal_getfield +using ChainRulesCore using ChainRules: ChainRules, rrule, unthunk, canonicalize using IRTools using MacroTools, Requires @@ -13,6 +14,7 @@ using MacroTools: @forward import Distributed: pmap, CachingPool, workers export Params, gradient, jacobian, hessian, diaghessian, pullback, pushforward, @code_adjoint +export rrule_via_ad const Numeric{T<:Number} = Union{T, AbstractArray{<:T}} diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 8392a27ce..c4e72f07e 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -1,4 +1,11 @@ -const chainrules_fallback = which(rrule, Tuple{Any}) +struct ZygoteRuleConfig{CTX<:AContext} <: RuleConfig{Union{HasReverseMode,NoForwardsMode}} + context::CTX +end +ZygoteRuleConfig() = ZygoteRuleConfig(Context()) + + +const rrule_fallback_method = Base.which(rrule, Tuple{Any, Vararg{Any}}) +const rrule_redispatcher_method = Base.which(rrule, Tuple{RuleConfig, Any, Vararg{Any}}) """ 
has_chain_rrule(T) @@ -10,13 +17,20 @@ If it does not, then the second argument is a list of edges to attach to the Cod such that if a suitable rule is defined later, the generated function will recompile. """ function has_chain_rrule(T) - m = meta(Tuple{typeof(rrule),T.parameters...}) - if m.method !== chainrules_fallback - # found a rrule, no need to add any edges - return true, nothing + config_T, arg_Ts = Iterators.peel(T.parameters) + m_with_config = meta(Tuple{typeof(rrule), config_T, arg_Ts...}) + if m_with_config.method === rrule_redispatcher_method + # it is being redispatched without config, so check it that hits the fallback + m_without_config = meta(Tuple{typeof(rrule), arg_Ts...}) + if m_without_config.method === rrule_fallback_method + # no rrule exists, return instance for m_with_config as that will be invalidated + # directly if configured rule added, or indirectly if unconfigured rule added + return false, m_with_config.instance + end end - - return false, m.instance + # otherwise found a rrule, no need to add any edges, as it will generate code with + # natural edges. + return true, nothing end """ @@ -80,25 +94,25 @@ end @inline (s::ZBack)(::Nothing) = nothing """ - chain_rrule(f, args...) + chain_rrule(config, f, args...) Returns a the (primal) value of `f(args...)` and a pullback, by invoking `ChainRulesCore.rrule(f, args...)`. The pullback is appropriately wrapped up to follow Zygote conventions. """ -@inline function chain_rrule(f, args...) - y, back = rrule(f, args...) +@inline function chain_rrule(config, f, args...) + y, back = rrule(config, f, args...) return y, ZBack(back) end """ - chain_rrule_kw(kwf, kwargs, f, args...) + chain_rrule_kw(config, kwf, kwargs, f, args...) As per [`chain_rrule`](@ref) but with support for kwargs. `kwf` should be the kwfunc matching to `f`, and `kwargs` are a `NamedTuple` of keyword arguments. """ -@inline function chain_rrule_kw(kwf, kwargs, f, args...) - y, back = rrule(f, args...; kwargs...) +@inline function chain_rrule_kw(config, kwf, kwargs, f, args...) + y, back = rrule(config, f, args...; kwargs...) function kw_zpullback(dy) dxs = ZBack(back)(dy) if dxs === nothing # if dxs is nothing, then all partiaols are nothing @@ -110,3 +124,39 @@ As per [`chain_rrule`](@ref) but with support for kwargs. end return y, kw_zpullback end + + +function ChainRulesCore.rrule_via_ad(config::ZygoteRuleConfig, f, args...) + y, pb = _pullback(config.context, f, args...) + ad_pullback(Δ) = zygote2differential(pb(wrap_chainrules_output(Δ)), (f, args...)) + return y, ad_pullback +end + +""" + zygote2differential(x) + +Convert input `x` from the Zygote format to the ChainRules differential types. 
+""" +zygote2differential(x, primal) = z2d(x, primal) +zygote2differential(::Nothing, ::Any) = NoTangent() +zygote2differential(t::Tuple, primal::Tuple) = map(z2d, t, primal) +zygote2differential(t::Tuple, primal) = (@warn "primal should be a tuple, not $primal"; return t) +z2d(x, ::Any) = x +z2d(::Nothing, ::Any) = NoTangent() +z2d(a::AbstractArray{<:Number}, primal::AbstractArray{T}) where T = a +z2d(a::AbstractArray, primal::AbstractArray{T}) where T = z2d.(a, primal) +z2d(x::Union{AbstractZero, Tangent}, ::Any) = (difftype_warn(x); return x) +function z2d(t::Tuple, primal::Tuple) + tp::Tuple = map(z2d, t, primal) + primal_type = typeof(primal) + return canonicalize(Tangent{primal_type, typeof(tp)}(tp)) +end + +function z2d(t::NamedTuple, primal) + primal_type = typeof(primal) + fnames = fieldnames(primal_type) + complete_t = NamedTuple{fnames}(fn in keys(t) ? t[fn] : nothing for fn in fnames) + primals = NamedTuple{fnames}(getfield(primal, fn) for fn in fnames) + tp::NamedTuple = map(z2d, complete_t, primals) + return canonicalize(Tangent{primal_type, typeof(tp)}(tp)) +end diff --git a/src/compiler/interface2.jl b/src/compiler/interface2.jl index 350599bba..b77c7e3a6 100644 --- a/src/compiler/interface2.jl +++ b/src/compiler/interface2.jl @@ -10,12 +10,18 @@ end T = Tuple{f,args...} ignore_sig(T) && return :(f(args...), Pullback{$T}(())) - iskw = is_kwfunc(f, args...) - # if it is_kw then `args[1]` are the keyword args, `args[2]` is actual function - base_T = iskw ? Tuple{args[2:end]...} : T - hascr, cr_edge = has_chain_rrule(base_T) - chain_rrule_f = iskw ? :chain_rrule_kw : :chain_rrule - hascr && return :($chain_rrule_f(f, args...)) + if is_kwfunc(f, args...) + # if it is_kw then `args[1]` are the keyword args, `args[2]` is actual function + cr_T = Tuple{ZygoteRuleConfig{ctx}, args[2:end]...} + chain_rrule_f = :chain_rrule_kw + else + cr_T = Tuple{ZygoteRuleConfig{ctx}, f, args...} + chain_rrule_f = :chain_rrule + end + + hascr, cr_edge = has_chain_rrule(cr_T) + + hascr && return :($chain_rrule_f(ZygoteRuleConfig(ctx), f, args...)) g = try _lookup_grad(T) catch e e end !(g isa Tuple) && return :(f(args...), Pullback{$T}((f,))) diff --git a/src/lib/array.jl b/src/lib/array.jl index f8389d4e3..f1e217fe2 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -287,19 +287,16 @@ end end end -@adjoint function sum(xs::AbstractArray{Bool}; dims = :) - sum(xs, dims = dims), Δ -> (nothing,) -end - -@adjoint function sum(f, xs::AbstractArray; kws...) +@adjoint function sum(f, xs::AbstractArray{<:AbstractArray}; kws...) @assert !haskey(kws, :init) # TODO add init support (julia 1.6) return pullback(__context__, (f, xs) -> sum(f.(xs); kws...), f, xs) end -@adjoint function sum(::typeof(abs2), X::AbstractArray; dims = :) - return sum(abs2, X; dims=dims), Δ::Union{Number, AbstractArray}->(nothing, ((2Δ) .* X)) +@adjoint function sum(xs::AbstractArray{Bool}; dims = :) + sum(xs, dims = dims), Δ -> (nothing,) end + function _pullback(cx::AContext, ::typeof(prod), f, xs::AbstractArray) y, back = pullback(cx, ((f, xs) -> prod(f.(xs))), f, xs) y, ȳ -> (nothing, back(ȳ)...) 
diff --git a/test/chainrules.jl b/test/chainrules.jl index 8b7034753..519c10ad6 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -1,16 +1,14 @@ -using Zygote, Test, ChainRules - - -@testset "ChainRules Integration" begin - @testset "basic" begin +using ChainRulesCore, ChainRulesTestUtils, Zygote +@testset "ChainRules integration" begin + @testset "ChainRules basics" begin cr_inner_demo_rrule_hitcount = Ref(0) cr_inner_demo_pullback_hitcount = Ref(0) cr_inner_demo(x) = 5x - function ChainRules.rrule(::typeof(cr_inner_demo), x) + function ChainRulesCore.rrule(::typeof(cr_inner_demo), x) cr_inner_demo_rrule_hitcount[] += 1 function cr_inner_demo_pullback(Δx) cr_inner_demo_pullback_hitcount[] += 1 - return ChainRules.NO_FIELDS, 5.0*Δx + return NoTangent(), 5.0*Δx end return cr_inner_demo(x), cr_inner_demo_pullback end @@ -19,6 +17,7 @@ using Zygote, Test, ChainRules 2 + 10cr_inner_demo(x) end + # @testset "gradient inner" begin cr_inner_demo_rrule_hitcount[] = 0 @@ -55,19 +54,19 @@ using Zygote, Test, ChainRules simo_rrule_hitcount = Ref(0) simo_pullback_hitcount = Ref(0) simo(x) = (5x, 7x) - function ChainRules.rrule(::typeof(simo), x) + function ChainRulesCore.rrule(::typeof(simo), x) simo_rrule_hitcount[] += 1 function simo_pullback((Δa, Δb)) simo_pullback_hitcount[] += 1 - return ChainRules.NO_FIELDS, 5*Δa + 7*Δb + return NoTangent(), 5*Δa + 7*Δb end return simo(x), simo_pullback end - + simo_outer(x) = sum(simo(x)) - @assert simo_rrule_hitcount[] == 0 - @assert simo_pullback_hitcount[] == 0 + simo_rrule_hitcount[] = 0 + simo_pullback_hitcount[] = 0 @test (12,) == Zygote.gradient(simo_outer, π) @test simo_rrule_hitcount[] == 1 @test simo_pullback_hitcount[] == 1 @@ -77,19 +76,20 @@ using Zygote, Test, ChainRules miso_rrule_hitcount = Ref(0) miso_pullback_hitcount = Ref(0) miso(a, b) = 5a + 7b - function ChainRules.rrule(::typeof(miso), a, b) + function ChainRulesCore.rrule(::typeof(miso), a, b) miso_rrule_hitcount[] += 1 function miso_pullback(Δy) miso_pullback_hitcount[] += 1 - return ChainRules.NO_FIELDS, 5Δy , 7Δy + return NoTangent(), 5Δy , 7Δy end return miso(a, b), miso_pullback end + miso_outer(x) = miso(100x, 10x) - @assert miso_rrule_hitcount[] == 0 - @assert miso_pullback_hitcount[] == 0 + miso_rrule_hitcount[] = 0 + miso_pullback_hitcount[] = 0 @test (570,) == Zygote.gradient(miso_outer, π) @test miso_rrule_hitcount[] == 1 @test miso_pullback_hitcount[] == 1 @@ -99,17 +99,17 @@ using Zygote, Test, ChainRules mimo_rrule_hitcount = Ref(0) mimo_pullback_hitcount = Ref(0) mimo(a, b) = (5a + 7b, 100a, 10b) - function ChainRules.rrule(::typeof(mimo), a, b) + function ChainRulesCore.rrule(::typeof(mimo), a, b) mimo_rrule_hitcount[] += 1 function mimo_pullback((Δx, Δy, Δz)) mimo_pullback_hitcount[] += 1 - return ChainRules.NO_FIELDS, 5Δx + 100Δy , 7Δx + 10Δz + return NoTangent(), 5Δx + 100Δy , 7Δx + 10Δz end return mimo(a, b), mimo_pullback end - @assert mimo_rrule_hitcount[] == 0 - @assert mimo_pullback_hitcount[] == 0 + mimo_rrule_hitcount[] = 0 + mimo_pullback_hitcount[] = 0 _, pb = Zygote.pullback(mimo, π, 2π) @test (105, 17) == pb((1, 1, 1)) @test mimo_rrule_hitcount[] == 1 @@ -129,13 +129,14 @@ using Zygote, Test, ChainRules # to a single `nothing` if they are all zero-like. 
not_diff_eg(x, i) = [10, 20][i] - function ChainRules.rrule(::typeof(not_diff_eg), x, i) + function ChainRulesCore.rrule(::typeof(not_diff_eg), x, i) function not_diff_eg_pullback(Δ) - return ChainRules.NO_FIELDS, ChainRules.ZeroTangent(), ChainRules.NoTangent() + return NoTangent(), ZeroTangent(), NoTangent() end return not_diff_eg(x, i), not_diff_eg_pullback end + _, pb = Zygote.pullback(not_diff_eg, 10.4, 2) @test pb(1.2) === nothing end @@ -175,14 +176,15 @@ using Zygote, Test, ChainRules kwfoo_rrule_hitcount = Ref(0) kwfoo_pullback_hitcount = Ref(0) kwfoo(x; k=10) = x + k - function ChainRules.rrule(::typeof(kwfoo), x; k=10) + function ChainRulesCore.rrule(::typeof(kwfoo), x; k=10) kwfoo_rrule_hitcount[] += 1 function kwfoo_pullback(Δy) kwfoo_pullback_hitcount[] += 1 - return ChainRules.NO_FIELDS, Δy + return NoTangent(), Δy end return kwfoo(x; k=k), kwfoo_pullback end + kwfoo_outer_unused(x) = kwfoo(x) kwfoo_outer_used(x) = kwfoo(x; k=-15) @@ -196,24 +198,81 @@ using Zygote, Test, ChainRules end end - @testset "kwarg, with all AbstractZero partials" begin # while ChainRules always has a partial for every input, Zygote combined them all # to a single `nothing` if they are all zero-like. not_diff_kw_eg(x, i; kw=1.0) = [10, 20][i] - function ChainRules.rrule(::typeof(not_diff_kw_eg), x, i; kwargs...) + function ChainRulesCore.rrule(::typeof(not_diff_kw_eg), x, i; kwargs...) function not_diff_kw_eg_pullback(Δ) - return ChainRules.NO_FIELDS, ChainRules.ZeroTangent(), ChainRules.NoTangent() + return NoTangent(), ZeroTangent(), NoTangent() end return not_diff_kw_eg(x, i; kwargs...), not_diff_kw_eg_pullback end + @test (nothing,) == Zygote.gradient(x->not_diff_kw_eg(x, 2), 10.4) @test (nothing,) == Zygote.gradient(x->not_diff_kw_eg(x, 2; kw=2.0), 10.4) end end +@testset "ChainRulesCore.rrule_via_ad" begin + @testset "basic" begin + # broken because Zygoye compresses `(NoTangent(), NoTangent())` into just NoTangent() + # which ChainRulesTestUtils does not think is valid: + @test_broken(rrule_via_ad(ZygoteRuleConfig(), round, 2.2) isa Tuple{NoTangent,NoTangent}) + # uncomment below when/if above is fixed + # test_rrule(ZygoteRuleConfig(), round, 2.2; rrule_f=rrule_via_ad) + + test_rrule(ZygoteRuleConfig(), vcat, rand(3), rand(4); rrule_f=rrule_via_ad, check_inferred=false) + test_rrule(ZygoteRuleConfig(), getindex, rand(5), 3; rrule_f=rrule_via_ad) + end + + @testset "struct" begin + struct Foo + x + y + end + makefoo(a, b) = Foo(a, b) + sumfoo(foo) = foo.x + foo.y + + + test_rrule( + ZygoteRuleConfig(), sumfoo, Foo(1.2, 2.3); rrule_f=rrule_via_ad, check_inferred=false + ) + test_rrule( + ZygoteRuleConfig(), makefoo, 1.0, 2.0; + rrule_f=rrule_via_ad, check_inferred=false + ) + end + + @testset "tuples/namedtuples" begin + my_tuple(a, b, c) = (a+b, b+c) + my_namedtuple(a, b, c) = (a=a, b=b, c=0.0) + + test_rrule( + ZygoteRuleConfig(), my_tuple, 1., 2., 3.; rrule_f=rrule_via_ad + ) + test_rrule( + ZygoteRuleConfig(), my_namedtuple, 1., 2., 3.; rrule_f=rrule_via_ad + ) + test_rrule( + ZygoteRuleConfig(), my_namedtuple, 1., (2.0, "str"), 3.; rrule_f=rrule_via_ad + ) + test_rrule(ZygoteRuleConfig(), sum, (1.0, 2.0, 3.0); rrule_f=rrule_via_ad) + test_rrule( + ZygoteRuleConfig(), sum, (a=1.0, b=2.0); rrule_f=rrule_via_ad, check_inferred=false + ) + end + + @testset "arrays" begin + nada(x, y) = 1.0 + test_rrule(ZygoteRuleConfig(), nada, rand(3), rand(2,3); rrule_f=rrule_via_ad) + test_rrule(ZygoteRuleConfig(), +, rand(3), rand(3); rrule_f=rrule_via_ad) + test_rrule(ZygoteRuleConfig(), *, rand(1, 3), 
rand(3); rrule_f=rrule_via_ad) + end +end + @testset "FastMath support" begin @test gradient(2.0) do x @fastmath x^2.0 diff --git a/test/features.jl b/test/features.jl index 4c63aca74..d10d60c19 100644 --- a/test/features.jl +++ b/test/features.jl @@ -179,13 +179,13 @@ end @test gradient(x -> x.re*x.im, 2+3im) == ((re = 3, im = 2),) -struct Foo{T} +struct Bar{T} a::T b::T end function mul_struct(a, b) - c = Foo(a, b) + c = Bar(a, b) c.a * c.b end @@ -358,7 +358,7 @@ end pop!(stk) end == (1,) -@test gradient(x -> [x][1].a, Foo(1, 1)) == ((a=1, b=nothing),) +@test gradient(x -> [x][1].a, Bar(1, 1)) == ((a=1, b=nothing),) @test gradient((a, b) -> Zygote.hook(-, a)*b, 2, 3) == (-3, 2) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index aeea366cf..5d95f3a5c 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -72,13 +72,13 @@ end @testset "power" begin @test gradient(x -> x^2, -2) == (-4,) @test gradient(x -> x^10, -1.0) == (-10,) # literal_pow - pow = 10 - @test gradient(x -> x^pow, -1.0) == (-pow,) + _pow = 10 + @test gradient(x -> x^_pow, -1.0) == (-_pow,) @test gradient(p -> real(2^p), 2)[1] ≈ 4*log(2) @test gradient(xs ->sum(xs .^ 2), [2, -1]) == ([4, -2],) @test gradient(xs ->sum(xs .^ 10), [3, -1]) == ([10*3^9, -10],) - @test gradient(xs ->sum(xs .^ pow), [4, -1]) == ([pow*4^9, -10],) + @test gradient(xs ->sum(xs .^ _pow), [4, -1]) == ([_pow*4^9, -10],) @test gradient(x -> real((1+3im) * x^2), 5+7im) == (-32 - 44im,) @test gradient(p -> real((1+3im) * (5+7im)^p), 2)[1] ≈ (-234 + 2im)*log(5 - 7im) diff --git a/test/lib/array.jl b/test/lib/array.jl index 380d1bb8f..6f72a4a2f 100644 --- a/test/lib/array.jl +++ b/test/lib/array.jl @@ -1,4 +1,8 @@ +using ChainRulesTestUtils using LinearAlgebra +using Zygote: ZygoteRuleConfig # issue 897 -@test gradient(x -> sum(sin, Diagonal(x)), ones(2)) == ([0.5403023058681398, 0.5403023058681398],) + +test_rrule(ZygoteRuleConfig(), x->sum(sin, Diagonal(x)), ones(2); rrule_f=rrule_via_ad, check_inferred=false) +test_rrule(ZygoteRuleConfig(), x->sum(sin, Diagonal(x)), rand(3); rrule_f=rrule_via_ad, check_inferred=false) diff --git a/test/runtests.jl b/test/runtests.jl index f20b59a7e..67893a7a5 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,5 +1,5 @@ using Zygote, Test -using Zygote: gradient +using Zygote: gradient, ZygoteRuleConfig using CUDA: has_cuda if has_cuda() From 13647cd618c45d9c7e691d403e666a64d5d5309a Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Fri, 18 Jun 2021 18:02:23 +0100 Subject: [PATCH 072/490] bump version --- Project.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Project.toml b/Project.toml index 3fe2ceb58..60476fca1 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.12" +version = "0.6.13" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" @@ -24,7 +24,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" ChainRules = "0.8.12" -ChainRulesCore = "0.9.44, 0.10" +ChainRulesCore = "0.10.4" ChainRulesTestUtils = "0.7.1" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11" From 61d4eebdc77aa4599e88991e32bf29a6ba8af47b Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 18 Jun 2021 14:13:37 -0400 Subject: [PATCH 073/490] faster generic broadcasting --- src/lib/broadcast.jl | 16 +++++++++------- 1 file changed, 9 insertions(+), 7 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 
879f62c1a..219a9fbdb 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -172,23 +172,25 @@ collapse_nothings(xs) = xs @adjoint function broadcasted(::AbstractArrayStyle, f, args...) len = inclen(args) y∂b = _broadcast((x...) -> _pullback(__context__, f, x...), args...) - y = map(x -> x[1], y∂b) - ∂b = map(x -> x[2], y∂b) - y, function (ȳ) - dxs_zip = map((∂b, ȳ) -> ∂b(ȳ), ∂b, ȳ) - dxs = collapse_nothings.(ntuple(i -> map(x -> _get(x, i), dxs_zip), len)) + y = map(first, y∂b) + function ∇broadcasted(ȳ) + dxs_zip = map((pair, ȳ₁) -> last(pair)(ȳ₁), y∂b, ȳ) + dxs = ntuple(len) do i + collapse_nothings(map(StaticGetter{i}(), dxs_zip)) + end (nothing, accum_sum(dxs[1]), map(unbroadcast, args, Base.tail(dxs))...) end + y, ∇broadcasted end @adjoint function broadcasted(::AbstractArrayStyle{0}, f, args...) - len = inclen(args) y, ∂b = _broadcast((x...) -> _pullback(__context__, f, x...), args...) - y, function (ȳ) + function ∇broadcasted0(ȳ) dxs = ∂b(ȳ) dxs === nothing && return nothing (nothing, dxs...) end + y, ∇broadcasted0 end # Use the `map` adjoint in this special case, which is the same but applies From d949838b2996cfe9db49e840ee974c5a0e0dc24a Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Fri, 18 Jun 2021 19:18:18 +0100 Subject: [PATCH 074/490] for clarity check of _lookup grad is nothing rather than is a tuple, thus matching the rest of the code referencing g --- src/compiler/interface2.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/interface2.jl b/src/compiler/interface2.jl index b77c7e3a6..141d90a77 100644 --- a/src/compiler/interface2.jl +++ b/src/compiler/interface2.jl @@ -24,7 +24,7 @@ end hascr && return :($chain_rrule_f(ZygoteRuleConfig(ctx), f, args...)) g = try _lookup_grad(T) catch e e end - !(g isa Tuple) && return :(f(args...), Pullback{$T}((f,))) + g === nothing && return :(f(args...), Pullback{$T}((f,))) meta, forw, _ = g argnames!(meta, Symbol("#self#"), :ctx, :f, :args) forw = varargs!(meta, forw, 3) From 8d9fac7b19fa0cfd511d430fab1a61541c1b7626 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Fri, 18 Jun 2021 19:23:19 +0100 Subject: [PATCH 075/490] rename _lookup_grad --- src/compiler/emit.jl | 2 +- src/compiler/interface2.jl | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/compiler/emit.jl b/src/compiler/emit.jl index d277621ab..1c82a44f1 100644 --- a/src/compiler/emit.jl +++ b/src/compiler/emit.jl @@ -95,7 +95,7 @@ end varargs(m::Method, n) = m.isva ? 
n - m.nargs + 1 : nothing -function _lookup_grad(T) +function _generate_pullback_via_decomposition(T) (m = meta(T)) === nothing && return va = varargs(m.method, length(T.parameters)) forw, back = stacks!(Adjoint(IR(m), varargs = va, normalise = false), T) diff --git a/src/compiler/interface2.jl b/src/compiler/interface2.jl index 141d90a77..ac4a5a76a 100644 --- a/src/compiler/interface2.jl +++ b/src/compiler/interface2.jl @@ -23,7 +23,7 @@ end hascr && return :($chain_rrule_f(ZygoteRuleConfig(ctx), f, args...)) - g = try _lookup_grad(T) catch e e end + g = try _generate_pullback_via_decomposition(T) catch e e end g === nothing && return :(f(args...), Pullback{$T}((f,))) meta, forw, _ = g argnames!(meta, Symbol("#self#"), :ctx, :f, :args) @@ -37,7 +37,8 @@ end @generated function (j::Pullback{T})(Δ) where T ignore_sig(T) && return :nothing - g = try _lookup_grad(T) + g = try + _generate_pullback_via_decomposition(T) catch e rethrow(CompileError(T,e)) end From 7f52f560d9f2bece25771e9a2774c2da1edc3b01 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 18 Jun 2021 14:38:36 -0400 Subject: [PATCH 076/490] (StaticGetter)(::Nothing) --- src/lib/array.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/src/lib/array.jl b/src/lib/array.jl index f1e217fe2..aa939b2f7 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -194,6 +194,7 @@ end struct StaticGetter{i} end (::StaticGetter{i})(v) where {i} = v[i] +(::StaticGetter{i})(::Nothing) where {i} = nothing @generated function _unzip(tuples, ::Val{N}) where {N} Expr(:tuple, (:(map($(StaticGetter{i}()), tuples)) for i ∈ 1:N)...) end From 079c287e36220f070017b8de6f8e259dbf6f2d5e Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 18 Jun 2021 14:39:12 -0400 Subject: [PATCH 077/490] style change --- src/lib/broadcast.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 219a9fbdb..da62f0af1 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -174,7 +174,7 @@ collapse_nothings(xs) = xs y∂b = _broadcast((x...) -> _pullback(__context__, f, x...), args...) y = map(first, y∂b) function ∇broadcasted(ȳ) - dxs_zip = map((pair, ȳ₁) -> last(pair)(ȳ₁), y∂b, ȳ) + dxs_zip = map(((_, pb), ȳ₁) -> pb(ȳ₁), y∂b, ȳ) dxs = ntuple(len) do i collapse_nothings(map(StaticGetter{i}(), dxs_zip)) end From 458413654a5e53e7002c69c28013e14fab5957e3 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 18 Jun 2021 14:47:48 -0400 Subject: [PATCH 078/490] use StaticGetter for CuArrays too --- src/lib/broadcast.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index da62f0af1..16e1cdfa9 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -229,8 +229,8 @@ end out = dual_function(f).(args...) 
eltype(out) <: Dual || return (out, _ -> nothing) y = map(x -> x.value, out) - _back(ȳ, i) = unbroadcast(args[i], ((a, b) -> a*b.partials[i]).(ȳ, out)) - back(ȳ) = ntuple(i -> _back(ȳ, i), N) + _back(ȳ, geti) = unbroadcast(geti(args), ((a, b) -> a * geti(b.partials)).(ȳ, out)) + back(ȳ) = ntuple(i -> _back(ȳ, StaticGetter{i}()), N) return y, back end From b81c2e30e6a9b91363146cfd053db64cdd0b2eb0 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 18 Jun 2021 14:48:45 -0400 Subject: [PATCH 079/490] rm _get --- src/lib/broadcast.jl | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 16e1cdfa9..1f3e1d076 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -164,23 +164,21 @@ end # Avoid hitting special cases for `Adjoint` etc. _broadcast(f::F, x...) where F = materialize(broadcasted(f, x...)) -_get(x::Tuple, i) = x[i] -_get(::Nothing, i) = nothing collapse_nothings(xs::AbstractArray{Nothing}) = nothing collapse_nothings(xs) = xs -@adjoint function broadcasted(::AbstractArrayStyle, f, args...) +@adjoint function broadcasted(::AbstractArrayStyle, f::F, args...) where {F} len = inclen(args) y∂b = _broadcast((x...) -> _pullback(__context__, f, x...), args...) y = map(first, y∂b) - function ∇broadcasted(ȳ) + function ∇broadcasted(ȳ,y∂b::G) where {G} dxs_zip = map(((_, pb), ȳ₁) -> pb(ȳ₁), y∂b, ȳ) dxs = ntuple(len) do i collapse_nothings(map(StaticGetter{i}(), dxs_zip)) end (nothing, accum_sum(dxs[1]), map(unbroadcast, args, Base.tail(dxs))...) end - y, ∇broadcasted + y, Base.Fix2(∇broadcasted,y∂b) end @adjoint function broadcasted(::AbstractArrayStyle{0}, f, args...) From 072035832e0890d6f529d42818c2b88cf2e84fbf Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 18 Jun 2021 20:27:07 -0400 Subject: [PATCH 080/490] Revert "use StaticGetter for CuArrays too", ++ This reverts commit 458413654a5e53e7002c69c28013e14fab5957e3. --- src/lib/broadcast.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 1f3e1d076..2098001b7 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -167,18 +167,18 @@ _broadcast(f::F, x...) where F = materialize(broadcasted(f, x...)) collapse_nothings(xs::AbstractArray{Nothing}) = nothing collapse_nothings(xs) = xs -@adjoint function broadcasted(::AbstractArrayStyle, f::F, args...) where {F} +@adjoint function broadcasted(::AbstractArrayStyle, f, args...) len = inclen(args) y∂b = _broadcast((x...) -> _pullback(__context__, f, x...), args...) y = map(first, y∂b) - function ∇broadcasted(ȳ,y∂b::G) where {G} + function ∇broadcasted(ȳ) dxs_zip = map(((_, pb), ȳ₁) -> pb(ȳ₁), y∂b, ȳ) dxs = ntuple(len) do i collapse_nothings(map(StaticGetter{i}(), dxs_zip)) end (nothing, accum_sum(dxs[1]), map(unbroadcast, args, Base.tail(dxs))...) end - y, Base.Fix2(∇broadcasted,y∂b) + y, ∇broadcasted end @adjoint function broadcasted(::AbstractArrayStyle{0}, f, args...) @@ -227,8 +227,8 @@ end out = dual_function(f).(args...) 
eltype(out) <: Dual || return (out, _ -> nothing) y = map(x -> x.value, out) - _back(ȳ, geti) = unbroadcast(geti(args), ((a, b) -> a * geti(b.partials)).(ȳ, out)) - back(ȳ) = ntuple(i -> _back(ȳ, StaticGetter{i}()), N) + _back(ȳ, i) = unbroadcast(args[i], ((a, b) -> a*b.partials[i]).(ȳ, out)) + back(ȳ) = ntuple(i -> _back(ȳ, i), N) return y, back end From 922716865534a678631acf56bf680ab892f8eba5 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 18 Jun 2021 20:41:48 -0400 Subject: [PATCH 081/490] use forward mode sometimes --- src/lib/broadcast.jl | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 2098001b7..418003d9f 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -167,7 +167,13 @@ _broadcast(f::F, x...) where F = materialize(broadcasted(f, x...)) collapse_nothings(xs::AbstractArray{Nothing}) = nothing collapse_nothings(xs) = xs -@adjoint function broadcasted(::AbstractArrayStyle, f, args...) +@adjoint function broadcasted(::AbstractArrayStyle, f::F, args...) where {F} + # When safe, avoid generic broadcast & use ForwardDiff instead, often 100x faster + if all(a -> a isa Numeric{<:Real}, args) && Broadcast.combine_eltypes(f, args) <: Real + y, back = broadcast_forward(f, args...) + return y, ȳ -> (nothing, nothing, back(ȳ)...) + end + len = inclen(args) y∂b = _broadcast((x...) -> _pullback(__context__, f, x...), args...) y = map(first, y∂b) From 9578ae7e28e738fa2e3a73298dccb31d8bc96b23 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 18 Jun 2021 21:28:11 -0400 Subject: [PATCH 082/490] safer version --- src/lib/broadcast.jl | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 418003d9f..ddb57c5ce 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -167,13 +167,19 @@ _broadcast(f::F, x...) where F = materialize(broadcasted(f, x...)) collapse_nothings(xs::AbstractArray{Nothing}) = nothing collapse_nothings(xs) = xs +_purefun(::Type{F}) where {F<:Function} = isempty(fieldnames(F)) +_purefun(::Type{ComposedFunction{F,G}}) where {F,G} = _purefun(F) && _purefun(G) +_purefun(::Type) = false + @adjoint function broadcasted(::AbstractArrayStyle, f::F, args...) where {F} - # When safe, avoid generic broadcast & use ForwardDiff instead, often 100x faster - if all(a -> a isa Numeric{<:Real}, args) && Broadcast.combine_eltypes(f, args) <: Real + T = Broadcast.combine_eltypes(f, args) + # Avoid generic broadcasting in two easy cases: + if T == Bool + return f.(args...), _->nothing + elseif T <: Real && _purefun(F) && all(a -> a isa Numeric{<:Real}, args) y, back = broadcast_forward(f, args...) return y, ȳ -> (nothing, nothing, back(ȳ)...) end - len = inclen(args) y∂b = _broadcast((x...) -> _pullback(__context__, f, x...), args...) 
y = map(first, y∂b) From bdb1d1f3796a8403eedc8b5e99169e13f7b1d940 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 18 Jun 2021 21:44:02 -0400 Subject: [PATCH 083/490] power --- src/lib/broadcast.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index ddb57c5ce..b16572be6 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -171,6 +171,8 @@ _purefun(::Type{F}) where {F<:Function} = isempty(fieldnames(F)) _purefun(::Type{ComposedFunction{F,G}}) where {F,G} = _purefun(F) && _purefun(G) _purefun(::Type) = false +_purefun(::Type{typeof(^)}) = false # fix @testset "power" & @testset "diagonal hessian" + @adjoint function broadcasted(::AbstractArrayStyle, f::F, args...) where {F} T = Broadcast.combine_eltypes(f, args) # Avoid generic broadcasting in two easy cases: From f7e96b19bb618f662dac4138c8f9f022d3bb1ac2 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 18 Jun 2021 21:58:14 -0400 Subject: [PATCH 084/490] ComposedFunction is new --- src/lib/broadcast.jl | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index b16572be6..f189c8246 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -168,9 +168,10 @@ collapse_nothings(xs::AbstractArray{Nothing}) = nothing collapse_nothings(xs) = xs _purefun(::Type{F}) where {F<:Function} = isempty(fieldnames(F)) -_purefun(::Type{ComposedFunction{F,G}}) where {F,G} = _purefun(F) && _purefun(G) _purefun(::Type) = false - +if VERSION >= v"1.6" + _purefun(::Type{ComposedFunction{F,G}}) where {F,G} = _purefun(F) && _purefun(G) +end _purefun(::Type{typeof(^)}) = false # fix @testset "power" & @testset "diagonal hessian" @adjoint function broadcasted(::AbstractArrayStyle, f::F, args...) where {F} From c34b2b98f1c26304e0ba5242b2b37cfe67432e1f Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 18 Jun 2021 22:44:17 -0400 Subject: [PATCH 085/490] add & fix some tests --- src/lib/broadcast.jl | 2 +- test/features.jl | 32 ++++++++++++++++++++++---------- test/gradcheck.jl | 3 ++- 3 files changed, 25 insertions(+), 12 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index f189c8246..738508e53 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -179,7 +179,7 @@ _purefun(::Type{typeof(^)}) = false # fix @testset "power" & @testset "diagonal # Avoid generic broadcasting in two easy cases: if T == Bool return f.(args...), _->nothing - elseif T <: Real && _purefun(F) && all(a -> a isa Numeric{<:Real}, args) + elseif isconcretetype(T) && T <: Real && _purefun(F) && all(a -> a isa Numeric{<:Real}, args) y, back = broadcast_forward(f, args...) return y, ȳ -> (nothing, nothing, back(ȳ)...) 
end diff --git a/test/features.jl b/test/features.jl index d10d60c19..57f743958 100644 --- a/test/features.jl +++ b/test/features.jl @@ -500,14 +500,26 @@ end @test 150_000_000 > @allocated gradient(loss, ones(1000,1000)) end -@testset "tuples & broadcasting" begin - @test gradient(x -> sum(x .+ ones(2,2)), (1,2)) == ((2,2),) - @test gradient(x -> sum(x .+ ones(2,2)), (1,)) == ((4,),) - @test gradient(x -> sum(x .+ ones(2,1)), (1,2)) == ((1,1),) - - # https://github.com/FluxML/Zygote.jl/issues/975 - gt = gradient((x,p) -> prod(x .^ p), [3,4], (1,2)) - gv = gradient((x,p) -> prod(x .^ p), [3,4], [1,2]) - @test gt[1] == gv[1] - @test collect(gt[2]) ≈ gv[2] +@testset "tricky broadcasting" begin + @test gradient(x -> sum(x .+ ones(2,2)), (1,2)) == ((2,2),) + @test gradient(x -> sum(x .+ ones(2,2)), (1,)) == ((4,),) + @test gradient(x -> sum(x .+ ones(2,1)), (1,2)) == ((1,1),) + + # https://github.com/FluxML/Zygote.jl/issues/975 + gt = gradient((x,p) -> prod(x .^ p), [3,4], (1,2)) + gv = gradient((x,p) -> prod(x .^ p), [3,4], [1,2]) + @test gt[1] == gv[1] + @test collect(gt[2]) ≈ gv[2] + + # closure captures y -- can't use ForwardDiff + @test gradient((x,y) -> sum((z->z^2+y[1]).(x)), [1,2,3], [4,5]) == ([2, 4, 6], [3, 0]) + @test gradient((x,y) -> sum((z->z^2+y[1]), x), [1,2,3], [4,5]) == ([2, 4, 6], [3, 0]) + @test gradient((x,y) -> sum(map((z->z^2+y[1]), x)), [1,2,3], [4,5]) == ([2, 4, 6], [3, 0]) + @test gradient((x,y) -> mapreduce((z->z^2+y[1]), +, x), [1,2,3], [4,5]) == ([2, 4, 6], [3, 0]) + + # type unstable + @test gradient(xs -> sum((x -> x<2 ? false : x^2).(xs)), [1,2,3])[1][2:3] == [4, 6] + @test gradient(xs -> sum((x -> x<2 ? false : x^2), xs), [1,2,3])[1][2:3] == [4, 6] + @test gradient(xs -> sum(map((x -> x<2 ? false : x^2), xs)), [1,2,3])[1][2:3] == [4, 6] + @test gradient(xs -> mapreduce((x -> x<2 ? false : x^2), +, xs), [1,2,3])[1][2:3] == [4, 6] end diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 5d95f3a5c..0baa0bb53 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1295,7 +1295,8 @@ end end @testset "broadcast" begin - @test gradient(x -> sum(sin.(x)), Diagonal(randn(3)))[1][2] == 1 + # Before https://github.com/FluxML/Zygote.jl/pull/1001 this gave [1 1 1; 1 0 1; 1 1 -1] + @test gradient(x -> sum(sin.(x)), Diagonal([0,pi/2,pi]))[1] ≈ [1 0 0; 0 0 0; 0 0 -1] a = rand(3) b = rand(2,2) From 2c10638b894e86ebd104269a56593ed36188c955 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 18 Jun 2021 23:57:27 -0400 Subject: [PATCH 086/490] widen types --- src/lib/broadcast.jl | 9 ++++++++- test/features.jl | 8 ++++++++ 2 files changed, 16 insertions(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 738508e53..8e3a0a2bb 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -174,12 +174,19 @@ if VERSION >= v"1.6" end _purefun(::Type{typeof(^)}) = false # fix @testset "power" & @testset "diagonal hessian" +_dualsafe(x::Numeric{<:Real}) = true +_dualsafe(x::Ref{<:Numeric{<:Real}}) = true +_dualsafe(x::Val) = true +_dualsafe(x::Type) = true +_dualsafe(x::Symbol) = true +_dualsafe(x) = false + @adjoint function broadcasted(::AbstractArrayStyle, f::F, args...) 
where {F} T = Broadcast.combine_eltypes(f, args) # Avoid generic broadcasting in two easy cases: if T == Bool return f.(args...), _->nothing - elseif isconcretetype(T) && T <: Real && _purefun(F) && all(a -> a isa Numeric{<:Real}, args) + elseif T <: Real && isconcretetype(T) && _purefun(F) && all(_dualsafe, args) y, back = broadcast_forward(f, args...) return y, ȳ -> (nothing, nothing, back(ȳ)...) end diff --git a/test/features.jl b/test/features.jl index 57f743958..2bd2f828b 100644 --- a/test/features.jl +++ b/test/features.jl @@ -522,4 +522,12 @@ end @test gradient(xs -> sum((x -> x<2 ? false : x^2), xs), [1,2,3])[1][2:3] == [4, 6] @test gradient(xs -> sum(map((x -> x<2 ? false : x^2), xs)), [1,2,3])[1][2:3] == [4, 6] @test gradient(xs -> mapreduce((x -> x<2 ? false : x^2), +, xs), [1,2,3])[1][2:3] == [4, 6] + + # with Ref, Val, Symbol + @test gradient(x -> sum(x .+ Ref(x[1])), [1,2,3]) == ([4,1,1],) + @test gradient(x -> sum(x .+ (x[1],)), [1,2,3]) == ([4,1,1],) + @test gradient(x -> sum((first∘tuple).(x, :ignore)), [1,2,3]) == ([1,1,1],) + @test gradient(x -> sum((first∘tuple).(x, Symbol)), [1,2,3]) == ([1,1,1],) + _f(x,::Val{y}) where {y} = x/y + @test gradient(x -> sum(_f.(x, Val(2))), [1,2,3]) == ([0.5, 0.5, 0.5],) end From 86c3bb49314ea73ec19a2a4a6b70d6c1a44dc82b Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 19 Jun 2021 10:25:42 -0400 Subject: [PATCH 087/490] simpler purity check --- src/lib/broadcast.jl | 5 +---- test/features.jl | 7 ++++++- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 8e3a0a2bb..49efbc279 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -167,11 +167,8 @@ _broadcast(f::F, x...) where F = materialize(broadcasted(f, x...)) collapse_nothings(xs::AbstractArray{Nothing}) = nothing collapse_nothings(xs) = xs -_purefun(::Type{F}) where {F<:Function} = isempty(fieldnames(F)) +_purefun(::Type{F}) where {F<:Function} = Base.issingletontype(F) _purefun(::Type) = false -if VERSION >= v"1.6" - _purefun(::Type{ComposedFunction{F,G}}) where {F,G} = _purefun(F) && _purefun(G) -end _purefun(::Type{typeof(^)}) = false # fix @testset "power" & @testset "diagonal hessian" _dualsafe(x::Numeric{<:Real}) = true diff --git a/test/features.jl b/test/features.jl index 2bd2f828b..dd39a75e8 100644 --- a/test/features.jl +++ b/test/features.jl @@ -528,6 +528,11 @@ end @test gradient(x -> sum(x .+ (x[1],)), [1,2,3]) == ([4,1,1],) @test gradient(x -> sum((first∘tuple).(x, :ignore)), [1,2,3]) == ([1,1,1],) @test gradient(x -> sum((first∘tuple).(x, Symbol)), [1,2,3]) == ([1,1,1],) - _f(x,::Val{y}) where {y} = x/y + _f(x,::Val{y}=Val(2)) where {y} = x/y @test gradient(x -> sum(_f.(x, Val(2))), [1,2,3]) == ([0.5, 0.5, 0.5],) + @test gradient(x -> sum(_f.(x)), [1,2,3]) == ([0.5, 0.5, 0.5],) + @test gradient(x -> sum(map(_f, x)), [1,2,3]) == ([0.5, 0.5, 0.5],) + + @test gradient(x -> sum(x ./ [1,2,4]), [1,2,pi]) == ([1.0, 0.5, 0.25],) + @test gradient(x -> sum(map(/, x, [1,2,4])), [1,2,pi]) == ([1.0, 0.5, 0.25],) end From 570db214af521bd20d64b1c19b39e0f0443cec7c Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 19 Jun 2021 10:27:30 -0400 Subject: [PATCH 088/490] use purity check in map, too? 
--- src/lib/array.jl | 22 +++++++++++++++------- 1 file changed, 15 insertions(+), 7 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index aa939b2f7..7b597f32c 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -215,19 +215,27 @@ _tryreverse(m, x) = x _tryreverse(m::typeof(map), x::Union{AbstractVector, Tuple}) = reverse(x) for (mapfunc,∇mapfunc) in [(:map,:∇map),(:pmap,:∇pmap)] - @eval function $∇mapfunc(cx, f, args...) + @eval function $∇mapfunc(cx, f::F, args...) where {F} ys_and_backs = $mapfunc((args...) -> _pullback(cx, f, args...), args...) if isempty(ys_and_backs) ys_and_backs, _ -> nothing else - ys, backs = unzip(ys_and_backs) + ys = map(first, ys_and_backs) ys, function (Δ) isnothing(Δ) && return nothing - # Apply pullbacks in reverse order. Needed for correctness if `f` is stateful. - Δf_and_args_zipped = $mapfunc((f, δ) -> f(δ), _tryreverse($mapfunc, backs, Δ)...) - Δf_and_args = unzip(_tryreverse($mapfunc, Δf_and_args_zipped)) - Δf = reduce(accum, Δf_and_args[1]) - (Δf, Δf_and_args[2:end]...) + if _purefun(F) && length(args) == 1 + Δarg = $mapfunc(((_,pb), δ) -> last(pb(δ)), ys_and_backs, Δ) # No unzip needed + (nothing, Δarg) + elseif _purefun(F) + Δargs = unzip($mapfunc(((_,pb), δ) -> Base.tail(pb(δ)), ys_and_backs, Δ)) + (nothing, Δargs...) + else + # Apply pullbacks in reverse order. Needed for correctness if `f` is stateful. + Δf_and_args_zipped = $mapfunc(((_,pb), δ) -> pb(δ), _tryreverse($mapfunc, ys_and_backs, Δ)...) + Δf_and_args = unzip(_tryreverse($mapfunc, Δf_and_args_zipped)) + Δf = reduce(accum, Δf_and_args[1]) + (Δf, Δf_and_args[2:end]...) + end end end end From 72eb6810c5712aa5eb63ed4e2573f9e77ce593a5 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 19 Jun 2021 12:30:12 -0400 Subject: [PATCH 089/490] simplify --- src/lib/array.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 7b597f32c..aaf122cb4 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -223,10 +223,10 @@ for (mapfunc,∇mapfunc) in [(:map,:∇map),(:pmap,:∇pmap)] ys = map(first, ys_and_backs) ys, function (Δ) isnothing(Δ) && return nothing - if _purefun(F) && length(args) == 1 + if Base.issingletontype(F) && length(args) == 1 Δarg = $mapfunc(((_,pb), δ) -> last(pb(δ)), ys_and_backs, Δ) # No unzip needed (nothing, Δarg) - elseif _purefun(F) + elseif Base.issingletontype(F) # Ensures `f` is pure: nothing captured & no state Δargs = unzip($mapfunc(((_,pb), δ) -> Base.tail(pb(δ)), ys_and_backs, Δ)) (nothing, Δargs...) else From fbed3cec876e4c81614f6353dcf91604b1c441fc Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 19 Jun 2021 12:32:04 -0400 Subject: [PATCH 090/490] use Base.ismutabletype --- src/lib/lib.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/lib/lib.jl b/src/lib/lib.jl index 045a494de..2e57850f0 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -1,4 +1,4 @@ -using Base: RefValue +using Base: RefValue, ismutabletype # Interfaces @@ -278,19 +278,19 @@ Jnew{T}(g) where T = Jnew{T,typeof(g)}(g) @adjoint! function __new__(T, args...) x = __new__(T, args...) - g = !T.mutable || fieldcount(T) == 0 ? nothing : grad_mut(__context__, x) + g = !ismutabletype(T) || fieldcount(T) == 0 ? nothing : grad_mut(__context__, x) x, Jnew{T,typeof(g),false}(g) end @adjoint! function __splatnew__(T, args) x = __splatnew__(T, args) - g = !T.mutable || fieldcount(T) == 0 ? 
nothing : grad_mut(__context__, x) + g = !ismutabletype(T) || fieldcount(T) == 0 ? nothing : grad_mut(__context__, x) x, Jnew{T,typeof(g),true}(g) end # TODO captured mutables + multiple calls to `back` @generated function (back::Jnew{T,G,false})(Δ::Union{NamedTuple,Nothing,RefValue}) where {T,G} - !T.mutable && Δ == Nothing && return :nothing + !ismutabletype(T) && Δ == Nothing && return :nothing Δ = G == Nothing ? :Δ : Δ <: RefValue ? :(back.g[]) : :(accum(back.g[], Δ)) @@ -302,7 +302,7 @@ end end @generated function (back::Jnew{T,G,true})(Δ::Union{NamedTuple,Nothing,RefValue}) where {T,G} - !T.mutable && Δ == Nothing && return :nothing + !ismutabletype(T) && Δ == Nothing && return :nothing Δ = G == Nothing ? :Δ : :(back.g) quote x̄ = $Δ From 58a13685428fa96b5cc1b055dde2c48885926bd4 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 19 Jun 2021 12:42:03 -0400 Subject: [PATCH 091/490] add version check --- src/lib/lib.jl | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/src/lib/lib.jl b/src/lib/lib.jl index 2e57850f0..762939ae3 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -1,4 +1,10 @@ -using Base: RefValue, ismutabletype +using Base: RefValue + +if VERSION > v"1.7.0-DEV.204" + using Base: ismutabletype +else + ismutabletype(::Type{T}) where T = T.mutable +end # Interfaces From ec1a41bf86d59ac6017bb76783d34c9a6eeccf93 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 19 Jun 2021 12:57:47 -0400 Subject: [PATCH 092/490] make ismutabletype look like Base's Co-authored-by: Johnny Chen --- src/lib/lib.jl | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/lib/lib.jl b/src/lib/lib.jl index 762939ae3..96422d78c 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -3,7 +3,10 @@ using Base: RefValue if VERSION > v"1.7.0-DEV.204" using Base: ismutabletype else - ismutabletype(::Type{T}) where T = T.mutable + function ismutabletype(@nospecialize(t::Type)) + t = Base.unwrap_unionall(t) + return isa(t, DataType) && t.mutable + end end # Interfaces From 8f4354f752525d9e9dc1a9bfbebf5c9f379681eb Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Sun, 20 Jun 2021 20:44:25 +0100 Subject: [PATCH 093/490] add tests to see if sum(f,x) is broken on GPU --- test/cuda.jl | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/test/cuda.jl b/test/cuda.jl index a54402999..59a7dda56 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -26,6 +26,17 @@ end @test g_gpu |> collect ≈ g end +@testset "sum(f, x)" begin + a = Float32.(-4:4) + a_gpu = a |> cu + + f(x) = sum(abs, x) + g = gradient(f, a)[1] + g_gpu = gradient(f, a_gpu)[1] + @test g_gpu isa CuArray + @test g_gpu |> collect ≈ g +end + @testset "jacobian" begin v1 = cu(collect(1:3f0)) From 075f530aa6efd1c1976216c297716f7e5775dcdb Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Sun, 20 Jun 2021 20:55:53 +0100 Subject: [PATCH 094/490] add back old way of doing sum(f, xs) for CuArrays only --- src/lib/broadcast.jl | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 879f62c1a..9e89d43b9 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -254,7 +254,14 @@ end placeholder = similar(xs) sum(xs, dims = dims), Δ -> (placeholder .= Δ,) end - + + # Make sure sum(f, ::CuArray) uses broadcase through forward-mode defined above + # Not the ChainRules.rrule which will use the Zygote.Context and thus not be GPU compatible + @adjoint 
function sum(f, xs::CuArray; kws...) + @assert !haskey(kws, :init) # TODO add init support (julia 1.6) + return pullback(__context__, (f, xs) -> sum(f.(xs); kws...), f, xs) + end + @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:CUDA.CuArray} Base.convert(T, xs), Δ -> (nothing, Base.convert(Array, Δ),) end From 22602b3633a7857e74720a583eb1a6c68dea9dcc Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Sun, 20 Jun 2021 21:33:21 +0100 Subject: [PATCH 095/490] Qualified names --- src/lib/broadcast.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 9e89d43b9..0689c1627 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -257,7 +257,7 @@ end # Make sure sum(f, ::CuArray) uses broadcase through forward-mode defined above # Not the ChainRules.rrule which will use the Zygote.Context and thus not be GPU compatible - @adjoint function sum(f, xs::CuArray; kws...) + @adjoint function sum(f, xs::CUDA.CuArray; kws...) @assert !haskey(kws, :init) # TODO add init support (julia 1.6) return pullback(__context__, (f, xs) -> sum(f.(xs); kws...), f, xs) end From 7776fdd20cb1e0f4f8eae93208b24c7985b8f552 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Sun, 20 Jun 2021 21:43:29 +0100 Subject: [PATCH 096/490] don't test gradient of abs at 0 --- test/cuda.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/cuda.jl b/test/cuda.jl index 59a7dda56..7f34aa99f 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -27,7 +27,7 @@ end end @testset "sum(f, x)" begin - a = Float32.(-4:4) + a = Float32.([-1.5, -9.0, 2.4, -1.3, 0.01]) a_gpu = a |> cu f(x) = sum(abs, x) From bd8c5fb2e9a7659b1ba56d4dd6c4aa6a2822b1a8 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Sun, 20 Jun 2021 21:43:55 +0100 Subject: [PATCH 097/490] Bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 60476fca1..e7109b349 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.13" +version = "0.6.14" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 7ebb75b01ab26faf7db0efc7ca53239e371cc974 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Mon, 21 Jun 2021 21:46:39 -0400 Subject: [PATCH 098/490] rename, tidy, improve --- src/lib/broadcast.jl | 34 ++++++++++++++++++++-------------- test/features.jl | 6 ++++++ 2 files changed, 26 insertions(+), 14 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 49efbc279..46882acad 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -167,25 +167,31 @@ _broadcast(f::F, x...) 
where F = materialize(broadcasted(f, x...)) collapse_nothings(xs::AbstractArray{Nothing}) = nothing collapse_nothings(xs) = xs -_purefun(::Type{F}) where {F<:Function} = Base.issingletontype(F) -_purefun(::Type) = false -_purefun(::Type{typeof(^)}) = false # fix @testset "power" & @testset "diagonal hessian" +_dual_purefun(::Type{F}) where {F<:Function} = Base.issingletontype(F) +_dual_purefun(::Type) = false +_dual_purefun(::Type{typeof(^)}) = false # avoid DomainError from negative powers -_dualsafe(x::Numeric{<:Real}) = true -_dualsafe(x::Ref{<:Numeric{<:Real}}) = true -_dualsafe(x::Val) = true -_dualsafe(x::Type) = true -_dualsafe(x::Symbol) = true -_dualsafe(x) = false +_dual_safearg(x::Numeric{<:Real}) = true +_dual_safearg(x::Ref{<:Numeric{<:Real}}) = true +_dual_safearg(x::Union{Type,Val,Symbol}) = true # non-differentiable types +_dual_safearg(x) = false + +# This is Broadcast.combine_eltypes but with dual eltypes: +_combine_dual_eltypes(f, args::Tuple) = + Broadcast.promote_typejoin_union(Base._return_type(f, map(_dual_eltype, args))) +_dual_eltype(x::Numeric{T}) where {T<:Real} = Dual{Nothing, T, 1} # typeof(Dual(one(T),true)) +_dual_eltype(x) = eltype(x) @adjoint function broadcasted(::AbstractArrayStyle, f::F, args...) where {F} - T = Broadcast.combine_eltypes(f, args) + TD = _combine_dual_eltypes(f, args) # Avoid generic broadcasting in two easy cases: - if T == Bool + if TD <: Dual && isconcretetype(TD) + if _dual_purefun(F) && all(_dual_safearg, args) + y, back = broadcast_forward(f, args...) + return y, ȳ -> (nothing, nothing, back(ȳ)...) + end + elseif TD <: Real && isconcretetype(TD) return f.(args...), _->nothing - elseif T <: Real && isconcretetype(T) && _purefun(F) && all(_dualsafe, args) - y, back = broadcast_forward(f, args...) - return y, ȳ -> (nothing, nothing, back(ȳ)...) end len = inclen(args) y∂b = _broadcast((x...) -> _pullback(__context__, f, x...), args...) 
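
The gating above only takes the ForwardDiff dual fast path when the broadcasted function is a
singleton (captures nothing) and every argument passes `_dual_safearg`; everything else still goes
through the generic `_pullback` broadcast. A small usage sketch, not part of the patch — the second
expected result matches a test added in the features.jl hunk just below:

    using Zygote

    x, y = [1.0, 2.0, 3.0], [4.0, 5.0]

    # `sin` is a singleton function applied to a real array, so this broadcast
    # is eligible for the dual-number fast path.
    Zygote.gradient(v -> sum(sin.(v)), x)                        # (cos.(x),)

    # The closure captures `y`, so its type is not a singleton and the call falls
    # back to the generic pullback path; the captured array still gets a gradient.
    Zygote.gradient((v, w) -> sum((z -> z^2 + w[1]).(v)), x, y)  # ([2.0, 4.0, 6.0], [3.0, 0.0])
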
diff --git a/test/features.jl b/test/features.jl index dd39a75e8..f471a29e7 100644 --- a/test/features.jl +++ b/test/features.jl @@ -535,4 +535,10 @@ end @test gradient(x -> sum(x ./ [1,2,4]), [1,2,pi]) == ([1.0, 0.5, 0.25],) @test gradient(x -> sum(map(/, x, [1,2,4])), [1,2,pi]) == ([1.0, 0.5, 0.25],) + + # negative powers + @test gradient((x,p) -> sum(x .^ p), [1.0,2.0,4.0], [1,-1,2])[1] ≈ [1.0, -0.25, 8.0] + @test gradient((x,p) -> sum(x .^ p), [1.0,2.0,4.0], -1)[1] ≈ [-1.0, -0.25, -0.0625] + @test gradient((x,p) -> sum(z -> z^p, x), [1.0,2.0,4.0], -1)[1] ≈ [-1.0, -0.25, -0.0625] + @test gradient((x,p) -> mapreduce(z -> z^p, +, x), [1.0,2.0,4.0], -1)[1] ≈ [-1.0, -0.25, -0.0625] end From 94712ccbc41909a58a10c48ee7e14be26fdd79b0 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Mon, 21 Jun 2021 21:50:13 -0400 Subject: [PATCH 099/490] revert some of that due to 20% slowdown --- src/lib/broadcast.jl | 20 ++++++-------------- 1 file changed, 6 insertions(+), 14 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 46882acad..32e307e26 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -176,22 +176,14 @@ _dual_safearg(x::Ref{<:Numeric{<:Real}}) = true _dual_safearg(x::Union{Type,Val,Symbol}) = true # non-differentiable types _dual_safearg(x) = false -# This is Broadcast.combine_eltypes but with dual eltypes: -_combine_dual_eltypes(f, args::Tuple) = - Broadcast.promote_typejoin_union(Base._return_type(f, map(_dual_eltype, args))) -_dual_eltype(x::Numeric{T}) where {T<:Real} = Dual{Nothing, T, 1} # typeof(Dual(one(T),true)) -_dual_eltype(x) = eltype(x) - @adjoint function broadcasted(::AbstractArrayStyle, f::F, args...) where {F} - TD = _combine_dual_eltypes(f, args) + T = Broadcast.combine_eltypes(f, args) # Avoid generic broadcasting in two easy cases: - if TD <: Dual && isconcretetype(TD) - if _dual_purefun(F) && all(_dual_safearg, args) - y, back = broadcast_forward(f, args...) - return y, ȳ -> (nothing, nothing, back(ȳ)...) - end - elseif TD <: Real && isconcretetype(TD) - return f.(args...), _->nothing + if T == Bool + return f.(args...), _->nothing + elseif T <: Real && isconcretetype(T) && _dual_purefun(F) && all(_dual_safearg, args) + y, back = broadcast_forward(f, args...) + return y, ȳ -> (nothing, nothing, back(ȳ)...) end len = inclen(args) y∂b = _broadcast((x...) -> _pullback(__context__, f, x...), args...) From 8424c3e6c808196afbc665b7208993cdb356c0f8 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Mon, 21 Jun 2021 22:19:21 -0400 Subject: [PATCH 100/490] delete an unused definition --- src/lib/broadcast.jl | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 32e307e26..92ecd1d0f 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -219,7 +219,7 @@ end @adjoint! (b::typeof(broadcast))(f, args...) = _pullback(__context__, broadcasted, f, args...) 
-# Forward Mode (mainly necessary for CUDA) +# Forward Mode -- necessary for CUDA, also used as a fast path above import ForwardDiff using ForwardDiff: Dual @@ -227,9 +227,6 @@ using ForwardDiff: Dual dual(x, p) = x dual(x::Real, p) = Dual(x, p) -dualtype(::Type{Dual{G,T,P}}) where {G,T,P} = T -dualtype(T) = T - function dual_function(f::F) where F function (args::Vararg{Any,N}) where N ds = map(args, ntuple(identity,Val(N))) do x, i From 163e1731d0a3bbc71464772d61501572be931208 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Tue, 22 Jun 2021 18:45:47 +0100 Subject: [PATCH 101/490] use rrules even when all the arguments are types --- src/compiler/interface2.jl | 10 ++++++---- test/chainrules.jl | 18 ++++++++++++++++++ 2 files changed, 24 insertions(+), 4 deletions(-) diff --git a/src/compiler/interface2.jl b/src/compiler/interface2.jl index ac4a5a76a..f0c4fa690 100644 --- a/src/compiler/interface2.jl +++ b/src/compiler/interface2.jl @@ -7,22 +7,24 @@ function edge!(m::IRTools.Meta, edge::Core.MethodInstance) end @generated function _pullback(ctx::AContext, f, args...) - T = Tuple{f,args...} - ignore_sig(T) && return :(f(args...), Pullback{$T}(())) - + # Try using ChainRulesCore if is_kwfunc(f, args...) # if it is_kw then `args[1]` are the keyword args, `args[2]` is actual function cr_T = Tuple{ZygoteRuleConfig{ctx}, args[2:end]...} chain_rrule_f = :chain_rrule_kw else cr_T = Tuple{ZygoteRuleConfig{ctx}, f, args...} + Core.println("cr_T=", cr_T) chain_rrule_f = :chain_rrule end hascr, cr_edge = has_chain_rrule(cr_T) - hascr && return :($chain_rrule_f(ZygoteRuleConfig(ctx), f, args...)) + # No ChainRule, going to have to work it out. + T = Tuple{f,args...} + ignore_sig(T) && return :(f(args...), Pullback{$T}(())) + g = try _generate_pullback_via_decomposition(T) catch e e end g === nothing && return :(f(args...), Pullback{$T}((f,))) meta, forw, _ = g diff --git a/test/chainrules.jl b/test/chainrules.jl index 519c10ad6..66058c93d 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -214,6 +214,24 @@ using ChainRulesCore, ChainRulesTestUtils, Zygote @test (nothing,) == Zygote.gradient(x->not_diff_kw_eg(x, 2), 10.4) @test (nothing,) == Zygote.gradient(x->not_diff_kw_eg(x, 2; kw=2.0), 10.4) end + + @testset "Type only rrule" begin + struct StructForTestingTypeOnlyRRules{T} + x::T + end + StructForTestingTypeOnlyRRules() = StructForTestingTypeOnlyRRules(1.0) + + function ChainRulesCore.rrule(P::Type{<:StructForTestingTypeOnlyRRules}) + # notice here we mess with the primal doing 2.0 rather than 1.0, this is for testing purposes + # and also because apparently people actually want to do this. 
Weird, but 🤷 + # https://github.com/SciML/SciMLBase.jl/issues/69#issuecomment-865639754 + P(2.0), _->NoTangent() + end + + @assert StructForTestingTypeOnlyRRules().x == 1.0 + aug_primal_val, _ = Zygote.pullback(x->StructForTestingTypeOnlyRRules(), 1.2) + @test aug_primal_val.x == 2.0 + end end @testset "ChainRulesCore.rrule_via_ad" begin From 2dab48fdfaddd8a908c341eb011ef82817fba0f9 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Wed, 23 Jun 2021 17:50:32 +0100 Subject: [PATCH 102/490] Remove leftove debugging statements --- src/compiler/interface2.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/src/compiler/interface2.jl b/src/compiler/interface2.jl index f0c4fa690..0f7da4b32 100644 --- a/src/compiler/interface2.jl +++ b/src/compiler/interface2.jl @@ -14,7 +14,6 @@ end chain_rrule_f = :chain_rrule_kw else cr_T = Tuple{ZygoteRuleConfig{ctx}, f, args...} - Core.println("cr_T=", cr_T) chain_rrule_f = :chain_rrule end From b9f186f8f044ad94b469773ae8fe722fea457983 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Thu, 24 Jun 2021 08:48:27 +0100 Subject: [PATCH 103/490] Update test/chainrules.jl Co-authored-by: Dhairya Gandhi --- test/chainrules.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/chainrules.jl b/test/chainrules.jl index 66058c93d..32bdd3799 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -225,7 +225,7 @@ using ChainRulesCore, ChainRulesTestUtils, Zygote # notice here we mess with the primal doing 2.0 rather than 1.0, this is for testing purposes # and also because apparently people actually want to do this. Weird, but 🤷 # https://github.com/SciML/SciMLBase.jl/issues/69#issuecomment-865639754 - P(2.0), _->NoTangent() + P(2.0), _ -> (NoTangent(),) end @assert StructForTestingTypeOnlyRRules().x == 1.0 From 46ef05ef2f839b7d9d95453654f223cf07c89a6e Mon Sep 17 00:00:00 2001 From: Akash Garg Date: Fri, 25 Jun 2021 12:48:08 -0700 Subject: [PATCH 104/490] Use abstract GPU types for broadcast. --- src/lib/broadcast.jl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 0689c1627..e4b8a431a 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -233,7 +233,7 @@ end end @init @require CUDA="052768ef-5323-5732-b1bb-66c8b64840ba" begin - const CuArrayStyle = CUDA.CuArrayStyle + const CuArrayStyle = CUDA.AbstractGPUArrayStyle if isdefined(CUDA, :cufunc) @eval @adjoint function broadcasted(::CuArrayStyle, f, args...) @@ -247,22 +247,22 @@ end end end - @adjoint CUDA.CuArray{N,T}(xs::Array) where {N,T} = - CUDA.CuArray{N,T}(xs), Δ -> (convert(Array, Δ), ) + @adjoint CUDA.DenseArray{N,T}(xs::Array) where {N,T} = + CUDA.DenseArray{N,T}(xs), Δ -> (convert(Array, Δ), ) - @adjoint function sum(xs::CUDA.CuArray; dims = :) + @adjoint function sum(xs::CUDA.DenseArray; dims = :) placeholder = similar(xs) sum(xs, dims = dims), Δ -> (placeholder .= Δ,) end # Make sure sum(f, ::CuArray) uses broadcase through forward-mode defined above # Not the ChainRules.rrule which will use the Zygote.Context and thus not be GPU compatible - @adjoint function sum(f, xs::CUDA.CuArray; kws...) + @adjoint function sum(f, xs::CUDA.DenseArray; kws...) 
@assert !haskey(kws, :init) # TODO add init support (julia 1.6) return pullback(__context__, (f, xs) -> sum(f.(xs); kws...), f, xs) end - @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:CUDA.CuArray} + @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:CUDA.DenseArray} Base.convert(T, xs), Δ -> (nothing, Base.convert(Array, Δ),) end From c49222c7f144670cdb20c680d1a69ae97fadb4c6 Mon Sep 17 00:00:00 2001 From: Akash Garg Date: Fri, 25 Jun 2021 14:43:22 -0700 Subject: [PATCH 105/490] Fix typo DenseArray -> DenseCuArray --- src/lib/broadcast.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index e4b8a431a..72929a592 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -247,22 +247,22 @@ end end end - @adjoint CUDA.DenseArray{N,T}(xs::Array) where {N,T} = - CUDA.DenseArray{N,T}(xs), Δ -> (convert(Array, Δ), ) + @adjoint CUDA.DenseCuArray{N,T}(xs::Array) where {N,T} = + CUDA.DenseCuArray{N,T}(xs), Δ -> (convert(Array, Δ), ) - @adjoint function sum(xs::CUDA.DenseArray; dims = :) + @adjoint function sum(xs::CUDA.DenseCuArray; dims = :) placeholder = similar(xs) sum(xs, dims = dims), Δ -> (placeholder .= Δ,) end # Make sure sum(f, ::CuArray) uses broadcase through forward-mode defined above # Not the ChainRules.rrule which will use the Zygote.Context and thus not be GPU compatible - @adjoint function sum(f, xs::CUDA.DenseArray; kws...) + @adjoint function sum(f, xs::CUDA.DenseCuArray; kws...) @assert !haskey(kws, :init) # TODO add init support (julia 1.6) return pullback(__context__, (f, xs) -> sum(f.(xs); kws...), f, xs) end - @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:CUDA.DenseArray} + @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:CUDA.DenseCuArray} Base.convert(T, xs), Δ -> (nothing, Base.convert(Array, Δ),) end From 41b6862219a00dcd432030db0620a3df129232b2 Mon Sep 17 00:00:00 2001 From: Akash Garg Date: Fri, 25 Jun 2021 14:48:38 -0700 Subject: [PATCH 106/490] Adjoint for CuArray --- src/lib/broadcast.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 72929a592..0192507ef 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -247,8 +247,8 @@ end end end - @adjoint CUDA.DenseCuArray{N,T}(xs::Array) where {N,T} = - CUDA.DenseCuArray{N,T}(xs), Δ -> (convert(Array, Δ), ) + @adjoint CUDA.CuArray{N,T}(xs::Array) where {N,T} = + CUDA.CuArray{N,T}(xs), Δ -> (convert(Array, Δ), ) @adjoint function sum(xs::CUDA.DenseCuArray; dims = :) placeholder = similar(xs) From d73dabe42ddc49dee5e4da9315615f541444018d Mon Sep 17 00:00:00 2001 From: Akash Garg Date: Fri, 25 Jun 2021 15:13:24 -0700 Subject: [PATCH 107/490] updating broadcast to use AbstractGPUArray --- src/lib/broadcast.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 0192507ef..a5fa1adae 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -250,19 +250,19 @@ end @adjoint CUDA.CuArray{N,T}(xs::Array) where {N,T} = CUDA.CuArray{N,T}(xs), Δ -> (convert(Array, Δ), ) - @adjoint function sum(xs::CUDA.DenseCuArray; dims = :) + @adjoint function sum(xs::CUDA.AbstractGPUArray; dims = :) placeholder = similar(xs) sum(xs, dims = dims), Δ -> (placeholder .= Δ,) end # Make sure sum(f, ::CuArray) uses broadcase through forward-mode defined above # Not the ChainRules.rrule which will use the Zygote.Context and thus not be GPU compatible - @adjoint 
function sum(f, xs::CUDA.DenseCuArray; kws...) + @adjoint function sum(f, xs::CUDA.AbstractGPUArray; kws...) @assert !haskey(kws, :init) # TODO add init support (julia 1.6) return pullback(__context__, (f, xs) -> sum(f.(xs); kws...), f, xs) end - @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:CUDA.DenseCuArray} + @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:CUDA.AbstractGPUArray} Base.convert(T, xs), Δ -> (nothing, Base.convert(Array, Δ),) end From 8e0ed5d29cdbcedcaa6ac62d661eb96bf21ee8ab Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 26 Jun 2021 13:35:16 -0400 Subject: [PATCH 108/490] add withgradient function --- docs/src/utils.md | 2 ++ src/Zygote.jl | 2 +- src/compiler/interface.jl | 22 ++++++++++++++++++++++ src/lib/grad.jl | 24 +++++++++++++++++++----- test/features.jl | 4 ++++ test/utils.jl | 1 + 6 files changed, 49 insertions(+), 6 deletions(-) diff --git a/docs/src/utils.md b/docs/src/utils.md index a03ab1c25..309e43786 100644 --- a/docs/src/utils.md +++ b/docs/src/utils.md @@ -14,6 +14,8 @@ in other words you could have written them easily yourself, but they live in Zygote for convenience. ```@docs +Zygote.withgradient +Zygote.withjacobian Zygote.@showgrad Zygote.hook Zygote.dropgrad diff --git a/src/Zygote.jl b/src/Zygote.jl index 895e65f3c..ae023213c 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -13,7 +13,7 @@ using MacroTools, Requires using MacroTools: @forward import Distributed: pmap, CachingPool, workers -export Params, gradient, jacobian, hessian, diaghessian, pullback, pushforward, @code_adjoint +export Params, withgradient, gradient, withjacobian, jacobian, hessian, diaghessian, pullback, pushforward, @code_adjoint export rrule_via_ad const Numeric{T<:Number} = Union{T, AbstractArray{<:T}} diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 86e847dc4..b631763b0 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -53,6 +53,9 @@ Returns a tuple containing `∂f/∂x` for each argument `x`, the derivative (for scalar x) or the gradient. `f(args...)` must be a real number, see [`jacobian`](@ref) for array output. + +See also [`withgradient`](@ref) to keep the value `f(args...)`, +and `pullback`](@ref) for value and back-propagator. """ function gradient(f, args...) y, back = pullback(f, args...) @@ -61,6 +64,25 @@ end Base.adjoint(f::Function) = x -> gradient(f, x)[1] +""" + withgradient(f, args...) + +Returns both the value `f(args...)` and the [`gradient`](@ref), +`∂f/∂x` for each argument `x`, as a named tuple. + +```jldoctest +julia> y, ∇ = withgradient(/, 1, 2) +(val = 0.5, grad = (0.5, -0.25)) + +julia> ∇ == gradient(/, 1, 2) +true +``` +""" +function withgradient(f, args...) + y, back = pullback(f, args...) + (val=y, grad=back(sensitivity(y))) +end + # Param-style wrappers # TODO store ids only diff --git a/src/lib/grad.jl b/src/lib/grad.jl index 827edbfc2..3a6eefa2c 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -105,7 +105,7 @@ This reverse-mode Jacobian needs to evaluate the pullback once for each element Doing so is usually only efficient when `length(y)` is small compared to `length(a)`, otherwise forward mode is likely to be better. -See also [`hessian`](@ref), [`hessian_reverse`](@ref). +See also [`withjacobian`](@ref), `hessian`](@ref), [`hessian_reverse`](@ref). 
# Examples @@ -137,7 +137,19 @@ julia> gradient((a,t) -> sum(a .* t[1]) + t[2], [1,2,3], (4,5)) # gradient unde ([4 4 4], (6, 1)) ``` """ -function jacobian(f, args...) +jacobian(f, args...) = withjacobian(f, args...).grad + +""" + withjacobian(f, args...) + +Returns both the value `f(args...)` and the [`jacobian`](@ref) as a named tuple. + +```jldoctest +julia> withjacobian(cumsum, [1,2,3]) +(val = [1, 3, 6], grad = ([1 0 0; 1 1 0; 1 1 1],)) +``` +""" +function withjacobian(f, args...) y, back = pullback(_jvec∘f, args...) out = map(args) do x T = promote_type(eltype(x), eltype(y)) @@ -153,7 +165,7 @@ function jacobian(f, args...) _gradcopy!(view(dx,k,:), grad) end end - out + (val=y, grad=out) end _jvec(x::AbstractArray) = vec(x) @@ -197,7 +209,9 @@ julia> Jxy[xs] 2 6 4 8 ``` """ -function jacobian(f, pars::Params) +jacobian(f, pars::Params) = withjacobian(f, pars::Params).grad + +function withjacobian(f, pars::Params) y, back = pullback(_jvec∘f, pars) out = IdDict() for p in pars @@ -213,7 +227,7 @@ function jacobian(f, pars::Params) _gradcopy!(view(out[p],k,:), grads[p]) end end - Grads(out, pars) + (val=y, grad=Grads(out, pars)) end """ diff --git a/test/features.jl b/test/features.jl index f471a29e7..ee981c8be 100644 --- a/test/features.jl +++ b/test/features.jl @@ -111,6 +111,7 @@ dx = back(4) @test dx == (12, 8) @test gradient(mul, 2, 3) == (3, 2) +@test withgradient(mul, 2, 3) == (val = 6, grad = (3, 2)) bool = true b(x) = bool ? 2x : x @@ -287,6 +288,9 @@ y, back = pullback(() -> layer(x), Params([W])) @test back([1, 1])[W] == [1 2; 1 2] @test gradient(() -> sum(W * x), Params([W]))[W] == [1 2; 1 2] +y, grad = withgradient(() -> sum(W * x), Params([W])) +@test y == 3 +@test grad[W] == [1 2; 1 2] let p = [1] diff --git a/test/utils.jl b/test/utils.jl index 1e5366fbc..b5845a7ba 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -42,6 +42,7 @@ end @testset "jacobian(f, args...)" begin @test jacobian(identity, [1,2])[1] == [1 0; 0 1] + @test withjacobian(identity, [1,2]) == (val = [1,2], grad = ([1 0; 0 1],)) j1 = jacobian((a,x) -> a.^2 .* x, [1,2,3], 1) @test j1[1] ≈ Diagonal([2,4,6]) From 84693def6161438409cdb111a2ee87e76a010ad9 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 26 Jun 2021 13:35:34 -0400 Subject: [PATCH 109/490] minimal docstrings re implicit gradients --- src/compiler/interface.jl | 26 ++++++++++++++++++++++++-- 1 file changed, 24 insertions(+), 2 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index b631763b0..eaa97010d 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -66,9 +66,11 @@ Base.adjoint(f::Function) = x -> gradient(f, x)[1] """ withgradient(f, args...) + withgradient(f, ::Params) Returns both the value `f(args...)` and the [`gradient`](@ref), `∂f/∂x` for each argument `x`, as a named tuple. +With imiplicit parameters, the value is `f()`. ```jldoctest julia> y, ∇ = withgradient(/, 1, 2) @@ -85,10 +87,24 @@ end # Param-style wrappers -# TODO store ids only +""" + gradient(() -> loss(), ::Params) -> Grads + +Gradient with implicit parameters. Returns a container, from which +`grads[W]` extracts the gradient with respect to some array `W`, +if this is among those being tracked, for example via `Params([W, A, B])`. +""" +gradient + +""" + Params([A, B, C...]) + +Container for implicit parameters, differentiating a zero-argument +funtion `() -> loss()` with respect to `A, B, C`. 
+""" struct Params order::Buffer # {Any, Vector{Any}} - params::IdSet{Any} + params::IdSet{Any} # TODO store ids only end Params() = Params(Buffer([], false), IdSet()) @@ -193,7 +209,13 @@ function copy!(x::AbstractVector, ps::Params) ps end +""" + Grads(...) +Dictionary-like container returned when taking gradients with +respect to implicit parameters. For an array `W`, appearing +within `Params([W, A, B...])`, the gradient is `g[W]`. +""" struct Grads grads::IdDict{Any,Any} params::Params From 8f811ca9844c33488f0f76a415e4db1f270c9de5 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 26 Jun 2021 14:31:24 -0400 Subject: [PATCH 110/490] name clash --- test/features.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/features.jl b/test/features.jl index ee981c8be..b1434aac7 100644 --- a/test/features.jl +++ b/test/features.jl @@ -288,9 +288,9 @@ y, back = pullback(() -> layer(x), Params([W])) @test back([1, 1])[W] == [1 2; 1 2] @test gradient(() -> sum(W * x), Params([W]))[W] == [1 2; 1 2] -y, grad = withgradient(() -> sum(W * x), Params([W])) +y, gr = withgradient(() -> sum(W * x), Params([W])) @test y == 3 -@test grad[W] == [1 2; 1 2] +@test gr[W] == [1 2; 1 2] let p = [1] From b361b18e60f3e21f58925836eff960692b90dca6 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 26 Jun 2021 14:33:41 -0400 Subject: [PATCH 111/490] fix jldoctest --- src/compiler/interface.jl | 2 +- src/lib/grad.jl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index eaa97010d..148b9a238 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -72,7 +72,7 @@ Returns both the value `f(args...)` and the [`gradient`](@ref), `∂f/∂x` for each argument `x`, as a named tuple. With imiplicit parameters, the value is `f()`. -```jldoctest +```jldoctest; setup=:(using Zygote) julia> y, ∇ = withgradient(/, 1, 2) (val = 0.5, grad = (0.5, -0.25)) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index 3a6eefa2c..4ac7708b6 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -144,7 +144,7 @@ jacobian(f, args...) = withjacobian(f, args...).grad Returns both the value `f(args...)` and the [`jacobian`](@ref) as a named tuple. -```jldoctest +```jldoctest; setup=:(using Zygote) julia> withjacobian(cumsum, [1,2,3]) (val = [1, 3, 6], grad = ([1 0 0; 1 1 0; 1 1 1],)) ``` From c4b7306087dfcab1fcd9264a120b5a42a752a8d9 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 26 Jun 2021 19:35:32 -0400 Subject: [PATCH 112/490] tweak docstrings, one more test --- src/compiler/interface.jl | 44 +++++++++++++++++++++++++++++---------- test/utils.jl | 4 ++++ 2 files changed, 37 insertions(+), 11 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 148b9a238..0fdd9bbeb 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -50,12 +50,22 @@ sensitivity(y) = error("Output should be scalar; gradients are not defined for o gradient(f, args...) Returns a tuple containing `∂f/∂x` for each argument `x`, -the derivative (for scalar x) or the gradient. +the derivative (for scalar `x`) or the gradient. `f(args...)` must be a real number, see [`jacobian`](@ref) for array output. See also [`withgradient`](@ref) to keep the value `f(args...)`, and `pullback`](@ref) for value and back-propagator. 
+ +```jldoctest; setup=:(using Zygote) +julia> gradient(*, 2, 3, 5) +(15, 10, 6) + +julia> gradient([7,11,13]) do x + sum(abs2, x) + end +([14, 22, 26],) +``` """ function gradient(f, args...) y, back = pullback(f, args...) @@ -68,9 +78,8 @@ Base.adjoint(f::Function) = x -> gradient(f, x)[1] withgradient(f, args...) withgradient(f, ::Params) -Returns both the value `f(args...)` and the [`gradient`](@ref), -`∂f/∂x` for each argument `x`, as a named tuple. -With imiplicit parameters, the value is `f()`. +Returns both the value `f(args...)` and the [`gradient`](@ref) +as a named tuple. With implicit parameters, the value is `f()`. ```jldoctest; setup=:(using Zygote) julia> y, ∇ = withgradient(/, 1, 2) @@ -88,19 +97,32 @@ end # Param-style wrappers """ - gradient(() -> loss(), ::Params) -> Grads + gradient(() -> loss(), ps::Params) -> Grads + +Gradient with implicit parameters. Takes a zero-argument function, +and returns a dictionary-like container, whose keys are arrays `x in ps`. -Gradient with implicit parameters. Returns a container, from which -`grads[W]` extracts the gradient with respect to some array `W`, -if this is among those being tracked, for example via `Params([W, A, B])`. +```jldoctest; setup=:(using Zygote) +julia> x = [1 2; 3 4]; y = [5, 6]; + +julia> g = gradient(Params([x, y])) do + sum(x .* y .* y') + end +Grads(...) + +julia> g[x] +2×2 Matrix{Int64}: + 25 30 + 30 36 +``` """ gradient """ - Params([A, B, C...]) + Params([A, B, C]) -Container for implicit parameters, differentiating a zero-argument -funtion `() -> loss()` with respect to `A, B, C`. +Container for implicit parameters, used when differentiating +a zero-argument funtion `() -> loss()` with respect to `A, B, C`. """ struct Params order::Buffer # {Any, Vector{Any}} diff --git a/test/utils.jl b/test/utils.jl index b5845a7ba..9a3d83ea5 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -83,6 +83,10 @@ end Jxy = jacobian(() -> ys[1:2] .+ sum(xs.^2), Params([xs, ys])) @test Jxy[ys] ≈ [1 0 0; 0 1 0] @test Jxy[xs] ≈ [2 6 4 8; 2 6 4 8] + + z, grad = withjacobian(() -> ys[1:2] .+ sum(xs.^2), Params([xs, ys])) + @test z == [35, 37] + @test grad[ys] ≈ [1 0 0; 0 1 0] end using ForwardDiff From 821a0d93ae1d366c9ff1442a104b71f53779db05 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 26 Jun 2021 19:50:50 -0400 Subject: [PATCH 113/490] further tweaks --- src/compiler/interface.jl | 21 ++++++++++++--------- 1 file changed, 12 insertions(+), 9 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 0fdd9bbeb..c494438f1 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -78,8 +78,8 @@ Base.adjoint(f::Function) = x -> gradient(f, x)[1] withgradient(f, args...) withgradient(f, ::Params) -Returns both the value `f(args...)` and the [`gradient`](@ref) -as a named tuple. With implicit parameters, the value is `f()`. +Returns both the value of the function and the [`gradient`](@ref), +as a named tuple. ```jldoctest; setup=:(using Zygote) julia> y, ∇ = withgradient(/, 1, 2) @@ -103,26 +103,29 @@ Gradient with implicit parameters. Takes a zero-argument function, and returns a dictionary-like container, whose keys are arrays `x in ps`. ```jldoctest; setup=:(using Zygote) -julia> x = [1 2; 3 4]; y = [5, 6]; +julia> x = [1 2 3; 4 5 6]; y = [7, 8]; z = [1, 10, 100]; julia> g = gradient(Params([x, y])) do - sum(x .* y .* y') + sum(x .* y .* z') end Grads(...) 
julia> g[x] -2×2 Matrix{Int64}: - 25 30 - 30 36 +2×3 Matrix{Int64}: + 7 70 700 + 8 80 800 + +julia> haskey(g, z) # only x and y are parameters +false ``` """ gradient """ - Params([A, B, C]) + Params([A, B]) Container for implicit parameters, used when differentiating -a zero-argument funtion `() -> loss()` with respect to `A, B, C`. +a zero-argument funtion `() -> loss(A, B)` with respect to `A, B`. """ struct Params order::Buffer # {Any, Vector{Any}} From 0416a253381cb8e5a456c9a5fe57d01465cead1b Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 27 Jun 2021 15:10:01 -0400 Subject: [PATCH 114/490] Update src/compiler/interface.jl Co-authored-by: Carlo Lucibello --- src/compiler/interface.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index c494438f1..605feb8f6 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -55,7 +55,7 @@ the derivative (for scalar `x`) or the gradient. `f(args...)` must be a real number, see [`jacobian`](@ref) for array output. See also [`withgradient`](@ref) to keep the value `f(args...)`, -and `pullback`](@ref) for value and back-propagator. +and [`pullback`](@ref) for value and back-propagator. ```jldoctest; setup=:(using Zygote) julia> gradient(*, 2, 3, 5) From 1121cc46fab1b6f9ca765feee9257a93c67bc5bb Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 27 Jun 2021 16:27:42 -0400 Subject: [PATCH 115/490] tweak anon func --- src/compiler/interface.jl | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 605feb8f6..19b73b732 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -61,10 +61,14 @@ and [`pullback`](@ref) for value and back-propagator. julia> gradient(*, 2, 3, 5) (15, 10, 6) -julia> gradient([7,11,13]) do x - sum(abs2, x) - end +julia> gradient(x -> sum(abs2,x), [7, 11, 13]) ([14, 22, 26],) + +julia> gradient([7, 11], 0, 1) do x, y, d + p = size(x, d) + sum(x.^p .+ y) + end +([14.0, 22.0], 2, nothing) ``` """ function gradient(f, args...) 
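The patches above introduce `withgradient` and `withjacobian` and document `Params`/`Grads`. As a quick orientation, here is a minimal usage sketch; it is not part of any patch and assumes a Zygote build that already contains the changes above:

```julia
using Zygote

# Value and gradient together, for explicit arguments.
y, g = withgradient((x, p) -> sum(x .^ p), [1.0, 2.0, 4.0], -1)
# y == 1.75;  g[1] ≈ [-1.0, -0.25, -0.0625];  g[2] is the derivative w.r.t. p

# Value and gradient with implicit parameters: the value is f(),
# and gradients are looked up with the tracked arrays as keys.
W, b, x = rand(2, 3), rand(2), rand(3)
loss, grads = withgradient(() -> sum(W * x .+ b), Params([W, b]))
grads[W]   # 2×3 matrix, every row equal to x'
grads[b]   # two-element vector of ones

# Value and Jacobian together.
val, (J,) = withjacobian(cumsum, [1, 2, 3])
# val == [1, 3, 6];  J == [1 0 0; 1 1 0; 1 1 1]
```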
From 0d777cd6bf99ecf9098ef5e24872ea0727f7befe Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Tue, 29 Jun 2021 16:24:42 +0530 Subject: [PATCH 116/490] in requires --- src/lib/broadcast.jl | 3 ++- test/runtests.jl | 1 + 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index f20a36663..756e18ac3 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -247,6 +247,7 @@ end end @init @require CUDA="052768ef-5323-5732-b1bb-66c8b64840ba" begin + using CUDA const CuArrayStyle = CUDA.CuArrayStyle if isdefined(CUDA, :cufunc) @@ -280,5 +281,5 @@ end Base.convert(T, xs), Δ -> (nothing, Base.convert(Array, Δ),) end - pull_block_vert(sz, Δ::CUDA.CuArray, A::Number) = CUDA.@allowscalar Δ[sz] + @eval pull_block_vert(sz, Δ::CUDA.CuArray, A::Number) = CUDA.@allowscalar Δ[sz] end diff --git a/test/runtests.jl b/test/runtests.jl index 67893a7a5..022727fbe 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,5 +1,6 @@ using Zygote, Test using Zygote: gradient, ZygoteRuleConfig +using CUDA using CUDA: has_cuda if has_cuda() From 42cd28e52d9571d8e36c79593431e2ba1c9b25e5 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Tue, 29 Jun 2021 16:53:24 +0530 Subject: [PATCH 117/490] test the gpu case the same as cpu --- test/cuda.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/cuda.jl b/test/cuda.jl index 2a4ce40e5..95bcdf373 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -94,7 +94,7 @@ end @testset "vcat scalar indexing" begin r = cu(rand(Float32, 3)) - grads = (cu(ones(Float32, 3)), nothing) + grads = (cu(ones(Float32, 3)), 1.f0) @test gradient((x,y) -> sum(vcat(x,y)), r, 5) == grads end From bcc41b8afc144f5b26bde73afd8207ec861fa735 Mon Sep 17 00:00:00 2001 From: Jordi Bolibar Date: Fri, 2 Jul 2021 16:14:50 +0200 Subject: [PATCH 118/490] Documentation edit on implicit parameters After struggling to handle implicit parameters with a Flux model, and following a discourse discussion (https://discourse.julialang.org/t/unrecognized-gradient-using-zygote-for-ad-with-universal-differential-equations/63791/2) , I have decided to add some extra details on how to access and when to use implicit parameters. I hope this helps new users like me to avoid wasting time looking for this. --- docs/src/index.md | 18 +++++++++++++++++- 1 file changed, 17 insertions(+), 1 deletion(-) diff --git a/docs/src/index.md b/docs/src/index.md index be300581d..cac589ae0 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -131,6 +131,8 @@ julia> gradient(colordiff, RGB(1, 0, 0), RGB(0, 1, 0)) ## Gradients of ML models +### Explicit parameters + It's easy to work with even very large and complex models, and there are few ways to do this. Autograd-style models pass around a collection of weights. ```julia @@ -170,7 +172,9 @@ julia> dmodel = gradient(model -> sum(model(x)), model)[1] (W = [0.652543 … 0.683588], b = [1.0, 1.0]) ``` -Zygote also support one more way to take gradients, via *implicit parameters* – this is a lot like autograd-style gradients, except we don't have to thread the parameter collection through all our code. +### Implicit parameters + +Zygote also support one more way to take gradients, via *implicit parameters* – this is a lot like autograd-style gradients, except we don't have to thread the parameter collection through all our code. When working with Flux models, this is the recommended way of passing the gradients, as it ensures compatibility with Flux's built-in optimizers. 
```julia julia> W = rand(2, 5); b = rand(2); @@ -181,8 +185,20 @@ linear (generic function with 2 methods) julia> grads = gradient(() -> sum(linear(x)), Params([W, b])) Grads(...) +# Apply gradients to model parameters julia> grads[W], grads[b] ([0.652543 … 0.683588], [1.0, 1.0]) ``` +Unlike with explicit gradients, in order to see implicit gradients one needs to do: + +```julia +julia> grads.grads +IdDict{Any, Any} with 5 entries: + [0.467471 0.597815 … 0.678126 … => [0.579671 0.215381 … 0.635058 0.623832; 0.579671 0.215381 … … + :(Main.x) => [1.3377, 0.930234, 0.499161, 1.33827, 1.37791] + :(Main.W) => [0.579671 0.215381 … 0.635058 0.623832; 0.579671 0.215381 … … + [0.106308, 0.705531] => 2-element Fill{Float64}: entries equal to 1.0 + :(Main.b) => 2-element Fill{Float64}: entries equal to 1.0 +``` However, implicit parameters exist mainly for compatibility with Flux's current AD; it's recommended to use the other approaches unless you need this. From a970421a674d077b126ae8ffc4a56e172fbe880a Mon Sep 17 00:00:00 2001 From: Jordi Bolibar Date: Fri, 2 Jul 2021 18:24:32 +0200 Subject: [PATCH 119/490] Update docs/src/index.md Co-authored-by: Kyle Daruwalla --- docs/src/index.md | 11 ++--------- 1 file changed, 2 insertions(+), 9 deletions(-) diff --git a/docs/src/index.md b/docs/src/index.md index cac589ae0..657560022 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -189,16 +189,9 @@ Grads(...) julia> grads[W], grads[b] ([0.652543 … 0.683588], [1.0, 1.0]) ``` -Unlike with explicit gradients, in order to see implicit gradients one needs to do: +To inspect the `Grads(...)` object returned for implicit parameters, you can index it using the parameters passed to `Params`: ```julia -julia> grads.grads -IdDict{Any, Any} with 5 entries: - [0.467471 0.597815 … 0.678126 … => [0.579671 0.215381 … 0.635058 0.623832; 0.579671 0.215381 … … - :(Main.x) => [1.3377, 0.930234, 0.499161, 1.33827, 1.37791] - :(Main.W) => [0.579671 0.215381 … 0.635058 0.623832; 0.579671 0.215381 … … - [0.106308, 0.705531] => 2-element Fill{Float64}: entries equal to 1.0 - :(Main.b) => 2-element Fill{Float64}: entries equal to 1.0 -``` +julia> [grads[p] for p in [W, b]] However, implicit parameters exist mainly for compatibility with Flux's current AD; it's recommended to use the other approaches unless you need this. From e860ac3dc3765a317b342e7219fe13b4d4c29968 Mon Sep 17 00:00:00 2001 From: Jordi Bolibar Date: Fri, 2 Jul 2021 19:14:05 +0200 Subject: [PATCH 120/490] Update on implicit/explicit parameters docs An update following some suggestions. --- docs/src/index.md | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/docs/src/index.md b/docs/src/index.md index 657560022..d55cdc014 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -129,11 +129,9 @@ julia> gradient(colordiff, RGB(1, 0, 0), RGB(0, 1, 0)) ((r = 0.4590887719632896, g = -9.598786801605689, b = 14.181383399012862), (r = -1.7697549557037275, g = 28.88472330558805, b = -0.044793892637761346)) ``` -## Gradients of ML models +## Explicit and implicit parameters of ML models -### Explicit parameters - -It's easy to work with even very large and complex models, and there are few ways to do this. Autograd-style models pass around a collection of weights. +It's easy to work with even very large and complex models, and there are few ways to do this. Autograd-style models pass around a collection of weights. 
There are two ways of passing *explicit* parameters: ```julia julia> linear(θ, x) = θ[:W] * x .+ θ[:b] @@ -172,8 +170,6 @@ julia> dmodel = gradient(model -> sum(model(x)), model)[1] (W = [0.652543 … 0.683588], b = [1.0, 1.0]) ``` -### Implicit parameters - Zygote also support one more way to take gradients, via *implicit parameters* – this is a lot like autograd-style gradients, except we don't have to thread the parameter collection through all our code. When working with Flux models, this is the recommended way of passing the gradients, as it ensures compatibility with Flux's built-in optimizers. ```julia @@ -184,14 +180,18 @@ linear (generic function with 2 methods) julia> grads = gradient(() -> sum(linear(x)), Params([W, b])) Grads(...) +``` +To inspect the `Grads(...)` object returned for implicit parameters, you can access it using the parameters passed to `Params`: +```julia # Apply gradients to model parameters julia> grads[W], grads[b] ([0.652543 … 0.683588], [1.0, 1.0]) ``` -To inspect the `Grads(...)` object returned for implicit parameters, you can index it using the parameters passed to `Params`: -```julia -julia> [grads[p] for p in [W, b]] +Here `grads` is a dictionary-like object, whose keys are the same parameters we +indicated in `Params`. (In fact it contains a dictionary using `objectid(W)`, which +does not change if the values in `W` are mutated.) These parameters `W, b` are global +variables, but gradients with respect to other global variables are not stored. However, implicit parameters exist mainly for compatibility with Flux's current AD; it's recommended to use the other approaches unless you need this. From 51bda084777a642a3641901f833968a0104468e9 Mon Sep 17 00:00:00 2001 From: Jordi Bolibar Date: Fri, 2 Jul 2021 20:18:24 +0200 Subject: [PATCH 121/490] Update docs/src/index.md Co-authored-by: Michael Abbott <32575566+mcabbott@users.noreply.github.com> --- docs/src/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/index.md b/docs/src/index.md index d55cdc014..d9e5ea7a0 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -129,7 +129,7 @@ julia> gradient(colordiff, RGB(1, 0, 0), RGB(0, 1, 0)) ((r = 0.4590887719632896, g = -9.598786801605689, b = 14.181383399012862), (r = -1.7697549557037275, g = 28.88472330558805, b = -0.044793892637761346)) ``` -## Explicit and implicit parameters of ML models +## Explicit and Implicit Parameters It's easy to work with even very large and complex models, and there are few ways to do this. Autograd-style models pass around a collection of weights. There are two ways of passing *explicit* parameters: From 9033b71fff20a46b8e2a3bb80fd37f670e47f211 Mon Sep 17 00:00:00 2001 From: Jordi Bolibar Date: Fri, 2 Jul 2021 20:19:25 +0200 Subject: [PATCH 122/490] Update docs/src/index.md Co-authored-by: Kyle Daruwalla --- docs/src/index.md | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/docs/src/index.md b/docs/src/index.md index d9e5ea7a0..60963608e 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -180,18 +180,13 @@ linear (generic function with 2 methods) julia> grads = gradient(() -> sum(linear(x)), Params([W, b])) Grads(...) 
-``` -To inspect the `Grads(...)` object returned for implicit parameters, you can access it using the parameters passed to `Params`: -```julia -# Apply gradients to model parameters -julia> grads[W], grads[b] +julia> grads[W], grads[b] # access gradients using arrays as keys ([0.652543 … 0.683588], [1.0, 1.0]) ``` Here `grads` is a dictionary-like object, whose keys are the same parameters we -indicated in `Params`. (In fact it contains a dictionary using `objectid(W)`, which -does not change if the values in `W` are mutated.) These parameters `W, b` are global -variables, but gradients with respect to other global variables are not stored. +indicated in `Params`. (In fact it wraps a dictionary using `objectid(W)` as keys, which +does not change if the values in `W` are mutated). However, implicit parameters exist mainly for compatibility with Flux's current AD; it's recommended to use the other approaches unless you need this. From 49d50645f829794d7a499bb4b5efefe4563d7134 Mon Sep 17 00:00:00 2001 From: Jordi Bolibar Date: Fri, 2 Jul 2021 20:23:00 +0200 Subject: [PATCH 123/490] Formatting and new last paragraph in docs Adding the last paragraph suggested by Michael. --- docs/src/index.md | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/docs/src/index.md b/docs/src/index.md index 60963608e..b468cc27a 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -185,8 +185,6 @@ julia> grads[W], grads[b] # access gradients using arrays as keys ([0.652543 … 0.683588], [1.0, 1.0]) ``` -Here `grads` is a dictionary-like object, whose keys are the same parameters we -indicated in `Params`. (In fact it wraps a dictionary using `objectid(W)` as keys, which -does not change if the values in `W` are mutated). +Here `grads` is a dictionary-like object, whose keys are the same parameters we indicated in `Params`. (In fact it wraps a dictionary using `objectid(W)` as keys, which does not change if the values in `W` are mutated). -However, implicit parameters exist mainly for compatibility with Flux's current AD; it's recommended to use the other approaches unless you need this. +This implicit style is the one presently used by [Flux.jl](https://github.com/FluxML/Flux.jl), a closely related machine learning library. It uses structs like `Linear` above to define layers, and the function `Flux.params(model)` returns a `Params` object containing all the parameters of all layers. See [its documentation](https://fluxml.ai/Flux.jl/stable/models/basics/) for more details. When using Zygote for most other purposes, however, the explicit style is usually preferred. From cfe501a942caa6eccb376136c55b0e6cfe8d5d28 Mon Sep 17 00:00:00 2001 From: Jordi Bolibar Date: Fri, 2 Jul 2021 21:25:32 +0200 Subject: [PATCH 124/490] Update docs/src/index.md Co-authored-by: Kyle Daruwalla --- docs/src/index.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/index.md b/docs/src/index.md index b468cc27a..1b10e4293 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -131,7 +131,7 @@ julia> gradient(colordiff, RGB(1, 0, 0), RGB(0, 1, 0)) ## Explicit and Implicit Parameters -It's easy to work with even very large and complex models, and there are few ways to do this. Autograd-style models pass around a collection of weights. There are two ways of passing *explicit* parameters: +It's easy to work with even very large and complex models, and there are few ways to do this. Autograd-style models pass around a collection of weights. 
Depending on how you write your model, there are multiple ways to *explicity* take gradients with respect to parameters. For example, the function `linear` accepts the parameters as an argument to the model. So, we directly pass in the parameters, `θ`, as an argument to the function being differentiated. ```julia julia> linear(θ, x) = θ[:W] * x .+ θ[:b] From b0db7287b08a869d4294808b0f7f8e0b24dbc41a Mon Sep 17 00:00:00 2001 From: Jordi Bolibar Date: Fri, 2 Jul 2021 21:29:17 +0200 Subject: [PATCH 125/490] Update index.md --- docs/src/index.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/src/index.md b/docs/src/index.md index 1b10e4293..b70ca9eb7 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -170,7 +170,7 @@ julia> dmodel = gradient(model -> sum(model(x)), model)[1] (W = [0.652543 … 0.683588], b = [1.0, 1.0]) ``` -Zygote also support one more way to take gradients, via *implicit parameters* – this is a lot like autograd-style gradients, except we don't have to thread the parameter collection through all our code. When working with Flux models, this is the recommended way of passing the gradients, as it ensures compatibility with Flux's built-in optimizers. +On the other hand, the *implicit* style is the one presently used by [Flux.jl](https://github.com/FluxML/Flux.jl), a closely related machine learning library. It uses structs like `Linear` above to define layers, and the function `Flux.params(model)` returns a `Params` object containing all the parameters of all layers. See [its documentation](https://fluxml.ai/Flux.jl/stable/models/basics/) for more details. When using Zygote for most other purposes, however, the explicit style is usually preferred. ```julia julia> W = rand(2, 5); b = rand(2); @@ -187,4 +187,3 @@ julia> grads[W], grads[b] # access gradients using arrays as keys Here `grads` is a dictionary-like object, whose keys are the same parameters we indicated in `Params`. (In fact it wraps a dictionary using `objectid(W)` as keys, which does not change if the values in `W` are mutated). -This implicit style is the one presently used by [Flux.jl](https://github.com/FluxML/Flux.jl), a closely related machine learning library. It uses structs like `Linear` above to define layers, and the function `Flux.params(model)` returns a `Params` object containing all the parameters of all layers. See [its documentation](https://fluxml.ai/Flux.jl/stable/models/basics/) for more details. When using Zygote for most other purposes, however, the explicit style is usually preferred. From b63276052e5997e63d3feaa66993f738f7c885b9 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Sun, 4 Jul 2021 20:28:42 +0530 Subject: [PATCH 126/490] whitespace --- src/compiler/interface.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 19b73b732..35a02e272 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -95,7 +95,7 @@ true """ function withgradient(f, args...) y, back = pullback(f, args...) 
- (val=y, grad=back(sensitivity(y))) + (val = y, grad = back(sensitivity(y))) end # Param-style wrappers From d70f10544df4d2fa244ca90d85a799697c3fbd9c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Sun, 4 Jul 2021 16:04:00 +0000 Subject: [PATCH 127/490] CompatHelper: bump compat for "FillArrays" to "0.12" --- Project.toml | 4 +-- docs/Manifest.toml | 21 ++++--------- examples/Manifest.toml | 68 ++++++++++++++++++++++++------------------ 3 files changed, 46 insertions(+), 47 deletions(-) diff --git a/Project.toml b/Project.toml index 3f9baa17c..6cd2fccdc 100644 --- a/Project.toml +++ b/Project.toml @@ -27,7 +27,7 @@ ChainRules = "0.8.12" ChainRulesCore = "0.10.4" ChainRulesTestUtils = "0.7.1" DiffRules = "1.0" -FillArrays = "0.8, 0.9, 0.10, 0.11" +FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12" ForwardDiff = "0.10" IRTools = "0.4" MacroTools = "0.5" @@ -45,7 +45,7 @@ Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688" -StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c" # otherwise we can't add a compat bound +StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] diff --git a/docs/Manifest.toml b/docs/Manifest.toml index 1d00bfcff..1628ace07 100644 --- a/docs/Manifest.toml +++ b/docs/Manifest.toml @@ -12,10 +12,10 @@ deps = ["Random", "Serialization", "Sockets"] uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" [[DocStringExtensions]] -deps = ["LibGit2", "Markdown", "Pkg", "Test"] -git-tree-sha1 = "50ddf44c53698f5e784bbebb3f4b21c5807401b1" +deps = ["LibGit2"] +git-tree-sha1 = "a32185f5428d3986f47c2ab78b1f216d5e6cc96f" uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" -version = "0.8.3" +version = "0.8.5" [[Documenter]] deps = ["Base64", "Dates", "DocStringExtensions", "InteractiveUtils", "JSON", "LibGit2", "Logging", "Markdown", "REPL", "Test", "Unicode"] @@ -48,13 +48,9 @@ uuid = "a63ad114-7e13-5084-954f-fe012c677804" [[Parsers]] deps = ["Dates"] -git-tree-sha1 = "50c9a9ed8c714945e01cd53a21007ed3865ed714" +git-tree-sha1 = "c8abc88faa3f7a3950832ac5d6e690881590d6dc" uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" -version = "1.0.15" - -[[Pkg]] -deps = ["Dates", "LibGit2", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] -uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" +version = "1.1.0" [[Printf]] deps = ["Unicode"] @@ -68,9 +64,6 @@ uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" deps = ["Serialization"] uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" -[[SHA]] -uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" - [[Serialization]] uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" @@ -81,9 +74,5 @@ uuid = "6462fe0b-24de-5631-8697-dd941f90decc" deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" -[[UUIDs]] -deps = ["Random", "SHA"] -uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" - [[Unicode]] uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" diff --git a/examples/Manifest.toml b/examples/Manifest.toml index 2ecfa67d1..4ad93ad0d 100644 --- a/examples/Manifest.toml +++ b/examples/Manifest.toml @@ -82,9 +82,9 @@ version = "0.3.0" [[Conda]] deps = ["JSON", "VersionParsing"] -git-tree-sha1 = "c0647249d785f1d5139c0cc96db8f6b32f7ec416" +git-tree-sha1 = "299304989a5e6473d985212c28928899c74e9421" uuid = "8f4d0f93-b110-5947-807f-2305c1781a2d" -version = "1.5.0" +version = "1.5.2" [[CuArrays]] deps 
= ["AbstractFFTs", "Adapt", "CUDAapi", "CUDAdrv", "CUDAnative", "GPUArrays", "LinearAlgebra", "MacroTools", "NNlib", "Printf", "Random", "Requires", "SparseArrays", "TimerOutputs"] @@ -93,9 +93,9 @@ uuid = "3a865a2d-5b23-5a0f-bc46-62713ec82fae" version = "1.2.1" [[DataAPI]] -git-tree-sha1 = "ad84f52c0b8f05aa20839484dbaf01690b41ff84" +git-tree-sha1 = "ee400abb2298bd13bfc3df1c412ed228061a2385" uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a" -version = "1.4.0" +version = "1.7.0" [[DataStructures]] deps = ["InteractiveUtils", "OrderedCollections"] @@ -127,6 +127,11 @@ version = "1.0.2" deps = ["Random", "Serialization", "Sockets"] uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" +[[ExprTools]] +git-tree-sha1 = "555eab1f7c501166ba87eeb5d561e9f5e7d167d3" +uuid = "e2ba6199-217a-4e67-a87a-7c52f15ade04" +version = "0.1.4" + [[FFTW]] deps = ["AbstractFFTs", "BinaryProvider", "Conda", "Libdl", "LinearAlgebra", "Reexport", "Test"] git-tree-sha1 = "6c5b420da0b8c12098048561b8d58f81adea506f" @@ -151,10 +156,10 @@ uuid = "587475ba-b771-5e3f-ad9e-33799f191a9c" version = "0.9.0" [[ForwardDiff]] -deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "NaNMath", "Random", "SpecialFunctions", "StaticArrays"] -git-tree-sha1 = "8de2519a83c6c1c2442c2f481dd9a8364855daf4" +deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "LinearAlgebra", "NaNMath", "Printf", "Random", "SpecialFunctions", "StaticArrays"] +git-tree-sha1 = "e2af66012e08966366a43251e1fd421522908be6" uuid = "f6369f11-7733-5829-9624-2563aa707210" -version = "0.10.14" +version = "0.10.18" [[GPUArrays]] deps = ["Adapt", "FFTW", "FillArrays", "LinearAlgebra", "Printf", "Random", "Serialization", "Test"] @@ -164,9 +169,9 @@ version = "1.0.4" [[IRTools]] deps = ["InteractiveUtils", "MacroTools", "Test"] -git-tree-sha1 = "c67e7515a11f726f44083e74f218d134396d6510" +git-tree-sha1 = "95215cd0076a150ef46ff7928892bc341864c73c" uuid = "7869d1d1-7146-5819-86e3-90919afe41df" -version = "0.4.2" +version = "0.4.3" [[InteractiveUtils]] deps = ["Markdown"] @@ -221,9 +226,9 @@ version = "0.5.0" [[Missings]] deps = ["DataAPI"] -git-tree-sha1 = "ed61674a0864832495ffe0a7e889c0da76b0f4c8" +git-tree-sha1 = "4ea90bd5d3985ae1f9a908bd4500ae88921c5ce7" uuid = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28" -version = "0.4.4" +version = "1.0.0" [[Mmap]] uuid = "a63ad114-7e13-5084-954f-fe012c677804" @@ -240,15 +245,15 @@ uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" version = "0.3.5" [[OrderedCollections]] -git-tree-sha1 = "cf59cfed2e2c12e8a2ff0a4f1e9b2cd8650da6db" +git-tree-sha1 = "85f8e6578bf1f9ee0d11e7bb1b1456435479d47c" uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" -version = "1.3.2" +version = "1.4.1" [[Parsers]] deps = ["Dates"] -git-tree-sha1 = "50c9a9ed8c714945e01cd53a21007ed3865ed714" +git-tree-sha1 = "c8abc88faa3f7a3950832ac5d6e690881590d6dc" uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" -version = "1.0.15" +version = "1.1.0" [[Pkg]] deps = ["Dates", "LibGit2", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] @@ -264,9 +269,9 @@ uuid = "9abbd945-dff8-562f-b5e8-e1ebf5ef1b79" [[ProgressMeter]] deps = ["Distributed", "Printf"] -git-tree-sha1 = "45640774ee2efa24e52686dbdf895e88102e68fc" +git-tree-sha1 = "afadeba63d90ff223a6a48d2009434ecee2ec9e8" uuid = "92933f4c-e287-5a05-a399-4b506db050ca" -version = "1.4.1" +version = "1.7.1" [[REPL]] deps = ["InteractiveUtils", "Markdown", "Sockets"] @@ -298,10 +303,10 @@ uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" uuid = "6462fe0b-24de-5631-8697-dd941f90decc" [[SortingAlgorithms]] -deps = ["DataStructures", "Random", "Test"] 
-git-tree-sha1 = "03f5898c9959f8115e30bc7226ada7d0df554ddd" +deps = ["DataStructures"] +git-tree-sha1 = "2ec1962eba973f383239da22e75218565c390a96" uuid = "a2af1166-a08f-5f64-846c-94a0d3cef48c" -version = "0.3.1" +version = "1.0.0" [[SparseArrays]] deps = ["LinearAlgebra", "Random"] @@ -315,29 +320,34 @@ version = "0.8.0" [[StaticArrays]] deps = ["LinearAlgebra", "Random", "Statistics"] -git-tree-sha1 = "9da72ed50e94dbff92036da395275ed114e04d49" +git-tree-sha1 = "896d55218776ab8f23fb7b222a5a4a946d4aafc2" uuid = "90137ffa-7385-5640-81b9-e52037218182" -version = "1.0.1" +version = "1.2.5" [[Statistics]] deps = ["LinearAlgebra", "SparseArrays"] uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" +[[StatsAPI]] +git-tree-sha1 = "1958272568dc176a1d881acb797beb909c785510" +uuid = "82ae8749-77ed-4fe6-ae5f-f523153014b0" +version = "1.0.0" + [[StatsBase]] -deps = ["DataAPI", "DataStructures", "LinearAlgebra", "Missings", "Printf", "Random", "SortingAlgorithms", "SparseArrays", "Statistics"] -git-tree-sha1 = "7bab7d4eb46b225b35179632852b595a3162cb61" +deps = ["DataAPI", "DataStructures", "LinearAlgebra", "Missings", "Printf", "Random", "SortingAlgorithms", "SparseArrays", "Statistics", "StatsAPI"] +git-tree-sha1 = "2f6792d523d7448bbe2fec99eca9218f06cc746d" uuid = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91" -version = "0.33.2" +version = "0.33.8" [[Test]] deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [[TimerOutputs]] -deps = ["Printf"] -git-tree-sha1 = "3318281dd4121ecf9713ce1383b9ace7d7476fdd" +deps = ["ExprTools", "Printf"] +git-tree-sha1 = "209a8326c4f955e2442c07b56029e88bb48299c7" uuid = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f" -version = "0.5.7" +version = "0.5.12" [[Tracker]] deps = ["Adapt", "DiffRules", "ForwardDiff", "LinearAlgebra", "MacroTools", "NNlib", "NaNMath", "Printf", "Random", "Requires", "SpecialFunctions", "Statistics", "Test"] @@ -381,4 +391,4 @@ git-tree-sha1 = "d3c2ae55d116b5360a73b1e88d1a974b446d933a" repo-rev = "ffc50480ff8f7662110bfb82b0b6d4f9cef6e59d" repo-url = "https://github.com/FluxML/Zygote.jl.git" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.0+" +version = "0.6.14+" From 468d7a4d987907c7572b0b28e153eca76cd5c848 Mon Sep 17 00:00:00 2001 From: Jordi Bolibar Date: Mon, 5 Jul 2021 10:43:35 +0200 Subject: [PATCH 128/490] Update docs/src/index.md Co-authored-by: Michael Abbott <32575566+mcabbott@users.noreply.github.com> --- docs/src/index.md | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/docs/src/index.md b/docs/src/index.md index b70ca9eb7..0cfd80ae3 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -170,7 +170,7 @@ julia> dmodel = gradient(model -> sum(model(x)), model)[1] (W = [0.652543 … 0.683588], b = [1.0, 1.0]) ``` -On the other hand, the *implicit* style is the one presently used by [Flux.jl](https://github.com/FluxML/Flux.jl), a closely related machine learning library. It uses structs like `Linear` above to define layers, and the function `Flux.params(model)` returns a `Params` object containing all the parameters of all layers. See [its documentation](https://fluxml.ai/Flux.jl/stable/models/basics/) for more details. When using Zygote for most other purposes, however, the explicit style is usually preferred. +Zygote also support another way to take gradients, via *implicit parameters*. Here the loss function takes zero arguments, but the variables of interest are indicated by a special `Params` object. 
The function `linear` which depends on `W` and `b` is executed when the loss function `() -> sum(linear(x))` is called, and hence this dependence is visible to Zygote: ```julia julia> W = rand(2, 5); b = rand(2); @@ -186,4 +186,3 @@ julia> grads[W], grads[b] # access gradients using arrays as keys ``` Here `grads` is a dictionary-like object, whose keys are the same parameters we indicated in `Params`. (In fact it wraps a dictionary using `objectid(W)` as keys, which does not change if the values in `W` are mutated). - From 4f1f7e64d0b4afa7845df694c3bbd03cc68c0441 Mon Sep 17 00:00:00 2001 From: Jordi Bolibar Date: Tue, 6 Jul 2021 11:24:31 +0200 Subject: [PATCH 129/490] Update docs/src/index.md Co-authored-by: Michael Abbott <32575566+mcabbott@users.noreply.github.com> --- docs/src/index.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/src/index.md b/docs/src/index.md index 0cfd80ae3..36bf0ae8f 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -186,3 +186,5 @@ julia> grads[W], grads[b] # access gradients using arrays as keys ``` Here `grads` is a dictionary-like object, whose keys are the same parameters we indicated in `Params`. (In fact it wraps a dictionary using `objectid(W)` as keys, which does not change if the values in `W` are mutated). + +This implicit style is the one presently used by [Flux.jl](https://github.com/FluxML/Flux.jl), a closely related machine learning library. It uses structs like `Linear` above to define layers, and the function `Flux.params(model)` returns a `Params` object containing all the parameters of all layers. See [its documentation](https://fluxml.ai/Flux.jl/stable/models/basics/) for more details. When using Zygote for most other purposes, however, the explicit style is usually preferred. From f02b1259bf56b8f26c6348247e8c21edc2048fb0 Mon Sep 17 00:00:00 2001 From: Shuhei Kadowaki Date: Fri, 9 Jul 2021 14:50:11 +0900 Subject: [PATCH 130/490] documentation updates --- docs/Manifest.toml | 15 +++++++++------ docs/src/adjoints.md | 4 ++-- docs/src/internals.md | 4 ++-- docs/src/utils.md | 10 +++++----- 4 files changed, 18 insertions(+), 15 deletions(-) diff --git a/docs/Manifest.toml b/docs/Manifest.toml index 1628ace07..eac8a8401 100644 --- a/docs/Manifest.toml +++ b/docs/Manifest.toml @@ -7,10 +7,6 @@ uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" deps = ["Printf"] uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" -[[Distributed]] -deps = ["Random", "Serialization", "Sockets"] -uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" - [[DocStringExtensions]] deps = ["LibGit2"] git-tree-sha1 = "a32185f5428d3986f47c2ab78b1f216d5e6cc96f" @@ -34,6 +30,7 @@ uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" version = "0.21.1" [[LibGit2]] +deps = ["Base64", "NetworkOptions", "Printf", "SHA"] uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" [[Logging]] @@ -46,6 +43,9 @@ uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" [[Mmap]] uuid = "a63ad114-7e13-5084-954f-fe012c677804" +[[NetworkOptions]] +uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" + [[Parsers]] deps = ["Dates"] git-tree-sha1 = "c8abc88faa3f7a3950832ac5d6e690881590d6dc" @@ -57,13 +57,16 @@ deps = ["Unicode"] uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" [[REPL]] -deps = ["InteractiveUtils", "Markdown", "Sockets"] +deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"] uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" [[Random]] deps = ["Serialization"] uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +[[SHA]] +uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" + [[Serialization]] uuid = 
"9e88b42a-f829-5b0c-bbe9-9e923198166b" @@ -71,7 +74,7 @@ uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" uuid = "6462fe0b-24de-5631-8697-dd941f90decc" [[Test]] -deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] +deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [[Unicode]] diff --git a/docs/src/adjoints.md b/docs/src/adjoints.md index 89a76a2bd..9cf72c943 100644 --- a/docs/src/adjoints.md +++ b/docs/src/adjoints.md @@ -4,9 +4,9 @@ Zygote supports the use of [ChainRulesCore](http://www.juliadiff.org/ChainRulesCore.jl/stable/) to define custom sensitivities. It is prefered to define the custom sensitivities using `ChainRulesCore.rrule` as they will work for many AD systems, not just Zygote. These sensitivities can be added in your own package, or for Base/StdLib functions they can be added to [ChainRules.jl](https://github.com/JuliaDiff/ChainRules.jl/). - To define custom sensitivities using ChainRulesCore, define a `ChainRulesCore.rrule(f, args...; kwargs...)` [ChainRules project's documentation for more information](https://www.juliadiff.org/ChainRulesCore.jl/stable/). + To define custom sensitivities using ChainRulesCore, define a `ChainRulesCore.rrule(f, args...; kwargs...)`. Head for [ChainRules project's documentation](https://www.juliadiff.org/ChainRulesCore.jl/stable/) for more information. **If you are defining your custom adjoints using ChainRulesCore then you do not need to read this page**, and can consider it as documenting a legacy feature. - + This page exists to descibe how Zygote works, and how adjoints can be directly defined for Zygote. Defining adjoints this way does not make them accessible to other AD systems, but does let you do things that directly depend on how Zygote works. It allows for specific definitions of adjoints that are only defined for Zgyote (which might work differently to more generic definitions defined for all AD). diff --git a/docs/src/internals.md b/docs/src/internals.md index 3c808ad79..70651a697 100644 --- a/docs/src/internals.md +++ b/docs/src/internals.md @@ -137,7 +137,7 @@ We convert the code to SSA form using Julia's built-in IR data structure, after julia> Zygote.@code_ir foo(1) 1 1 ─ %1 = (Main.bar)(_2)::Any │ %2 = (Main.baz)(%1)::Any - └── return %2 + └── return %2 ``` (There isn't much difference unless there's some control flow.) 
@@ -202,7 +202,7 @@ function J(::typeof(foo), x) return b, Pullback{typeof(foo)}((da, db)) end -function(p::Pullback{typeof(foo)})(b̄) +function (p::Pullback{typeof(foo)})(b̄) da, db = p.data[1], p.data[2] ā = db(b̄) x̄ = da(ā) diff --git a/docs/src/utils.md b/docs/src/utils.md index 309e43786..b7e779185 100644 --- a/docs/src/utils.md +++ b/docs/src/utils.md @@ -37,17 +37,17 @@ using Zygote, Test w, x1, x2, b = rand(2), rand(2), rand(2), rand(2) -gs1 = gradient(() -> sum(tanh.(w .* x1 .+ b)), Params([w, b])) -gs2 = gradient(() -> sum(tanh.(w .* x2 .+ b)), Params([w, b])) +gs1 = gradient(() -> sum(tanh.(w .* x1 .+ b)), Params([w, b])) +gs2 = gradient(() -> sum(tanh.(w .* x2 .+ b)), Params([w, b])) # accumulate gradients gs = gs1 .+ gs2 -@test gs[w] ≈ gs1[w] + gs2[w] -@test gs[b] ≈ gs1[b] + gs2[b] +@test gs[w] ≈ gs1[w] + gs2[w] +@test gs[b] ≈ gs1[b] + gs2[b] # gradients and IdDict interact nicely # note that an IdDict must be used for gradient algebra on the GPU -gs .+= IdDict(p => randn(size(p)) for p in keys(gs)) +gs .+= IdDict(p => randn(size(p)) for p in keys(gs)) # clip gradients map(x -> clamp.(x, -0.1, 0.1), gs) From d087bb6c63043c7b2e2fde54b15f3d19a76bba84 Mon Sep 17 00:00:00 2001 From: Shuhei Kadowaki <40514306+aviatesk@users.noreply.github.com> Date: Fri, 9 Jul 2021 20:19:54 +0900 Subject: [PATCH 131/490] Update docs/src/adjoints.md Co-authored-by: Dhairya Gandhi --- docs/src/adjoints.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/adjoints.md b/docs/src/adjoints.md index 9cf72c943..1d6ddc527 100644 --- a/docs/src/adjoints.md +++ b/docs/src/adjoints.md @@ -4,7 +4,7 @@ Zygote supports the use of [ChainRulesCore](http://www.juliadiff.org/ChainRulesCore.jl/stable/) to define custom sensitivities. It is prefered to define the custom sensitivities using `ChainRulesCore.rrule` as they will work for many AD systems, not just Zygote. These sensitivities can be added in your own package, or for Base/StdLib functions they can be added to [ChainRules.jl](https://github.com/JuliaDiff/ChainRules.jl/). - To define custom sensitivities using ChainRulesCore, define a `ChainRulesCore.rrule(f, args...; kwargs...)`. Head for [ChainRules project's documentation](https://www.juliadiff.org/ChainRulesCore.jl/stable/) for more information. + To define custom sensitivities using ChainRulesCore, define a `ChainRulesCore.rrule(f, args...; kwargs...)`. Head to [ChainRules project's documentation](https://www.juliadiff.org/ChainRulesCore.jl/stable/) for more information. **If you are defining your custom adjoints using ChainRulesCore then you do not need to read this page**, and can consider it as documenting a legacy feature. This page exists to descibe how Zygote works, and how adjoints can be directly defined for Zygote. 
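The documentation patches above rework the discussion of explicit versus implicit parameters in `docs/src/index.md`. The short sketch below is illustrative only and not taken from any patch; it reuses the `Linear` example from those docs to show both styles producing the same gradients:

```julia
using Zygote

# Explicit style: parameters live in a callable struct,
# and `gradient` returns a matching named tuple.
struct Linear
  W
  b
end
(l::Linear)(x) = l.W * x .+ l.b

model = Linear(rand(2, 5), rand(2))
x = rand(5)
dmodel = gradient(m -> sum(m(x)), model)[1]   # (W = ..., b = ...)

# Implicit style: the same computation, with the arrays tracked by Params
# and the results looked up in a dictionary-like Grads object.
W, b = model.W, model.b
grads = gradient(() -> sum(W * x .+ b), Params([W, b]))
grads[W] ≈ dmodel.W   # true
grads[b] ≈ dmodel.b   # true
```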
From 2f91749b06f27d894ffb314db0ebc4eea697faeb Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 11 Jul 2021 22:34:37 -0400 Subject: [PATCH 132/490] add error for broadcasting over Params --- src/compiler/interface.jl | 5 +++++ test/interface.jl | 3 +++ 2 files changed, 8 insertions(+) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 35a02e272..3ba61ad0c 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -200,6 +200,11 @@ function Base.delete!(ps::Params, x) end Base.Broadcast.broadcasted(f, ps::Params) = broadcasted(f, ps.order) +# Broadcast.broadcastable(ps::Params) = ps.order + +@adjoint function Broadcast.broadcasted(f::Function, ps::Params) + f.(ps), _ -> throw(ArgumentError("Zygote.Params does not support broadcasting within gradients, try iteration `for p in ps`")) +end Base.:(==)(x::Params, y::Params) = x.order.data == y.order.data diff --git a/test/interface.jl b/test/interface.jl index 159f4bce1..18b0a9875 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -38,7 +38,10 @@ using Zygote: Grads x, y = [1,2], [1] ps = Params([x, y]) @test length.(ps) == length.([x, y]) # 617 + @test size.(ps, 1) == [2, 1] @test all(Params([[1,1]]) .== Params([[1,1]])) + + @test_throws ArgumentError gradient(() -> sum(sum.(ps)), ps) end @testset "indexing" begin From 6bdc92a88baad913365697acb1350f49ff9a55c8 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 11 Jul 2021 22:35:45 -0400 Subject: [PATCH 133/490] extend Mutating arrays is not supported messages --- src/lib/array.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index aaf122cb4..48092191f 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -73,14 +73,14 @@ _droplike(dy::Union{LinearAlgebra.Adjoint, LinearAlgebra.Transpose}, dxv::Abstra @adjoint getindex(::Type{T}, xs...) where {T} = T[xs...], dy -> (nothing, dy...) @adjoint! setindex!(xs::AbstractArray, x...) = setindex!(xs, x...), - _ -> error("Mutating arrays is not supported") + _ -> error("Mutating arrays is not supported -- called setindex!(::$(typeof(xs)), ...)") @adjoint! copyto!(args...) = copyto!(args...), - _ -> error("Mutating arrays is not supported") + _ -> error("Mutating arrays is not supported -- called copyto!(::$(typeof(xs)), ...)") for f in [push!, pop!, pushfirst!, popfirst!] @eval @adjoint! $f(xs, x...) = - push!(xs, x...), _ -> error("Mutating arrays is not supported") + push!(xs, x...), _ -> error("Mutating arrays is not supported -- called $f(::$(typeof(xs)), ...)") end # This is kind of bad, but at least we don't materialize the whole From 6ffb0d1fd4ba19a12825e79446be1f0cb4f1ffa0 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 11 Jul 2021 22:47:19 -0400 Subject: [PATCH 134/490] remove "Flux-style models" comment --- docs/src/index.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/src/index.md b/docs/src/index.md index 36bf0ae8f..1eec9768f 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -153,7 +153,9 @@ Dict{Any,Any} with 2 entries: :W => [0.628998 … 0.433006] ``` -An extension of this is the Flux-style model in which we use call overloading to combine the weight object with the pullback pass (equivalent to a closure). 
+We can combine the role of the dictionary and the function here by making a callable struct which +contains the parameters, equivalent to a closure. Passed explicitly to `gradient`, we get a named tuple +with the same field names: ```julia julia> struct Linear @@ -170,7 +172,7 @@ julia> dmodel = gradient(model -> sum(model(x)), model)[1] (W = [0.652543 … 0.683588], b = [1.0, 1.0]) ``` -Zygote also support another way to take gradients, via *implicit parameters*. Here the loss function takes zero arguments, but the variables of interest are indicated by a special `Params` object. The function `linear` which depends on `W` and `b` is executed when the loss function `() -> sum(linear(x))` is called, and hence this dependence is visible to Zygote: +Zygote also supports another way to take gradients, via *implicit parameters*. Here the loss function takes zero arguments, but the variables of interest are indicated by a special `Params` object. The function `linear` which depends on `W` and `b` is executed when the loss function `() -> sum(linear(x))` is called, and hence this dependence is visible to Zygote: ```julia julia> W = rand(2, 5); b = rand(2); From 8f659c8a26e50472c5f67a2c2c686f84d555e3eb Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 11 Jul 2021 23:06:01 -0400 Subject: [PATCH 135/490] double-quote --- src/lib/array.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 48092191f..246f12dff 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -80,7 +80,7 @@ _droplike(dy::Union{LinearAlgebra.Adjoint, LinearAlgebra.Transpose}, dxv::Abstra for f in [push!, pop!, pushfirst!, popfirst!] @eval @adjoint! $f(xs, x...) = - push!(xs, x...), _ -> error("Mutating arrays is not supported -- called $f(::$(typeof(xs)), ...)") + push!(xs, x...), _ -> error("Mutating arrays is not supported -- called $($f)(::$(typeof(xs)), ...)") end # This is kind of bad, but at least we don't materialize the whole From 98f4590e3c9dd7db2335ae039c4548ca80bbbb5f Mon Sep 17 00:00:00 2001 From: Shuhei Kadowaki Date: Mon, 12 Jul 2021 16:36:01 +0900 Subject: [PATCH 136/490] make sure to throw explicit `CompileError` The previous error handling is user-unfriendly IMHO. > before ```julia julia> gradient(pow_try, 1) ERROR: MethodError: no method matching iterate(::ErrorException) Closest candidates are: iterate(::Union{LinRange, StepRangeLen}) at range.jl:806 iterate(::Union{LinRange, StepRangeLen}, ::Int64) at range.jl:806 iterate(::T) where T<:Union{Base.KeySet{<:Any, <:Dict}, Base.ValueIterator{<:Dict}} at dict.jl:695 ... Stacktrace: [1] indexed_iterate(I::ErrorException, i::Int64) @ Base ./tuple.jl:91 [2] #s3061#1245 @ ~/julia/packages/Zygote/src/compiler/interface2.jl:34 [inlined] ... ``` > after ```julia julia> gradient(pow_try, 1) ERROR: Compiling Tuple{typeof(pow_try), Int64}: try/catch is not supported. Stacktrace: [1] error(s::String) @ Base ./error.jl:33 [2] instrument(ir::IRTools.Inner.IR) @ Zygote ~/julia/packages/Zygote/src/compiler/reverse.jl:121 ... 
``` --- src/compiler/interface2.jl | 8 ++++++-- test/features.jl | 3 ++- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/src/compiler/interface2.jl b/src/compiler/interface2.jl index 0f7da4b32..bf3692a30 100644 --- a/src/compiler/interface2.jl +++ b/src/compiler/interface2.jl @@ -24,7 +24,11 @@ end T = Tuple{f,args...} ignore_sig(T) && return :(f(args...), Pullback{$T}(())) - g = try _generate_pullback_via_decomposition(T) catch e e end + g = try + _generate_pullback_via_decomposition(T) + catch e + rethrow(CompileError(T,e)) + end g === nothing && return :(f(args...), Pullback{$T}((f,))) meta, forw, _ = g argnames!(meta, Symbol("#self#"), :ctx, :f, :args) @@ -38,7 +42,7 @@ end @generated function (j::Pullback{T})(Δ) where T ignore_sig(T) && return :nothing - g = try + g = try _generate_pullback_via_decomposition(T) catch e rethrow(CompileError(T,e)) diff --git a/test/features.jl b/test/features.jl index b1434aac7..766673350 100644 --- a/test/features.jl +++ b/test/features.jl @@ -397,6 +397,7 @@ function pow_try(x) end @test_broken gradient(pow_try, 1) == (2,) +@test_throws Zygote.CompileError gradient(pow_try, 1) function pow_simd(x, n) r = 1 @@ -508,7 +509,7 @@ end @test gradient(x -> sum(x .+ ones(2,2)), (1,2)) == ((2,2),) @test gradient(x -> sum(x .+ ones(2,2)), (1,)) == ((4,),) @test gradient(x -> sum(x .+ ones(2,1)), (1,2)) == ((1,1),) - + # https://github.com/FluxML/Zygote.jl/issues/975 gt = gradient((x,p) -> prod(x .^ p), [3,4], (1,2)) gv = gradient((x,p) -> prod(x .^ p), [3,4], [1,2]) From aa44b5f5f9245d75d0273257ffbf100466726002 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Mon, 12 Jul 2021 20:53:16 -0400 Subject: [PATCH 137/490] tweak --- src/compiler/interface.jl | 2 +- src/lib/array.jl | 8 ++++---- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 3ba61ad0c..2d9cdce11 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -200,7 +200,7 @@ function Base.delete!(ps::Params, x) end Base.Broadcast.broadcasted(f, ps::Params) = broadcasted(f, ps.order) -# Broadcast.broadcastable(ps::Params) = ps.order +Base.Broadcast.broadcastable(ps::Params) = ps.order @adjoint function Broadcast.broadcasted(f::Function, ps::Params) f.(ps), _ -> throw(ArgumentError("Zygote.Params does not support broadcasting within gradients, try iteration `for p in ps`")) diff --git a/src/lib/array.jl b/src/lib/array.jl index 246f12dff..d5f9557a4 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -73,14 +73,14 @@ _droplike(dy::Union{LinearAlgebra.Adjoint, LinearAlgebra.Transpose}, dxv::Abstra @adjoint getindex(::Type{T}, xs...) where {T} = T[xs...], dy -> (nothing, dy...) @adjoint! setindex!(xs::AbstractArray, x...) = setindex!(xs, x...), - _ -> error("Mutating arrays is not supported -- called setindex!(::$(typeof(xs)), ...)") + _ -> error("Mutating arrays is not supported -- called setindex!(::$(typeof(xs)), _...)") @adjoint! copyto!(args...) = copyto!(args...), - _ -> error("Mutating arrays is not supported -- called copyto!(::$(typeof(xs)), ...)") + _ -> error("Mutating arrays is not supported -- called copyto!(::$(typeof(xs)), _...)") for f in [push!, pop!, pushfirst!, popfirst!] - @eval @adjoint! $f(xs, x...) = - push!(xs, x...), _ -> error("Mutating arrays is not supported -- called $($f)(::$(typeof(xs)), ...)") + @eval @adjoint! $f(xs, x...) 
= $f(xs, x...), + _ -> error("Mutating arrays is not supported -- called $($f)(::$(typeof(xs)), _...)") end # This is kind of bad, but at least we don't materialize the whole From d7cd2ec8be5784bae32ef6276acb5fe6628d2397 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Mon, 12 Jul 2021 21:02:02 -0400 Subject: [PATCH 138/490] tweak --- src/compiler/interface.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 2d9cdce11..6ca8257d5 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -200,7 +200,6 @@ function Base.delete!(ps::Params, x) end Base.Broadcast.broadcasted(f, ps::Params) = broadcasted(f, ps.order) -Base.Broadcast.broadcastable(ps::Params) = ps.order @adjoint function Broadcast.broadcasted(f::Function, ps::Params) f.(ps), _ -> throw(ArgumentError("Zygote.Params does not support broadcasting within gradients, try iteration `for p in ps`")) From 7d40e94a20a31b8df3dc7a4d2614905f84659614 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 13 Jul 2021 20:21:07 -0400 Subject: [PATCH 139/490] v0.6.15 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 6cd2fccdc..8c7df235d 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.14" +version = "0.6.15" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From b33920efbe37acbc2507846a642b97628c4a1b76 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Wed, 14 Jul 2021 18:14:13 +0530 Subject: [PATCH 140/490] fix resolve message --- src/lib/broadcast.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 57bb7b38f..cdcd21547 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -247,7 +247,6 @@ end end @init @require CUDA="052768ef-5323-5732-b1bb-66c8b64840ba" begin - using CUDA const CuArrayStyle = CUDA.AbstractGPUArrayStyle if isdefined(CUDA, :cufunc) From 4d22edf4a27db0302badb12e1faec6d5ee31fd9d Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Wed, 14 Jul 2021 18:35:34 +0200 Subject: [PATCH 141/490] Update Project.toml --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 8c7df235d..986d0c10a 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.15" +version = "0.6.16" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 9203ea8ac035a257f4f8f3c7769581d8e6242c64 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Mon, 19 Jul 2021 10:42:07 +0200 Subject: [PATCH 142/490] construct Params with empty tuple --- Project.toml | 2 +- src/tools/idset.jl | 16 ++++++++++------ test/interface.jl | 5 +++++ 3 files changed, 16 insertions(+), 7 deletions(-) diff --git a/Project.toml b/Project.toml index 986d0c10a..e209009ad 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.16" +version = "0.6.17" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/src/tools/idset.jl b/src/tools/idset.jl index d9f0ceb04..9f3566699 100644 --- a/src/tools/idset.jl +++ b/src/tools/idset.jl @@ -3,18 +3,22 @@ struct IdSet{T} <: AbstractSet{T} IdSet{T}() where T = new(IdDict{T,Nothing}()) end -Base.eltype(::IdSet{T}) where T 
= T +IdSet(xs) = IdSet{eltype(xs)}(xs) IdSet() = IdSet{Any}() +function IdSet{T}(xs) where T + s = IdSet{T}() + for x in xs + push!(s, x) + end + return s +end + Base.push!(s::IdSet{T}, x::T) where T = (s.dict[x] = nothing; s) Base.delete!(s::IdSet{T}, x::T) where T = (delete!(s.dict, x); s) Base.in(x, s::IdSet) = haskey(s.dict, x) - -IdSet{T}(xs) where T = push!(IdSet{T}(), xs...) - -IdSet(xs) = IdSet{eltype(xs)}(xs) - +Base.eltype(::IdSet{T}) where T = T Base.collect(s::IdSet) = Base.collect(keys(s.dict)) Base.similar(s::IdSet, T::Type) = IdSet{T}() diff --git a/test/interface.jl b/test/interface.jl index 18b0a9875..ff45e7ebd 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -82,6 +82,11 @@ using Zygote: Grads @test ps isa Params @test issetequal(ps, Set([y])) end + + @testset "constructor with empty args" begin + @test length(Params()) == 0 + @test length(Params(())) == 0 + end end @testset "Grads" begin From 3df54fbc7d6d2d4fc68a1c190ef047fcc933d832 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Mon, 19 Jul 2021 11:00:21 +0200 Subject: [PATCH 143/490] use splatting --- src/tools/idset.jl | 8 +------- 1 file changed, 1 insertion(+), 7 deletions(-) diff --git a/src/tools/idset.jl b/src/tools/idset.jl index 9f3566699..d8a600992 100644 --- a/src/tools/idset.jl +++ b/src/tools/idset.jl @@ -7,13 +7,7 @@ IdSet(xs) = IdSet{eltype(xs)}(xs) IdSet() = IdSet{Any}() -function IdSet{T}(xs) where T - s = IdSet{T}() - for x in xs - push!(s, x) - end - return s -end +IdSet{T}(xs) = isempty(xs) ? IdSet{T}() : push!(IdSet{T}(), xs...) Base.push!(s::IdSet{T}, x::T) where T = (s.dict[x] = nothing; s) Base.delete!(s::IdSet{T}, x::T) where T = (delete!(s.dict, x); s) From 8467723a14aef3b23fa0f6dc417b6c991ed66810 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Mon, 19 Jul 2021 11:02:04 +0200 Subject: [PATCH 144/490] use splatting --- src/tools/idset.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/tools/idset.jl b/src/tools/idset.jl index d8a600992..a0aa93df0 100644 --- a/src/tools/idset.jl +++ b/src/tools/idset.jl @@ -7,7 +7,7 @@ IdSet(xs) = IdSet{eltype(xs)}(xs) IdSet() = IdSet{Any}() -IdSet{T}(xs) = isempty(xs) ? IdSet{T}() : push!(IdSet{T}(), xs...) +IdSet{T}(xs) where T = isempty(xs) ? IdSet{T}() : push!(IdSet{T}(), xs...) Base.push!(s::IdSet{T}, x::T) where T = (s.dict[x] = nothing; s) Base.delete!(s::IdSet{T}, x::T) where T = (delete!(s.dict, x); s) From a2a0393959239859b7673a5fc2177c2aecaaeabb Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Mon, 19 Jul 2021 11:02:53 +0200 Subject: [PATCH 145/490] more tests --- test/interface.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/test/interface.jl b/test/interface.jl index ff45e7ebd..0bee98321 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -86,6 +86,7 @@ using Zygote: Grads @testset "constructor with empty args" begin @test length(Params()) == 0 @test length(Params(())) == 0 + @test length(Params([])) == 0 end end From 8ce8b32e52976340d81b70b40dc429a7f9a258d8 Mon Sep 17 00:00:00 2001 From: Christopher Rackauckas Date: Mon, 19 Jul 2021 08:13:19 -0400 Subject: [PATCH 146/490] Add some downstream testing Adds downstream testing to CI, with a few application packages that should flex the API a bit. 
--- .github/workflows/Downstream.yml | 55 ++++++++++++++++++++++++++++++++ 1 file changed, 55 insertions(+) create mode 100644 .github/workflows/Downstream.yml diff --git a/.github/workflows/Downstream.yml b/.github/workflows/Downstream.yml new file mode 100644 index 000000000..6565e82d5 --- /dev/null +++ b/.github/workflows/Downstream.yml @@ -0,0 +1,55 @@ + +name: IntegrationTest +on: + push: + branches: [master] + tags: [v*] + pull_request: + +jobs: + test: + name: ${{ matrix.package.repo }}/${{ matrix.package.group }} + runs-on: ${{ matrix.os }} + env: + GROUP: ${{ matrix.package.group }} + strategy: + fail-fast: false + matrix: + julia-version: [1] + os: [ubuntu-latest] + package: + - {user: FluxML, repo: Flux.jl, group: All} + - {user: FluxML, repo: NNlib.jl, group: All} + - {user: FluxML, repo: FastAI.jl, group: All} + - {user: FluxML, repo: GeometricFlux.jl, group: All} + - {user: SciML, repo: DiffEqFlux.jl, group: Layers} + - {user: SciML, repo: NeuralPDE.jl, group: NNPDE} + steps: + - uses: actions/checkout@v2 + - uses: julia-actions/setup-julia@v1 + with: + version: ${{ matrix.julia-version }} + arch: x64 + - uses: julia-actions/julia-buildpkg@latest + - name: Clone Downstream + uses: actions/checkout@v2 + with: + repository: ${{ matrix.package.user }}/${{ matrix.package.repo }} + path: downstream + - name: Load this and run the downstream tests + shell: julia --color=yes --project=downstream {0} + run: | + using Pkg + try + # force it to use this PR's version of the package + Pkg.develop(PackageSpec(path=".")) # resolver may fail with main deps + Pkg.update() + Pkg.test() # resolver may fail with test time deps + catch err + err isa Pkg.Resolve.ResolverError || rethrow() + # If we can't resolve that means this is incompatible by SemVer and this is fine + # It means we marked this as a breaking change, so we don't need to worry about + # Mistakenly introducing a breaking change, as we have intentionally made one + @info "Not compatible with this release. No problem." 
exception=err + exit(0) # Exit immediately, as a success + end From 05cebdc8badf4a145bb97ef386f806cefb21f572 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Tue, 20 Jul 2021 13:25:25 +0100 Subject: [PATCH 147/490] Add support for ChainRules optout --- src/compiler/chainrules.jl | 53 ++++++++++++++++++++++++++++---------- test/chainrules.jl | 30 +++++++++++++++++++++ 2 files changed, 69 insertions(+), 14 deletions(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index c4e72f07e..3698f21fb 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -4,8 +4,7 @@ end ZygoteRuleConfig() = ZygoteRuleConfig(Context()) -const rrule_fallback_method = Base.which(rrule, Tuple{Any, Vararg{Any}}) -const rrule_redispatcher_method = Base.which(rrule, Tuple{RuleConfig, Any, Vararg{Any}}) +_is_rrule_redispatcher(m::Method) = m.sig == Tuple{typeof(rrule), RuleConfig, Vararg} """ has_chain_rrule(T) @@ -18,19 +17,45 @@ such that if a suitable rule is defined later, the generated function will recom """ function has_chain_rrule(T) config_T, arg_Ts = Iterators.peel(T.parameters) - m_with_config = meta(Tuple{typeof(rrule), config_T, arg_Ts...}) - if m_with_config.method === rrule_redispatcher_method - # it is being redispatched without config, so check it that hits the fallback - m_without_config = meta(Tuple{typeof(rrule), arg_Ts...}) - if m_without_config.method === rrule_fallback_method - # no rrule exists, return instance for m_with_config as that will be invalidated - # directly if configured rule added, or indirectly if unconfigured rule added - return false, m_with_config.instance - end + configured_rrule_m = meta(Tuple{typeof(rrule), config_T, arg_Ts...}) + if _is_rrule_redispatcher(configured_rrule_m.method) + # it is being redispatched without config, so get the method it redispatches to + rrule_m = meta(Tuple{typeof(rrule), arg_Ts...}) + no_rrule_m = meta(Tuple{typeof(ChainRulesCore.no_rrule), arg_Ts...}) + else + # Not being redispatched + rrule_m = configured_rrule_m + no_rrule_m = meta(Tuple{typeof(ChainRulesCore.no_rrule), config_T, arg_Ts...}) + end + + do_not_use_rrule = matching_cr_sig(no_rrule_m, rrule_m) + if do_not_use_rrule + # return instance for configured_rrule_m as that will be invalidated + # directly if configured rule added, or indirectly if unconfigured rule added + # Do not need an edge for `no_rrule` as no addition of methods to that can cause this + # decision to need to be revisited (only changes to `rrule`), since we are already not + # using the rrule, so not using more rules wouldn't change anything + return false, configured_rrule_m.instance + else + # otherwise found a rrule, no need to add any edges for `rrule`, as it will generate + # code with natural edges if a new method is defined there. + # We also do not need an edge to `no_rrule`, as any time a method is added to `no_rrule` + # a corresponding method is added to `rrule` (to return `nothing`), thus we will already + # be revisiting this decision when a new opt-out is added + return true, nothing end - # otherwise found a rrule, no need to add any edges, as it will generate code with - # natural edges. 
- return true, nothing +end + +matching_cr_sig(t, s) = matching_cr_sig(t.method.sig, s.method.sig) +matching_cr_sig(::DataType, ::UnionAll) = false +matching_cr_sig(::UnionAll, ::DataType) = false +matching_cr_sig(t::Type, s::Type) = type_tuple_tail(t) == type_tuple_tail(s) + +type_tuple_tail(d::DataType) = Tuple{d.parameters[2:end]...} +function type_tuple_tail(d::UnionAll) + body = Base.unwrap_unionall(d) + body_tt = type_tuple_tail(body) + return Base.rewrap_unionall(body_tt, d) end """ diff --git a/test/chainrules.jl b/test/chainrules.jl index 32bdd3799..3fb323e69 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -232,6 +232,36 @@ using ChainRulesCore, ChainRulesTestUtils, Zygote aug_primal_val, _ = Zygote.pullback(x->StructForTestingTypeOnlyRRules(), 1.2) @test aug_primal_val.x == 2.0 end + + @testset "@opt_out" begin + oa_id(x) = x + oa_id_rrule_hitcount = Ref(0) + function ChainRulesCore.rrule(::typeof(oa_id), x::Any) + oa_id_rrule_hitcount[] += 1 + oa_id_pullback(ȳ) = (NoTangent(), ȳ) + return oa_id(x), oa_id_pullback + end + + @opt_out ChainRulesCore.rrule(::typeof(oa_id), x::AbstractArray) + + # Hit one we haven't opted out + oa_id_rrule_hitcount[] = 0 + oa_id_outer(x) = sum(oa_id(x)) + @test (1.0,) == Zygote.gradient(oa_id_outer, π) + @test oa_id_rrule_hitcount[] == 1 + + # make sure don't hit the one we have opted out + oa_id_rrule_hitcount[] = 0 + @test ([1.0],) == Zygote.gradient(oa_id_outer, [π]) + @test oa_id_rrule_hitcount[] == 0 + + # Now try opting out After we have already used it + @opt_out ChainRulesCore.rrule(::typeof(oa_id), x::Real) + oa_id_rrule_hitcount[] = 0 + oa_id_outer(x) = sum(oa_id(x)) + @test (1.0,) == Zygote.gradient(oa_id_outer, π) + @test oa_id_rrule_hitcount[] == 0 + end end @testset "ChainRulesCore.rrule_via_ad" begin From 06aaae2dc8964154ade4fc2f6565d83f4bef77a9 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Wed, 21 Jul 2021 14:36:06 +0100 Subject: [PATCH 148/490] More explain how has_chain_rrule works --- src/compiler/chainrules.jl | 35 +++++++++++++++++++++++++++++------ 1 file changed, 29 insertions(+), 6 deletions(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 3698f21fb..6d43946bc 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -19,29 +19,52 @@ function has_chain_rrule(T) config_T, arg_Ts = Iterators.peel(T.parameters) configured_rrule_m = meta(Tuple{typeof(rrule), config_T, arg_Ts...}) if _is_rrule_redispatcher(configured_rrule_m.method) - # it is being redispatched without config, so get the method it redispatches to + # The config is not being used: + # it is being redispatched without config, so we need the method it redispatches to rrule_m = meta(Tuple{typeof(rrule), arg_Ts...}) + # Thus any no_rrule that might apply must also not have a config because if there was a + # no_rrule with a config that applied then there would also be a rrule with config that applied no_rrule_m = meta(Tuple{typeof(ChainRulesCore.no_rrule), arg_Ts...}) else - # Not being redispatched + # Not being redispatched: it does have a config rrule_m = configured_rrule_m + # Thus any no_rrule that might apply must also have a config because if it applied + # it will be identical, and if it doesn't we don't care what it is. 
no_rrule_m = meta(Tuple{typeof(ChainRulesCore.no_rrule), config_T, arg_Ts...}) end + # To understand why we only need to check if the sigs match between no_rrule_m and rrule_m + # in order to decide if to use, one must consider the following facts: + # - for every method in `no_rrule` there is a identical one in `rrule` that returns nothing + # - this includes the general fallback `rrule(::Any...)=nothing`. + # - a configured rrule/no_rrule is always more specific than a otherwise equivalent unconfigured rrule/no_rrule + # + # Consider the following truth table, for what can occur: + # rrule: fallback, no_rrule: fallback => matches => do not use rrule. + # rrule: specific, no_rrule: fallback => !matches => do use rrule, as haven't opted out. + # rrule: fallback, no_rrule: specific => IMPOSSIBLE, every no_rule us identical to some rrule + # rrule: specific, no_rrule: specific => matches => do not use rrule as opted out + # rrule: specific, no_rrule: general => !matches => do use rrule as a more specific rrule takes preciedent over more general opted out + # rrule: general , no_rrule: specific => IMPOSSIBLE, every no_rule us identical to some rrule so can't have a more general rrule being hit, as the specific one would hit first + # + # Note that the fallback cases are the same outcome as the general cases as fallback is just most general. + # It can be seen that checking if it matches is the correct way to decide if we should ue the rrule or not. + + do_not_use_rrule = matching_cr_sig(no_rrule_m, rrule_m) if do_not_use_rrule - # return instance for configured_rrule_m as that will be invalidated + # Return instance for configured_rrule_m as that will be invalidated # directly if configured rule added, or indirectly if unconfigured rule added # Do not need an edge for `no_rrule` as no addition of methods to that can cause this # decision to need to be revisited (only changes to `rrule`), since we are already not - # using the rrule, so not using more rules wouldn't change anything + # using the rrule, so not using more rules wouldn't change anything. return false, configured_rrule_m.instance else - # otherwise found a rrule, no need to add any edges for `rrule`, as it will generate + # Otherwise found a rrule, no need to add any edges for `rrule`, as it will generate # code with natural edges if a new method is defined there. # We also do not need an edge to `no_rrule`, as any time a method is added to `no_rrule` # a corresponding method is added to `rrule` (to return `nothing`), thus we will already - # be revisiting this decision when a new opt-out is added + # be revisiting this decision when a new opt-out is added. return true, nothing end end From 9e18f12ad89fe676e6a2e9913290f31c4162f7da Mon Sep 17 00:00:00 2001 From: Sheehan Olver Date: Wed, 21 Jul 2021 14:42:16 +0100 Subject: [PATCH 149/490] Fix UndefVarError --- src/lib/array.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index d5f9557a4..1ad7eb6a7 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -75,7 +75,7 @@ _droplike(dy::Union{LinearAlgebra.Adjoint, LinearAlgebra.Transpose}, dxv::Abstra @adjoint! setindex!(xs::AbstractArray, x...) = setindex!(xs, x...), _ -> error("Mutating arrays is not supported -- called setindex!(::$(typeof(xs)), _...)") -@adjoint! copyto!(args...) = copyto!(args...), +@adjoint! copyto!(xs, args...) 
= copyto!(xs, args...), _ -> error("Mutating arrays is not supported -- called copyto!(::$(typeof(xs)), _...)") for f in [push!, pop!, pushfirst!, popfirst!] From f878cf7cc1c96444d27db45c4e2b4277fde9848c Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Wed, 21 Jul 2021 17:59:53 +0200 Subject: [PATCH 150/490] fix copyto! error message --- src/lib/array.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index d5f9557a4..a9a096b8e 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -76,7 +76,7 @@ _droplike(dy::Union{LinearAlgebra.Adjoint, LinearAlgebra.Transpose}, dxv::Abstra _ -> error("Mutating arrays is not supported -- called setindex!(::$(typeof(xs)), _...)") @adjoint! copyto!(args...) = copyto!(args...), - _ -> error("Mutating arrays is not supported -- called copyto!(::$(typeof(xs)), _...)") + _ -> error("Mutating arrays is not supported -- called copyto!(::$(typeof(args))..., _...)") for f in [push!, pop!, pushfirst!, popfirst!] @eval @adjoint! $f(xs, x...) = $f(xs, x...), From dbde9a541ca5579518ed6220282c1e8242a4953b Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Fri, 23 Jul 2021 11:34:30 +0200 Subject: [PATCH 151/490] fix convert --- src/compiler/chainrules.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 6d43946bc..3e1745c10 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -111,7 +111,7 @@ for T_outer in (:Tuple, :NamedTuple) # than happy. @eval @inline function wrap_chainrules_output(x::ChainRules.Tangent{P, T}) where {P, T<:$T_outer} xp = map(wrap_chainrules_output, canonicalize(x)) - convert($T_outer, xp) + ChainRulesCore.backing(xp) end end From aecec9be88003620e2e66675e6c155be9a191a90 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Fri, 23 Jul 2021 11:35:44 +0200 Subject: [PATCH 152/490] fix tests which were wrong to accommodate no projection --- test/complex.jl | 2 +- test/features.jl | 4 ++-- test/gradcheck.jl | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/test/complex.jl b/test/complex.jl index 54f99fd0f..6a0445b85 100644 --- a/test/complex.jl +++ b/test/complex.jl @@ -18,7 +18,7 @@ using Zygote, Test, LinearAlgebra @test gradient(x -> real(logabsdet(x)[1]), [1 2im; 3im 4])[1] ≈ [4 3im; 2im 1]/10 # https://github.com/FluxML/Zygote.jl/issues/705 -@test gradient(x -> imag(sum(exp, x)), [1,2,3])[1] ≈ im .* exp.(1:3) +@test gradient(x -> imag(sum(exp, x)), [1,2,3])[1] ≈ real(im .* exp.(1:3)) @test gradient(x -> imag(sum(exp, x)), [1+0im,2,3])[1] ≈ im .* exp.(1:3) fs_C_to_R = (real, diff --git a/test/features.jl b/test/features.jl index 766673350..b17f55b41 100644 --- a/test/features.jl +++ b/test/features.jl @@ -449,12 +449,12 @@ end @test pullback(type_test)[1] == Complex{<:Real} @testset "Pairs" begin - @test (x->10*pairs((a=x, b=2))[1])'(100) === 10 + @test (x->10*pairs((a=x, b=2))[1])'(100) === 10.0 @test (x->10*pairs((a=x, b=2))[2])'(100) === 0 foo(;kw...) 
= 1 @test gradient(() -> foo(a=1,b=2.0)) === () - @test (x->10*(x => 2)[1])'(100) === 10 + @test (x->10*(x => 2)[1])'(100) === 10.0 @test (x->10*(x => 2)[2])'(100) === 0 end diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 08dfd45db..9ffd04260 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -81,7 +81,7 @@ end @test gradient(xs ->sum(xs .^ _pow), [4, -1]) == ([_pow*4^9, -10],) @test gradient(x -> real((1+3im) * x^2), 5+7im) == (-32 - 44im,) - @test gradient(p -> real((1+3im) * (5+7im)^p), 2)[1] ≈ (-234 + 2im)*log(5 - 7im) + @test gradient(p -> real((1+3im) * (5+7im)^p), 2)[1] ≈ real((-234 + 2im)*log(5 - 7im)) # D[(1+3I)x^p, p] /. {x->5+7I, p->2} // Conjugate end @@ -160,7 +160,7 @@ end # https://github.com/FluxML/Zygote.jl/issues/376 _, back = Zygote._pullback(x->x[1]*im, randn(2)) - @test back(1.0)[2] == [-im, 0] + @test back(1.0)[2] == real([-im, 0]) # _droplike @test gradient(x -> sum(inv, x[1, :]'), ones(2, 2)) == ([-1 -1; 0 0],) From 47d09d8ee6df2b3cfe17e9a6153e0464d81948e8 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Fri, 23 Jul 2021 12:49:42 +0200 Subject: [PATCH 153/490] add dependency on the PR that fixes the Dual error --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index e209009ad..9ac8ab690 100644 --- a/Project.toml +++ b/Project.toml @@ -28,7 +28,7 @@ ChainRulesCore = "0.10.4" ChainRulesTestUtils = "0.7.1" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12" -ForwardDiff = "0.10" +ForwardDiff = "0.10.20" IRTools = "0.4" MacroTools = "0.5" NaNMath = "0.3" From 6ae2ad5de0372c6d40ff2528c44ae7f1224d6e3a Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Fri, 23 Jul 2021 12:50:22 +0200 Subject: [PATCH 154/490] compat to CRC 1.0 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 9ac8ab690..c8d96bd7e 100644 --- a/Project.toml +++ b/Project.toml @@ -24,7 +24,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" ChainRules = "0.8.12" -ChainRulesCore = "0.10.4" +ChainRulesCore = "1" ChainRulesTestUtils = "0.7.1" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12" From 26286fc64c48149bb7a6c0b6e1108147ad1e3a5b Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Fri, 23 Jul 2021 13:31:51 +0200 Subject: [PATCH 155/490] mark ForwardDiff test broken --- Project.toml | 2 +- test/utils.jl | 13 +++++++++---- 2 files changed, 10 insertions(+), 5 deletions(-) diff --git a/Project.toml b/Project.toml index c8d96bd7e..edcbfc5ed 100644 --- a/Project.toml +++ b/Project.toml @@ -28,7 +28,7 @@ ChainRulesCore = "1" ChainRulesTestUtils = "0.7.1" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12" -ForwardDiff = "0.10.20" +ForwardDiff = "0.10" IRTools = "0.4" MacroTools = "0.5" NaNMath = "0.3" diff --git a/test/utils.jl b/test/utils.jl index 9a3d83ea5..3b461f82b 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -24,10 +24,15 @@ end xs, y = randn(2,3), rand() f34(xs, y) = xs[1] * (sum(xs .^ (1:3)') + y^4) # non-diagonal Hessian, two arguments - dx, dy = diaghessian(f34, xs, y) - @test size(dx) == size(xs) - @test vec(dx) ≈ diag(hessian(x -> f34(x,y), xs)) - @test dy ≈ hessian(y -> f34(xs,y), y) + + function broken() + dx, dy = diaghessian(f34, xs, y) # This fails becase ProjectTo can't project a Dual onto a Float + c1 = size(dx) == size(xs) + c2 = vec(dx) ≈ diag(hessian(x -> f34(x,y), xs)) + c3 = dy ≈ hessian(y -> f34(xs,y), y) + return all([c1, c2, c3]) + end + @test_broken broken() zs = randn(7,13) # 
test chunk mode @test length(zs) > ForwardDiff.DEFAULT_CHUNK_THRESHOLD From b89989a6a99cdbcf34fa777b4e148257e418808d Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Fri, 23 Jul 2021 13:49:55 +0100 Subject: [PATCH 156/490] Apply suggestions from code review --- src/compiler/chainrules.jl | 2 +- test/utils.jl | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 3e1745c10..99d907299 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -111,7 +111,7 @@ for T_outer in (:Tuple, :NamedTuple) # than happy. @eval @inline function wrap_chainrules_output(x::ChainRules.Tangent{P, T}) where {P, T<:$T_outer} xp = map(wrap_chainrules_output, canonicalize(x)) - ChainRulesCore.backing(xp) + ChainRulesCore.backing(xp) # this is accessing ChainRulesCore internals, but it is prob safe enough, and it is fastest end end diff --git a/test/utils.jl b/test/utils.jl index 3b461f82b..ee01fa7ed 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -25,6 +25,16 @@ end xs, y = randn(2,3), rand() f34(xs, y) = xs[1] * (sum(xs .^ (1:3)') + y^4) # non-diagonal Hessian, two arguments + # Follow is should work ones we workout what ForwardDiff should do when `Float64` is called on a `Dual` + # https://github.com/JuliaDiff/ForwardDiff.jl/pull/538 + # else might need a custom overload of `(;;ChainRulesCore.ProjectTo)(::Dual)` + # When fixed uncomment the below and delete the broken function + #== + dx, dy = diaghessian(f34, xs, y) + @test size(dx) == size(xs) + @test vec(dx) ≈ diag(hessian(x -> f34(x,y), xs)) + @test dy ≈ hessian(y -> f34(xs,y), y) + ==# function broken() dx, dy = diaghessian(f34, xs, y) # This fails becase ProjectTo can't project a Dual onto a Float c1 = size(dx) == size(xs) From 91536890929d2398539ef382db4c100c4d45175f Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Fri, 23 Jul 2021 13:50:50 +0100 Subject: [PATCH 157/490] Apply suggestions from code review --- test/gradcheck.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 9ffd04260..eab959ddd 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -160,7 +160,7 @@ end # https://github.com/FluxML/Zygote.jl/issues/376 _, back = Zygote._pullback(x->x[1]*im, randn(2)) - @test back(1.0)[2] == real([-im, 0]) + @test back(1.0)[2] == real([-im, 0]) == [0, 0] # _droplike @test gradient(x -> sum(inv, x[1, :]'), ones(2, 2)) == ([-1 -1; 0 0],) From e30fcf6196681ac410fce00db3300925d7f821fd Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Fri, 23 Jul 2021 19:58:08 +0100 Subject: [PATCH 158/490] rename test out out function --- test/chainrules.jl | 37 ++++++++++++++++++------------------- 1 file changed, 18 insertions(+), 19 deletions(-) diff --git a/test/chainrules.jl b/test/chainrules.jl index 3fb323e69..1ab2edc5a 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -234,33 +234,32 @@ using ChainRulesCore, ChainRulesTestUtils, Zygote end @testset "@opt_out" begin - oa_id(x) = x - oa_id_rrule_hitcount = Ref(0) - function ChainRulesCore.rrule(::typeof(oa_id), x::Any) - oa_id_rrule_hitcount[] += 1 - oa_id_pullback(ȳ) = (NoTangent(), ȳ) - return oa_id(x), oa_id_pullback + oout_id(x) = x + oout_id_rrule_hitcount = Ref(0) + function ChainRulesCore.rrule(::typeof(oout_id), x::Any) + oout_id_rrule_hitcount[] += 1 + oout_id_pullback(ȳ) = (NoTangent(), ȳ) + return oout_id(x), oout_id_pullback end - @opt_out ChainRulesCore.rrule(::typeof(oa_id), x::AbstractArray) + @opt_out 
ChainRulesCore.rrule(::typeof(oout_id), x::AbstractArray) # Hit one we haven't opted out - oa_id_rrule_hitcount[] = 0 - oa_id_outer(x) = sum(oa_id(x)) - @test (1.0,) == Zygote.gradient(oa_id_outer, π) - @test oa_id_rrule_hitcount[] == 1 + oout_id_rrule_hitcount[] = 0 + oout_id_outer(x) = sum(oout_id(x)) + @test (1.0,) == Zygote.gradient(oout_id_outer, π) + @test oout_id_rrule_hitcount[] == 1 # make sure don't hit the one we have opted out - oa_id_rrule_hitcount[] = 0 - @test ([1.0],) == Zygote.gradient(oa_id_outer, [π]) - @test oa_id_rrule_hitcount[] == 0 + oout_id_rrule_hitcount[] = 0 + @test ([1.0],) == Zygote.gradient(oout_id_outer, [π]) + @test oout_id_rrule_hitcount[] == 0 # Now try opting out After we have already used it - @opt_out ChainRulesCore.rrule(::typeof(oa_id), x::Real) - oa_id_rrule_hitcount[] = 0 - oa_id_outer(x) = sum(oa_id(x)) - @test (1.0,) == Zygote.gradient(oa_id_outer, π) - @test oa_id_rrule_hitcount[] == 0 + @opt_out ChainRulesCore.rrule(::typeof(oout_id), x::Real) + oout_id_rrule_hitcount[] = 0 + @test (1.0,) == Zygote.gradient(oout_id_outer, π) + @test oout_id_rrule_hitcount[] == 0 end end From e1d481927d89004e2ff3a2fdaa11f8c6efc89cdc Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Fri, 23 Jul 2021 19:58:54 +0100 Subject: [PATCH 159/490] CR v1 --- Project.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Project.toml b/Project.toml index edcbfc5ed..2b595b150 100644 --- a/Project.toml +++ b/Project.toml @@ -23,9 +23,9 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" -ChainRules = "0.8.12" +ChainRules = "1" ChainRulesCore = "1" -ChainRulesTestUtils = "0.7.1" +ChainRulesTestUtils = "1" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12" ForwardDiff = "0.10" From 01b75a733930d3ab506419d05cca54a6f58dc492 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Sun, 25 Jul 2021 14:27:07 +0530 Subject: [PATCH 160/490] update CompatHelper script --- .github/workflows/CompatHelper.yml | 39 +++++++++++++++--------------- 1 file changed, 20 insertions(+), 19 deletions(-) diff --git a/.github/workflows/CompatHelper.yml b/.github/workflows/CompatHelper.yml index 0243c7062..1696bd76e 100644 --- a/.github/workflows/CompatHelper.yml +++ b/.github/workflows/CompatHelper.yml @@ -1,26 +1,27 @@ name: CompatHelper - on: schedule: - - cron: '00 * * * *' - issues: - types: [opened, reopened] - + - cron: 0 0 * * * + workflow_dispatch: jobs: - build: - runs-on: ${{ matrix.os }} - strategy: - matrix: - julia-version: [1.2.0] - julia-arch: [x86] - os: [ubuntu-latest] + CompatHelper: + runs-on: ubuntu-latest steps: - - uses: julia-actions/setup-julia@latest - with: - version: ${{ matrix.julia-version }} - - name: Pkg.add("CompatHelper") - run: julia -e 'using Pkg; Pkg.add("CompatHelper")' - - name: CompatHelper.main() + - name: "Install CompatHelper" + run: | + import Pkg + name = "CompatHelper" + uuid = "aa819f21-2bde-4658-8897-bab36330d9b7" + version = "2" + Pkg.add(; name, uuid, version) + shell: julia --color=yes {0} + - name: "Run CompatHelper" + run: | + import CompatHelper + CompatHelper.main() + shell: julia --color=yes {0} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: julia -e 'using CompatHelper; CompatHelper.main()' + COMPATHELPER_PRIV: ${{ secrets.DOCUMENTER_KEY }} + # COMPATHELPER_PRIV: ${{ secrets.COMPATHELPER_PRIV }} + From 13e277af1e123fbe1e21f567e7937a304ce4c86c Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Mon, 26 Jul 2021 18:47:26 +0100 Subject: [PATCH 161/490] Fix rrule_via_ad troubles --- 
src/compiler/chainrules.jl | 3 ++- test/chainrules.jl | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 99d907299..15653c47b 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -100,7 +100,8 @@ is_kwfunc(k, ::Type{<:NamedTuple}, f, args...) = k===Core.kwftype(f) Convert `x` from the differentials types ChainRules uses to the format Zygote uses internally. """ -@inline wrap_chainrules_output(x) = unthunk(x) # For now we are just not going to deal with thunks +@inline wrap_chainrules_output(x) = x +@inline wrap_chainrules_output(x::AbstractThunk) = wrap_chainrules_output(unthunk(x)) # For now we are just not going to deal with thunks @inline wrap_chainrules_output(x::Tuple) = map(wrap_chainrules_output, x) # Zygote convention: even if many AbstractZero partials (i.e. multi-input function), make just 1 nothing. @inline wrap_chainrules_output(x::Tuple{Vararg{ChainRules.AbstractZero}}) = nothing diff --git a/test/chainrules.jl b/test/chainrules.jl index 1ab2edc5a..30b758d04 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -304,7 +304,7 @@ end ZygoteRuleConfig(), my_namedtuple, 1., 2., 3.; rrule_f=rrule_via_ad ) test_rrule( - ZygoteRuleConfig(), my_namedtuple, 1., (2.0, "str"), 3.; rrule_f=rrule_via_ad + ZygoteRuleConfig(), my_namedtuple, 1., (2.0, 2.4), 3.; rrule_f=rrule_via_ad ) test_rrule(ZygoteRuleConfig(), sum, (1.0, 2.0, 3.0); rrule_f=rrule_via_ad) test_rrule( From 073d86942c8b7aafcbe3bc6d13632a9e3ddc0515 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Mon, 26 Jul 2021 19:08:42 +0100 Subject: [PATCH 162/490] bump versions --- Project.toml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/Project.toml b/Project.toml index 2b595b150..9d623d263 100644 --- a/Project.toml +++ b/Project.toml @@ -6,6 +6,7 @@ version = "0.6.17" AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" ChainRules = "082447d4-558c-5d27-93f4-14fc19e9eca2" ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" +ChainRulesTestUtils = "cdddcdb0-9152-4a09-a978-84456f9df70a" DiffRules = "b552c78f-8df3-52c6-915a-8e097449b14b" Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b" FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b" @@ -24,7 +25,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" ChainRules = "1" -ChainRulesCore = "1" +ChainRulesCore = "1.0.1" ChainRulesTestUtils = "1" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12" @@ -33,7 +34,7 @@ IRTools = "0.4" MacroTools = "0.5" NaNMath = "0.3" Requires = "1.1" -SpecialFunctions = "0.10, 1.0" +SpecialFunctions = "1.6" StatsFuns = "0.9.8" ZygoteRules = "0.2.1" julia = "1.3" From 9ef6332829752cc6571badabf80b401f28e72f3c Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Mon, 26 Jul 2021 19:23:42 +0100 Subject: [PATCH 163/490] Remove direct dependency on CRTU --- Project.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/Project.toml b/Project.toml index 9d623d263..4a71b47cb 100644 --- a/Project.toml +++ b/Project.toml @@ -6,7 +6,6 @@ version = "0.6.17" AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" ChainRules = "082447d4-558c-5d27-93f4-14fc19e9eca2" ChainRulesCore = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" -ChainRulesTestUtils = "cdddcdb0-9152-4a09-a978-84456f9df70a" DiffRules = "b552c78f-8df3-52c6-915a-8e097449b14b" Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b" FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b" From 1cc024e8840ed9585acd3e3f75cbc40ea64534ef Mon Sep 17 
00:00:00 2001 From: Lyndon White Date: Tue, 27 Jul 2021 09:51:04 +0100 Subject: [PATCH 164/490] Update test/utils.jl Co-authored-by: Dhairya Gandhi --- test/utils.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/utils.jl b/test/utils.jl index ee01fa7ed..ecbfc2e14 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -25,7 +25,7 @@ end xs, y = randn(2,3), rand() f34(xs, y) = xs[1] * (sum(xs .^ (1:3)') + y^4) # non-diagonal Hessian, two arguments - # Follow is should work ones we workout what ForwardDiff should do when `Float64` is called on a `Dual` + # Following should work once we workout what ForwardDiff should do when `Float64` is called on a `Dual` # https://github.com/JuliaDiff/ForwardDiff.jl/pull/538 # else might need a custom overload of `(;;ChainRulesCore.ProjectTo)(::Dual)` # When fixed uncomment the below and delete the broken function From 8e44dc7875cdba1df0289830f35f141288a69a0e Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Tue, 27 Jul 2021 12:35:07 +0100 Subject: [PATCH 165/490] renable diaghessian test --- test/utils.jl | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/test/utils.jl b/test/utils.jl index ecbfc2e14..70a8ebd63 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -25,24 +25,10 @@ end xs, y = randn(2,3), rand() f34(xs, y) = xs[1] * (sum(xs .^ (1:3)') + y^4) # non-diagonal Hessian, two arguments - # Following should work once we workout what ForwardDiff should do when `Float64` is called on a `Dual` - # https://github.com/JuliaDiff/ForwardDiff.jl/pull/538 - # else might need a custom overload of `(;;ChainRulesCore.ProjectTo)(::Dual)` - # When fixed uncomment the below and delete the broken function - #== dx, dy = diaghessian(f34, xs, y) @test size(dx) == size(xs) @test vec(dx) ≈ diag(hessian(x -> f34(x,y), xs)) @test dy ≈ hessian(y -> f34(xs,y), y) - ==# - function broken() - dx, dy = diaghessian(f34, xs, y) # This fails becase ProjectTo can't project a Dual onto a Float - c1 = size(dx) == size(xs) - c2 = vec(dx) ≈ diag(hessian(x -> f34(x,y), xs)) - c3 = dy ≈ hessian(y -> f34(xs,y), y) - return all([c1, c2, c3]) - end - @test_broken broken() zs = randn(7,13) # test chunk mode @test length(zs) > ForwardDiff.DEFAULT_CHUNK_THRESHOLD From f417fcb5487c2d171c279bce701067984e90da8e Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Tue, 27 Jul 2021 17:31:00 +0100 Subject: [PATCH 166/490] Fix Manifest for Docs --- docs/Manifest.toml | 213 +++++++++++++++++++++++++++++++++++++++++++++ docs/Project.toml | 1 + 2 files changed, 214 insertions(+) diff --git a/docs/Manifest.toml b/docs/Manifest.toml index eac8a8401..dfeb0b184 100644 --- a/docs/Manifest.toml +++ b/docs/Manifest.toml @@ -1,12 +1,72 @@ # This file is machine-generated - editing it directly is not advised +[[AbstractFFTs]] +deps = ["LinearAlgebra"] +git-tree-sha1 = "485ee0867925449198280d4af84bdb46a2a404d0" +uuid = "621f4979-c628-5d54-868e-fcf4e3e8185c" +version = "1.0.1" + +[[ArgTools]] +uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" + +[[Artifacts]] +uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" + [[Base64]] uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" +[[ChainRules]] +deps = ["ChainRulesCore", "Compat", "LinearAlgebra", "Random", "Statistics"] +git-tree-sha1 = "346588c81effb94da6a30c1617e56af6a878e4d6" +uuid = "082447d4-558c-5d27-93f4-14fc19e9eca2" +version = "1.0.1" + +[[ChainRulesCore]] +deps = ["Compat", "LinearAlgebra", "SparseArrays"] +git-tree-sha1 = "ad613c934ec3a3aa0ff19b91f15a16d56ed404b5" +uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" +version = "1.0.2" + 
+[[CommonSubexpressions]] +deps = ["MacroTools", "Test"] +git-tree-sha1 = "7b8a93dba8af7e3b42fecabf646260105ac373f7" +uuid = "bbf7d656-a473-5ed7-a52c-81e309532950" +version = "0.3.0" + +[[Compat]] +deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "SHA", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"] +git-tree-sha1 = "dc7dedc2c2aa9faf59a55c622760a25cbefbe941" +uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" +version = "3.31.0" + +[[CompilerSupportLibraries_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae" + [[Dates]] deps = ["Printf"] uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" +[[DelimitedFiles]] +deps = ["Mmap"] +uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab" + +[[DiffResults]] +deps = ["StaticArrays"] +git-tree-sha1 = "c18e98cba888c6c25d1c3b048e4b3380ca956805" +uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5" +version = "1.0.3" + +[[DiffRules]] +deps = ["NaNMath", "Random", "SpecialFunctions"] +git-tree-sha1 = "214c3fcac57755cfda163d91c58893a8723f93e9" +uuid = "b552c78f-8df3-52c6-915a-8e097449b14b" +version = "1.0.2" + +[[Distributed]] +deps = ["Random", "Serialization", "Sockets"] +uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" + [[DocStringExtensions]] deps = ["LibGit2"] git-tree-sha1 = "a32185f5428d3986f47c2ab78b1f216d5e6cc96f" @@ -19,39 +79,126 @@ git-tree-sha1 = "395fa1554c69735802bba37d9e7d9586fd44326c" uuid = "e30172f5-a6a5-5a46-863b-614d45cd2de4" version = "0.24.11" +[[Downloads]] +deps = ["ArgTools", "LibCURL", "NetworkOptions"] +uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" + +[[FillArrays]] +deps = ["LinearAlgebra", "Random", "SparseArrays", "Statistics"] +git-tree-sha1 = "8c8eac2af06ce35973c3eadb4ab3243076a408e7" +uuid = "1a297f60-69ca-5386-bcde-b61e274b549b" +version = "0.12.1" + +[[ForwardDiff]] +deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "LinearAlgebra", "NaNMath", "Printf", "Random", "SpecialFunctions", "StaticArrays"] +git-tree-sha1 = "e2af66012e08966366a43251e1fd421522908be6" +uuid = "f6369f11-7733-5829-9624-2563aa707210" +version = "0.10.18" + +[[IRTools]] +deps = ["InteractiveUtils", "MacroTools", "Test"] +git-tree-sha1 = "95215cd0076a150ef46ff7928892bc341864c73c" +uuid = "7869d1d1-7146-5819-86e3-90919afe41df" +version = "0.4.3" + [[InteractiveUtils]] deps = ["Markdown"] uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" +[[JLLWrappers]] +deps = ["Preferences"] +git-tree-sha1 = "642a199af8b68253517b80bd3bfd17eb4e84df6e" +uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210" +version = "1.3.0" + [[JSON]] deps = ["Dates", "Mmap", "Parsers", "Unicode"] git-tree-sha1 = "81690084b6198a2e1da36fcfda16eeca9f9f24e4" uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" version = "0.21.1" +[[LibCURL]] +deps = ["LibCURL_jll", "MozillaCACerts_jll"] +uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" + +[[LibCURL_jll]] +deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] +uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" + [[LibGit2]] deps = ["Base64", "NetworkOptions", "Printf", "SHA"] uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" +[[LibSSH2_jll]] +deps = ["Artifacts", "Libdl", "MbedTLS_jll"] +uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" + +[[Libdl]] +uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" + +[[LinearAlgebra]] +deps = ["Libdl"] +uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" + +[[LogExpFunctions]] +deps = ["DocStringExtensions", "LinearAlgebra"] 
+git-tree-sha1 = "7bd5f6565d80b6bf753738d2bc40a5dfea072070" +uuid = "2ab3a3ac-af41-5b50-aa03-7779005ae688" +version = "0.2.5" + [[Logging]] uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" +[[MacroTools]] +deps = ["Markdown", "Random"] +git-tree-sha1 = "6a8a2a625ab0dea913aba95c11370589e0239ff0" +uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" +version = "0.5.6" + [[Markdown]] deps = ["Base64"] uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" +[[MbedTLS_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" + [[Mmap]] uuid = "a63ad114-7e13-5084-954f-fe012c677804" +[[MozillaCACerts_jll]] +uuid = "14a3606d-f60d-562e-9121-12d972cd8159" + +[[NaNMath]] +git-tree-sha1 = "bfe47e760d60b82b66b61d2d44128b62e3a369fb" +uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" +version = "0.3.5" + [[NetworkOptions]] uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" +[[OpenSpecFun_jll]] +deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] +git-tree-sha1 = "13652491f6856acfd2db29360e1bbcd4565d04f1" +uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e" +version = "0.5.5+0" + [[Parsers]] deps = ["Dates"] git-tree-sha1 = "c8abc88faa3f7a3950832ac5d6e690881590d6dc" uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" version = "1.1.0" +[[Pkg]] +deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] +uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" + +[[Preferences]] +deps = ["TOML"] +git-tree-sha1 = "00cfd92944ca9c760982747e9a1d0d5d86ab1e5a" +uuid = "21216c6a-2e73-6563-6e65-726566657250" +version = "1.2.2" + [[Printf]] deps = ["Unicode"] uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" @@ -64,18 +211,84 @@ uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" deps = ["Serialization"] uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" +[[Requires]] +deps = ["UUIDs"] +git-tree-sha1 = "4036a3bd08ac7e968e27c203d45f5fff15020621" +uuid = "ae029012-a4dd-5104-9daa-d747884805df" +version = "1.1.3" + [[SHA]] uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" [[Serialization]] uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" +[[SharedArrays]] +deps = ["Distributed", "Mmap", "Random", "Serialization"] +uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383" + [[Sockets]] uuid = "6462fe0b-24de-5631-8697-dd941f90decc" +[[SparseArrays]] +deps = ["LinearAlgebra", "Random"] +uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" + +[[SpecialFunctions]] +deps = ["ChainRulesCore", "LogExpFunctions", "OpenSpecFun_jll"] +git-tree-sha1 = "508822dca004bf62e210609148511ad03ce8f1d8" +uuid = "276daf66-3868-5448-9aa4-cd146d93841b" +version = "1.6.0" + +[[StaticArrays]] +deps = ["LinearAlgebra", "Random", "Statistics"] +git-tree-sha1 = "5b2f81eeb66bcfe379947c500aae773c85c31033" +uuid = "90137ffa-7385-5640-81b9-e52037218182" +version = "1.2.8" + +[[Statistics]] +deps = ["LinearAlgebra", "SparseArrays"] +uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" + +[[TOML]] +deps = ["Dates"] +uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" + +[[Tar]] +deps = ["ArgTools", "SHA"] +uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" + [[Test]] deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +[[UUIDs]] +deps = ["Random", "SHA"] +uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" + [[Unicode]] uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" + +[[Zlib_jll]] +deps = ["Libdl"] +uuid = "83775a58-1f1d-513f-b197-d71354ab007a" + +[[Zygote]] +deps = ["AbstractFFTs", "ChainRules", "ChainRulesCore", "DiffRules", 
"Distributed", "FillArrays", "ForwardDiff", "IRTools", "InteractiveUtils", "LinearAlgebra", "MacroTools", "NaNMath", "Random", "Requires", "SpecialFunctions", "Statistics", "ZygoteRules"] +path = ".." +uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" +version = "0.6.17" + +[[ZygoteRules]] +deps = ["MacroTools"] +git-tree-sha1 = "9e7a1e8ca60b742e508a315c17eef5211e7fbfd7" +uuid = "700de1a5-db45-46bc-99cf-38207098b444" +version = "0.2.1" + +[[nghttp2_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" + +[[p7zip_jll]] +deps = ["Artifacts", "Libdl"] +uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" diff --git a/docs/Project.toml b/docs/Project.toml index 1b9ab1f81..2a4c85433 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -1,5 +1,6 @@ [deps] Documenter = "e30172f5-a6a5-5a46-863b-614d45cd2de4" +Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" [compat] Documenter = "0.24" From 882a939986871d834b98b3cbbf2fab4bba6183ff Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Tue, 27 Jul 2021 17:52:58 +0100 Subject: [PATCH 167/490] Fix doctests --- src/compiler/interface.jl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 6ca8257d5..e4db33471 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -58,11 +58,11 @@ See also [`withgradient`](@ref) to keep the value `f(args...)`, and [`pullback`](@ref) for value and back-propagator. ```jldoctest; setup=:(using Zygote) -julia> gradient(*, 2, 3, 5) -(15, 10, 6) +julia> gradient(*, 2.0, 3.0, 5.0) +(15.0, 10.0, 6.0) -julia> gradient(x -> sum(abs2,x), [7, 11, 13]) -([14, 22, 26],) +julia> gradient(x -> sum(abs2,x), [7.0, 11.0, 13.0]) +([14.0, 22.0, 26.0],) julia> gradient([7, 11], 0, 1) do x, y, d p = size(x, d) From 31150fa019030cb28deddfb0c5ffdff36fc7da0a Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Tue, 27 Jul 2021 22:30:02 +0530 Subject: [PATCH 168/490] whitespace --- src/compiler/chainrules.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 15653c47b..d6a1894c2 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -28,8 +28,8 @@ function has_chain_rrule(T) else # Not being redispatched: it does have a config rrule_m = configured_rrule_m - # Thus any no_rrule that might apply must also have a config because if it applied - # it will be identical, and if it doesn't we don't care what it is. + # Thus any no_rrule that might apply must also have a config because if it applied + # it will be identical, and if it doesn't we don't care what it is. no_rrule_m = meta(Tuple{typeof(ChainRulesCore.no_rrule), config_T, arg_Ts...}) end From 5fb12bcc06b9ad002fb707e1c575da8f296b81be Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Tue, 27 Jul 2021 22:44:37 +0530 Subject: [PATCH 169/490] typos --- src/compiler/chainrules.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index d6a1894c2..6fcdcdf40 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -42,13 +42,13 @@ function has_chain_rrule(T) # Consider the following truth table, for what can occur: # rrule: fallback, no_rrule: fallback => matches => do not use rrule. # rrule: specific, no_rrule: fallback => !matches => do use rrule, as haven't opted out. 
- # rrule: fallback, no_rrule: specific => IMPOSSIBLE, every no_rule us identical to some rrule + # rrule: fallback, no_rrule: specific => IMPOSSIBLE, every no_rule is identical to some rrule # rrule: specific, no_rrule: specific => matches => do not use rrule as opted out # rrule: specific, no_rrule: general => !matches => do use rrule as a more specific rrule takes preciedent over more general opted out # rrule: general , no_rrule: specific => IMPOSSIBLE, every no_rule us identical to some rrule so can't have a more general rrule being hit, as the specific one would hit first # - # Note that the fallback cases are the same outcome as the general cases as fallback is just most general. - # It can be seen that checking if it matches is the correct way to decide if we should ue the rrule or not. + # Note that the fallback cases are the same outcome as the general cases as fallback is just most general. + # It can be seen that checking if it matches is the correct way to decide if we should use the rrule or not. do_not_use_rrule = matching_cr_sig(no_rrule_m, rrule_m) From 002c937db45ce25855eb2427781c19cd1d351bd0 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Tue, 27 Jul 2021 19:12:17 +0100 Subject: [PATCH 170/490] Update Project.toml --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 4a71b47cb..5d04edb68 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.17" +version = "0.6.18" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 5b803d12f2d235755ba6f23a9017fc29fefd9985 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Tue, 27 Jul 2021 19:15:46 +0100 Subject: [PATCH 171/490] let CR1 rest as 0.6.18-DEV for a bit --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 5d04edb68..a1b9e5d0c 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.18" +version = "0.6.18-DEV" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 7cde196d9fca10b829d4da3792f4c0944bb9899a Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Thu, 29 Jul 2021 11:53:43 +0200 Subject: [PATCH 172/490] remove artifact warning --- src/compiler/chainrules.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 6fcdcdf40..fab01e01c 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -194,7 +194,7 @@ z2d(x, ::Any) = x z2d(::Nothing, ::Any) = NoTangent() z2d(a::AbstractArray{<:Number}, primal::AbstractArray{T}) where T = a z2d(a::AbstractArray, primal::AbstractArray{T}) where T = z2d.(a, primal) -z2d(x::Union{AbstractZero, Tangent}, ::Any) = (difftype_warn(x); return x) +z2d(x::Union{AbstractZero, Tangent}, ::Any) = return x function z2d(t::Tuple, primal::Tuple) tp::Tuple = map(z2d, t, primal) primal_type = typeof(primal) From 149bae5492e1f1e7e71903908d155a9107dad3fd Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Thu, 29 Jul 2021 12:57:40 +0100 Subject: [PATCH 173/490] add comment --- src/compiler/chainrules.jl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index fab01e01c..fc02d68a8 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -194,6 +194,9 @@ z2d(x, ::Any) = x z2d(::Nothing, ::Any) = NoTangent() 
z2d(a::AbstractArray{<:Number}, primal::AbstractArray{T}) where T = a z2d(a::AbstractArray, primal::AbstractArray{T}) where T = z2d.(a, primal) +# Note: this should never be hit if we are converting things right, but it seems to be +# happening in the wild for sufficiently weird functions/types. +# This fixes most (all?) cases, but it would be good to find what we miss. z2d(x::Union{AbstractZero, Tangent}, ::Any) = return x function z2d(t::Tuple, primal::Tuple) tp::Tuple = map(z2d, t, primal) From 005448bf57913a4ed270fb9b63ba248219973890 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 31 Jul 2021 21:51:51 -0400 Subject: [PATCH 174/490] rm examples --- examples/Manifest.toml | 394 ---------------------------------- examples/Project.toml | 5 - examples/linear_regression.jl | 99 --------- examples/mnist_mlp.jl | 107 --------- examples/profiler.jl | 38 ---- 5 files changed, 643 deletions(-) delete mode 100644 examples/Manifest.toml delete mode 100644 examples/Project.toml delete mode 100644 examples/linear_regression.jl delete mode 100644 examples/mnist_mlp.jl delete mode 100644 examples/profiler.jl diff --git a/examples/Manifest.toml b/examples/Manifest.toml deleted file mode 100644 index 4ad93ad0d..000000000 --- a/examples/Manifest.toml +++ /dev/null @@ -1,394 +0,0 @@ -# This file is machine-generated - editing it directly is not advised - -[[AbstractFFTs]] -deps = ["LinearAlgebra"] -git-tree-sha1 = "380e36c66edfa099cd90116b24c1ce8cafccac40" -uuid = "621f4979-c628-5d54-868e-fcf4e3e8185c" -version = "0.4.1" - -[[AbstractTrees]] -deps = ["Markdown", "Test"] -git-tree-sha1 = "6621d9645702c1c4e6970cc6a3eae440c768000b" -uuid = "1520ce14-60c1-5f80-bbc7-55ef81b5835c" -version = "0.2.1" - -[[Adapt]] -deps = ["LinearAlgebra"] -git-tree-sha1 = "fd04049c7dd78cfef0b06cdc1f0f181467655712" -uuid = "79e6a3ab-5dfb-504d-930d-738a2a938a0e" -version = "1.1.0" - -[[Base64]] -uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" - -[[BinDeps]] -deps = ["Libdl", "Pkg", "SHA", "URIParser", "Unicode"] -git-tree-sha1 = "1289b57e8cf019aede076edab0587eb9644175bd" -uuid = "9e28174c-4ba2-5203-b857-d8d62c4213ee" -version = "1.0.2" - -[[BinaryProvider]] -deps = ["Libdl", "Logging", "SHA"] -git-tree-sha1 = "ecdec412a9abc8db54c0efc5548c64dfce072058" -uuid = "b99e7846-7c00-51b0-8f62-c81ae34c0232" -version = "0.5.10" - -[[CEnum]] -git-tree-sha1 = "215a9aa4a1f23fbd05b92769fdd62559488d70e9" -uuid = "fa961155-64e5-5f13-b03f-caf6b980ea82" -version = "0.4.1" - -[[CUDAapi]] -deps = ["Libdl", "Logging"] -git-tree-sha1 = "e063efb91cfefd7e6afd92c435d01398107a500b" -uuid = "3895d2a7-ec45-59b8-82bb-cfc6a382f9b3" -version = "1.2.0" - -[[CUDAdrv]] -deps = ["CUDAapi", "Libdl", "Printf"] -git-tree-sha1 = "9ce99b5732c70e06ed97c042187baed876fb1698" -uuid = "c5f51814-7f29-56b8-a69c-e4d8f6be1fde" -version = "3.1.0" - -[[CUDAnative]] -deps = ["Adapt", "CUDAapi", "CUDAdrv", "DataStructures", "InteractiveUtils", "LLVM", "Libdl", "Logging", "Printf", "TimerOutputs"] -git-tree-sha1 = "3d6427f28430730c0e4107d8f26c4943a9a142dc" -uuid = "be33ccc6-a3ff-5ff2-a52e-74243cff1e17" -version = "2.4.0" - -[[CodecZlib]] -deps = ["BinaryProvider", "Libdl", "TranscodingStreams"] -git-tree-sha1 = "05916673a2627dd91b4969ff8ba6941bc85a960e" -uuid = "944b1d66-785c-5afd-91f1-9de20f533193" -version = "0.6.0" - -[[ColorTypes]] -deps = ["FixedPointNumbers", "Random"] -git-tree-sha1 = "7b62b728a5f3dd6ee3b23910303ccf27e82fad5e" -uuid = "3da002f7-5984-5a60-b8a6-cbb66c0b333f" -version = "0.8.1" - -[[Colors]] -deps = ["ColorTypes", 
"FixedPointNumbers", "InteractiveUtils", "Printf", "Reexport"] -git-tree-sha1 = "c9c1845d6bf22e34738bee65c357a69f416ed5d1" -uuid = "5ae59095-9a9b-59fe-a467-6f913c188581" -version = "0.9.6" - -[[CommonSubexpressions]] -deps = ["MacroTools", "Test"] -git-tree-sha1 = "7b8a93dba8af7e3b42fecabf646260105ac373f7" -uuid = "bbf7d656-a473-5ed7-a52c-81e309532950" -version = "0.3.0" - -[[Conda]] -deps = ["JSON", "VersionParsing"] -git-tree-sha1 = "299304989a5e6473d985212c28928899c74e9421" -uuid = "8f4d0f93-b110-5947-807f-2305c1781a2d" -version = "1.5.2" - -[[CuArrays]] -deps = ["AbstractFFTs", "Adapt", "CUDAapi", "CUDAdrv", "CUDAnative", "GPUArrays", "LinearAlgebra", "MacroTools", "NNlib", "Printf", "Random", "Requires", "SparseArrays", "TimerOutputs"] -git-tree-sha1 = "46b48742a84bb839e74215b7e468a4a1c6ba30f9" -uuid = "3a865a2d-5b23-5a0f-bc46-62713ec82fae" -version = "1.2.1" - -[[DataAPI]] -git-tree-sha1 = "ee400abb2298bd13bfc3df1c412ed228061a2385" -uuid = "9a962f9c-6df0-11e9-0e5d-c546b8b5ee8a" -version = "1.7.0" - -[[DataStructures]] -deps = ["InteractiveUtils", "OrderedCollections"] -git-tree-sha1 = "88d48e133e6d3dd68183309877eac74393daa7eb" -uuid = "864edb3b-99cc-5e75-8d2d-829cb0a9cfe8" -version = "0.17.20" - -[[Dates]] -deps = ["Printf"] -uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" - -[[DelimitedFiles]] -deps = ["Mmap"] -uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab" - -[[DiffResults]] -deps = ["StaticArrays"] -git-tree-sha1 = "c18e98cba888c6c25d1c3b048e4b3380ca956805" -uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5" -version = "1.0.3" - -[[DiffRules]] -deps = ["NaNMath", "Random", "SpecialFunctions"] -git-tree-sha1 = "214c3fcac57755cfda163d91c58893a8723f93e9" -uuid = "b552c78f-8df3-52c6-915a-8e097449b14b" -version = "1.0.2" - -[[Distributed]] -deps = ["Random", "Serialization", "Sockets"] -uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" - -[[ExprTools]] -git-tree-sha1 = "555eab1f7c501166ba87eeb5d561e9f5e7d167d3" -uuid = "e2ba6199-217a-4e67-a87a-7c52f15ade04" -version = "0.1.4" - -[[FFTW]] -deps = ["AbstractFFTs", "BinaryProvider", "Conda", "Libdl", "LinearAlgebra", "Reexport", "Test"] -git-tree-sha1 = "6c5b420da0b8c12098048561b8d58f81adea506f" -uuid = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" -version = "1.0.1" - -[[FillArrays]] -deps = ["LinearAlgebra", "Random", "SparseArrays"] -git-tree-sha1 = "de38b0253ade98340fabaf220f368f6144541938" -uuid = "1a297f60-69ca-5386-bcde-b61e274b549b" -version = "0.7.4" - -[[FixedPointNumbers]] -git-tree-sha1 = "d14a6fa5890ea3a7e5dcab6811114f132fec2b4b" -uuid = "53c48c17-4a7d-5ca2-90c5-79b7896eea93" -version = "0.6.1" - -[[Flux]] -deps = ["AbstractTrees", "Adapt", "CUDAapi", "CodecZlib", "Colors", "CuArrays", "DelimitedFiles", "Juno", "LinearAlgebra", "MacroTools", "NNlib", "Pkg", "Printf", "Random", "Reexport", "SHA", "Statistics", "StatsBase", "Tracker", "ZipFile"] -git-tree-sha1 = "b5ebbd896dcd8ff19c6cb7297c4d323155b26bcf" -uuid = "587475ba-b771-5e3f-ad9e-33799f191a9c" -version = "0.9.0" - -[[ForwardDiff]] -deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "LinearAlgebra", "NaNMath", "Printf", "Random", "SpecialFunctions", "StaticArrays"] -git-tree-sha1 = "e2af66012e08966366a43251e1fd421522908be6" -uuid = "f6369f11-7733-5829-9624-2563aa707210" -version = "0.10.18" - -[[GPUArrays]] -deps = ["Adapt", "FFTW", "FillArrays", "LinearAlgebra", "Printf", "Random", "Serialization", "Test"] -git-tree-sha1 = "8d74ced24448c52b539a23d107bd2424ee139c0f" -uuid = "0c68f7d7-f131-5f86-a1c3-88cf8149b2d7" -version = "1.0.4" - -[[IRTools]] -deps = ["InteractiveUtils", 
"MacroTools", "Test"] -git-tree-sha1 = "95215cd0076a150ef46ff7928892bc341864c73c" -uuid = "7869d1d1-7146-5819-86e3-90919afe41df" -version = "0.4.3" - -[[InteractiveUtils]] -deps = ["Markdown"] -uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" - -[[JSON]] -deps = ["Dates", "Mmap", "Parsers", "Unicode"] -git-tree-sha1 = "81690084b6198a2e1da36fcfda16eeca9f9f24e4" -uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" -version = "0.21.1" - -[[Juno]] -deps = ["Base64", "Logging", "Media", "Profile", "Test"] -git-tree-sha1 = "30d94657a422d09cb97b6f86f04f750fa9c50df8" -uuid = "e5e0dc1b-0480-54bc-9374-aad01c23163d" -version = "0.7.2" - -[[LLVM]] -deps = ["CEnum", "Libdl", "Printf", "Unicode"] -git-tree-sha1 = "d9c6e1efcaa6c2fcd043da812a62b3e489a109a3" -uuid = "929cbde3-209d-540e-8aea-75f648917ca0" -version = "1.7.0" - -[[LibGit2]] -uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" - -[[Libdl]] -uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" - -[[LinearAlgebra]] -deps = ["Libdl"] -uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" - -[[Logging]] -uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" - -[[MacroTools]] -deps = ["Markdown", "Random"] -git-tree-sha1 = "6a8a2a625ab0dea913aba95c11370589e0239ff0" -uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" -version = "0.5.6" - -[[Markdown]] -deps = ["Base64"] -uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" - -[[Media]] -deps = ["MacroTools", "Test"] -git-tree-sha1 = "75a54abd10709c01f1b86b84ec225d26e840ed58" -uuid = "e89f7d12-3494-54d1-8411-f7d8b9ae1f27" -version = "0.5.0" - -[[Missings]] -deps = ["DataAPI"] -git-tree-sha1 = "4ea90bd5d3985ae1f9a908bd4500ae88921c5ce7" -uuid = "e1d29d7a-bbdc-5cf2-9ac0-f12de2c33e28" -version = "1.0.0" - -[[Mmap]] -uuid = "a63ad114-7e13-5084-954f-fe012c677804" - -[[NNlib]] -deps = ["BinaryProvider", "Libdl", "LinearAlgebra", "Requires", "Statistics"] -git-tree-sha1 = "d9f196d911f55aeaff11b11f681b135980783824" -uuid = "872c559c-99b0-510c-b3b7-b6c96a88d5cd" -version = "0.6.6" - -[[NaNMath]] -git-tree-sha1 = "bfe47e760d60b82b66b61d2d44128b62e3a369fb" -uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" -version = "0.3.5" - -[[OrderedCollections]] -git-tree-sha1 = "85f8e6578bf1f9ee0d11e7bb1b1456435479d47c" -uuid = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" -version = "1.4.1" - -[[Parsers]] -deps = ["Dates"] -git-tree-sha1 = "c8abc88faa3f7a3950832ac5d6e690881590d6dc" -uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" -version = "1.1.0" - -[[Pkg]] -deps = ["Dates", "LibGit2", "Markdown", "Printf", "REPL", "Random", "SHA", "UUIDs"] -uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" - -[[Printf]] -deps = ["Unicode"] -uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" - -[[Profile]] -deps = ["Printf"] -uuid = "9abbd945-dff8-562f-b5e8-e1ebf5ef1b79" - -[[ProgressMeter]] -deps = ["Distributed", "Printf"] -git-tree-sha1 = "afadeba63d90ff223a6a48d2009434ecee2ec9e8" -uuid = "92933f4c-e287-5a05-a399-4b506db050ca" -version = "1.7.1" - -[[REPL]] -deps = ["InteractiveUtils", "Markdown", "Sockets"] -uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" - -[[Random]] -deps = ["Serialization"] -uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" - -[[Reexport]] -deps = ["Pkg"] -git-tree-sha1 = "7b1d07f411bc8ddb7977ec7f377b97b158514fe0" -uuid = "189a3867-3050-52da-a836-e630ba90ab69" -version = "0.2.0" - -[[Requires]] -deps = ["Test"] -git-tree-sha1 = "f6fbf4ba64d295e146e49e021207993b6b48c7d1" -uuid = "ae029012-a4dd-5104-9daa-d747884805df" -version = "0.5.2" - -[[SHA]] -uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" - -[[Serialization]] -uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" - -[[Sockets]] -uuid = 
"6462fe0b-24de-5631-8697-dd941f90decc" - -[[SortingAlgorithms]] -deps = ["DataStructures"] -git-tree-sha1 = "2ec1962eba973f383239da22e75218565c390a96" -uuid = "a2af1166-a08f-5f64-846c-94a0d3cef48c" -version = "1.0.0" - -[[SparseArrays]] -deps = ["LinearAlgebra", "Random"] -uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" - -[[SpecialFunctions]] -deps = ["BinDeps", "BinaryProvider", "Libdl"] -git-tree-sha1 = "3bdd374b6fd78faf0119b8c5d538788dbf910c6e" -uuid = "276daf66-3868-5448-9aa4-cd146d93841b" -version = "0.8.0" - -[[StaticArrays]] -deps = ["LinearAlgebra", "Random", "Statistics"] -git-tree-sha1 = "896d55218776ab8f23fb7b222a5a4a946d4aafc2" -uuid = "90137ffa-7385-5640-81b9-e52037218182" -version = "1.2.5" - -[[Statistics]] -deps = ["LinearAlgebra", "SparseArrays"] -uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" - -[[StatsAPI]] -git-tree-sha1 = "1958272568dc176a1d881acb797beb909c785510" -uuid = "82ae8749-77ed-4fe6-ae5f-f523153014b0" -version = "1.0.0" - -[[StatsBase]] -deps = ["DataAPI", "DataStructures", "LinearAlgebra", "Missings", "Printf", "Random", "SortingAlgorithms", "SparseArrays", "Statistics", "StatsAPI"] -git-tree-sha1 = "2f6792d523d7448bbe2fec99eca9218f06cc746d" -uuid = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91" -version = "0.33.8" - -[[Test]] -deps = ["Distributed", "InteractiveUtils", "Logging", "Random"] -uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" - -[[TimerOutputs]] -deps = ["ExprTools", "Printf"] -git-tree-sha1 = "209a8326c4f955e2442c07b56029e88bb48299c7" -uuid = "a759f4b9-e2f1-59dc-863e-4aeb61b1ea8f" -version = "0.5.12" - -[[Tracker]] -deps = ["Adapt", "DiffRules", "ForwardDiff", "LinearAlgebra", "MacroTools", "NNlib", "NaNMath", "Printf", "Random", "Requires", "SpecialFunctions", "Statistics", "Test"] -git-tree-sha1 = "86929a5811dca5ce76c65a1d3fecda92d90c2e49" -uuid = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" -version = "0.2.6" - -[[TranscodingStreams]] -deps = ["Random", "Test"] -git-tree-sha1 = "7c53c35547de1c5b9d46a4797cf6d8253807108c" -uuid = "3bb67fe8-82b1-5028-8e26-92a6c54297fa" -version = "0.9.5" - -[[URIParser]] -deps = ["Unicode"] -git-tree-sha1 = "53a9f49546b8d2dd2e688d216421d050c9a31d0d" -uuid = "30578b45-9adc-5946-b283-645ec420af67" -version = "0.4.1" - -[[UUIDs]] -deps = ["Random", "SHA"] -uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" - -[[Unicode]] -uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" - -[[VersionParsing]] -git-tree-sha1 = "80229be1f670524750d905f8fc8148e5a8c4537f" -uuid = "81def892-9a0e-5fdd-b105-ffc91e053289" -version = "1.2.0" - -[[ZipFile]] -deps = ["BinaryProvider", "Libdl", "Printf"] -git-tree-sha1 = "7fbfbc51c186f0ccdbe091f32d3dff8608973f8e" -uuid = "a5390f91-8eb1-5f08-bee0-b1d1ffed6cea" -version = "0.8.4" - -[[Zygote]] -deps = ["DiffRules", "ForwardDiff", "IRTools", "InteractiveUtils", "LinearAlgebra", "MacroTools", "NNlib", "NaNMath", "Random", "Requires", "SpecialFunctions", "Statistics"] -git-tree-sha1 = "d3c2ae55d116b5360a73b1e88d1a974b446d933a" -repo-rev = "ffc50480ff8f7662110bfb82b0b6d4f9cef6e59d" -repo-url = "https://github.com/FluxML/Zygote.jl.git" -uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.14+" diff --git a/examples/Project.toml b/examples/Project.toml deleted file mode 100644 index 541d5a4f5..000000000 --- a/examples/Project.toml +++ /dev/null @@ -1,5 +0,0 @@ -[deps] -Flux = "587475ba-b771-5e3f-ad9e-33799f191a9c" -Juno = "e5e0dc1b-0480-54bc-9374-aad01c23163d" -ProgressMeter = "92933f4c-e287-5a05-a399-4b506db050ca" -Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" diff --git a/examples/linear_regression.jl 
b/examples/linear_regression.jl deleted file mode 100644 index 8b1e2cfe8..000000000 --- a/examples/linear_regression.jl +++ /dev/null @@ -1,99 +0,0 @@ -# Initialize environment in current directory -@info("Ensuring example environment instantiated...") -import Pkg -Pkg.activate(@__DIR__) -Pkg.instantiate() - -@info("Loading Zygote...") -using Zygote, LinearAlgebra - -# This example will showcase how we do a simple linear fit with Zygote, making -# use of complex datastructures, a home-grown stochastic gradient descent -# optimizer, and some good old-fashioned math. We start with the problem -# statement: We wish to learn the mapping `f(X) -> Y`, where `X` is a matrix -# of vector observations, `f()` is a linear mapping function and `Y` is a -# vector of scalar observations. - -# Because we like complex objects, we will define our linear regression as the -# following object: -mutable struct LinearRegression - # These values will be implicitly learned - weights::Matrix - bias::Float64 - - # These values will not be learned - name::String -end -LinearRegression(nparams, name) = LinearRegression(randn(1, nparams), 0.0, name) - -# Our linear prediction looks very familiar; w*X + b -function predict(model::LinearRegression, X) - return model.weights * X .+ model.bias -end - -# Our "loss" that must be minimized is the l2 norm between our current -# prediction and our ground-truth Y -function loss(model::LinearRegression, X, Y) - return norm(predict(model, X) .- Y, 2) -end - - -# Our "ground truth" values (that we will learn, to prove that this works) -weights_gt = [1.0, 2.7, 0.3, 1.2]' -bias_gt = 0.4 - -# Generate a dataset of many observations -X = randn(length(weights_gt), 10000) -Y = weights_gt * X .+ bias_gt - -# Add a little bit of noise to `X` so that we do not have an exact solution, -# but must instead do a least-squares fit: -X .+= 0.001.*randn(size(X)) - - -# Now we begin our "training loop", where we take examples from `X`, -# calculate loss with respect to the corresponding entry in `Y`, find the -# gradient upon our model, update the model, and continue. Before we jump -# in, let's look at what `Zygote.gradient()` gives us: -@info("Building model...") -model = LinearRegression(size(X, 1), "Example") - -# Calculate gradient upon `model` for the first example in our training set -@info("Calculating gradient (the first time can take a while to compile...)") -grads = Zygote.gradient(model) do m - return loss(m, X[:,1], Y[1]) -end - -# The `grads` object is a Tuple containing one element per argument to -# `gradient()`, so we take the first one to get the gradient upon `model`: -grads = grads[1] - -# Because our LinearRegression object is mutable, the gradient holds a -# reference to it, which we peel via `grads[]`: -grads = grads[] - -# We now get a `NamedTuple` so we can now do things like `grads.weight`. Let's -# print it out, just to see what it looks like. Note that while `weights` and -# `bias` have gradients, `name` just naturally has a gradient of `nothing`, -# because it was not involved in the calculation of the output loss. 
-@info grads - -# Let's define an update rule that will allow us to modify the weights -# of our model a tad bit according to the gradients -function sgd_update!(model::LinearRegression, grads, η = 0.001) - model.weights .-= η .* grads.weights - model.bias -= η * grads.bias -end - -# Now let's do that for each example in our training set: -@info("Running train loop for $(size(X,2)) iterations") -for idx in 1:size(X, 2) - grads = Zygote.gradient(m -> loss(m, X[:, idx], Y[idx]), model)[1][] - sgd_update!(model, grads) -end - -# Now let's look at how well we've approximated the ground truth weights/bias: -@info("Ground truth weights: $(weights_gt)") -@info("Learned weights: $(round.(model.weights; digits=3))") -@info("Ground truth bias: $(bias_gt)") -@info("Learned bias: $(round(model.bias; digits=3))") diff --git a/examples/mnist_mlp.jl b/examples/mnist_mlp.jl deleted file mode 100644 index 60c3e7e02..000000000 --- a/examples/mnist_mlp.jl +++ /dev/null @@ -1,107 +0,0 @@ -# Initialize environment in current directory -@info("Ensuring example environment instantiated...") -import Pkg -Pkg.activate(@__DIR__) -Pkg.instantiate() - -@info("Loading Zygote and Flux...") -using Zygote, Flux, Random, Statistics -using Flux.Data.MNIST - -# We're going to showcase how to use Zygote with Flux; we'll create a simple -# Multi-Layer Perceptron network to do digit classification upon the MNIST -# dataset. We start with some setup that is ripped straight from the Flux -# model zoo: - -# First, we load the MNIST images and flatten them into a giant matrix: -@info("Loading dataset...") -X = hcat(float.(reshape.(MNIST.images(), :))...) - -# Load labels as well, one-hot encoding them -Y = float.(Flux.onehotbatch(MNIST.labels(), 0:9)) - -# Do the same for the test data/labels: -X_test = hcat(float.(reshape.(MNIST.images(:test), :))...) -Y_test = float.(Flux.onehotbatch(MNIST.labels(:test), 0:9)) - -@info("Constructing MLP model...") -model = Chain( - Dense(28^2, 32, relu), - Dense(32, 10), - softmax, -) - -# Until Flux drops Tracker as its default Automatic Differentiation library, -# strip it out with this line: -model = Flux.mapleaves(Flux.data, model) - -# Our loss is the classical multiclass crossentropy loss -loss(model, X, Y) = Flux.crossentropy(model(X), Y) - -# Helper function to calculate accuracy of our model -accuracy(model, X, Y) = mean(Flux.onecold(model(X)) .== Flux.onecold(Y)) - - -# Recursive zygote update method, this is the general recursion case: -function zyg_update!(opt, model, updates) - # If this `model` node has no fields, then just return it - if nfields(model) == 0 - return model - end - - # If it does have fields, recurse into them: - for field_idx in 1:nfields(model) - zyg_update!(opt, getfield(model, field_idx), getfield(updates, field_idx)) - end - - # In the end, return the `model` - return model -end -# If the `updates` is set to `Nothing`, then just return `model`; this means -# that there were no changes to be applied to this piece of the model. -zyg_update!(opt, model, updates::Nothing) = model - -# If `model` is an `AbstractArray` and `updates` is too, then apply our Flux -# optimizer to the incoming gradients and apply them to the model! -function zyg_update!(opt, model::AbstractArray, updates::AbstractArray) - # Sub off to Flux's ADAM optimizer - Flux.Optimise.apply!(opt, model, updates) - return model .-= updates -end - - -# We will train for a number of epochs, with minibatches, using the `ADAM` -# optimizer to nudge our weights toward perfection. 
-opt = ADAM(0.001) -num_epochs = 10 -@info("Training for $(num_epochs) epochs...") -for epoch_idx in 1:num_epochs - # "global" here to dodgescoping issues with for loops at top-level - global X, Y, model - - # Shuffle the data each epoch: - perm = shuffle(1:size(X,2)) - X = X[:, perm] - Y = Y[:, perm] - - # Iterate over batches - batch_size = 512 - batch_idxs = 1:batch_size:(size(X,2) - batch_size) - for bidx in batch_idxs - # Calculate gradients upon the model for this batch - grads = Zygote.gradient(model) do model - return loss(model, X[:, bidx:bidx+batch_size], - Y[:, bidx:bidx+batch_size]) - end - - # Peel outer Tuple to access gradient of first parameter - grads = grads[1] - - # Apply recursive update to our model: - zyg_update!(opt, model, grads) - end - - # After each epoch, report our accuracy on the test set: - acc = accuracy(model, X_test, Y_test) - @info("[$(epoch_idx)] Accuracy: $(round(100*acc; digits=1))%") -end diff --git a/examples/profiler.jl b/examples/profiler.jl deleted file mode 100644 index 513fd6933..000000000 --- a/examples/profiler.jl +++ /dev/null @@ -1,38 +0,0 @@ -# Initialize environment in current directory -@info("Ensuring example environment instantiated...") -import Pkg -Pkg.activate(@__DIR__) -Pkg.instantiate() - -@info("Loading Zygote...") -using Zygote - -function f(x) - for i = 1:5 - x = sin(cos(x)) - end - return x -end - -function loop(x, n) - r = x/x - for i = 1:n - r *= f(x) - end - return sin(cos(r)) -end - -gradient(loop, 2, 3) - -Zygote.@profile loop(2, 3) - -function logsumexp(x::Array{Float64,1}) - A = maximum(x); - ema = exp.(x .- A); - sema = sum(ema); - log(sema) + A; -end - -gradient(logsumexp, rand(100)) - -Zygote.@profile logsumexp(rand(100)) From d89dc341d82bcd66def879eb7cece70e4f549495 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 4 Jul 2021 09:46:37 -0400 Subject: [PATCH 175/490] use _pullback to avoid some dy -> (nothing, back(dy)...) --- src/lib/broadcast.jl | 18 ++++++------------ 1 file changed, 6 insertions(+), 12 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index cdcd21547..32b4085df 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -75,23 +75,17 @@ unbroadcast(x::AbstractArray, x̄::Nothing) = nothing @adjoint broadcasted(::typeof(*), x::Numeric, y::Numeric) = x.*y, Δ -> (nothing, unbroadcast(x, Δ .* conj.(y)), unbroadcast(y, Δ .* conj.(x))) -@adjoint function broadcasted(::typeof(*), x::Number, y::AbstractArray{<:Number}) - z, back = pullback(*, x, y) # this uses dot(y,Δ) instead of Δ .* conj.(y) - z, Δ -> (nothing, back(Δ)...) -end -@adjoint function broadcasted(::typeof(*), x::AbstractArray{<:Number}, y::Number) - z, back = pullback(*, x, y) - z, Δ -> (nothing, back(Δ)...) -end +@adjoint broadcasted(::typeof(*), x::Number, y::AbstractArray{<:Number}) = + _pullback(*, x, y) # this uses dot(y,Δ) instead of sum(Δ .* conj.(y)) +@adjoint broadcasted(::typeof(*), x::AbstractArray{<:Number}, y::Number) = + _pullback(*, x, y) @adjoint function broadcasted(::typeof(/), x::Numeric, y::Numeric) res = x ./ y res, Δ -> (nothing, unbroadcast(x, Δ ./ conj.(y)), unbroadcast(y, .-Δ .* conj.(res ./ y))) end -@adjoint function broadcasted(::typeof(/), x::AbstractArray{<:Number}, y::Number) - z, back = pullback(/, x, y) - z, Δ -> (nothing, back(Δ)...) 
-end +@adjoint broadcasted(::typeof(/), x::AbstractArray{<:Number}, y::Number) = + _pullback(/, x, y) @adjoint function broadcasted(::typeof(Base.literal_pow), ::typeof(^), x::Numeric, exp::Val{p}) where p y = Base.literal_pow.(^, x, exp) From 29755a5bf7ed71e978da714ad3a175e7b7c9f8c0 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 4 Jul 2021 09:47:01 -0400 Subject: [PATCH 176/490] tidy up broadcast_forward --- src/lib/broadcast.jl | 37 ++++++++++++++++++++----------------- 1 file changed, 20 insertions(+), 17 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 32b4085df..6afafbefa 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -174,10 +174,9 @@ _dual_safearg(x) = false T = Broadcast.combine_eltypes(f, args) # Avoid generic broadcasting in two easy cases: if T == Bool - return f.(args...), _->nothing + return (f.(args...), _ -> nothing) elseif T <: Real && isconcretetype(T) && _dual_purefun(F) && all(_dual_safearg, args) - y, back = broadcast_forward(f, args...) - return y, ȳ -> (nothing, nothing, back(ȳ)...) + return broadcast_forward(f, args...) end len = inclen(args) y∂b = _broadcast((x...) -> _pullback(__context__, f, x...), args...) @@ -189,7 +188,7 @@ _dual_safearg(x) = false end (nothing, accum_sum(dxs[1]), map(unbroadcast, args, Base.tail(dxs))...) end - y, ∇broadcasted + return y, ∇broadcasted end @adjoint function broadcasted(::AbstractArrayStyle{0}, f, args...) @@ -231,28 +230,32 @@ function dual_function(f::F) where F end @inline function broadcast_forward(f, args::Vararg{Any,N}) where N - T = Broadcast.combine_eltypes(f, args) + valN = Val(N) out = dual_function(f).(args...) eltype(out) <: Dual || return (out, _ -> nothing) y = map(x -> x.value, out) - _back(ȳ, i) = unbroadcast(args[i], ((a, b) -> a*b.partials[i]).(ȳ, out)) - back(ȳ) = ntuple(i -> _back(ȳ, i), N) - return y, back + function bc_fwd_back(ȳ) + dargs = ntuple(valN) do i + unbroadcast(args[i], broadcast((y1, o1) -> y1 * o1.partials[i], ȳ, out)) + end + (nothing, nothing, dargs...) # nothings for broadcasted & f + end + return y, bc_fwd_back end @init @require CUDA="052768ef-5323-5732-b1bb-66c8b64840ba" begin const CuArrayStyle = CUDA.AbstractGPUArrayStyle - if isdefined(CUDA, :cufunc) - @eval @adjoint function broadcasted(::CuArrayStyle, f, args...) - y, back = broadcast_forward(CUDA.cufunc(f), args...) - y, ȳ -> (nothing, nothing, back(ȳ)...) - end + if isdefined(CUDA, :cufunc) # CUDA < 3.0 + + @eval @adjoint broadcasted(::CuArrayStyle, f, args...) = + broadcast_forward(CUDA.cufunc(f), args...) + else # CUDA >= 3.0 - @eval @adjoint function broadcasted(::CuArrayStyle, f, args...) - y, back = broadcast_forward(f, args...) - y, ȳ -> (nothing, nothing, back(ȳ)...) - end + + @eval @adjoint function broadcasted(::CuArrayStyle, f, args...) = + broadcast_forward(f, args...) + end @adjoint CUDA.CuArray{N,T}(xs::Array) where {N,T} = From c43e48140cea59a56263d5aa02ddd71e13eb7996 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 4 Jul 2021 09:48:38 -0400 Subject: [PATCH 177/490] rm explicit case for CuArray --- src/lib/broadcast.jl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 6afafbefa..e189d9dea 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -251,10 +251,10 @@ end @eval @adjoint broadcasted(::CuArrayStyle, f, args...) = broadcast_forward(CUDA.cufunc(f), args...) 
- else # CUDA >= 3.0 - - @eval @adjoint function broadcasted(::CuArrayStyle, f, args...) = - broadcast_forward(f, args...) + # else CUDA >= 3.0 -- don't need cufunc(f), and ordinary broadcasting calls broadcast_forward when safe + + # @eval @adjoint function broadcasted(::CuArrayStyle, f, args...) = + # broadcast_forward(f, args...) end From 5d6516adc36765ee6082c126aeb3944fddf7b070 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 4 Jul 2021 10:45:05 -0400 Subject: [PATCH 178/490] solve two warnings re CUDA ArgumentError: Package Zygote does not have CUDA in its dependencies WARNING: using CUDA.trim in module Zygote conflicts with an existing identifier --- src/lib/broadcast.jl | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index e189d9dea..e9ae579a8 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -45,18 +45,18 @@ function Base.reducedim_init(::typeof(identity), ::typeof(accum), A::AbstractArr Base.reducedim_initarray(A, region, nothing, Union{Nothing,eltype(A)}) end -trim(x, Δ) = reshape(Δ, ntuple(i -> size(Δ, i), Val(ndims(x)))) -trim(x::Tuple, Δ) = ntuple(k -> Δ[k], length(x)) +_trim(x, Δ) = reshape(Δ, ntuple(i -> size(Δ, i), Val(ndims(x)))) +_trim(x::Tuple, Δ) = NTuple{length(x)}(Δ) unbroadcast(x::AbstractArray, x̄) = size(x) == size(x̄) ? x̄ : - length(x) == length(x̄) ? trim(x, x̄) : - trim(x, accum_sum(x̄, dims = ntuple(i -> size(x, i) == 1 ? i : ndims(x̄)+1, Val(ndims(x̄))))) + length(x) == length(x̄) ? _trim(x, x̄) : + _trim(x, accum_sum(x̄, dims = ntuple(i -> size(x, i) == 1 ? i : ndims(x̄)+1, Val(ndims(x̄))))) unbroadcast(x::Number, x̄) = accum_sum(x̄) unbroadcast(x::Tuple{<:Any}, x̄) = (accum_sum(x̄),) unbroadcast(x::Base.RefValue, x̄) = (x=accum_sum(x̄),) -unbroadcast(x::Tuple, x̄) = trim(x, length(x) == length(x̄) ? x̄ : accum_sum(x̄; dims=2:ndims(x̄))) # case length(x) > 1 +unbroadcast(x::Tuple, x̄) = _trim(x, length(x) == length(x̄) ? 
x̄ : accum_sum(x̄; dims=2:ndims(x̄))) # case length(x) > 1 unbroadcast(x::AbstractArray, x̄::Nothing) = nothing @@ -244,6 +244,7 @@ end end @init @require CUDA="052768ef-5323-5732-b1bb-66c8b64840ba" begin + const CuArrayStyle = CUDA.AbstractGPUArrayStyle if isdefined(CUDA, :cufunc) # CUDA < 3.0 From ed3ec9bb19a46cb4d64bd452a9c73229217bf5f3 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 11 Jul 2021 21:24:09 -0400 Subject: [PATCH 179/490] use real.(x) never real(x) --- src/lib/broadcast.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index e9ae579a8..df3c3dd16 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -100,10 +100,10 @@ end end @adjoint broadcasted(::typeof(conj), x::Numeric) = - conj.(x), z̄ -> (nothing, conj.(z̄)) + conj(x), z̄ -> (nothing, conj(z̄)) @adjoint broadcasted(::typeof(real), x::Numeric) = - real.(x), z̄ -> (nothing, real.(z̄)) + real(x), z̄ -> (nothing, real(z̄)) @adjoint broadcasted(::typeof(imag), x::Numeric) = imag.(x), z̄ -> (nothing, im .* real.(z̄)) From 7b7c6902017293a1d1e9257f0679ea4f4d882a3d Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Mon, 12 Jul 2021 23:04:34 -0400 Subject: [PATCH 180/490] add examples from 1027 --- src/lib/array.jl | 2 +- test/cuda.jl | 7 ++++++- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 1ad7eb6a7..51e2ba6d3 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -8,7 +8,7 @@ using Distributed: pmap, AbstractWorkerPool @adjoint Array(xs::AbstractArray) = Array(xs), ȳ -> (ȳ,) @adjoint Array(xs::Array) = Array(xs), ȳ -> (ȳ,) -@nograd ones, zeros, Base.OneTo, Colon(), one, zero, sizehint! +@nograd ones, zeros, Base.OneTo, Colon(), one, zero, sizehint!, count @adjoint Base.vect(xs...) 
= Base.vect(xs...), Δ -> (Δ...,) diff --git a/test/cuda.jl b/test/cuda.jl index 95bcdf373..1d07d39fd 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -9,7 +9,7 @@ CUDA.allowscalar(false) @test gradient(x -> sum(cu(x)), r)[1] isa Array{Float32, 2} end -@testset "basic bcasting" begin +@testset "broadcasting" begin a = Float32.(1:9) a_gpu = a |> cu @@ -24,6 +24,11 @@ end g_gpu = gradient(x -> w(x), a_gpu)[1] @test g_gpu isa CuArray @test g_gpu |> collect ≈ g + + # https://github.com/FluxML/Zygote.jl/issues/1027 + @test gradient(x -> sum(x .!= 0), a_gpu) == (nothing,) + g3 = gradient(x -> sum(x .^ 3) ./ count(x .> 3), a)[1] + @test cu(g3) ≈ gradient(x -> sum(x .^ 3) ./ sum(x .> 3), a_gpu)[1] end @testset "sum(f, x)" begin From 08b1c1ea45321eddfe26f1f13258126cdec8177a Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 27 Jul 2021 15:46:32 -0400 Subject: [PATCH 181/490] bools aren't diff --- src/lib/broadcast.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index df3c3dd16..00f536366 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -219,6 +219,7 @@ using ForwardDiff: Dual dual(x, p) = x dual(x::Real, p) = Dual(x, p) +dual(x::Bool, p) = x function dual_function(f::F) where F function (args::Vararg{Any,N}) where N From f83f20a59b87cb5dc35d5c6e6fb967e12953b9fe Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 27 Jul 2021 16:52:54 -0400 Subject: [PATCH 182/490] un-comment CUDA broadcasting --- src/lib/broadcast.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 00f536366..b4e451aa9 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -253,10 +253,10 @@ end @eval @adjoint broadcasted(::CuArrayStyle, f, args...) = broadcast_forward(CUDA.cufunc(f), args...) - # else CUDA >= 3.0 -- don't need cufunc(f), and ordinary broadcasting calls broadcast_forward when safe + else CUDA >= 3.0 -- don't need cufunc(f), and ordinary broadcasting calls broadcast_forward when safe - # @eval @adjoint function broadcasted(::CuArrayStyle, f, args...) = - # broadcast_forward(f, args...) + @eval @adjoint function broadcasted(::CuArrayStyle, f, args...) = + broadcast_forward(f, args...) end From 6642fa7cc9aa2c7e727ee0f314c9d0c51965a55c Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 27 Jul 2021 16:59:14 -0400 Subject: [PATCH 183/490] typo --- src/lib/broadcast.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index b4e451aa9..4e3e3e83e 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -253,7 +253,7 @@ end @eval @adjoint broadcasted(::CuArrayStyle, f, args...) = broadcast_forward(CUDA.cufunc(f), args...) - else CUDA >= 3.0 -- don't need cufunc(f), and ordinary broadcasting calls broadcast_forward when safe + else # CUDA >= 3.0 -- don't need cufunc(f), and ordinary broadcasting calls broadcast_forward when safe @eval @adjoint function broadcasted(::CuArrayStyle, f, args...) = broadcast_forward(f, args...) 
From 7e7ca7565a242b3b8188a6065df2a2e90c8fdea8 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 27 Jul 2021 17:01:22 -0400 Subject: [PATCH 184/490] another typo --- src/lib/broadcast.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 4e3e3e83e..bc317ae89 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -255,7 +255,7 @@ end else # CUDA >= 3.0 -- don't need cufunc(f), and ordinary broadcasting calls broadcast_forward when safe - @eval @adjoint function broadcasted(::CuArrayStyle, f, args...) = + @eval @adjoint broadcasted(::CuArrayStyle, f, args...) = broadcast_forward(f, args...) end From 45ab282e88aa2b23ebb0f38cf7d69e3546826b04 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 27 Jul 2021 17:43:30 -0400 Subject: [PATCH 185/490] rm two dots --- test/cuda.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/cuda.jl b/test/cuda.jl index 1d07d39fd..8bb59629e 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -27,8 +27,8 @@ end # https://github.com/FluxML/Zygote.jl/issues/1027 @test gradient(x -> sum(x .!= 0), a_gpu) == (nothing,) - g3 = gradient(x -> sum(x .^ 3) ./ count(x .> 3), a)[1] - @test cu(g3) ≈ gradient(x -> sum(x .^ 3) ./ sum(x .> 3), a_gpu)[1] + g3 = gradient(x -> sum(x .^ 3) / count(x .> 3), a)[1] + @test cu(g3) ≈ gradient(x -> sum(x .^ 3) / sum(x .> 3), a_gpu)[1] end @testset "sum(f, x)" begin From 2cca8fc8cae4247e8a5b0950909c44cbb43b00bc Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 1 Aug 2021 08:31:54 -0400 Subject: [PATCH 186/490] revert change to trim's name --- src/lib/broadcast.jl | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index bc317ae89..3c4f7f215 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -45,18 +45,18 @@ function Base.reducedim_init(::typeof(identity), ::typeof(accum), A::AbstractArr Base.reducedim_initarray(A, region, nothing, Union{Nothing,eltype(A)}) end -_trim(x, Δ) = reshape(Δ, ntuple(i -> size(Δ, i), Val(ndims(x)))) -_trim(x::Tuple, Δ) = NTuple{length(x)}(Δ) +trim(x, Δ) = reshape(Δ, ntuple(i -> size(Δ, i), Val(ndims(x)))) +trim(x::Tuple, Δ) = NTuple{length(x)}(Δ) unbroadcast(x::AbstractArray, x̄) = size(x) == size(x̄) ? x̄ : - length(x) == length(x̄) ? _trim(x, x̄) : - _trim(x, accum_sum(x̄, dims = ntuple(i -> size(x, i) == 1 ? i : ndims(x̄)+1, Val(ndims(x̄))))) + length(x) == length(x̄) ? trim(x, x̄) : + trim(x, accum_sum(x̄, dims = ntuple(i -> size(x, i) == 1 ? i : ndims(x̄)+1, Val(ndims(x̄))))) unbroadcast(x::Number, x̄) = accum_sum(x̄) unbroadcast(x::Tuple{<:Any}, x̄) = (accum_sum(x̄),) unbroadcast(x::Base.RefValue, x̄) = (x=accum_sum(x̄),) -unbroadcast(x::Tuple, x̄) = _trim(x, length(x) == length(x̄) ? x̄ : accum_sum(x̄; dims=2:ndims(x̄))) # case length(x) > 1 +unbroadcast(x::Tuple, x̄) = trim(x, length(x) == length(x̄) ? 
x̄ : accum_sum(x̄; dims=2:ndims(x̄))) # case length(x) > 1 unbroadcast(x::AbstractArray, x̄::Nothing) = nothing From 19862d1ff517baa5a69933a40eb7e522e3e6d92e Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 1 Aug 2021 09:46:52 -0400 Subject: [PATCH 187/490] comments --- src/lib/broadcast.jl | 5 ++++- test/cuda.jl | 6 +++--- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 3c4f7f215..bcd0cbc35 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -253,7 +253,10 @@ end @eval @adjoint broadcasted(::CuArrayStyle, f, args...) = broadcast_forward(CUDA.cufunc(f), args...) - else # CUDA >= 3.0 -- don't need cufunc(f), and ordinary broadcasting calls broadcast_forward when safe + else # CUDA >= 3.0 -- don't need cufunc(f). + # Ordinary broadcasting calls broadcast_forward anyway when certain its' safe, + # so perhaps this can be deleted? Possible edge case here: + # https://github.com/FluxML/Zygote.jl/pull/1018#issuecomment-873629415 @eval @adjoint broadcasted(::CuArrayStyle, f, args...) = broadcast_forward(f, args...) diff --git a/test/cuda.jl b/test/cuda.jl index 8bb59629e..9eebafc14 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -26,9 +26,9 @@ end @test g_gpu |> collect ≈ g # https://github.com/FluxML/Zygote.jl/issues/1027 - @test gradient(x -> sum(x .!= 0), a_gpu) == (nothing,) - g3 = gradient(x -> sum(x .^ 3) / count(x .> 3), a)[1] - @test cu(g3) ≈ gradient(x -> sum(x .^ 3) / sum(x .> 3), a_gpu)[1] + @test gradient(x -> sum(x .!= 0), a_gpu) == (nothing,) # was MethodError: no method matching iterate(::Nothing) + g3 = gradient(x -> sum(x .^ 3) / count(x .> 3), a)[1] # was Can't differentiate gc_preserve_end expression + @test cu(g3) ≈ gradient(x -> sum(x .^ 3) / sum(x .> 3), a_gpu)[1] # was KernelException -- Zygote v0.6.14, CUDA v3.3.0 end @testset "sum(f, x)" begin From 687adbce9b6cdf73d70756b4cafffb71c9c16d6c Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 1 Aug 2021 09:55:01 -0400 Subject: [PATCH 188/490] mark one broken, not a regression --- test/cuda.jl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/test/cuda.jl b/test/cuda.jl index 9eebafc14..3999ace59 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -25,10 +25,12 @@ end @test g_gpu isa CuArray @test g_gpu |> collect ≈ g - # https://github.com/FluxML/Zygote.jl/issues/1027 + # https://github.com/FluxML/Zygote.jl/issues/1027 # status on Zygote v0.6.14, CUDA v3.3.0 in comments: @test gradient(x -> sum(x .!= 0), a_gpu) == (nothing,) # was MethodError: no method matching iterate(::Nothing) + @test gradient(x -> sum(x .> 3), a_gpu) == (nothing,) g3 = gradient(x -> sum(x .^ 3) / count(x .> 3), a)[1] # was Can't differentiate gc_preserve_end expression - @test cu(g3) ≈ gradient(x -> sum(x .^ 3) / sum(x .> 3), a_gpu)[1] # was KernelException -- Zygote v0.6.14, CUDA v3.3.0 + @test_skip cu(g3) ≈ gradient(x -> sum(x .^ 3) / sum(x .> 3), a_gpu)[1] # was KernelException -- not fixed by PR #1018 + @test cu(g3) ≈ gradient(x -> sum(x .^ 3) / count(x .> 3), a_gpu)[1] end @testset "sum(f, x)" begin From 88a0182f96d54ec5bee4cd333610082a5f3ee931 Mon Sep 17 00:00:00 2001 From: WT Date: Mon, 2 Aug 2021 17:27:41 +0100 Subject: [PATCH 189/490] Bump CR dep to 1.5 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index a1b9e5d0c..22b32dd6a 100644 --- a/Project.toml +++ b/Project.toml @@ -23,7 +23,7 @@ 
ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" -ChainRules = "1" +ChainRules = "1.5" ChainRulesCore = "1.0.1" ChainRulesTestUtils = "1" DiffRules = "1.0" From b7d7cc2e47df158a72dcd0be6af234fe60d0eea5 Mon Sep 17 00:00:00 2001 From: WT Date: Mon, 2 Aug 2021 17:27:51 +0100 Subject: [PATCH 190/490] Remove redundant rules --- src/lib/array.jl | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 51e2ba6d3..387b955cb 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -3,22 +3,17 @@ using FillArrays: AbstractFill, getindex_value using Base.Broadcast: broadcasted, broadcast_shape using Distributed: pmap, AbstractWorkerPool -@adjoint (::Type{T})(::UndefInitializer, args...) where T<:Array = T(undef, args...), Δ -> nothing - @adjoint Array(xs::AbstractArray) = Array(xs), ȳ -> (ȳ,) @adjoint Array(xs::Array) = Array(xs), ȳ -> (ȳ,) @nograd ones, zeros, Base.OneTo, Colon(), one, zero, sizehint!, count -@adjoint Base.vect(xs...) = Base.vect(xs...), Δ -> (Δ...,) - @adjoint copy(x::AbstractArray) = copy(x), ȳ -> (ȳ,) @adjoint collect(x::Tuple) = collect(x), dy -> (Tuple(dy),) @adjoint collect(x::AbstractArray) = collect(x), dy -> (dy,) # Array Constructors -@adjoint (::Type{T})(x::T) where T<:Array = T(x), ȳ -> (ȳ,) @adjoint function (::Type{T})(x::Number, sz) where {T <: Fill} back(Δ::AbstractArray) = (sum(Δ), nothing) back(Δ::NamedTuple) = (Δ.value, nothing) From ff3d28251e59bddca4cb3840eeec76affc3884c4 Mon Sep 17 00:00:00 2001 From: WT Date: Mon, 2 Aug 2021 17:30:01 +0100 Subject: [PATCH 191/490] Remove more redundant rules --- src/lib/array.jl | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 387b955cb..15b994564 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -101,17 +101,6 @@ end @adjoint collect(x::Array) = collect(x), Δ -> (Δ,) -@adjoint fill(x::Real, dims...) = fill(x, dims...), Δ->(sum(Δ), map(_->nothing, dims)...) - -@adjoint function circshift(A, shifts) - circshift(A, shifts), Δ -> (circshift(Δ, map(-, shifts)), nothing) -end - -@adjoint function reverse(x::AbstractArray, args...; kwargs...) - _reverse(t) = reverse(t, args...; kwargs...) - _reverse(x), Δ->(_reverse(Δ), map(_->nothing, args)...) 
-end - @adjoint permutedims(xs) = permutedims(xs), Δ -> (permutedims(Δ),) @adjoint permutedims(xs::AbstractVector) = permutedims(xs), Δ -> (vec(permutedims(Δ)),) From b952f4976ce710323d470e0599722b234daa12ce Mon Sep 17 00:00:00 2001 From: willtebbutt Date: Tue, 3 Aug 2021 11:59:03 +0100 Subject: [PATCH 192/490] Bump patch (#1052) --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index a1b9e5d0c..5d04edb68 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.18-DEV" +version = "0.6.18" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 4e2ec8fb42d215fd2c66714f11e2a02b5b50b9ae Mon Sep 17 00:00:00 2001 From: WT Date: Tue, 3 Aug 2021 13:37:31 +0100 Subject: [PATCH 193/490] Bump patch --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index dc81fbc0a..7f777adb4 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.18" +version = "0.6.19" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From ae822e09bf892d0fef6eadb5ffb1c8b6df5985df Mon Sep 17 00:00:00 2001 From: willtebbutt Date: Tue, 3 Aug 2021 14:09:26 +0100 Subject: [PATCH 194/490] Update Project.toml Co-authored-by: Michael Abbott <32575566+mcabbott@users.noreply.github.com> --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 7f777adb4..8f0a00973 100644 --- a/Project.toml +++ b/Project.toml @@ -24,7 +24,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" ChainRules = "1.5" -ChainRulesCore = "1.0.1" +ChainRulesCore = "1.1" ChainRulesTestUtils = "1" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12" From ae9e1c37b4b9286378cd126d40495775448b3407 Mon Sep 17 00:00:00 2001 From: st-- Date: Thu, 5 Aug 2021 15:38:28 +0300 Subject: [PATCH 195/490] Fix docstring cross-reference --- src/lib/grad.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index 4ac7708b6..a522d685a 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -105,7 +105,7 @@ This reverse-mode Jacobian needs to evaluate the pullback once for each element Doing so is usually only efficient when `length(y)` is small compared to `length(a)`, otherwise forward mode is likely to be better. -See also [`withjacobian`](@ref), `hessian`](@ref), [`hessian_reverse`](@ref). +See also [`withjacobian`](@ref), [`hessian`](@ref), [`hessian_reverse`](@ref). # Examples From 05d0c2ae04f334a2ec61e42decfe1172d0f2e6e8 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Fri, 20 Aug 2021 19:20:34 +0200 Subject: [PATCH 196/490] Support kwargs in `rrule_via_ad` (#1055) * allow kwargs * add test for kwarg support * bump patch * unwrap closure * first solution * second solution kwf() * Revert "second solution kwf()" This reverts commit b53a381855963eebde2ed4274fb2059f9263e6bf. 
* avoid creating closure unnecesasrily * short function * first instead of only --- Project.toml | 2 +- src/compiler/chainrules.jl | 14 +++++++++++--- test/chainrules.jl | 7 +++++++ 3 files changed, 19 insertions(+), 4 deletions(-) diff --git a/Project.toml b/Project.toml index 8f0a00973..7e19b5a9a 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.19" +version = "0.6.20" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index fc02d68a8..aaec3951f 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -174,10 +174,18 @@ As per [`chain_rrule`](@ref) but with support for kwargs. return y, kw_zpullback end +function ChainRulesCore.rrule_via_ad(config::ZygoteRuleConfig, f_args...; kwargs...) + # create a closure to work around _pullback not accepting kwargs + # but avoid creating a closure unnecessarily (pullbacks of closures do not infer) + y, pb = if !isempty(kwargs) + kwf() = first(f_args)(Base.tail(f_args)...; kwargs...) + _y, _pb = _pullback(config.context, kwf) + _y, Δ -> first(_pb(Δ)).f_args # `first` should be `only` + else + _pullback(config.context, f_args...) + end -function ChainRulesCore.rrule_via_ad(config::ZygoteRuleConfig, f, args...) - y, pb = _pullback(config.context, f, args...) - ad_pullback(Δ) = zygote2differential(pb(wrap_chainrules_output(Δ)), (f, args...)) + ad_pullback(Δ) = zygote2differential(pb(wrap_chainrules_output(Δ)), f_args) return y, ad_pullback end diff --git a/test/chainrules.jl b/test/chainrules.jl index 30b758d04..2a76b081e 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -275,6 +275,13 @@ end test_rrule(ZygoteRuleConfig(), getindex, rand(5), 3; rrule_f=rrule_via_ad) end + @testset "kwargs" begin + test_rrule( + ZygoteRuleConfig(), sum, [1.0 2; 3 4]; + rrule_f=rrule_via_ad, check_inferred=false, fkwargs=(;dims=1) + ) + end + @testset "struct" begin struct Foo x From 9b977a97c67cf07f37a076e4b8b2ee4a05e0cda8 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Mon, 6 Sep 2021 14:43:57 +0530 Subject: [PATCH 197/490] add buffer typevar --- src/lib/broadcast.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index bcd0cbc35..8510ae2dd 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -263,8 +263,8 @@ end end - @adjoint CUDA.CuArray{N,T}(xs::Array) where {N,T} = - CUDA.CuArray{N,T}(xs), Δ -> (convert(Array, Δ), ) + @adjoint CUDA.CuArray{N,T,B}(xs::Array) where {N,T,B} = + CUDA.CuArray{N,T,B}(xs), Δ -> (convert(Array, Δ), ) @adjoint function sum(xs::CUDA.AbstractGPUArray; dims = :) placeholder = similar(xs) From 1ef96e7477da29661c0d5c7b4c9d9e4ec2a079d7 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Mon, 6 Sep 2021 19:56:24 +0530 Subject: [PATCH 198/490] use type dispatch --- src/lib/broadcast.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 8510ae2dd..b9d11aa79 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -263,8 +263,8 @@ end end - @adjoint CUDA.CuArray{N,T,B}(xs::Array) where {N,T,B} = - CUDA.CuArray{N,T,B}(xs), Δ -> (convert(Array, Δ), ) + @adjoint (::Type{CUDA.CuArray{T,N}})(xs::Array) where {T,N} = + CUDA.CuArray{T,N}(xs), Δ -> (convert(Array, Δ), ) @adjoint function sum(xs::CUDA.AbstractGPUArray; dims = :) placeholder = similar(xs) From 807e5e5b3e99e8f5bb60e721b77d9a95261e62a9 Mon Sep 17 
00:00:00 2001 From: Dhairya Gandhi Date: Mon, 6 Sep 2021 23:12:34 +0530 Subject: [PATCH 199/490] fix rule --- src/lib/broadcast.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index b9d11aa79..1277f70c5 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -263,7 +263,7 @@ end end - @adjoint (::Type{CUDA.CuArray{T,N}})(xs::Array) where {T,N} = + @adjoint (::Type{<:CUDA.CuArray{T,N}})(xs::Array) where {T,N} = CUDA.CuArray{T,N}(xs), Δ -> (convert(Array, Δ), ) @adjoint function sum(xs::CUDA.AbstractGPUArray; dims = :) From 0c080e786e05d907d92200d3f03e11f1374831dd Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Tue, 7 Sep 2021 21:48:01 +0530 Subject: [PATCH 200/490] Update src/lib/broadcast.jl Co-authored-by: Michael Abbott <32575566+mcabbott@users.noreply.github.com> --- src/lib/broadcast.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 1277f70c5..2b2154e75 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -263,8 +263,8 @@ end end - @adjoint (::Type{<:CUDA.CuArray{T,N}})(xs::Array) where {T,N} = - CUDA.CuArray{T,N}(xs), Δ -> (convert(Array, Δ), ) + @adjoint (::Type{T})(xs::Array) where {T<:CUDA.CuArray} = + T(xs), Δ -> (convert(Array, Δ), ) @adjoint function sum(xs::CUDA.AbstractGPUArray; dims = :) placeholder = similar(xs) From 649a6ac73fac3e3195342e10725319a4f5d924bc Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Tue, 7 Sep 2021 21:48:56 +0530 Subject: [PATCH 201/490] whitespaces --- src/lib/broadcast.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 2b2154e75..de69e7a85 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -263,7 +263,7 @@ end end - @adjoint (::Type{T})(xs::Array) where {T<:CUDA.CuArray} = + @adjoint (::Type{T})(xs::Array) where {T <: CUDA.CuArray} = T(xs), Δ -> (convert(Array, Δ), ) @adjoint function sum(xs::CUDA.AbstractGPUArray; dims = :) From f4536a87467d22432a401e260a617d6c6821fe91 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Wed, 8 Sep 2021 07:02:31 +0200 Subject: [PATCH 202/490] fix pair getfield adjoint --- src/lib/base.jl | 25 ++++++++++++++----------- test/features.jl | 29 +++++++++++++++++------------ 2 files changed, 31 insertions(+), 23 deletions(-) diff --git a/src/lib/base.jl b/src/lib/base.jl index 67f8b2c5e..d9e748f9c 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -118,23 +118,26 @@ end # named tuple @adjoint function pairs(t::NamedTuple{N}) where N - pairs_namedtuple(dx::NamedTuple) = (dx.data,) - function pairs_namedtuple(Δ::Dict) - t0 = map(zero, t) - for (idx, v) in Δ - t0 = NamedTuple{N}(Base.setindex((t0...,), v, idx)) - end - return (t0,) + + pairs_namedtuple_pullback(dx::NamedTuple) = (dx.data,) + + function pairs_namedtuple_pullback(Δ::Dict) + t0 = map(zero, t) + for (idx, v) in Δ + t0 = NamedTuple{N}(Base.setindex((t0...,), v, idx)) end - return pairs(t), pairs_namedtuple + return (t0,) + end + + return pairs(t), pairs_namedtuple_pullback end @adjoint function Base.getfield(p::Pair, i::Int) - function pair_getfield(Δ) - f, s = i == 1 ? (Δ, zero(p[2])) : (zero(p[1]), Δ) + function pair_getfield_pullback(Δ) + f, s = i == 1 ? 
(Δ, nothing) : (nothing, Δ) return (first=f, second=s), nothing end - return getfield(p, i), pair_getfield + return getfield(p, i), pair_getfield_pullback end @adjoint Base.nameof(x::UnionAll) = nameof(x), _ -> (nothing,) diff --git a/test/features.jl b/test/features.jl index b17f55b41..7f9a1f70c 100644 --- a/test/features.jl +++ b/test/features.jl @@ -424,17 +424,17 @@ end end mutable struct MyMutable - value::Float64 + value::Float64 end function foo!(m::MyMutable, x) - m.value = x + m.value = x end function baz(args) - m = MyMutable(0.) - foo!(m, args...) - m.value + m = MyMutable(0.) + foo!(m, args...) + m.value end let @@ -449,13 +449,18 @@ end @test pullback(type_test)[1] == Complex{<:Real} @testset "Pairs" begin - @test (x->10*pairs((a=x, b=2))[1])'(100) === 10.0 - @test (x->10*pairs((a=x, b=2))[2])'(100) === 0 - foo(;kw...) = 1 - @test gradient(() -> foo(a=1,b=2.0)) === () - - @test (x->10*(x => 2)[1])'(100) === 10.0 - @test (x->10*(x => 2)[2])'(100) === 0 + @test (x->10*pairs((a=x, b=2))[1])'(100) === 10.0 + @test (x->10*pairs((a=x, b=2))[2])'(100) === 0 + foo(;kw...) = 1 + @test gradient(() -> foo(a=1,b=2.0)) === () + + @test (x->10*(x => 2)[1])'(100) === 10.0 + @test (x->10*(x => 2)[2])'(100) === 0 + + @test gradient(x-> (:x => x)[2], 17) == (1,) + + d = Dict(:x=>1.0, :y=>3.0); + @test gradient(d -> Dict(:x => d[:x])[:x], d) == (Dict(:x => 1),) end # https://github.com/JuliaDiff/ChainRules.jl/issues/257 From b7ee5381822a7de3265223baaf8f688cda1ab2a1 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Wed, 8 Sep 2021 07:58:52 +0200 Subject: [PATCH 203/490] fix test --- src/lib/array.jl | 1 + test/features.jl | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 15b994564..c63f0f74a 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -246,6 +246,7 @@ end @nograd workers function _pullback(cx::AContext, ::typeof(collect), g::Base.Generator) + @show g.f g.iter y, b = ∇map(cx, g.f, g.iter) back(::Nothing) = nothing function back(ȳ) diff --git a/test/features.jl b/test/features.jl index 7f9a1f70c..f3931464d 100644 --- a/test/features.jl +++ b/test/features.jl @@ -455,7 +455,7 @@ end @test gradient(() -> foo(a=1,b=2.0)) === () @test (x->10*(x => 2)[1])'(100) === 10.0 - @test (x->10*(x => 2)[2])'(100) === 0 + @test (x->10*(x => 2)[2])'(100) === nothing @test gradient(x-> (:x => x)[2], 17) == (1,) From d9227ba07bdc36d74f804fbdd04aa251755cb0e5 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Wed, 8 Sep 2021 10:31:59 +0200 Subject: [PATCH 204/490] cleanup --- src/lib/array.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index c63f0f74a..15b994564 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -246,7 +246,6 @@ end @nograd workers function _pullback(cx::AContext, ::typeof(collect), g::Base.Generator) - @show g.f g.iter y, b = ∇map(cx, g.f, g.iter) back(::Nothing) = nothing function back(ȳ) From c658277c8b6208b33b70866fab56a4bc25955d3c Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Thu, 2 Sep 2021 16:15:26 +0100 Subject: [PATCH 205/490] Support functions that splat namedtuples as keyword arguments --- src/lib/base.jl | 9 +++++---- test/features.jl | 9 +++++++++ 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/src/lib/base.jl b/src/lib/base.jl index d9e748f9c..8c8799585 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -118,17 +118,18 @@ end # named tuple @adjoint function pairs(t::NamedTuple{N}) where N - - pairs_namedtuple_pullback(dx::NamedTuple) = (dx.data,) 
+ pairs_namedtuple_pullback(dx::NamedTuple) = (dx.data,) + function pairs_namedtuple_pullback(Δ::Dict) t0 = map(zero, t) for (idx, v) in Δ - t0 = NamedTuple{N}(Base.setindex((t0...,), v, idx)) + ii = idx isa Integer ? idx : findfirst(==(idx), keys(t)) + t0 = NamedTuple{N}(Base.setindex((t0...,), v, ii)) end return (t0,) end - + return pairs(t), pairs_namedtuple_pullback end diff --git a/test/features.jl b/test/features.jl index f3931464d..819ab3fd0 100644 --- a/test/features.jl +++ b/test/features.jl @@ -463,6 +463,15 @@ end @test gradient(d -> Dict(:x => d[:x])[:x], d) == (Dict(:x => 1),) end +@testset "kwarg splatting, pass in object" begin + g(; kwargs...) = kwargs[:x] * kwargs[:z] + h(somedata) = g(; somedata...) + @test gradient(h, (; x=3.0, y=4.0, z=2.3)) == ((x = 2.3, y = 0.0, z = 3.0),) + + # Currently broken because we fallback to ADing the `merge(::NamedTuple, itr)` which uses `push!`. + @test_broken gradient(h, Dict(:x=>3.0, :y=>4.0, :z=>2.3)) isa Any +end + # https://github.com/JuliaDiff/ChainRules.jl/issues/257 @testset "Keyword Argument Passing" begin struct Type1{VJP} From 3172e1cd5a8de885e495008d0e7a73e5831fdcb5 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Thu, 2 Sep 2021 16:20:24 +0100 Subject: [PATCH 206/490] bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 7e19b5a9a..0cc618af0 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.20" +version = "0.6.21" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From a6615f1a7be3e2cb6e240bac0f04c1c8e89abce4 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Thu, 2 Sep 2021 16:44:10 +0100 Subject: [PATCH 207/490] Support passing kwargs as splatted dict (by writing a adjoint for merge(namedtuple, dict) --- src/lib/base.jl | 9 +++++++++ test/features.jl | 4 +--- 2 files changed, 10 insertions(+), 3 deletions(-) diff --git a/src/lib/base.jl b/src/lib/base.jl index 8c8799585..b7c2072a2 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -133,6 +133,15 @@ end return pairs(t), pairs_namedtuple_pullback end +# For merge between NamedTuple and Dict, we will just convert the Dict to a NamedTuple. +# and then call `pullback`, which should overall be pretty efficient code generated, +# and it avoids trying to AD the problematic generic `merge(::NamedTuple, ::iter)` method which uses `push!`. +if VERSION >= v"1.6" + @adjoint merge(nt::NamedTuple, dict::Dict) = pullback(merge, nt, NamedTuple(dict)) +else + @adjoint merge(nt::NamedTuple, dict::Dict) = pullback(merge, nt, (;dict...)) +end + @adjoint function Base.getfield(p::Pair, i::Int) function pair_getfield_pullback(Δ) f, s = i == 1 ? (Δ, nothing) : (nothing, Δ) diff --git a/test/features.jl b/test/features.jl index 819ab3fd0..2cf7d1976 100644 --- a/test/features.jl +++ b/test/features.jl @@ -467,9 +467,7 @@ end g(; kwargs...) = kwargs[:x] * kwargs[:z] h(somedata) = g(; somedata...) @test gradient(h, (; x=3.0, y=4.0, z=2.3)) == ((x = 2.3, y = 0.0, z = 3.0),) - - # Currently broken because we fallback to ADing the `merge(::NamedTuple, itr)` which uses `push!`. 
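The behaviour this pair of patches enables is easier to see outside the test harness. A minimal usage sketch, assuming a Zygote build that includes these changes; `g` and `h` simply mirror the test functions above, and the expected results are the ones asserted in the tests:

```julia
using Zygote

# Splatting a NamedTuple as keyword arguments: only the keywords actually
# used by `g` receive non-zero cotangents.
g(; kwargs...) = kwargs[:x] * kwargs[:z]
h(somedata) = g(; somedata...)

gradient(h, (; x = 3.0, y = 4.0, z = 2.3))
# ((x = 2.3, y = 0.0, z = 3.0),)

# With the explicit adjoint for merge(::NamedTuple, ::Dict) added above,
# the same works when the keyword arguments come from a Dict:
gradient(h, Dict(:x => 3.0, :y => 4.0, :z => 2.3))
# ((y = 0.0, z = 3.0, x = 2.3),)
```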
- @test_broken gradient(h, Dict(:x=>3.0, :y=>4.0, :z=>2.3)) isa Any + @test gradient(h, Dict(:x=>3.0, :y=>4.0, :z=>2.3)) == ((y = 0.0, z = 3.0, x = 2.3),) end # https://github.com/JuliaDiff/ChainRules.jl/issues/257 From 76c27d809a85592526b1fa7dd1ec2384958a1889 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Thu, 9 Sep 2021 14:01:40 -0400 Subject: [PATCH 208/490] use slow broadcasting for 2nd order --- src/lib/broadcast.jl | 2 +- test/features.jl | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index bcd0cbc35..300be6f8b 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -175,7 +175,7 @@ _dual_safearg(x) = false # Avoid generic broadcasting in two easy cases: if T == Bool return (f.(args...), _ -> nothing) - elseif T <: Real && isconcretetype(T) && _dual_purefun(F) && all(_dual_safearg, args) + elseif T <: Real && isconcretetype(T) && _dual_purefun(F) && all(_dual_safearg, args) && !isderiving() return broadcast_forward(f, args...) end len = inclen(args) diff --git a/test/features.jl b/test/features.jl index f3931464d..111be3759 100644 --- a/test/features.jl +++ b/test/features.jl @@ -551,4 +551,9 @@ end @test gradient((x,p) -> sum(x .^ p), [1.0,2.0,4.0], -1)[1] ≈ [-1.0, -0.25, -0.0625] @test gradient((x,p) -> sum(z -> z^p, x), [1.0,2.0,4.0], -1)[1] ≈ [-1.0, -0.25, -0.0625] @test gradient((x,p) -> mapreduce(z -> z^p, +, x), [1.0,2.0,4.0], -1)[1] ≈ [-1.0, -0.25, -0.0625] + + # second order + @test gradient(x -> sum(gradient(y -> sum(y.^2), x)[1]), [1, 2])[1] ≈ [2, 2] + @test gradient(x -> sum(gradient(y -> sum(sin.(y)), x)[1]), [1, 2])[1] ≈ [-0.8414709848078965, -0.9092974268256817] + @test gradient(x -> sum(abs, gradient(y -> sum(log.(2 .* exp.(y)) .^ 2), x)[1]), [1, 2])[1] ≈ [2,2] end From 52f5fb27b2f42aba29377cfe6a697745785ee95a Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Thu, 9 Sep 2021 14:52:08 -0400 Subject: [PATCH 209/490] add short-circuit to rrule_via_ad --- src/compiler/chainrules.jl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index aaec3951f..4bf7da28a 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -175,6 +175,10 @@ As per [`chain_rrule`](@ref) but with support for kwargs. end function ChainRulesCore.rrule_via_ad(config::ZygoteRuleConfig, f_args...; kwargs...) + # first check whether there is an `rrule` which handles this directly + direcct = rrule(config, f_args...; kwargs...) 
+ direcct === nothing || return direcct + # create a closure to work around _pullback not accepting kwargs # but avoid creating a closure unnecessarily (pullbacks of closures do not infer) y, pb = if !isempty(kwargs) From 3b50c5c7dd94516239d6a2f338c628f5798547cc Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Thu, 9 Sep 2021 19:30:29 -0400 Subject: [PATCH 210/490] test for shortcut --- test/chainrules.jl | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/test/chainrules.jl b/test/chainrules.jl index 2a76b081e..ec13f6e96 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -325,6 +325,18 @@ end test_rrule(ZygoteRuleConfig(), +, rand(3), rand(3); rrule_f=rrule_via_ad) test_rrule(ZygoteRuleConfig(), *, rand(1, 3), rand(3); rrule_f=rrule_via_ad) end + + @testset "rules which call rrule_via_ad" begin + # since cbrt has a rule, this will test the shortcut: + test_rrule(ZygoteRuleConfig(), sum, cbrt, randn(5)) + test_rrule(ZygoteRuleConfig(), sum, cbrt, randn(5); rrule_f=rrule_via_ad) + + # but x -> cbrt(x) has no rule, so will be done by Zygote + test_rrule(ZygoteRuleConfig(), sum, x -> cbrt(x), randn(5)) + test_rrule(ZygoteRuleConfig(), sum, x -> cbrt(x), randn(5); rrule_f=rrule_via_ad) + + test_rrule(ZygoteRuleConfig(), identity∘sum, x -> cbrt(x), randn(5); rrule_f=rrule_via_ad, check_inferred=false) + end end @testset "FastMath support" begin From 1b7dacc368e80d0b54c3d95431444571e526dc18 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Thu, 9 Sep 2021 19:38:33 -0400 Subject: [PATCH 211/490] bump --- Project.toml | 2 +- test/chainrules.jl | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/Project.toml b/Project.toml index 7e19b5a9a..0cc618af0 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.20" +version = "0.6.21" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/test/chainrules.jl b/test/chainrules.jl index ec13f6e96..b87a9ea3e 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -334,8 +334,6 @@ end # but x -> cbrt(x) has no rule, so will be done by Zygote test_rrule(ZygoteRuleConfig(), sum, x -> cbrt(x), randn(5)) test_rrule(ZygoteRuleConfig(), sum, x -> cbrt(x), randn(5); rrule_f=rrule_via_ad) - - test_rrule(ZygoteRuleConfig(), identity∘sum, x -> cbrt(x), randn(5); rrule_f=rrule_via_ad, check_inferred=false) end end From 7f2fed9c58d4650c0295fd9aa0ee92e643a0f0c0 Mon Sep 17 00:00:00 2001 From: Lyndon White Date: Fri, 10 Sep 2021 10:51:04 +0100 Subject: [PATCH 212/490] bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 0cc618af0..476419f64 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.21" +version = "0.6.22" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 57adb2d2ca919d937dc815e3e660a121feb1c160 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 10 Sep 2021 11:34:05 -0400 Subject: [PATCH 213/490] unbump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 476419f64..0cc618af0 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.22" +version = "0.6.21" 
[deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 528e0be677d1feb9ccf6fc4ab298f4d8a106de10 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 21 Sep 2021 23:04:13 -0400 Subject: [PATCH 214/490] Use `ProjectTo` in broadcasting & `gradient` (#1044) * use ProjectTo in broadcasting, etc * separate methods for Params * move after defn * better dims handling in unbroadcast * tidier * tests * more wrapping * fix a test * handle a few nothings * fix more, including FFT tests * tests * one test * tests * tests * tests * these are fixed * add Compat * tests * add tests for issues closed * simplify, some doctests * fix some tests * less piracy * adjoint * piract * skip a test * splat tests * skip on 1.3 * simplify _project * a typo * tweak * broken GPU test, unrelated * unexpected pass * only broken on 1.6 * let nothing through * rm some broken things * target 1.3 fix * comments * update for ProjectTo(::Any) * fix a test * Update test/utils.jl Co-authored-by: Lyndon White * Update src/lib/broadcast.jl * cu tests * v0.6.22 Co-authored-by: Lyndon White --- Project.toml | 4 +- README.md | 2 +- src/compiler/chainrules.jl | 22 +++++++++ src/compiler/interface.jl | 23 ++++++--- src/lib/array.jl | 2 +- src/lib/broadcast.jl | 19 ++++---- test/complex.jl | 33 ++++++++++++- test/cuda.jl | 39 ++++++++++++++- test/features.jl | 24 +++++++++- test/forward/forward.jl | 3 +- test/gradcheck.jl | 97 +++++++++++++++++++++----------------- test/structures.jl | 1 + test/utils.jl | 23 +++++---- 13 files changed, 214 insertions(+), 78 deletions(-) diff --git a/Project.toml b/Project.toml index 0cc618af0..56d086f99 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.21" +version = "0.6.22" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" @@ -24,7 +24,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" ChainRules = "1.5" -ChainRulesCore = "1.1" +ChainRulesCore = "1.6" ChainRulesTestUtils = "1" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12" diff --git a/README.md b/README.md index 8551bca87..6b2a6517d 100644 --- a/README.md +++ b/README.md @@ -18,7 +18,7 @@ julia> using Zygote julia> f(x) = 5x + 3 julia> f(10), f'(10) -(53, 5) +(53, 5.0) julia> @code_llvm f'(10) define i64 @"julia_#625_38792"(i64) { diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 4bf7da28a..e879af3f8 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -123,11 +123,33 @@ Convert `x` from the format Zygote uses internally to differentials types ChainR """ @inline wrap_chainrules_input(x) = x @inline wrap_chainrules_input(::Nothing) = ChainRules.ZeroTangent() +@inline wrap_chainrules_input(::AbstractArray{Nothing}) = ChainRules.ZeroTangent() @inline function wrap_chainrules_input(xs::Union{Tuple, NamedTuple}) xp = map(wrap_chainrules_input, xs) ChainRules.Tangent{Any, typeof(xp)}(xp) end +""" + _project(x, dx) + +Uses `ChainRulesCore.ProjectTo` to standardise the gradient `dx` for type & shape. +Also handles some Zygote-specific corrections, such as `x::Array, dx::Tuple`. +Safe to apply to arbitrary input. 
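The docstring above describes the central mechanism of this patch: results returned by `gradient` are standardised with `ProjectTo`. A minimal sketch of the user-visible effect, assuming this patch is applied; the expected values are taken from the tests updated further down in this same commit:

```julia
using Zygote

# A real input gets a real gradient, even when the computation passes
# through complex intermediates (the imaginary part is projected away):
gradient(x -> imag(conj(x) + 0.3im), 0.3)        # ≈ (0.0,)
gradient(x -> imag(conj(x) + 0.3im), 0.3 + 0im)  # ≈ (-1.0im,)

# Field-wise tangents of a Complex come back as a Complex number
# rather than a NamedTuple:
gradient(x -> x.re * x.im, 2 + 3im)              # (3.0 + 2.0im,)
```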
+""" +@inline function _project(x, dx) + wrap_chainrules_output(ProjectTo(x)(wrap_chainrules_input(dx))) +end + +# Restore splatted arrays +_project(x::AbstractArray, dx::Tuple) = _project(x, reshape(collect(dx), axes(x))) + +# Piracy: +# wrap_chainrules_input doesn't handle array of Union{Int,Nothing} +(::ChainRulesCore.ProjectTo)(::Nothing) = ChainRulesCore.NoTangent() + +# CRC likes Tangent{<:Complex}, but Zygote makes Tangent{Any} +(project::ProjectTo{<:Complex})(dx::Tangent) = project(Complex(dx.re, dx.im)) + """ ZBack{F}(back) <: Function diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index e4db33471..9dc934a49 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -68,15 +68,20 @@ julia> gradient([7, 11], 0, 1) do x, y, d p = size(x, d) sum(x.^p .+ y) end -([14.0, 22.0], 2, nothing) +([14.0, 22.0], 2.0, nothing) ``` """ function gradient(f, args...) y, back = pullback(f, args...) - return back(sensitivity(y)) + grad = back(sensitivity(y)) + isnothing(grad) ? nothing : map(_project, args, grad) end -Base.adjoint(f::Function) = x -> gradient(f, x)[1] +# Base.adjoint(f::Function) = x -> gradient(f, x)[1] # piracy! +Base.adjoint(f::Function) = x -> begin # still piracy! avoids projection for legacy reasons + y, back = pullback(f, x) + back(sensitivity(y))[1] +end """ withgradient(f, args...) @@ -95,7 +100,9 @@ true """ function withgradient(f, args...) y, back = pullback(f, args...) - (val = y, grad = back(sensitivity(y))) + grad = back(sensitivity(y)) + results = isnothing(grad) ? map(_ -> nothing, args) : map(_project, args, grad) + (val=y, grad=results) end # Param-style wrappers @@ -115,9 +122,9 @@ julia> g = gradient(Params([x, y])) do Grads(...) julia> g[x] -2×3 Matrix{Int64}: - 7 70 700 - 8 80 800 +2×3 Matrix{Float64}: + 7.0 70.0 700.0 + 8.0 80.0 800.0 julia> haskey(g, z) # only x and y are parameters false @@ -144,6 +151,8 @@ Params(xs::Tuple) = Params(collect(xs)) @forward Params.order Base.iterate, Base.length, Base.getindex @forward Params.params Base.in +Base.map(::typeof(_project), args::Tuple{Params}, grad) = grad # skip _project in gradient(f, ::Params) + function Base.union!(ps::Params, itrs...) foreach(itr -> foreach(x -> push!(ps, x), itr), itrs) return ps diff --git a/src/lib/array.jl b/src/lib/array.jl index 15b994564..9bec64b95 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -38,7 +38,7 @@ end dxv = view(dx, inds...) dxv .= accum.(dxv, _droplike(dy, dxv)) end - return (dx, map(_->nothing, inds)...) + return (_project(x, dx), map(_->nothing, inds)...) end """ diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 446e919b1..4e7a3a1cc 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -45,18 +45,19 @@ function Base.reducedim_init(::typeof(identity), ::typeof(accum), A::AbstractArr Base.reducedim_initarray(A, region, nothing, Union{Nothing,eltype(A)}) end -trim(x, Δ) = reshape(Δ, ntuple(i -> size(Δ, i), Val(ndims(x)))) -trim(x::Tuple, Δ) = NTuple{length(x)}(Δ) - -unbroadcast(x::AbstractArray, x̄) = - size(x) == size(x̄) ? x̄ : - length(x) == length(x̄) ? trim(x, x̄) : - trim(x, accum_sum(x̄, dims = ntuple(i -> size(x, i) == 1 ? i : ndims(x̄)+1, Val(ndims(x̄))))) - +function unbroadcast(x::AbstractArray, x̄) + N = ndims(x̄) + if length(x) == length(x̄) + _project(x, x̄) # ProjectTo handles reshape, offsets, structured matrices, row vectors + else + dims = ntuple(d -> size(x, d) == 1 ? 
d : ndims(x̄)+1, ndims(x̄)) + _project(x, accum_sum(x̄; dims = dims)) + end +end unbroadcast(x::Number, x̄) = accum_sum(x̄) unbroadcast(x::Tuple{<:Any}, x̄) = (accum_sum(x̄),) unbroadcast(x::Base.RefValue, x̄) = (x=accum_sum(x̄),) -unbroadcast(x::Tuple, x̄) = trim(x, length(x) == length(x̄) ? x̄ : accum_sum(x̄; dims=2:ndims(x̄))) # case length(x) > 1 +unbroadcast(x::Tuple, x̄) = NTuple{length(x)}(length(x) == length(x̄) ? x̄ : accum_sum(x̄; dims=2:ndims(x̄))) # case length(x) > 1 unbroadcast(x::AbstractArray, x̄::Nothing) = nothing diff --git a/test/complex.jl b/test/complex.jl index 6a0445b85..1abd1303f 100644 --- a/test/complex.jl +++ b/test/complex.jl @@ -1,9 +1,13 @@ using Zygote, Test, LinearAlgebra +@testset "basic" begin + @test gradient(x -> real(abs(x)*exp(im*angle(x))), 10+20im)[1] ≈ 1 @test gradient(x -> imag(real(x)+0.3im), 0.3)[1] ≈ 0 -@test gradient(x -> imag(conj(x)+0.3im), 0.3)[1] ≈ -1im -@test gradient(x -> abs((imag(x)+0.3)), 0.3)[1] == 1im +@test gradient(x -> imag(conj(x)+0.3im), 0.3 + 0im)[1] ≈ -1im +@test gradient(x -> imag(conj(x)+0.3im), 0.3)[1] ≈ 0 # projected to zero +@test gradient(x -> abs((imag(x)+0.3)), 0.3 + 0im)[1] ≈ 1im +@test gradient(x -> abs((imag(x)+0.3)), 0.3)[1] ≈ 0 @test gradient(a -> real((a*conj(a))), 0.3im)[1] == 0.6im @test gradient(a -> real((a.*conj(a))), 0.3im)[1] == 0.6im @@ -21,6 +25,8 @@ using Zygote, Test, LinearAlgebra @test gradient(x -> imag(sum(exp, x)), [1,2,3])[1] ≈ real(im .* exp.(1:3)) @test gradient(x -> imag(sum(exp, x)), [1+0im,2,3])[1] ≈ im .* exp.(1:3) +end # @testset + fs_C_to_R = (real, imag, abs, @@ -81,3 +87,26 @@ fs_C_to_C_non_holomorphic = (conj, end end end + +@testset "issue 342" begin + @test Zygote.gradient(x->real(x + 2.0*im), 3.0) == (1.0,) + @test Zygote.gradient(x->imag(x + 2.0*im), 3.0) == (0.0,) +end + +@testset "issue 402" begin + A = [1,2,3.0] + y, B_getindex = Zygote.pullback(x->getindex(x,2,1),Diagonal(A)) + bA = B_getindex(1)[1] + @test bA isa Diagonal + @test bA == [0.0 0.0 0.0; 0.0 0.0 0.0; 0.0 0.0 0.0] +end + +@testset "issue #917" begin + function fun(v) + c = v[1:3] + v[4:6]*im + r = v[7:9] + sum(r .* abs2.(c)) # This would be calling my actual function depending on r and c + end + @test Zygote.hessian(fun, collect(1:9)) ≈ [14 0 0 0 0 0 2 0 0; 0 16 0 0 0 0 0 4 0; 0 0 18 0 0 0 0 0 6; 0 0 0 14 0 0 8 0 0; 0 0 0 0 16 0 0 10 0; 0 0 0 0 0 18 0 0 12; 2 0 0 8 0 0 0 0 0; 0 4 0 0 10 0 0 0 0; 0 0 6 0 0 12 0 0 0] +end + diff --git a/test/cuda.jl b/test/cuda.jl index 3999ace59..5cb1c8cdc 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -1,12 +1,20 @@ using CUDA using Zygote: Grads +using LinearAlgebra using Random: randn! CUDA.allowscalar(false) # Test GPU movement inside the call to `gradient` @testset "GPU movement" begin r = rand(Float32, 3,3) - @test gradient(x -> sum(cu(x)), r)[1] isa Array{Float32, 2} + @test gradient(x -> sum(cu(x)), r)[1] isa Matrix{Float32} + @test gradient(x -> sum(x->log(x), cu(x)), r)[1] isa Matrix + @test gradient((x,cy) -> sum(cu(x) * cy) + sum(cy'), r, cu(r))[2] isa CUDA.CuArray + @test_skip gradient((x,cy) -> sum(cu(x[:,1])' * cy), r, cu(r))[2] isa CUDA.CuArray # generic_matmatmul! 
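The projection now also runs inside `unbroadcast` (see the rewrite above), which is what the updated GPU and gradcheck tests in this commit are checking: element types and structured wrappers of the inputs are preserved in the gradients. A small CPU-only sketch of the same behaviour, assuming this patch; no CUDA is required:

```julia
using Zygote, LinearAlgebra

# Element type is preserved: a Float32 input yields a Float32 gradient.
gradient(norm, randn(Float32, 2, 2))[1] isa Matrix{Float32}        # true

# Structured wrappers are restored: broadcasting exp over a Diagonal makes a
# dense matrix, but the gradient is projected back onto the Diagonal structure.
gradient(x -> sum(exp.(x)), Diagonal([1.0, 2.0]))[1] isa Diagonal  # true
```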
+ + # Other direction: + @test_skip gradient(x -> sum(Array(x)), cu(r))[1] isa CUDA.CuArray + @test_skip gradient((x,cy) -> sum(x * Array(cy)) + sum(cy'), r, cu(r))[2] isa CUDA.CuArray end @testset "broadcasting" begin @@ -31,10 +39,19 @@ end g3 = gradient(x -> sum(x .^ 3) / count(x .> 3), a)[1] # was Can't differentiate gc_preserve_end expression @test_skip cu(g3) ≈ gradient(x -> sum(x .^ 3) / sum(x .> 3), a_gpu)[1] # was KernelException -- not fixed by PR #1018 @test cu(g3) ≈ gradient(x -> sum(x .^ 3) / count(x .> 3), a_gpu)[1] + + # Projection: eltype preservation: + @test gradient(x -> 2.3 * sum(x.^4), a_gpu)[1] isa CuArray{Float32} + @test_skip gradient(x -> sum(x .* 5.6), a_gpu)[1] isa CUDA.CuArray{Float32} # dot(x::CuArray{Float64}, y::CuArray{Float32}) fallback + # structure restoration: + @test gradient(x -> sum(sqrt.(x)), a_gpu')[1] isa Adjoint # previously a matrix + @test gradient(x -> sum(exp.(x)), Diagonal(a_gpu))[1] isa Diagonal + # non-differentiables + @test gradient((x,y) -> sum(x.^2 .+ y'), a_gpu, a_gpu .> 0)[2] === nothing end @testset "sum(f, x)" begin - a = Float32.([-1.5, -9.0, 2.4, -1.3, 0.01]) + a = Float32[-1.5, -9.0, 2.4, -1.3, 0.01] a_gpu = a |> cu f(x) = sum(abs, x) @@ -42,6 +59,18 @@ end g_gpu = gradient(f, a_gpu)[1] @test g_gpu isa CuArray @test g_gpu |> collect ≈ g + + f2(x) = sum(abs2, x) # sum(abs2, x) has its own rrule + g2 = gradient(f2, a)[1] + g2_gpu = gradient(f2, a_gpu)[1] + @test g2_gpu isa CuArray + @test g2_gpu |> collect ≈ g2 + + f3(x) = sum(y->y^3, x') # anonymous function + g3 = gradient(f3, a')[1] + g3_gpu = gradient(f3, a_gpu')[1] + @test g3_gpu isa Adjoint{Float32, <:CuArray{Float32, 1}} # preserves structure + @test g3_gpu |> collect ≈ g3 end @testset "jacobian" begin @@ -103,5 +132,11 @@ end r = cu(rand(Float32, 3)) grads = (cu(ones(Float32, 3)), 1.f0) @test gradient((x,y) -> sum(vcat(x,y)), r, 5) == grads + + @test gradient((x,y) -> sum(vcat(x,y)), r, Float64(5))[1] isa CUDA.CuArray{Float32} + @test gradient((x,y) -> sum(vcat(x,y)), r, Float64(5))[2] isa Float64 # projection + + @test_skip gradient((x,y) -> sum(vcat(x,y)), 5f0, r)[2] isa CUDA.CuArray{Float32} # wrong order + @test_skip gradient((x,y) -> sum(vcat(x,y)), 1f0, r, 2f0, r)[2] isa CUDA.CuArray{Float32} end diff --git a/test/features.jl b/test/features.jl index 8c460dc98..d683d0d94 100644 --- a/test/features.jl +++ b/test/features.jl @@ -176,9 +176,9 @@ end @test gradient(t -> t[1]*t[2], (2, 3)) == ((3, 2),) -@test gradient(x -> x.re, 2+3im) == ((re = 1, im = nothing),) +@test gradient(x -> x.re, 2+3im) === (1.0 + 0.0im,) -@test gradient(x -> x.re*x.im, 2+3im) == ((re = 3, im = 2),) +@test gradient(x -> x.re*x.im, 2+3im) == (3.0 + 2.0im,) struct Bar{T} a::T @@ -262,6 +262,7 @@ D(f, x) = grad(f, x)[1] @test D(x -> x*D(y -> x+y, 1), 1) == 1 @test D(x -> x*D(y -> x*y, 1), 4) == 8 +@test sin''(1.0) == -sin(1.0) @test sin'''(1.0) == -cos(1.0) f(x) = throw(DimensionMismatch("fubar")) @@ -499,6 +500,25 @@ end @test x[1] == x[2] end +@testset "splats" begin + @test gradient(x -> max(x...), [1,2,3])[1] == [0,0,1] + @test gradient(x -> min(x...), (1,2,3))[1] === (1.0, 0.0, 0.0) + + @test gradient(x -> max(x...), [1 2; 3 4])[1] == [0 0; 0 1] + @test gradient(x -> max(x...), [1,2,3]')[1] == [0 0 1] + + # https://github.com/FluxML/Zygote.jl/issues/599 + @test gradient(w -> sum([w...]), [1,1])[1] isa AbstractVector + + # https://github.com/FluxML/Zygote.jl/issues/866 + f866(x) = reshape(x, fill(2, 2)...) 
+ @test gradient(x->sum(f866(x)), rand(4))[1] == [1,1,1,1] + + # https://github.com/FluxML/Zygote.jl/issues/731 + f731(x) = sum([x' * x, x...]) + @test_broken gradient(f731, ones(3)) # MethodError: no method matching +(::Tuple{Float64, Float64, Float64}, ::Vector{Float64}) +end + @testset "accumulation" begin # from https://github.com/FluxML/Zygote.jl/issues/905 function net(x1) diff --git a/test/forward/forward.jl b/test/forward/forward.jl index 3ae0f6e3a..6aa9173ef 100644 --- a/test/forward/forward.jl +++ b/test/forward/forward.jl @@ -36,7 +36,8 @@ end == 1 x end == 0 -@test D(x -> abs(x+2im), 1) == gradient(x -> abs(x+2im), 1)[1] +@test D(x -> abs(x+2im), 1) == gradient(x -> abs(x+2im), 1+0im)[1] +@test real(D(x -> abs(x+2im), 1)) == gradient(x -> abs(x+2im), 1)[1] # ProjectTo means gradient here is real using LinearAlgebra diff --git a/test/gradcheck.jl b/test/gradcheck.jl index eab959ddd..af49b7697 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -177,7 +177,7 @@ end # Ensure that nothings work with non-numeric types. _, back = Zygote.pullback(getindex, [randn(2) for _ in 1:3], [1]) - @test back([nothing]) == ([nothing for _ in 1:3], nothing) + @test back([nothing]) == (nothing, nothing) end @testset "view" begin @@ -332,10 +332,10 @@ end @test gradient(x -> sum(log, filter(iseven, x)), 1:10) == (map(x -> iseven(x) ? 1/x : 0, 1:10),) @test gradient(x -> sum(abs2, im .+ filter(iseven, x)), 1:10) == - (map(x -> iseven(x) ? 2x+2im : 0, 1:10),) + (map(x -> iseven(x) ? 2x : 0, 1:10),) + # (map(x -> iseven(x) ? 2x+2im : 0, 1:10),) end - @testset "mean" begin @test gradtest(mean, rand(2, 3)) @@ -1157,10 +1157,10 @@ end end @testset "hvcat" begin - @test gradient(xs -> hvcat((2,2),xs...)[1,1], [1,2,3,4])[1] == (1,0,0,0) - @test gradient(xs -> hvcat((2,2),xs...)[2,1], [1,2,3,4])[1] == (0,0,1,0) - @test gradient(xs -> hvcat((2,2),xs...)[1,2], [1,2,3,4])[1] == (0,1,0,0) - @test gradient(xs -> hvcat((2,2),xs...)[2,2], [1,2,3,4])[1] == (0,0,0,1) + @test gradient(xs -> hvcat((2,2),xs...)[1,1], [1,2,3,4])[1] == [1,0,0,0] + @test gradient(xs -> hvcat((2,2),xs...)[2,1], [1,2,3,4])[1] == [0,0,1,0] + @test gradient(xs -> hvcat((2,2),xs...)[1,2], [1,2,3,4])[1] == [0,1,0,0] + @test gradient(xs -> hvcat((2,2),xs...)[2,2], [1,2,3,4])[1] == [0,0,0,1] # https://github.com/FluxML/Zygote.jl/issues/513 @test gradient(x -> hvcat((2,2),1,2,3,x)[4], 4.0) == (1.0,) end @@ -1375,10 +1375,10 @@ using Zygote: Buffer @test gs[1] ≈ map(x -> one.(x), p) @test gs[2] ≈ one.(r) - p = [rand(3,3), rand(3,3)] # redefine `p` after mutation - gs = gradient(x -> sum(pop!(x)), p) - @test length(gs[1]) == 2 - @test gs[1][1] == one.(p[1]) + # p = [rand(3,3), rand(3,3)] # redefine `p` after mutation + # gs = gradient(x -> sum(pop!(x)), p) + # @test length(gs[1]) == 2 + # @test gs[1][1] == one.(p[1]) end end @@ -1403,6 +1403,17 @@ end end @testset "AbstractFFTs" begin + + # Many of these tests check a complex gradient to a function with real input. This is now + # clamped to real by ProjectTo, but to run the old tests, use here the old gradient function: + function oldgradient(f, args...) + y, back = Zygote.pullback(f, args...) + back(Zygote.sensitivity(y)) + end + # Eventually these rules and tests will be moved to ChainRules.jl, at which point the tests + # can be updated to use real / complex consistently. + # https://github.com/JuliaMath/AbstractFFTs.jl/pull/58 + findicateMat(i,j,n1,n2) = [(k==i) && (l==j) ? 
1.0 : 0.0 for k=1:n1, l=1:n2] mirrorIndex(i,N) = i - 2*max(0,i - (N>>1+1)) @@ -1415,11 +1426,11 @@ end indicateMat = [(k==i) && (l==j) ? 1.0 : 0.0 for k=1:size(X, 1), l=1:size(X,2)] # gradient of ifft(fft) must be (approximately) 1 (for various cases) - @test gradient((X)->real.(ifft(fft(X))[i, j]), X)[1] ≈ indicateMat + @test oldgradient((X)->real.(ifft(fft(X))[i, j]), X)[1] ≈ indicateMat # same for the inverse - @test gradient((X̂)->real.(fft(ifft(X̂))[i, j]), X̂)[1] ≈ indicateMat + @test oldgradient((X̂)->real.(fft(ifft(X̂))[i, j]), X̂)[1] ≈ indicateMat # same for rfft(irfft) - @test gradient((X)->real.(irfft(rfft(X), size(X,1)))[i, j], X)[1] ≈ real.(indicateMat) + @test oldgradient((X)->real.(irfft(rfft(X), size(X,1)))[i, j], X)[1] ≈ real.(indicateMat) # rfft isn't actually surjective, so rffft(irfft) can't really be tested this way. # the gradients are actually just evaluating the inverse transform on the @@ -1438,22 +1449,22 @@ end ((K)->(irfft(K,sizeX[1])), 1/N * rfft(indicateMat), zeros(size(X̂r)), plan_rfft(X), i, X̂r)] for (trans, solRe, solIm, P, mI, evalX) in listOfSols - @test gradient((X)->real.(trans(X))[mI, j], evalX)[1] ≈ + @test oldgradient((X)->real.(trans(X))[mI, j], evalX)[1] ≈ solRe - @test gradient((X)->imag.(trans(X))[mI, j], evalX)[1] ≈ + @test oldgradient((X)->imag.(trans(X))[mI, j], evalX)[1] ≈ solIm if typeof(P) <:AbstractFFTs.Plan && maximum(trans .== [fft,rfft]) - @test gradient((X)->real.(P * X)[mI, j], evalX)[1] ≈ + @test oldgradient((X)->real.(P * X)[mI, j], evalX)[1] ≈ solRe - @test gradient((X)->imag.(P * X)[mI, j], evalX)[1] ≈ + @test oldgradient((X)->imag.(P * X)[mI, j], evalX)[1] ≈ solIm elseif typeof(P) <: AbstractFFTs.Plan - @test gradient((X)->real.(P \ X)[mI, j], evalX)[1] ≈ + @test oldgradient((X)->real.(P \ X)[mI, j], evalX)[1] ≈ solRe # for whatever reason the rfft_plan doesn't handle this case well, # even though irfft does if eltype(evalX) <: Real - @test gradient((X)->imag.(P \ X)[mI, j], evalX)[1] ≈ + @test oldgradient((X)->imag.(P \ X)[mI, j], evalX)[1] ≈ solIm end end @@ -1464,47 +1475,47 @@ end x = [-0.353213 -0.789656 -0.270151; -0.95719 -1.27933 0.223982] # check ffts for individual dimensions for trans in (fft, ifft, bfft) - @test gradient((x)->sum(abs.(trans(x))), x)[1] ≈ - gradient( (x) -> sum(abs.(trans(trans(x,1),2))), x)[1] + @test oldgradient((x)->sum(abs.(trans(x))), x)[1] ≈ + oldgradient( (x) -> sum(abs.(trans(trans(x,1),2))), x)[1] # switch sum abs order - @test gradient((x)->abs(sum((trans(x)))),x)[1] ≈ - gradient( (x) -> abs(sum(trans(trans(x,1),2))), x)[1] + @test oldgradient((x)->abs(sum((trans(x)))),x)[1] ≈ + oldgradient( (x) -> abs(sum(trans(trans(x,1),2))), x)[1] # dims parameter for the function - @test gradient((x, dims)->sum(abs.(trans(x,dims))), x, (1,2))[1] ≈ - gradient( (x) -> sum(abs.(trans(x))), x)[1] + @test oldgradient((x, dims)->sum(abs.(trans(x,dims))), x, (1,2))[1] ≈ + oldgradient( (x) -> sum(abs.(trans(x))), x)[1] # (1,2) should be the same as no index - @test gradient( (x) -> sum(abs.(trans(x,(1,2)))), x)[1] ≈ - gradient( (x) -> sum(abs.(trans(trans(x,1),2))), x)[1] + @test oldgradient( (x) -> sum(abs.(trans(x,(1,2)))), x)[1] ≈ + oldgradient( (x) -> sum(abs.(trans(trans(x,1),2))), x)[1] @test gradcheck(x->sum(abs.(trans(x))), x) @test gradcheck(x->sum(abs.(trans(x, 2))), x) end - @test gradient((x)->sum(abs.(rfft(x))), x)[1] ≈ - gradient( (x) -> sum(abs.(fft(rfft(x,1),2))), x)[1] - @test gradient((x, dims)->sum(abs.(rfft(x,dims))), x, (1,2))[1] ≈ - gradient( (x) -> sum(abs.(rfft(x))), x)[1] + @test 
oldgradient((x)->sum(abs.(rfft(x))), x)[1] ≈ + oldgradient( (x) -> sum(abs.(fft(rfft(x,1),2))), x)[1] + @test oldgradient((x, dims)->sum(abs.(rfft(x,dims))), x, (1,2))[1] ≈ + oldgradient( (x) -> sum(abs.(rfft(x))), x)[1] # Test type stability of fft x = randn(Float64,16) P = plan_fft(x) - @test typeof(gradient(x->sum(abs2,ifft(fft(x))),x)[1]) == Array{Complex{Float64},1} - @test typeof(gradient(x->sum(abs2,P\(P*x)),x)[1]) == Array{Complex{Float64},1} - @test typeof(gradient(x->sum(abs2,irfft(rfft(x),16)),x)[1]) == Array{Float64,1} + @test typeof(oldgradient(x->sum(abs2,ifft(fft(x))),x)[1]) == Array{Complex{Float64},1} + @test typeof(oldgradient(x->sum(abs2,P\(P*x)),x)[1]) == Array{Complex{Float64},1} + @test typeof(oldgradient(x->sum(abs2,irfft(rfft(x),16)),x)[1]) == Array{Float64,1} x = randn(Float64,16,16) - @test typeof(gradient(x->sum(abs2,ifft(fft(x,1),1)),x)[1]) == Array{Complex{Float64},2} - @test typeof(gradient(x->sum(abs2,irfft(rfft(x,1),16,1)),x)[1]) == Array{Float64,2} + @test typeof(oldgradient(x->sum(abs2,ifft(fft(x,1),1)),x)[1]) == Array{Complex{Float64},2} + @test typeof(oldgradient(x->sum(abs2,irfft(rfft(x,1),16,1)),x)[1]) == Array{Float64,2} x = randn(Float32,16) P = plan_fft(x) - @test typeof(gradient(x->sum(abs2,ifft(fft(x))),x)[1]) == Array{Complex{Float32},1} - @test typeof(gradient(x->sum(abs2,P\(P*x)),x)[1]) == Array{Complex{Float32},1} - @test typeof(gradient(x->sum(abs2,irfft(rfft(x),16)),x)[1]) == Array{Float32,1} + @test typeof(oldgradient(x->sum(abs2,ifft(fft(x))),x)[1]) == Array{Complex{Float32},1} + @test typeof(oldgradient(x->sum(abs2,P\(P*x)),x)[1]) == Array{Complex{Float32},1} + @test typeof(oldgradient(x->sum(abs2,irfft(rfft(x),16)),x)[1]) == Array{Float32,1} x = randn(Float32,16,16) - @test typeof(gradient(x->sum(abs2,ifft(fft(x,1),1)),x)[1]) == Array{Complex{Float32},2} - @test typeof(gradient(x->sum(abs2,irfft(rfft(x,1),16,1)),x)[1]) == Array{Float32,2} + @test typeof(oldgradient(x->sum(abs2,ifft(fft(x,1),1)),x)[1]) == Array{Complex{Float32},2} + @test typeof(oldgradient(x->sum(abs2,irfft(rfft(x,1),16,1)),x)[1]) == Array{Float32,2} end @testset "FillArrays" begin @@ -1668,7 +1679,7 @@ end # check that type is not unnecessarily promoted # https://github.com/FluxML/Zygote.jl/issues/663 @test gradient(norm, randn(Float32, 2, 2)) isa Tuple{Matrix{Float32}} - @test gradient(norm, randn(Float32, 2, 2), 3) isa Tuple{Matrix{Float32},Float32} + @test gradient(norm, randn(Float32, 2, 2), 3) isa Tuple{Matrix{Float32},Float64} @test gradient(norm, randn(Float32, 2, 2), 3f0) isa Tuple{Matrix{Float32},Float32} @test gradient(norm, randn(ComplexF32, 2, 2), 3.5f0) isa Tuple{Matrix{ComplexF32},Float32} diff --git a/test/structures.jl b/test/structures.jl index 37c0e246a..5a951a621 100644 --- a/test/structures.jl +++ b/test/structures.jl @@ -53,6 +53,7 @@ struct A594 x::Float64 end Y = randn(2,2) ∇ = gradient(g,X,Y) @test ∇[1] == [(x = 2.0,); (x = 2.0,)] + @test vec(∇[1]) == [(x = 2.0,); (x = 2.0,)] @test ∇[2] == [1 1; 1 1] end diff --git a/test/utils.jl b/test/utils.jl index 70a8ebd63..b6d6ed018 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -19,16 +19,22 @@ using Zygote: hessian_dual, hessian_reverse @test_throws Exception hess(identity, randn(2)) end -@testset "diagonal hessian" begin +VERSION > v"1.6-" && @testset "diagonal hessian" begin @test diaghessian(x -> x[1]*x[2]^2, [1, pi]) == ([0, 2],) - xs, y = randn(2,3), rand() - f34(xs, y) = xs[1] * (sum(xs .^ (1:3)') + y^4) # non-diagonal Hessian, two arguments - - dx, dy = diaghessian(f34, xs, y) - @test size(dx) 
== size(xs) - @test vec(dx) ≈ diag(hessian(x -> f34(x,y), xs)) - @test dy ≈ hessian(y -> f34(xs,y), y) + if VERSION > v"1.6-" + # Gradient of ^ may contain log(complex(...)), which interacts badly with Dual below Julia 1.6: + # julia> log(ForwardDiff.Dual(1,0) + 0im) # ERROR: StackOverflowError: + # https://github.com/JuliaDiff/ChainRules.jl/issues/525 + # Fixed in 1.6 by: https://github.com/JuliaLang/julia/pull/36030 + xs, y = randn(2,3), rand() + f34(xs, y) = xs[1] * (sum(xs .^ (1:3)') + y^4) # non-diagonal Hessian, two arguments + + dx, dy = diaghessian(f34, xs, y) + @test size(dx) == size(xs) + @test vec(dx) ≈ diag(hessian(x -> f34(x,y), xs)) + @test dy ≈ hessian(y -> f34(xs,y), y) + end zs = randn(7,13) # test chunk mode @test length(zs) > ForwardDiff.DEFAULT_CHUNK_THRESHOLD @@ -67,6 +73,7 @@ end j5 = jacobian((x,y) -> hcat(x[1], y), fill(pi), exp(1)) # zero-array @test j5[1] isa Matrix @test vec(j5[1]) == [1, 0] + @test j5[2] == [0, 1] @test_throws ArgumentError jacobian(identity, [1,2,3+im]) @test_throws ArgumentError jacobian(sum, [1,2,3+im]) # scalar, complex From cd177371506877ba093277adcac6af2bc86e065a Mon Sep 17 00:00:00 2001 From: willtebbutt Date: Fri, 24 Sep 2021 11:05:32 +0100 Subject: [PATCH 215/490] Update README The current README is a bit misleading in terms of performance, because Zygote really doesn't have good performance for control flow. I'm open to other suggestions for re-wording, but it seems reasonable that we temper what is currently there a bit. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 6b2a6517d..866c6a617 100644 --- a/README.md +++ b/README.md @@ -29,7 +29,7 @@ top: "Source-to-source" means that Zygote hooks into Julia's compiler, and generates the backwards pass for you – as if you had written it by hand. -Without compromising on performance, Zygote supports the full flexibility and dynamism of the Julia language, including control flow, recursion, closures, structs, dictionaries, and more. +Zygote supports the full flexibility and dynamism of the Julia language, including control flow, recursion, closures, structs, dictionaries, and more. 
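The reworded sentence still makes a strong claim about dynamism, which the Dict-of-functions demo that follows in the README illustrates. A further self-contained sketch of the same point, using only the exported `gradient` (the function `mypow` here is just an illustration, not Zygote API):

```julia
using Zygote

# Plain Julia control flow and recursion differentiate without annotations.
function mypow(x, n)
    n == 0 && return one(x)
    return x * mypow(x, n - 1)
end

gradient(x -> mypow(x, 3), 2.0)   # (12.0,)
```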
```julia julia> fs = Dict("sin" => sin, "cos" => cos, "tan" => tan); From 1f61e2c6ffde3cfc1f519fda9bcaeb9175b1031d Mon Sep 17 00:00:00 2001 From: WT Date: Fri, 24 Sep 2021 22:07:39 +0100 Subject: [PATCH 216/490] Add ProjectTo method --- src/compiler/chainrules.jl | 3 +++ test/chainrules.jl | 6 ++++++ 2 files changed, 9 insertions(+) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index e879af3f8..2bc122a13 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -150,6 +150,9 @@ _project(x::AbstractArray, dx::Tuple) = _project(x, reshape(collect(dx), axes(x) # CRC likes Tangent{<:Complex}, but Zygote makes Tangent{Any} (project::ProjectTo{<:Complex})(dx::Tangent) = project(Complex(dx.re, dx.im)) +# CRC likes Tangent{AbstractArray}, but Zygote makes Tangent{Any} +(project::ProjectTo{AbstractArray})(dx::Tangent) = dx + """ ZBack{F}(back) <: Function diff --git a/test/chainrules.jl b/test/chainrules.jl index b87a9ea3e..eaf05180a 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -335,6 +335,12 @@ end test_rrule(ZygoteRuleConfig(), sum, x -> cbrt(x), randn(5)) test_rrule(ZygoteRuleConfig(), sum, x -> cbrt(x), randn(5); rrule_f=rrule_via_ad) end + + @testset "ProjectTo{AbstractArray}(::Tangent{Any})" begin + X = UpperHessenberg(randn(5, 5)) + dX = Tangent{Any}(element=randn(5, 5)) + @test ProjectTo(X)(dX) === dX + end end @testset "FastMath support" begin From 54b0d90ec461d0bfe20ab3bd0d4a3f4fcaa9fcde Mon Sep 17 00:00:00 2001 From: WT Date: Fri, 24 Sep 2021 22:07:54 +0100 Subject: [PATCH 217/490] Bump patch version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 56d086f99..758f2090b 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.22" +version = "0.6.23" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From b6bde08771e678fe78f9301b7539cc837f7ccb81 Mon Sep 17 00:00:00 2001 From: WT Date: Fri, 24 Sep 2021 22:09:12 +0100 Subject: [PATCH 218/490] Note down issue in comment --- test/chainrules.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/test/chainrules.jl b/test/chainrules.jl index eaf05180a..80da51743 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -336,6 +336,7 @@ end test_rrule(ZygoteRuleConfig(), sum, x -> cbrt(x), randn(5); rrule_f=rrule_via_ad) end + # See https://github.com/FluxML/Zygote.jl/issues/1078 @testset "ProjectTo{AbstractArray}(::Tangent{Any})" begin X = UpperHessenberg(randn(5, 5)) dX = Tangent{Any}(element=randn(5, 5)) From 79454f33779b2263ef8c9ba46831d38fe26bfb34 Mon Sep 17 00:00:00 2001 From: willtebbutt Date: Fri, 24 Sep 2021 22:24:52 +0100 Subject: [PATCH 219/490] Update src/compiler/chainrules.jl Co-authored-by: Lyndon White --- src/compiler/chainrules.jl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 2bc122a13..8c3f1a84d 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -151,6 +151,9 @@ _project(x::AbstractArray, dx::Tuple) = _project(x, reshape(collect(dx), axes(x) (project::ProjectTo{<:Complex})(dx::Tangent) = project(Complex(dx.re, dx.im)) # CRC likes Tangent{AbstractArray}, but Zygote makes Tangent{Any} +# in particular this would hit https://github.com/JuliaDiff/ChainRulesCore.jl/blob/2ec2549b73b22bc08f554dae864fb650cfb9c3d7/src/projection.jl#L139 +# if we were not losing track of the Primal in the Tangent +# This type piracy is just giving up 
that safety check. (project::ProjectTo{AbstractArray})(dx::Tangent) = dx """ From 993eb16606893b0eece567fa48e3ed7eac250c72 Mon Sep 17 00:00:00 2001 From: Gabriel Birnbaum Date: Mon, 27 Sep 2021 08:24:06 +0200 Subject: [PATCH 220/490] added a dispatch for tuples to prevent LoadError --- src/lib/base.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/src/lib/base.jl b/src/lib/base.jl index b7c2072a2..370924656 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -120,6 +120,7 @@ end @adjoint function pairs(t::NamedTuple{N}) where N pairs_namedtuple_pullback(dx::NamedTuple) = (dx.data,) + pairs_namedtuple_pullback(dx::Tuple) = (dx.data,) function pairs_namedtuple_pullback(Δ::Dict) t0 = map(zero, t) From 14cedb5baf4588210e03326788e5cd5d9184b5ab Mon Sep 17 00:00:00 2001 From: Gabriel Birnbaum Date: Tue, 28 Sep 2021 07:53:05 +0200 Subject: [PATCH 221/490] fixed dispatch --- src/lib/base.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/lib/base.jl b/src/lib/base.jl index 370924656..6f590118c 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -120,7 +120,8 @@ end @adjoint function pairs(t::NamedTuple{N}) where N pairs_namedtuple_pullback(dx::NamedTuple) = (dx.data,) - pairs_namedtuple_pullback(dx::Tuple) = (dx.data,) + + pairs_namedtuple_pullback(dx::Tuple) = isempty(dx) ? (dx,) : (dx[1],) function pairs_namedtuple_pullback(Δ::Dict) t0 = map(zero, t) From 6f41395df6a9c19a9b980e782b9dfff4f5bb372f Mon Sep 17 00:00:00 2001 From: Gabriel Birnbaum Date: Tue, 28 Sep 2021 08:30:27 +0200 Subject: [PATCH 222/490] handle all tuples, not just empty ones --- src/lib/base.jl | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/lib/base.jl b/src/lib/base.jl index 6f590118c..c1a7cd38f 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -120,8 +120,14 @@ end @adjoint function pairs(t::NamedTuple{N}) where N pairs_namedtuple_pullback(dx::NamedTuple) = (dx.data,) - - pairs_namedtuple_pullback(dx::Tuple) = isempty(dx) ? (dx,) : (dx[1],) + + function pairs_namedtuple_pullback(dx::Tuple) + t0 = map(zero, t) + for (i, v) in enumerate(dx) + t0 = NamedTuple{N}(Base.setindex((t0...,), v, i)) + end + return (t0,) + end function pairs_namedtuple_pullback(Δ::Dict) t0 = map(zero, t) From 8acd8de258c12271df82630cf3975599a87bc5ff Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 28 Sep 2021 12:56:08 -0400 Subject: [PATCH 223/490] make OneElement constructor safer --- src/lib/array.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 9bec64b95..4f04809b7 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -50,7 +50,7 @@ struct OneElement{T,N,I,A} <: AbstractArray{T,N} val::T ind::I axes::A - OneElement(val::T, ind::I, axes::A) where {T<:Number, I<:NTuple{N,Int}, A} where {N} = new{T,N,I,A}(val, ind, axes) + OneElement(val::T, ind::I, axes::A) where {T<:Number, I<:NTuple{N,Int}, A<:NTuple{N,AbstractUnitRange}} where {N} = new{T,N,I,A}(val, ind, axes) end Base.size(A::OneElement) = map(length, A.axes) Base.axes(A::OneElement) = A.axes From 4e439224a49461ecb43161e72c8bc1421077d809 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 28 Sep 2021 19:57:50 -0400 Subject: [PATCH 224/490] Remove incorrect `push!` and `pop!` gradients (#1025) * fix push + pop gradient for vector of arrays, add real tests * tweak * allow only trivial gradients in push!(::Params) etc. 
* generalise, and fail * fix * rm gradients which don't work * rm unused methods from push(IdSet) gradient * restrict push error to arrays, rm adjoint for params * Update test/features.jl Co-authored-by: Brian Chen Co-authored-by: Brian Chen --- src/compiler/interface.jl | 14 -------------- src/lib/array.jl | 23 ++--------------------- test/gradcheck.jl | 16 ---------------- 3 files changed, 2 insertions(+), 51 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 9dc934a49..e210e65b6 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -183,20 +183,6 @@ function Base.push!(ps::Params, x) return ps end -@adjoint! function Base.push!(xs::IdSet, x...) - l = length(x) - push!(xs, x...), Δ -> begin - (Δ, ntuple(_ -> nothing, l)...) - end -end - -@adjoint! function Base.push!(xs::Params, x::AbstractArray{T}...) where T - sz_x = size.(x) - push!(xs, x...), Δ -> begin - (Δ, map(x -> Ones{T}(x...), sz_x)...) - end -end - Base.push!(ps::Params, x...) = (foreach(x -> push!(ps, x), x); ps) function Base.delete!(ps::Params, x) diff --git a/src/lib/array.jl b/src/lib/array.jl index 9bec64b95..035a1b239 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -74,27 +74,8 @@ _droplike(dy::Union{LinearAlgebra.Adjoint, LinearAlgebra.Transpose}, dxv::Abstra _ -> error("Mutating arrays is not supported -- called copyto!(::$(typeof(xs)), _...)") for f in [push!, pop!, pushfirst!, popfirst!] - @eval @adjoint! $f(xs, x...) = $f(xs, x...), - _ -> error("Mutating arrays is not supported -- called $($f)(::$(typeof(xs)), _...)") -end - -# This is kind of bad, but at least we don't materialize the whole -# array. Prefer to use `Buffer` -# function _pullback(cx::Context, ::typeof(push!), xs::AbstractVector{<:AbstractArray}, x::AbstractArray{T}...) where T -@adjoint! function push!(xs::AbstractVector{<:AbstractArray}, x::AbstractArray{T}...) where T - sz_xs = size.(xs) - sz_x = size.(x) - push!(xs, x...), Δ -> begin - (Δ, map(x -> Ones{T}(x...), sz_x)...) - end -end - -@adjoint! function pop!(xs::AbstractVector{<:AbstractArray{T}}) where T - sz_xs = size.(xs) - op = pop!(xs) - op, Δ -> begin - ([Ones{T}(sz...) for sz in sz_xs], ) - end + @eval @adjoint! $f(x::AbstractVector, ys...) 
= $f(x, ys...), + _ -> error("Mutating arrays is not supported -- called $($f)(::$(typeof(x)), _...)") end # General diff --git a/test/gradcheck.jl b/test/gradcheck.jl index af49b7697..87fe5f46f 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1365,22 +1365,6 @@ using Zygote: Buffer prod(copy(b)) end == (3,) - @testset "Limited Mutation" begin - p = [rand(3,3), rand(3,3)] - r = rand(5,5) - - # TODO: ngradient cannot handle Vector{Array} - gs = gradient((p,x) -> sum(sum.(push!(p,x))), p, r) - @test length(p[end]) == length(gs[1][end]) - @test gs[1] ≈ map(x -> one.(x), p) - @test gs[2] ≈ one.(r) - - # p = [rand(3,3), rand(3,3)] # redefine `p` after mutation - # gs = gradient(x -> sum(pop!(x)), p) - # @test length(gs[1]) == 2 - # @test gs[1][1] == one.(p[1]) - end - end @testset "FillArrays" begin From 355296e84f8b21c0f22204b0804af51653016137 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Wed, 29 Sep 2021 19:32:38 +0200 Subject: [PATCH 225/490] Update Project.toml --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 758f2090b..aa48d5b68 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.23" +version = "0.6.24" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 818e3e43a37130c91f36679bd785e841bdf08da5 Mon Sep 17 00:00:00 2001 From: Gabriel Birnbaum Date: Thu, 30 Sep 2021 10:28:10 +0200 Subject: [PATCH 226/490] clean up tuple dispatch --- src/lib/base.jl | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/src/lib/base.jl b/src/lib/base.jl index c1a7cd38f..55d660279 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -122,10 +122,7 @@ end pairs_namedtuple_pullback(dx::NamedTuple) = (dx.data,) function pairs_namedtuple_pullback(dx::Tuple) - t0 = map(zero, t) - for (i, v) in enumerate(dx) - t0 = NamedTuple{N}(Base.setindex((t0...,), v, i)) - end + t0 = isempty(dx) ? () : NamedTuple{N}(values(dx)) return (t0,) end From 7794f810f03cb5db639d3f1ba5cf9eb15238c011 Mon Sep 17 00:00:00 2001 From: WT Date: Fri, 1 Oct 2021 18:00:58 +0100 Subject: [PATCH 227/490] map type stability --- src/lib/array.jl | 36 ++++++++++++++++-------------------- test/gradcheck.jl | 22 ++++++++++++++++++++++ 2 files changed, 38 insertions(+), 20 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 035a1b239..e4c567cd9 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -180,27 +180,23 @@ _tryreverse(m, x) = x _tryreverse(m::typeof(map), x::Union{AbstractVector, Tuple}) = reverse(x) for (mapfunc,∇mapfunc) in [(:map,:∇map),(:pmap,:∇pmap)] - @eval function $∇mapfunc(cx, f::F, args...) where {F} + @eval function $∇mapfunc(cx, f::F, args::Vararg{Any, N}) where {F, N} ys_and_backs = $mapfunc((args...) -> _pullback(cx, f, args...), args...) - if isempty(ys_and_backs) - ys_and_backs, _ -> nothing - else - ys = map(first, ys_and_backs) - ys, function (Δ) - isnothing(Δ) && return nothing - if Base.issingletontype(F) && length(args) == 1 - Δarg = $mapfunc(((_,pb), δ) -> last(pb(δ)), ys_and_backs, Δ) # No unzip needed - (nothing, Δarg) - elseif Base.issingletontype(F) # Ensures `f` is pure: nothing captured & no state - Δargs = unzip($mapfunc(((_,pb), δ) -> Base.tail(pb(δ)), ys_and_backs, Δ)) - (nothing, Δargs...) - else - # Apply pullbacks in reverse order. Needed for correctness if `f` is stateful. - Δf_and_args_zipped = $mapfunc(((_,pb), δ) -> pb(δ), _tryreverse($mapfunc, ys_and_backs, Δ)...) 
- Δf_and_args = unzip(_tryreverse($mapfunc, Δf_and_args_zipped)) - Δf = reduce(accum, Δf_and_args[1]) - (Δf, Δf_and_args[2:end]...) - end + ys = map(first, ys_and_backs) + ys, function (Δ) + isnothing(Δ) && return nothing + if Base.issingletontype(F) && length(args) == 1 + Δarg = $mapfunc(((_,pb), δ) -> last(pb(δ)), ys_and_backs, Δ) # No unzip needed + (nothing, Δarg) + elseif Base.issingletontype(F) # Ensures `f` is pure: nothing captured & no state + Δargs = _unzip($mapfunc(((_,pb), δ) -> Base.tail(pb(δ)), ys_and_backs, Δ), Val(N)) + (nothing, Δargs...) + else + # Apply pullbacks in reverse order. Needed for correctness if `f` is stateful. + Δf_and_args_zipped = $mapfunc(((_,pb), δ) -> pb(δ), _tryreverse($mapfunc, ys_and_backs, Δ)...) + Δf_and_args = _unzip(_tryreverse($mapfunc, Δf_and_args_zipped), Val(N + 1)) + Δf = reduce(accum, Δf_and_args[1]; init=nothing) + (Δf, Δf_and_args[2:end]...) end end end diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 87fe5f46f..287a83093 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -288,6 +288,28 @@ for mapfunc in [map,pmap] Δy = randn(3) @test first(pb((Δy..., ))) ≈ first(pb(Δy)) end + + @testset "empty tuples" begin + out, pb = Zygote.pullback(map, -, ()) + @test pb(out) === (nothing, ()) + + out, pb = Zygote.pullback(map, +, (), ()) + @test pb(()) === (nothing, (), ()) + + function build_foo(z) + foo(x) = x * z + return foo + end + out, pb = Zygote.pullback(map, build_foo(5.0), ()) + @test pb(()) === (nothing, ()) + end +end + +# Check that map infers correctly. pmap still doesn't infer. +@testset "map inference" begin + @inferred Zygote._pullback(Zygote.Context(), map, sin, Float64[]) + out, pb = Zygote._pullback(Zygote.Context(), map, sin, Float64[]) + @inferred pb(Float64[]) end @testset "Alternative Pmap Dispatch" begin From 635682def7a850054a0eedde9e1837ee840613bb Mon Sep 17 00:00:00 2001 From: WT Date: Fri, 1 Oct 2021 18:02:24 +0100 Subject: [PATCH 228/490] Bump patch --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index aa48d5b68..f6373cf19 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.24" +version = "0.6.25" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 7f274ac174cf4ad4b8a3c47e3d3b2f66fcad274e Mon Sep 17 00:00:00 2001 From: WT Date: Sat, 2 Oct 2021 13:31:04 +0100 Subject: [PATCH 229/490] Extra tests and bug fix --- src/lib/array.jl | 10 ++++++++-- test/gradcheck.jl | 27 ++++++++++++++++++++++++--- 2 files changed, 32 insertions(+), 5 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index e4c567cd9..0838a4628 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -179,6 +179,12 @@ end _tryreverse(m, x) = x _tryreverse(m::typeof(map), x::Union{AbstractVector, Tuple}) = reverse(x) +# Sometimes a pullback doesn't return a full vector of nothings, but rather returns only a +# single nothing to say "all arguments have zero cotangent". This function is needed to +# account for that inside the pullback for map. +last_or_nothing(::Nothing) = nothing +last_or_nothing(x) = last(x) + for (mapfunc,∇mapfunc) in [(:map,:∇map),(:pmap,:∇pmap)] @eval function $∇mapfunc(cx, f::F, args::Vararg{Any, N}) where {F, N} ys_and_backs = $mapfunc((args...) -> _pullback(cx, f, args...), args...) 
@@ -186,10 +192,10 @@ for (mapfunc,∇mapfunc) in [(:map,:∇map),(:pmap,:∇pmap)] ys, function (Δ) isnothing(Δ) && return nothing if Base.issingletontype(F) && length(args) == 1 - Δarg = $mapfunc(((_,pb), δ) -> last(pb(δ)), ys_and_backs, Δ) # No unzip needed + Δarg = $mapfunc(((_,pb), δ) -> last_or_nothing(pb(δ)), ys_and_backs, Δ) # No unzip needed (nothing, Δarg) elseif Base.issingletontype(F) # Ensures `f` is pure: nothing captured & no state - Δargs = _unzip($mapfunc(((_,pb), δ) -> Base.tail(pb(δ)), ys_and_backs, Δ), Val(N)) + Δargs = _unzip($mapfunc(((_,pb), δ) -> tailmemaybe(pb(δ)), ys_and_backs, Δ), Val(N)) (nothing, Δargs...) else # Apply pullbacks in reverse order. Needed for correctness if `f` is stateful. diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 287a83093..9ff9a641a 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -303,13 +303,34 @@ for mapfunc in [map,pmap] out, pb = Zygote.pullback(map, build_foo(5.0), ()) @test pb(()) === (nothing, ()) end + + @testset "Vector{Nothing} cotangent" begin + out, pb = Zygote.pullback(map, -, randn(5)) + Δ = Vector{Nothing}(nothing, 5) + @test pb(Δ)[2] isa Vector{Nothing} + + out, pb = Zygote.pullback(map, +, randn(5), randn(5)) + @test pb(Δ)[2] isa Vector{Nothing} + @test pb(Δ)[3] isa Vector{Nothing} + end end # Check that map infers correctly. pmap still doesn't infer. @testset "map inference" begin - @inferred Zygote._pullback(Zygote.Context(), map, sin, Float64[]) - out, pb = Zygote._pullback(Zygote.Context(), map, sin, Float64[]) - @inferred pb(Float64[]) + @testset "$name" for (name, f, ȳ, xs) in [ + ("unary empty vector", sin, Float64[], (Float64[], )), + ("unary vector", sin, randn(3), (randn(3), )), + ("unary empty tuple", sin, (), ((), )), + ("unary tuple", sin, (randn(), randn()), ((randn(), randn()), )), + ("binary empty vector", +, Float64[], (Float64[], Float64[])), + ("binary vector", +, randn(2), (randn(2), randn(2))), + ("binary empty tuple", +, (), ((), ())), + ("binary tuple", +, (randn(), randn()), ((randn(), randn()), (randn(), randn()))), + ] + @inferred Zygote._pullback(Zygote.Context(), map, f, xs...) + y, pb = Zygote._pullback(Zygote.Context(), map, f, xs...) 
+ @inferred pb(ȳ) + end end @testset "Alternative Pmap Dispatch" begin From 576af80e018a7e9bc778f31d471253a7e88b8ffb Mon Sep 17 00:00:00 2001 From: WT Date: Sat, 2 Oct 2021 13:35:00 +0100 Subject: [PATCH 230/490] Additional Vector{Nothing} cotangent test --- test/gradcheck.jl | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 9ff9a641a..e34369f7c 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -305,13 +305,24 @@ for mapfunc in [map,pmap] end @testset "Vector{Nothing} cotangent" begin - out, pb = Zygote.pullback(map, -, randn(5)) Δ = Vector{Nothing}(nothing, 5) + + # Unary stateless + out, pb = Zygote.pullback(map, -, randn(5)) @test pb(Δ)[2] isa Vector{Nothing} + # Binary stateless out, pb = Zygote.pullback(map, +, randn(5), randn(5)) @test pb(Δ)[2] isa Vector{Nothing} @test pb(Δ)[3] isa Vector{Nothing} + + # Stateful + function build_foo(z) + foo(x) = x * z + return foo + end + out, pb = Zygote.pullback(map, build_foo(5.0), randn(5)) + @test pb(Δ)[2] isa Vector{Nothing} end end From 36572ae946acff72c12cc4bdac0d7150911e61f5 Mon Sep 17 00:00:00 2001 From: WT Date: Sat, 2 Oct 2021 13:36:05 +0100 Subject: [PATCH 231/490] Fix typo --- src/lib/array.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 0838a4628..4b899d571 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -179,7 +179,7 @@ end _tryreverse(m, x) = x _tryreverse(m::typeof(map), x::Union{AbstractVector, Tuple}) = reverse(x) -# Sometimes a pullback doesn't return a full vector of nothings, but rather returns only a +# Sometimes a pullback doesn't return a Tuple, but rather returns only a # single nothing to say "all arguments have zero cotangent". This function is needed to # account for that inside the pullback for map. last_or_nothing(::Nothing) = nothing From 4f7d5d1aacc7e64b35e5039a78411641daa6a875 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Wed, 29 Sep 2021 10:07:43 -0400 Subject: [PATCH 232/490] fix 1086 --- src/lib/broadcast.jl | 4 +++- test/features.jl | 5 +++++ 2 files changed, 8 insertions(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 4e7a3a1cc..3affebd92 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -72,7 +72,9 @@ unbroadcast(x::AbstractArray, x̄::Nothing) = nothing broadcast(+, xs...), ȳ -> (nothing, map(x -> unbroadcast(x, ȳ), xs)...) 
@adjoint broadcasted(::typeof(-), x::Numeric, y::Numeric) = x .- y, - Δ -> (nothing, unbroadcast(x, Δ), -unbroadcast(y, Δ)) + Δ -> (nothing, unbroadcast(x, Δ), _minus(unbroadcast(y, Δ))) +_minus(Δ) = -Δ +_minus(::Nothing) = nothing @adjoint broadcasted(::typeof(*), x::Numeric, y::Numeric) = x.*y, Δ -> (nothing, unbroadcast(x, Δ .* conj.(y)), unbroadcast(y, Δ .* conj.(x))) diff --git a/test/features.jl b/test/features.jl index d683d0d94..3115a455c 100644 --- a/test/features.jl +++ b/test/features.jl @@ -570,6 +570,11 @@ end @test gradient(x -> sum(_f.(x)), [1,2,3]) == ([0.5, 0.5, 0.5],) @test gradient(x -> sum(map(_f, x)), [1,2,3]) == ([0.5, 0.5, 0.5],) + # with Bool + @test gradient(x -> sum(1 .- (x .> 0)), randn(5)) == (nothing,) + @test gradient(x -> sum((y->1-y).(x .> 0)), randn(5)) == (nothing,) + @test gradient(x -> sum(x .- (x .> 0)), randn(5)) == ([1,1,1,1,1],) + @test gradient(x -> sum(x ./ [1,2,4]), [1,2,pi]) == ([1.0, 0.5, 0.25],) @test gradient(x -> sum(map(/, x, [1,2,4])), [1,2,pi]) == ([1.0, 0.5, 0.25],) From 0cb031a3181fd9f8b63abb9c0b2629a1cb576a0d Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 2 Oct 2021 12:34:52 -0400 Subject: [PATCH 233/490] 0.6.26 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index f6373cf19..15ecc3e73 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.25" +version = "0.6.26" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From eb347fe62a8743e30c1de7a318812996cc8ea8dc Mon Sep 17 00:00:00 2001 From: Gabriel Birnbaum Date: Tue, 5 Oct 2021 09:13:12 +0200 Subject: [PATCH 234/490] minimize changes Co-authored-by: Carlo Lucibello --- src/lib/base.jl | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/src/lib/base.jl b/src/lib/base.jl index 55d660279..472ee85a5 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -121,11 +121,7 @@ end pairs_namedtuple_pullback(dx::NamedTuple) = (dx.data,) - function pairs_namedtuple_pullback(dx::Tuple) - t0 = isempty(dx) ? () : NamedTuple{N}(values(dx)) - return (t0,) - end - +pairs_namedtuple_pullback(dx::Tuple{}) = (NamedTuple(),) function pairs_namedtuple_pullback(Δ::Dict) t0 = map(zero, t) for (idx, v) in Δ From 63a9a543ffc0e8e4198803af28464aa2a4587e0f Mon Sep 17 00:00:00 2001 From: Gabriel Birnbaum Date: Tue, 5 Oct 2021 09:14:44 +0200 Subject: [PATCH 235/490] fix indentation --- src/lib/base.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/lib/base.jl b/src/lib/base.jl index 472ee85a5..ac7df59a2 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -121,7 +121,8 @@ end pairs_namedtuple_pullback(dx::NamedTuple) = (dx.data,) -pairs_namedtuple_pullback(dx::Tuple{}) = (NamedTuple(),) + pairs_namedtuple_pullback(dx::Tuple{}) = (NamedTuple(),) + function pairs_namedtuple_pullback(Δ::Dict) t0 = map(zero, t) for (idx, v) in Δ From a3f8dc4986005f532ad72a35afad74b684bb6289 Mon Sep 17 00:00:00 2001 From: Simeon David Schaub Date: Tue, 5 Oct 2021 11:18:22 -0400 Subject: [PATCH 236/490] WIP: improve inference for getproperty This has regressed quite a bit due to #848. With this PR, we should be able to get back the same performance as before, assuming there is no custom implementation or pullback for `getproperty`. Still need to add tests. 
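As a rough illustration (a sketch along the lines of the inference check in
test/compiler.jl below; the `Gaussian` struct here is only an illustrative
stand-in), differentiating plain field access should now infer whenever
`getproperty` falls back to `getfield`:

    using Zygote, Test

    struct Gaussian{Tm,TP}
        m::Tm
        P::TP
    end

    g = Gaussian(randn(3), randn(3, 3))
    # `@inferred` errors if constructing the pullback is type-unstable
    y, back = @inferred Zygote.pullback(x -> x.m, g)
    @test y == getfield(g, :m)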
--- src/Zygote.jl | 1 + src/lib/literal_getproperty.jl | 82 ++++++++++++++++++++++++++++++++++ test/compiler.jl | 47 +++++++++++++++++++ 3 files changed, 130 insertions(+) create mode 100644 src/lib/literal_getproperty.jl diff --git a/src/Zygote.jl b/src/Zygote.jl index ae023213c..85b71359f 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -33,6 +33,7 @@ include("compiler/show.jl") include("lib/grad.jl") include("lib/lib.jl") +include("lib/literal_getproperty.jl") include("lib/number.jl") include("lib/base.jl") include("lib/array.jl") diff --git a/src/lib/literal_getproperty.jl b/src/lib/literal_getproperty.jl new file mode 100644 index 000000000..1959e9462 --- /dev/null +++ b/src/lib/literal_getproperty.jl @@ -0,0 +1,82 @@ +# Mostly copied over from Cassette in `src/overdub.jl` +# Return `Reflection` for signature `sigtypes` and `world`, if possible. Otherwise, return `nothing`. +function reflect(@nospecialize(sigtypes::Tuple), world::UInt = typemax(UInt)) + if length(sigtypes) > 2 && sigtypes[1] === typeof(invoke) + @assert sigtypes[3] <: Type{<:Tuple} + sigtypes = (sigtypes[2], sigtypes[3].parameters[1].parameters...) + end + # This works around a subtyping bug. Basically, callers can deconstruct upstream + # `UnionAll` types in such a way that results in a type with free type variables, in + # which case subtyping can just break. + # + # God help you if you try to use a type parameter here (e.g. `::Type{S} where S<:Tuple`) + # instead of this nutty workaround, because the compiler can just rewrite `S` into + # whatever it thinks is "type equal" to the actual provided value. In other words, if + # `S` is defined as e.g. `f(::Type{S}) where S`, and you call `f(T)`, you should NOT + # assume that `S === T`. If you did, SHAME ON YOU. It doesn't matter that such an + # assumption holds true for essentially all other kinds of values. I haven't counted in + # a while, but I'm pretty sure I have ~40+ hellish years of Julia experience, and this + # still catches me every time. Who even uses this crazy language? + S = Tuple{map(s -> Core.Compiler.has_free_typevars(s) ? typeof(s.parameters[1]) : s, sigtypes)...} + (S.parameters[1]::DataType).name.module === Core.Compiler && return nothing + _methods = Base._methods_by_ftype(S, -1, world) + method_index = 0 + for i in 1:length(_methods) + if _methods[i][1] === S + method_index = i + break + end + end + method_index === 0 && return nothing + type_signature, raw_static_params, method = _methods[method_index] + method_instance = Core.Compiler.specialize_method(method, type_signature, raw_static_params, false) + method_signature = method.sig + static_params = Any[raw_static_params...] 
+ return method_instance, method_signature, static_params +end + + +# ugly hack to make differentiating `getproperty` infer a lot better +@generated function _pullback(cx::AContext, ::typeof(literal_getproperty), x, ::Val{f}) where f + sig(x) = Tuple{x, typeof(f)} + rrule_sig(x) = Tuple{typeof(getproperty), x, typeof(f)} + pb_sig(x) = Tuple{cx, typeof(getproperty), x, typeof(f)} + + # either `getproperty` has a custom implementation or `_pullback(cx, getproperty, x, f)` + # / `rrule(getproperty, x, f) is overloaded directly + is_getfield_fallback = which(getproperty, sig(x)) == which(getproperty, sig(Any)) && + which(_pullback, pb_sig(x)) == which(_pullback, pb_sig(Any)) && + which(rrule, rrule_sig(x)) == which(rrule, rrule_sig(Any)) + + #ccall(:jl_safe_printf, Cvoid, (Cstring,), "$is_getfield_fallback: $x\n") + + if is_getfield_fallback + # just copy pullback of `literal_getfield` + mi, _sig, sparams = reflect((typeof(_pullback), cx, typeof(literal_getfield), x, Val{f})) + ci = copy(Core.Compiler.retrieve_code_info(mi)) + + # we need to change the second arg to `_pullback` from `literal_getproperty` to + # `literal_getfield` + Meta.partially_inline!( + ci.code, Any[_pullback, Core.SlotNumber(2), literal_getfield], + _sig, sparams, 0, 0, :propagate, + ) + ci.inlineable = true + + # backedge for `_pullback`, see https://docs.julialang.org/en/v1/devdocs/ast/#MethodInstance + # this will cause a backedge to this particular MethodInstance to be attached to + # `_pullback(cx, getproperty, x, f)` + mi_pb_getproperty, _, _ = reflect((typeof(_pullback), pb_sig(x).parameters...)) + mi_getproperty, _, _ = reflect((typeof(getproperty), sig(x).parameters...)) + mi_rrule, _, _ = reflect((typeof(rrule), rrule_sig(x).parameters...)) + ci.edges = Core.MethodInstance[mi, mi_pb_getproperty, mi_getproperty, mi_rrule] + + return ci + else + # nothing to optimize here, need to recurse into `getproperty` + return quote + Base.@_inline_meta + _pullback(cx, getproperty, x, $(QuoteNode(f))) + end + end +end diff --git a/test/compiler.jl b/test/compiler.jl index af8e6ccb7..71e49ded4 100644 --- a/test/compiler.jl +++ b/test/compiler.jl @@ -144,5 +144,52 @@ end @test Zygote.gradient(sumall, ms) == ((a = 2, b = 2),) end +using ChainRulesCore + +function _Gaussian(suffix::Symbol) + name = gensym(Symbol(:Gaussian_, suffix)) + return @eval begin + struct $name{Tm, TP} + m::Tm + P::TP + end + $name + end +end + +@testset "inference for `getproperty`" begin + Gaussian = _Gaussian(:getproperty) + g = Gaussian(randn(3), randn(3, 3)) + y, back = @inferred pullback(x -> x.m, g) + @test y == getfield(g, :m) + @test Base.return_types(back, Tuple{Vector{Float64}}) == Any[Union{Tuple{Nothing}, typeof(((m = [1.0, 0.0, 0.0], P = nothing),))}] + @test back([1., 0, 0]) == ((m = [1.0, 0.0, 0.0], P = nothing),) + + Base.getproperty(g::Gaussian, s::Symbol) = 2getfield(g, s) + y, back = pullback(x -> x.m, g) + @test y == 2getfield(g, :m) + @test back([1., 0, 0]) == ((m = [2.0, 0.0, 0.0], P = nothing),) + + + Gaussian = _Gaussian(:pullback) + g = Gaussian(randn(3), randn(3, 3)) + y, back = @inferred pullback(x -> x.m, g) + + Zygote._pullback(::typeof(getproperty), g::Gaussian, s::Symbol) = 3getfield(g, s), Δ -> (nothing, (; ((:m, :P) .=> nothing)..., s => 3Δ), nothing) + y, back = pullback(x -> x.m, g) + @test_broken y == 3getfield(g, :m) + @test_broken back([1., 0, 0]) == ((m = [3.0, 0.0, 0.0], P = nothing),) + + + Gaussian = _Gaussian(:rrule) + g = Gaussian(randn(3), randn(3, 3)) + y, back = @inferred pullback(x -> x.m, g) + + 
ChainRulesCore.rrule(::typeof(getproperty), g::Gaussian, s::Symbol) = 4getfield(g, s), Δ -> (NoTangent(), Tangent{typeof(g)}(; s => 4Δ), NoTangent()) + y, back = pullback(x -> x.m, g) + @test y == 4getfield(g, :m) + @test back([1., 0, 0]) == ((m = [4.0, 0.0, 0.0], P = nothing),) +end + # issue 897 @test gradient(x -> sum(norm, collect(eachcol(x))), ones(3, 400))[1] ≈ fill(0.5773502691896258, 3, 400) From 70ab7c1e7346192265d6fbcc92c4a31cf2678e92 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Thu, 7 Oct 2021 07:37:02 +0200 Subject: [PATCH 237/490] Update Project.toml --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 15ecc3e73..d8cf865a2 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.26" +version = "0.6.27" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From e245dee827f802d54b10a0c00c69083b6038de47 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Thu, 7 Oct 2021 13:46:16 +0200 Subject: [PATCH 238/490] Update Project.toml --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index d8cf865a2..15ecc3e73 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.27" +version = "0.6.26" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From bdbb36979ba458f04706c07891e3047afd22c167 Mon Sep 17 00:00:00 2001 From: Simeon David Schaub Date: Tue, 12 Oct 2021 12:20:22 -0400 Subject: [PATCH 239/490] address review comments --- test/compiler.jl | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/test/compiler.jl b/test/compiler.jl index 71e49ded4..eec71e53d 100644 --- a/test/compiler.jl +++ b/test/compiler.jl @@ -162,6 +162,7 @@ end g = Gaussian(randn(3), randn(3, 3)) y, back = @inferred pullback(x -> x.m, g) @test y == getfield(g, :m) + # This type instability is due to the handling of non-bitstypes in `accum_param` @test Base.return_types(back, Tuple{Vector{Float64}}) == Any[Union{Tuple{Nothing}, typeof(((m = [1.0, 0.0, 0.0], P = nothing),))}] @test back([1., 0, 0]) == ((m = [1.0, 0.0, 0.0], P = nothing),) @@ -175,10 +176,10 @@ end g = Gaussian(randn(3), randn(3, 3)) y, back = @inferred pullback(x -> x.m, g) - Zygote._pullback(::typeof(getproperty), g::Gaussian, s::Symbol) = 3getfield(g, s), Δ -> (nothing, (; ((:m, :P) .=> nothing)..., s => 3Δ), nothing) + Zygote._pullback(::Zygote.AContext, ::typeof(getproperty), g::Gaussian, s::Symbol) = 3getfield(g, s), Δ -> (nothing, (; ((:m, :P) .=> nothing)..., s => 3Δ), nothing) y, back = pullback(x -> x.m, g) - @test_broken y == 3getfield(g, :m) - @test_broken back([1., 0, 0]) == ((m = [3.0, 0.0, 0.0], P = nothing),) + @test y == 3getfield(g, :m) + @test back([1., 0, 0]) == ((m = [3.0, 0.0, 0.0], P = nothing),) Gaussian = _Gaussian(:rrule) @@ -189,6 +190,13 @@ end y, back = pullback(x -> x.m, g) @test y == 4getfield(g, :m) @test back([1., 0, 0]) == ((m = [4.0, 0.0, 0.0], P = nothing),) + + + Gaussian = _Gaussian(:bitstype) + g = Gaussian(randn(), randn()) + y, back = @inferred pullback(x -> x.m, g) + @test y == getfield(g, :m) + @test @inferred(back(1.0)) == ((m = 1.0, P = nothing),) end # issue 897 From 1d189fb72cb0341b6045057368eed8d3583c4c8a Mon Sep 17 00:00:00 2001 From: Simeon David Schaub Date: Tue, 12 Oct 2021 12:27:31 -0400 Subject: [PATCH 240/490] bump patch version --- Project.toml | 2 +- 1 file 
changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 15ecc3e73..d8cf865a2 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.26" +version = "0.6.27" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 2b35bc0d3fff63779f56db3d26daaab8e197fdc2 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 15 Oct 2021 09:21:56 -0400 Subject: [PATCH 241/490] wrap_chainrules_input for mutable struct --- Project.toml | 4 ++-- src/compiler/chainrules.jl | 2 ++ test/features.jl | 14 ++++++++++++++ 3 files changed, 18 insertions(+), 2 deletions(-) diff --git a/Project.toml b/Project.toml index d8cf865a2..7d4a197ea 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.27" +version = "0.6.28" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" @@ -24,7 +24,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" ChainRules = "1.5" -ChainRulesCore = "1.6" +ChainRulesCore = "1.9" ChainRulesTestUtils = "1" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12" diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 8c3f1a84d..34b6e637b 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -128,6 +128,8 @@ Convert `x` from the format Zygote uses internally to differentials types ChainR xp = map(wrap_chainrules_input, xs) ChainRules.Tangent{Any, typeof(xp)}(xp) end +# For mutable types, including x=Ref(1), Zygote makes Ref{Any}(::NamedTuple) +@inline wrap_chainrules_input(x::Ref) = wrap_chainrules_input(x[]) """ _project(x, dx) diff --git a/test/features.jl b/test/features.jl index 3115a455c..ab68b4bd3 100644 --- a/test/features.jl +++ b/test/features.jl @@ -443,6 +443,20 @@ let @test back(1.) == ((1.0,),) end +@testset "mutable struct, including Ref" begin + # Zygote's representation is Base.RefValue{Any}((value = 7.0,)), but the + # map to ChainRules types and back normalises to (value = 7.0,) same as struct: + @test gradient(x -> x.value^2 + x.value, MyMutable(3)) === ((value = 7.0,),) + + # Same for Ref. This doesn't seem to affect `pow_mut` test in this file. + @test gradient(x -> x.x^2 + x.x, Ref(3)) === ((x = 7.0,),) + @test gradient(x -> real(x.x^2 + im * x.x), Ref(4)) === ((x = 8.0,),) + + # Broadcasting over Ref is handled specially. Tested elsehwere too. 
+ @test gradient(x -> sum(sum, x .* [1,2,3]), Ref([4,5])) == ((x = [6.0, 6.0],),) + @test gradient(x -> sum(sum, Ref(x) .* [1,2,3]), [4,5]) == ([6.0, 6.0],) +end + function type_test() Complex{<:Real} end From 5ae5b4f2933e87923a567f13e1c298e26b954716 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 16 Oct 2021 18:43:46 -0400 Subject: [PATCH 242/490] wrap_chainrules_input for arrays of Ref (#1103) * wrap_chainrules_input for arrays of Ref * z2d too, for rrule_via_ad * test from https://github.com/JuliaDiff/ChainRulesCore.jl/issues/440 * add test from https://github.com/JuliaDiff/ChainRules.jl/issues/537 * more tests related to CRC types * union nothing, fix one case * comments --- Project.toml | 2 +- src/compiler/chainrules.jl | 15 ++++++-- src/lib/broadcast.jl | 1 + test/features.jl | 7 ++++ test/gradcheck.jl | 74 ++++++++++++++++++++++++++++++++++++++ 5 files changed, 96 insertions(+), 3 deletions(-) diff --git a/Project.toml b/Project.toml index 7d4a197ea..04196b602 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.28" +version = "0.6.29" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 34b6e637b..9bfde430a 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -115,6 +115,8 @@ for T_outer in (:Tuple, :NamedTuple) ChainRulesCore.backing(xp) # this is accessing ChainRulesCore internals, but it is prob safe enough, and it is fastest end end +# Could `reinterpret` instead of broadcasting here -- TODO +@inline wrap_chainrules_output(xs::AbstractArray{<:ChainRules.Tangent}) = wrap_chainrules_output.(xs) """ wrap_chainrules_input(x) @@ -130,6 +132,11 @@ Convert `x` from the format Zygote uses internally to differentials types ChainR end # For mutable types, including x=Ref(1), Zygote makes Ref{Any}(::NamedTuple) @inline wrap_chainrules_input(x::Ref) = wrap_chainrules_input(x[]) +# Could `reinterpret` instead of broadcasting here -- TODO +@inline wrap_chainrules_input(xs::AbstractArray{<:Ref}) = wrap_chainrules_input.(xs) +@inline wrap_chainrules_input(xs::AbstractArray{<:Union{Nothing, <:Ref}}) = wrap_chainrules_input.(xs) # no test invented for this +@inline wrap_chainrules_input(xs::AbstractArray{<:NamedTuple}) = wrap_chainrules_input.(xs) +@inline wrap_chainrules_input(xs::AbstractArray{<:Union{Nothing, <:NamedTuple}}) = wrap_chainrules_input.(xs) """ _project(x, dx) @@ -139,6 +146,8 @@ Also handles some Zygote-specific corrections, such as `x::Array, dx::Tuple`. Safe to apply to arbitrary input. """ @inline function _project(x, dx) + # Note that this use of `wrap_chainrules_input` has the primal `x`, so could + # avoid making `Tangent{Any}`, perhaps via `zygote2differential` -- TODO. wrap_chainrules_output(ProjectTo(x)(wrap_chainrules_input(dx))) end @@ -224,9 +233,9 @@ function ChainRulesCore.rrule_via_ad(config::ZygoteRuleConfig, f_args...; kwargs end """ - zygote2differential(x) + zygote2differential(dx, primal) -Convert input `x` from the Zygote format to the ChainRules differential types. +Convert input `dx` from the Zygote format to the ChainRules differential types. 
""" zygote2differential(x, primal) = z2d(x, primal) zygote2differential(::Nothing, ::Any) = NoTangent() @@ -235,6 +244,7 @@ zygote2differential(t::Tuple, primal) = (@warn "primal should be a tuple, not $p z2d(x, ::Any) = x z2d(::Nothing, ::Any) = NoTangent() z2d(a::AbstractArray{<:Number}, primal::AbstractArray{T}) where T = a +# Could probably `reinterpret` instead of broadcasting here -- TODO z2d(a::AbstractArray, primal::AbstractArray{T}) where T = z2d.(a, primal) # Note: this should never be hit if we are converting things right, but it seems to be # happening in the wild for sufficiently weird functions/types. @@ -254,3 +264,4 @@ function z2d(t::NamedTuple, primal) tp::NamedTuple = map(z2d, complete_t, primals) return canonicalize(Tangent{primal_type, typeof(tp)}(tp)) end +z2d(dx::Ref, primal) = z2d(dx[], primal) # mutable structs diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 3affebd92..8833436a0 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -58,6 +58,7 @@ unbroadcast(x::Number, x̄) = accum_sum(x̄) unbroadcast(x::Tuple{<:Any}, x̄) = (accum_sum(x̄),) unbroadcast(x::Base.RefValue, x̄) = (x=accum_sum(x̄),) unbroadcast(x::Tuple, x̄) = NTuple{length(x)}(length(x) == length(x̄) ? x̄ : accum_sum(x̄; dims=2:ndims(x̄))) # case length(x) > 1 +unbroadcast(x::Tuple, x̄::Nothing) = nothing unbroadcast(x::AbstractArray, x̄::Nothing) = nothing diff --git a/test/features.jl b/test/features.jl index ab68b4bd3..795879fed 100644 --- a/test/features.jl +++ b/test/features.jl @@ -452,6 +452,13 @@ end @test gradient(x -> x.x^2 + x.x, Ref(3)) === ((x = 7.0,),) @test gradient(x -> real(x.x^2 + im * x.x), Ref(4)) === ((x = 8.0,),) + # Array of mutables: + @test gradient(x -> sum(getindex.(x).^2), Ref.(1:3))[1] == [(;x=2i) for i in 1:3] + @test gradient(x -> sum(abs2∘getindex, x), Ref.(1:3))[1] == [(;x=2i) for i in 1:3] + + @test gradient(x -> (getindex.(x).^2)[1], Ref.(1:3))[1][1] == (x=2.0,) # rest are (x = 0.0,), but nothing would be OK too + @test gradient(x -> (prod.(getindex.(x)))[1], Ref.(eachcol([1 2; 3 4])))[1][1] == (x = [3.0, 1.0],) + # Broadcasting over Ref is handled specially. Tested elsehwere too. @test gradient(x -> sum(sum, x .* [1,2,3]), Ref([4,5])) == ((x = [6.0, 6.0],),) @test gradient(x -> sum(sum, Ref(x) .* [1,2,3]), [4,5]) == ([6.0, 6.0],) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index e34369f7c..ac7893cfa 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -245,6 +245,11 @@ end @test gradtest(x->fill(first(x), N), randn(rng, 1)) @test gradtest(x->fill(first(x), N, M), randn(rng, 1)) @test gradtest(x->fill(first(x), N, M, P), randn(rng, 1)) + + # fill(struct, ...) 
handled by ChainRules after + # https://github.com/FluxML/Zygote.jl/pull/1051 + @test gradient(x -> fill(x, 3)[1][1], (1,2)) === ((1.0, nothing),) + @test gradient(x -> fill(x, 3)[1].a, (a=1, b=2)) == ((a=1.0, b=nothing),) # 1 not 1.0 end @testset "circshift" begin @@ -344,6 +349,20 @@ end end end +@testset "map and tuples" begin + # arrays of tuples, ChainRules's Tangent should not escape + @test gradient(x -> sum(map(first, x)), [(1,2), (3,4)]) == ([(1.0, nothing), (1.0, nothing)],) + @test gradient(x -> sum(first, x), [(1,2), (3,4)]) == ([(1.0, nothing), (1.0, nothing)],) + + @test gradient(x -> map(+, x, (1,2,3))[1], (4,5,6)) == ((1.0, nothing, nothing),) + @test gradient(x -> map(+, x, [1,2,3])[1], (4,5,6)) == ((1.0, 0.0, 0.0),) + @test_broken gradient(x -> map(+, x, (1,2,3))[1], [4,5,6]) == ([1,0,0],) # Gradient [1.0, 0.0, 0.0] should be a tuple, since v0.6.0 at least + + # mismatched lengths, should zip + @test_broken gradient(x -> map(+, x, [1,2,3,99])[1], (4,5,6)) == ((1.0, 0.0, 0.0),) # BoundsError: attempt to access 3-element Vector{Float64} at index [4] + @test_broken gradient(x -> map(+, x, [1,2,3])[1], (4,5,6,99)) == ((1.0, 0.0, 0.0, nothing),) # DimensionMismatch("variable with size(x) == (4,) cannot have a gradient with size(dx) == (3,) +end + @testset "Alternative Pmap Dispatch" begin cache_and_map(f,xs...) = pmap(f, CachingPool(workers()), xs...; batch_size = 1) @test gradtest(xs -> sum(cache_and_map(x -> x^2, xs)), rand(2,3)) @@ -1783,3 +1802,58 @@ end # https://github.com/FluxML/Zygote.jl/issues/996 a = rand(3) @test Zygote.gradient(x->sum(x .+ rand.()), a) == (ones(3),) + +@testset "CRC issue 440" begin + # https://github.com/JuliaDiff/ChainRulesCore.jl/issues/440 + f(x,y) = sum(sum, [[x[i],y[i]] for i=1:length(x)]) + g(x,y) = sum(sum, [(x[i],y[i]) for i=1:length(x)]) + @test gradient(f, rand(3), rand(3)) == ([1.0, 1.0, 1.0], [1.0, 1.0, 1.0]) + @test gradient(g, rand(3), rand(3)) == ([1.0, 1.0, 1.0], [1.0, 1.0, 1.0]) +end + +@testset "CR issue 537" begin + # https://github.com/JuliaDiff/ChainRules.jl/issues/537 + struct BV{F,T} + A::F + α::T + end + function Base.:*(c, km::BV) + new_A = c*km.A + other_params = getfield.([km], propertynames(km))[2:end] + BV(new_A, other_params...) 
+ end + function (bv::BV)(V_app, ox::Bool; kT::Real = 0.026) + local exp_arg + if ox + exp_arg = (bv.α .* V_app) ./ kT + else + exp_arg = -((1 .- bv.α) .* V_app) ./ kT + end + bv.A .* exp.(exp_arg) + end + Zygote.@adjoint function BV{T,S}(A, α) where {T,S} + BV(A, α), Δ -> begin + (Δ.A, Δ.α) + end + end + bv = BV(1.0, 0.1) + I_vals, V = rand(81), rand(81) + + g2 = gradient(V, bv) do V, bv + res = fill(bv, length(V)) + r1 = map((m,v) -> m(v, true), res, V) + r2 = map((m,v) -> m(v, false), res, V) + sum(r1 .- r2) + end + @test size(g2[1]) == size(V) + @test g2[2] isa NamedTuple + @test g2[2].A isa Number + + g1 = gradient(bv, V) do bv, V + res = map(x -> x * bv, V) + sum(x -> x.A, res) + end + @test g1[1] isa NamedTuple + @test g1[1].A isa Number + @test size(g1[2]) == size(V) +end From 4edde590303d95605d410066d46ba05e1d3a0843 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Sun, 17 Oct 2021 06:48:10 +0200 Subject: [PATCH 243/490] remove FastAI and GeometricFlux --- .github/workflows/Downstream.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/.github/workflows/Downstream.yml b/.github/workflows/Downstream.yml index 6565e82d5..c40bc6617 100644 --- a/.github/workflows/Downstream.yml +++ b/.github/workflows/Downstream.yml @@ -20,8 +20,6 @@ jobs: package: - {user: FluxML, repo: Flux.jl, group: All} - {user: FluxML, repo: NNlib.jl, group: All} - - {user: FluxML, repo: FastAI.jl, group: All} - - {user: FluxML, repo: GeometricFlux.jl, group: All} - {user: SciML, repo: DiffEqFlux.jl, group: Layers} - {user: SciML, repo: NeuralPDE.jl, group: NNPDE} steps: From 218eefb340c3dc47404b3cb27ecee5303dc650bb Mon Sep 17 00:00:00 2001 From: lassepe Date: Sun, 24 Oct 2021 20:30:25 +0200 Subject: [PATCH 244/490] First stab at copy and copy for Grads --- src/compiler/interface.jl | 17 +++++++++++++++++ src/tools/buffer.jl | 6 ++++++ src/tools/idset.jl | 4 ++++ test/interface.jl | 20 ++++++++++++++++++++ 4 files changed, 47 insertions(+) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index e210e65b6..9965c3469 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -237,6 +237,12 @@ function copy!(x::AbstractVector, ps::Params) ps end +function copy!(ps_dst::Params, ps_src::Params) + copy!(ps_dst.order, ps_src.order) + copy!(ps_dst.params, ps_src.params) + ps_dst +end + """ Grads(...) @@ -299,6 +305,17 @@ function copy!(x::AbstractVector, gs::Grads) x end +function copy!(gs_dst::Grads, gs_src::Grads) + copy!(gs_dst.grads, gs_src.grads) + copy!(gs_dst.params, gs_src.params) + gs_dst +end + +function Base.copy(gs::Grads) + gs_new = Grads(IdDict(), Params()) + copy!(gs_new, gs) +end + broadcasted(f, gs::Grads, gss::ADictOrGrads...) = map(f, gs, gss...) broadcasted(f, a::Numeric, gs::Grads) = map(x -> f(a, x), gs) diff --git a/src/tools/buffer.jl b/src/tools/buffer.jl index 9409a74bc..5d6fdd9bb 100644 --- a/src/tools/buffer.jl +++ b/src/tools/buffer.jl @@ -39,6 +39,12 @@ mutable struct Buffer{T,A<:AbstractArray{T}} freeze::Bool end +function Base.copy!(b_dst::Buffer, b_src::Buffer) + b_dst.data = b_src.data + b_dst.freeze = b_src.freeze + b_dst +end + Buffer(xs::AbstractArray, args...) 
= Buffer(similar(xs, args...), false) diff --git a/src/tools/idset.jl b/src/tools/idset.jl index a0aa93df0..d8072e18b 100644 --- a/src/tools/idset.jl +++ b/src/tools/idset.jl @@ -15,6 +15,10 @@ Base.in(x, s::IdSet) = haskey(s.dict, x) Base.eltype(::IdSet{T}) where T = T Base.collect(s::IdSet) = Base.collect(keys(s.dict)) Base.similar(s::IdSet, T::Type) = IdSet{T}() +function Base.empty!(s::IdSet) + empty!(s.dict) + s +end @forward IdSet.dict Base.length diff --git a/test/interface.jl b/test/interface.jl index 0bee98321..5b126a765 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -32,6 +32,10 @@ using Zygote: Grads x = [0, 0, 0] copy!(x, ps) @test x == [1, 2, 3] + + ps_src = Params([[1, 2], [3]]) + ps_dst = Params([4][5]) + ps_dst = ps_src end @testset "broadcast" begin @@ -132,6 +136,22 @@ end @test_throws ArgumentError gs1 .+ gs4 end + @testset "copy" begin + w, b = rand(2), rand(2) + x1, x2 = rand(2), rand(2) + + gs1 = gradient(() -> sum(w .* x1), Params([w])) + gs2 = gradient(() -> sum(w .* x2), Params([w])) + + gs_new = copy(gs1) + copy!(gs2, gs1) + + # TODO: these tests are currently broken because `Base.iseqeual` is not doing useful things + # for `Grads` right now. + @test_broken gs1 == gs_new + @test_broken gs2 == gs1 + end + @testset "map and broadcast" begin w = rand(2) x1 = rand(2) From 55b4381df6ae9488f1c6c7e04b8d10b9a6df727f Mon Sep 17 00:00:00 2001 From: lassepe Date: Mon, 25 Oct 2021 10:15:21 +0200 Subject: [PATCH 245/490] Implement copy in terms of merge! --- src/compiler/interface.jl | 16 +++++----------- src/tools/buffer.jl | 6 ------ test/interface.jl | 7 +++---- 3 files changed, 8 insertions(+), 21 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 9965c3469..89285539f 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -237,12 +237,6 @@ function copy!(x::AbstractVector, ps::Params) ps end -function copy!(ps_dst::Params, ps_src::Params) - copy!(ps_dst.order, ps_src.order) - copy!(ps_dst.params, ps_src.params) - ps_dst -end - """ Grads(...) @@ -305,15 +299,15 @@ function copy!(x::AbstractVector, gs::Grads) x end -function copy!(gs_dst::Grads, gs_src::Grads) - copy!(gs_dst.grads, gs_src.grads) - copy!(gs_dst.params, gs_src.params) +function Base.merge!(gs_dst::Grads, gs_src::Grads) + union!(gs_dst.params, gs_src.params) + map!(copy, gs_dst, gs_src) gs_dst end function Base.copy(gs::Grads) - gs_new = Grads(IdDict(), Params()) - copy!(gs_new, gs) + gs_new = Grads(IdDict(), gs.params) + merge!(gs_new, gs) end broadcasted(f, gs::Grads, gss::ADictOrGrads...) = map(f, gs, gss...) diff --git a/src/tools/buffer.jl b/src/tools/buffer.jl index 5d6fdd9bb..9409a74bc 100644 --- a/src/tools/buffer.jl +++ b/src/tools/buffer.jl @@ -39,12 +39,6 @@ mutable struct Buffer{T,A<:AbstractArray{T}} freeze::Bool end -function Base.copy!(b_dst::Buffer, b_src::Buffer) - b_dst.data = b_src.data - b_dst.freeze = b_src.freeze - b_dst -end - Buffer(xs::AbstractArray, args...) = Buffer(similar(xs, args...), false) diff --git a/test/interface.jl b/test/interface.jl index 5b126a765..0175151a4 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -146,10 +146,9 @@ end gs_new = copy(gs1) copy!(gs2, gs1) - # TODO: these tests are currently broken because `Base.iseqeual` is not doing useful things - # for `Grads` right now. 
- @test_broken gs1 == gs_new - @test_broken gs2 == gs1 + #TODO: a bit of a hacky workaround here, would be nice if we could compare gradients directly + @test collect(gs1) == collect(gs_new) + @test collect(gs2) == collect(gs1) end @testset "map and broadcast" begin From bbc6b362d12d30b9d544d4651f530d96947f0415 Mon Sep 17 00:00:00 2001 From: Lasse Peters Date: Mon, 25 Oct 2021 10:34:06 +0200 Subject: [PATCH 246/490] Update idset.jl Get rid of Base.empt! For `IdSet`. --- src/tools/idset.jl | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/tools/idset.jl b/src/tools/idset.jl index d8072e18b..a0aa93df0 100644 --- a/src/tools/idset.jl +++ b/src/tools/idset.jl @@ -15,10 +15,6 @@ Base.in(x, s::IdSet) = haskey(s.dict, x) Base.eltype(::IdSet{T}) where T = T Base.collect(s::IdSet) = Base.collect(keys(s.dict)) Base.similar(s::IdSet, T::Type) = IdSet{T}() -function Base.empty!(s::IdSet) - empty!(s.dict) - s -end @forward IdSet.dict Base.length From d51de8ac102fe41811fcd557c6ccac7b7d34d2d5 Mon Sep 17 00:00:00 2001 From: Lasse Peters Date: Mon, 25 Oct 2021 10:41:49 +0200 Subject: [PATCH 247/490] Remove redundant test --- test/interface.jl | 4 ---- 1 file changed, 4 deletions(-) diff --git a/test/interface.jl b/test/interface.jl index 0175151a4..c419dbfcd 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -32,10 +32,6 @@ using Zygote: Grads x = [0, 0, 0] copy!(x, ps) @test x == [1, 2, 3] - - ps_src = Params([[1, 2], [3]]) - ps_dst = Params([4][5]) - ps_dst = ps_src end @testset "broadcast" begin From 912e60eeb8790c111dfbde216a7c14becc5cb7ab Mon Sep 17 00:00:00 2001 From: lassepe Date: Mon, 25 Oct 2021 11:06:54 +0200 Subject: [PATCH 248/490] Fix tests --- test/interface.jl | 26 +++++++++++++++++--------- 1 file changed, 17 insertions(+), 9 deletions(-) diff --git a/test/interface.jl b/test/interface.jl index c419dbfcd..3651b6adb 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -133,18 +133,26 @@ end end @testset "copy" begin - w, b = rand(2), rand(2) - x1, x2 = rand(2), rand(2) + w, b = rand(2), rand(2) + x1, x2 = rand(2), rand(2) + + _, back = pullback(() -> sum(w .* x1), Params([w])) - gs1 = gradient(() -> sum(w .* x1), Params([w])) - gs2 = gradient(() -> sum(w .* x2), Params([w])) + g1 = back(1) + g1_w = g1[w] + g2 = back(nothing) + @test isnothing(g1[w]) + @test isnothing(g2[w]) - gs_new = copy(gs1) - copy!(gs2, gs1) + g3 = back(1) |> copy + g4 = back(nothing) + @test !isnothing(g3[w]) + @test g3[w] == g1_w + @test isnothing(g4[w]) - #TODO: a bit of a hacky workaround here, would be nice if we could compare gradients directly - @test collect(gs1) == collect(gs_new) - @test collect(gs2) == collect(gs1) + #TODO: a bit of a hacky workaround here, would be nice if we could compare gradients directly + g3_copy = copy(g3) + @test collect(g3_copy) == collect(g3) end @testset "map and broadcast" begin From 73d1d742f17af18f0c6b6329d87803e3e9e8e95f Mon Sep 17 00:00:00 2001 From: lassepe Date: Mon, 25 Oct 2021 12:50:38 +0200 Subject: [PATCH 249/490] Add tests for merge!(::Grads, ::Grads) --- src/compiler/interface.jl | 2 +- test/interface.jl | 31 ++++++++++++++++++++----------- 2 files changed, 21 insertions(+), 12 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 89285539f..e186a134c 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -301,7 +301,7 @@ end function Base.merge!(gs_dst::Grads, gs_src::Grads) union!(gs_dst.params, gs_src.params) - map!(copy, gs_dst, gs_src) + merge!(gs_dst.grads, gs_src.grads) gs_dst end diff --git 
a/test/interface.jl b/test/interface.jl index 3651b6adb..6029d51ee 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -150,7 +150,6 @@ end @test g3[w] == g1_w @test isnothing(g4[w]) - #TODO: a bit of a hacky workaround here, would be nice if we could compare gradients directly g3_copy = copy(g3) @test collect(g3_copy) == collect(g3) end @@ -173,16 +172,26 @@ end end @testset "dictionary interface" begin - w, b, x = rand(2), rand(2), rand(2) - ps = Params([w, b]) - gs = gradient(() -> sum(tanh.(w .* x .+ b)), ps) - - @test issetequal(keys(gs), ps) - @test length(values(gs)) == 2 - @test length(pairs(gs)) == 2 - k, v = first(pairs(gs)) - @test k === first(ps) - @test v === gs[first(ps)] + w1, b1, x1 = rand(2), rand(2), rand(2) + ps1 = Params([w1, b1]) + gs1 = gradient(() -> sum(tanh.(w1 .* x1 .+ b1)), ps1) + + @test issetequal(keys(gs1), ps1) + @test length(values(gs1)) == 2 + @test length(pairs(gs1)) == 2 + k, v = first(pairs(gs1)) + @test k === first(ps1) + @test v === gs1[first(ps1)] + + w2, b2, x2 = rand(2), rand(2), rand(2) + ps2 = Params([w2, b2]) + gs2 = gradient(() -> sum(tanh.(w2 .* x2 .+ b2)), ps2) + + keys1 = keys(gs1) |> collect |> copy + values1 = values(gs1) |> collect |> copy + gs_merged = merge!(gs1, gs2) + @test collect(keys(gs_merged)) == union(keys1, keys(gs2)) + @test collect(values(gs_merged)) == union(values1, values(gs2)) end @testset "iteration" begin From f9b9fbb6f5c3110a1d469552ad9a2443f9be5a75 Mon Sep 17 00:00:00 2001 From: lassepe Date: Mon, 25 Oct 2021 13:04:59 +0200 Subject: [PATCH 250/490] merge! with multiple other Grads objects --- src/compiler/interface.jl | 10 ++++++---- test/interface.jl | 40 ++++++++++++++++++++++++++++----------- 2 files changed, 35 insertions(+), 15 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index e186a134c..dc8cb8d18 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -299,15 +299,17 @@ function copy!(x::AbstractVector, gs::Grads) x end -function Base.merge!(gs_dst::Grads, gs_src::Grads) +function Base.merge!(gs_dst::Grads, gs_srcs::Grads...) + for gs_src in gs_srcs union!(gs_dst.params, gs_src.params) merge!(gs_dst.grads, gs_src.grads) - gs_dst + end + gs_dst end function Base.copy(gs::Grads) - gs_new = Grads(IdDict(), gs.params) - merge!(gs_new, gs) + gs_new = Grads(IdDict(), gs.params) + merge!(gs_new, gs) end broadcasted(f, gs::Grads, gss::ADictOrGrads...) = map(f, gs, gss...) 
diff --git a/test/interface.jl b/test/interface.jl index 6029d51ee..ad1c7f46c 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -172,26 +172,44 @@ end end @testset "dictionary interface" begin + w, b, x = rand(2), rand(2), rand(2) + ps = Params([w, b]) + gs = gradient(() -> sum(tanh.(w .* x .+ b)), ps) + + @test issetequal(keys(gs), ps) + @test length(values(gs)) == 2 + @test length(pairs(gs)) == 2 + k, v = first(pairs(gs)) + @test k === first(ps) + @test v === gs[first(ps)] + end + + @testset "merge" begin w1, b1, x1 = rand(2), rand(2), rand(2) ps1 = Params([w1, b1]) gs1 = gradient(() -> sum(tanh.(w1 .* x1 .+ b1)), ps1) - @test issetequal(keys(gs1), ps1) - @test length(values(gs1)) == 2 - @test length(pairs(gs1)) == 2 - k, v = first(pairs(gs1)) - @test k === first(ps1) - @test v === gs1[first(ps1)] - w2, b2, x2 = rand(2), rand(2), rand(2) ps2 = Params([w2, b2]) gs2 = gradient(() -> sum(tanh.(w2 .* x2 .+ b2)), ps2) - keys1 = keys(gs1) |> collect |> copy - values1 = values(gs1) |> collect |> copy + w3, b3, x3 = rand(2), rand(2), rand(2) + ps3 = Params([w3, b3]) + gs3 = gradient(() -> sum(tanh.(w3 .* x3 .+ b3)), ps3) + + # merging with a single other Grads object + keys1 = keys(gs1) + values1 = values(gs1) gs_merged = merge!(gs1, gs2) - @test collect(keys(gs_merged)) == union(keys1, keys(gs2)) - @test collect(values(gs_merged)) == union(values1, values(gs2)) + @test issetequal(keys(gs_merged), union(keys1, keys(gs2))) + @test issetequal(values(gs_merged), union(values1, values(gs2))) + @test length(pairs(gs_merged)) == 4 + + # merging with multiple other Grads objects + gs_merged = merge!(gs1, gs2, gs3) + @test issetequal(keys(gs_merged), union(keys1, keys(gs2), keys(gs3))) + @test issetequal(values(gs_merged), union(values1, values(gs2), values(gs3))) + @test length(pairs(gs_merged)) == 6 end @testset "iteration" begin From 60f53e709d8b5bc052a20fb4fcf0228004aa4723 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 26 Oct 2021 04:26:41 -0400 Subject: [PATCH 251/490] Iterators. Product, Filter, enumerate, zip (including inside map) (#785) * enumerate, Filter, Product * zip * more zip * tweaking filter locally * allow for nothing in Iterators.Product * allow nothing in enumerate, and tidy up * two more cases * fix map gradient to allow for early ending & mixed shapes * more cases for enumerate, zip * fixes for map * share code map + zip * try something re map * overall testset * one more restore * add some info messages * silence some warnings * three now pass * explain what the weird printout is for * early stopping was different before 1.5 * comments * project, too Co-authored-by: Michael Abbott --- src/lib/array.jl | 64 ++++++++++++++++++++++++++++++--- test/features.jl | 83 ++++++++++++++++++++++++++++++++++++++++++ test/gradcheck.jl | 91 ++++++++++++++++++++++++++++++++++++++++++----- test/runtests.jl | 7 ++++ test/tools.jl | 9 +++-- 5 files changed, 236 insertions(+), 18 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index a44edf7f6..7734ad5ca 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -179,6 +179,13 @@ end _tryreverse(m, x) = x _tryreverse(m::typeof(map), x::Union{AbstractVector, Tuple}) = reverse(x) +# With mismatched lengths, map stops early. With mismatched shapes, it makes a vector. +# So we keep axes(x) to restore gradient dx to its full length & correct shape. +_tryaxes(x) = axes(x) +_tryaxes(x::Tuple) = Val(length(x)) +_restore(dx, ax::Tuple) = axes(dx) == ax ? 
dx : reshape(vcat(dx, falses(prod(length, ax) - length(dx))), ax) +_restore(dx, ::Val{N}) where {N} = length(dx) < N ? ntuple(i -> get(dx,i,nothing), N) : NTuple{N}(dx) + # Sometimes a pullback doesn't return a Tuple, but rather returns only a # single nothing to say "all arguments have zero cotangent". This function is needed to # account for that inside the pullback for map. @@ -189,22 +196,27 @@ for (mapfunc,∇mapfunc) in [(:map,:∇map),(:pmap,:∇pmap)] @eval function $∇mapfunc(cx, f::F, args::Vararg{Any, N}) where {F, N} ys_and_backs = $mapfunc((args...) -> _pullback(cx, f, args...), args...) ys = map(first, ys_and_backs) - ys, function (Δ) - isnothing(Δ) && return nothing + arg_ax = map(_tryaxes, args) + function map_back(Δ) if Base.issingletontype(F) && length(args) == 1 Δarg = $mapfunc(((_,pb), δ) -> last_or_nothing(pb(δ)), ys_and_backs, Δ) # No unzip needed (nothing, Δarg) - elseif Base.issingletontype(F) # Ensures `f` is pure: nothing captured & no state - Δargs = _unzip($mapfunc(((_,pb), δ) -> tailmemaybe(pb(δ)), ys_and_backs, Δ), Val(N)) + elseif Base.issingletontype(F) + # Ensures `f` is pure: nothing captured & no state. + unzipped = _unzip($mapfunc(((_,pb), δ) -> tailmemaybe(pb(δ)), ys_and_backs, Δ), Val(N)) + Δargs = map(_restore, unzipped, arg_ax) (nothing, Δargs...) else # Apply pullbacks in reverse order. Needed for correctness if `f` is stateful. Δf_and_args_zipped = $mapfunc(((_,pb), δ) -> pb(δ), _tryreverse($mapfunc, ys_and_backs, Δ)...) Δf_and_args = _unzip(_tryreverse($mapfunc, Δf_and_args_zipped), Val(N + 1)) Δf = reduce(accum, Δf_and_args[1]; init=nothing) - (Δf, Δf_and_args[2:end]...) + Δargs = map(_restore, Δf_and_args[2:end], arg_ax) + (Δf, Δargs...) end end + map_back(::Nothing) = nothing + return ys, map_back end @eval @adjoint function $mapfunc(f, args::Union{AbstractArray,Tuple}...) @@ -254,6 +266,48 @@ end end end +# Iterators + +@adjoint function enumerate(xs) + back(::AbstractArray{Nothing}) = nothing + back(dy::NamedTuple{(:itr,)}) = tuple(dy.itr) + back(diys) = (map(last, diys),) + enumerate(xs), back +end + +@adjoint Iterators.Filter(f, x) = pullback(filter, f, collect(x)) + +_ndims(::Base.HasShape{d}) where {d} = d +_ndims(x) = Base.IteratorSize(x) isa Base.HasShape ? _ndims(Base.IteratorSize(x)) : 1 + +@adjoint function Iterators.product(xs...) + back(::AbstractArray{Nothing}) = nothing + back(dy::NamedTuple{(:iterators,)}) = dy.iterators + function back(dy::AbstractArray) + d = 1 + ntuple(length(xs)) do n + first(dy)[n] === nothing && return nothing + nd = _ndims(xs[n]) + dims = ntuple(i -> i tuple + Iterators.Zip(xs), back +end + # Reductions @adjoint function sum(xs::AbstractArray; dims = :) if dims === (:) diff --git a/test/features.jl b/test/features.jl index 795879fed..545db0279 100644 --- a/test/features.jl +++ b/test/features.jl @@ -492,6 +492,89 @@ end @test gradient(h, Dict(:x=>3.0, :y=>4.0, :z=>2.3)) == ((y = 0.0, z = 3.0, x = 2.3),) end +@testset "Iterators" begin + # enumerate + @test gradient(1:5) do xs + sum([x^i for (i,x) in enumerate(xs)]) + end == ([1, 4, 27, 256, 3125],) + + @test gradient([1,10,100]) do xs + sum([xs[i]^i for (i,x) in enumerate(xs)]) + end == ([1, 2 * 10^1, 3 * 100^2],) + + @test gradient([1,10,100]) do xs + sum((xs[i]^i for (i,x) in enumerate(xs))) # same without collect + end == ([1, 2 * 10^1, 3 * 100^2],) + + # zip + if VERSION >= v"1.5" + # On Julia 1.4 and earlier, [x/y for (x,y) in zip(10:14, 1:10)] is a DimensionMismatch, + # while on 1.5 - 1.7 it stops early. 
+ + @test gradient(10:14, 1:10) do xs, ys + sum([x/y for (x,y) in zip(xs, ys)]) + end[2] ≈ vcat(.-(10:14) ./ (1:5).^2, zeros(5)) + + @test_broken gradient(10:14, 1:10) do xs, ys + sum(x/y for (x,y) in zip(xs, ys)) # same without collect + # Here @adjoint function Iterators.Zip(xs) gets dy = (is = (nothing, nothing),) + end[2] ≈ vcat(.-(10:14) ./ (1:5).^2, zeros(5)) + end + + bk_z = pullback((xs,ys) -> sum([abs2(x*y) for (x,y) in zip(xs,ys)]), [1,2], [3im,4im])[2] + @test bk_z(1.0)[1] isa AbstractVector{<:Real} # projection + + # Iterators.Filter + @test gradient(2:9) do xs + sum([x^2 for x in xs if iseven(x)]) + end == ([4, 0, 8, 0, 12, 0, 16, 0],) + + @test gradient(2:9) do xs + sum(x^2 for x in xs if iseven(x)) # same without collect + end == ([4, 0, 8, 0, 12, 0, 16, 0],) + + # Iterators.Product + @test gradient(1:10, 3:7) do xs, ys + sum([x^2+y for x in xs, y in ys]) + end == (10:10:100, fill(10, 5)) + + @test_broken gradient(1:10, 3:7) do xs, ys + sum(x^2+y for x in xs, y in ys) # same without collect + # Here @adjoint function Iterators.product(xs...) gets dy = (iterators = (nothing, nothing),) + end == (10:10:100, fill(10, 5)) + + # Repeat that test without sum(iterator) -- also receives dy = (iterators = (nothing, nothing),) + function prod_acc(xs, ys) + out = 0 + # for (x,y) in Iterators.product(xs, ys) + # out += x^2+y + for xy in Iterators.product(xs, ys) + out += xy[1]^2 + xy[2] + end + out + end + @test prod_acc(1:10, 3:7) == sum(x^2+y for x in 1:10, y in 3:7) + gradient(prod_acc, 1:10, 3:7) == (nothing, nothing) # sadly + @test_broken gradient(prod_acc, 1:10, 3:7) == (10:10:100, fill(10, 5)) + + @test gradient(rand(2,3)) do A + sum([A[i,j] for i in 1:1, j in 1:2]) + end == ([1 1 0; 0 0 0],) + + @test gradient(ones(3,5), 1:7) do xs, ys + sum([x+y for x in xs, y in ys]) + end == (fill(7, 3,5), fill(15, 7)) + + bk_p = pullback((xs,ys) -> sum([x/y for x in xs, y in ys]), Diagonal([3,4,5]), [6,7]')[2] + @test bk_p(1.0)[1] isa Diagonal # projection + @test bk_p(1.0)[2] isa Adjoint + + # Iterators.Product with enumerate + @test gradient([2 3; 4 5]) do xs + sum([x^i+y for (i,x) in enumerate(xs), y in xs]) + end == ([8 112; 36 2004],) +end + # https://github.com/JuliaDiff/ChainRules.jl/issues/257 @testset "Keyword Argument Passing" begin struct Type1{VJP} diff --git a/test/gradcheck.jl b/test/gradcheck.jl index ac7893cfa..66e558869 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -44,26 +44,27 @@ end Random.seed!(0) -@testset "println, show, print" begin +@testset "println, show, string, etc" begin function foo(x) Base.show(x) Base.print(x) + Base.print(stdout, x) Base.println(x) + Base.println(stdout, x) Core.show(x) Core.print(x) Core.println(x) return x end + println("The following printout is from testing that `print` doesn't upset gradients:") @test gradtest(foo, [5.0]) -end -@testset "string, repr" begin - function foo(x) + function bar(x) string(x) repr(x) return x end - @test gradtest(foo, [5.0]) + @test gradtest(bar, [5.0]) end @@ -356,11 +357,11 @@ end @test gradient(x -> map(+, x, (1,2,3))[1], (4,5,6)) == ((1.0, nothing, nothing),) @test gradient(x -> map(+, x, [1,2,3])[1], (4,5,6)) == ((1.0, 0.0, 0.0),) - @test_broken gradient(x -> map(+, x, (1,2,3))[1], [4,5,6]) == ([1,0,0],) # Gradient [1.0, 0.0, 0.0] should be a tuple, since v0.6.0 at least + @test gradient(x -> map(+, x, (1,2,3))[1], [4,5,6]) == ([1,0,0],) # mismatched lengths, should zip - @test_broken gradient(x -> map(+, x, [1,2,3,99])[1], (4,5,6)) == ((1.0, 0.0, 0.0),) # BoundsError: attempt to access 
3-element Vector{Float64} at index [4] - @test_broken gradient(x -> map(+, x, [1,2,3])[1], (4,5,6,99)) == ((1.0, 0.0, 0.0, nothing),) # DimensionMismatch("variable with size(x) == (4,) cannot have a gradient with size(dx) == (3,) + @test gradient(x -> map(+, x, [1,2,3,99])[1], (4,5,6)) == ((1.0, 0.0, 0.0),) + @test gradient(x -> map(+, x, [1,2,3])[1], (4,5,6,99)) == ((1.0, 0.0, 0.0, nothing),) end @testset "Alternative Pmap Dispatch" begin @@ -383,6 +384,23 @@ end @test gradient(x -> sum(map(f, x)), 1:10) == (10:-1:1,) end +@testset "vararg map" begin + # early stop + if VERSION >= v"1.5" + # In Julia 1.4 and earlier, map(*,rand(5),[1,2,3]) is a DimensionMismatch + @test gradient(x -> sum(map(*,x,[1,2,3])), rand(5)) == ([1,2,3,0,0],) + end + @test gradient(x -> sum(map(*,x,(1,2,3))), rand(5)) == ([1,2,3,0,0],) + @test gradient(x -> sum(map(*,x,[1,2,3])), Tuple(rand(5))) == ((1.0, 2.0, 3.0, nothing, nothing),) + + # mixed shapes + @test gradient((x,y) -> sum(map(*,x,y)), [1,2,3,4], [1 2; 3 4]) == ([1,3,2,4], [1 3; 2 4]) + @test gradient((x,y) -> sum(map(*,x,y)), [1,2,3], [1 2; 3 4]) == ([1,3,2], [1 3; 2 0]) + @test gradient((x,y) -> sum(map(*,x,y)), (1,2,3), [1 2; 3 4]) == ((1,3,2), [1 3; 2 0]) + @test gradient((x,y) -> sum(map(*,x,y)), [1,2,3,4,5], [1 2; 3 4]) == ([1,3,2,4,0], [1 3; 2 4]) + @test gradient((x,y) -> sum(map(*,x,y)), (1,2,3,4,5), [1 2; 3 4]) == ((1,3,2,4,nothing), [1 3; 2 4]) +end + @testset "sort" begin @test gradtest(sort, 5) correct = [ @@ -1748,6 +1766,63 @@ end gradient(x->norm(x*[1im 1]), 1.23) end +@testset "zip & Iterators.product" begin + # roughly from https://github.com/FluxML/Zygote.jl/issues/221 + d = rand(7) + @test gradient(rand(11)) do s + tot = 0 + for (a, b) in zip(s, d) + tot += 13a + 17b + end + tot + end == ([13, 13, 13, 13, 13, 13, 13, 0, 0, 0, 0],) + + @test gradient([1,2,3,4], [1 2; 3 4]) do x, y # mismatched shapes + tot = 0 + for (a,b) in zip(x,y) + tot += a * b + end + tot + end == ([1, 3, 2, 4], [1 3; 2 4]) # Δy is a matrix + + @test gradient((1,2,3), [1 2; 3 4]) do x, y # ... and lengths, and a tuple + tot = 0 + for (a,b) in zip(x,y) + tot += a * b + end + tot + end == ((1, 3, 2), [1 3; 2 0]) # map stops early, Δy reshaped to a matrix + + # similar for enumertate -- tests NamedTuple adjoint + @test gradient([2,3,4]) do x + tot = 0 + for (i, x) in enumerate(x) + tot += x^i + end + tot + end == ([1, 6, 3 * 4^2],) + + # and for Iterators.product + @test gradient([3,4,5], [6,7,8]) do x, y + tot = 0 + for (a,b) in Iterators.product(x, y) + tot += a^2 + 10b + end + tot + end == ([18, 24, 30], [30, 30, 30]) + + @test gradient([3,4], [1,2,3]) do x, y + tot = 0 + for ab in Iterators.product(x, y) + tot += *(ab...) + end + tot + end == ([6,6], [7,7,7]) + + # from https://github.com/FluxML/Zygote.jl/pull/785#issuecomment-740562889 + @test gradient(A -> sum([A[i,j] for i in 1:3, j in 1:3]), ones(3,3)) == (ones(3,3),) +end + # https://github.com/FluxML/Zygote.jl/issues/804 @testset "Unused comprehension" begin # Comprehension is used. 
diff --git a/test/runtests.jl b/test/runtests.jl index 022727fbe..d1b34da77 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -3,10 +3,13 @@ using Zygote: gradient, ZygoteRuleConfig using CUDA using CUDA: has_cuda +@testset "all" begin # Overall testset ensures it keeps running after failure + if has_cuda() @testset "CUDA tests" begin include("cuda.jl") end + @info "CUDA tests have run" else @warn "CUDA not found - Skipping CUDA Tests" end @@ -31,6 +34,7 @@ end @testset "Features" begin include("features.jl") + @info "features.jl done" end @testset "Forward" begin @@ -43,6 +47,7 @@ end @testset "ChainRules" begin include("chainrules.jl") + @info "chainrules.jl done" end @testset "Gradients" begin @@ -56,3 +61,5 @@ end @testset "Compiler" begin include("compiler.jl") end + +end # @testset "all" diff --git a/test/tools.jl b/test/tools.jl index 717612284..77b268646 100644 --- a/test/tools.jl +++ b/test/tools.jl @@ -48,17 +48,16 @@ end end function Tester(p) - @show Zygote.isderiving(p) + # @show Zygote.isderiving(p) cpu_offload = Zygote.isderiving(p) ? 0.0 : 0.2 Tester(cpu_offload) end - function f(p) + function f56(p) sum(Tester(p).cpu_offload .* p) end - p = [1.0] - gs = gradient(f, p) - @test gs[1] == [0.] + gs56 = gradient(f56, [1.0]) + @test gs56[1] == [0.] end From f2bb45d232e4eb1d5ecbcd0b119d91041dd3e5ad Mon Sep 17 00:00:00 2001 From: ST John Date: Thu, 4 Nov 2021 16:15:53 +0200 Subject: [PATCH 252/490] remove `@adjoint function cholesky` --- src/lib/array.jl | 29 ----------------------------- 1 file changed, 29 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 15b994564..b35883e59 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -540,35 +540,6 @@ end @adjoint Matrix(A::LinearAlgebra.HermOrSym{T,S}) where {T,S} = Matrix(A), Δ -> (convert(S, Δ),) -@adjoint function cholesky(Σ::Real) - C = cholesky(Σ) - return C, Δ::NamedTuple->(Δ.factors[1, 1] / (2 * C.U[1, 1]),) -end - -@adjoint function cholesky(Σ::Diagonal; check = true) - C = cholesky(Σ, check = check) - return C, Δ::NamedTuple -> begin - issuccess(C) || throw(PosDefException(C.info)) - return Diagonal(diag(Δ.factors) .* inv.(2 .* C.factors.diag)), nothing - end -end - -# Implementation due to Seeger, Matthias, et al. "Auto-differentiating linear algebra." -@adjoint function cholesky(Σ::Union{StridedMatrix, Symmetric{<:Real, <:StridedMatrix}}; check = true) - C = cholesky(Σ, check = check) - return C, function(Δ::NamedTuple) - issuccess(C) || throw(PosDefException(C.info)) - U, Ū = C.U, Δ.factors - Σ̄ = similar(U.data) - Σ̄ = mul!(Σ̄, Ū, U') - Σ̄ = copytri!(Σ̄, 'U') - Σ̄ = ldiv!(U, Σ̄) - Σ̄ = BLAS.trsm!('R', 'U', 'T', 'N', one(eltype(Σ)), U.data, Σ̄) - Σ̄[diagind(Σ̄)] ./= 2 - return (UpperTriangular(Σ̄),) - end -end - @adjoint function lyap(A::AbstractMatrix, C::AbstractMatrix) X = lyap(A, C) return X, function (X̄) From 4ed3a86db708a27bfe0afd5aeaa6408dd8d43a3e Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 7 Nov 2021 16:15:36 -0500 Subject: [PATCH 253/490] Insert `_project` into `getproperty`'s gradient, and then improve `z2d` etc. 
to restore stability (#1104) * insert _project into getproperty * use zygote2differential in _project * improve type-stability of zygote2differential * fix 1 test and break 2 * 2 not broken in fact * skip inference test * skip more inference tests * improve inference for 1.6 * skip a test on 1.6 * skip 2 * handle nothings * re-enable some inference tests on 1.6 * arrays of abstract tangents, and NamedTuple tests * reverse dispatch for wrap_chainrules_input * fix a typo * fix more notation * restore a test * add DynamicPPL.jl * fix a test * try removing piracy * restore some piracy, tidy * reinterpret * reinterpret * collapse nothings * DistributionsAD too * collapse zeros in z2d * comments * indents * change one comment --- .github/workflows/Downstream.yml | 2 + Project.toml | 2 +- src/compiler/chainrules.jl | 154 +++++++++++++++++++++++-------- src/lib/lib.jl | 3 +- test/chainrules.jl | 24 +++++ test/compiler.jl | 4 +- test/features.jl | 37 +++++++- test/gradcheck.jl | 12 +++ test/runtests.jl | 88 +++++++++--------- 9 files changed, 239 insertions(+), 87 deletions(-) diff --git a/.github/workflows/Downstream.yml b/.github/workflows/Downstream.yml index c40bc6617..308754b0a 100644 --- a/.github/workflows/Downstream.yml +++ b/.github/workflows/Downstream.yml @@ -20,6 +20,8 @@ jobs: package: - {user: FluxML, repo: Flux.jl, group: All} - {user: FluxML, repo: NNlib.jl, group: All} + - {user: TuringLang, repo: DynamicPPL.jl, group: All} + - {user: TuringLang, repo: DistributionsAD.jl, group: Zygote} - {user: SciML, repo: DiffEqFlux.jl, group: Layers} - {user: SciML, repo: NeuralPDE.jl, group: NNPDE} steps: diff --git a/Project.toml b/Project.toml index 04196b602..a8ea16a25 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.29" +version = "0.6.30" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 9bfde430a..b3157f289 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -115,28 +115,61 @@ for T_outer in (:Tuple, :NamedTuple) ChainRulesCore.backing(xp) # this is accessing ChainRulesCore internals, but it is prob safe enough, and it is fastest end end -# Could `reinterpret` instead of broadcasting here -- TODO -@inline wrap_chainrules_output(xs::AbstractArray{<:ChainRules.Tangent}) = wrap_chainrules_output.(xs) +wrap_chainrules_output(dxs::AbstractArray{<:Number}) = dxs +wrap_chainrules_output(dxs::AbstractArray{<:AbstractArray{<:Number}}) = dxs +wrap_chainrules_output(dxs::AbstractArray) = map(wrap_chainrules_output, dxs) +#= +# As an optimisation, we can convert by `reinterpret` for bitstypes, e.g. arrays of tuples of numbers +@inline function wrap_chainrules_output(dxs::AbstractArray{<:ChainRules.Tangent{<:Any, B}}) where {B} + if isbitstype(B) + # B is the backing type. It still contains NoTangent etc, which need converting to Nothing + reinterpret(wrap_chainrules_output(B), dxs) + else + map(wrap_chainrules_output, dxs) + end +end +wrap_chainrules_output(::Type{<:AbstractZero}) = Nothing +wrap_chainrules_output(::Type{NamedTuple{L,T}}) where {L,T} = NamedTuple{L,wrap_chainrules_output(T)} +@generated function wrap_chainrules_output(::Type{T}) where T<:Tuple + inner = map(wrap_chainrules_output, T.parameters) + :(Tuple{$(inner...)}) +end +=# """ - wrap_chainrules_input(x) + wrap_chainrules_input(dx) -Convert `x` from the format Zygote uses internally to differentials types ChainRules uses. 
+Convert `dx` from the format Zygote uses internally to differentials types ChainRules uses. """ -@inline wrap_chainrules_input(x) = x +@inline wrap_chainrules_input(dx) = dx @inline wrap_chainrules_input(::Nothing) = ChainRules.ZeroTangent() +@inline wrap_chainrules_input(::Tuple{Vararg{Nothing}}) = ChainRules.ZeroTangent() @inline wrap_chainrules_input(::AbstractArray{Nothing}) = ChainRules.ZeroTangent() -@inline function wrap_chainrules_input(xs::Union{Tuple, NamedTuple}) - xp = map(wrap_chainrules_input, xs) - ChainRules.Tangent{Any, typeof(xp)}(xp) +@inline function wrap_chainrules_input(dxs::Union{Tuple, NamedTuple}) + xp = map(wrap_chainrules_input, dxs) + # This produces Tangent{Any} since it does not get to see the primal, `x`. + ChainRulesCore.Tangent{Any, typeof(xp)}(xp) end # For mutable types, including x=Ref(1), Zygote makes Ref{Any}(::NamedTuple) -@inline wrap_chainrules_input(x::Ref) = wrap_chainrules_input(x[]) -# Could `reinterpret` instead of broadcasting here -- TODO -@inline wrap_chainrules_input(xs::AbstractArray{<:Ref}) = wrap_chainrules_input.(xs) -@inline wrap_chainrules_input(xs::AbstractArray{<:Union{Nothing, <:Ref}}) = wrap_chainrules_input.(xs) # no test invented for this -@inline wrap_chainrules_input(xs::AbstractArray{<:NamedTuple}) = wrap_chainrules_input.(xs) -@inline wrap_chainrules_input(xs::AbstractArray{<:Union{Nothing, <:NamedTuple}}) = wrap_chainrules_input.(xs) +@inline wrap_chainrules_input(dx::Ref) = wrap_chainrules_input(dx[]) +# For arrays, whitelist the safe ones, but always look inside Any[]: +@inline wrap_chainrules_input(dxs::AbstractArray{<:Number}) = dxs +@inline wrap_chainrules_input(dxs::AbstractArray{<:AbstractArray{<:Number}}) = dxs +@inline wrap_chainrules_input(dxs::AbstractArray) = map(wrap_chainrules_input, dxs) + +#= +# Could `reinterpret` instead here? See issue 1112. +# One easy case, might be this: +@inline wrap_chainrules_input(xs::Base.ReinterpretArray{<:NamedTuple, <:Tangent}) = parent(xs) + +# This is for `z2d` reinterpret below: +wrap_chainrules_input(::Type{Nothing}) = NoTangent +wrap_chainrules_input(::Type{NamedTuple{L,T}}) where {L,T} = NamedTuple{L,wrap_chainrules_input(T)} +@generated function wrap_chainrules_input(::Type{T}) where T<:Tuple + inner = map(wrap_chainrules_input, T.parameters) + :(Tuple{$(inner...)}) +end +=# """ _project(x, dx) @@ -146,21 +179,13 @@ Also handles some Zygote-specific corrections, such as `x::Array, dx::Tuple`. Safe to apply to arbitrary input. """ @inline function _project(x, dx) - # Note that this use of `wrap_chainrules_input` has the primal `x`, so could - # avoid making `Tangent{Any}`, perhaps via `zygote2differential` -- TODO. 
- wrap_chainrules_output(ProjectTo(x)(wrap_chainrules_input(dx))) + wrap_chainrules_output(ProjectTo(x)(zygote2differential(dx, x))) end # Restore splatted arrays _project(x::AbstractArray, dx::Tuple) = _project(x, reshape(collect(dx), axes(x))) # Piracy: -# wrap_chainrules_input doesn't handle array of Union{Int,Nothing} -(::ChainRulesCore.ProjectTo)(::Nothing) = ChainRulesCore.NoTangent() - -# CRC likes Tangent{<:Complex}, but Zygote makes Tangent{Any} -(project::ProjectTo{<:Complex})(dx::Tangent) = project(Complex(dx.re, dx.im)) - # CRC likes Tangent{AbstractArray}, but Zygote makes Tangent{Any} # in particular this would hit https://github.com/JuliaDiff/ChainRulesCore.jl/blob/2ec2549b73b22bc08f554dae864fb650cfb9c3d7/src/projection.jl#L139 # if we were not losing track of the Primal in the Tangent @@ -236,32 +261,85 @@ end zygote2differential(dx, primal) Convert input `dx` from the Zygote format to the ChainRules differential types. +This is similar to `wrap_chainrules_input(dx)`, but because it gets `primal::T`, +it can turn `NamedTuple`s into `Tangent{T}(...)` not `Tangent{Any}(...)`. """ zygote2differential(x, primal) = z2d(x, primal) zygote2differential(::Nothing, ::Any) = NoTangent() zygote2differential(t::Tuple, primal::Tuple) = map(z2d, t, primal) zygote2differential(t::Tuple, primal) = (@warn "primal should be a tuple, not $primal"; return t) -z2d(x, ::Any) = x + z2d(::Nothing, ::Any) = NoTangent() -z2d(a::AbstractArray{<:Number}, primal::AbstractArray{T}) where T = a -# Could probably `reinterpret` instead of broadcasting here -- TODO -z2d(a::AbstractArray, primal::AbstractArray{T}) where T = z2d.(a, primal) +z2d(::Tuple{Vararg{Nothing}}, ::Tuple) = NoTangent() # collapse all-zero case +z2d(dx, ::Any) = dx +z2d(dx::AbstractArray{<:Number}, primal::AbstractArray) = dx +z2d(dx::AbstractArray{<:AbstractArray{<:Number}}, primal::AbstractArray) = dx +z2d(dx::AbstractArray, primal::AbstractArray) = map(z2d, dx, primal) +#= +# As an optimisation, we can convert by `reinterpret` for bitstypes, e.g. arrays of tuples of numbers +function z2d(dx::AbstractArray{S}, primal::AbstractArray{P}) where {S,P} + if isbitstype(S) + T = wrap_chainrules_input(S) + reinterpret(Tangent{P,T}, dx) + else + map(z2d, dx, primal) + end +end +=# + # Note: this should never be hit if we are converting things right, but it seems to be # happening in the wild for sufficiently weird functions/types. # This fixes most (all?) cases, but it would be good to find what we miss. z2d(x::Union{AbstractZero, Tangent}, ::Any) = return x -function z2d(t::Tuple, primal::Tuple) - tp::Tuple = map(z2d, t, primal) - primal_type = typeof(primal) - return canonicalize(Tangent{primal_type, typeof(tp)}(tp)) + +function z2d(delta::Tuple, primal::Tuple) + backing = map(z2d, delta, primal) + if backing isa Tuple{Vararg{AbstractZero}} + return NoTangent() # collapse all-zero case + else + return canonicalize(Tangent{typeof(primal), typeof(backing)}(backing)) + end end -function z2d(t::NamedTuple, primal) - primal_type = typeof(primal) - fnames = fieldnames(primal_type) - complete_t = NamedTuple{fnames}(fn in keys(t) ? t[fn] : nothing for fn in fnames) - primals = NamedTuple{fnames}(getfield(primal, fn) for fn in fnames) - tp::NamedTuple = map(z2d, complete_t, primals) - return canonicalize(Tangent{primal_type, typeof(tp)}(tp)) +# Dict handling in Zygote is a mess... should this become a `Tangent{Dict,Dict}` ? 
+# Right now it uses a NamedTuple but not for fields of the AbstractDict struct +z2d(dx::NamedTuple, primal::AbstractDict) = dx + +function z2d(delta::NamedTuple, primal::T) where T # arbitrart struct + fnames = fieldnames(T) + deltas = map(n -> get(delta, n, nothing), fnames) + primals = map(n -> getfield(primal, n), fnames) + inner = map(z2d, deltas, primals) # recurse into fields + if inner isa Tuple{Vararg{AbstractZero}} + return NoTangent() # collapse all-zero case + else + backing = NamedTuple{fnames}(inner) + return canonicalize(Tangent{T, typeof(backing)}(backing)) + end end + +# Dict case matches signature for ambiguity reasons: +z2d(dx::NamedTuple{L,S}, primal::AbstractDict) where {L,S<:Tuple{Vararg{Union{Number,Nothing}}}} = dx +# On Julia <= 1.6, this fixes easy cases which do not require recursion into fields, e.g. +# @inferred Zygote.z2d((re=1, im=nothing), 3.0+im) +@generated function z2d(delta::NamedTuple{L,S}, primal::T) where {L,S<:Tuple{Vararg{Union{Number,Nothing}}}, T} + fnames = fieldnames(T) + deltas = map(fnames) do n + i = findfirst(isequal(n), L) + if i == nothing || S.parameters[i] == Nothing + :(NoTangent()) + else + :(delta.$n) + end + end + if all(d -> d == :(NoTangent()), deltas) + return :(NoTangent()) # collapse all-zero case + else + return quote + backing = NamedTuple{$fnames}(($(deltas...),)) + Tangent{$T, typeof(backing)}(backing) + end + end +end + z2d(dx::Ref, primal) = z2d(dx[], primal) # mutable structs diff --git a/src/lib/lib.jl b/src/lib/lib.jl index 96422d78c..f154ecd2a 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -227,7 +227,8 @@ end function back(Δ) accum_param(__context__, val, Δ) === nothing && return if isimmutable(x) - ((; nt_nothing(x)..., pair(Val(f), Δ, x)...), nothing) + dx = (; nt_nothing(x)..., pair(Val(f), Δ, x)...) + (_project(x, dx), nothing) else dx = grad_mut(__context__, x) dx[] = (; dx[]..., pair(Val(f), accum(getfield(dx[], f), Δ))...) diff --git a/test/chainrules.jl b/test/chainrules.jl index 80da51743..e34be0fa1 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -349,3 +349,27 @@ end @fastmath x^2.0 end == (4.0,) end + +@testset "zygote2differential inference" begin + @test @inferred(Zygote.z2d(1.0, 2.0)) isa Real + @test @inferred(Zygote.z2d([1,2,3], [4,5,6])) isa Vector + @test @inferred(Zygote.z2d((1, 2.0, 3+4im), (5, 6.0, 7+8im))) isa Tangent{<:Tuple} + + # Below Julia 1.7, these need a @generated version to be inferred: + @test @inferred(Zygote.z2d((re=1,), 3.0+im)) isa Tangent{ComplexF64} + @test @inferred(Zygote.z2d((re=1, im=nothing), 3.0+im)) isa Tangent{ComplexF64} + + # collapse nothings + @test @inferred(Zygote.z2d((nothing,), (1,))) === NoTangent() + @test @inferred(Zygote.z2d((nothing, nothing), (1,2))) === NoTangent() + + # To test the generic case, we need a struct within a struct. 
+ nested = Tangent{Base.RefValue{ComplexF64}}(; x=Tangent{ComplexF64}(; re=1, im=NoTangent()),) + if VERSION > v"1.7-" + @test @inferred(Zygote.z2d((; x=(; re=1)), Ref(3.0+im))) == nested + @test @inferred(Zygote.z2d((; x=(; re=nothing)), Ref(3.0+im))) === NoTangent() + else + @test Zygote.z2d((; x=(; re=1)), Ref(3.0+im)) == nested + @test Zygote.z2d((; x=(; re=nothing)), Ref(3.0+im)) === NoTangent() + end +end diff --git a/test/compiler.jl b/test/compiler.jl index eec71e53d..bc37d271e 100644 --- a/test/compiler.jl +++ b/test/compiler.jl @@ -163,7 +163,9 @@ end y, back = @inferred pullback(x -> x.m, g) @test y == getfield(g, :m) # This type instability is due to the handling of non-bitstypes in `accum_param` - @test Base.return_types(back, Tuple{Vector{Float64}}) == Any[Union{Tuple{Nothing}, typeof(((m = [1.0, 0.0, 0.0], P = nothing),))}] + if VERSION > v"1.7-" + @test Base.return_types(back, Tuple{Vector{Float64}}) == Any[Union{Tuple{Nothing}, typeof(((m = [1.0, 0.0, 0.0], P = nothing),))}] + end @test back([1., 0, 0]) == ((m = [1.0, 0.0, 0.0], P = nothing),) Base.getproperty(g::Gaussian, s::Symbol) = 2getfield(g, s) diff --git a/test/features.jl b/test/features.jl index 545db0279..839e98cc4 100644 --- a/test/features.jl +++ b/test/features.jl @@ -176,9 +176,13 @@ end @test gradient(t -> t[1]*t[2], (2, 3)) == ((3, 2),) -@test gradient(x -> x.re, 2+3im) === (1.0 + 0.0im,) +@test gradient(x -> x.re, 2+3im) === (1.0 + 0.0im,) # one NamedTuple +@test gradient(x -> x.re*x.im, 2+3im) == (3.0 + 2.0im,) # two, different fields +@test gradient(x -> x.re*x.im + x.re, 2+3im) == (4.0 + 2.0im,) # three, with accumulation -@test gradient(x -> x.re*x.im, 2+3im) == (3.0 + 2.0im,) +@test gradient(x -> abs2(x * x.re), 4+5im) == (456.0 + 160.0im,) # gradient participates +@test gradient(x -> abs2(x * real(x)), 4+5im) == (456.0 + 160.0im,) # function not getproperty +@test gradient(x -> abs2(x * getfield(x, :re)), 4+5im) == (456.0 + 160.0im,) struct Bar{T} a::T @@ -418,6 +422,11 @@ end @test gradient((x,y,z) -> sum((x,y,z)[1:2]), 7, 8.8, 9.9) == (1.0, 1.0, nothing) @test gradient((x,y,z) -> sum((x,y,z)[[1,2,1]]), 1,2,3) == (2, 1, nothing) + + @test gradient(xs -> sum(x -> x[2], xs), [(1,2,3), (4,5,6)]) == ([(nothing, 1.0, nothing), (nothing, 1.0, nothing)],) + @test gradient(xs -> sum(x -> prod(x[2:3]), xs), [(1,2,3), (4,5,6)]) == ([(nothing, 3.0, 2.0), (nothing, 6.0, 5.0)],) + @test gradient(xs -> sum(first, xs), fill((4,3),2)) == ([(1.0, nothing), (1.0, nothing)],) + @test gradient(xs -> sum(x -> abs2(x[1]), xs), fill((4,3),2)) == ([(8.0, nothing), (8.0, nothing)],) end @testset "@timed" begin @@ -452,6 +461,13 @@ end @test gradient(x -> x.x^2 + x.x, Ref(3)) === ((x = 7.0,),) @test gradient(x -> real(x.x^2 + im * x.x), Ref(4)) === ((x = 8.0,),) + # Field access of contents: + @test gradient(x -> abs2(x.x) + 7 * x.x.re, Ref(1+im)) == ((x = 9.0 + 2.0im,),) + @test_broken gradient(x -> abs2(x[1].x) + 7 * x[1].x.re, [Ref(1+im)]) == ([(x = 9.0 + 2.0im,)],) + @test_broken gradient(x -> abs2(x[1].x) + 7 * real(x[1].x), [Ref(1+im)]) == ([(x = 9.0 + 2.0im,)],) # worked on 0.6.0, 0.6.20 + + @test_broken gradient(x -> abs2(x[].x) + 7 * real(x[].x), Ref(Ref(1+im))) == ((x = 9.0 + 2.0im,),) # gives nothing, same in 0.6.0 + # Array of mutables: @test gradient(x -> sum(getindex.(x).^2), Ref.(1:3))[1] == [(;x=2i) for i in 1:3] @test gradient(x -> sum(abs2∘getindex, x), Ref.(1:3))[1] == [(;x=2i) for i in 1:3] @@ -464,6 +480,17 @@ end @test gradient(x -> sum(sum, Ref(x) .* [1,2,3]), [4,5]) == ([6.0, 6.0],) end +@testset 
"NamedTuples" begin + @test gradient(x -> x.a, (a=1, b=2)) == ((a = 1, b = nothing),) + @test gradient(x -> x[1].a, [(a=1, b=2)]) == ([(a = 1, b = nothing)],) + @test gradient(x -> x[1].a, [(a=1, b=2), (a=3, b=4)]) == ([(a = 1, b = nothing), nothing],) + + # Mix with Ref + @test gradient(x -> x[].a, Ref((a=1, b=2))) == ((x = (a = 1, b = nothing),),) + @test gradient(x -> x[1][].a, [Ref((a=1, b=2)), Ref((a=3, b=4))]) == ([(x = (a = 1, b = nothing),), nothing],) + @test gradient(x -> x[1].a, [(a=1, b=2), "three"]) == ([(a = 1, b = nothing), nothing],) +end + function type_test() Complex{<:Real} end @@ -692,4 +719,10 @@ end @test gradient(x -> sum(gradient(y -> sum(y.^2), x)[1]), [1, 2])[1] ≈ [2, 2] @test gradient(x -> sum(gradient(y -> sum(sin.(y)), x)[1]), [1, 2])[1] ≈ [-0.8414709848078965, -0.9092974268256817] @test gradient(x -> sum(abs, gradient(y -> sum(log.(2 .* exp.(y)) .^ 2), x)[1]), [1, 2])[1] ≈ [2,2] + + # getproperty, Tangents, etc + @test gradient(xs -> sum((x->x.im^2).(xs)), [1+2im,3])[1] == [4im, 0] + @test gradient(xs -> sum((x->x.im^2), xs), [1+2im,3])[1] == [4im, 0] + @test gradient(xs -> sum(map(x->x.im^2, xs)), [1+2im,3])[1] == [4im, 0] + @test gradient(xs -> mapreduce(x->x.im^2, +, xs), [1+2im,3])[1] == [4im, 0] end diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 66e558869..ef958da48 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1878,6 +1878,18 @@ end a = rand(3) @test Zygote.gradient(x->sum(x .+ rand.()), a) == (ones(3),) +@testset "Zygote 660" begin + # https://github.com/FluxML/Zygote.jl/pull/660 + function example(x,N) + ax = axes(x) + extraAxe = ax[2+N:end] + filledLoc = fill(1, N) + return x[:, filledLoc..., extraAxe...] + end + y, back = pullback(example, randn(5,3,4,3), 2) + @test back(zero(y).=1) isa Tuple{Array{Float64,4}, Nothing} +end + @testset "CRC issue 440" begin # https://github.com/JuliaDiff/ChainRulesCore.jl/issues/440 f(x,y) = sum(sum, [[x[i],y[i]] for i=1:length(x)]) diff --git a/test/runtests.jl b/test/runtests.jl index d1b34da77..17ebb3997 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -5,61 +5,61 @@ using CUDA: has_cuda @testset "all" begin # Overall testset ensures it keeps running after failure -if has_cuda() - @testset "CUDA tests" begin - include("cuda.jl") + if has_cuda() + @testset "CUDA tests" begin + include("cuda.jl") + end + @info "CUDA tests have run" + else + @warn "CUDA not found - Skipping CUDA Tests" end - @info "CUDA tests have run" -else - @warn "CUDA not found - Skipping CUDA Tests" -end -@testset "Interface" begin - include("interface.jl") -end + @testset "Interface" begin + include("interface.jl") + end -@testset "Tools" begin - include("tools.jl") -end + @testset "Tools" begin + include("tools.jl") + end -@testset "Utils" begin - include("utils.jl") -end + @testset "Utils" begin + include("utils.jl") + end -@testset "lib" begin - include("lib/number.jl") - include("lib/lib.jl") - include("lib/array.jl") -end + @testset "lib" begin + include("lib/number.jl") + include("lib/lib.jl") + include("lib/array.jl") + end -@testset "Features" begin - include("features.jl") - @info "features.jl done" -end + @testset "Features" begin + include("features.jl") + @info "features.jl done" + end -@testset "Forward" begin - include("forward/forward.jl") -end + @testset "Forward" begin + include("forward/forward.jl") + end -@testset "Data Structures" begin - include("structures.jl") -end + @testset "Data Structures" begin + include("structures.jl") + end -@testset "ChainRules" begin - include("chainrules.jl") - 
@info "chainrules.jl done" -end + @testset "ChainRules" begin + include("chainrules.jl") + @info "chainrules.jl done" + end -@testset "Gradients" begin - include("gradcheck.jl") -end + @testset "Gradients" begin + include("gradcheck.jl") + end -@testset "Complex" begin - include("complex.jl") -end + @testset "Complex" begin + include("complex.jl") + end -@testset "Compiler" begin - include("compiler.jl") -end + @testset "Compiler" begin + include("compiler.jl") + end end # @testset "all" From 8b9916a844958cf9cb2d808689759a03fecb8a3c Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 23 Nov 2021 00:06:50 +0000 Subject: [PATCH 254/490] CompatHelper: bump compat for "SpecialFunctions" to "2" --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index a8ea16a25..d7c0809fe 100644 --- a/Project.toml +++ b/Project.toml @@ -33,7 +33,7 @@ IRTools = "0.4" MacroTools = "0.5" NaNMath = "0.3" Requires = "1.1" -SpecialFunctions = "1.6" +SpecialFunctions = "1.6, 2" StatsFuns = "0.9.8" ZygoteRules = "0.2.1" julia = "1.3" From 9e6f18262c5fa95a30f2c0120af95742221d4cd9 Mon Sep 17 00:00:00 2001 From: Avik Pal Date: Tue, 23 Nov 2021 16:11:20 -0500 Subject: [PATCH 255/490] Fix buffer --- src/lib/buffer.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/buffer.jl b/src/lib/buffer.jl index 4d332c16d..f686340e6 100644 --- a/src/lib/buffer.jl +++ b/src/lib/buffer.jl @@ -46,7 +46,7 @@ _pullback(cx::AContext, ::typeof(Broadcast.materialize!), b::Buffer, x::Abstract @adjoint function copy(b::Buffer) copy(b), function (b̄) - grad_mut(__context__, b)[:] = b̄ + grad_mut(__context__, b)[:] .= b̄ return end end From bc6fd2f20bc7c91c1d1258b582bb32a232ff73f6 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Wed, 24 Nov 2021 20:19:13 -0500 Subject: [PATCH 256/490] Fix `specialize_method` for 1.8 (#1124) * change Core.Compiler.specialize_method for 1.8 * bump version --- Project.toml | 2 +- src/lib/literal_getproperty.jl | 6 +++++- 2 files changed, 6 insertions(+), 2 deletions(-) diff --git a/Project.toml b/Project.toml index a8ea16a25..717c707ab 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.30" +version = "0.6.31" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/src/lib/literal_getproperty.jl b/src/lib/literal_getproperty.jl index 1959e9462..c13f7a89b 100644 --- a/src/lib/literal_getproperty.jl +++ b/src/lib/literal_getproperty.jl @@ -29,7 +29,11 @@ function reflect(@nospecialize(sigtypes::Tuple), world::UInt = typemax(UInt)) end method_index === 0 && return nothing type_signature, raw_static_params, method = _methods[method_index] - method_instance = Core.Compiler.specialize_method(method, type_signature, raw_static_params, false) + if VERSION < v"1.8-" + method_instance = Core.Compiler.specialize_method(method, type_signature, raw_static_params, false) + else + method_instance = Core.Compiler.specialize_method(method, type_signature, raw_static_params; preexisting=false) + end method_signature = method.sig static_params = Any[raw_static_params...] 
return method_instance, method_signature, static_params From 0d80a08633bdcc610406d514d47f8c23792bf085 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Thu, 25 Nov 2021 13:43:46 -0500 Subject: [PATCH 257/490] Make tests pass on 1.8 (#1125) * fix chainrules tests on 1.8 * bump * rm comments --- Project.toml | 4 ++-- test/chainrules.jl | 22 +++++++++++++++------- 2 files changed, 17 insertions(+), 9 deletions(-) diff --git a/Project.toml b/Project.toml index 717c707ab..a04795b53 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.31" +version = "0.6.32" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" @@ -29,7 +29,7 @@ ChainRulesTestUtils = "1" DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12" ForwardDiff = "0.10" -IRTools = "0.4" +IRTools = "0.4.4" MacroTools = "0.5" NaNMath = "0.3" Requires = "1.1" diff --git a/test/chainrules.jl b/test/chainrules.jl index e34be0fa1..bc32c879d 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -1,4 +1,6 @@ using ChainRulesCore, ChainRulesTestUtils, Zygote +using Zygote: ZygoteRuleConfig + @testset "ChainRules integration" begin @testset "ChainRules basics" begin cr_inner_demo_rrule_hitcount = Ref(0) @@ -265,13 +267,12 @@ end @testset "ChainRulesCore.rrule_via_ad" begin @testset "basic" begin - # broken because Zygoye compresses `(NoTangent(), NoTangent())` into just NoTangent() - # which ChainRulesTestUtils does not think is valid: - @test_broken(rrule_via_ad(ZygoteRuleConfig(), round, 2.2) isa Tuple{NoTangent,NoTangent}) - # uncomment below when/if above is fixed - # test_rrule(ZygoteRuleConfig(), round, 2.2; rrule_f=rrule_via_ad) + # Not marked as tests since perhaps ZeroTangent would be better. + rrule_via_ad(ZygoteRuleConfig(), round, 2.2)[2](1) == (NoTangent(), 0.0) + # But test_rrule is happy: + test_rrule(ZygoteRuleConfig(), round, 2.2; rrule_f=rrule_via_ad) - test_rrule(ZygoteRuleConfig(), vcat, rand(3), rand(4); rrule_f=rrule_via_ad, check_inferred=false) + test_rrule(ZygoteRuleConfig(), vcat, rand(3), rand(4); rrule_f=rrule_via_ad) test_rrule(ZygoteRuleConfig(), getindex, rand(5), 3; rrule_f=rrule_via_ad) end @@ -313,10 +314,13 @@ end test_rrule( ZygoteRuleConfig(), my_namedtuple, 1., (2.0, 2.4), 3.; rrule_f=rrule_via_ad ) - test_rrule(ZygoteRuleConfig(), sum, (1.0, 2.0, 3.0); rrule_f=rrule_via_ad) + test_rrule( + ZygoteRuleConfig(), sum, (1.0, 2.0, 3.0); rrule_f=rrule_via_ad, check_inferred=false + ) test_rrule( ZygoteRuleConfig(), sum, (a=1.0, b=2.0); rrule_f=rrule_via_ad, check_inferred=false ) + # There is at present no rrule for sum(::Tuple), so those are testing zygote directly. 
end @testset "arrays" begin @@ -348,6 +352,10 @@ end @test gradient(2.0) do x @fastmath x^2.0 end == (4.0,) + + @test gradient(2) do x + @fastmath log(x) + end == (1/2,) end @testset "zygote2differential inference" begin From 86d1ba6cf9312d60e6321fc835610600eaf264c4 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sun, 5 Dec 2021 16:14:12 -0800 Subject: [PATCH 258/490] Fix incorrect `@forward`ing of `Base.in` on `Params` --- src/compiler/interface.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index dc8cb8d18..c52842945 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -149,7 +149,8 @@ Params(ps::Params) = ps Params(xs::Tuple) = Params(collect(xs)) @forward Params.order Base.iterate, Base.length, Base.getindex -@forward Params.params Base.in + +Base.in(ps::Params, x) = x in ps.params Base.map(::typeof(_project), args::Tuple{Params}, grad) = grad # skip _project in gradient(f, ::Params) From a152aaaa632568cba4aaa661f824c4c639ce3321 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sun, 5 Dec 2021 22:38:03 -0800 Subject: [PATCH 259/490] Add test for `in(x, ::Params)` --- test/interface.jl | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/test/interface.jl b/test/interface.jl index ad1c7f46c..23afdfb1b 100644 --- a/test/interface.jl +++ b/test/interface.jl @@ -1,6 +1,14 @@ using Zygote: Grads @testset "Params" begin + @testset "in" begin + w = rand(2,3) + b = rand(2) + ps = Params([w]) + @test w ∈ ps + @test b ∉ ps + end + @testset "delete!" begin w = rand(2,3) b = rand(2) From abad4e18efe6ba393a26c235d96a7db3b98f1feb Mon Sep 17 00:00:00 2001 From: pakk-minidose <56652555+pakk-minidose@users.noreply.github.com> Date: Mon, 13 Dec 2021 16:24:06 +0100 Subject: [PATCH 260/490] Fix Zygote.jl#1135 --- src/compiler/interface.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index c52842945..88bdf96bd 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -235,7 +235,7 @@ function copy!(x::AbstractVector, ps::Params) x[i+1:i+length(p)] .= vec(p) i += length(p) end - ps + x end """ From 60bb9703aaa97a6629bc95858d9e8b4bf4cb4285 Mon Sep 17 00:00:00 2001 From: pakk-minidose <56652555+pakk-minidose@users.noreply.github.com> Date: Mon, 13 Dec 2021 16:26:32 +0100 Subject: [PATCH 261/490] Changed copy! 
returned value to first argument --- src/compiler/interface.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 88bdf96bd..18c0bd8eb 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -288,7 +288,7 @@ function copy!(gs::Grads, x::AbstractVector) gs[p] .= reshape(x[i+1:i+length(p)], size(p)) i += length(p) end - x + gs end function copy!(x::AbstractVector, gs::Grads) From ff7e3248de1db1281b62b01b3c09d162c2732c22 Mon Sep 17 00:00:00 2001 From: Avik Pal Date: Mon, 13 Dec 2021 11:45:14 -0500 Subject: [PATCH 262/490] Add test for Buffer when it stores arrays --- src/lib/buffer.jl | 11 ++++++++++- test/gradcheck.jl | 17 +++++++++++++++++ 2 files changed, 27 insertions(+), 1 deletion(-) diff --git a/src/lib/buffer.jl b/src/lib/buffer.jl index f686340e6..49b3ab7f0 100644 --- a/src/lib/buffer.jl +++ b/src/lib/buffer.jl @@ -45,8 +45,17 @@ _pullback(cx::AContext, ::typeof(Broadcast.materialize!), b::Buffer, x::Abstract _pullback(cx, copyto!, b, x) @adjoint function copy(b::Buffer) - copy(b), function (b̄) + res = copy(b) + + function copy_sensitivity(b̄) + grad_mut(__context__, b)[:] .= vec(b̄) + return + end + + function copy_sensitivity(b̄::Tuple) grad_mut(__context__, b)[:] .= b̄ return end + + return res, copy_sensitivity end diff --git a/test/gradcheck.jl b/test/gradcheck.jl index ef958da48..2d058c037 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1456,6 +1456,23 @@ using Zygote: Buffer prod(copy(b)) end == (3,) + # Buffer storing arrays test + W1 = ones(3, 3) + W2 = ones(3, 3) + x = ones(3, 1) + + function buffer_arrays(W1, W2, x) + b = Buffer([]) + push!(b, W1 * x) + push!(b, W2 * x) + return sum(vcat(copy(b)...)) + end + + ∇W1, ∇W2, ∇x = gradient((W1, W2, x) -> buffer_arrays(W1, W2, x), W1, W2, x) + + @test ∇W1 == [1.0 1.0 1.0; 1.0 1.0 1.0; 1.0 1.0 1.0] + @test ∇W2 == [1.0 1.0 1.0; 1.0 1.0 1.0; 1.0 1.0 1.0] + @test ∇x == [6.0; 6.0; 6.0;;] end @testset "FillArrays" begin From c494ea2a60e7918c8a3224f0fa4f7cac42540115 Mon Sep 17 00:00:00 2001 From: Avik Pal Date: Mon, 13 Dec 2021 12:16:51 -0500 Subject: [PATCH 263/490] Dims :( --- test/gradcheck.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 2d058c037..90f0a4b4a 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1470,9 +1470,9 @@ using Zygote: Buffer ∇W1, ∇W2, ∇x = gradient((W1, W2, x) -> buffer_arrays(W1, W2, x), W1, W2, x) - @test ∇W1 == [1.0 1.0 1.0; 1.0 1.0 1.0; 1.0 1.0 1.0] - @test ∇W2 == [1.0 1.0 1.0; 1.0 1.0 1.0; 1.0 1.0 1.0] - @test ∇x == [6.0; 6.0; 6.0;;] + @test ∇W1 == W1 + @test ∇W2 == W2 + @test ∇x == 6 .* x end @testset "FillArrays" begin From a9126567fb347e37380c46b5c61c6230200cada9 Mon Sep 17 00:00:00 2001 From: Avik Pal Date: Tue, 14 Dec 2021 11:14:59 -0500 Subject: [PATCH 264/490] Might be vector --- src/lib/buffer.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/buffer.jl b/src/lib/buffer.jl index 49b3ab7f0..8bd374b41 100644 --- a/src/lib/buffer.jl +++ b/src/lib/buffer.jl @@ -52,7 +52,7 @@ _pullback(cx::AContext, ::typeof(Broadcast.materialize!), b::Buffer, x::Abstract return end - function copy_sensitivity(b̄::Tuple) + function copy_sensitivity(b̄::Union{Tuple,Vector{T}}) where {T<:AbstractArray} grad_mut(__context__, b)[:] .= b̄ return end From 19ba3952523e0e15b91ba359ca7b12851b34fd8b Mon Sep 17 00:00:00 2001 From: Avik Pal Date: Thu, 16 Dec 2021 13:52:03 -0500 Subject: [PATCH 265/490] Update src/lib/buffer.jl 
Co-authored-by: Dhairya Gandhi --- src/lib/buffer.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/buffer.jl b/src/lib/buffer.jl index 8bd374b41..e62a70041 100644 --- a/src/lib/buffer.jl +++ b/src/lib/buffer.jl @@ -52,7 +52,7 @@ _pullback(cx::AContext, ::typeof(Broadcast.materialize!), b::Buffer, x::Abstract return end - function copy_sensitivity(b̄::Union{Tuple,Vector{T}}) where {T<:AbstractArray} + function copy_sensitivity(b̄::Union{Tuple,AbstractVector{T}}) where {T<:AbstractArray} grad_mut(__context__, b)[:] .= b̄ return end From 8c88c8d571ac7f1073d1de867c62cc55a6fae618 Mon Sep 17 00:00:00 2001 From: Dhairya Gandhi Date: Fri, 17 Dec 2021 04:09:18 +0530 Subject: [PATCH 266/490] Tag for SpecialFunctions@2 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index e4bd32f73..98585b9fb 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.32" +version = "0.6.33" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From e7c240d0ef7baa665edb692e81cbaac54df4ed51 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 25 Dec 2021 12:35:49 -0800 Subject: [PATCH 267/490] Update Buildkite config for 1.6 LTS and 1.7 Changes borrowed from Flux's setup. --- .buildkite/pipeline.yml | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index c96109781..6d3a048a2 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -1,18 +1,18 @@ steps: - - label: "GPU integration - julia 1.5" + - label: "GPU integration - julia v1.6" plugins: - JuliaCI/julia#v1: - version: "1.5" + version: "1.6" - JuliaCI/julia-test#v1: ~ agents: queue: "juliagpu" cuda: "*" timeout_in_minutes: 60 - - label: "GPU integration - julia 1.6" + - label: "GPU integration - julia v1" plugins: - JuliaCI/julia#v1: - version: '1.6' + version: "1" - JuliaCI/julia-test#v1: ~ agents: queue: "juliagpu" From 3a63df8edb3b613107761ff829ca61ed393ce2dd Mon Sep 17 00:00:00 2001 From: Joe Greener Date: Sat, 8 Jan 2022 19:53:23 +0000 Subject: [PATCH 268/490] Downstream test for Molly.jl (#1145) --- .github/workflows/Downstream.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/Downstream.yml b/.github/workflows/Downstream.yml index 308754b0a..09f4f2f5d 100644 --- a/.github/workflows/Downstream.yml +++ b/.github/workflows/Downstream.yml @@ -24,6 +24,7 @@ jobs: - {user: TuringLang, repo: DistributionsAD.jl, group: Zygote} - {user: SciML, repo: DiffEqFlux.jl, group: Layers} - {user: SciML, repo: NeuralPDE.jl, group: NNPDE} + - {user: JuliaMolSim, repo: Molly.jl, group: Zygote} steps: - uses: actions/checkout@v2 - uses: julia-actions/setup-julia@v1 From 95d61fc317fcbac8438971794c433e609922282d Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 21 Jan 2022 14:43:02 -0500 Subject: [PATCH 269/490] v0.6.34 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 98585b9fb..4b88d68de 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.33" +version = "0.6.34" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 23175c62b4112b6069610439db45cc4aeb471f2c Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Fri, 21 Jan 2022 20:15:51 -0800 Subject: [PATCH 270/490] Add codecov badge --- 
README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 866c6a617..35e90fc53 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,7 @@ [![CI Testing](https://github.com/FluxML/Zygote.jl/workflows/CI/badge.svg)](https://github.com/FluxML/Zygote.jl/actions) +[![Coverage](https://codecov.io/gh/FluxML/Zygote.jl/branch/master/graph/badge.svg)](https://codecov.io/gh/FluxML/Zygote.jl) [![Dev Docs](https://img.shields.io/badge/docs-dev-blue.svg)](https://fluxml.ai/Zygote.jl/dev) `] add Zygote` From 5c0ecf41e008ad57aa573144a3fd0d2cb7f691f4 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Fri, 21 Jan 2022 21:15:03 -0800 Subject: [PATCH 271/490] Update codecov action and only run on stable linux --- .github/workflows/ci.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index eb544c2ec..bab7876a5 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -57,9 +57,9 @@ jobs: JULIA_PKG_SERVER: "" #continue-on-error: ${{ matrix.version == 'nightly' }} # comment out to report nightly failures - uses: julia-actions/julia-processcoverage@v1 - #continue-on-error: ${{ matrix.version == 'nightly' }} # comment out to report nightly failures - - uses: codecov/codecov-action@v1 - #continue-on-error: ${{ matrix.version == 'nightly' }} # comment out to report nightly failures + if: matrix.version == '1' && matrix.os == 'ubuntu-latest' + - uses: codecov/codecov-action@v2 + if: matrix.version == '1' && matrix.os == 'ubuntu-latest' with: file: lcov.info docs: From 8bdfc180ea8da332f2894d5fb7db14715a633ca3 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 17 Feb 2022 00:07:59 +0000 Subject: [PATCH 272/490] CompatHelper: bump compat for "NaNMath" to "1" --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 4b88d68de..caeba7782 100644 --- a/Project.toml +++ b/Project.toml @@ -31,7 +31,7 @@ FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12" ForwardDiff = "0.10" IRTools = "0.4.4" MacroTools = "0.5" -NaNMath = "0.3" +NaNMath = "0.3, 1" Requires = "1.1" SpecialFunctions = "1.6, 2" StatsFuns = "0.9.8" From 6b5729936c006828400df522a17ec8451dba3ab8 Mon Sep 17 00:00:00 2001 From: Samuel Buercklin Date: Thu, 17 Feb 2022 22:51:23 -0500 Subject: [PATCH 273/490] `ntuple` for `_restore` regardless of length (#1163) * removed ternary in _restore * added test for _restore with ntuple fix * added gradient test, remove lib/array test --- src/lib/array.jl | 2 +- test/gradcheck.jl | 16 ++++++++++++++++ 2 files changed, 17 insertions(+), 1 deletion(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 7734ad5ca..35c678310 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -184,7 +184,7 @@ _tryreverse(m::typeof(map), x::Union{AbstractVector, Tuple}) = reverse(x) _tryaxes(x) = axes(x) _tryaxes(x::Tuple) = Val(length(x)) _restore(dx, ax::Tuple) = axes(dx) == ax ? dx : reshape(vcat(dx, falses(prod(length, ax) - length(dx))), ax) -_restore(dx, ::Val{N}) where {N} = length(dx) < N ? ntuple(i -> get(dx,i,nothing), N) : NTuple{N}(dx) +_restore(dx, ::Val{N}) where {N} = ntuple(i -> get(dx,i,nothing), N) # Sometimes a pullback doesn't return a Tuple, but rather returns only a # single nothing to say "all arguments have zero cotangent". 
This function is needed to diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 90f0a4b4a..67e51ec19 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1961,3 +1961,19 @@ end @test g1[1].A isa Number @test size(g1[2]) == size(V) end + +@testset "Zygote #1162" begin + function zygote1162(as, bs) + results = [f1162(a, b) for (a, b) in zip(as, bs)] + return results[2][1] + results[2][2] + end + function f1162(a, b) + return [a^2, b^2] + end + + as = (1.0, 2.0, 3.0) + bs = (4.0, 5.0, 6.0) + + g = Zygote.gradient(zygote1162, as, bs) + @test g == ((nothing, 2*as[2], nothing), (nothing, 2*bs[2], nothing)) +end From acedd2883a927712d120a60482ef5255d2d0de7d Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 17 Feb 2022 22:55:10 -0500 Subject: [PATCH 274/490] CompatHelper: bump compat for "FillArrays" to "0.13" (#1165) Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 4b88d68de..f2e13f384 100644 --- a/Project.toml +++ b/Project.toml @@ -27,7 +27,7 @@ ChainRules = "1.5" ChainRulesCore = "1.9" ChainRulesTestUtils = "1" DiffRules = "1.0" -FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12" +FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12, 0.13" ForwardDiff = "0.10" IRTools = "0.4.4" MacroTools = "0.5" From 36698f74048e79c7a2c542e2cd90a802ea5e1bf4 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Fri, 18 Feb 2022 11:19:56 +0000 Subject: [PATCH 275/490] update README to reference ChainRulesCore to define custom gradients --- README.md | 13 +++++++++---- 1 file changed, 9 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 35e90fc53..c945f1ee2 100644 --- a/README.md +++ b/README.md @@ -30,7 +30,8 @@ top: "Source-to-source" means that Zygote hooks into Julia's compiler, and generates the backwards pass for you – as if you had written it by hand. -Zygote supports the full flexibility and dynamism of the Julia language, including control flow, recursion, closures, structs, dictionaries, and more. +Zygote supports the flexibility and dynamism of the Julia language, including control flow, recursion, closures, structs, dictionaries, and more. +Mutation and exception handling are currently not supported. ```julia julia> fs = Dict("sin" => sin, "cos" => cos, "tan" => tan); @@ -40,14 +41,18 @@ sin 0.5403023058681398 ``` -Defining custom gradients is a cinch, and errors have good stacktraces. +Zygote benefits from using the [ChainRules.jl](https://github.com/JuliaDiff/ChainRules.jl) ruleset. +Custom gradients can be defined by extending the [ChainRulesCore.jl](https://github.com/JuliaDiff/ChainRulesCore.jl)'s `rrule`: ```julia -julia> using Zygote: @adjoint +julia> using ChainRulesCore julia> add(a, b) = a + b -julia> @adjoint add(a, b) = add(a, b), Δ -> (Δ, Δ) +julia> function ChainRulesCore.rrule(::typeof(add), a, b) + add_pb(dy) = (NoTangent(), dy, dy) + return add(a, b), add_pb + end ``` To support large machine learning models with many parameters, Zygote can differentiate implicitly-used parameters, as opposed to just function arguments. 
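The README hunk above ends by mentioning Zygote's implicit parameters without showing them in use. As a minimal sketch of that style (the arrays `W`, `b` and the input `x` below are invented for illustration and come from no patch in this series), the `Params`/`Grads` API can be exercised like this:

```julia
using Zygote

W, b = rand(2, 3), rand(2)   # hypothetical parameters
x = rand(3)                  # hypothetical input

# Params marks W and b as implicit parameters: the closure never takes
# them as arguments, yet gradients for both are still collected.
gs = gradient(Params([W, b])) do
    sum(W * x .+ b)
end

gs[W]  # gradient with respect to W, same shape as W
gs[b]  # gradient with respect to b
```

These are the same `Params` and `Grads` types that the `copy!` fixes earlier in this series operate on.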
From 87872e708573e60d10815dd1b2fb8473f41f9efd Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 20 Feb 2022 22:16:05 -0500 Subject: [PATCH 276/490] Broadcast rule for types (#1171) * broadcast rule for type * test on sparse arrays --- Project.toml | 1 + src/lib/broadcast.jl | 3 +++ test/gradcheck.jl | 6 +++++- 3 files changed, 9 insertions(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index e15f61f9f..a54f14977 100644 --- a/Project.toml +++ b/Project.toml @@ -17,6 +17,7 @@ MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" Requires = "ae029012-a4dd-5104-9daa-d747884805df" +SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 8833436a0..78816e4f7 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -144,6 +144,9 @@ end end end +@adjoint broadcasted(::Type{T}, x::Numeric) where T = + T.(x), ȳ -> (nothing, _project(x, ȳ),) + # General Fallback # ================ diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 67e51ec19..7c45d26d2 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1,4 +1,4 @@ -using Zygote, Test, Random, LinearAlgebra, Statistics, FillArrays, +using Zygote, Test, Random, LinearAlgebra, Statistics, SparseArrays, FillArrays, AbstractFFTs, FFTW, Distances using Zygote: gradient using Base.Broadcast: broadcast_shape @@ -1406,6 +1406,10 @@ end @test all(gradient((x,y) -> sum(x .* y), 5, [1,2]) .≈ (3, [5, 5])) @test all(gradient((x,y) -> sum(x .* y), [1,2], [3 4 5]) .≈ ([12, 12], [3 3 3])) @test all(gradient((x,y) -> sum(x ./ y), [1,2], 5) .≈ ([0.2, 0.2], -0.12)) + + sm = sprand(5, 5, 0.5) + @test gradient(x -> sum(abs2, Float32.(x)), sm)[1] ≈ gradient(x -> sum(abs2, x), Matrix{Float32}(sm))[1] + @test gradient(x -> real(sum(ComplexF32.(x) .+ 1 .+ im)), sm)[1] isa SparseMatrixCSC{Float64} end using Zygote: Buffer From e56375e08ac6191f32713f4d63c087655a2b4fd3 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 22 Feb 2022 00:45:27 -0500 Subject: [PATCH 277/490] v0.6.35 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index a54f14977..ce5c48d1c 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.34" +version = "0.6.35" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 8b9d67dbc29ef438c65e9ae92ce5c68c2188cd6b Mon Sep 17 00:00:00 2001 From: Lorenzo Van Munoz <66997677+lxvm@users.noreply.github.com> Date: Tue, 22 Feb 2022 16:39:02 -0800 Subject: [PATCH 278/490] Fix adjoint Iterators.product behavior with nothing (#1170) * Fix adjoint Iterators.product behavior with nothing * Apply suggestions from code review Co-authored-by: Michael Abbott <32575566+mcabbott@users.noreply.github.com> * add Iterators.product adjoint tests * Update test/lib/array.jl Co-authored-by: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Co-authored-by: Michael Abbott <32575566+mcabbott@users.noreply.github.com> --- src/lib/array.jl | 2 +- test/lib/array.jl | 14 +++++++++++++- 2 files changed, 14 insertions(+), 2 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 
35c678310..6f5386c32 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -286,10 +286,10 @@ _ndims(x) = Base.IteratorSize(x) isa Base.HasShape ? _ndims(Base.IteratorSize(x) function back(dy::AbstractArray) d = 1 ntuple(length(xs)) do n - first(dy)[n] === nothing && return nothing nd = _ndims(xs[n]) dims = ntuple(i -> isum(sin, Diagonal(x)), ones(2); rrule_f=rrule_via_ad, check_inferred=false) test_rrule(ZygoteRuleConfig(), x->sum(sin, Diagonal(x)), rand(3); rrule_f=rrule_via_ad, check_inferred=false) + +@testset "adjoints of Iterators.product" begin + y, back = _pullback(Iterators.product, 1:5, 1:3, 1:2) + @test back(collect(y)) == (nothing, [6.0, 12.0, 18.0, 24.0, 30.0], [10.0, 20.0, 30.0], [15.0, 30.0]) + @test back([(nothing, j, k) for i in 1:5, j in 1:3, k in 1:2]) == (nothing, nothing, [10.0, 20.0, 30.0], [15.0, 30.0]) + @test back([(i, nothing, k) for i in 1:5, j in 1:3, k in 1:2]) == (nothing, [6.0, 12.0, 18.0, 24.0, 30.0], nothing, [15.0, 30.0]) + @test back([(i, j, nothing) for i in 1:5, j in 1:3, k in 1:2]) == (nothing, [6.0, 12.0, 18.0, 24.0, 30.0], [10.0, 20.0, 30.0], nothing) + + # This was wrong before https://github.com/FluxML/Zygote.jl/pull/1170 + @test gradient(x -> sum([y[2] * y[3] for y in Iterators.product(x, x, x, x)]), [1,2,3,4])[1] ≈ [320, 320, 320, 320] + @test gradient(x -> sum(y[2] * y[3] for y in Iterators.product(x, x, x, x)), [1,2,3,4])[1] ≈ [320, 320, 320, 320] +end From 2a2095cccc31af5dfe47981033b7c99be0aafb01 Mon Sep 17 00:00:00 2001 From: James Atkins Date: Thu, 10 Mar 2022 04:46:40 +0000 Subject: [PATCH 279/490] Fix error in example (#1176) --- docs/src/adjoints.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/adjoints.md b/docs/src/adjoints.md index 1d6ddc527..34aa5d18f 100644 --- a/docs/src/adjoints.md +++ b/docs/src/adjoints.md @@ -68,7 +68,7 @@ julia> mygradient(sin, 0.5) The rest of this section contains more technical detail. It can be skipped if you only need an intuition for pullbacks; you generally won't need to worry about it as a user. -If ``x`` and ``y`` are vectors, ``\frac{\partial y}{\partial x}`` becomes a Jacobian. Importantly, because we are implementing reverse mode we actually left-multiply the Jacobian, i.e. `v'J`, rather than the more usual `J*v`. Transposing `v` to a row vector and back `(v'J)'` is equivalent to `J'v` so our gradient rules actually implement the *adjoint* of the Jacobian. This is relevant even for scalar code: the adjoint for `y = sin(x)` is `x̄ = sin(x)'*ȳ`; the conjugation is usually moot but gives the correct behaviour for complex code. "Pullbacks" are therefore sometimes called "vector-Jacobian products" (VJPs), and we refer to the reverse mode rules themselves as "adjoints". +If ``x`` and ``y`` are vectors, ``\frac{\partial y}{\partial x}`` becomes a Jacobian. Importantly, because we are implementing reverse mode we actually left-multiply the Jacobian, i.e. `v'J`, rather than the more usual `J*v`. Transposing `v` to a row vector and back `(v'J)'` is equivalent to `J'v` so our gradient rules actually implement the *adjoint* of the Jacobian. This is relevant even for scalar code: the adjoint for `y = sin(x)` is `x̄ = cos(x)'*ȳ`; the conjugation is usually moot but gives the correct behaviour for complex code. "Pullbacks" are therefore sometimes called "vector-Jacobian products" (VJPs), and we refer to the reverse mode rules themselves as "adjoints". Zygote has many adjoints for non-mathematical operations such as for indexing and data structures. 
Though these can still be seen as linear functions of vectors, it's not particularly enlightening to implement them with an actual matrix multiply. In these cases it's easiest to think of the adjoint as a kind of inverse. For example, the gradient of a function that takes a tuple to a struct (e.g. `y = Complex(a, b)`) will generally take a struct to a tuple (`(ȳ.re, ȳ.im)`). The gradient of a `getindex` `y = x[i...]` is a `setindex!` `x̄[i...] = ȳ`, etc. From 843a52d6a069fdaadc6559d0db084a73eb6058f7 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Thu, 10 Mar 2022 00:41:02 -0500 Subject: [PATCH 280/490] Restrict type broadcast rule to numbers (#1179) * restrict type broadcast to number * add a test --- Project.toml | 2 +- src/lib/broadcast.jl | 2 +- test/gradcheck.jl | 8 ++++++++ 3 files changed, 10 insertions(+), 2 deletions(-) diff --git a/Project.toml b/Project.toml index ce5c48d1c..0a1fa489e 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.35" +version = "0.6.36" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 78816e4f7..f2b0e0709 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -144,7 +144,7 @@ end end end -@adjoint broadcasted(::Type{T}, x::Numeric) where T = +@adjoint broadcasted(::Type{T}, x::Numeric) where {T<:Number} = T.(x), ȳ -> (nothing, _project(x, ȳ),) # General Fallback diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 7c45d26d2..b3b1e2969 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1407,9 +1407,17 @@ end @test all(gradient((x,y) -> sum(x .* y), [1,2], [3 4 5]) .≈ ([12, 12], [3 3 3])) @test all(gradient((x,y) -> sum(x ./ y), [1,2], 5) .≈ ([0.2, 0.2], -0.12)) + # https://github.com/FluxML/Zygote.jl/pull/1171 sm = sprand(5, 5, 0.5) @test gradient(x -> sum(abs2, Float32.(x)), sm)[1] ≈ gradient(x -> sum(abs2, x), Matrix{Float32}(sm))[1] @test gradient(x -> real(sum(ComplexF32.(x) .+ 1 .+ im)), sm)[1] isa SparseMatrixCSC{Float64} + + # https://github.com/FluxML/Zygote.jl/issues/1178 + function f1179(x) + fs = Ref.(x) + getindex.(fs) + end + @test gradient(sum∘f1179, ones(2)) == ([2.0, 2.0],) end using Zygote: Buffer From 403c3ae0bf66c513b26d5ea0707eda37dd6125a0 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 12 Mar 2022 14:04:37 -0800 Subject: [PATCH 281/490] Use Base.IdSet GitHub tells me this has been available since at least 0.7. --- src/Zygote.jl | 1 - src/compiler/interface.jl | 4 ++-- src/profiler/Profile.jl | 4 ++-- src/tools/idset.jl | 21 --------------------- 4 files changed, 4 insertions(+), 26 deletions(-) delete mode 100644 src/tools/idset.jl diff --git a/src/Zygote.jl b/src/Zygote.jl index 85b71359f..d537efbb7 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -18,7 +18,6 @@ export rrule_via_ad const Numeric{T<:Number} = Union{T, AbstractArray{<:T}} -include("tools/idset.jl") include("tools/buffer.jl") include("tools/builtins.jl") diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 18c0bd8eb..38bc328e0 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -1,7 +1,7 @@ using InteractiveUtils using InteractiveUtils: typesof using Core: Typeof -import Base: copy! +import Base: copy!, IdSet import Base.Broadcast: broadcasted, materialize! 
mutable struct Context <: AContext @@ -144,7 +144,7 @@ struct Params end Params() = Params(Buffer([], false), IdSet()) -Params(xs) = Params(Buffer(xs, false), IdSet(xs)) +Params(xs) = Params(Buffer(xs, false), IdSet{Any}(xs)) Params(ps::Params) = ps Params(xs::Tuple) = Params(collect(xs)) diff --git a/src/profiler/Profile.jl b/src/profiler/Profile.jl index a351a7df1..d2a16aeff 100644 --- a/src/profiler/Profile.jl +++ b/src/profiler/Profile.jl @@ -1,7 +1,7 @@ module Profile using Requires -using ..Zygote: Pullback, IdSet, meta, stacklines +using ..Zygote: Pullback, meta, stacklines function loc(f) # TODO perhaps find most general method @@ -36,7 +36,7 @@ function mem(x, seen) sum(x -> mem(x, seen), fields(x)) end -mem(x) = mem(x, IdSet()) +mem(x) = mem(x, Base.IdSet()) struct Node func::Symbol diff --git a/src/tools/idset.jl b/src/tools/idset.jl deleted file mode 100644 index a0aa93df0..000000000 --- a/src/tools/idset.jl +++ /dev/null @@ -1,21 +0,0 @@ -struct IdSet{T} <: AbstractSet{T} - dict::IdDict{T,Nothing} - IdSet{T}() where T = new(IdDict{T,Nothing}()) -end - -IdSet(xs) = IdSet{eltype(xs)}(xs) - -IdSet() = IdSet{Any}() - -IdSet{T}(xs) where T = isempty(xs) ? IdSet{T}() : push!(IdSet{T}(), xs...) - -Base.push!(s::IdSet{T}, x::T) where T = (s.dict[x] = nothing; s) -Base.delete!(s::IdSet{T}, x::T) where T = (delete!(s.dict, x); s) -Base.in(x, s::IdSet) = haskey(s.dict, x) -Base.eltype(::IdSet{T}) where T = T -Base.collect(s::IdSet) = Base.collect(keys(s.dict)) -Base.similar(s::IdSet, T::Type) = IdSet{T}() - -@forward IdSet.dict Base.length - -Base.iterate(s::IdSet, st...) = iterate(keys(s.dict), st...) From c45fa66eb66eb2dabb0f3fddc123efbfbe29807b Mon Sep 17 00:00:00 2001 From: Sebastian Ament Date: Mon, 14 Mar 2022 12:39:51 +0100 Subject: [PATCH 282/490] fixing type ambiguity of unbroadcast --- Project.toml | 2 +- src/lib/broadcast.jl | 12 +++++++----- 2 files changed, 8 insertions(+), 6 deletions(-) diff --git a/Project.toml b/Project.toml index 0a1fa489e..19ee30869 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.36" +version = "0.6.37" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index f2b0e0709..6dbfdb829 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -59,6 +59,8 @@ unbroadcast(x::Tuple{<:Any}, x̄) = (accum_sum(x̄),) unbroadcast(x::Base.RefValue, x̄) = (x=accum_sum(x̄),) unbroadcast(x::Tuple, x̄) = NTuple{length(x)}(length(x) == length(x̄) ? 
x̄ : accum_sum(x̄; dims=2:ndims(x̄))) # case length(x) > 1 unbroadcast(x::Tuple, x̄::Nothing) = nothing +# fixing issue #1184, not duplicate method, since the above allows for an empty tuple +unbroadcast(x::Tuple{<:Any}, x̄::Nothing) = nothing unbroadcast(x::AbstractArray, x̄::Nothing) = nothing @@ -81,7 +83,7 @@ _minus(::Nothing) = nothing Δ -> (nothing, unbroadcast(x, Δ .* conj.(y)), unbroadcast(y, Δ .* conj.(x))) @adjoint broadcasted(::typeof(*), x::Number, y::AbstractArray{<:Number}) = _pullback(*, x, y) # this uses dot(y,Δ) instead of sum(Δ .* conj.(y)) -@adjoint broadcasted(::typeof(*), x::AbstractArray{<:Number}, y::Number) = +@adjoint broadcasted(::typeof(*), x::AbstractArray{<:Number}, y::Number) = _pullback(*, x, y) @adjoint function broadcasted(::typeof(/), x::Numeric, y::Numeric) @@ -181,7 +183,7 @@ _dual_safearg(x) = false T = Broadcast.combine_eltypes(f, args) # Avoid generic broadcasting in two easy cases: if T == Bool - return (f.(args...), _ -> nothing) + return (f.(args...), _ -> nothing) elseif T <: Real && isconcretetype(T) && _dual_purefun(F) && all(_dual_safearg, args) && !isderiving() return broadcast_forward(f, args...) end @@ -260,7 +262,7 @@ end @eval @adjoint broadcasted(::CuArrayStyle, f, args...) = broadcast_forward(CUDA.cufunc(f), args...) - else # CUDA >= 3.0 -- don't need cufunc(f). + else # CUDA >= 3.0 -- don't need cufunc(f). # Ordinary broadcasting calls broadcast_forward anyway when certain its' safe, # so perhaps this can be deleted? Possible edge case here: # https://github.com/FluxML/Zygote.jl/pull/1018#issuecomment-873629415 @@ -277,14 +279,14 @@ end placeholder = similar(xs) sum(xs, dims = dims), Δ -> (placeholder .= Δ,) end - + # Make sure sum(f, ::CuArray) uses broadcase through forward-mode defined above # Not the ChainRules.rrule which will use the Zygote.Context and thus not be GPU compatible @adjoint function sum(f, xs::CUDA.AbstractGPUArray; kws...) 
@assert !haskey(kws, :init) # TODO add init support (julia 1.6) return pullback(__context__, (f, xs) -> sum(f.(xs); kws...), f, xs) end - + @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:CUDA.AbstractGPUArray} Base.convert(T, xs), Δ -> (nothing, Base.convert(Array, Δ),) end From e47114be5bf02d2bceb8351a2178f0bab978ef70 Mon Sep 17 00:00:00 2001 From: Sebastian Ament Date: Wed, 16 Mar 2022 10:40:56 +0100 Subject: [PATCH 283/490] added type ambiguity test --- test/gradcheck.jl | 35 ++++++++++++++++++++++------------- 1 file changed, 22 insertions(+), 13 deletions(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index b3b1e2969..ac0dd28bf 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -360,7 +360,7 @@ end @test gradient(x -> map(+, x, (1,2,3))[1], [4,5,6]) == ([1,0,0],) # mismatched lengths, should zip - @test gradient(x -> map(+, x, [1,2,3,99])[1], (4,5,6)) == ((1.0, 0.0, 0.0),) + @test gradient(x -> map(+, x, [1,2,3,99])[1], (4,5,6)) == ((1.0, 0.0, 0.0),) @test gradient(x -> map(+, x, [1,2,3])[1], (4,5,6,99)) == ((1.0, 0.0, 0.0, nothing),) end @@ -1386,7 +1386,7 @@ end end @testset "broadcast" begin - # Before https://github.com/FluxML/Zygote.jl/pull/1001 this gave [1 1 1; 1 0 1; 1 1 -1] + # Before https://github.com/FluxML/Zygote.jl/pull/1001 this gave [1 1 1; 1 0 1; 1 1 -1] @test gradient(x -> sum(sin.(x)), Diagonal([0,pi/2,pi]))[1] ≈ [1 0 0; 0 0 0; 0 0 -1] a = rand(3) @@ -1487,17 +1487,6 @@ using Zygote: Buffer @test ∇x == 6 .* x end -@testset "FillArrays" begin - @test gradcheck(x->sum(Fill(x[], (2, 2))), [0.1]) - @test first(Zygote.gradient(sz->sum(Ones(sz)), 6)) === nothing - @test first(Zygote.gradient(sz->sum(Zeros(sz)), 6)) === nothing - @test gradcheck(x->Fill(x[], 5).value, [0.1]) - @test gradcheck(x->FillArrays.getindex_value(Fill(x[], 5)), [0.1]) - - @test first(Zygote.pullback(Ones{Float32}, 10)) isa Ones{Float32} - @test first(Zygote.pullback(Zeros{Float32}, 10)) isa Zeros{Float32} -end - @testset "AbstractArray Addition / Subtraction / Negation" begin rng, M, N, P = MersenneTwister(123567), 3, 7, 11 A, B = randn(rng, M, N, P), randn(rng, M, N, P) @@ -1623,6 +1612,16 @@ end end @testset "FillArrays" begin + + @test gradcheck(x->sum(Fill(x[], (2, 2))), [0.1]) + @test first(Zygote.gradient(sz->sum(Ones(sz)), 6)) === nothing + @test first(Zygote.gradient(sz->sum(Zeros(sz)), 6)) === nothing + @test gradcheck(x->Fill(x[], 5).value, [0.1]) + @test gradcheck(x->FillArrays.getindex_value(Fill(x[], 5)), [0.1]) + + @test first(Zygote.pullback(Ones{Float32}, 10)) isa Ones{Float32} + @test first(Zygote.pullback(Zeros{Float32}, 10)) isa Zeros{Float32} + rng, M, N = MersenneTwister(123456), 7, 11 x, y = randn(rng), randn(rng) @test Zygote.gradient(x->sum(Fill(x, N)), x)[1] == N @@ -1989,3 +1988,13 @@ end g = Zygote.gradient(zygote1162, as, bs) @test g == ((nothing, 2*as[2], nothing), (nothing, 2*bs[2], nothing)) end + +@testset "Zygote #1184" begin + n, d = 3, 2 + x = [randn(d) for _ in 1:n] + + f = sin + g(x) = sum.((f,), x) + h(x) = sum(abs2, g(x)) + @test gradient(h, x)[1] isa typeof(x) +end From 3928ab9e6dd6dae3bf6df7882ec444aba1102628 Mon Sep 17 00:00:00 2001 From: Aman Sharma <76823502+arcAman07@users.noreply.github.com> Date: Sun, 3 Apr 2022 15:38:46 +0530 Subject: [PATCH 284/490] Ton of doctests added (#1194) * Ton of doctests added to index.md * Ton of doctests added to index.md * Ton of doctests added to index.md * Ton of doctests added to index.md * Ton of doctests added to index.md * Ton of doctests added to index.md * outdated example fixed * 
outdated example fixed * outdated example fixed * outdated example fixed * doctests added to adjoints.md * doctests added to adjoints.md * doctests added to adjoints.md * doctests added to adjoints.md * outdated example updated * More doctests added * More doctests added * doctest added and checked properly --- docs/make.jl | 1 + docs/src/adjoints.md | 28 +++++++++++++++------------- docs/src/complex.md | 12 +++++++----- docs/src/index.md | 20 ++++++++++---------- 4 files changed, 33 insertions(+), 28 deletions(-) diff --git a/docs/make.jl b/docs/make.jl index 9e5dbb3f6..9d2f549c9 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -8,6 +8,7 @@ using Documenter, Zygote makedocs( sitename="Zygote", + doctest = true, pages = [ "Home" => "index.md", "Custom Adjoints" => "adjoints.md", diff --git a/docs/src/adjoints.md b/docs/src/adjoints.md index 34aa5d18f..2ee094e78 100644 --- a/docs/src/adjoints.md +++ b/docs/src/adjoints.md @@ -18,7 +18,9 @@ The `@adjoint` macro is an important part of Zygote's interface; customising you `gradient` is really just syntactic sugar around the more fundamental function `pullback`. -```julia +```jldoctest adjoints +julia> using Zygote + julia> y, back = Zygote.pullback(sin, 0.5); julia> y @@ -55,7 +57,7 @@ julia> cos(0.5) More generally -```julia +```jldoctest adjoints julia> function mygradient(f, x...) _, back = Zygote.pullback(f, x...) back(1) @@ -76,15 +78,15 @@ Zygote has many adjoints for non-mathematical operations such as for indexing an We can extend Zygote to a new function with the `@adjoint` function. -```julia -julia> mul(a, b) = a*b +```jldoctest adjoints +julia> mul(a, b) = a*b; julia> using Zygote: @adjoint julia> @adjoint mul(a, b) = mul(a, b), c̄ -> (c̄*b, c̄*a) julia> gradient(mul, 2, 3) -(3, 2) +(3.0, 2.0) ``` It might look strange that we write `mul(a, b)` twice here. In this case we want to call the normal `mul` function for the pullback pass, but you may also want to modify the pullback pass (for example, to capture intermediate results in the pullback). @@ -152,7 +154,7 @@ We usually use custom adjoints to add gradients that Zygote can't derive itself ### Gradient Hooks -```julia +```jldoctest adjoints julia> hook(f, x) = x hook (generic function with 1 method) @@ -161,17 +163,17 @@ julia> @adjoint hook(f, x) = x, x̄ -> (nothing, f(x̄)) `hook` doesn't seem that interesting, as it doesn't do anything. But the fun part is in the adjoint; it's allowing us to apply a function `f` to the gradient of `x`. -```julia +```jldoctest adjoints julia> gradient((a, b) -> hook(-, a)*b, 2, 3) -(-3, 2) +(-3.0, 2.0) ``` We could use this for debugging or modifying gradients (e.g. gradient clipping). -```julia +```jldoctest adjoints julia> gradient((a, b) -> hook(ā -> @show(ā), a)*b, 2, 3) -ā = 3 -(3, 2) +ā = 3.0 +(3.0, 2.0) ``` Zygote provides both `hook` and `@showgrad` so you don't have to write these yourself. @@ -180,7 +182,7 @@ Zygote provides both `hook` and `@showgrad` so you don't have to write these you A more advanced example is checkpointing, in which we save memory by re-computing the pullback pass of a function during the backwards pass. To wit: -```julia +```jldoctest adjoints julia> checkpoint(f, x) = f(x) checkpoint (generic function with 1 method) @@ -192,7 +194,7 @@ julia> gradient(x -> checkpoint(sin, x), 1) If a function has side effects we'll see that the pullback pass happens twice, as expected. 
-```julia +```jldoctest adjoints julia> foo(x) = (println(x); sin(x)) foo (generic function with 1 method) diff --git a/docs/src/complex.md b/docs/src/complex.md index 4013bb112..2c82bf8b6 100644 --- a/docs/src/complex.md +++ b/docs/src/complex.md @@ -4,30 +4,32 @@ Complex numbers add some difficulty to the idea of a "gradient". To talk about ` If `f` returns a real number, things are fairly straightforward. For ``c = x + yi`` and ``z = f(c)``, we can define the adjoint ``\bar c = \frac{\partial z}{\partial x} + \frac{\partial z}{\partial y}i = \bar x + \bar y i`` (note that ``\bar c`` means gradient, and ``c'`` means conjugate). It's exactly as if the complex number were just a pair of reals `(re, im)`. This works out of the box. -```julia +```jldoctest complex +julia> using Zygote + julia> gradient(c -> abs2(c), 1+2im) -(2 + 4im,) +(2.0 + 4.0im,) ``` However, while this is a very pragmatic definition that works great for gradient descent, it's not quite aligned with the mathematical notion of the derivative: i.e. ``f(c + \epsilon) \approx f(c) + \bar c \epsilon``. In general, such a ``\bar c`` is not possible for complex numbers except when `f` is *holomorphic* (or *analytic*). Roughly speaking this means that the function is defined over `c` as if it were a normal real number, without exploiting its complex structure – it can't use `real`, `imag`, `conj`, or anything that depends on these like `abs2` (`abs2(x) = x*x'`). (This constraint also means there's no overlap with the Real case above; holomorphic functions always return complex numbers for complex input.) But most "normal" numerical functions – `exp`, `log`, anything that can be represented by a Taylor series – are fine. Fortunately it's also possible to get these derivatives; they are the conjugate of the gradients for the real part. -```julia +```jldoctest complex julia> gradient(x -> real(log(x)), 1+2im)[1] |> conj 0.2 - 0.4im ``` We can check that this function is holomorphic – and thus that the gradient we got out is sensible – by checking the Cauchy-Riemann equations. In other words this should give the same answer: -```julia +```jldoctest complex julia> -im*gradient(x -> imag(log(x)), 1+2im)[1] |> conj 0.2 - 0.4im ``` Notice that this fails in a non-holomorphic case, `f(x) = log(x')`: -```julia +```jldoctest complex julia> gradient(x -> real(log(x')), 1+2im)[1] |> conj 0.2 - 0.4im diff --git a/docs/src/index.md b/docs/src/index.md index 1eec9768f..3476d5e7d 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -18,18 +18,18 @@ Zygote is easy to understand since, at its core, it has a one-function API (`pul `gradient` calculates derivatives. For example, the derivative of ``3x^2 + 2x + 1`` is ``6x + 2``, so when `x = 5`, `dx = 32`. -```julia +```jldoctest index julia> using Zygote julia> gradient(x -> 3x^2 + 2x + 1, 5) -(32,) +(32.0,) ``` `gradient` returns a tuple, with a gradient for each argument to the function. -```julia +```jldoctest index julia> gradient((a, b) -> a*b, 2, 3) -(3, 2) +(3.0, 2.0) ``` This will work equally well if the arguments are arrays, structs, or any other Julia type, but the function should return a scalar (like a loss or objective ``l``, if you're doing optimisation / ML). @@ -48,7 +48,7 @@ julia> gradient(x -> 3x^2 + 2x + 1, 1//4) Control flow is fully supported, including recursion. 
-```julia +```jldoctest index julia> function pow(x, n) r = 1 for i = 1:n @@ -59,26 +59,26 @@ julia> function pow(x, n) pow (generic function with 1 method) julia> gradient(x -> pow(x, 3), 5) -(75,) +(75.0,) julia> pow2(x, n) = n <= 0 ? 1 : x*pow2(x, n-1) pow2 (generic function with 1 method) julia> gradient(x -> pow2(x, 3), 5) -(75,) +(75.0,) ``` Data structures are also supported, including mutable ones like dictionaries. Arrays are currently immutable, though [this may change](https://github.com/FluxML/Zygote.jl/pull/75) in future. -```julia +```jldoctest index julia> d = Dict() -Dict{Any,Any} with 0 entries +Dict{Any, Any}() julia> gradient(5) do x d[:x] = x d[:x] * d[:x] end -(10,) +(10.0,) julia> d[:x] 5 From cb5f279faf5947cf54cfe2404870df2c61114718 Mon Sep 17 00:00:00 2001 From: Aman Date: Wed, 6 Apr 2022 22:23:47 +0530 Subject: [PATCH 285/490] Fixing spelling error in the docs --- docs/src/adjoints.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/adjoints.md b/docs/src/adjoints.md index 2ee094e78..b38c07a14 100644 --- a/docs/src/adjoints.md +++ b/docs/src/adjoints.md @@ -9,7 +9,7 @@ This page exists to descibe how Zygote works, and how adjoints can be directly defined for Zygote. Defining adjoints this way does not make them accessible to other AD systems, but does let you do things that directly depend on how Zygote works. - It allows for specific definitions of adjoints that are only defined for Zgyote (which might work differently to more generic definitions defined for all AD). + It allows for specific definitions of adjoints that are only defined for Zygote (which might work differently to more generic definitions defined for all AD). The `@adjoint` macro is an important part of Zygote's interface; customising your backwards pass is not only possible but widely used and encouraged. While there are specific utilities available for common things like gradient clipping, understanding adjoints will give you the most flexibility. We first give a bit more background on what these pullback things are. From c560ed199a8b72793285a4f076868288a0a3bea9 Mon Sep 17 00:00:00 2001 From: Christian Rorvik Date: Wed, 13 Apr 2022 17:16:51 +0200 Subject: [PATCH 286/490] Fix type stability of Params.order --- src/compiler/interface.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 38bc328e0..0319d5268 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -138,8 +138,8 @@ gradient Container for implicit parameters, used when differentiating a zero-argument funtion `() -> loss(A, B)` with respect to `A, B`. 
""" -struct Params - order::Buffer # {Any, Vector{Any}} +struct Params{B <: Buffer} + order::B params::IdSet{Any} # TODO store ids only end From b15eff13557de05279f3fd1f82734b218647ec88 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Wed, 13 Apr 2022 23:38:07 +0200 Subject: [PATCH 287/490] Handle `ChainRulesCore.NotImplemented` --- Project.toml | 2 +- src/compiler/chainrules.jl | 1 + test/chainrules.jl | 12 ++++++++++++ 3 files changed, 14 insertions(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 19ee30869..9d531789d 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.37" +version = "0.6.38" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index b3157f289..99d8f4652 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -106,6 +106,7 @@ Convert `x` from the differentials types ChainRules uses to the format Zygote us # Zygote convention: even if many AbstractZero partials (i.e. multi-input function), make just 1 nothing. @inline wrap_chainrules_output(x::Tuple{Vararg{ChainRules.AbstractZero}}) = nothing @inline wrap_chainrules_output(x::ChainRules.AbstractZero) = nothing +@inline wrap_chainrules_output(x::ChainRulesCore.NotImplemented) = nothing for T_outer in (:Tuple, :NamedTuple) # we create separate methods rather than using a `Union` + an `if` so that we avoid a # branch that changes output type, because nested AD on that kinda thing makes Zygote less diff --git a/test/chainrules.jl b/test/chainrules.jl index bc32c879d..94ab9584a 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -263,6 +263,18 @@ using Zygote: ZygoteRuleConfig @test (1.0,) == Zygote.gradient(oout_id_outer, π) @test oout_id_rrule_hitcount[] == 0 end + + # issue #1204 + @testset "NotImplemented" begin + f_notimplemented(x) = x + @scalar_rule f_notimplemented(x) @not_implemented("not implemented :(") + @test Zygote.gradient(f_notimplemented, 0.1) === (nothing,) + @test Zygote.gradient(x -> f_notimplemented(x[1]), 0.1) === (nothing,) + if isdefined(Base, :only) + @test Zygote.gradient(x -> f_notimplemented(only(x)), (0.1,)) === (nothing,) + @test Zygote.gradient(x -> f_notimplemented(only(x)), [0.1]) === (nothing,) + end + end end @testset "ChainRulesCore.rrule_via_ad" begin From 6299e5e7cac4a6e182fe01b2663552240a7a46d1 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sun, 17 Apr 2022 08:31:57 -0700 Subject: [PATCH 288/490] Actually correct `Base.in(x, ps::Params)` Despite triumphant claims of victory, the [original PR](https://github.com/FluxML/Zygote.jl/pull/1130) still got the order of arguments wrong and essentially manually `@macroexpand`ed part of the `@forward`. This PR properly fixes that snafu. 
--- src/compiler/interface.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 0319d5268..d5428e97e 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -150,7 +150,7 @@ Params(xs::Tuple) = Params(collect(xs)) @forward Params.order Base.iterate, Base.length, Base.getindex -Base.in(ps::Params, x) = x in ps.params +Base.in(x, ps::Params) = x in ps.params Base.map(::typeof(_project), args::Tuple{Params}, grad) = grad # skip _project in gradient(f, ::Params) From 6980a17a2689318e0a874b10dcc302466880c1cd Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Tue, 26 Apr 2022 06:58:34 -0700 Subject: [PATCH 289/490] Remove `cat` adjoint in favour of ChainRules --- src/lib/array.jl | 13 ------------- 1 file changed, 13 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 6f5386c32..f492af9e6 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -116,19 +116,6 @@ pull_block_horz(sz, Δ, A::AbstractMatrix) = Δ[:, sz-size(A, 2)+1:sz] end @adjoint hcat(xs::Number...) = hcat(xs...), Δ -> (Δ...,) -@adjoint function cat(Xs...; dims) - cat(Xs...; dims = dims), Δ -> begin - start = ntuple(_ -> 0, ndims(Δ)) - catdims = Base.dims2cat(dims) - dXs = map(Xs) do x - move = ntuple(d -> (d<=length(catdims) && catdims[d]) ? size(x,d) : 0, ndims(Δ)) - x_in_Δ = ntuple(d -> (d<=length(catdims) && catdims[d]) ? (start[d]+1:start[d]+move[d]) : Colon(), ndims(Δ)) - start = start .+ move - dx = Δ[x_in_Δ...] - end - end -end - @adjoint function repeat(xs; inner=ntuple(_->1, ndims(xs)), outer=ntuple(_->1, ndims(xs))) repeat(xs, inner = inner, outer = outer), function (Δ) Δ′ = zero(xs) From 375c7dbcc9ee0fa2cbb8524c5ff7be9d70cc270d Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Tue, 26 Apr 2022 12:04:13 -0400 Subject: [PATCH 290/490] Bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 9d531789d..343069107 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.38" +version = "0.6.39" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 74f6b8f0751c8b9242fb27822d4a8d712436d023 Mon Sep 17 00:00:00 2001 From: lassepe Date: Tue, 26 Apr 2022 20:27:41 +0200 Subject: [PATCH 291/490] Make chunk threshold configurable --- src/lib/forward.jl | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 13957895c..8b96b417a 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -39,12 +39,9 @@ function forward_jacobian(f, x, ::Val{N}) where N return y, J end -function forward_jacobian(f, x) - if length(x) < ForwardDiff.DEFAULT_CHUNK_THRESHOLD - forward_jacobian(f, x, Val(length(x))) - else - forward_jacobian(f, x, Val(ForwardDiff.DEFAULT_CHUNK_THRESHOLD)) - end +function forward_jacobian(f, x; chunk_threshold = ForwardDiff.DEFAULT_CHUNK_THRESHOLD) + chunk_size = min(length(x), chunk_threshold) + forward_jacobian(f, x, Val(chunk_size)) end vec_scalar(x) = vec(x) @@ -82,10 +79,11 @@ function forward_diag(f, x::AbstractArray) end """ - forwarddiff(f, x) -> f(x) + forwarddiff(f, x; chunk_threshold = ForwardDiff.DEFAULT_CHUNK_THRESHOLD) -> f(x) Runs `f(x)` as usual, but instructs Zygote to differentiate `f` using forward -mode, rather than the usual reverse mode. +mode, rather than the usual reverse mode. The `chunk_threshold` argument controls +the maximum chunk size (c.f. 
ForwardDiff documentation). Forward mode takes time linear in `length(x)` but only has constant memory overhead, and is very efficient for scalars, so in some cases this can be a @@ -130,11 +128,11 @@ gradient(2, 3) do a, b end ``` """ -forwarddiff(f, x) = f(x) +forwarddiff(f, x; chunk_threshold = ForwardDiff.DEFAULT_CHUNK_THRESHOLD) = f(x) -@adjoint function forwarddiff(f, x) - y, J = forward_jacobian(f, x) - return y, ȳ -> (nothing, reshape_scalar(x, J*vec_scalar(ȳ))) +@adjoint function forwarddiff(f, x; chunk_threshold = ForwardDiff.DEFAULT_CHUNK_THRESHOLD) + y, J = forward_jacobian(f, x; chunk_threshold) + return y, ȳ -> (nothing, reshape_scalar(x, J * vec_scalar(ȳ))) end # Use this to allow second derivatives -- this is forward-over-forward, From c3b17f736674abbe24c494fa79712716f0a7a23f Mon Sep 17 00:00:00 2001 From: lassepe Date: Tue, 26 Apr 2022 20:39:16 +0200 Subject: [PATCH 292/490] Add tests for forward mode chunking --- test/features.jl | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/test/features.jl b/test/features.jl index 839e98cc4..729352f33 100644 --- a/test/features.jl +++ b/test/features.jl @@ -375,6 +375,16 @@ end == (1,) forwarddiff(x -> x^2, x) end == (10,) +@testset "Gradient chunking" begin + for chunk_threshold in 1:10:100 + x = [1:100;] + @test gradient(x) do x + Zygote.forwarddiff(x -> x' * x, x; chunk_threshold) + end == (2 * x,) + end +end + + @test gradient(1) do x if true elseif true From 0b0ea3d5c2ba62f4fd7b694060d687722df48373 Mon Sep 17 00:00:00 2001 From: lassepe Date: Tue, 26 Apr 2022 20:56:43 +0200 Subject: [PATCH 293/490] Fix julia 1.3 threshold --- src/lib/forward.jl | 2 +- test/features.jl | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 8b96b417a..9cdbf1adb 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -131,7 +131,7 @@ end forwarddiff(f, x; chunk_threshold = ForwardDiff.DEFAULT_CHUNK_THRESHOLD) = f(x) @adjoint function forwarddiff(f, x; chunk_threshold = ForwardDiff.DEFAULT_CHUNK_THRESHOLD) - y, J = forward_jacobian(f, x; chunk_threshold) + y, J = forward_jacobian(f, x; chunk_threshold = chunk_threshold) return y, ȳ -> (nothing, reshape_scalar(x, J * vec_scalar(ȳ))) end diff --git a/test/features.jl b/test/features.jl index 729352f33..cdfe7329e 100644 --- a/test/features.jl +++ b/test/features.jl @@ -379,7 +379,7 @@ end == (10,) for chunk_threshold in 1:10:100 x = [1:100;] @test gradient(x) do x - Zygote.forwarddiff(x -> x' * x, x; chunk_threshold) + Zygote.forwarddiff(x -> x' * x, x; chunk_threshold = chunk_threshold) end == (2 * x,) end end From 5771195764a1e37ca8521fadc7d2f2ea55e4e830 Mon Sep 17 00:00:00 2001 From: lassepe Date: Wed, 27 Apr 2022 13:55:03 +0200 Subject: [PATCH 294/490] Formatting --- src/lib/forward.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 9cdbf1adb..6d433b405 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -131,11 +131,11 @@ end forwarddiff(f, x; chunk_threshold = ForwardDiff.DEFAULT_CHUNK_THRESHOLD) = f(x) @adjoint function forwarddiff(f, x; chunk_threshold = ForwardDiff.DEFAULT_CHUNK_THRESHOLD) - y, J = forward_jacobian(f, x; chunk_threshold = chunk_threshold) - return y, ȳ -> (nothing, reshape_scalar(x, J * vec_scalar(ȳ))) + y, J = forward_jacobian(f, x; chunk_threshold = chunk_threshold) + return y, ȳ -> (nothing, reshape_scalar(x, J * vec_scalar(ȳ))) end -# Use this to allow second derivatives -- this is forward-over-forward, +# Use this to 
allow second derivatives -- this is forward-over-forward, # see https://github.com/FluxML/Zygote.jl/issues/769 for a forward-over-reverse proposal @adjoint ForwardDiff.gradient(f, x) = pullback(forwarddiff, x -> ForwardDiff.gradient(f, x), x) @adjoint ForwardDiff.jacobian(f, x) = pullback(forwarddiff, x -> ForwardDiff.jacobian(f, x), x) From c44bac5c5cabc2bc73fffaa637c260d41043f3cc Mon Sep 17 00:00:00 2001 From: Soeren Schoenbrod Date: Thu, 28 Apr 2022 08:44:13 +0200 Subject: [PATCH 295/490] Correct spelling mistakte --- docs/src/adjoints.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/adjoints.md b/docs/src/adjoints.md index b38c07a14..45f0662b3 100644 --- a/docs/src/adjoints.md +++ b/docs/src/adjoints.md @@ -2,7 +2,7 @@ !!! note "Prefer to use ChainRulesCore to define custom adjoints" Zygote supports the use of [ChainRulesCore](http://www.juliadiff.org/ChainRulesCore.jl/stable/) to define custom sensitivities. - It is prefered to define the custom sensitivities using `ChainRulesCore.rrule` as they will work for many AD systems, not just Zygote. + It is preferred to define the custom sensitivities using `ChainRulesCore.rrule` as they will work for many AD systems, not just Zygote. These sensitivities can be added in your own package, or for Base/StdLib functions they can be added to [ChainRules.jl](https://github.com/JuliaDiff/ChainRules.jl/). To define custom sensitivities using ChainRulesCore, define a `ChainRulesCore.rrule(f, args...; kwargs...)`. Head to [ChainRules project's documentation](https://www.juliadiff.org/ChainRulesCore.jl/stable/) for more information. **If you are defining your custom adjoints using ChainRulesCore then you do not need to read this page**, and can consider it as documenting a legacy feature. From 2dc86f554235c05a470b4776bad2807b85c33df5 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 7 May 2022 11:08:28 -0700 Subject: [PATCH 296/490] Use `setglobal!` on nightly This should address a CI failure. --- src/lib/lib.jl | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/lib/lib.jl b/src/lib/lib.jl index f154ecd2a..f11a74214 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -81,8 +81,12 @@ unwrap(ref, x) = x end function global_set(ref, val) - ccall(:jl_set_global, Cvoid, (Any, Any, Any), - ref.mod, ref.name, val) + @static if VERSION < v"1.9.0-DEV.265" + ccall(:jl_set_global, Cvoid, (Any, Any, Any), + ref.mod, ref.name, val) + else + setglobal!(ref.mod, ref.name, val) + end end @adjoint! 
function global_set(ref, x) From 89a1caab7f56849f9f9e1f43d59b352b5b3966fe Mon Sep 17 00:00:00 2001 From: David Widmann Date: Wed, 23 Feb 2022 17:08:36 +0100 Subject: [PATCH 297/490] Fix deprecations in DiffRules 1.4 --- Project.toml | 6 ++++-- src/forward/number.jl | 28 +++++++++++++++------------- 2 files changed, 19 insertions(+), 15 deletions(-) diff --git a/Project.toml b/Project.toml index 343069107..97af0610c 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.39" +version = "0.6.40" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" @@ -13,6 +13,7 @@ ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" IRTools = "7869d1d1-7146-5819-86e3-90919afe41df" InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" +LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688" MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" @@ -27,10 +28,11 @@ AbstractFFTs = "0.5, 1.0" ChainRules = "1.5" ChainRulesCore = "1.9" ChainRulesTestUtils = "1" -DiffRules = "1.0" +DiffRules = "1.4" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12, 0.13" ForwardDiff = "0.10" IRTools = "0.4.4" +LogExpFunctions = "0.3" MacroTools = "0.5" NaNMath = "0.3, 1" Requires = "1.1" diff --git a/src/forward/number.jl b/src/forward/number.jl index db88af656..322f76f88 100644 --- a/src/forward/number.jl +++ b/src/forward/number.jl @@ -1,21 +1,23 @@ -using DiffRules, SpecialFunctions, NaNMath +using DiffRules, SpecialFunctions, NaNMath, LogExpFunctions using Base.FastMath: fast_op, make_fastmath # TODO use CSE here -for (M, f, arity) in DiffRules.diffrules() - arity == 1 || continue - dx = DiffRules.diffrule(M, f, :x) - @eval begin - @tangent $M.$f(x::Number) = $M.$f(x), ẋ -> ẋ * $dx +for (M, f, arity) in DiffRules.diffrules(; filter_modules=nothing) + if !(isdefined(@__MODULE__, M) && isdefined(getfield(@__MODULE__, M), f)) + @warn "$M.$f is not available and hence rule for it can not be defined" + continue # Skip rules for methods not defined in the current scope end -end - -for (M, f, arity) in DiffRules.diffrules() - arity == 2 || continue - da, db = DiffRules.diffrule(M, f, :a, :b) - @eval begin - @tangent $M.$f(a::Number, b::Number) = $M.$f(a, b), (ȧ, ḃ) -> ȧ*$da + ḃ*$db + if arity == 1 + dx = DiffRules.diffrule(M, f, :x) + @eval begin + @tangent $M.$f(x::Number) = $M.$f(x), ẋ -> ẋ * $dx + end + elseif arity == 2 + da, db = DiffRules.diffrule(M, f, :a, :b) + @eval begin + @tangent $M.$f(a::Number, b::Number) = $M.$f(a, b), (ȧ, ḃ) -> ȧ*$da + ḃ*$db + end end end From e4a9e7ceaf87dac0e271c1c7b1b3b6a7d3f1a5b2 Mon Sep 17 00:00:00 2001 From: Mason Protter Date: Mon, 9 May 2022 19:57:07 -0600 Subject: [PATCH 298/490] Remove unnecessary generated functions (#1220) * Remove unnecessary generated functions * fix typo --- src/tools/builtins.jl | 17 +++++++---------- 1 file changed, 7 insertions(+), 10 deletions(-) diff --git a/src/tools/builtins.jl b/src/tools/builtins.jl index 6d0daf57c..07c3a3217 100644 --- a/src/tools/builtins.jl +++ b/src/tools/builtins.jl @@ -1,17 +1,14 @@ -@generated function __new__(T, args...) - quote - Base.@_inline_meta - $(Expr(:new, :T, [:(args[$i]) for i = 1:length(args)]...)) - end +macro __new__(T, args...) 
+ esc(Expr(:new, T, args...)) end -@generated function __splatnew__(T, args) - quote - Base.@_inline_meta - $(Expr(:splatnew, :T, :args)) - end +macro __splatnew__(T, args) + esc(Expr(:splatnew, T, args)) end +@inline __new__(T, args...) = @__splatnew__(T, args) +@inline __splatnew__(T, args) = @__splatnew__(T, args) + literal_getindex(x, ::Val{i}) where i = getindex(x, i) literal_indexed_iterate(x, ::Val{i}) where i = Base.indexed_iterate(x, i) literal_indexed_iterate(x, ::Val{i}, state) where i = Base.indexed_iterate(x, i, state) From 885a904ed958c74cdaa2af7a971a6b5a2da908a7 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Mon, 9 May 2022 21:59:18 -0400 Subject: [PATCH 299/490] v0.6.40 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 343069107..666b2fd7c 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.39" +version = "0.6.40" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From b6651a317448e90ba5cd61b03a0ae19176cb8a56 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Tue, 10 May 2022 16:57:52 +0200 Subject: [PATCH 300/490] Make LogExpFunctions a proper dependency and clean adjoints --- Project.toml | 9 ++++----- src/Zygote.jl | 2 +- src/lib/logexpfunctions.jl | 25 ++----------------------- 3 files changed, 7 insertions(+), 29 deletions(-) diff --git a/Project.toml b/Project.toml index 666b2fd7c..3f1837806 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.40" +version = "0.6.41" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" @@ -13,6 +13,7 @@ ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" IRTools = "7869d1d1-7146-5819-86e3-90919afe41df" InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" +LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688" MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" @@ -31,11 +32,11 @@ DiffRules = "1.0" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12, 0.13" ForwardDiff = "0.10" IRTools = "0.4.4" +LogExpFunctions = "0.3.1" MacroTools = "0.5" NaNMath = "0.3, 1" Requires = "1.1" SpecialFunctions = "1.6, 2" -StatsFuns = "0.9.8" ZygoteRules = "0.2.1" julia = "1.3" @@ -45,9 +46,7 @@ ChainRulesTestUtils = "cdddcdb0-9152-4a09-a978-84456f9df70a" Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" -LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688" -StatsFuns = "4c63d2b9-4356-54db-8cca-17b64c39e42c" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] -test = ["ChainRulesTestUtils", "CUDA", "Distances", "FFTW", "FiniteDifferences", "LogExpFunctions", "Test"] +test = ["ChainRulesTestUtils", "CUDA", "Distances", "FFTW", "FiniteDifferences", "Test"] diff --git a/src/Zygote.jl b/src/Zygote.jl index d537efbb7..a42dd38c1 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -41,8 +41,8 @@ include("lib/broadcast.jl") include("lib/forward.jl") include("lib/utils.jl") include("lib/range.jl") +include("lib/logexpfunctions.jl") @init @require Distances="b4f34e82-e78d-54a5-968a-f98e89d6e8f7" include("lib/distances.jl") -@init @require LogExpFunctions="2ab3a3ac-af41-5b50-aa03-7779005ae688" 
include("lib/logexpfunctions.jl") # we need to define this late, so that the genfuncs see lib.jl # Move using statements out of this file to help with sysimage building diff --git a/src/lib/logexpfunctions.jl b/src/lib/logexpfunctions.jl index 1e5e4c0b6..4bdda0d6c 100644 --- a/src/lib/logexpfunctions.jl +++ b/src/lib/logexpfunctions.jl @@ -1,5 +1,4 @@ -using .LogExpFunctions: xlogx, xlogy, logistic, logit, log1psq, log1pexp, - logsumexp, logaddexp, logsubexp +using LogExpFunctions: xlogx, xlogy, logistic, log1pexp, logsumexp, logaddexp, logsubexp using Base.Broadcast: broadcasted @adjoint function xlogx(x::Real) @@ -20,30 +19,10 @@ function ∇xlogx(x::Numeric) return result, dx end -@adjoint function logistic(x::Real) - y = logistic(x) - return y, Δ->(Δ * y * (1 - y),) -end - -@adjoint logit(x::Real) = logit(x), Δ->(Δ / (x * (1 - x)),) - -@adjoint log1psq(x::Real) = log1psq(x), Δ->(Δ * 2x / (1 + abs2(x)),) - -@adjoint function log1pexp(x::Real) - dx = ∂log1pexp(x) - return log1pexp(x), δ -> (δ * dx,) -end @adjoint function broadcasted(::typeof(log1pexp), x::Numeric) - dx = ∂log1pexp.(x) + dx = logistic.(x) return log1pexp.(x), δ -> (nothing, unbroadcast(x, δ .* dx)) end -∂log1pexp(x::Real) = x < 18.0 ? logistic(x) : x < 33.3 ? one(x) - exp(-x) : oftype(exp(x), 1) -∂log1pexp(x::Float32) = x < 9f0 ? logistic(x) : x < 16f0 ? one(x) - exp(-x) : oftype(exp(x), 1) - -@adjoint function logsumexp(X::AbstractArray{<:Real}; dims=:) - lse = logsumexp(X; dims=dims) - return lse, Δ -> (Δ .* exp.(X .- lse),) -end @adjoint function xlogy(x::Real, y::Real) result, dx, dy = ∇xlogy(x, y) From 5c028dd8388f7194a1dc0d0238c5dfc90a3afb41 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 10 May 2022 21:05:32 -0400 Subject: [PATCH 301/490] add warnings to forwarddiff --- src/lib/forward.jl | 28 ++++++++++++++++++++++++---- test/utils.jl | 9 +++++++++ 2 files changed, 33 insertions(+), 4 deletions(-) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 6d433b405..d6d846d52 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -137,8 +137,28 @@ end # Use this to allow second derivatives -- this is forward-over-forward, # see https://github.com/FluxML/Zygote.jl/issues/769 for a forward-over-reverse proposal -@adjoint ForwardDiff.gradient(f, x) = pullback(forwarddiff, x -> ForwardDiff.gradient(f, x), x) -@adjoint ForwardDiff.jacobian(f, x) = pullback(forwarddiff, x -> ForwardDiff.jacobian(f, x), x) +@adjoint function ForwardDiff.gradient(f, x) + F = typeof(f) + Base.issingletontype(F) || @warn """`ForwardDiff.gradient(f, x)` within Zygote cannot track gradients with respect to `f` + typeof(f) = $F is not a singleton type""" # maxlog=1 _id=hash(F) + pullback(forwarddiff, x -> ForwardDiff.gradient(f, x), x) +end + +@adjoint function ForwardDiff.jacobian(f::F, x) where F + Base.issingletontype(F) || @warn """`ForwardDiff.jacobian(f, x)` within Zygote cannot track gradients with respect to `f` + typeof(f) = $F is not a singleton type""" # maxlog=1 _id=hash(F) + pullback(forwarddiff, x -> ForwardDiff.jacobian(f, x), x) +end + +@adjoint function ForwardDiff.derivative(f::F, x) where F + Base.issingletontype(F) || @warn """`ForwardDiff.derivative(f, x)` within Zygote cannot track gradients with respect to `f` + typeof(f) = $F is not a singleton type""" maxlog=1 _id=hash(F) + pullback(forwarddiff, x -> ForwardDiff.derivative(f, x), x) +end + +@adjoint function ForwardDiff.hessian(f::F, x) where F + Base.issingletontype(F) || @warn """`ForwardDiff.hessian(f, x)` 
within Zygote cannot track gradients with respect to `f` + typeof(f) = $F is not a singleton type""" maxlog=1 _id=hash(F) + pullback(forwarddiff, x -> ForwardDiff.hessian(f, x), x) +end -@adjoint ForwardDiff.derivative(f, x) = pullback(forwarddiff, x -> ForwardDiff.derivative(f, x), x) -@adjoint ForwardDiff.hessian(f, x) = pullback(forwarddiff, x -> ForwardDiff.hessian(f, x), x) diff --git a/test/utils.jl b/test/utils.jl index b6d6ed018..40b2e85b7 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -133,4 +133,13 @@ using ForwardDiff g3(x) = sum(abs2,ForwardDiff.jacobian(f,x)) out,back = Zygote.pullback(g3,[2.0,3.2]) @test back(1.0)[1] == ForwardDiff.gradient(g3,[2.0,3.2]) + + # From https://github.com/FluxML/Zygote.jl/issues/1218 + f1218(x::AbstractVector,y::AbstractVector) = sum(x)*sum(y) + gradf1218(x,y) = ForwardDiff.gradient(x->f1218(x,y), x)[1] + x = [0.1] + y = rand(5) + @test ForwardDiff.gradient(y->gradf1218(x,y), y) == ones(5) + # this returns (nothing,) -- now prints a warning + @test_broken Zygote.gradient(y->gradf1218(x,y), y) == ones(5) end From fc56b9df8d47a8dda89bffe4422855b95dae143a Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 10 May 2022 23:00:18 -0400 Subject: [PATCH 302/490] change wording, display once --- src/lib/forward.jl | 20 ++++++++++++-------- 1 file changed, 12 insertions(+), 8 deletions(-) diff --git a/src/lib/forward.jl b/src/lib/forward.jl index d6d846d52..3ee3f7d1c 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -139,26 +139,30 @@ end # see https://github.com/FluxML/Zygote.jl/issues/769 for a forward-over-reverse proposal @adjoint function ForwardDiff.gradient(f, x) F = typeof(f) - Base.issingletontype(F) || @warn """`ForwardDiff.gradient(f, x)` within Zygote cannot track gradients with respect to `f` - typeof(f) = $F is not a singleton type""" # maxlog=1 _id=hash(F) + Base.issingletontype(F) || @warn """`ForwardDiff.gradient(f, x)` within Zygote cannot track gradients with respect to `f`, + and `f` appears to be a closure, or a struct with fields (according to `issingletontype(typeof(f))`). + typeof(f) = $F""" maxlog=1 _id=hash(F) pullback(forwarddiff, x -> ForwardDiff.gradient(f, x), x) end @adjoint function ForwardDiff.jacobian(f::F, x) where F - Base.issingletontype(F) || @warn """`ForwardDiff.jacobian(f, x)` within Zygote cannot track gradients with respect to `f` - typeof(f) = $F is not a singleton type""" # maxlog=1 _id=hash(F) + Base.issingletontype(F) || @warn """`ForwardDiff.jacobian(f, x)` within Zygote cannot track gradients with respect to `f`, + and `f` appears to be a closure, or a struct with fields (according to `issingletontype(typeof(f))`). + typeof(f) = $F""" maxlog=1 _id=hash(F) pullback(forwarddiff, x -> ForwardDiff.jacobian(f, x), x) end @adjoint function ForwardDiff.derivative(f::F, x) where F - Base.issingletontype(F) || @warn """`ForwardDiff.derivative(f, x)` within Zygote cannot track gradients with respect to `f` - typeof(f) = $F is not a singleton type""" maxlog=1 _id=hash(F) + Base.issingletontype(F) || @warn """`ForwardDiff.derivative(f, x)` within Zygote cannot track gradients with respect to `f`, + and `f` appears to be a closure, or a struct with fields (according to `issingletontype(typeof(f))`). 
+ typeof(f) = $F""" maxlog=1 _id=hash(F) pullback(forwarddiff, x -> ForwardDiff.derivative(f, x), x) end @adjoint function ForwardDiff.hessian(f::F, x) where F - Base.issingletontype(F) || @warn """`ForwardDiff.hessian(f, x)` within Zygote cannot track gradients with respect to `f` - typeof(f) = $F is not a singleton type""" maxlog=1 _id=hash(F) + Base.issingletontype(F) || @warn """`ForwardDiff.hessian(f, x)` within Zygote cannot track gradients with respect to `f`, + and `f` appears to be a closure, or a struct with fields (according to `issingletontype(typeof(f))`). + typeof(f) = $F""" maxlog=1 _id=hash(F) pullback(forwarddiff, x -> ForwardDiff.hessian(f, x), x) end From 0f3d8430b181745c3698657780327f2313f926c1 Mon Sep 17 00:00:00 2001 From: DomCRose Date: Fri, 20 May 2022 19:17:07 +0100 Subject: [PATCH 303/490] Fix non-holomorphic tests --- test/complex.jl | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/test/complex.jl b/test/complex.jl index 1abd1303f..54c49f299 100644 --- a/test/complex.jl +++ b/test/complex.jl @@ -76,14 +76,20 @@ fs_C_to_C_non_holomorphic = (conj, z->im*abs2(z), z->z'z, z->conj(z)*z^2, + z->imag(z)^2+real(sin(z))^3*1im, ) @testset "C->C non-holomorphic" begin - for f in (fs_C_to_C_holomorphic...,fs_C_to_C_holomorphic...) + for f in (fs_C_to_C_holomorphic...,fs_C_to_C_non_holomorphic...) for z in (1.0+2.0im, -2.0+pi*im) - grad_zygote = gradient(real∘f, z)[1] + grad_zygote_r = gradient(real∘f, z)[1] + grad_zygote_i = gradient(imag∘f, z)[1] ε = 1e-8 - grad_fd = real(f(z+ε)-f(z))/ε + im*real(f(z+ε*im)-f(z))/ε - @test abs(grad_zygote - grad_fd) < sqrt(ε) + grad_fd_r = real(f(z+ε)-f(z))/ε + im*real(f(z+ε*im)-f(z))/ε + grad_fd_i = imag(f(z+ε)-f(z))/ε + im*imag(f(z+ε*im)-f(z))/ε + # Check derivative of both real and imaginary parts of f as these may differ + # for non-holomorphic functions + @test abs(grad_zygote_r - grad_fd_r) < sqrt(ε) + @test abs(grad_zygote_i - grad_fd_i) < sqrt(ε) end end end From 22b6963ecd6373eee8fd176e4c12c5c40f3008af Mon Sep 17 00:00:00 2001 From: DomCRose Date: Sun, 22 May 2022 20:32:22 +0100 Subject: [PATCH 304/490] Seperate (non-)holomorphic tests --- test/complex.jl | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/test/complex.jl b/test/complex.jl index 54c49f299..d73cac65d 100644 --- a/test/complex.jl +++ b/test/complex.jl @@ -58,12 +58,17 @@ fs_C_to_C_holomorphic = (cos, @testset "C->C holomorphic" begin for f in fs_C_to_C_holomorphic for z in (1.0+2.0im, -2.0+pi*im) - grad_zygote = gradient(real∘f, z)[1] + grad_zygote_r = gradient(real∘f, z)[1] + grad_zygote_i = gradient(imag∘f, z)[1] ε = 1e-8 grad_fd_r = (f(z+ε)-f(z))/ε - grad_fd_i = (f(z+ε*im)-f(z))/(ε*im) - @assert abs(grad_fd_r - grad_fd_i) < sqrt(ε) # check the function is indeed holomorphic - @test abs(grad_zygote - conj(grad_fd_r)) < sqrt(ε) + grad_fd_i = (f(z + ε * im) - f(z)) / (ε * im) + # check the function is indeed holomorphic + @assert abs(grad_fd_r - grad_fd_i) < sqrt(ε) + # check Zygote derivatives agree with holomorphic definition + @test abs(grad_zygote_r + im*grad_zygote_i) < sqrt(ε) + # check derivative agrees with finite differences + @test abs(grad_zygote_r - conj(grad_fd_r)) < sqrt(ε) end end end @@ -79,7 +84,7 @@ fs_C_to_C_non_holomorphic = (conj, z->imag(z)^2+real(sin(z))^3*1im, ) @testset "C->C non-holomorphic" begin - for f in (fs_C_to_C_holomorphic...,fs_C_to_C_non_holomorphic...) 
+ for f in fs_C_to_C_non_holomorphic for z in (1.0+2.0im, -2.0+pi*im) grad_zygote_r = gradient(real∘f, z)[1] grad_zygote_i = gradient(imag∘f, z)[1] From 9f417db5bc214dd4a60ff0f68bf266bde51cd804 Mon Sep 17 00:00:00 2001 From: DomCRose Date: Tue, 24 May 2022 11:12:32 +0100 Subject: [PATCH 305/490] Edit holomorphic gradient test to use approx --- test/complex.jl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/test/complex.jl b/test/complex.jl index d73cac65d..efb1e06dd 100644 --- a/test/complex.jl +++ b/test/complex.jl @@ -63,11 +63,11 @@ fs_C_to_C_holomorphic = (cos, ε = 1e-8 grad_fd_r = (f(z+ε)-f(z))/ε grad_fd_i = (f(z + ε * im) - f(z)) / (ε * im) - # check the function is indeed holomorphic + # Check the function is indeed holomorphic @assert abs(grad_fd_r - grad_fd_i) < sqrt(ε) - # check Zygote derivatives agree with holomorphic definition - @test abs(grad_zygote_r + im*grad_zygote_i) < sqrt(ε) - # check derivative agrees with finite differences + # Check Zygote derivatives agree with holomorphic definition + @test grad_zygote_r ≈ -im*grad_zygote_i + # Check derivative agrees with finite differences @test abs(grad_zygote_r - conj(grad_fd_r)) < sqrt(ε) end end From b9caf3f5b1dccd7448e59e0d59d89374e039fa0c Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 4 Jun 2022 17:16:39 -0700 Subject: [PATCH 306/490] Allow accumulating distinct Dicts --- src/lib/base.jl | 4 ++-- test/features.jl | 15 ++++++++++++++- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/src/lib/base.jl b/src/lib/base.jl index ac7df59a2..b476eb175 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -24,8 +24,8 @@ grad_mut(d::IdDict) = IdDict() # TODO perhaps look up mutable gradients in `pullback` function accum(a::AbstractDict, b::AbstractDict) - @assert a === b - return a + a === b && return a # Mutating case + return merge(a, b) end @adjoint function getindex(d::AbstractDict, k) diff --git a/test/features.jl b/test/features.jl index cdfe7329e..838645401 100644 --- a/test/features.jl +++ b/test/features.jl @@ -1,5 +1,6 @@ -using Zygote, Test +using Zygote, Test, LinearAlgebra using Zygote: Params, gradient, forwarddiff +using FillArrays: Fill @testset "gradient checkpointing" begin @@ -676,6 +677,18 @@ end loss(x) = sum(abs2, net(x)) @test gradient(loss, ones(10,10))[1] == fill(131072, 10, 10) @test 150_000_000 > @allocated gradient(loss, ones(1000,1000)) + + # https://github.com/FluxML/Zygote.jl/issues/1233 + function defensiveupdate(d, a) + nd = deepcopy(d) + nd[1] = d[1] * a + return nd + end + d = Dict(i => ones(1) for i in 1:2) + @test gradient(d) do d + nd = defensiveupdate(d, 5) + return sum(nd[1]) + sum(nd[2]) + end[1] == Dict(1 => Fill(5, 1), 2 => Fill(1, 1)) end @testset "tricky broadcasting" begin From 0fa305d21c5d6495ded84f9c2a9e614038788599 Mon Sep 17 00:00:00 2001 From: Sam Anklesaria Date: Fri, 10 Jun 2022 16:08:51 -0500 Subject: [PATCH 307/490] Fix #1241 --- src/lib/base.jl | 5 ++++- test/lib/base.jl | 13 +++++++++++++ test/runtests.jl | 1 + 3 files changed, 18 insertions(+), 1 deletion(-) create mode 100644 test/lib/base.jl diff --git a/src/lib/base.jl b/src/lib/base.jl index ac7df59a2..fa71d8906 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -29,11 +29,14 @@ function accum(a::AbstractDict, b::AbstractDict) end @adjoint function getindex(d::AbstractDict, k) - d[k], function (Δ) + val = d[k] + function dict_getindex_pullback(Δ) + accum_param(__context__, val, Δ) === nothing && return grad = grad_mut(__context__, d) grad[k] = accum(get(grad, k, nothing), Δ) 
return (grad, nothing) end + val, dict_getindex_pullback end @adjoint! function setindex!(d::AbstractDict, v, k) diff --git a/test/lib/base.jl b/test/lib/base.jl new file mode 100644 index 000000000..99ff446ce --- /dev/null +++ b/test/lib/base.jl @@ -0,0 +1,13 @@ +@testset "base.jl" begin + @testset "dict_param" begin + d = Dict{String, Vector{Float64}}("key"=>ones(4)) + fn() = d["key"][2] + result1 = gradient(fn, Params([d["key"]]))[d["key"]] + + x = d["key"] + fn2() = x[2] + result2 = gradient(fn2, Params([x]))[x] + + @test result1 == result2 + end +end diff --git a/test/runtests.jl b/test/runtests.jl index 17ebb3997..fe5590efd 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -29,6 +29,7 @@ using CUDA: has_cuda @testset "lib" begin include("lib/number.jl") include("lib/lib.jl") + include("lib/base.jl") include("lib/array.jl") end From 99d89b09b5190b02aebf0026c3462c06bfd78a83 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 11 Jun 2022 09:17:05 -0700 Subject: [PATCH 308/490] tweak test name --- test/lib/base.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/lib/base.jl b/test/lib/base.jl index 99ff446ce..5186483da 100644 --- a/test/lib/base.jl +++ b/test/lib/base.jl @@ -1,5 +1,5 @@ @testset "base.jl" begin - @testset "dict_param" begin + @testset "Dict getindex with implicit params" begin d = Dict{String, Vector{Float64}}("key"=>ones(4)) fn() = d["key"][2] result1 = gradient(fn, Params([d["key"]]))[d["key"]] From e82f24ff2aa6449844ade41c94f241b3f6d39bf5 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Fri, 17 Jun 2022 17:38:47 +0200 Subject: [PATCH 309/490] deprecate dropgrad --- docs/src/utils.md | 1 - src/Zygote.jl | 1 + src/deprecated.jl | 15 +++++++++++++++ src/lib/utils.jl | 13 ------------- 4 files changed, 16 insertions(+), 14 deletions(-) create mode 100644 src/deprecated.jl diff --git a/docs/src/utils.md b/docs/src/utils.md index b7e779185..d04f83140 100644 --- a/docs/src/utils.md +++ b/docs/src/utils.md @@ -18,7 +18,6 @@ Zygote.withgradient Zygote.withjacobian Zygote.@showgrad Zygote.hook -Zygote.dropgrad Zygote.Buffer Zygote.forwarddiff Zygote.ignore diff --git a/src/Zygote.jl b/src/Zygote.jl index a42dd38c1..b1ca50aa9 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -18,6 +18,7 @@ export rrule_via_ad const Numeric{T<:Number} = Union{T, AbstractArray{<:T}} +include("deprecated.jl") include("tools/buffer.jl") include("tools/builtins.jl") diff --git a/src/deprecated.jl b/src/deprecated.jl new file mode 100644 index 000000000..1df630017 --- /dev/null +++ b/src/deprecated.jl @@ -0,0 +1,15 @@ +""" + dropgrad(x) -> x + +Drop the gradient of `x`. + + julia> gradient(2, 3) do a, b + dropgrad(a)*b + end + (nothing, 2) +""" +function dropgrad end + +@adjoint dropgrad(x) = dropgrad(x), _ -> nothing + +Base.@deprecate dropgrad(x) ChainRulesCore.ignore_derivatives(x) diff --git a/src/lib/utils.jl b/src/lib/utils.jl index 86e6fff8c..e5d8baeee 100644 --- a/src/lib/utils.jl +++ b/src/lib/utils.jl @@ -1,16 +1,3 @@ -""" - dropgrad(x) -> x - -Drop the gradient of `x`. - - julia> gradient(2, 3) do a, b - dropgrad(a)*b - end - (nothing, 2) -""" -dropgrad(x) = x -@adjoint dropgrad(x) = dropgrad(x), _ -> nothing - """ ignore() do ... 
From 99149d4c345a6ea4ddf5a994e5c897381031bcc0 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Fri, 17 Jun 2022 17:48:58 +0200 Subject: [PATCH 310/490] deprecate ignore --- src/deprecated.jl | 36 ++++++++++++++++++++++++++++++++++++ src/lib/utils.jl | 32 -------------------------------- 2 files changed, 36 insertions(+), 32 deletions(-) diff --git a/src/deprecated.jl b/src/deprecated.jl index 1df630017..9bc808511 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -13,3 +13,39 @@ function dropgrad end @adjoint dropgrad(x) = dropgrad(x), _ -> nothing Base.@deprecate dropgrad(x) ChainRulesCore.ignore_derivatives(x) + + +""" + ignore() do + ... + end + +Tell Zygote to ignore a block of code. Everything inside the `do` block will run +on the forward pass as normal, but Zygote won't try to differentiate it at all. +This can be useful for e.g. code that does logging of the forward pass. + +Obviously, you run the risk of incorrect gradients if you use this incorrectly. +""" +function ignore end + +@adjoint ignore(f) = ignore(f), _ -> nothing + +Base.@deprecate ignore(f) ChainRulesCore.ignore_derivatives(f) + +""" + @ignore (...) + +Tell Zygote to ignore an expression. Equivalent to `ignore() do (...) end`. +Example: + +```julia-repl +julia> f(x) = (y = Zygote.@ignore x; x * y); +julia> f'(1) +1 +``` +""" +macro ignore(ex) + return :(Zygote.ignore() do + $(esc(ex)) + end) +end diff --git a/src/lib/utils.jl b/src/lib/utils.jl index e5d8baeee..72c60a961 100644 --- a/src/lib/utils.jl +++ b/src/lib/utils.jl @@ -1,35 +1,3 @@ -""" - ignore() do - ... - end - -Tell Zygote to ignore a block of code. Everything inside the `do` block will run -on the forward pass as normal, but Zygote won't try to differentiate it at all. -This can be useful for e.g. code that does logging of the forward pass. - -Obviously, you run the risk of incorrect gradients if you use this incorrectly. -""" -ignore(f) = f() -@adjoint ignore(f) = ignore(f), _ -> nothing - -""" - @ignore (...) - -Tell Zygote to ignore an expression. Equivalent to `ignore() do (...) end`. 
-Example: - -```julia-repl -julia> f(x) = (y = Zygote.@ignore x; x * y); -julia> f'(1) -1 -``` -""" -macro ignore(ex) - return :(Zygote.ignore() do - $(esc(ex)) - end) -end - """ hook(x̄ -> ..., x) -> x From e647fc30b301904ed52ba329624a23176757d59d Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Fri, 17 Jun 2022 17:55:08 +0200 Subject: [PATCH 311/490] move tests for deprecated functionality into deprecated.jl --- test/deprecated.jl | 10 ++++++++++ test/gradcheck.jl | 8 -------- test/runtests.jl | 4 ++++ 3 files changed, 14 insertions(+), 8 deletions(-) create mode 100644 test/deprecated.jl diff --git a/test/deprecated.jl b/test/deprecated.jl new file mode 100644 index 000000000..ffc4994c7 --- /dev/null +++ b/test/deprecated.jl @@ -0,0 +1,10 @@ +@test_deprecated dropgrad(1) +@test_deprecated ignore(1) +@test_deprecated Zygote.@ignore x=1 + +@test gradient(x -> Zygote.ignore(() -> x*x), 1) == (nothing,) +@test gradient(x -> Zygote.@ignore(x*x), 1) == (nothing,) +@test gradient(1) do x + y = Zygote.@ignore x + x * y +end == (1,) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index ac0dd28bf..268c1734e 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1681,14 +1681,6 @@ end @test gradient(x -> findfirst(ismissing, x), [1, missing]) == (nothing,) @test gradient(x -> findlast(ismissing, x), [1, missing]) == (nothing,) @test gradient(x -> findall(ismissing, x)[1], [1, missing]) == (nothing,) - - - @test gradient(x -> Zygote.ignore(() -> x*x), 1) == (nothing,) - @test gradient(x -> Zygote.@ignore(x*x), 1) == (nothing,) - @test gradient(1) do x - y = Zygote.@ignore x - x * y - end == (1,) end @testset "fastmath" begin diff --git a/test/runtests.jl b/test/runtests.jl index fe5590efd..565ad182f 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -14,6 +14,10 @@ using CUDA: has_cuda @warn "CUDA not found - Skipping CUDA Tests" end + @testset "deprecated.jl" begin + include("deprecated.jl") + end + @testset "Interface" begin include("interface.jl") end From 38bf316766ed552ecd850fde1ae9e19b295d1db9 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Fri, 17 Jun 2022 18:08:14 +0200 Subject: [PATCH 312/490] remove ignore from docs --- docs/src/utils.md | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/src/utils.md b/docs/src/utils.md index d04f83140..ce9c3e778 100644 --- a/docs/src/utils.md +++ b/docs/src/utils.md @@ -20,7 +20,6 @@ Zygote.@showgrad Zygote.hook Zygote.Buffer Zygote.forwarddiff -Zygote.ignore Zygote.checkpointed ``` From 131c5c82a9c653a836f1545cbac9c687ab7507f8 Mon Sep 17 00:00:00 2001 From: ST John Date: Fri, 17 Jun 2022 19:28:36 +0300 Subject: [PATCH 313/490] increase ChainRules lower bound to 1.35.3 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index ae5c974d9..8de31f1a4 100644 --- a/Project.toml +++ b/Project.toml @@ -25,7 +25,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" -ChainRules = "1.5" +ChainRules = "1.35.3" ChainRulesCore = "1.9" ChainRulesTestUtils = "1" DiffRules = "1.4" From 644a5dd874f769bca7d6e4aa314e08f36bf4bd27 Mon Sep 17 00:00:00 2001 From: ST John Date: Sat, 18 Jun 2022 10:47:23 +0300 Subject: [PATCH 314/490] bump julia compat to 1.6 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 8de31f1a4..a35854934 100644 --- a/Project.toml +++ b/Project.toml @@ -38,7 +38,7 @@ NaNMath = "0.3, 1" Requires = "1.1" SpecialFunctions = "1.6, 2" ZygoteRules = "0.2.1" -julia = "1.3" +julia = 
"1.6" [extras] CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" From 206a6000397cc47ebf3c2b5c3c79c201e6eca310 Mon Sep 17 00:00:00 2001 From: ST John Date: Sat, 18 Jun 2022 10:58:30 +0300 Subject: [PATCH 315/490] remove failing test --- test/gradcheck.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index ac0dd28bf..0b024a51f 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -654,7 +654,6 @@ end g(X) = cholesky(X * X' + I) @test Zygote.pullback(g, X)[2]((factors=LowerTriangular(X),))[1] ≈ Zygote.pullback(g, X)[2]((factors=Matrix(LowerTriangular(X)),))[1] - @test_throws PosDefException Zygote.pullback(X -> cholesky(X, check = false), X)[2]((factors=X,)) # https://github.com/FluxML/Zygote.jl/issues/932 @test gradcheck(rand(5, 5), rand(5)) do A, x From 984a25c01adca0e9b197c235a986dcb9ed5c1396 Mon Sep 17 00:00:00 2001 From: ST John Date: Sat, 18 Jun 2022 11:08:55 +0300 Subject: [PATCH 316/490] add Hermitian cholesky test --- test/gradcheck.jl | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 0b024a51f..0ce7f6ae4 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -819,6 +819,18 @@ end @test back′(C̄)[1] isa Diagonal @test diag(back′(C̄)[1]) ≈ diag(back(C̄)[1]) end + @testset "cholesky - Hermitian" begin + rng, N = MersenneTwister(123456), 3 + A = randn(rng, N, N) + im * randn(rng, N, N) + H = Hermitian(A * A' + I) + Hmat = Matrix(H) + y, back = Zygote.pullback(cholesky, Hmat) + y′, back′ = Zygote.pullback(cholesky, H) + C̄ = (factors=randn(rng, N, N),) + @test back′(C̄)[1] isa Hermitian + @test gradtest(B->cholesky(Hermitian(B)).U, A * A' + I) + @test gradtest(B->logdet(cholesky(Hermitian(B))), A * A' + I) + end end @testset "lyap" begin From d13be2e84358e449e45ea7e3b86e8db793fd70ea Mon Sep 17 00:00:00 2001 From: st-- Date: Sat, 18 Jun 2022 21:48:44 +0300 Subject: [PATCH 317/490] Update test/gradcheck.jl Co-authored-by: David Widmann --- test/gradcheck.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 0ce7f6ae4..46e40b87e 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -821,7 +821,7 @@ end end @testset "cholesky - Hermitian" begin rng, N = MersenneTwister(123456), 3 - A = randn(rng, N, N) + im * randn(rng, N, N) + A = randn(rng, Complex{Float64}, N, N) H = Hermitian(A * A' + I) Hmat = Matrix(H) y, back = Zygote.pullback(cholesky, Hmat) From c8df3f07d326437d35a31f0da60190388e9dbc14 Mon Sep 17 00:00:00 2001 From: ST John Date: Sat, 18 Jun 2022 21:49:26 +0300 Subject: [PATCH 318/490] bump julia minimum version in github action ci.yml --- .github/workflows/ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bab7876a5..887c985c8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,7 +17,7 @@ jobs: fail-fast: false matrix: version: - - '1.3' # Replace this with the minimum Julia version that your package supports. + - '1.6' # Replace this with the minimum Julia version that your package supports. 
- '1' # automatically expands to the latest stable 1.x release of Julia - 'nightly' os: From 7ce5705b4cd481d46df5416536dc172375000cbe Mon Sep 17 00:00:00 2001 From: Kyle Daruwalla Date: Sun, 19 Jun 2022 22:35:38 +0530 Subject: [PATCH 319/490] Make array mutation error nicer --- src/lib/array.jl | 22 +++++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index f492af9e6..93f6ba12b 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -67,15 +67,31 @@ _droplike(dy::Union{LinearAlgebra.Adjoint, LinearAlgebra.Transpose}, dxv::Abstra @adjoint getindex(::Type{T}, xs...) where {T} = T[xs...], dy -> (nothing, dy...) +_throw_mutation_error(details) = error(""" +Mutating arrays is not supported -- $details +This error occurs when you ask Zygote to differentiate operations that change +the elements of arrays in place. Some common examples: +- setting values (x .= ...) +- appending values (push!(x, v)) +- popping values (pop!(x)) +- calling mutating functions (mul!(C, A, B)) +NOTE: non-mutating functions may use mutation under the hood + for performance or code-reuse. +Possible fixes: +- avoid mutating operations (preferred) +- hide the mutation from Zygote by wrapping the mutating call in a custom rrule + (https://juliadiff.org/ChainRulesCore.jl/stable/rule_author/example.html) +""") + @adjoint! setindex!(xs::AbstractArray, x...) = setindex!(xs, x...), - _ -> error("Mutating arrays is not supported -- called setindex!(::$(typeof(xs)), _...)") + _ -> _throw_mutation_error("called setindex!(::$(typeof(xs)), _...)") @adjoint! copyto!(xs, args...) = copyto!(xs, args...), - _ -> error("Mutating arrays is not supported -- called copyto!(::$(typeof(xs)), _...)") + _ -> _throw_mutation_error("called copyto!(::$(typeof(xs)), _...)") for f in [push!, pop!, pushfirst!, popfirst!] @eval @adjoint! $f(x::AbstractVector, ys...) = $f(x, ys...), - _ -> error("Mutating arrays is not supported -- called $($f)(::$(typeof(x)), _...)") + _ -> _throw_mutation_error("called $($f)(::$(typeof(x)), _...)") end # General From fc945ba4322b0d9b7aff12b9382f00c35897ad24 Mon Sep 17 00:00:00 2001 From: Kyle Daruwalla Date: Sun, 19 Jun 2022 22:52:07 +0530 Subject: [PATCH 320/490] Minor improvements to mutation error --- src/lib/array.jl | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 93f6ba12b..d1d542b8e 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -67,8 +67,8 @@ _droplike(dy::Union{LinearAlgebra.Adjoint, LinearAlgebra.Transpose}, dxv::Abstra @adjoint getindex(::Type{T}, xs...) where {T} = T[xs...], dy -> (nothing, dy...) -_throw_mutation_error(details) = error(""" -Mutating arrays is not supported -- $details +_throw_mutation_error(f, args...) = error(""" +Mutating arrays is not supported -- called $f($(join(map(typeof, args), ", ")), ...) This error occurs when you ask Zygote to differentiate operations that change the elements of arrays in place. Some common examples: - setting values (x .= ...) @@ -81,17 +81,19 @@ Possible fixes: - avoid mutating operations (preferred) - hide the mutation from Zygote by wrapping the mutating call in a custom rrule (https://juliadiff.org/ChainRulesCore.jl/stable/rule_author/example.html) +- if the mutation is coming from within a package (i.e. not user code), + then open an issue on Zygote.jl (https://github.com/FluxML/Zygote.jl/issues) """) @adjoint! setindex!(xs::AbstractArray, x...) 
= setindex!(xs, x...), - _ -> _throw_mutation_error("called setindex!(::$(typeof(xs)), _...)") + _ -> _throw_mutation_error(setindex!, xs) @adjoint! copyto!(xs, args...) = copyto!(xs, args...), - _ -> _throw_mutation_error("called copyto!(::$(typeof(xs)), _...)") + _ -> _throw_mutation_error(copyto!, xs) for f in [push!, pop!, pushfirst!, popfirst!] @eval @adjoint! $f(x::AbstractVector, ys...) = $f(x, ys...), - _ -> _throw_mutation_error("called $($f)(::$(typeof(x)), _...)") + _ -> _throw_mutation_error($f, x) end # General From 815e8dd76f3618640ad9305f54ce85fc3a634d8f Mon Sep 17 00:00:00 2001 From: ST John Date: Mon, 20 Jun 2022 13:01:04 +0300 Subject: [PATCH 321/490] fix test --- test/gradcheck.jl | 20 +++++++++++++++++--- 1 file changed, 17 insertions(+), 3 deletions(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 46e40b87e..182f2b666 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -819,7 +819,7 @@ end @test back′(C̄)[1] isa Diagonal @test diag(back′(C̄)[1]) ≈ diag(back(C̄)[1]) end - @testset "cholesky - Hermitian" begin + @testset "cholesky - Hermitian{Complex}" begin rng, N = MersenneTwister(123456), 3 A = randn(rng, Complex{Float64}, N, N) H = Hermitian(A * A' + I) @@ -827,9 +827,23 @@ end y, back = Zygote.pullback(cholesky, Hmat) y′, back′ = Zygote.pullback(cholesky, H) C̄ = (factors=randn(rng, N, N),) + @test only(back′(C̄)) isa Hermitian + # gradtest does not support complex gradients, even though the pullback exists + d = only(back(C̄)) + d′ = only(back′(C̄)) + @test (d + d')/2 ≈ d′ + end + @testset "cholesky - Hermitian{Real}" begin + rng, N = MersenneTwister(123456), 3 + A = randn(rng, N, N) + H = Hermitian(A * A' + I) + Hmat = Matrix(H) + y, back = Zygote.pullback(cholesky, Hmat) + y′, back′ = Zygote.pullback(cholesky, H) + C̄ = (factors=randn(rng, N, N),) @test back′(C̄)[1] isa Hermitian - @test gradtest(B->cholesky(Hermitian(B)).U, A * A' + I) - @test gradtest(B->logdet(cholesky(Hermitian(B))), A * A' + I) + @test gradtest(B->cholesky(Hermitian(B)).U, Hmat) + @test gradtest(B->logdet(cholesky(Hermitian(B))), Hmat) end end From 3239330ccf42add76a817d57c46cefb19a8ec0f1 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Mon, 20 Jun 2022 12:10:02 +0200 Subject: [PATCH 322/490] Use CR adjoint for `logdet(::Cholesky)` (#1226) * Use CR adjoint for `logdet(::Cholesky)` * Update Project.toml * Update ci.yml * Update Project.toml --- .github/workflows/ci.yml | 2 +- Project.toml | 4 ++-- src/lib/array.jl | 6 ------ 3 files changed, 3 insertions(+), 9 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index bab7876a5..887c985c8 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,7 +17,7 @@ jobs: fail-fast: false matrix: version: - - '1.3' # Replace this with the minimum Julia version that your package supports. + - '1.6' # Replace this with the minimum Julia version that your package supports. 
- '1' # automatically expands to the latest stable 1.x release of Julia - 'nightly' os: diff --git a/Project.toml b/Project.toml index ae5c974d9..bdb6f327b 100644 --- a/Project.toml +++ b/Project.toml @@ -25,7 +25,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" -ChainRules = "1.5" +ChainRules = "1.33" ChainRulesCore = "1.9" ChainRulesTestUtils = "1" DiffRules = "1.4" @@ -38,7 +38,7 @@ NaNMath = "0.3, 1" Requires = "1.1" SpecialFunctions = "1.6, 2" ZygoteRules = "0.2.1" -julia = "1.3" +julia = "1.6" [extras] CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" diff --git a/src/lib/array.jl b/src/lib/array.jl index f492af9e6..548159766 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -741,12 +741,6 @@ end end end -@adjoint function logdet(C::Cholesky) - return logdet(C), function(Δ) - return ((uplo=nothing, info=nothing, factors=Diagonal(2 .* Δ ./ diag(C.factors))),) - end -end - @adjoint function Matrix(S::UniformScaling, i::Integer, j::Integer) return Matrix(S, i, j), Δ -> ((λ=tr(Δ),), nothing, nothing) end From 45389145459d7f4ca892c735abf86b14ff4dc6cd Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Mon, 20 Jun 2022 17:09:21 +0200 Subject: [PATCH 323/490] add a note to docs --- docs/src/utils.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/src/utils.md b/docs/src/utils.md index ce9c3e778..25b5954e4 100644 --- a/docs/src/utils.md +++ b/docs/src/utils.md @@ -13,6 +13,10 @@ Zygote also provides a set of helpful utilities. These are all "user-level" tool in other words you could have written them easily yourself, but they live in Zygote for convenience. +See `ChainRules.ignore_derivatives` if you want to exclude some of your code from the +gradient calculation. This replaces previous Zygote-specific `ignore` and `dropgrad` +functionality. + ```@docs Zygote.withgradient Zygote.withjacobian From 4bb6b4dd4a4b6eb0e40126587a7a170216c97448 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Thu, 23 Jun 2022 12:15:42 +0200 Subject: [PATCH 324/490] deprecate Zygote.@nograd --- Project.toml | 2 +- src/Zygote.jl | 2 +- src/deprecated.jl | 16 ++++++++++++++++ src/forward/lib.jl | 2 +- src/lib/array.jl | 7 ------- src/lib/base.jl | 4 ---- src/lib/buffer.jl | 2 +- src/lib/grad.jl | 12 ------------ src/lib/lib.jl | 2 -- src/lib/number.jl | 3 --- test/lib/number.jl | 6 +++--- 11 files changed, 23 insertions(+), 35 deletions(-) diff --git a/Project.toml b/Project.toml index a35854934..14ed91b5c 100644 --- a/Project.toml +++ b/Project.toml @@ -25,7 +25,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" -ChainRules = "1.35.3" +ChainRules = "1.37" ChainRulesCore = "1.9" ChainRulesTestUtils = "1" DiffRules = "1.4" diff --git a/src/Zygote.jl b/src/Zygote.jl index b1ca50aa9..8a51b14fd 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -58,7 +58,7 @@ include("profiler/Profile.jl") end @init @require Colors="5ae59095-9a9b-59fe-a467-6f913c188581" begin - @nograd Colors.ColorTypes._parameter_upper_bound + @non_differentiable Colors.ColorTypes._parameter_upper_bound(::Any...) 
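# A minimal migration sketch for downstream code (`myfunc` is an illustrative
# user-defined function, not part of this patch). The deprecated spelling
#     Zygote.@nograd myfunc
# becomes:
using ChainRulesCore: @non_differentiable
myfunc(x) = round(Int, x)               # hypothetical function with no useful derivative
@non_differentiable myfunc(::Any...)    # replacement spelling from the deprecation message
# Zygote.gradient(x -> myfunc(x) * x, 2.0) == (2.0,)   # myfunc is treated as a constant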
end using InteractiveUtils diff --git a/src/deprecated.jl b/src/deprecated.jl index 9bc808511..6fe88b5b1 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -49,3 +49,19 @@ macro ignore(ex) $(esc(ex)) end) end + +using MacroTools: @q + +macro nograd(ex) + Base.depwarn( + "`Zygote.@nograd myfunc` is deprecated, use `ChainRulesCore.@non_differentiable myfunc(::Any...)` instead.", + :nograd + ) + isexpr(ex, :tuple) || (ex = Expr(:tuple, ex)) + blk = @q begin end + for f in ex.args + back = MacroTools.@q _ -> ($__source__; nothing) + push!(blk.args, :(@inline Zygote._pullback(::Context, ::Core.Typeof($(esc(f))), args...) = $(esc(f))(args...), $back)) + end + return blk +end diff --git a/src/forward/lib.jl b/src/forward/lib.jl index b297dab41..a5518fd5d 100644 --- a/src/forward/lib.jl +++ b/src/forward/lib.jl @@ -9,7 +9,7 @@ end # TODO figure out why this made a test fail zerolike(x::Union{Module,Type}) = nothing -# TODO: `@nograd` and `@linear` +# TODO: `@non_differentiable` and `@linear` @tangent zerolike(x) = zerolike(x), _ -> zerolike(x) @tangent one(x::Number) = one(x), _ -> zero(x) diff --git a/src/lib/array.jl b/src/lib/array.jl index bbe13669d..70790f757 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -6,8 +6,6 @@ using Distributed: pmap, AbstractWorkerPool @adjoint Array(xs::AbstractArray) = Array(xs), ȳ -> (ȳ,) @adjoint Array(xs::Array) = Array(xs), ȳ -> (ȳ,) -@nograd ones, zeros, Base.OneTo, Colon(), one, zero, sizehint!, count - @adjoint copy(x::AbstractArray) = copy(x), ȳ -> (ȳ,) @adjoint collect(x::Tuple) = collect(x), dy -> (Tuple(dy),) @@ -222,11 +220,6 @@ end end end -for t in subtypes(AbstractWorkerPool) - @nograd t -end -@nograd workers - function _pullback(cx::AContext, ::typeof(collect), g::Base.Generator) y, b = ∇map(cx, g.f, g.iter) back(::Nothing) = nothing diff --git a/src/lib/base.jl b/src/lib/base.jl index fa71d8906..79dfb77b6 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -49,8 +49,6 @@ end # Channels -@nograd Channel - grad_mut(ch::Channel) = Channel(ch.sz_max) @adjoint! function put!(ch::Channel, x) @@ -157,8 +155,6 @@ end @adjoint Base.nameof(x::UnionAll) = nameof(x), _ -> (nothing,) -@nograd typeintersect - # Base.Fix1 and Base.Fix2: https://github.com/FluxML/Zygote.jl/issues/957 @adjoint function (g::Base.Fix1)(y) f = g.f diff --git a/src/lib/buffer.jl b/src/lib/buffer.jl index e62a70041..b3aef17f0 100644 --- a/src/lib/buffer.jl +++ b/src/lib/buffer.jl @@ -1,7 +1,7 @@ grad_mut(b::Buffer) = fill!(similar(b.data, Any), nothing) grad_mut(b::Buffer{T}) where T<:Number = fill!(similar(b.data, float(T)), 0) -@nograd Buffer +@non_differentiable Buffer(::Any...) @adjoint function getindex(b::Buffer, i...) b[i...], function (Δ) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index a522d685a..38347b312 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -1,15 +1,3 @@ -using MacroTools: @q - -macro nograd(ex) - isexpr(ex, :tuple) || (ex = Expr(:tuple, ex)) - blk = @q begin end - for f in ex.args - back = MacroTools.@q _ -> ($__source__; nothing) - push!(blk.args, :(@inline Zygote._pullback(::Context, ::Core.Typeof($(esc(f))), args...) 
= $(esc(f))(args...), $back)) - end - return blk -end - macro which(ex) @capture(ex, f_(args__)) || error("Zygote.@which f(args...)") :(InteractiveUtils.@which adjoint(Context(), $(esc(f)), $(esc.(args)...))) diff --git a/src/lib/lib.jl b/src/lib/lib.jl index f11a74214..22bda1e19 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -38,8 +38,6 @@ function accum(x::RefValue, y::RefValue) end # Core functions -@nograd eps, Base.eval, Core.TypeVar, Core.UnionAll, Symbol - @adjoint deepcopy(x) = deepcopy(x), ȳ -> (ȳ,) @adjoint (::Type{V})(x...) where V<:Val = V(x...), _ -> nothing diff --git a/src/lib/number.jl b/src/lib/number.jl index 4097863c8..296852dbc 100644 --- a/src/lib/number.jl +++ b/src/lib/number.jl @@ -1,6 +1,3 @@ - -@nograd floor, ceil, trunc, round, div - @adjoint Base.literal_pow(::typeof(^), x::Number, ::Val{p}) where {p} = Base.literal_pow(^,x,Val(p)), Δ -> (nothing, Δ * conj(p * Base.literal_pow(^,x,Val(p-1))), nothing) diff --git a/test/lib/number.jl b/test/lib/number.jl index d69241655..ae7b1cb75 100644 --- a/test/lib/number.jl +++ b/test/lib/number.jl @@ -1,7 +1,7 @@ @testset "nograds" begin - @test gradient(floor, 1) === nothing - @test gradient(ceil, 1) === nothing - @test gradient(round, 1) === nothing + @test gradient(floor, 1) === (0.0,) + @test gradient(ceil, 1) === (0.0,) + @test gradient(round, 1) === (0.0,) @test gradient(hash, 1) === nothing @test gradient(div, 1, 2) === nothing end #testset From e9b119743819fe9bc4c244d0b22f27c6a62a15e1 Mon Sep 17 00:00:00 2001 From: Kyle Daruwalla Date: Fri, 24 Jun 2022 12:03:38 +0200 Subject: [PATCH 325/490] Reduce length for error message and add detailed docs --- docs/make.jl | 1 + docs/src/limitations.md | 148 ++++++++++++++++++++++++++++++++++++++++ src/compiler/reverse.jl | 11 ++- src/lib/array.jl | 15 ++-- 4 files changed, 162 insertions(+), 13 deletions(-) create mode 100644 docs/src/limitations.md diff --git a/docs/make.jl b/docs/make.jl index 9d2f549c9..ff8cb28d1 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -11,6 +11,7 @@ makedocs( doctest = true, pages = [ "Home" => "index.md", + "Limitations" => "limitations.md", "Custom Adjoints" => "adjoints.md", "Utilities" => "utils.md", "Complex Differentiation" => "complex.md", diff --git a/docs/src/limitations.md b/docs/src/limitations.md new file mode 100644 index 000000000..0908b0882 --- /dev/null +++ b/docs/src/limitations.md @@ -0,0 +1,148 @@ +# Limitations + +Zygote aims to support differentiating any code you might write in Julia, but it still has a few limitations. Notably, you might encounter errors when trying to differentiate: +- array mutation +- `try`/`catch` statements +- "foreign call" expressions + +In this section, we will introduce examples where each of these errors occurs as well as possible work-arounds. + +## Array mutation + +Array mutation is by far the most commonly encountered Zygote limitation. Unfortunately, supporting it natively in Zygote is tricky, though it may happen eventually. For now, let's focus on what counts as mutation, and how to fix it. + +Here we define a simple mutating function, `f!`, which modifies the elements of its input argument, `x`, in place. +```julia +function f!(x) + x .= 2 .* x + + return x +end +``` +Let's see what happens when we differentiate `f!` +```julia +julia> gradient(rand(3)) do x + sum(f!(x)) + end +ERROR: Mutating arrays is not supported -- called copyto!(Vector{Float64}, ...) +This error occurs when you ask Zygote to differentiate operations that change +the elements of arrays in-place (e.g. 
setting values with x .= ...) + +Possible fixes: +- avoid mutating operations (preferred) +- or read the documentation and solutions for this error + https://fluxml.ai/Zygote.jl/dev/limitations.html#Array-mutation + +Stacktrace: + ... +``` +We got an error message and a long stacktrace. The error informs us that our code performs array mutation by calling `copyto!` (we might not have directly called this function, but it is being invoked somewhere in the call stack). We see that our code includes `x .= ...` which is given as an example of array mutation. Other examples of mutating operations include: +- setting values (`x .= ...`) +- appending/popping values (`push!(x, v)` / `pop!(x)`) +- calling mutating functions (`mul!(C, A, B)`) + +!!! warning + + Non-mutating functions may also use mutation under the hood. This can be done for performance reasons or code re-use. + +```julia +function g!(x, y) + x .= 2 .* y + + return x +end +g(y) = g!(similar(y), y) +``` +Here `g` is a "non-mutating function," and it indeed does not mutate `y`, its only argument. But it still allocates a new array and calls `g!` on this array which will result in a mutating operation. You may encounter such functions when working with another package. + +Specifically for array mutation, we can use [`Zygote.Buffer`](@ref) to re-write our function. For example, let's fix the function `g!` above. +```julia +function g!(x, y) + x .= 2 .* y + + return x +end + +function g(y) + x = Zygote.Buffer(y) # Buffer supports syntax like similar + g!(x, y) + return copy(x) # this step makes the Buffer immutable (w/o actually copying) +end + +julia> gradient(rand(3)) do y + sum(g(y)) + end +([2.0, 2.0, 2.0],) +``` + +## Try-catch statements + +Any expressions involving `try`/`catch` statements is not supported. +```julia +function tryme(x) + try + 2 * x + catch e + throw(e) + end +end + +julia> gradient(rand(3)) do x + sum(tryme(x)) + end +ERROR: Compiling Tuple{typeof(tryme), Vector{Float64}}: try/catch is not supported. +Refer to the Zygote documentation for fixes. +https://fluxml.ai/Zygote.jl/dev/limitations.html#try-catch-statements-1 + +Stacktrace: + ... +``` +Here `tryme` uses a `try`/`catch` statement, and Zygote throws an error when trying to differentiate it as expected. `try`/`catch` expressions are used for error handling, but they are less common in Julia compared to some other languages. + +## Foreign call expressions + +Foreign call expressions refer to expressions that call external libraries such as code written in C or Fortran. You may want to read more about these calls in the [Julia documentation](https://docs.julialang.org/en/v1/manual/calling-c-and-fortran-code/). Scientific computing libraries in Julia may call established C or Fortran libraries under the hood. Since the underlying code for a foreign call expression is not in Julia, it is not possible for Zygote to differentiate this expression. + +Below, we define a function that calls a standard C function, `clock`. This function returns the Unix clock as an `Int32`. +```julia +julia> jclock(x) = ccall(:clock, Int32, ()) * 2 +jclock (generic function with 1 method) + +julia> jclock(2) +30921278 + +julia> gradient(jclock, rand()) +ERROR: Can't differentiate foreigncall expression +You might want to check the Zygote limitations documentation. +https://fluxml.ai/Zygote.jl/dev/limitations.html + +Stacktrace: + ... +``` +`jclock` will multiply the result of our C function by an argument. 
When we try to differentiate with respect to this argument, we get an `foreigncall` error. + +## Solutions + +For all of the errors above, the suggested solutions are similar. You have the following possible work arounds available (in order of preference): +1. avoid the error-inducing operation (e.g. do not use mutating functions) +2. define a [custom `ChainRulesCore.rrule`](https://juliadiff.org/ChainRulesCore.jl/stable/rule_author/example.html) +3. open an [issue on Zygote](https://github.com/FluxML/Zygote.jl/issues) + +Avoiding the operation is simple, just don't do it! If you are using a mutating function, try to use a non-mutating variant. If you are using `try`/`catch` statements, try to use more graceful error handling such as returning `nothing` or another sentinel value. Recall that array mutation can also be avoided by using [`Zygote.Buffer`](@ref) as discussed above. + +Sometimes, we cannot avoid expressions that Zygote cannot differentiate, but we may be able to manually derive a gradient. In these cases, you can write [a custom `rrule`](https://juliadiff.org/ChainRulesCore.jl/stable/rule_author/example.html) using ChainRules.jl. Please refer to the linked ChainRules documentation for how to do this. _This solution is the only solution available for foreign call expressions._ Below, we provide a custom `rrule` for `jclock`. +```julia +jclock(x) = ccall(:clock, Int32, ()) * x + +function ChainRulesCore.rrule(::typeof(jclock), x) + y = jclock(x) + pb(ȳ) = (ChainRulesCore.NoTangent(), ȳ * y) + + return y, pb +end + +julia> gradient(jclock, rand()) +(674298.4243400148,) +``` + +Lastly, if the code causing problems can be fixed, but it is package code instead of your code, then you should open an issue. For functions built into Julia or its standard libraries, you can open an issue with Zygote.jl or ChainRules.jl. For functions in other packages, you can open an issue with the corresponding package issue tracker. diff --git a/src/compiler/reverse.jl b/src/compiler/reverse.jl index e746684f7..b6dec7caa 100644 --- a/src/compiler/reverse.jl +++ b/src/compiler/reverse.jl @@ -118,7 +118,10 @@ function instrument(ir::IR) if isexpr(ex, :foreigncall, :isdefined) continue elseif isexpr(ex, :enter, :leave) - error("try/catch is not supported.") + error("""try/catch is not supported. + Refer to the Zygote documentation for fixes. + https://fluxml.ai/Zygote.jl/dev/limitations.html#Try-catch-statements-1 + """) elseif isexpr(ex, :(=)) @assert ex.args[1] isa GlobalRef pr[v] = xcall(Zygote, :global_set, QuoteNode(ex.args[1]), ex.args[2]) @@ -277,7 +280,11 @@ function adjoint(pr::Primal) grads[ex.val] = grads[v] elseif isexpr(ex, GlobalRef, :call, :isdefined, :inbounds, :meta, :loopinfo) elseif isexpr(ex) - push!(rb, stmt(xcall(Base, :error, "Can't differentiate $(ex.head) expression"), + push!(rb, stmt(xcall(Base, :error, """ + Can't differentiate $(ex.head) expression. + You might want to check the Zygote limitations documentation. + https://fluxml.ai/Zygote.jl/dev/limitations.html + """), line = b[v].line)) else # A literal value continue diff --git a/src/lib/array.jl b/src/lib/array.jl index d1d542b8e..1f1e12d94 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -70,19 +70,12 @@ _droplike(dy::Union{LinearAlgebra.Adjoint, LinearAlgebra.Transpose}, dxv::Abstra _throw_mutation_error(f, args...) = error(""" Mutating arrays is not supported -- called $f($(join(map(typeof, args), ", ")), ...) 
This error occurs when you ask Zygote to differentiate operations that change -the elements of arrays in place. Some common examples: -- setting values (x .= ...) -- appending values (push!(x, v)) -- popping values (pop!(x)) -- calling mutating functions (mul!(C, A, B)) -NOTE: non-mutating functions may use mutation under the hood - for performance or code-reuse. +the elements of arrays in place (e.g. setting values with x .= ...) + Possible fixes: - avoid mutating operations (preferred) -- hide the mutation from Zygote by wrapping the mutating call in a custom rrule - (https://juliadiff.org/ChainRulesCore.jl/stable/rule_author/example.html) -- if the mutation is coming from within a package (i.e. not user code), - then open an issue on Zygote.jl (https://github.com/FluxML/Zygote.jl/issues) +- or read the documentation and solutions for this error + https://fluxml.ai/Zygote.jl/dev/limitations.html#Array-mutation-1 """) @adjoint! setindex!(xs::AbstractArray, x...) = setindex!(xs, x...), From a4eac890f41e7072ef5b571da101b8f498e49b2c Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sat, 25 Jun 2022 18:40:31 -0600 Subject: [PATCH 326/490] rm rules for `maximum`, `minimum`, `dropdims` (#1250) * rm rules for maximum, minimum, dropdims * add test * typo --- src/lib/array.jl | 24 ------------------------ test/gradcheck.jl | 6 ++++++ 2 files changed, 6 insertions(+), 24 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index bbe13669d..a37aa7787 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -313,35 +313,11 @@ end sum(xs, dims = dims), Δ -> (nothing,) end - function _pullback(cx::AContext, ::typeof(prod), f, xs::AbstractArray) y, back = pullback(cx, ((f, xs) -> prod(f.(xs))), f, xs) y, ȳ -> (nothing, back(ȳ)...) 
end -@adjoint function maximum(xs::AbstractArray; dims = :) - max, i = findmax(xs, dims = dims) - max, function (Δ) - Δ isa Real && abs(Δ) <= sqrt(eps(float(Δ))) && return nothing - Δ′ = zero(xs) - Δ′[i] = Δ - return (Δ′,) - end -end - -@adjoint function minimum(xs::AbstractArray; dims = :) - min, i = findmin(xs, dims = dims) - min, function (Δ) - Δ′ = zero(xs) - Δ′[i] = Δ - return (Δ′,) - end -end - -@adjoint function dropdims(xs::AbstractArray; dims) - dropdims(xs, dims = dims), Δ -> (reshape(Δ, size(xs)...),) -end - @adjoint real(x::AbstractArray) = real(x), r̄ -> (real(r̄),) @adjoint conj(x::AbstractArray) = conj(x), r̄ -> (conj(r̄),) @adjoint imag(x::AbstractArray) = imag(x), ī -> (complex.(0, real.(ī)),) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 30c62eb3e..e37e0ea15 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -501,6 +501,12 @@ end @test gradtest(x -> maximum(x, dims=[1, 2]), rand(2, 3, 4)) @test gradient(x -> 1 / maximum(x), [1., 2, 3])[1] == [0, 0, -1/9] + + # issue 1224, second order + f1244(w, x) = sum(maximum((w * x).^2, dims=1)) + g1244(w, x) = sum(gradient(f1244, w, x)[2].^2) + h1244(w, x) = gradient(g1244, w, x)[2] + @test h1244([1 2 3; 4 5 6.0], [7,8,9.0]) ≈ [300608, 375760, 450912] end @testset "minimum" begin From b00ff49abf9ace8af90ec4eb8e6b3a169e194586 Mon Sep 17 00:00:00 2001 From: Saransh Date: Mon, 27 Jun 2022 14:33:15 +0530 Subject: [PATCH 327/490] Run doctests only once --- docs/make.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/make.jl b/docs/make.jl index 9d2f549c9..98f9333f2 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -8,7 +8,7 @@ using Documenter, Zygote makedocs( sitename="Zygote", - doctest = true, + doctest = false, pages = [ "Home" => "index.md", "Custom Adjoints" => "adjoints.md", From fe6ff51c43f7bdf92790d056b2f6ed2c717c99a3 Mon Sep 17 00:00:00 2001 From: Kyle Daruwalla Date: Tue, 28 Jun 2022 18:02:20 +0100 Subject: [PATCH 328/490] More intro on mutation --- docs/src/limitations.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/docs/src/limitations.md b/docs/src/limitations.md index 0908b0882..f74304e97 100644 --- a/docs/src/limitations.md +++ b/docs/src/limitations.md @@ -9,9 +9,11 @@ In this section, we will introduce examples where each of these errors occurs as ## Array mutation -Array mutation is by far the most commonly encountered Zygote limitation. Unfortunately, supporting it natively in Zygote is tricky, though it may happen eventually. For now, let's focus on what counts as mutation, and how to fix it. +Array mutation is by far the most commonly encountered Zygote limitation. -Here we define a simple mutating function, `f!`, which modifies the elements of its input argument, `x`, in place. +Automatic differentiation (AD) systems like Zygote are built on basic principles of calculus where we encounter _pure_ functions. This means that the function, ``y = f(x)``, does not modify ``x`` and only produces the output ``y`` based on ``x``. If we have a chain of functions, such as ``y = h(g(f(x)))``, we can apply the chain rule to differentiate it. AD systems are built to programmatically apply the chain rule to a series of function calls. Unfortunately, typical programs do not behave this way. We might allocate some memory, `x`, then call a function `y = f!(x)` that modifies `x` to produce the output `y`. This mutating behavior is a _side-effect_ of `f!`. 
Side-effects are difficult for AD systems to handle, because the must track changes to mutated variables and store older versions of the variable. For these reasons, Zygote does not handle array mutation for now. + +Let's explore this with a more concrete example. Here we define a simple mutating function, `f!`, which modifies the elements of its input argument, `x`, in place. ```julia function f!(x) x .= 2 .* x From 7604288b9898d31a32eefcc7a23ac04da820e94e Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Wed, 29 Jun 2022 21:33:09 -0600 Subject: [PATCH 329/490] rm rules for `eachslice`, `cumsum` (#1253) * rm rules for eachslice, cumsum * bump * bound chainrules * bump --- Project.toml | 4 ++-- src/lib/array.jl | 31 ------------------------------- 2 files changed, 2 insertions(+), 33 deletions(-) diff --git a/Project.toml b/Project.toml index a35854934..b0d881a56 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.41" +version = "0.6.42" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" @@ -25,7 +25,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" -ChainRules = "1.35.3" +ChainRules = "1.36.2" ChainRulesCore = "1.9" ChainRulesTestUtils = "1" DiffRules = "1.4" diff --git a/src/lib/array.jl b/src/lib/array.jl index 92658e1e0..d1057b560 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -351,37 +351,6 @@ _backvar(xs, Δ, N::Int, mean) = (convert(eltype(xs), 2/N) .* Δ .* (xs .- mean) return s, Δ -> _backvar(xs, Δ ./ (2 .* s), corrected, mean, dims) end -@adjoint function cumsum(xs::AbstractVector; dims::Integer = 1) - dims == 1 || return copy(xs), Δ -> (Δ,) - cumsum(xs), Δ -> (reverse(cumsum(reverse(Δ))),) -end -@adjoint function cumsum(xs::AbstractArray; dims::Integer) - dims <= ndims(xs) || return copy(xs), Δ -> (Δ,) - cumsum(xs; dims=dims), Δ -> begin - (reverse(cumsum(reverse(Δ, dims=dims), dims=dims), dims=dims),) - end -end - -@adjoint eachrow(x::AbstractVecOrMat) = collect(eachrow(x)), dys -> ∇eachslice(dys, x, 1) -@adjoint eachcol(x::AbstractVecOrMat) = collect(eachcol(x)), dys -> ∇eachslice(dys, x, 2) -@adjoint eachslice(x::AbstractArray; dims::Integer) = - collect(eachslice(x; dims=dims)), dys -> ∇eachslice(dys, x, dims) - -function ∇eachslice(dys, x::AbstractArray, dim::Integer) where {TX} - i1 = findfirst(dy -> dy isa AbstractArray, dys) - i1 === nothing && return (zero(x),) # all slices get nothing - T = promote_type(eltype(dys[i1]), eltype(x)) - dx = similar(x, T) - for i in axes(x, dim) - if dys[i] isa AbstractArray - copyto!(selectdim(dx,dim,i), dys[i]) - else - selectdim(dx,dim,i) .= 0 - end - end - (dx,) -end - # LinearAlgebra # ============= From 4777767737b4c95d2cea842933c5b2edae2771b2 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Wed, 29 Jun 2022 21:34:19 -0600 Subject: [PATCH 330/490] un-bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index b0d881a56..9920da241 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.42" +version = "0.6.41" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 6336b60ab392ea4ade0f914db7f37453a97f1a42 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 1 Jul 2022 10:26:15 -0600 
Subject: [PATCH 331/490] rm rules for Statistics (#1252) --- src/lib/array.jl | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index d1057b560..0eef0c64a 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -333,24 +333,6 @@ end @adjoint conj(x::AbstractArray) = conj(x), r̄ -> (conj(r̄),) @adjoint imag(x::AbstractArray) = imag(x), ī -> (complex.(0, real.(ī)),) -@adjoint function mean(xs::AbstractArray; dims = :) - return mean(xs, dims=dims), Δ -> (_backmean(xs,Δ,dims),) -end -_backmean(xs, Δ, ::Colon) = zero(xs) .+ Δ ./ length(xs) -_backmean(xs, Δ, dims) = zero(xs) .+ Δ ./ mapreduce(i -> size(xs,i),*,dims) - -@adjoint function Statistics.var(xs::AbstractArray; corrected::Bool=true, dims=:, mean=mean(xs, dims=dims)) - return Statistics.var(xs; corrected=corrected, mean=mean, dims=dims), Δ -> _backvar(xs, Δ, corrected, mean, dims) -end -_backvar(xs, Δ, corrected::Bool, mean, dims) = _backvar(xs, Δ, mapreduce(i -> size(xs,i),*,dims) - corrected, mean) -_backvar(xs, Δ, corrected::Bool, mean, ::Colon) = _backvar(xs, Δ, length(xs) - corrected, mean) -_backvar(xs, Δ, N::Int, mean) = (convert(eltype(xs), 2/N) .* Δ .* (xs .- mean),) - -@adjoint function Statistics.std(xs::AbstractArray; corrected::Bool=true, dims=:, mean=mean(xs, dims=dims)) - s = Statistics.std(xs; corrected=corrected, mean=mean, dims=dims) - return s, Δ -> _backvar(xs, Δ ./ (2 .* s), corrected, mean, dims) -end - # LinearAlgebra # ============= From ed84d53a97df7991c8688f797c0989e62101fdee Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 12 Jul 2022 23:23:48 -0400 Subject: [PATCH 332/490] rm `adjoint` & `transpose` adjoints (#1259) * rm adjoint + transpose adjoint * restore parent adjoints --- src/lib/array.jl | 27 +-------------------------- 1 file changed, 1 insertion(+), 26 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 0eef0c64a..e4079eb89 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -337,33 +337,8 @@ end # LinearAlgebra # ============= -@adjoint function transpose(x) - back(Δ) = (transpose(Δ),) - back(Δ::NamedTuple{(:parent,)}) = (Δ.parent,) - return transpose(x), back -end - -@adjoint function LinearAlgebra.Transpose(x) - back(Δ) = (LinearAlgebra.Transpose(Δ),) - back(Δ::NamedTuple{(:parent,)}) = (Δ.parent,) - return LinearAlgebra.Transpose(x), back -end - - -@adjoint function Base.adjoint(x) - back(Δ) = (Δ',) - back(Δ::NamedTuple{(:parent,)}) = (Δ.parent,) - return x', back -end - -@adjoint function LinearAlgebra.Adjoint(x) - back(Δ) = (LinearAlgebra.Adjoint(Δ),) - back(Δ::NamedTuple{(:parent,)}) = (Δ.parent,) - return LinearAlgebra.Adjoint(x), back -end - @adjoint parent(x::LinearAlgebra.Adjoint) = parent(x), ȳ -> (LinearAlgebra.Adjoint(ȳ),) -@adjoint parent(x::LinearAlgebra.Transpose) = parent(x), ȳ -> (LinearAlgebra.Transpose(ȳ),) +@adjoint parent(x::LinearAlgebra.Transpose) = parent(x), ȳ -> (LinearAlgebra.Transpose(ȳ),) function _kron(mat1::AbstractMatrix,mat2::AbstractMatrix) m1, n1 = size(mat1) From 995778d0520113b0f22bf9230c6a2ee5ba8ef459 Mon Sep 17 00:00:00 2001 From: "Ziyi (Francis) Yin" <54320031+ziyiyin97@users.noreply.github.com> Date: Tue, 19 Jul 2022 14:47:53 -0400 Subject: [PATCH 333/490] fix the link (#1265) --- docs/src/limitations.md | 6 +++--- src/compiler/reverse.jl | 4 ++-- src/lib/array.jl | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/src/limitations.md b/docs/src/limitations.md index f74304e97..5b15afac3 100644 --- 
a/docs/src/limitations.md +++ b/docs/src/limitations.md @@ -33,7 +33,7 @@ the elements of arrays in-place (e.g. setting values with x .= ...) Possible fixes: - avoid mutating operations (preferred) - or read the documentation and solutions for this error - https://fluxml.ai/Zygote.jl/dev/limitations.html#Array-mutation + https://fluxml.ai/Zygote.jl/latest/limitations Stacktrace: ... @@ -94,7 +94,7 @@ julia> gradient(rand(3)) do x end ERROR: Compiling Tuple{typeof(tryme), Vector{Float64}}: try/catch is not supported. Refer to the Zygote documentation for fixes. -https://fluxml.ai/Zygote.jl/dev/limitations.html#try-catch-statements-1 +https://fluxml.ai/Zygote.jl/latest/limitations Stacktrace: ... @@ -116,7 +116,7 @@ julia> jclock(2) julia> gradient(jclock, rand()) ERROR: Can't differentiate foreigncall expression You might want to check the Zygote limitations documentation. -https://fluxml.ai/Zygote.jl/dev/limitations.html +https://fluxml.ai/Zygote.jl/latest/limitations Stacktrace: ... diff --git a/src/compiler/reverse.jl b/src/compiler/reverse.jl index b6dec7caa..ba10ea5b1 100644 --- a/src/compiler/reverse.jl +++ b/src/compiler/reverse.jl @@ -120,7 +120,7 @@ function instrument(ir::IR) elseif isexpr(ex, :enter, :leave) error("""try/catch is not supported. Refer to the Zygote documentation for fixes. - https://fluxml.ai/Zygote.jl/dev/limitations.html#Try-catch-statements-1 + https://fluxml.ai/Zygote.jl/latest/limitations """) elseif isexpr(ex, :(=)) @assert ex.args[1] isa GlobalRef @@ -283,7 +283,7 @@ function adjoint(pr::Primal) push!(rb, stmt(xcall(Base, :error, """ Can't differentiate $(ex.head) expression. You might want to check the Zygote limitations documentation. - https://fluxml.ai/Zygote.jl/dev/limitations.html + https://fluxml.ai/Zygote.jl/latest/limitations """), line = b[v].line)) else # A literal value diff --git a/src/lib/array.jl b/src/lib/array.jl index e4079eb89..9496fdf32 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -75,7 +75,7 @@ the elements of arrays in place (e.g. setting values with x .= ...) Possible fixes: - avoid mutating operations (preferred) - or read the documentation and solutions for this error - https://fluxml.ai/Zygote.jl/dev/limitations.html#Array-mutation-1 + https://fluxml.ai/Zygote.jl/latest/limitations """) @adjoint! setindex!(xs::AbstractArray, x...) 
= setindex!(xs, x...), From 5ffbd43f70d85ed53ab5ca2cb4f281158414706f Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Tue, 26 Jul 2022 15:38:50 -0400 Subject: [PATCH 334/490] Replace `@require CUDA` with `using GPUArraysCore` (#1272) * require GPUArrays instead of CUDA * more * change to unconditionally load GPUArraysCore * add GPUArrays dep * trivial trigger commit --- Project.toml | 4 ++++ src/lib/broadcast.jl | 29 +++++++++-------------------- 2 files changed, 13 insertions(+), 20 deletions(-) diff --git a/Project.toml b/Project.toml index 9920da241..15f08ad2b 100644 --- a/Project.toml +++ b/Project.toml @@ -10,6 +10,8 @@ DiffRules = "b552c78f-8df3-52c6-915a-8e097449b14b" Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b" FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b" ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" +GPUArrays = "0c68f7d7-f131-5f86-a1c3-88cf8149b2d7" # not loaded, just a version bound +GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527" IRTools = "7869d1d1-7146-5819-86e3-90919afe41df" InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240" LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" @@ -31,6 +33,8 @@ ChainRulesTestUtils = "1" DiffRules = "1.4" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12, 0.13" ForwardDiff = "0.10" +GPUArrays = "8.4.2" # not loaded, just a version bound +GPUArraysCore = "0.1.1" IRTools = "0.4.4" LogExpFunctions = "0.3.1" MacroTools = "0.5" diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 6dbfdb829..b3c16e823 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -253,43 +253,32 @@ end return y, bc_fwd_back end -@init @require CUDA="052768ef-5323-5732-b1bb-66c8b64840ba" begin +using GPUArraysCore # replaces @require CUDA block, weird indenting to preserve git blame - const CuArrayStyle = CUDA.AbstractGPUArrayStyle - - if isdefined(CUDA, :cufunc) # CUDA < 3.0 - - @eval @adjoint broadcasted(::CuArrayStyle, f, args...) = - broadcast_forward(CUDA.cufunc(f), args...) - - else # CUDA >= 3.0 -- don't need cufunc(f). # Ordinary broadcasting calls broadcast_forward anyway when certain its' safe, # so perhaps this can be deleted? Possible edge case here: # https://github.com/FluxML/Zygote.jl/pull/1018#issuecomment-873629415 + @adjoint broadcasted(::AbstractGPUArrayStyle, f, args...) = + broadcast_forward(f, args...) - @eval @adjoint broadcasted(::CuArrayStyle, f, args...) = - broadcast_forward(f, args...) - - end - - @adjoint (::Type{T})(xs::Array) where {T <: CUDA.CuArray} = + @adjoint (::Type{T})(xs::Array) where {T <: AbstractGPUArray} = T(xs), Δ -> (convert(Array, Δ), ) - @adjoint function sum(xs::CUDA.AbstractGPUArray; dims = :) + @adjoint function sum(xs::AbstractGPUArray; dims = :) placeholder = similar(xs) sum(xs, dims = dims), Δ -> (placeholder .= Δ,) end # Make sure sum(f, ::CuArray) uses broadcase through forward-mode defined above # Not the ChainRules.rrule which will use the Zygote.Context and thus not be GPU compatible - @adjoint function sum(f, xs::CUDA.AbstractGPUArray; kws...) + @adjoint function sum(f, xs::AbstractGPUArray; kws...) 
@assert !haskey(kws, :init) # TODO add init support (julia 1.6) return pullback(__context__, (f, xs) -> sum(f.(xs); kws...), f, xs) end - @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:CUDA.AbstractGPUArray} + @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:AbstractGPUArray} Base.convert(T, xs), Δ -> (nothing, Base.convert(Array, Δ),) end - @eval pull_block_vert(sz, Δ::CUDA.CuArray, A::Number) = CUDA.@allowscalar Δ[sz] -end + pull_block_vert(sz, Δ::AbstractGPUArray, A::Number) = @allowscalar Δ[sz] + From c822e9e77fa76647ba2a39896cbcdee604e9aa9f Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Fri, 29 Jul 2022 19:59:10 -0700 Subject: [PATCH 335/490] Bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 55581f932..1ac51616e 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.41" +version = "0.6.42" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From be5b47fad5fc9c0a3e22f239ec8517df60ffdf4c Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Wed, 22 Jun 2022 19:35:04 -0700 Subject: [PATCH 336/490] Improved type stability with explicit params We can disable accumulating (implicit) parameters to the gradient cache in explicit mode. This can dramatically improve type stability because `accum_param` will return a `Union{Nothing, [grad type]}` otherwise. --- src/compiler/interface.jl | 34 ++++++++++++++++++++++++++-------- src/lib/array.jl | 4 ++-- src/lib/broadcast.jl | 12 ++++++++---- src/lib/lib.jl | 3 ++- test/compiler.jl | 17 +++++++++++------ 5 files changed, 49 insertions(+), 21 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index d5428e97e..f429102f6 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -4,11 +4,11 @@ using Core: Typeof import Base: copy!, IdSet import Base.Broadcast: broadcasted, materialize! -mutable struct Context <: AContext +mutable struct Context{I} <: AContext cache::Union{IdDict{Any,Any},Nothing} end -Context() = Context(nothing) +Context() = Context{false}(nothing) cache(cx::Context) = cx.cache === nothing ? (cx.cache = IdDict()) : cx.cache @@ -36,10 +36,28 @@ _pullback(f, args...) = _pullback(Context(), f, args...) tailmemaybe(::Nothing) = nothing tailmemaybe(x::Tuple) = Base.tail(x) -function pullback(f, args...) - y, back = _pullback(f, args...) +@inline pullback(f, args...) = pullback(f, Context(), args...) +function pullback(f, cx::AContext, args...) + y, back = _pullback(cx, f, args...) y, Δ -> tailmemaybe(back(Δ)) end +function pullback(cx::Context, f, args...) + ChainRulesCore.ignore_derivatives() do + @warn """ + Incorrect argument order for pullback, please use: + + pullback(f, __context__::Context, args) + + instead of: + + pullback(__context__::Context, f, args) + + This is usually caused by a call to pullback in a higher-order @adjoint. + The above warning will become an error in Zygote 0.7. + """ + end + return pullback(f, cx, args...) +end sensitivity(y::Number) = one(y) sensitivity(y::Complex) = error("Output is complex, so the gradient is not defined.") @@ -334,21 +352,21 @@ function Base.map(f, gs1::Grads, gss::ADictOrGrads...) end function Base.map!(f, gsout::Grads, gss::ADictOrGrads...) - all(issetequal(gsout.params, keys(gs)) for gs in gss) || + all(issetequal(gsout.params, keys(gs)) for gs in gss) || throw(ArgumentError("map! 
expects Grads objects with the same Params.")) for p in gsout.params - gsout[p] = f((_getformap(gs, p) for gs in gss)...) + gsout[p] = f((_getformap(gs, p) for gs in gss)...) end return gsout end function _getformap(gs, p) g = gs[p] - isnothing(g) ? fill!(similar(p), 0) : g + isnothing(g) ? fill!(similar(p), 0) : g end function pullback(f, ps::Params) - cx = Context() + cx = Context{true}(nothing) y, back = _pullback(cx, f) y, function (Δ) for p in ps diff --git a/src/lib/array.jl b/src/lib/array.jl index 4e72713d9..293801b21 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -310,7 +310,7 @@ end @adjoint function sum(f, xs::AbstractArray{<:AbstractArray}; kws...) @assert !haskey(kws, :init) # TODO add init support (julia 1.6) - return pullback(__context__, (f, xs) -> sum(f.(xs); kws...), f, xs) + return pullback((f, xs) -> sum(f.(xs); kws...), __context__, f, xs) end @adjoint function sum(xs::AbstractArray{Bool}; dims = :) @@ -318,7 +318,7 @@ end end function _pullback(cx::AContext, ::typeof(prod), f, xs::AbstractArray) - y, back = pullback(cx, ((f, xs) -> prod(f.(xs))), f, xs) + y, back = pullback((f, xs) -> prod(f.(xs)), cx, f, xs) y, ȳ -> (nothing, back(ȳ)...) end diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index b3c16e823..8c0d3c54c 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -30,6 +30,10 @@ using Base.Broadcast: Broadcasted, AbstractArrayStyle, broadcasted, materialize # Utilities # ========= +# ChainRules already marks this non-differentiable, +# But inference can still give up because of the Zygote -> CR wrapper layer +@nograd Broadcast.combine_styles + accum_sum(xs; dims = :) = reduce(accum, xs, dims = dims) # Work around reducedim_init issue @@ -82,16 +86,16 @@ _minus(::Nothing) = nothing @adjoint broadcasted(::typeof(*), x::Numeric, y::Numeric) = x.*y, Δ -> (nothing, unbroadcast(x, Δ .* conj.(y)), unbroadcast(y, Δ .* conj.(x))) @adjoint broadcasted(::typeof(*), x::Number, y::AbstractArray{<:Number}) = - _pullback(*, x, y) # this uses dot(y,Δ) instead of sum(Δ .* conj.(y)) + _pullback(__context__, *, x, y) # this uses dot(y,Δ) instead of sum(Δ .* conj.(y)) @adjoint broadcasted(::typeof(*), x::AbstractArray{<:Number}, y::Number) = - _pullback(*, x, y) + _pullback(__context__, *, x, y) @adjoint function broadcasted(::typeof(/), x::Numeric, y::Numeric) res = x ./ y res, Δ -> (nothing, unbroadcast(x, Δ ./ conj.(y)), unbroadcast(y, .-Δ .* conj.(res ./ y))) end @adjoint broadcasted(::typeof(/), x::AbstractArray{<:Number}, y::Number) = - _pullback(/, x, y) + _pullback(__context__, /, x, y) @adjoint function broadcasted(::typeof(Base.literal_pow), ::typeof(^), x::Numeric, exp::Val{p}) where p y = Base.literal_pow.(^, x, exp) @@ -273,7 +277,7 @@ using GPUArraysCore # replaces @require CUDA block, weird indenting to preserve # Not the ChainRules.rrule which will use the Zygote.Context and thus not be GPU compatible @adjoint function sum(f, xs::AbstractGPUArray; kws...) @assert !haskey(kws, :init) # TODO add init support (julia 1.6) - return pullback(__context__, (f, xs) -> sum(f.(xs); kws...), f, xs) + return pullback((f, xs) -> sum(f.(xs); kws...), __context__, f, xs) end @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:AbstractGPUArray} diff --git a/src/lib/lib.jl b/src/lib/lib.jl index 22bda1e19..52a734809 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -21,7 +21,7 @@ accum(x, y) = accum(x, y, zs...) = accum(accum(x, y), zs...) -accum(x::Tuple, ys::Tuple...) = accum.(x, ys...) +accum(x::Tuple, ys::Tuple...) = map(accum, x, ys...) 
accum(x::AbstractArray, ys::AbstractArray...) = accum.(x, ys...) @generated function accum(x::NamedTuple, y::NamedTuple) @@ -48,6 +48,7 @@ end @adjoint Base.typeassert(x, T) = Base.typeassert(x, T), Δ -> (Δ, nothing) +accum_param(::Context{false}, _, Δ) = Δ @generated function accum_param(cx::Context, x, Δ) isbitstype(x) && return :(Δ) quote diff --git a/test/compiler.jl b/test/compiler.jl index bc37d271e..c5ddf1f38 100644 --- a/test/compiler.jl +++ b/test/compiler.jl @@ -1,5 +1,5 @@ using Zygote, Test -using Zygote: pullback, @adjoint +using Zygote: pullback, @adjoint, Context macro test_inferred(ex) :(let res = nothing @@ -160,13 +160,18 @@ end @testset "inference for `getproperty`" begin Gaussian = _Gaussian(:getproperty) g = Gaussian(randn(3), randn(3, 3)) - y, back = @inferred pullback(x -> x.m, g) - @test y == getfield(g, :m) - # This type instability is due to the handling of non-bitstypes in `accum_param` + y_explicit, back_explicit = @inferred pullback(x -> x.m, g) + y_implicit, back_implicit = @inferred pullback(x -> x.m, Context{true}(nothing), g) + @test y_explicit == y_implicit == getfield(g, :m) + + ∇args = ((m = [1.0, 0.0, 0.0], P = nothing),) if VERSION > v"1.7-" - @test Base.return_types(back, Tuple{Vector{Float64}}) == Any[Union{Tuple{Nothing}, typeof(((m = [1.0, 0.0, 0.0], P = nothing),))}] + # This type instability is due to the handling of non-bitstypes in `accum_param` + @test Base.return_types(back_implicit, Tuple{Vector{Float64}}) == Any[Union{Tuple{Nothing}, typeof(∇args)}] + # But the same should infer if implicit parameters are disabled + @test Base.return_types(back_explicit, Tuple{Vector{Float64}}) == Any[typeof(∇args)] end - @test back([1., 0, 0]) == ((m = [1.0, 0.0, 0.0], P = nothing),) + @test back_explicit([1., 0, 0]) == back_implicit([1., 0, 0]) == ∇args Base.getproperty(g::Gaussian, s::Symbol) = 2getfield(g, s) y, back = pullback(x -> x.m, g) From e9a60757196368f8999e2413f2658a3d9cfffab4 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Fri, 24 Jun 2022 12:43:33 -0700 Subject: [PATCH 337/490] basic comment for Context{I} --- src/compiler/interface.jl | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index f429102f6..ee2a69528 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -4,6 +4,9 @@ using Core: Typeof import Base: copy!, IdSet import Base.Broadcast: broadcasted, materialize! +# Internal container used to track accumulated gradients of mutable types (including params). +# Type param I ∈ (true, false) indicates whether implicit params are in use. +# By default, this should be false unless pullback(f, ::Params) is called. mutable struct Context{I} <: AContext cache::Union{IdDict{Any,Any},Nothing} end @@ -47,11 +50,11 @@ function pullback(cx::Context, f, args...) Incorrect argument order for pullback, please use: pullback(f, __context__::Context, args) - + instead of: pullback(__context__::Context, f, args) - + This is usually caused by a call to pullback in a higher-order @adjoint. The above warning will become an error in Zygote 0.7. 
""" From 3433cdd310cf3f262ea72370caba13028d8d4e0e Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sun, 31 Jul 2022 08:36:57 -0700 Subject: [PATCH 338/490] Add accum_param tests Co-authored-by: Michael Abbott <32575566+mcabbott@users.noreply.github.com> --- test/features.jl | 61 ++++++++++++++++++++++++++++++++++++++++++++---- 1 file changed, 57 insertions(+), 4 deletions(-) diff --git a/test/features.jl b/test/features.jl index cdfe7329e..d4f68d36b 100644 --- a/test/features.jl +++ b/test/features.jl @@ -476,7 +476,7 @@ end @test_broken gradient(x -> abs2(x[1].x) + 7 * x[1].x.re, [Ref(1+im)]) == ([(x = 9.0 + 2.0im,)],) @test_broken gradient(x -> abs2(x[1].x) + 7 * real(x[1].x), [Ref(1+im)]) == ([(x = 9.0 + 2.0im,)],) # worked on 0.6.0, 0.6.20 - @test_broken gradient(x -> abs2(x[].x) + 7 * real(x[].x), Ref(Ref(1+im))) == ((x = 9.0 + 2.0im,),) # gives nothing, same in 0.6.0 + @test gradient(x -> abs2(x[].x) + 7 * real(x[].x), Ref(Ref(1+im))) == ((x = (x = 9.0 + 2.0im,),),) # gave `nothing` from 0.6.0 to 0.6.41 # Array of mutables: @test gradient(x -> sum(getindex.(x).^2), Ref.(1:3))[1] == [(;x=2i) for i in 1:3] @@ -490,6 +490,59 @@ end @test gradient(x -> sum(sum, Ref(x) .* [1,2,3]), [4,5]) == ([6.0, 6.0],) end +@testset "mutable accum_param bugs" begin + mutable struct Mut{T}; x::T; end + struct Imm{T}; x::T; end + + # Indexing a tuple containing a mutable struct gave `nothing` + x1 = (Mut(3.0),) + x2 = (Imm(3.0),) + x3 = (Ref(3.0),) + @test gradient(x -> x[1].x^2, x1)[1] == ((x = 6.0,),) # fails on v0.6.0 v0.6.41 + @test gradient(x -> x[1].x^2, x2)[1] == ((x = 6.0,),) + @test gradient(x -> x[1].x^2, x3)[1] == ((x = 6.0,),) # fails on v0.6.0 v0.6.41 + i1 = 1 + @test gradient(x -> x[i1].x^2, x1)[1] == ((x = 6.0,),) # fails on v0.6.0 v0.6.41 + @test gradient(x -> x[i1].x^2, x2)[1] == ((x = 6.0,),) + @test gradient(x -> x[i1].x^2, x3)[1] == ((x = 6.0,),) # fails on v0.6.0 v0.6.41 + + @test gradient(x -> x[1][1].x^2, [x1])[1] == [((x = 6.0,),)] # fails on v0.6.0 v0.6.41 + @test gradient(x -> x[1][1].x^2, [x2])[1] == [((x = 6.0,),)] + @test gradient(x -> x[1][1].x^2, [x3])[1] == [((x = 6.0,),)] # fails on v0.6.0 v0.6.41 + + # When `getfield` returns a mutable struct, it gave `nothing`: + x4 = Imm(Mut(4.0)) + x5 = Mut(Mut(4.0)) + x6 = Imm(Imm(4.0)) + @test gradient(x -> x.x.x^3, x4)[1] == (x = (x = 48.0,),) # fails on v0.6.0 v0.6.41 + @test gradient(x -> x.x.x^3, x5)[1] == (x = (x = 48.0,),) # fails on v0.6.0 + @test gradient(x -> x.x.x^3, x6)[1] == (x = (x = 48.0,),) # fails on v0.6.41 + + @test gradient(x -> x[2].x.x^3, [x4, x4])[1] == [nothing, (x = (x = 48.0,),)] # fails on v0.6.0 v0.6.41 + @test gradient(x -> x[2].x.x^3, [x4, x5])[1] == [nothing, (x = (x = 48.0,),)] # fails on v0.6.0 + @test gradient(x -> x[2].x.x^3, [x4, x6])[1] == [nothing, (x = (x = 48.0,),)] # fails on v0.6.41 + + # Check when using implicit parameters, Params cases used to pass: + y1 = [3.0] + y2 = (Mut(y1),) + y3 = (Imm(y1),) + @test gradient(x -> sum(x[1].x)^2, y2)[1] == ((x = [6.0],),) # fails on v0.6.0 v0.6.41 + @test gradient(() -> sum(y2[1].x)^2, Params([y1]))[y1] == [6.0] + @test gradient(x -> sum(x[1].x)^2, y3)[1] == ((x = [6.0],),) + @test gradient(() -> sum(y3[1].x)^2, Params([y1]))[y1] == [6.0] + + @test gradient(x -> sum(x[1].x .+ x[1].x)^3, y2)[1] == ((x = [216.0],),) # fails on v0.6.0 v0.6.41 + @test gradient(() -> sum(y2[1].x .+ y2[1].x)^3, Params([y1]))[y1] == [216.0] + @test gradient(x -> sum(x[1].x .+ x[1].x)^3, y3)[1] == ((x = [216.0],),) + @test gradient(() -> sum(y3[1].x .+ y3[1].x)^3, 
Params([y1]))[y1] == [216.0] + + i1 = 1 + @test gradient(x -> sum(x[i1].x .+ x[1].x)^3, y2)[1] == ((x = [216.0],),) # fails on v0.6.0 v0.6.41 + @test gradient(() -> sum(y2[i1].x .+ y2[1].x)^3, Params([y1]))[y1] == [216.0] + @test gradient(x -> sum(x[i1].x .+ x[1].x)^3, y3)[1] == ((x = [216.0],),) + @test gradient(() -> sum(y3[i1].x .+ y3[1].x)^3, Params([y1]))[y1] == [216.0] +end + @testset "NamedTuples" begin @test gradient(x -> x.a, (a=1, b=2)) == ((a = 1, b = nothing),) @test gradient(x -> x[1].a, [(a=1, b=2)]) == ([(a = 1, b = nothing)],) @@ -517,7 +570,7 @@ end @test (x->10*(x => 2)[2])'(100) === nothing @test gradient(x-> (:x => x)[2], 17) == (1,) - + d = Dict(:x=>1.0, :y=>3.0); @test gradient(d -> Dict(:x => d[:x])[:x], d) == (Dict(:x => 1),) end @@ -546,7 +599,7 @@ end # zip if VERSION >= v"1.5" # On Julia 1.4 and earlier, [x/y for (x,y) in zip(10:14, 1:10)] is a DimensionMismatch, - # while on 1.5 - 1.7 it stops early. + # while on 1.5 - 1.7 it stops early. @test gradient(10:14, 1:10) do xs, ys sum([x/y for (x,y) in zip(xs, ys)]) @@ -608,7 +661,7 @@ end # Iterators.Product with enumerate @test gradient([2 3; 4 5]) do xs - sum([x^i+y for (i,x) in enumerate(xs), y in xs]) + sum([x^i+y for (i,x) in enumerate(xs), y in xs]) end == ([8 112; 36 2004],) end From c098f37a643223e7e4f397abf9c3ea6c8c542325 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Thu, 28 Jul 2022 13:20:39 +0200 Subject: [PATCH 339/490] number adjoints to rrules --- src/lib/number.jl | 63 ++++++++++++++++++++++++++++++++++------------ test/lib/number.jl | 51 +++++++++++++++++++++++++++++++------ 2 files changed, 91 insertions(+), 23 deletions(-) diff --git a/src/lib/number.jl b/src/lib/number.jl index 296852dbc..0e629518d 100644 --- a/src/lib/number.jl +++ b/src/lib/number.jl @@ -1,29 +1,60 @@ -@adjoint Base.literal_pow(::typeof(^), x::Number, ::Val{p}) where {p} = - Base.literal_pow(^,x,Val(p)), - Δ -> (nothing, Δ * conj(p * Base.literal_pow(^,x,Val(p-1))), nothing) +function ChainRulesCore.rrule( + ::ZygoteRuleConfig, ::typeof(Base.literal_pow), ::typeof(^), x::Number, ::Val{p} +) where {p} + function literal_pow_pullback(Δ) + dx = Δ * conj(p * Base.literal_pow(^,x,Val(p-1))) + return (NoTangent(), NoTangent(), dx, NoTangent()) + end + return Base.literal_pow(^,x,Val(p)), literal_pow_pullback +end -@adjoint Base.convert(T::Type{<:Real}, x::Real) = convert(T, x), ȳ -> (nothing, ȳ) -@adjoint (T::Type{<:Real})(x::Real) = T(x), ȳ -> (nothing, ȳ) +function ChainRulesCore.rrule(::ZygoteRuleConfig, T::Type{<:Real}, x::Real) + Real_pullback(Δ) = (NoTangent(), Δ) + return T(x), Real_pullback +end for T in Base.uniontypes(Core.BuiltinInts) - @adjoint (::Type{T})(x::Core.BuiltinInts) = T(x), Δ -> (Δ,) + @eval function ChainRulesCore.rrule(::ZygoteRuleConfig, ::Type{$T}, x::Core.BuiltinInts) + IntX_pullback(Δ) = (NoTangent(), Δ) + return $T(x), IntX_pullback + end end -@adjoint Base.:+(xs::Number...) = +(xs...), Δ -> map(_ -> Δ, xs) +function ChainRulesCore.rrule(::ZygoteRuleConfig, ::typeof(+), xs::Number...) + plus_pullback(Δ) = (NoTangent(), map(_ -> Δ, xs)...) 
+ return +(xs...), plus_pullback +end -@adjoint a // b = (a // b, c̄ -> (c̄ * 1//b, - c̄ * a // b // b)) +function ChainRulesCore.rrule(::ZygoteRuleConfig, ::typeof(//), a, b) + divide_pullback(r̄) = (NoTangent(), r̄ * 1//b, - r̄ * a // b // b) + return a // b, divide_pullback +end # Complex Numbers -@adjoint (T::Type{<:Complex})(re, im) = T(re, im), c̄ -> (nothing, real(c̄), imag(c̄)) +function ChainRulesCore.rrule(::ZygoteRuleConfig, T::Type{<:Complex}, r, i) + Complex_pullback(c̄) = (NoTangent(), real(c̄), imag(c̄)) + return T(r, i), Complex_pullback +end # we define these here because ChainRules.jl only defines them for x::Union{Real,Complex} -@adjoint abs2(x::Number) = abs2(x), Δ -> (real(Δ)*(x + x),) -@adjoint real(x::Number) = real(x), r̄ -> (real(r̄),) -@adjoint conj(x::Number) = conj(x), r̄ -> (conj(r̄),) -@adjoint imag(x::Number) = imag(x), ī -> (real(ī)*im,) +function ChainRulesCore.rrule(::ZygoteRuleConfig, ::typeof(abs2), x::Number) + abs2_pullback(Δ) = (NoTangent(), real(Δ)*(x + x)) + return abs2(x), abs2_pullback +end -# for real x, ChainRules pulls back a zero real adjoint, whereas we treat x -# as embedded in the complex numbers and pull back a pure imaginary adjoint -@adjoint imag(x::Real) = zero(x), ī -> (real(ī)*im,) +function ChainRulesCore.rrule(::ZygoteRuleConfig, ::typeof(real), x::Number) + real_pullback(r̄) = (NoTangent(), real(r̄)) + return real(x), real_pullback +end + +function ChainRulesCore.rrule(::ZygoteRuleConfig, ::typeof(conj), x::Number) + conj_pullback(c̄) = (NoTangent(), conj(c̄)) + return conj(x), conj_pullback +end + +function ChainRulesCore.rrule(::ZygoteRuleConfig, ::typeof(imag), x::Number) + imag_pullback(ī) = (NoTangent(), real(ī)*im) + return imag(x), imag_pullback +end diff --git a/test/lib/number.jl b/test/lib/number.jl index ae7b1cb75..ce0a64bef 100644 --- a/test/lib/number.jl +++ b/test/lib/number.jl @@ -1,7 +1,44 @@ -@testset "nograds" begin - @test gradient(floor, 1) === (0.0,) - @test gradient(ceil, 1) === (0.0,) - @test gradient(round, 1) === (0.0,) - @test gradient(hash, 1) === nothing - @test gradient(div, 1, 2) === nothing -end #testset +@testset "number.jl" begin + @testset "nograds" begin + @test gradient(floor, 1) === (0.0,) + @test gradient(ceil, 1) === (0.0,) + @test gradient(round, 1) === (0.0,) + @test gradient(hash, 1) === nothing + @test gradient(div, 1, 2) === nothing + end + + @testset "basics" begin + @test gradient(Base.literal_pow, ^, 3//2, Val(-5))[2] isa Rational + + @test gradient(convert, Rational, 3.14) == (nothing, 1.0) + @test gradient(convert, Rational, 2.3) == (nothing, 1.0) + @test gradient(convert, UInt64, 2) == (nothing, 1.0) + @test gradient(convert, BigFloat, π) == (nothing, 1.0) + + @test gradient(Rational, 2) == (1//1,) + + @test gradient(Bool, 1) == (1.0,) + @test gradient(Int32, 2) == (1.0,) + @test gradient(UInt16, 2) == (1.0,) + + @test gradient(+, 2.0, 3, 4.0, 5.0) == (1.0, 1.0, 1.0, 1.0) + + @test gradient(//, 3, 2) == (1//2, -3//4) + end + + @testset "Complex numbers" begin + @test gradient(imag, 3.0) == (0.0,) + @test gradient(imag, 3.0 + 3.0im) == (0.0 + 1.0im,) + + @test gradient(conj, 3.0) == (1.0,) + @test gradient(real ∘ conj, 3.0 + 1im) == (1.0 + 0im,) + + @test gradient(real, 3.0) == (1.0,) + @test gradient(real, 3.0 + 1im) == (1.0 + 0im,) + + @test gradient(abs2, 3.0) == (2*3.0,) + @test gradient(abs2, 3.0+2im) == (2*3.0 + 2*2.0im,) + + @test gradient(real ∘ Complex, 3.0, 2.0) == (1.0, 0.0) + end +end From cbe800d129564a925958ce6f8de5ca10ff017490 Mon Sep 17 00:00:00 2001 From: Miha Zgubic 
Date: Thu, 28 Jul 2022 22:54:42 +0200 Subject: [PATCH 340/490] replace convert as well --- src/lib/number.jl | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/src/lib/number.jl b/src/lib/number.jl index 0e629518d..30b702254 100644 --- a/src/lib/number.jl +++ b/src/lib/number.jl @@ -1,3 +1,10 @@ +function ChainRulesCore.rrule( + ::ZygoteRuleConfig, ::typeof(convert), T::Type{<:Real}, x::Real +) + convert_pullback(Δ) = (NoTangent(), NoTangent(), Δ) + return convert(T, x), convert_pullback +end + function ChainRulesCore.rrule( ::ZygoteRuleConfig, ::typeof(Base.literal_pow), ::typeof(^), x::Number, ::Val{p} ) where {p} From cdadaffe55b69a1202014ab3b49522dc209c3425 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Mon, 1 Aug 2022 10:02:55 +0200 Subject: [PATCH 341/490] comment and version number --- Project.toml | 2 +- src/lib/number.jl | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 1ac51616e..8328eb69a 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.42" +version = "0.6.43" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/src/lib/number.jl b/src/lib/number.jl index 30b702254..aa50c54dc 100644 --- a/src/lib/number.jl +++ b/src/lib/number.jl @@ -61,6 +61,8 @@ function ChainRulesCore.rrule(::ZygoteRuleConfig, ::typeof(conj), x::Number) return conj(x), conj_pullback end +# for real x, ChainRules pulls back a zero real adjoint, whereas we treat x +# as embedded in the complex numbers and pull back a pure imaginary adjoint function ChainRulesCore.rrule(::ZygoteRuleConfig, ::typeof(imag), x::Number) imag_pullback(ī) = (NoTangent(), real(ī)*im) return imag(x), imag_pullback From 1207b4d0fb0c1d3b2cb64e3cb38bfaf60c03f5fc Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Mon, 1 Aug 2022 23:27:45 +0200 Subject: [PATCH 342/490] remove adjoint for hv/h/v/cat --- src/lib/array.jl | 21 --------------------- 1 file changed, 21 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 293801b21..420e3716f 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -104,27 +104,6 @@ end @adjoint reshape(xs, dims...) = reshape(xs, dims...), Δ -> (reshape(Δ, size(xs)),map(_->nothing,dims)...) -@adjoint function hvcat(rows::Tuple{Vararg{Int}}, xs::Number...) - hvcat(rows, xs...), ȳ -> (nothing, permutedims(ȳ)...) -end - -pull_block_vert(sz, Δ, A::Number) = Δ[sz] -pull_block_vert(sz, Δ, A::AbstractVector) = Δ[sz-length(A)+1:sz] -pull_block_vert(sz, Δ, A::AbstractMatrix) = Δ[sz-size(A, 1)+1:sz, :] -@adjoint function vcat(A::Union{AbstractVector, AbstractMatrix, Number}...) - sz = cumsum([size.(A, 1)...]) - return vcat(A...), Δ->(map(n->pull_block_vert(sz[n], Δ, A[n]), eachindex(A))...,) -end -@adjoint vcat(xs::Number...) = vcat(xs...), Δ -> (Δ...,) - -pull_block_horz(sz, Δ, A::AbstractVector) = Δ[:, sz] -pull_block_horz(sz, Δ, A::AbstractMatrix) = Δ[:, sz-size(A, 2)+1:sz] -@adjoint function hcat(A::Union{AbstractVector, AbstractMatrix}...) - sz = cumsum([size.(A, 2)...]) - return hcat(A...), Δ->(map(n->pull_block_horz(sz[n], Δ, A[n]), eachindex(A))...,) -end -@adjoint hcat(xs::Number...) 
= hcat(xs...), Δ -> (Δ...,) - @adjoint function repeat(xs; inner=ntuple(_->1, ndims(xs)), outer=ntuple(_->1, ndims(xs))) repeat(xs, inner = inner, outer = outer), function (Δ) Δ′ = zero(xs) From 1d63da63d47ad66372c8092e6cb89932603627e8 Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Mon, 1 Aug 2022 23:29:10 +0200 Subject: [PATCH 343/490] v0.6.44 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 8328eb69a..c61ff68c2 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.43" +version = "0.6.44" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 328eb4d122a12b0a6c4947e17278081e877169cf Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Mon, 1 Aug 2022 18:52:06 -0700 Subject: [PATCH 344/490] propagate ambiguities from rrule lookup instead of failing inexplicably --- src/compiler/chainrules.jl | 3 ++- test/chainrules.jl | 9 +++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 99d8f4652..7c7de8655 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -73,7 +73,8 @@ matching_cr_sig(t, s) = matching_cr_sig(t.method.sig, s.method.sig) matching_cr_sig(::DataType, ::UnionAll) = false matching_cr_sig(::UnionAll, ::DataType) = false matching_cr_sig(t::Type, s::Type) = type_tuple_tail(t) == type_tuple_tail(s) - +matching_cr_sig(::Any, ::Nothing) = false # https://github.com/FluxML/Zygote.jl/issues/1234 + type_tuple_tail(d::DataType) = Tuple{d.parameters[2:end]...} function type_tuple_tail(d::UnionAll) body = Base.unwrap_unionall(d) diff --git a/test/chainrules.jl b/test/chainrules.jl index 94ab9584a..e9cb4afbc 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -275,6 +275,15 @@ using Zygote: ZygoteRuleConfig @test Zygote.gradient(x -> f_notimplemented(only(x)), [0.1]) === (nothing,) end end + + # https://github.com/FluxML/Zygote.jl/issues/1234 + @testset "rrule lookup ambiguities" begin + f_ambig(x, y) = x + y + ChainRulesCore.rrule(::typeof(f_ambig), x::Int, y) = x + y, _ -> (0, 0) + ChainRulesCore.rrule(::typeof(f_ambig), x, y::Int) = x + y, _ -> (0, 0) + + @test_throws MethodError pullback(f_ambig, 1, 2) + end end @testset "ChainRulesCore.rrule_via_ad" begin From 4da04412628ad2803038dd38240cb482d6f22a5a Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Tue, 2 Aug 2022 18:55:30 -0700 Subject: [PATCH 345/490] passthrough safe ccalls in threading code --- src/compiler/reverse.jl | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/src/compiler/reverse.jl b/src/compiler/reverse.jl index ba10ea5b1..6e88e7273 100644 --- a/src/compiler/reverse.jl +++ b/src/compiler/reverse.jl @@ -254,6 +254,15 @@ xaccum(ir) = nothing xaccum(ir, x) = x xaccum(ir, xs...) 
= push!(ir, xcall(Zygote, :accum, xs...)) +function passthrough_expr(ex::Expr) + # Metadata we want to preserve + isexpr(ex, GlobalRef, :call, :isdefined, :inbounds, :meta, :loopinfo) && return true + # ccalls and more that are safe to preserve/required for proper operation: + # - jl_set_task_threadpoolid: added in 1.9 for @spawn + isexpr(ex, :foreigncall) && ex.args[1] in (:jl_set_task_threadpoolid,) && return true + return false +end + function adjoint(pr::Primal) ir, sigs = adjointcfg(pr) for b in reverse(blocks(pr.ir)) @@ -278,10 +287,9 @@ function adjoint(pr::Primal) end elseif ex isa Core.PiNode grads[ex.val] = grads[v] - elseif isexpr(ex, GlobalRef, :call, :isdefined, :inbounds, :meta, :loopinfo) - elseif isexpr(ex) + elseif isexpr(ex) && !passthrough_expr(ex) push!(rb, stmt(xcall(Base, :error, """ - Can't differentiate $(ex.head) expression. + Can't differentiate $(ex.head) expression $ex. You might want to check the Zygote limitations documentation. https://fluxml.ai/Zygote.jl/latest/limitations """), From 7d0376a1a0cea719b943292594b4753fe6b0f3e0 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Tue, 2 Aug 2022 20:41:37 -0700 Subject: [PATCH 346/490] function name is actually a QuoteNode --- src/compiler/reverse.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/reverse.jl b/src/compiler/reverse.jl index 6e88e7273..532644914 100644 --- a/src/compiler/reverse.jl +++ b/src/compiler/reverse.jl @@ -259,7 +259,7 @@ function passthrough_expr(ex::Expr) isexpr(ex, GlobalRef, :call, :isdefined, :inbounds, :meta, :loopinfo) && return true # ccalls and more that are safe to preserve/required for proper operation: # - jl_set_task_threadpoolid: added in 1.9 for @spawn - isexpr(ex, :foreigncall) && ex.args[1] in (:jl_set_task_threadpoolid,) && return true + isexpr(ex, :foreigncall) && unwrapquote(ex.args[1]) in (:jl_set_task_threadpoolid,) && return true return false end From 83cdacca7d150a13d4f9ff302cd452b3a3961fa4 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Tue, 9 Aug 2022 22:05:43 -0700 Subject: [PATCH 347/490] Add rule for Dict iteration --- src/lib/base.jl | 39 +++++++++++++++++++++++++++++++++++++++ test/lib/base.jl | 32 ++++++++++++++++++++++++++++++++ 2 files changed, 71 insertions(+) diff --git a/src/lib/base.jl b/src/lib/base.jl index 79dfb77b6..161dd6e60 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -47,6 +47,45 @@ end end end +# This rule behaves much like the getindex adjoint, +# just with an (internal) ordinal index instead of a key. +function _pullback(cx::AContext, ::typeof(iterate), d::Dict, i) + iter = iterate(d, i) + function dict_iterate_pullback(Δ) + (iter === nothing || Δ === nothing) && return + k, v = iter[1] + _, dv = Δ[1] + accum_param(cx, v, dv) === nothing && return + grad = grad_mut(cx, d) + grad[k] = accum(get(grad, k, nothing), dv) + return (nothing, grad, nothing) + end + return iter, dict_iterate_pullback +end + +# ...while this one is to avoid duplicating code or differentiating skip_deleted. +# The alternative would be to write a rule for the private _iterate(::Dict, i). 
+function _pullback(cx::AContext, ::typeof(iterate), d::Dict) + # Calculation of i is the same used in iterate(::Dict) + return _pullback(cx, iterate, d, Base.skip_deleted(d, d.idxfloor)) +end + +function _pullback(cx::AContext, ::typeof(iterate), vi::Base.ValueIterator{<:Dict}, i::Int) + iter = iterate(vi, i) + function values_iterate_pullback(Δ) + (iter === nothing || Δ === nothing) && return + v, dv = iter[1], Δ[1] + accum_param(cx, v, dv) === nothing && return + # Same as vi.dict.keys[i], but without reaching into Dict internals. + # Iterating the dict instead of keys() is to hit the rules above in nested AD. + k = iterate(vi.dict, i)[1][1] + grad = grad_mut(cx, vi.dict) + grad[k] = accum(get(grad, k, nothing), dv) + return (nothing, (; dict = grad), nothing) + end + return iter, values_iterate_pullback +end + # Channels grad_mut(ch::Channel) = Channel(ch.sz_max) diff --git a/test/lib/base.jl b/test/lib/base.jl index 5186483da..74f129f6d 100644 --- a/test/lib/base.jl +++ b/test/lib/base.jl @@ -10,4 +10,36 @@ @test result1 == result2 end + + @testset "Dict iteration" begin + # https://github.com/FluxML/Zygote.jl/issues/1065 + function sumkv(d) + s = zero(d["c"]) + for (k, v) in d + s += v + k == :b && (s += v) + end + return sum(s) + end + + function sumvals(d) + s = zero(d["c"]) + for v in values(d) + s += v + end + return sum(s) + end + + d_num = Dict(:a => 3, :b => 4, "c" => 5) + d_arr = Dict(:a => [3], :b => [4], "c" => [5]) + ps = d_arr |> values |> collect |> Params + + @test gradient(sumkv, d_num)[1] == Dict(:a => 1, :b => 2, "c" => 1) + grads = gradient(() -> sumkv(d_arr), ps) + @test (grads[d_arr[:a]], grads[d_arr[:b]], grads[d_arr["c"]]) == ([1], [2], [1]) + + @test gradient(sumvals, d_num)[1] == Dict(:a => 1, :b => 1, "c" => 1) + grads = gradient(() -> sumvals(d_arr), ps) + @test (grads[d_arr[:a]], grads[d_arr[:b]], grads[d_arr["c"]]) == ([1], [1], [1]) + end end From 24a6111c851c8dfab87628c5227b11a2dbf89648 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Mon, 8 Aug 2022 16:28:29 -0700 Subject: [PATCH 348/490] Treat Pairs(NamedTuple) as NamedTuple for indexing This prevents issues with double-counting when using kwargs. --- src/lib/base.jl | 28 ++++++++++++++++++++++++++-- test/features.jl | 17 ++++++++++++++--- 2 files changed, 40 insertions(+), 5 deletions(-) diff --git a/src/lib/base.jl b/src/lib/base.jl index 79dfb77b6..21ca62b1c 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -119,11 +119,11 @@ end # named tuple @adjoint function pairs(t::NamedTuple{N}) where N - + pairs_namedtuple_pullback(dx::NamedTuple) = (dx.data,) pairs_namedtuple_pullback(dx::Tuple{}) = (NamedTuple(),) - + function pairs_namedtuple_pullback(Δ::Dict) t0 = map(zero, t) for (idx, v) in Δ @@ -145,6 +145,30 @@ else @adjoint merge(nt::NamedTuple, dict::Dict) = pullback(merge, nt, (;dict...)) end +# Keyword arguments pretend to be a Dict, but are secretly wrapping a NamedTuple. +# We can treat them much the same, just with some plumbing to handle the extra `itr` field. 
+function _pullback(::AContext, ::typeof(getindex), + ps::Iterators.Pairs{<:Any,<:Any,<:Any,<:NamedTuple}, k) + # So we don't close over kwarg values in the pullback + data = map(_ -> nothing, NamedTuple(ps)) + function kwargs_getindex_pullback(Δ) + dps = (data = Base.setindex(data, Δ, k), itr = nothing) + return (nothing, dps, nothing) + end + return ps[k], kwargs_getindex_pullback +end + +function _pullback(cx::AContext, ::typeof(literal_getindex), + ps::Iterators.Pairs{<:Any,<:Any,<:Any,<:NamedTuple}, ::Val{K}) where K + val, gf_back = _pullback(cx, literal_getfield, NamedTuple(ps), Val(K)) + function kwargs_literal_getindex_pullback(Δ) + dps = (data = gf_back(Δ)[2], itr = nothing) + return (nothing, dps, nothing) + end + return val, kwargs_literal_getindex_pullback +end + +# Misc. @adjoint function Base.getfield(p::Pair, i::Int) function pair_getfield_pullback(Δ) f, s = i == 1 ? (Δ, nothing) : (nothing, Δ) diff --git a/test/features.jl b/test/features.jl index d4f68d36b..4c16267f2 100644 --- a/test/features.jl +++ b/test/features.jl @@ -552,6 +552,17 @@ end @test gradient(x -> x[].a, Ref((a=1, b=2))) == ((x = (a = 1, b = nothing),),) @test gradient(x -> x[1][].a, [Ref((a=1, b=2)), Ref((a=3, b=4))]) == ([(x = (a = 1, b = nothing),), nothing],) @test gradient(x -> x[1].a, [(a=1, b=2), "three"]) == ([(a = 1, b = nothing), nothing],) + + @testset "indexing kwargs" begin + inner_lit_index(; kwargs...) = kwargs[:x] + outer_lit_index(; kwargs...) = inner_lit_index(; x=kwargs[:x]) + + inner_dyn_index(k; kwargs...) = kwargs[k] + outer_dyn_index(k; kwargs...) = inner_dyn_index(k; x=kwargs[k]) + + @test gradient(x -> outer_lit_index(; x), 0.0) == (1.0,) + @test gradient((x, k) -> outer_dyn_index(k; x), 0.0, :x) == (1.0, nothing) + end end function type_test() @@ -562,7 +573,7 @@ end @testset "Pairs" begin @test (x->10*pairs((a=x, b=2))[1])'(100) === 10.0 - @test (x->10*pairs((a=x, b=2))[2])'(100) === 0 + @test (x->10*pairs((a=x, b=2))[2])'(100) === nothing foo(;kw...) = 1 @test gradient(() -> foo(a=1,b=2.0)) === () @@ -578,8 +589,8 @@ end @testset "kwarg splatting, pass in object" begin g(; kwargs...) = kwargs[:x] * kwargs[:z] h(somedata) = g(; somedata...) 
- @test gradient(h, (; x=3.0, y=4.0, z=2.3)) == ((x = 2.3, y = 0.0, z = 3.0),) - @test gradient(h, Dict(:x=>3.0, :y=>4.0, :z=>2.3)) == ((y = 0.0, z = 3.0, x = 2.3),) + @test gradient(h, (; x=3.0, y=4.0, z=2.3)) == ((x = 2.3, y = nothing, z = 3.0),) + @test gradient(h, Dict(:x=>3.0, :y=>4.0, :z=>2.3)) == ((y = nothing, z = 3.0, x = 2.3),) end @testset "Iterators" begin From 17a5673bdfd7d8fcdf27454b75b15efdb6477c9e Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 14 Aug 2022 20:33:17 -0700 Subject: [PATCH 349/490] broadcast adjoint for unary minus (#1287) --- src/lib/broadcast.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 8c0d3c54c..98124bd03 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -80,6 +80,8 @@ unbroadcast(x::AbstractArray, x̄::Nothing) = nothing @adjoint broadcasted(::typeof(-), x::Numeric, y::Numeric) = x .- y, Δ -> (nothing, unbroadcast(x, Δ), _minus(unbroadcast(y, Δ))) +@adjoint broadcasted(::typeof(-), x::Numeric) = .-x, + Δ -> (nothing, _minus(Δ)) _minus(Δ) = -Δ _minus(::Nothing) = nothing From bd5ce6e6e394081b6e7b28d669e8b8e7b3e05176 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Mon, 15 Aug 2022 15:58:02 -0700 Subject: [PATCH 350/490] v0.6.44 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 8328eb69a..c61ff68c2 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.43" +version = "0.6.44" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 7855c46db1d1e4dc133f76b0cdf61e199001fadd Mon Sep 17 00:00:00 2001 From: Miha Zgubic Date: Tue, 23 Aug 2022 15:38:51 +0100 Subject: [PATCH 351/490] compat ChainRules for @allowscalar --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index c61ff68c2..37d40b4f5 100644 --- a/Project.toml +++ b/Project.toml @@ -27,7 +27,7 @@ ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] AbstractFFTs = "0.5, 1.0" -ChainRules = "1.37" +ChainRules = "1.44.1" ChainRulesCore = "1.9" ChainRulesTestUtils = "1" DiffRules = "1.4" From 77dea335d1d51b8a3a4ba1893bd7cf1160862628 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Tue, 23 Aug 2022 19:07:15 -0700 Subject: [PATCH 352/490] Bump version to v0.6.45 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 37d40b4f5..58e84dbf2 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.44" +version = "0.6.45" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From fb34703a2c57cec8cbdd23be816250dc9de17e91 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Thu, 25 Aug 2022 21:28:19 -0700 Subject: [PATCH 353/490] Handle nothing grads for Pairs.data --- src/lib/base.jl | 2 +- test/features.jl | 4 ++++ 2 files changed, 5 insertions(+), 1 deletion(-) diff --git a/src/lib/base.jl b/src/lib/base.jl index 21ca62b1c..1a85cc56c 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -162,7 +162,7 @@ function _pullback(cx::AContext, ::typeof(literal_getindex), ps::Iterators.Pairs{<:Any,<:Any,<:Any,<:NamedTuple}, ::Val{K}) where K val, gf_back = _pullback(cx, literal_getfield, NamedTuple(ps), Val(K)) function kwargs_literal_getindex_pullback(Δ) - dps = (data = gf_back(Δ)[2], 
itr = nothing) + dps = (data = gradindex(gf_back(Δ), 2), itr = nothing) return (nothing, dps, nothing) end return val, kwargs_literal_getindex_pullback diff --git a/test/features.jl b/test/features.jl index 4c16267f2..e3e0e55bd 100644 --- a/test/features.jl +++ b/test/features.jl @@ -591,6 +591,10 @@ end h(somedata) = g(; somedata...) @test gradient(h, (; x=3.0, y=4.0, z=2.3)) == ((x = 2.3, y = nothing, z = 3.0),) @test gradient(h, Dict(:x=>3.0, :y=>4.0, :z=>2.3)) == ((y = nothing, z = 3.0, x = 2.3),) + + # for when no kwargs have grads backpropogated + no_kwarg_grad(x; kwargs...) = x[kwargs[:i]] + @test gradient(x -> no_kwarg_grad(x; i=1), [1]) == (1,) end @testset "Iterators" begin From 3de236ec143b73cddd1e081f30730ad4c5952b77 Mon Sep 17 00:00:00 2001 From: Christopher Rackauckas Date: Fri, 26 Aug 2022 00:36:23 -0400 Subject: [PATCH 354/490] Add DiffEqFlux BasicNeuralDE Test --- .github/workflows/Downstream.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/Downstream.yml b/.github/workflows/Downstream.yml index 09f4f2f5d..47f8032a5 100644 --- a/.github/workflows/Downstream.yml +++ b/.github/workflows/Downstream.yml @@ -23,6 +23,7 @@ jobs: - {user: TuringLang, repo: DynamicPPL.jl, group: All} - {user: TuringLang, repo: DistributionsAD.jl, group: Zygote} - {user: SciML, repo: DiffEqFlux.jl, group: Layers} + - {user: SciML, repo: DiffEqFlux.jl, group: BasicNeuralDE} - {user: SciML, repo: NeuralPDE.jl, group: NNPDE} - {user: JuliaMolSim, repo: Molly.jl, group: Zygote} steps: From 4183226eff3ed45ecee36701eb6a569ad08fd3cb Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Fri, 26 Aug 2022 08:00:56 -0700 Subject: [PATCH 355/490] Fix test --- test/features.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/features.jl b/test/features.jl index e3e0e55bd..cdd513fdb 100644 --- a/test/features.jl +++ b/test/features.jl @@ -594,7 +594,7 @@ end # for when no kwargs have grads backpropogated no_kwarg_grad(x; kwargs...) 
= x[kwargs[:i]] - @test gradient(x -> no_kwarg_grad(x; i=1), [1]) == (1,) + @test gradient(x -> no_kwarg_grad(x; i=1), [1]) == ([1],) end @testset "Iterators" begin From bf64def534f91492b60030029138a52bb1f43cf1 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Mon, 29 Aug 2022 16:40:38 -0700 Subject: [PATCH 356/490] bump to 0.6.46 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 58e84dbf2..cfe708ec8 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.45" +version = "0.6.46" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 01aaecc869d4bf652d2c997d06ccd6694dcf9398 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Mon, 29 Aug 2022 21:43:07 -0400 Subject: [PATCH 357/490] add css + logo (#1299) --- docs/assets/flux.css | 113 +++++++++++++++++++++++++++++++++++++++++++ docs/assets/logo.png | Bin 0 -> 67670 bytes 2 files changed, 113 insertions(+) create mode 100644 docs/assets/flux.css create mode 100644 docs/assets/logo.png diff --git a/docs/assets/flux.css b/docs/assets/flux.css new file mode 100644 index 000000000..541ead5fe --- /dev/null +++ b/docs/assets/flux.css @@ -0,0 +1,113 @@ +@import url('https://fonts.googleapis.com/css?family=Lato:400,400i'); + +body { + font-family: Lato, "Segoe UI",Roboto,"Helvetica Neue",Arial,sans-serif; +} + +nav.toc { + padding-top: 0; + background: rgb(240, 240, 240); + line-height: 2em; + cursor: default; + user-select: none; +} + +h1+h2 { + margin-top: 0; +} + +/* Green banner in ToC */ +nav.toc > h1 { + margin-top: 0; + padding-top: 0.4em; + padding-bottom: 0.5em; + border-bottom: 5px solid white; + box-shadow: 0px -2px 5px rgb(60,60,60); + margin-bottom: 0.5em; + background: rgb(60, 150, 60); + + font-style: italic; + font-weight: normal; + font-size: 50pt; + text-transform: lowercase; + text-shadow: 2px 2px 5px rgba(0,0,0,0.2); + color: white; +} + +/* Reduce ToC font size */ +.toctext { + font-size: 10pt; +} + +/* Fade out non-clickable ToC headers */ +nav.toc ul span.toctext { + color: rgb(180, 180, 180); +} + +nav.toc ul .toctext { + color: rgb(100, 100, 100); +} + +nav.toc ul a.toctext:hover { + color: inherit; + background: rgb(220, 220, 220); + cursor: default; +} + +nav.toc li.current > .toctext { + background: linear-gradient(90deg, rgb(245,245,245) 0%, white 90%); + font-weight: normal; +} + +nav.toc ul.internal li.toplevel { + font-weight: normal; +} + +/* Content */ + +article { max-width: none; } + +article > p, article > ul { + max-width: 45em; +} + +/* Links */ +a, a:visited { color: rgb(0, 120, 0); } +article p a { border-bottom: 1px solid rgb(200, 230, 200); } +a:hover, a:visited:hover { color: rgb(0, 80, 0); } + +/* Article Links */ +article p a { border-bottom: 1px solid rgb(200, 230, 200); } +article p a:hover, article a:visited:hover { color: rgb(0, 120, 0); } +article p a:hover { border-bottom: 1px solid rgb(150, 200, 150); } + +/* Doctstrings */ +article section.docstring { + padding: 0.5em 0; + border-left: none; + border-right: none; + border-bottom: none; +} + +/* Code */ + +article pre, article p > code { + background: rgb(245, 250, 245); +} + +article pre { + border: none; + max-width: none; + padding: 1em; + border-radius: 10px 0px 0px 10px; + margin-left: -1em; + margin-right: -2em; +} + +.hljs-comment { + font-style: italic; +} + +.hljs-number { + color: rgb(0, 150, 150); +} diff --git a/docs/assets/logo.png 
b/docs/assets/logo.png
new file mode 100644
index 0000000000000000000000000000000000000000..5e8d4920fb6d1768c88ce1f855f3cdb1780e3ec3
GIT binary patch
literal 67670
[base85-encoded PNG data omitted]
z&y_yRzjMP6LN%~)aY50RIdqW_NQS(vshMc>hCDunaCdC5-Pb_whKyL!?qE z1~@38e+)X=JJOUjCUhTsYq-n1j$v0^Q?vbr5GrrC!}|e;9CR>ndpRIwQb$LJl$2C` z{5w*?9K`EEqYG@HI)4=Ey*A9vAKU^GWq*l9i8S#m^hA06r$RbtOxS7(E;2({=t%i% zj)?`$v$bri1>h-(8Cd0_bLHsT1W#v71pQ?|6Ld?qf!XXY6$CK?NE#i7oP7Jom*cPT zFMxHo`R5O4#}9yiLZ|{~CLTE4!7ZZ`dmhbS9+}b2r&&dVzez?J`?LFDrRbq~;+L zDSv%!NKX1`GQ{6u;zE!gz~Gy6Kk0KML^{JNEs1(v)qnUF8Xi0s1^uC7w^neIu-N?z zf1n~nQ7A_bPtWLw!b`T!;`OifZsuVFwS%*d^No$;GS|@s# z2C1g$20MC}f8z~hFJAlwnOuOzWfS{&ZG04RHCh#sgCIMwlprJxv;yCnW(E9CzBJ2g zmS)tBR9!Ootu6*=Y@lGk|I%8hDE~Vmm9)Z#)W)@2+U2+!TMeWJ97y8*JyqwNO z2*D7;#4I@a4@(DN6dTk1I-tXzMR!EUXtkyZ8)F zHfS=uU{j%`i8HMJL_ddalo_65jIf&%9Am@_F&>os3~>&OS+dhh^R=KK`QHoOmiKRc zCyulKWVtQZPII9a9nmDpym`S(HvMYWer+dt{pr;xd1SghOzgJK>Mv&^A6+l-3>}^% za(;U0HVCquCL;uDcBgq(ydi&mb^{iGbHNyo|Mt8+X?SR6WyyfPsO~M-A1<^@3XzG} zQKl~&RLPtUcT0Kip1o_79K37j=IrxdSfP|oDQPWT*!E;!&AwtL@e8ZQnx$_Mib**6 zh1~>i3L2Ulx^MQXpgNF&{5!c~--d>}%tE-(tA!kg&gg87SFiW1eFVIvI@TeI2W^S_ z)XJT4#=Q_BlC6Ac;3_4fsol{7uIL4KyoMs0hXV9K9L) zc=rO)O>Th@PQ#z(*Lvxv8ue)o8t#34L=Q6?8V?WBkuEAu5oQ01Vht)`y*Lj1iUinY zz_+_HfD4Emd#V~Q`PoaJ$A_hW|9tDYE&g66Rx{i=$I_+CC9Q`|A%+ zR*A?%&-xJ>hV_ddrz-O{aEC$@0C4^Gtohof?z; zFPD-sdTO!l`(YQ>D<5wqvhYlny>|E&AdsWYv zxvI1?t9J#ss1UEPcDtsCM08-?p^Kbw9GPkA6*|;WRz_@Bpn@vdpTc>@C&hlYx9{QpVMI$hG+oW1+Op|fJje*qR&u=VV z%bvYRHS+E;6UCqS66Uup-`IymfWO@@ZSJ_G`9^TYafh9CBv$)mV`c}?eMKYy56u^O z70=Qv_QmIo+w3$m3ak4s8tIHIZ{c07zw?oNTbOlDVHF(4@SA7zPDTjkolJ?Q%3{QD z*?#I3R|Z>gmW;?4q8i?2Y+D}@rYElZ_4|h0vXWqPYp4lEAapWl*zlZS;-BwW_~KT+ z%j#We79!BFae^Et54Bo>n3caPme-s0S1RPQgWN0$;HFC25& zIH{Sp47g_MSYk)-M!`>nurf$&@fe=of+|KPS4vUIGuQFK@S{hGB1s~c;kiw8&`nkemNLf&ca%9LdJ8QfCgShwRO+<&>!fJe<1iUEh0dB83glagq2VG?{wjdpw$x?(x)m zLuvTMDh93X`+~-==nJGaoFl+}a7BH9G*A8n=k)6QbI8Zw&UxHV%MD&bKaGpsQiern zsCKb^tA+O>BmXnj3JjcZNOcWts(V+_cuA7>^uAzbNw6yEkDHZ+jjdn*&v`6-yV$Y-@V=)P<-@ zNg2H0{G#~OFuT~N_0zR%pZOn=vwJSVwu!pyxaxEO835Q6;)Et> z0OZP{IKbJ>XZSN-X1(&(2LTpfj#)PM|0t_~U!@)(nbQ23=#hW<>?}mn!iP5_&M@*S zU4PPu4pg?_fc3p)7x0#`7ezG6<`)jm4lXsY?CSk1=-DcPlI+=3wWO)~qVS6G+2It- zhK%@D*Ie~2g-*#Z0=ZNN;LlKQz4@b2SD^v^4a%{i-1LD$Z(?5pXR>W{eO9o*$+A*Kj_tPE9V^~-$eLp;Of4X7J}_l;vu3$D%Vs3zY3kwD$U75v&qa+NTB8P zfTtBYGQ z71SsjD@|0wI=CBPaYuj88UXK?*rz}JULU@Z3=>Fn$NVnD`uSYVsVm=x1AWKyFbgCB zAX8sX2~ZDcW|GMMc-P^c!;ljTXV{l-|g* z+J?5h15jFZAZ@!Tvg_xrS-U@dn5JgE2$0AU4jE#y1reUzejc&x z-zP4odEJ)aY`Nq6%jNj^$!e0c(U03z?m6Mz2dZ?6zT^y z@(uoN>zJ@T{MURC~Q_!+)8lZS$#hn2a9wV0)wHT;F* Date: Thu, 1 Sep 2022 03:42:12 +0530 Subject: [PATCH 358/490] Turn on previews, move and render assets, add `gradient`'s API to docs (#1300) * Turn on previews, add assets, add `gradient`'s API to docs * `prettyurls` + move `assets` --- .github/workflows/clean_preview.yml | 27 +++ .github/workflows/pr_comment.yml | 13 ++ .gitignore | 1 + docs/Manifest.toml | 294 ---------------------------- docs/make.jl | 16 +- docs/{ => src}/assets/flux.css | 0 docs/{ => src}/assets/logo.png | Bin docs/src/index.md | 8 + 8 files changed, 58 insertions(+), 301 deletions(-) create mode 100644 .github/workflows/clean_preview.yml create mode 100644 .github/workflows/pr_comment.yml delete mode 100644 docs/Manifest.toml rename docs/{ => src}/assets/flux.css (100%) rename docs/{ => src}/assets/logo.png (100%) diff --git a/.github/workflows/clean_preview.yml b/.github/workflows/clean_preview.yml new file mode 100644 index 000000000..25946efc3 --- /dev/null +++ b/.github/workflows/clean_preview.yml @@ -0,0 +1,27 @@ +# from https://github.com/CliMA/ClimaTimeSteppers.jl +name: Doc Preview Cleanup + +on: + pull_request: + types: [closed] + +jobs: + doc-preview-cleanup: + runs-on: ubuntu-latest + 
steps: + - name: Checkout gh-pages branch + uses: actions/checkout@v2 + with: + ref: gh-pages + - name: Delete preview and history + push changes + run: | + if [ -d "previews/PR$PRNUM" ]; then + git config user.name "Documenter.jl" + git config user.email "documenter@juliadocs.github.io" + git rm -rf "previews/PR$PRNUM" + git commit -m "delete preview" + git branch gh-pages-new $(echo "delete history" | git commit-tree HEAD^{tree}) + git push --force origin gh-pages-new:gh-pages + fi + env: + PRNUM: ${{ github.event.number }} diff --git a/.github/workflows/pr_comment.yml b/.github/workflows/pr_comment.yml new file mode 100644 index 000000000..f0b203285 --- /dev/null +++ b/.github/workflows/pr_comment.yml @@ -0,0 +1,13 @@ +name: pr_comment +on: + pull_request: + types: [labeled] +jobs: + pr_comment: + runs-on: ubuntu-latest + steps: + - name: Create PR comment + if: github.event_name == 'pull_request' && github.repository == github.event.pull_request.head.repo.full_name && github.event.label.name == 'documentation' + uses: thollander/actions-comment-pull-request@71efef56b184328c7ef1f213577c3a90edaa4aff + with: + message: 'Once the build has completed, you can preview any updated documentation at this URL: https://fluxml.ai/Zygote.jl/previews/PR${{ github.event.number }}/' diff --git a/.gitignore b/.gitignore index aa5ffbd93..915632fc5 100644 --- a/.gitignore +++ b/.gitignore @@ -2,5 +2,6 @@ *.jl.*.cov *.jl.mem docs/build +docs/Manifest.toml Manifest.toml dev/ diff --git a/docs/Manifest.toml b/docs/Manifest.toml deleted file mode 100644 index dfeb0b184..000000000 --- a/docs/Manifest.toml +++ /dev/null @@ -1,294 +0,0 @@ -# This file is machine-generated - editing it directly is not advised - -[[AbstractFFTs]] -deps = ["LinearAlgebra"] -git-tree-sha1 = "485ee0867925449198280d4af84bdb46a2a404d0" -uuid = "621f4979-c628-5d54-868e-fcf4e3e8185c" -version = "1.0.1" - -[[ArgTools]] -uuid = "0dad84c5-d112-42e6-8d28-ef12dabb789f" - -[[Artifacts]] -uuid = "56f22d72-fd6d-98f1-02f0-08ddc0907c33" - -[[Base64]] -uuid = "2a0f44e3-6c83-55bd-87e4-b1978d98bd5f" - -[[ChainRules]] -deps = ["ChainRulesCore", "Compat", "LinearAlgebra", "Random", "Statistics"] -git-tree-sha1 = "346588c81effb94da6a30c1617e56af6a878e4d6" -uuid = "082447d4-558c-5d27-93f4-14fc19e9eca2" -version = "1.0.1" - -[[ChainRulesCore]] -deps = ["Compat", "LinearAlgebra", "SparseArrays"] -git-tree-sha1 = "ad613c934ec3a3aa0ff19b91f15a16d56ed404b5" -uuid = "d360d2e6-b24c-11e9-a2a3-2a2ae2dbcce4" -version = "1.0.2" - -[[CommonSubexpressions]] -deps = ["MacroTools", "Test"] -git-tree-sha1 = "7b8a93dba8af7e3b42fecabf646260105ac373f7" -uuid = "bbf7d656-a473-5ed7-a52c-81e309532950" -version = "0.3.0" - -[[Compat]] -deps = ["Base64", "Dates", "DelimitedFiles", "Distributed", "InteractiveUtils", "LibGit2", "Libdl", "LinearAlgebra", "Markdown", "Mmap", "Pkg", "Printf", "REPL", "Random", "SHA", "Serialization", "SharedArrays", "Sockets", "SparseArrays", "Statistics", "Test", "UUIDs", "Unicode"] -git-tree-sha1 = "dc7dedc2c2aa9faf59a55c622760a25cbefbe941" -uuid = "34da2185-b29b-5c13-b0c7-acf172513d20" -version = "3.31.0" - -[[CompilerSupportLibraries_jll]] -deps = ["Artifacts", "Libdl"] -uuid = "e66e0078-7015-5450-92f7-15fbd957f2ae" - -[[Dates]] -deps = ["Printf"] -uuid = "ade2ca70-3891-5945-98fb-dc099432e06a" - -[[DelimitedFiles]] -deps = ["Mmap"] -uuid = "8bb1440f-4735-579b-a4ab-409b98df4dab" - -[[DiffResults]] -deps = ["StaticArrays"] -git-tree-sha1 = "c18e98cba888c6c25d1c3b048e4b3380ca956805" -uuid = "163ba53b-c6d8-5494-b064-1a9d43ac40c5" -version = "1.0.3" 
- -[[DiffRules]] -deps = ["NaNMath", "Random", "SpecialFunctions"] -git-tree-sha1 = "214c3fcac57755cfda163d91c58893a8723f93e9" -uuid = "b552c78f-8df3-52c6-915a-8e097449b14b" -version = "1.0.2" - -[[Distributed]] -deps = ["Random", "Serialization", "Sockets"] -uuid = "8ba89e20-285c-5b6f-9357-94700520ee1b" - -[[DocStringExtensions]] -deps = ["LibGit2"] -git-tree-sha1 = "a32185f5428d3986f47c2ab78b1f216d5e6cc96f" -uuid = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae" -version = "0.8.5" - -[[Documenter]] -deps = ["Base64", "Dates", "DocStringExtensions", "InteractiveUtils", "JSON", "LibGit2", "Logging", "Markdown", "REPL", "Test", "Unicode"] -git-tree-sha1 = "395fa1554c69735802bba37d9e7d9586fd44326c" -uuid = "e30172f5-a6a5-5a46-863b-614d45cd2de4" -version = "0.24.11" - -[[Downloads]] -deps = ["ArgTools", "LibCURL", "NetworkOptions"] -uuid = "f43a241f-c20a-4ad4-852c-f6b1247861c6" - -[[FillArrays]] -deps = ["LinearAlgebra", "Random", "SparseArrays", "Statistics"] -git-tree-sha1 = "8c8eac2af06ce35973c3eadb4ab3243076a408e7" -uuid = "1a297f60-69ca-5386-bcde-b61e274b549b" -version = "0.12.1" - -[[ForwardDiff]] -deps = ["CommonSubexpressions", "DiffResults", "DiffRules", "LinearAlgebra", "NaNMath", "Printf", "Random", "SpecialFunctions", "StaticArrays"] -git-tree-sha1 = "e2af66012e08966366a43251e1fd421522908be6" -uuid = "f6369f11-7733-5829-9624-2563aa707210" -version = "0.10.18" - -[[IRTools]] -deps = ["InteractiveUtils", "MacroTools", "Test"] -git-tree-sha1 = "95215cd0076a150ef46ff7928892bc341864c73c" -uuid = "7869d1d1-7146-5819-86e3-90919afe41df" -version = "0.4.3" - -[[InteractiveUtils]] -deps = ["Markdown"] -uuid = "b77e0a4c-d291-57a0-90e8-8db25a27a240" - -[[JLLWrappers]] -deps = ["Preferences"] -git-tree-sha1 = "642a199af8b68253517b80bd3bfd17eb4e84df6e" -uuid = "692b3bcd-3c85-4b1f-b108-f13ce0eb3210" -version = "1.3.0" - -[[JSON]] -deps = ["Dates", "Mmap", "Parsers", "Unicode"] -git-tree-sha1 = "81690084b6198a2e1da36fcfda16eeca9f9f24e4" -uuid = "682c06a0-de6a-54ab-a142-c8b1cf79cde6" -version = "0.21.1" - -[[LibCURL]] -deps = ["LibCURL_jll", "MozillaCACerts_jll"] -uuid = "b27032c2-a3e7-50c8-80cd-2d36dbcbfd21" - -[[LibCURL_jll]] -deps = ["Artifacts", "LibSSH2_jll", "Libdl", "MbedTLS_jll", "Zlib_jll", "nghttp2_jll"] -uuid = "deac9b47-8bc7-5906-a0fe-35ac56dc84c0" - -[[LibGit2]] -deps = ["Base64", "NetworkOptions", "Printf", "SHA"] -uuid = "76f85450-5226-5b5a-8eaa-529ad045b433" - -[[LibSSH2_jll]] -deps = ["Artifacts", "Libdl", "MbedTLS_jll"] -uuid = "29816b5a-b9ab-546f-933c-edad1886dfa8" - -[[Libdl]] -uuid = "8f399da3-3557-5675-b5ff-fb832c97cbdb" - -[[LinearAlgebra]] -deps = ["Libdl"] -uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" - -[[LogExpFunctions]] -deps = ["DocStringExtensions", "LinearAlgebra"] -git-tree-sha1 = "7bd5f6565d80b6bf753738d2bc40a5dfea072070" -uuid = "2ab3a3ac-af41-5b50-aa03-7779005ae688" -version = "0.2.5" - -[[Logging]] -uuid = "56ddb016-857b-54e1-b83d-db4d58db5568" - -[[MacroTools]] -deps = ["Markdown", "Random"] -git-tree-sha1 = "6a8a2a625ab0dea913aba95c11370589e0239ff0" -uuid = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" -version = "0.5.6" - -[[Markdown]] -deps = ["Base64"] -uuid = "d6f4376e-aef5-505a-96c1-9c027394607a" - -[[MbedTLS_jll]] -deps = ["Artifacts", "Libdl"] -uuid = "c8ffd9c3-330d-5841-b78e-0817d7145fa1" - -[[Mmap]] -uuid = "a63ad114-7e13-5084-954f-fe012c677804" - -[[MozillaCACerts_jll]] -uuid = "14a3606d-f60d-562e-9121-12d972cd8159" - -[[NaNMath]] -git-tree-sha1 = "bfe47e760d60b82b66b61d2d44128b62e3a369fb" -uuid = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" -version = "0.3.5" - 
-[[NetworkOptions]] -uuid = "ca575930-c2e3-43a9-ace4-1e988b2c1908" - -[[OpenSpecFun_jll]] -deps = ["Artifacts", "CompilerSupportLibraries_jll", "JLLWrappers", "Libdl", "Pkg"] -git-tree-sha1 = "13652491f6856acfd2db29360e1bbcd4565d04f1" -uuid = "efe28fd5-8261-553b-a9e1-b2916fc3738e" -version = "0.5.5+0" - -[[Parsers]] -deps = ["Dates"] -git-tree-sha1 = "c8abc88faa3f7a3950832ac5d6e690881590d6dc" -uuid = "69de0a69-1ddd-5017-9359-2bf0b02dc9f0" -version = "1.1.0" - -[[Pkg]] -deps = ["Artifacts", "Dates", "Downloads", "LibGit2", "Libdl", "Logging", "Markdown", "Printf", "REPL", "Random", "SHA", "Serialization", "TOML", "Tar", "UUIDs", "p7zip_jll"] -uuid = "44cfe95a-1eb2-52ea-b672-e2afdf69b78f" - -[[Preferences]] -deps = ["TOML"] -git-tree-sha1 = "00cfd92944ca9c760982747e9a1d0d5d86ab1e5a" -uuid = "21216c6a-2e73-6563-6e65-726566657250" -version = "1.2.2" - -[[Printf]] -deps = ["Unicode"] -uuid = "de0858da-6303-5e67-8744-51eddeeeb8d7" - -[[REPL]] -deps = ["InteractiveUtils", "Markdown", "Sockets", "Unicode"] -uuid = "3fa0cd96-eef1-5676-8a61-b3b8758bbffb" - -[[Random]] -deps = ["Serialization"] -uuid = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" - -[[Requires]] -deps = ["UUIDs"] -git-tree-sha1 = "4036a3bd08ac7e968e27c203d45f5fff15020621" -uuid = "ae029012-a4dd-5104-9daa-d747884805df" -version = "1.1.3" - -[[SHA]] -uuid = "ea8e919c-243c-51af-8825-aaa63cd721ce" - -[[Serialization]] -uuid = "9e88b42a-f829-5b0c-bbe9-9e923198166b" - -[[SharedArrays]] -deps = ["Distributed", "Mmap", "Random", "Serialization"] -uuid = "1a1011a3-84de-559e-8e89-a11a2f7dc383" - -[[Sockets]] -uuid = "6462fe0b-24de-5631-8697-dd941f90decc" - -[[SparseArrays]] -deps = ["LinearAlgebra", "Random"] -uuid = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" - -[[SpecialFunctions]] -deps = ["ChainRulesCore", "LogExpFunctions", "OpenSpecFun_jll"] -git-tree-sha1 = "508822dca004bf62e210609148511ad03ce8f1d8" -uuid = "276daf66-3868-5448-9aa4-cd146d93841b" -version = "1.6.0" - -[[StaticArrays]] -deps = ["LinearAlgebra", "Random", "Statistics"] -git-tree-sha1 = "5b2f81eeb66bcfe379947c500aae773c85c31033" -uuid = "90137ffa-7385-5640-81b9-e52037218182" -version = "1.2.8" - -[[Statistics]] -deps = ["LinearAlgebra", "SparseArrays"] -uuid = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" - -[[TOML]] -deps = ["Dates"] -uuid = "fa267f1f-6049-4f14-aa54-33bafae1ed76" - -[[Tar]] -deps = ["ArgTools", "SHA"] -uuid = "a4e569a6-e804-4fa4-b0f3-eef7a1d5b13e" - -[[Test]] -deps = ["InteractiveUtils", "Logging", "Random", "Serialization"] -uuid = "8dfed614-e22c-5e08-85e1-65c5234f0b40" - -[[UUIDs]] -deps = ["Random", "SHA"] -uuid = "cf7118a7-6976-5b1a-9a39-7adc72f591a4" - -[[Unicode]] -uuid = "4ec0a83e-493e-50e2-b9ac-8f72acf5a8f5" - -[[Zlib_jll]] -deps = ["Libdl"] -uuid = "83775a58-1f1d-513f-b197-d71354ab007a" - -[[Zygote]] -deps = ["AbstractFFTs", "ChainRules", "ChainRulesCore", "DiffRules", "Distributed", "FillArrays", "ForwardDiff", "IRTools", "InteractiveUtils", "LinearAlgebra", "MacroTools", "NaNMath", "Random", "Requires", "SpecialFunctions", "Statistics", "ZygoteRules"] -path = ".." 
-uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.17" - -[[ZygoteRules]] -deps = ["MacroTools"] -git-tree-sha1 = "9e7a1e8ca60b742e508a315c17eef5211e7fbfd7" -uuid = "700de1a5-db45-46bc-99cf-38207098b444" -version = "0.2.1" - -[[nghttp2_jll]] -deps = ["Artifacts", "Libdl"] -uuid = "8e850ede-7688-5339-a07c-302acd2aaf8d" - -[[p7zip_jll]] -deps = ["Artifacts", "Libdl"] -uuid = "3f19e933-33d8-53b3-aaab-bd5110c3b7a0" diff --git a/docs/make.jl b/docs/make.jl index 328166811..b061acbab 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -1,11 +1,6 @@ -using Pkg; -Pkg.activate(joinpath(@__DIR__, "..")); Pkg.instantiate() -Pkg.activate(@__DIR__); Pkg.instantiate() - -pushfirst!(LOAD_PATH, joinpath(@__DIR__, "..")) - using Documenter, Zygote + makedocs( sitename="Zygote", doctest = false, @@ -18,8 +13,15 @@ makedocs( "Profiling" => "profiling.md", "Internals" => "internals.md", "Glossary" => "glossary.md"], - format = Documenter.HTML(prettyurls = haskey(ENV, "CI"), analytics = "UA-36890222-9")) + format = Documenter.HTML( + prettyurls = get(ENV, "CI", nothing) == "true", + assets = ["assets/flux.css"], + analytics = "UA-36890222-9" + ) +) deploydocs( repo = "github.com/FluxML/Zygote.jl.git", + target = "build", + push_preview = true ) diff --git a/docs/assets/flux.css b/docs/src/assets/flux.css similarity index 100% rename from docs/assets/flux.css rename to docs/src/assets/flux.css diff --git a/docs/assets/logo.png b/docs/src/assets/logo.png similarity index 100% rename from docs/assets/logo.png rename to docs/src/assets/logo.png diff --git a/docs/src/index.md b/docs/src/index.md index 3476d5e7d..ca45d5da8 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -133,6 +133,10 @@ julia> gradient(colordiff, RGB(1, 0, 0), RGB(0, 1, 0)) It's easy to work with even very large and complex models, and there are few ways to do this. Autograd-style models pass around a collection of weights. Depending on how you write your model, there are multiple ways to *explicity* take gradients with respect to parameters. For example, the function `linear` accepts the parameters as an argument to the model. So, we directly pass in the parameters, `θ`, as an argument to the function being differentiated. +```@docs +gradient(f, args...) +``` + ```julia julia> linear(θ, x) = θ[:W] * x .+ θ[:b] linear (generic function with 1 method) @@ -174,6 +178,10 @@ julia> dmodel = gradient(model -> sum(model(x)), model)[1] Zygote also supports another way to take gradients, via *implicit parameters*. Here the loss function takes zero arguments, but the variables of interest are indicated by a special `Params` object. The function `linear` which depends on `W` and `b` is executed when the loss function `() -> sum(linear(x))` is called, and hence this dependence is visible to Zygote: +```@docs +gradient +``` + ```julia julia> W = rand(2, 5); b = rand(2); From c81d0d706984908d237fa86065b1e901a6b7f602 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sun, 4 Sep 2022 13:49:10 -0700 Subject: [PATCH 359/490] Bump minimum Juila support note in readme 1.6 is compat, and 1.8 helps quite a bit with inference + latency. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c945f1ee2..2bff370af 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,7 @@ Zygote provides source-to-source automatic differentiation (AD) in Julia, and is the next-gen AD system for the [Flux](https://github.com/FluxML/Flux.jl) differentiable programming framework. 
For more details and benchmarks of Zygote's technique, see [our paper](https://arxiv.org/abs/1810.07951). You may want to check out Flux for more interesting examples of Zygote usage; the documentation here focuses on internals and advanced AD usage. -Zygote supports Julia 1.0 onwards, but we highly recommend using Julia 1.3 or later. +Zygote supports Julia 1.6 onwards, but we highly recommend using Julia 1.8 or later. ```julia julia> using Zygote From e8ab2e7f00b14fb2939841718faaf98c512377df Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Mon, 5 Sep 2022 19:21:40 -0700 Subject: [PATCH 360/490] bump to 0.6.47 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index cfe708ec8..c2c895e46 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.46" +version = "0.6.47" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 4e10ceacb88166fcec5ef994c8a9d95689ab2486 Mon Sep 17 00:00:00 2001 From: maartenvd Date: Mon, 19 Sep 2022 05:34:52 +0200 Subject: [PATCH 361/490] preserve type over broadcast (#1302) * preserve type over broadcast * simple test Co-authored-by: Michael Abbott <32575566+mcabbott@users.noreply.github.com> --- src/lib/broadcast.jl | 4 ++-- test/lib/base.jl | 10 ++++++++++ 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 98124bd03..58f7ecf99 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -195,7 +195,7 @@ _dual_safearg(x) = false end len = inclen(args) y∂b = _broadcast((x...) -> _pullback(__context__, f, x...), args...) - y = map(first, y∂b) + y = broadcast(first, y∂b) function ∇broadcasted(ȳ) dxs_zip = map(((_, pb), ȳ₁) -> pb(ȳ₁), y∂b, ȳ) dxs = ntuple(len) do i @@ -249,7 +249,7 @@ end valN = Val(N) out = dual_function(f).(args...) 
eltype(out) <: Dual || return (out, _ -> nothing) - y = map(x -> x.value, out) + y = broadcast(x -> x.value, out) function bc_fwd_back(ȳ) dargs = ntuple(valN) do i unbroadcast(args[i], broadcast((y1, o1) -> y1 * o1.partials[i], ȳ, out)) diff --git a/test/lib/base.jl b/test/lib/base.jl index 74f129f6d..4d4c54626 100644 --- a/test/lib/base.jl +++ b/test/lib/base.jl @@ -1,3 +1,5 @@ +using LinearAlgebra; + @testset "base.jl" begin @testset "Dict getindex with implicit params" begin d = Dict{String, Vector{Float64}}("key"=>ones(4)) @@ -11,6 +13,14 @@ @test result1 == result2 end + @testset "Type preservation under broadcast" begin + # https://github.com/FluxML/Zygote.jl/pull/1302 + g_simple = gradient(a->sum(broadcast(x->x+3,a)),Diagonal([1,2,3])); + g_hard = gradient((a,b)->sum(broadcast(x->x*b,a)),Diagonal([1,2,3]),4); + @test first(g_simple) isa Diagonal + @test first(g_hard) isa Diagonal + end + @testset "Dict iteration" begin # https://github.com/FluxML/Zygote.jl/issues/1065 function sumkv(d) From b13d5953952e3c134ec2cd60023366e67eb215fc Mon Sep 17 00:00:00 2001 From: Will Tebbutt Date: Wed, 21 Sep 2022 12:23:56 +0100 Subject: [PATCH 362/490] Wct/fix repeats euclidean (#1307) * Add failing test * Fix repeated inputs * Remove comment * Undo change * Uncomment tests * Bump patch version * Defensive choice of zero Co-authored-by: Miha Zgubic Co-authored-by: Miha Zgubic --- Project.toml | 2 +- src/lib/distances.jl | 17 +++++++++++------ test/gradcheck.jl | 13 +++++++++++++ 3 files changed, 25 insertions(+), 7 deletions(-) diff --git a/Project.toml b/Project.toml index c2c895e46..599325e12 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.47" +version = "0.6.48" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/src/lib/distances.jl b/src/lib/distances.jl index b49a2f74c..ee39e9de6 100644 --- a/src/lib/distances.jl +++ b/src/lib/distances.jl @@ -79,11 +79,16 @@ end end @adjoint function pairwise(::Euclidean, X::AbstractMatrix; dims=2) - D, back = pullback(X -> pairwise(SqEuclidean(), X; dims = dims), X) - D .= sqrt.(D) - return D, function(Δ) - Δ = Δ ./ (2 .* max.(D, eps(eltype(D)))) - Δ[diagind(Δ)] .= 0 - return (nothing, first(back(Δ))) + + _conditional(d, δ) = d > δ ? sqrt(d) : zero(d) + + function _pairwise_euclidean(X) + δ = eps(eltype(X))^2 + D2 = pairwise(SqEuclidean(), X; dims=dims) + return _conditional.(D2, δ) end + D, back = pullback(_pairwise_euclidean, X) + + _pairwise_pullback(Δ) = (nothing, back(Δ)...) + return D, _pairwise_pullback end diff --git a/test/gradcheck.jl b/test/gradcheck.jl index e37e0ea15..3330d5927 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1192,6 +1192,19 @@ end # This is impressively inaccurate, but at least it doesn't produce a NaN. @test first(Δ_fd) ≈ first(pb(Δ)) atol=1e-3 rtol=1e-3 end + + @testset "repeated X" begin + Δ = randn(P, P) + X = repeat(randn(rng, D), 1, P) + + Δ_fd = FiniteDifferences.j′vp( + FiniteDifferences.central_fdm(5, 1), X -> pairwise(metric, X; dims=2), Δ, X + ) + _, pb = Zygote.pullback(X -> pairwise(metric, X; dims=2), X) + + # This is impressively inaccurate, but at least it doesn't produce a NaN. 
+ @test first(Δ_fd) ≈ first(pb(Δ)) atol=1e-3 rtol=1e-3 + end end @testset "binary pairwise - X and Y close" begin From 47a9002084c5c9538fd9a4b6a07d1c7109a930d8 Mon Sep 17 00:00:00 2001 From: Saransh Date: Wed, 21 Sep 2022 19:54:23 +0530 Subject: [PATCH 363/490] Make Zygote's logo a real PNG (#1306) * Make Zygote's logo a real PNG * Better quality * Better conversion * Dark+light logo and no sidebar title --- docs/make.jl | 19 ++++++++++--------- docs/src/assets/logo-dark.png | Bin 0 -> 113603 bytes docs/src/assets/logo.png | Bin 67670 -> 107081 bytes 3 files changed, 10 insertions(+), 9 deletions(-) create mode 100644 docs/src/assets/logo-dark.png diff --git a/docs/make.jl b/docs/make.jl index b061acbab..f81ebc9aa 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -5,15 +5,16 @@ makedocs( sitename="Zygote", doctest = false, pages = [ - "Home" => "index.md", - "Limitations" => "limitations.md", - "Custom Adjoints" => "adjoints.md", - "Utilities" => "utils.md", - "Complex Differentiation" => "complex.md", - "Profiling" => "profiling.md", - "Internals" => "internals.md", - "Glossary" => "glossary.md"], - format = Documenter.HTML( + "Home" => "index.md", + "Limitations" => "limitations.md", + "Custom Adjoints" => "adjoints.md", + "Utilities" => "utils.md", + "Complex Differentiation" => "complex.md", + "Profiling" => "profiling.md", + "Internals" => "internals.md", + "Glossary" => "glossary.md"], + format = Documenter.HTML( + sidebar_sitename = false, prettyurls = get(ENV, "CI", nothing) == "true", assets = ["assets/flux.css"], analytics = "UA-36890222-9" diff --git a/docs/src/assets/logo-dark.png b/docs/src/assets/logo-dark.png new file mode 100644 index 0000000000000000000000000000000000000000..188c149844cad623edb8e36fd2b636a1a803490c GIT binary patch literal 113603 zcmeFZ^;?v0^FA(V00IJnNGUBP(vnKI#L}U3BP_YVA{GtONOy;Hg9-~G-LWj)uq?ea z-y2`?ex4tA{($fC@i<&jWbe7=nmFg2nGJlUB1>?G^3IJLHwfhAq||TRz^%V=1LNox zF7Pj82BL3(KQO@RvM+Cx^ir+exIuSAUh0LWhtVeDriUizUZ3rm%0AGT>Y#B zi_6YEx_37)Z(aM+mUZJ(NxT7n8<~#3NY>%Hey|WuB!ucvr441F~x;u5&sWWVqgPGx_hmD{(R&A zDe3HqhZwDt-wufWE}+Pc&2Au?90hD-jSj@==Zq3(Q(A$7gHR&>iOb;gpPMc%|o99}m#}kC1a#dgBx4bWknXD-bB( zv5>38VG7ioTUD=G<>RQ@P{BR6W%t|GdD1yC>5vMUiI|9n&)diol6{2PIax62Z}4c8 zuk%C>+3Us@o$Q<0n&!YJUq$QIHa%^Ek46^#L^3`giy?)*%vKS2G;P&m$$Ti^Bn!K%7$hb}Nf0UP41dqc?V$;MB1Y4*5R%xmdDV z!0tg~<{P*~ZpT+D9l|ma^H=!lCLD-io|>@}BcZ0T!(W9*CGHL_sCxYd$FOAGFRa(A zb&f2ppUlV@LZ~hw?Dp1nVf1`d+gHFDIn5_qQfdU==F~SYrE3UIw0IEP{n_u+hi0lN z`^!>XGW-+o%R#Ibby66y0EyY-Yjt*Ol_;%08!Nbf`!--E7jEb=f8T}`IQNEav$&nl zLH!4+tx-*4?vIYc`g;oRUTVMOa353A0*yqQ!^K1|x(wM=j_RF{SJ#)DvLF8xrf4^p z=gEcpyNe$!eCWD+{MG}Oj=G;qs{PqXu>Sh|NSA?0`rz^JWRDN|1ql~x%ZwyqGu4a3 zTx>0Zy=zW-_f#K2ET#lnE(oeRwpou|8nc%zFHY1ZE zjsia5`4#=xq{D+Pu5c$$!_GA6REf*?er)0Ed?yDHqT$ed!3Gb@eRDaP>u>O^`O5LW ze7OZ&;F0rKxf80yYp<`%om04XVJ}{C&Li;Nm6vGi#ji}-~>rSWB7Zjdo3 z``lOD*Y2_|!1y3g^`PbwQY4CX=VpmW0XlxH{V-u$Q$|L{zGz9c`P!!kUL|AhF;TOM zl1k7a1nn;!2d&*{5+A7MhIFs-bvZ^LTWIXVC#()biv%G*+FnG_Jk%5M zwys=~6}wiTTW9KYyv7q9yy@7LUN!d}Jx;sPqb;64Cf8Af_w9!?$i?*ho^(bt+&3-P zfu00ST_!%<{$eTu+YY>(1Pz#p z!u{ZZUDG#R(NyIK9YgM10oFQd+b2;?7#}SDF z_%OqgBjX7P2oyC9uRT&N^}XDU(YFz}8>s25_l8YWNO`blEz&~8xMyYQ>obTd9&E%Y_OwnhWDb$==CVsujF1@{|IPT?RiN-0V`g!*WB;8 zW}l07Tv?wNwL`p=A(0L$Q6X8polkRvCnyF8Oy^RJq)x}S4s3QR6)GC4nzomA>srO2 zmuG&)DZbm%?o@iv#fCSuAsE-g@*}P*OnMKASx}JETzOK*W_)~jykTb_syk;U|11HP zkiAii^$V`|;W$OxB_y={fLzpX-NyIC1iuA-K6j*e37TLQ_JmPfN1 zJ|^Hv*m8An>(Kpb`nxw>AMT=BsGN@pJ}BJPX*anZ024U?$C|qzHh2L7Rf$~o@QiUD zZNY34lB&DkW_>kNiey(@4u^4=ItzYI>!ZVNggTybmNLZ|>WPHWL5|Luf--(Nop)R= 
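The `docs/src/index.md` hunk in PATCH 358 above documents `gradient` twice: once for explicit arguments and once for implicit `Params`. A minimal, self-contained sketch of the two call styles it describes — the array sizes and the `linear` helper here are arbitrary, illustration only, not part of the patch series:

```julia
using Zygote

# Explicit parameters: everything being differentiated is passed as an argument.
W, b, x = rand(2, 5), rand(2), rand(5)
linear(W, b, x) = W * x .+ b
dW, db, dx = gradient((W, b, x) -> sum(linear(W, b, x)), W, b, x)

# Implicit parameters: mark the arrays of interest with `Params` and look the
# gradients up in the returned `Grads` object, keyed by the arrays themselves.
gs = gradient(() -> sum(linear(W, b, x)), Params([W, b]))
gs[W]  # same values as dW above
```

The explicit form returns one gradient per argument; the implicit form is the style the second `@docs gradient` block in that hunk refers to.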
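PATCH 361 ("preserve type over broadcast") swaps `map` for `broadcast` where the broadcasting pullbacks in `src/lib/broadcast.jl` rebuild the primal output, and adds a test that structured matrices keep their type through `gradient`. A small sketch of the behaviour that test exercises, with arbitrary values:

```julia
using Zygote, LinearAlgebra

A = Diagonal([1.0, 2.0, 3.0])
g, = gradient(a -> sum(broadcast(x -> x + 3, a)), A)
g isa Diagonal  # the new test in test/lib/base.jl makes the same check (with an integer Diagonal)
```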
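PATCH 362 rebuilds the `pairwise(::Euclidean, X)` adjoint around a `_conditional` clamp — squared distances at or below `eps(eltype(X))^2` are sent to `zero(d)` instead of through `sqrt` — so inputs with repeated columns stop producing NaN gradients. A sketch of the case its new `gradcheck.jl` testset covers; it assumes Distances.jl is loaded so that Zygote's `pairwise` adjoint applies, and the sizes are arbitrary:

```julia
using Zygote, Distances

X = repeat(randn(4), 1, 3)   # three identical columns: every pairwise distance is zero
g, = gradient(X -> sum(pairwise(Euclidean(), X; dims=2)), X)
all(isfinite, g)             # should hold with the clamp introduced in this patch
```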
zoH7Q=d0s>;%?tWoUbgc`q(6dCU9i)$9Oak1BR%e^%2n{4=VK@lu^>KLT&$;~r&nwm zo83h7k$v~7{M31VI;h*I%l~Ks;;i{@pMFS#KsK8{K0p0282n`J8aV1sfQ7{Y4)q!= zgP+LSoh`?|K5gm!JuwLR^r@w-rN#7Dmjie+n6xW&Y9h+7dilB_iUI}-%I4qeC7__f zfO=e%bQinoPF%5#JCs;Dz2IbA$A7hGoB0&J%z`(cV+H;0xp(QK?$zlf*eR;Yzd~D{MMSIco@X@ny8gW#iw=o+0g}xVCPdAD z&o+}fJpIw>%USW5^ovUKkSgj~0*Y%xm)&%9G~v`DH|B^h3kpj$-1@IHq6S-Iwth`Y z1+i0-Aly!*GSSfuO@}PYq>csME=84;SWkc$K)hda5&Fp58bylgGL?g9ZGCP^ij|G2 zX*{Ds?(@|Z*sQ@yXy5)96=aN4I8v)F{>=sqx(Kit-1$Q|c5kx*wsbuo6F2!A^5Z<0Srv=vzR8TBkKafO zEL%2Ed29(lj$FJRifzKv(RF$HG6csQ59wt!TT&LhDmZIAAdvt8wIeDFY!sDlgHbrb z(*1~npG?`ey%s83eHHN5%9**Z_%K%Yd&{Aaby}=Uz{QG$4_NFlbClpKzUtH+0L|w- zKv&faM&Tn)pUZbYotl6i(npTd_d8-2Oa%y#zV9|8EnpofKy3NJi;8X3MB)1Q^YdW>+A=)>)5bG^j71#ked3iS*+> zc(SZf-~JpFn*-xv7#cu`FgrehgB#NLNE;>GK!{v;0u-_9qty&D!_1!e>> zCK6shXa?EF>4hYzs(bcs8K}eZ1T&N7nU0)wybNBO)7BzpU)U`#gKD6SR_`wiZDVsl z2$4{Eq9vQSx{6NGU}iRYFX&r{UO-9L%>ns+iU*6V9D6IZx?1x^MulZ|7)Mf57|b}# zb6Hw5n!(d%g(uLt-t!`6o+piIJ{}`iQqDgkesqH&gPAdoZTwq(w<0eFCJC+(ANoiV9gVMoLpkS{jHmUQ%)zUy(+( zZL%ZQudP+nI*{x5rOD`!y+OHy1(d#DwpP?-W-gsIH}h-D-7-Ow8StitQ};qe0G{O6 z@V>%VKL<_5K>g8r3_QfbG?1{wZOr{;3MQpSJoy@LLL`1ad~oGfx&XmhOfuk8*-N_1 znY2UH1nh}mC3q$n!_JZ&gU8nwmBKg*DSA6%+UaDv(&OjG&@WjmT#;`VAI4oBcOzLG z1ckZZ4-IWo7vhB2&MH9}OIMU>MhiK^B$$HxDXzvYyETr4os!k~{g{2lgVnIkPQ5dl zGa5H7LaqGTBD|3fS?2j4X#$tOv+BQ~@iU4y(JaT(FfAAI2){&gye}1BN zm$elzLIYb(ng`&x7#lws!2V<3j4)@qd6dsdYKA~J-kWG~+oiMreP*`|q7qme8yoXJ z^&s?z?>OHU|NS?Jh|g95F_8nq9w1DAf!d3Xa`)EhL(~|=3ak<-A5^^A!2H*gjf>Sn2vpm9G90}s>z2^ske$1Vx}(xBU-UqZwP6o6hs1Q^>&`3S1Nq6Fx=5R`{5h@9xYufmneo_H6^F9)7#oY z;kj|qavY1uATX}_LG{}1oG%Mm{+@Ikisk$A-aU$=_qU=Oc$KTupuIS49>!SoV?{*{ zSz<}5L%y^WNAOGPaujRnAQ};LLQ7=G^pfzpYUk$u@NW&J{(mjX*_&}(ur)Tyxh!W+kcZwnSKxY72n)!HX@B%seLGY2m?(g5}#Ds)-Jnl&eb<|F^ ztt~ABLjwaXvcp#tWx(SH*|Ku2Z@s3OaGBuw6!IIk#(soMz_lMlw#mMBz+hB@TPDHX z!-M%6e`sf)Jvx~63v=N4n_n%^G6YtxeSG#i1+}N2i9cq5d{4x+GY@!IH7SOkpla?8 zNSavon@Y1f8&2*}P}hB?0DZ>_^icV@hG*(|sxOEKb3*NY4~=qlR%0S@-Aa<|;B|Mu z6u0mGxuhSQMmnQ03#HV+oZcVaY^-7kS38o%WtVg-KR{r$%w9r|@-00KxBUa}huMZ& z>v4Z^RrFDUUe3^EX>wS1OUpb#)FaBqv+9jDhrf^3al7zg;pPG60S;)?!lDA^2MuxA zFki&BPe|{}TDuJ-{euk!@LDLVu)j^tDeq*neToSu?QXo=Jsm-WEeB!)$p4_2@wty@ zzvhEL-*>lz9j<*`zzClM4!!1xC0K9>)C~_0SDaiy_E(;Q)=8whjuf`r`o=~L7UCQ?sQ9q~cjo2J z3#PA+Is>}7ubc!U%nQGmKixYY%t;Sle6*e{=+}l}xys;(z;fxTG=fq?QCF>=2n?2x zSzHp#lE6v{ReU<>QdTtVVnvW{3Th=*X{Z;&`fXwH9qI=`sqX^@lAWDu#cbnJT~lUg z_KG>K>Dy4qt7;w)5!l7`?4jfB|Lbe*z8X61!pF7L-5kTK`WNktim)&*7foBgQF%U1 z{;C*7R~6Xa7C<2G?FOGzP)$3k;*|>H9%2QTar1ZY=&!K_CNJYzT5U3RV4>hb)-A6F zk&zb+z<^-~V7619AwPDXySrP`M+TWu<$hAVJ(gp2kyfJ^t*BszMDxJ5+z;lH((<9_ zKuR@=?HR_vkJ6Y?&+$-krKBx|fm)3(p8O@;&{}XZf4Bn>)<>G-068 z3&AflD9GbP?d$t%zWmPm9Kv0ZC_NXYpXG@rfxQ#Lk3iiGCD$S2za9(JJ$Gzpuw@4t zK?YzoIDt5%3sXj1xCyisQTL<8E7Ra46H+ujf6x!$#3XGj5i)6XmctkUzz1PFxY z3qTXSjZ5*DJNWb=48*;a-pY|YrFIq;Qd?VF?@}{lntr>#ynGMH><90cF~C#Y4#=`8 z8-p*I9zguH4?K|)EiILCUsHGKOP|4x)V`X455!pG%WJM^mC79M*%6!-Q<qiw=Lt zWjyaG84;3U7A$K$dFzwa^aVUc;t>mEe!+8Izxnn(I?^bH6FY@9ga)l8>c#)&k9S0{ zEGbol1)|RD91;_5?EDDfCX{WZWy9rsGue|O8tzJC6~|fkon_X#mL0{^J*v3yKhTm_ z7j(a_I$9O^jQ^CsGYZ4cz+{LtbKIksqg0#LR3td46vT@=Wj{FS%+KJ^;(`NchI|Yv zqa1+G&`m=yDdro&+h}dme!ca!NyS2dC^j&ZT}%zR_~Z`yW9icTe7jS4?y}A>=L;et z0k^?uqSrL#n(rL|U_lR1p?a0_Klh7n0>xkubWDEm92X%aqYIxnBecqndn9g7LV-}E z8yR?Xx!5e;<~SyBc>#8&5^xyeF{~9b+W<}rlSs&m-CUa5X3M1R2~-oo5AZlt#r8Fs zLbwFtfW3Z$?G-Vf69_ptK>Cd(%`S1aq5#Ky2flT)>-~@>aWDr5N20S?T(UzmIS8}1 z9s@78&(cyI_qDw!>9v!wF3w&?D*wgK)?wENPNkGj#S9bbzf7^IHvKgXd1P5+Q1a(a z2pdYQNv2{wU7$-Y&bgm+_T#~Z3A2zO@qvCq`hE668dVJ!PIQYrUQvqaZGu-es&)19 zHWVaFIr;SUWZKl#m+v4dMM%R%5y^w$iwwuc`TjSx;?=pM1dA 
zvi1B}JH>zFW|u8xXjrDRBS!rWTqr+HFdbZHdAry~mf9*!qX))k{Dp6m8s6Wxzq|M} z_76Q-He_r2au<<%z_{V$z zMXKtipD+e)Ctwt51rc@W=EH);0iwMiadZ&9tAB5=5vKF13D|jgpr3=d9za4taT3b( zeQ^5(pxaX+B0X7RR`fBG3hpP55lxwZd8-|u0)ZY0hE&p-*;Lyh(8XX8(TU$f-}n;X z_x)*=|Wf!>>ToSTpJ7&M@eo>gQ=4P0$@Dfj?H zSjR+^m7yWT6!$)SB+6t0HRt&1z;jH3ilsxEmzk(3%sgCNqjZ+SeyE7Y{?0MqmM7=j z0xm9;TxrSP8g=I60@ocKG^3xWKl(2Opr_D5_#`;k)wK*05VRO1fx7l7laJSh)~@mi ztkYu!E^Ql~UE5mqP@istV#d>2n`adWCuROMwJPO4l9g==71DeWlvPbT;lA(vcX>=( z!i1}b!YuS33{Ingt|f)IxH}e=0jzr;-&x}L{Dx4v=DOmo#{2hgcJJ4ZB}tf7 zWmvVWoKB{NS*r!XT7RonuJXT)C8^Kzvb8y#pb1=3$``6{;pPRz#Nw%FCsi56-Dyfw zwvm2Qo2mL1N~W_}Ex#Sj-}ns^gN+1|VDXQWw5|COXXXOCoP+{cOv}#-=>g-3_&C z^C+`G@`CTA(9x(1xN={!vsd(|ub7lt=BkXFYU}DgFes74h~I!fl_EGf5|o0f3M(I+ zTPQ>|LFOwu?yl zEGIWL#TWW2cS41S3mQdFPxi@0BEd3`mW^T>KQEaX4r;n#72mXK{HZO@WV2ok5Y>*%GD-_NzSnPy2d;^zBx3S0o9X>`$w=`FaX zbAeM)Qf>fn6~8Qi76;6S4nX$5*L3`a@z-4%1eMk(5chO%Fb4%qoEPtjds5y zjd@LSk6KP`y&EZb0Xan)Kq-jG)YSAsnk?#{wD<)9vkp{Z#P+e-8*ZfT5xe@407KzX z_$wHZDdz5I*0hCzP*g{I;ZbFH?y+{dg_+u=g_yTXVH=mIB7_t%o*#{}(`~|nf;TS$ zeWqv)kR@?zsp-{?HQ^aWE%H;C;e4*IT|DgOVv;H0-r@~Gxu6O8=+2&8#^e+6ktauE zO;aUbQ2#{46$=%NY>DmK3&Z?l+MLbu*xqQg)-1dsmnB>Btcj;7#^qqwU7&NJ8R~L> zUWOw6RzPp`GlO9VjBx9iPETnP&N|_Xz(A!;-n;wzdq-^3a7Ael=v;;XuMLW21k7J? z#bsueo4$LeLwhd9fCf(2mxnP8AfNq7Ee~4G(%IR$8)C4;g&@D^)n#M_UFnhKWzwU1 zMrZ>$l%S@jn?}I8VeSxm0umFL(r%vB)jDO>Fn>1OHc3?7!Ai zAEHCXg8-tm5eOAOs7#M#SsIlq{>uXNiOFN~@P7N_H1<*m@~Pmn0t_QNxlD6hAy6?9 zA;iJqLY7X6;jCCB%W$!uEJ*ccAWIA9)#O76we?Xn>t7zHAA^zR`H@Tjl*HHT{_U1H z*|a)Sn^7k#Bx&cFCpJ(4*eDS~czAiM{n$_fi_ubNB&ldDxIvMgC(wraVQ_(JDy4f~ zl*I*+MV7>qP;bGl%@R$>OI5_gX>tg?YE9e2jOY@fM^j^c=IW>d=k+hSkICsd`(X$S zi9PE|+7j2)!k=%j2a+lyd8?`f&Dc%O-_ZAO^ePbjF0{iy)A z7-8UALBzFOvmCbc%%NccsDSU|oN~YYAyZUzOpEqyzkVHi3yguqwKdUjhxx8R?aEQMI>w2HtHr@= zWaTZwkL$Kfc0sgy_!YVz@Z6KvmCim;w->yQ#Y|ta6NgHu#{D!Y5#+g>Ex?676O!ME zufoA2j<>trZy8W5u4%>lLPLYnjy;#%<=f2VpV4?k=O9v8X47?PAIlp*I?+%ee?!M{ z?C)xUU%N~8!=CfI#{BaB0CZGjI5z2;8~65j_oKCX0&SE;NO~wTAd}%)BK@kRU^*9b zv=qS}lz!A!B%7V&AeKwR*(AJ$h6T{7MP_>KVtG{!9=`ZP6BAGXQNRZTEDXLyz#4g8 zuCrD)gKajx4Wz-Bl#uxAe1-?s201izJ2x|8I5x(%fNK9| z;5M55`OV?5Snme{3wVFnlPbJoBO=oIK|7aFci|zPtkGI7u6tvS&{@R%X#k$7^BK$) zNKoCYqC78OnkIp&;wzhj2sk0?ia%fn#bSf)%svmY?_VTXr73wZn%0VX9xqP-o9sR= zRvXUO7W6im9+_d@)=2B(?cHBoN#D#6q>`GQYsHs9>HS&ajKFe#cHhufF*NQAms5~_ z&1ADbU|7Y- zT}E*b>o*avr@@O*`H~$jv0k=@iL{iiD&Bv+$~VI5ix;q++N`)Q^N9lwx3#toYq-a~+y{~o z0K4p8%AS+g>TiMD2T%f6?gbp#WHG)&`2RZ~k}K)|{CHyuBo;|$HyWFVn4&KFau`GS zyK8Om5wgqKRyH>F6?(1oM)DSWjnqoh=4RnS%2Q%PU{knOeZdH+I!F#^gv zy@T#AhcR7itXZ_UoQTNOJt6rp@VK|QD(k8Nn9n<^Vye(kBEIW>nw3}26BFD4k3Db_ z#7J=5rBx;3*)IYF179Ge>+{INaI#OR{>VJEsC_4kx$=muN^eR7pt85rWA|I8J8OO_ znoOs+-C9iyD%&-?9nVy2B1u+(W+4fKDsMs#j*X6@Uz-Y;#t4dLkuIQh`!iLapnRnV z7Y91UEdKbjJd&@sBuuPev&aeTj$j2RW%#xd-ku0rKRi7A-Dp6hQ$~1XKpArtra<@8 z`p29|T@m$f$bup8i*+NTip|4RKotbdUM=&>)+tLA3m?LwJOn26DuKmZS71E82Rq)2 zj6tSXR?|}U3>rSHpkS+IdwV;+klaJ&OdOX;3%)hayb>|gayNN5gHyofiw(l1Mm^B{ zTG$E?dZL23DGSp*1((w5+gkf*%Gi+Cr(m=xezx5X@oO;S=3;8FumD+`$Q_9;tQ(41 z6G;IdhS<+-&35NfdgmBN5^LJ+6)9{#*Xi6;f~jd(-OX=;IU)triiB728cANy&$+MQ zy@ZB4WDdYvRf*^Sq^-#W$J4-FnrhS?>$s%&Xw?l@Cr3+L_GfZa(V(NKs0hJ`j!x*W zMoO?jn2x3@Joj7;0()Q7k#e6zgBD^?!=<5w8hRF^q|S`e@%QJp=I8rDqK}yd zxtw_bF*S6HiyJ75;ef#9y;VJCaRIO>!C*6$PpsTqnH$}yAvqae?s+uGTeKb(73Hq0 zVnlohq(esJFj`sB<;nF=48<_mvljrl*m|?~Ym1G;@ZAKvj@|_r8?q~b;dKD)2w^as z7?2dk(FB|7?gI_;UIkaWe}hYRVz6~3PqPa#tNrgz0vC>eAfsNlJWfj)d7@zY@;KuI z5=z-G4Dv(5hUZ=0Vu$Lv%n}9pOLCvLemaSncApfna}lR2n4yi+Q9WL|UBmX=kI1Ve zr^CO);_c^IYV26GdWh`=kIy$+G|&lCJsgxY;BN7*;|Q6e_qko46CbKOwU9_87lxQo zDDTS%qHURP_t5EJ1>Qtndo6jAHmbO$yRZbu>tBQ_nmd|`BsPL;Uq6eu(6y+$slOhR 
z%kpWGQTZ0IpBxeTl3k5Q43AEa_1i63X@xm!?zgg1JrBo4O?A+|$>hR55>VJ48nC|=0oFx1t~RPKBotDd|n4HjnDP|%mJ^JV5bc^uLXoIZDueJ z+LE9;bnFk8`WQO@DLn@WAjjyxULMaAaakR%bbR1*?Xq<_isQuCMfd}%2=?GljtmNc zF$BjtSP2Nsf0}Dt!Xyg0VEN}ByM$hG7M!hBfG4GpGoY@H7IKm7>9vT>J-hyIB>9;? zO^s)P>T1*q3tK`YqSpXtlL4UKH)52esA3#~RX>9=^}{0pBXoRd(v0S6~zfda*<4Q$Idi=1A%bzWMUq zm6H_Kn2-YMlP}R;{n$w7G0T?&EsEB4UI9fM*9vfmMnTpBT z!>bV^DgVKl@ccAB8|(z)dHN)Tgv}cAQ+=h4HTK25F}(msY$jlG0Fv^w0s`LrTNWnR zlr%3A;^Np*t{3~If(OK) zgECqDSU+QfV!}YhYgny;#jG3R>fpM%Gcl0=V2P`1GrFDon(rmgJK+lT*eul^^>tWH zCWUTwv**yl`n$&ze{lgc*9fi3xXp`lJp}mA6Mt48VK9DVDaAs|u>;1~`Of^0qg_Wl zC9L1+7luo?P#KKAc?DmgrZwaAFywn<5dH)pXBFYWsA$ka@wo5ND15ZgP>(U*6dv?# zmEhohGHD-^U^Ij(4&~lQ6_qEDf39g z?bdRr8@!_H$X$lDU{6~Sn4-M_mENUC|JOSzwTz?~6Kp;d7ew2HgamL~%uWn9;77_^ zcQXV2>jx~UqF3jCrs&XdaBy12i#^=jqCQ7Oxy7SIpvjw#=R(6Gyr@|_b1Drvey;8yDF$461Zh?GI zlM=83#v>=NNhEXNDzMmQ2BZX=6y@Y>9)P*%0W={0&HaR37hrh5UGDhM6b7=<5ke1l zRBJi}dqF>=Mb^`@^`GKvlv5*G!NbN}8@V8V-z-iwCAgkkpSY>BbMz`eIH4LXWYqy3 zbzC5xaB;I_GfnBAlPU ztK<3Ql%PJz^wGBT1jQF+@7-x&Y$!~?$GWSsib_Q3-&eU@9n>mID(lnQAH$k5i7K`f zrEJtQioHAK+@8GRysIeoNz-l8pkOr(HC)Vo*D|?^`&&X|cg`atvoJTXsKJ!6P>Szklr#(p4U2jfq~&0a36wv*YOCt z3g=@WmjEC&>!_Ws_x6lrcT9-R4Q9O+;E9jmc9-UvP%J@CBviezp@d^YnT&huEV&Nx zy5@n`^{NLsqZrTVN3Q3W%U@L6W8cS{8Bj{o2w|liSg@BV$t4}cUzQG!5)hTtT!bI- zc0KZHZwD?z#X?IE%F2B^tZ`j?Nc=?OE`uHj7p#Y{h$5yh87RQh{4&%+NJ~XIxK|CD zo{=D&819*(&`{4V`#N*UqFqE9%OwFTCnMX#wYTrHr9cXO{W(4PyDxLN^k?(g90n|< z&{a_rWj|t{ZCt-!lDomG+-_J9v%#KnQ$q#p+v-4>J*xiCCC(gsc z=;9bF1JVqD)Q>$Uc|E#a0VKJ9ruNO83Lt8pY{|RJ`QwB2LO%fCmXoGR_kD~#()*qq z*;A8-y;V66V4%|hcgIj!R*R{vp(k3|&~U{Jrv28p8G+nv?B`G3kB|56b)e?Xt*oT< zN&Fi=C?8QFb$^#f0v@m+L^@3e?gWMIpMVhYI?f4qfuqkg@yUgQR+F~L9d4r zWLEwylZZ2XLPiGL7RDYQ%|CktW1D4>&%r9El8Yo)}L7AIZ02iAV+*Em==X;-?>7yan#u?-gvyzXD zr%)49T5zqzzFwTMQhYQ)$O!G{kq2UOmVKP<8EIvOa6bAN98;QpY*;4f8t06=-c7o6 z!cMcO2Y4zPHuPU%NgSAPRr4*?qNc=wc|j!ul-uarVe5>zbH4yAcDjI1fndel04cw7 zz*#jRzB3e5n(*lqP#Xs=bL*JA0c9lAef!sbt-|WxXB#v$Gy@24f=7)zu%flJ6d;P< zlpuKr;KD6byNqn}3f-YYb*K0p1GXzI14G38ld9rhjukpebGc9XrNp#Z&Bt44F#bnw z06YpDmlBz+O>v{6;^JanI}q3+Mt-W<@Xk(8w=s2e%n8h_>Uw@^a|3r^^7TRSghv4V z!yp2&NZkZ>7ai)MZV+<{#iS7V8@c^~=5Y$g8nN0t|p5;A|4Kz^+;fwtT# z^8ZY${rz8N*kJa3OFHSw&+#WB86FDw20RVDc0-kaMNfL@_7ZGnl2Ij@}joe9K@uK|_N0~4PwQOO!Pafu;WGn{la#7{j}VeuC4?N%dH|=Nhq7#5Sd!=5=jLXgt1`ege$-s{ z{k^?Sn30|yVf2K==ghEpb8~|Uh#N-K(l}YRXlS6(hX~?k_Qe465bT|=RO>P1v=hN_ zqb?$H@}s{pHWI&jlG_@a>e%1Zo#VUI;f!UOC1@*zBxcKw%zhmiu6djO`bJO5h*d59 zYZYC9nhI1|F>OE9 zh_EiVG8R_nncJqiPQ!|it|`L)OWnd9lhwuVnG@ssbc6vH+;P32*`(q9lh<(TkTJ5| zafl;}zHDs_j)wGADsx7O&gbuKs`{FgSi|Ca%gUM1Gg`8$EB7S~ehny2-yJVnnGg5A z?uwXZQs2RFL8xJe;_C&8fcT&R{vcHV^TvTxd8A+bmeTp&4XhN9lkk(MFh>b$M>W%C;2&J4ktR63b}o&oyTA3_ zepB@Qe(vusBXhBPv1|BsY@C2;vb3}ST_YL|`d#6IVZ9hsnW#TZ&^by?u~Ogcwd0af zCZc+pLP0ou4$1T$Q9whcB!-35^O^P;wafSQ3wo2na-{kh(-!Ar?8$XE4{cQRJ6ah9yVa(WW}?<5b$RJ4Sxql{07-|- z|C}fuGmV{b2To@U>NxR^e!FMvBY0(B`M~6_|F)1aUk7+1aM^KuG^kfiNbU4E(gSA6 zy+LBT)%zGgJJPN3y*r=TqoQVFaI(xsVi=~@4h#i<-TmNK!H{u+q4(Y*NSOZSHtP?! 
zxWuYWg#e?|fOkNi-0}Ol;k(13f;Bz{nIQM-%1U;4nVfyv7BGbR&b!P1x$7)+5Xa--Uyf2A+OfAd*n@t5LUbhr_tv6yz3pWV&!+^(yG=@*^0Xd!9oJToh_ z+p-v@w(`nCxasX3=+>M&xaTJPa2bWRC5K=>w2cwGSAM?yFMA0C#_WUhIWeBRyS%da zC%B)7H#aYcj4?M$t*;1m22ah!Spxjpr8;hX%Y0f&k`+$gF3#KeI&~1Y3U4gbER>3L zSFl=ka+dMCxE3{7t{Woji5_ONe)v$&`$tUbtK4R|ePICjcsDwMtDL%=`!m4Y8VZ6H z+k9Vd`3(|G143d_ATLou;5_dpB&&&zhE@ #XiXrKXNf`VpP*rKq`CM_XUN?FC&1 zgol3*s_bl={gez#V22xVvjRu0X@L(gqJh_9ib_4u*6cG|X^0BD8@L&}JLs1i2p~Ss zJ6l_ISbT557$^$a5mn=lGm8D1IR;(9Vsvt+$-ZXe?xjNPzoW&JrA=ZTjV?k8vo7`_ z_;)kkzaxE14YU}mQzI$&gkCYr;uJ~#t;5byRZxLmQU4>YKfL|9m7YukH7#>O@-&;J zHGB22eI=gzjqT)=ey0wpbEV+xB|QWC$P8k!;fWWgNw@==J&`WG`?f0CnV9=bOap`E zGmS?xvw?Qm^v4$U^Q5k@XB<$$27w<$NIR~prM9uPs>^~Sa$qL3tew&Yb=i&C3VTX= z1y%GId;~Of2AzMuzrb2q`eP6WKYOx}XQStLD86e=G3pw(%5++dDsa zk>UMwSuh4ObIL7{P2cagvqDPX{JhQ$Ky*?hRA>#`pkiVgvwwW@&1b}x{rURs)Et^( zI;Z7Fkc}-^g@dqCA!Sapr6m8eP3a|Gheea|io{DgSkD(T!^3~+0Oj!D(H6=L(PpfEV>Um8ACCP~$`M+%iG?zqz?yBRvC+r+LbM(gP5asJRA&knJH! z$Dx?uVG-oq2;k019naU8e#AY!o~de zwxBP%gWlwGELfBgVD|Rul_SRJ{)o7b3j!x=SLdiuSd&Yv1UV&@cx(Oi`oJX3ST z8>}U2X=oZUhdhfsO!`jWWGbB5*`SQSFi;OJJnPu|*L&#}1f)WN`SfS|WW3${d`aeF z*k~0iHyfvT9GopcHmzpYfMJvulIwnlD`-O0x129XY3rDp@T%&SvkXqjPY3Sje!1z6 z8&3pQp`*vwP*66L4(=H+n$mb#aU>QGk~ZKOX3A>(F5l*!*HKNa4uNP3VH6$~=wmev zH8tK!Wn9(Vc+9HOj3OAR7DNGblaNg>I<@&Qfc_w?7=Cr zm%*m?e>eYTNHf%K7ASL6E?XwG%DvXFxVfi40(25dS~YL+L-T_UFKs2w?*lMhhoOV9 z$}{0XLw-ct=8vg+-T^>jtXOBCw#G=rHZ;OdsF@kxN$f3UZr~(p($PrQ~ zJWYu>O%>FT>WEaN)LQH+*GGfQCo=n)>Xh%OcU;cT3rW<4o{^ymg4Mqov3nG1r4(#O zx*yc~&p$aL3O4cDKL#*k@^jZ(02UAplvoV2L!@naVWE_A*Z~zIqeDgVIJh9N^YD!I z9d!ev0K{A7P9iH@D!}RS2a*OJSv)5Az<#;{`1afe*F`pbG&DuhK%g;}n?lHU1nkWT z`5H>bq+~+mz&)^{j7w7T{F?nq7!jOLoU}j#;9N$Gn-R28^pHhb7V89A7f}m93p+KT zs3G`)01p*D=O!6s1aw(@NZmE||MBz{KvhL++XoLJt%S66cZWy`(j_4w-QA!xNOyNj zcXy|hNT*6Sh;;L>bMJS5XPD8khJE(l>wTY;R-!&o;};Ofl9rJn9C4T2T!9tyA-V&d z0O;yC&&+6Jz`oKT+4V<@6k-$PY`Tx<$JQ2LSn8IE&)h;~L`Wp7c;0cLURBsdtS9VT zM}$q=d~8M{%G_`6^2yov1Ck$dD${lru9R`k#g&A~civcyc$y zZObOh&)jX{<)vTpm{+#+I8JmIbcIPG9X&ya9(%_%H)TJnz~S#P)UxT}ElQN2&dp4c zaaJMDAd92A?pZQy9-Uat_WX?Lb*;_={Ed%X#zyeSl1%>lsPX!Ahzm89a>*G>DIB@{ z*98ChoGjBv68@6#dH0EIGbY?wL|Rs1G*~h^((slxjMM)HBEU7ETc@1MhyMZsBHuNq zR-29!#$wKL1#E#hu~H%;qGVoi$#;W5$LBNwX4*ntail#XX$@`Q)(Kx|v>`HTAed_Y z|L6TcXt=zI_1aw-z_^<1PcH^|=A)(D>qL4v5!|m%w`ALZck^X>CMLW0^-0edsN$4$ zuk$Os0aiGmr8#iLly7yAimi3D5!KVaSR zkq|@q@IS}uE_xX(ETcBKh{aCXUNqqP<$`Za7RyOpPx0&6y_S+%AF=q&QkD+(MHj`r z&7ngU%`YD}H~sUDx1&m*l~hy_#$95T+;fZLTgHNGnUp^1jF>kiIvm`^RwA`>pg2Br z9ER*&TpLs&FF1*@4DV#zl0#*|K?|!87-+VLyV!>1+rWssi{|?uKYlcF@bXGl1xBNq zLl4)FeJ&Ah|6}bS;)&Qj46m{PJt03e+}zx-F41u4fEH1F$(Ng=B$vv#OH^;OfHxz5 zTVf%of$@J@fGPLqC@+eYanz_So~TMdT#i+T)NTb*9kaZgoFnxfQM$|4)|Lejm$NTiE71Cl8X8{Evpg-KcgU@F|AMDHdO-`Ps-L_`0j;< z%#+z!A*A-}{rv@5qN!&+JLMG)7{-m;4LdCuYCA9#xn+QqE-$ zykBP)?GYS3cIKDj?;+TMTG{#pu6zW^Qg>O)(e68}?bATPFj}b$m)}ysQ=-D;lxmw9 zj+*Qzw-;7KFgGrnX7 z__!=D>qYCxkdPEmGM4eYL;=nJ6ubv74rj~oKDjTUQl0Kj%gagnO##;a}1l?sD5ZjM;Smr#+@fi&jaZO`1IzFk>iQo z^jgunSCarGeh*%Jae&^4spM98W9)n3$D)Dh3RKSgUvR$+%Bi5F|Lik8x9%9RV(eay z!~A~IrhTRGoZ!{AzaS)jb!cC8i!N&E;gXxJRBXWS$5wBA2>lgX$?H#yr&-`5Qihb*U|)2|D6R2yClw8utK z{M_*Wl&2KLggUxm;#rEI)$KnZf!Lf0b#m|iRCrXAjpZs05R=8D7M6T3)z6tIR=c5MP5W~ z&>@&C1c4_#FF&`f=Ahl>K)ng{Sl>%L!?b?>d-YiK#w2T@JCSK#)zEMqSZ&`^jZD#) z0i<;d2p?!Sy?^G2Rx^=nHKIsn_2b8!R(AEg5xAn>*>eg*1dsz)N%9^-=a;|Wf~00!ileyxyGbDHZoS&z5!B=ub?KxL# zsN}KFItyGrU0PhyU|Altm=_Tmjn)KAxpWsoK_?=Z5XkbLj^+)0dSajT78oKx9{RET z7-vxb7-;ljlI7&&Jddv%tLK$k2M@)2fP#HN|3I5Ek!2IbA4mlqpQ~v2Q;H+WCsm ziS$Y;<7jYFeGc$a309GUvZ3@GwnrCXnVLu(>FIp@WKAYZR}G5h+`S%$o)n{5_q(uO 
zTR=RlkrAGW%p(guF^|eFE?j>d&kNKQ&}q|4DA4)wYRI=9zxgW9vwPjfZsHjPN7)A7 zoDl-GF?LXG$XMaUFUFf9qilMm8XmJ~Qe1TX?_?>d5l13%gN%FcW|$zovi#i$!}NzK z__?K5eEK{Kra<9yt+@H*P0Fc*$ zv24ae5c;ht;~Mf;SXk>#=93iG54$jts1$t{bs6q^nHTHfT|j8Fryyqf{63QeCS2bh zOMPR^jVI>{*aji+do9bUn^J9xBZRVmbAgp)=f7o};-eyo?&|W90^l`J87zLtiVK+N z2SfJNA$X|?9P?8S3wd?|q1zA23V=*_Q=6B+Bo(thOa7B!d8YGo zvISYUDSKP+_{Sz!_7GCVg_cx@KM8hJ&GxYBZcW2L<+`@eIW~0qVU3v7hPU{dI z&By{EJ+D*9Jf9G9wYK$gX=p4zj5C)MRjFIBz7M!C1uAZPOhqiSkUlC5C<0dnw;A9K zZU$VW7I}XLB?*wG>@kKKMG;Zb02>VCudw=80XV1Bc2=68tEuH0eg6Ssc6lCieEI47 z&>EI14jIn0Tsq5wKg4vW3wV4LII4d2d2Vzp{tre`4Ths_q{HM2yXmkt#lexm!R@ze z=O^@GaYyvzy~q~jC(ri_@>#~;uX4$kJ%gl7+ZEKmfMpC@gUr{3yXVrw9e+zBqodvY zI@pYDt|TvkwF?Tu@p(2F^v=N%l>}l5q{u_#eX8k{aMTpZDoMpC3{*&;F@d6&x%qik z6Cxk$ZdtKy_Du`mAE=Ig&EB*3cbo)lovW+*3vXCM&<|wQ@ugGnG;bJC-+(;C^gB>k zi(ncBq@}M$duhzx2_kL+&GWWI+Tn%3N-u}$3ZGnC*pv9dqs$< zQ#Q1=;ktaFa(Kh>pD7?TOiSnFu0^`{*8OrW9mi<>rnD5du<{$PRs>-~>Uw8)SPvO* zfH9(f9;YG8FJ>TgQC15@DvwCi^b&!a{W!I$|9<{XksG3JCDF3_{&Mg}{?@g6y7z;? zZuHFA29%X4T%yM^-xeY_gQhGV8vi9W8wH-r$+YQ;wX?JGXejXI4g^Wc5Um@+3i18I zE;W|78wN6&fYvWkI8-Q(h`b6d25jzYy@8@#kGEwr4Aj6#Y>=Cstw&eMKQ=Kj&%wpT z)Mr8p+A0eTEHEg((!$!JfGzCqiJs9`-`5K4jiaC*VsLbXrj+CZ$1kOS-mzc3XkddD zu$7{J(KIfQf$|bL$DKs^3xXc1VFSSMBuno^z;~{%uMb9&yt$;?`!tM2ihM>P>qBjU zir5CLslNk_1)a0t4)B-~#HW^C$*6o{Bjfc05~11P!@aYT(n^_33&&X;HBl!LD;oB^ zzw^@dKNRZoV#)*oK-(6Mw`TMJ_^klHXz`5haTx&A$p%-h| zZ0_P%9PJT^TUcWKqt>z=-X`wr?QRGzpqxAO#zTUL$V_}t&GBuh;aEM}+I9&Hyb8i+ zt|&%YL}>WVAcvmtd!W3=Dv}Y^#q+RuW-KmZTh15vGfGZk7BeHXxHwqnmiupZ^Zxv* zvfL;8tHn`Ya817hRJS^L{^s>pK=Ko_fxHmeLz9$uEa-@)V<|NN$H%e?%>}bk-;^rc z)Cr~=4@`V{I67&L$q#DUhJW|;td(+Yt78cOB3TFKbdn)llw$yF!DMN)`tX}pW5OykkDs)5LyXGR-1;VxzN)^=cOgv%(c$`>zb}k2#}OO zxjT%tPYJE)aZ=41h~0Rmvs(`Alfkb0Gkbr*slU?D*r*^$IxHRc-S^WGanL;hz6TFA z@?QT3CmpMtLI|Tw^%X-IGtwS40R=x>P2G@I)p1>NI9JGvwGsbk;r9X8nk~acMWe03 z$@TO~JX|Aegljk!LgptzVQ(&&Y+CsrQt;^*@?6^IirQJ|@yH4$Pv{`!a;m;zd7nB; z*=qi<)wM{tM^ zh9a7Hot3M#!RC?@dZ#+&6_G9jSTh?e-b?&kD0%KITx7`vWe>n`AXmcfNwx5Omr(?| zlP64*3%mi|NW6L=Y`+l3E;?+G4G4L%LLin>J?~I3Qc7k)M-?qt{5`J$0r$&HbkFTW zu@Npt%hFrV|65X|w&L{n#58(ij_@p1ZdtuSU||wE3NdfaO;x%4K_KRle>mPRM}>?O zovhP5REV#aygCFT_`2$SnN@Q5paP}q-sQ1Z(t%FTr6Of;cBmi8zr}PcmaRLEVXqdu z@}lR?+^e|)$Gtxw=-t8XT1uLcNfKil$`|QgHtzK{h2DyD3$n7Z7djZ2!zC3}CZ4?J zbfs`anKZRQ3;G@n7cQ$N283~0@X_M3UfkhwR0awm+-Sze#wpqwFNCYg54(EPB)ScA zfaXsNv_w!h>%; zy9kWDOQBS9IZD>RPY#k>G1r2&fpmrkNN4bEiBHXaKt3I()aM|;3S5W)f)ZznUkJ!X z7r?YkOb(%qwn;3@AJzoY!O(^Vmnbfqm`V?rG7QH_a8?V}|? 
z++_HPK%Yvu^SCMRZTW`Aole9r>2_RWG34=2X9w#zJT><&^#XXE_$Pjyk8ZSE`b&ka znE$6rN_sED=O|2j@`C7x8mSN$6cW?Ps9OX?x!4_99STHfq@8-RGYM+K-BNm(#6pe) z-&sm6*IU-d&jkEpYTB93#dH!enb2cPiQ*3GGsw%zibUOA(O*#T99mAxJuJ-yj^D24 z7(5K91y6JdabgU(qIQ^y1Wt4|Bz=fM*$)`gbRG548Qt$=rgK_YFkS%Z9-N%hy#Vzn zs~Jb%VHJQv*861EF&CnS<-Lz?>uX*5~_!n0gIaule<0XMGOc<`|$(p|&A`1cS6G&^@ z;pmHYwVS&z<0iNc5$yWiq|JKbC%e*X&<(EOSpTkm&hNMFe#06U)mUjPV)ufBU>SsIGPr8lR?S6nn|hcK-my5KuVJ?8S0JQS7xRT;m5)cSlPNavwm-$lR20g zXtuqa?Apfph`Yy5zjJn-O*DB%cR8&@S&m>4xR;alilgPh=PL19M$PXXZ{)7+?8CED zvZUuzK(%_~^T1tvuQoD~$g1aBDkXnc(pie~c%5K6h=&e&L04>*P`bP*q-r;Ln(KZr z*K~1r-U0#-lW9id8v1$?1fleQ83iU#zj&k9hbFLC_xrt%C-L^&LaZN6qJs3_|QJNLqvqon2Pv zew7ja84yN%;8+!V0vpt$D8s^kq8q^VhwXde#E8egJCRxb)ReEkAvj zi~qaa)nPWzYZ^5P10TQw_z`No|4<-=W)NtK=(i$~&A!S=f#8yitSY4DYr(<~lc7s#cWiS=F06=>3|z^b1RzC8BTbE!oD`CxLov|9<>;qV~lH2Nc|S z>f3gAayZg;4u{KSiah_lJp<`S3A&Ii)QWW2Ep-x`d!&8)hTUU&cJ+wP)y5fk1}HwL z$>X?i#FmtoJ+l}Xo3+EeMse{OYc2pe`R+?ia)PT7AtNHTww0l|x#W8nmzp)~lnhrH z?VAK-)XOxW|9bT2&leqt_in}And2^?C2JCNQ~Zm|roXh3lajK34I)hsWH0N!&;T;t z2vu>tZV@RYGa+I>Cnu)>hG0&(3Q6~6CnRvKOir?=!hebr3?P^Hk$Uc20V)p`!7MN@ zJ+0Wd7xh0(Y^B-Y#pue4AoLQKhnIJP3m7)*8ykase2m|b;qm%pc>TkV85Q|x`=v6} z<1pmP<-#SRtsqP=aH6BNc!C|{Qm-WuVbw^YQYxyLt~9|0ztcnmsqV+T1R))5Z?=#5 z&JK~<_D=dN&a1TN2+vY(5Uz19sNZ6kTEb~5*QiVD2}|l|zd^z$*m4|a*)oQ+x0Oot z^O%qgVv72V>BGi`5Nj|Dr*Bk>hJ(9#I!mRNWm3^zEeCN7|0#o?Mnqq9q&M5EF!z=K z1O$+_HvJhISwcal@m|pAi~+)#q$iZ0&tDGNRNlEa^4j{Ohaw#dwV-uF0qu2O?}b)r zY-Xn2Lt2L~=R~79(hb6|H=;@2Z2MzB!dN62XJJDZ1QR4rr1P@2=eiTJWeq*>+z5HV z$Y0Q5r?UX@Zpk&|07(=M`e5Zy=_gj0VFNieu(p=V9&}59*mpf#F59I$Y{c;1v3+i~ z{UwkOJ)1iJH9zmhcz8u8PZT#LFP3uv@}#f_L2I_u2chAwSs*Os3Fa8d=!_ub-iJX~ z&ol7(mLucgT7JoY^0AoBJ!73{{w7!UU*C|#OOrq}%Rf<7naV1vjK4Q=gz_7tgWr_f z$3cfdnh$^#JbAlr&ZI3RgmJ2GwNSd1kV;y~!vqxGhoN=vJ}nJsZg{Akhe#Ytt)%z8%lt~26wiK3Zq41q3^EW8;ZIRM= zZlst{s&mfU*d1Vx1X%eG>CX#m<+X`^Fbyi0M{rA;YU@4E&(4-%-C=(S^GK6^AVKs; z)84|&L11XivI@aG6?*mhRSKAK#{e-Odpj`lhP3wlFEHNm%+1a1)z;H9YXb;30%#p~ z&@A>OXWNX1Fj6XNplVt1?~Hr{a?r&Ifv3(Eby#_3Mv1|=vXB}%io@5Igx8m*YEuwEPgB3gBFyp+IcV?MX` zMR{@(W3{S0JXIv?Dvfzr<@9F$ZRh3VGywuX90kGuS2I!cz(GJ(XQS<6Z6GdfwDeYW z%{r`uos1l{I+r@nKKZ#N-Q!^GBapqJ>rZwzbG`bIe^>-U&76TAOO-x7HuV-p(1LRp za;Z-*NVU4Pg-gvjMC6wXI%?3R6KWFS)vSkOo(`(ITAdH(jIF!>HpCD}xtQTBWp2lpr$Jz}pRC*{NnleoZzIS}ETbFDo>-D-m1 z2z}3^XLAZM=*!B4oI72z6_^`rN2Y|EOSrA0--$}-w7iPf!A5_lpKsYs^?v4|7ifYt z2UluKzTGJDHbpJWrqi7e<5MAmx^*LkzV9mFmwa4 zq}{l#G# z@G<>hS(0443}&3AxgiJvAdrKD19{#Axi$tXF$fxWIp3Weq828iu-Y7L74Q81+(fYZ z{{&Qo2^GJUzQf)o%k>uVcJD6iH~107O++f8I?=VH7g^bS=p}y>D4v8hb#P)xWdfYv zepWdJy-I4~tt`Gy+Vl3LcM`zfF&i$JLIex$`UxaW53$5}f)gc`xRxBTa2}35zduz= z;rAbE=-zEAp z#HgP~y^Jpm15%6$SpnEIp^R3Y%bMJ7V~EuNAw*6A`lY^z?k{}5-n#53OapLTUsyS| z3YcJiq=8n}Ml3_?#TU4#8uS=I1~Lvb85$oxOtI0Yt*JsGUCX1h$sSnG&g>1qR@;k< z_^Vlhh942Y{Fjmv5(cbwuo#pDS~M#f42J*YOMz-hyf6k}yO* ztCAAwVXik@>t%KEbB{Hr5K0k<2+`M9zKS(w%)N(4J721lK0*Jf{754xCE_UZA-C8F zFz$gMN9Ox%nt6J*6Oq$?F<~dkK&NN72jFZ^^PMb=s{*FM@aSTeb(TOku)!UJI#mZ#{c*~v&kOF%iWkyDZ!f?D#K71<{7S{Wg=4Pr9>?S@h@;&`YU`J|$ zIhC&*PW>!_V)+e)pr9v4DQJZRv+xtg8xT*d16<1*qjOGvEEW}?6br9`En7Y)UVIB9k!>@$noggRhn*>`dW<-^y9Q#jg|$FVKbM7XYCu;_IrgWS2K0c z7lyk?N?@=AAPJ|ipn|0TqOr>UqU1n35u3M8v*qYf+DJ}s`BK#({8vD2viNm#5JnC} z1*9@5h#gU(ZEo9@5poI|77vG9j`?n4bh+e;X#Y0Mq_``L>SmYS0W3Ppmk@4lo zVD3*>j$BP{nh={Buh>hI#0U|(>x@M;%J+7h%@53A@#OKEO#7IwJmFUS10@K^D# zg%OX1nMrY~H;%sGfUE}`-&hQ5yB-~D6UZZ$N4gIBKKxkMaKw4SZ&EEQGs**j{WC1H zPd*tBzBQ7GjkTbD)}*JjM2dABV!}P<(a-6@{2YJY!Si`phE=}4?Ej@lJa^Me%%hte zMR;+HgMH+0Fp8Dka=(8}lK8 zl%^CRLpT4JS<8uyw#v!feHYb-gM{m5Sp}^Zz=NJ+XYHK7;a7!pvc)YPxZb 
zZ1GT2XQ>ZnM)L^JOki|d^sp%+`uRUC04&NaGBg~tyaZ@#Y8Kqx-Udle{e`765fP)=C5`@i%;RsTpZ)71Fvsy@**$IVCzyY2CM+_tKczg0sGer(*rE=M z&Z{=kK^mz#F)cCWre#$Y}-6P!qB=Z6TPRuRQIs^zL*N|b1dq5wLnTj{T=hIE; zH!F+Mz{VULxxABGjdu421f(mt9wfB`+43jJ1DX=qxn&lhG$;;c!!9-DF_N7WV)b3p`(N}s-1Szno_C8M5t79v3?Z)vRvbu1GmTt|{UI1+jqD9qt$7s?3FUJR z94y*%G5g)v(c+dOI8OG$gdv{VahHxnSE2V(G+;xYKSqTn#?(s{$BbLd&$jhT3(Qvf`qRB4lKWT@|8Q`d% z1$H|)bqk=HOr_hy`rOmgBfCmdcK9~Fa{zV&gN%aV^Xj>#b`(_9vqa}tS?7zC0Ddcw zHAo$&Zk~(;CB*DPLf$yG#Ko5&MgC_l7{jBLwhE||6BA8f@w)WP57x77KoF%d<-o0Q z59-&82(^>A6gv5rpR(ZwBc;AjSl#e<#n1oNb$I_zT1Oe3YdZEuh!}+{^!ra=#Ko;_ z5mB|+gK;e+nl0)b4Ob%!hxs;Jnr)Cj#LAQY5%$sh6iD-a||X%VRcgr_$ff>2zDAwh3-8! z1%;^G)KtyKXtvcK+i~-8_Ix`J9*tlzx2Xfw+32wFF!Gn##VPtqRsbmu?mCWVh@3KF zdsp`^7-K?wUU=bH&W&$uF$?DmP|%*%F-?-uSR{+hiZ)2$3$&)5J* znuwwP{{DQGuxELr3D%tQ4iwv~(wZDwvAk6gdLmGU@;r57bKZk5jfq5fMyl{ZWBoO$ zX7k42wi1m(;>}36LbOh(qT-nzT$(A3koSnD(>s?wYZ{E^yIQmEfdPc9P3lJ8F%XXi z3o6_WJWO3<;(3K=Xk=udDOc{r1HM}w5NMxxLixiXsWJV3VOus#R{l7`dhMSLVB6wf zn87>V9~pNDuqX=S;IKOIg@Zm8>D>UN@DL=lSLW?wiAK>@1zQ5cVv{p0o<(9+j3Ez< z8lfvdThCNNXD2Shs?)j&UitQ+@W}?*A~rThVp!EV`o5`=Y<({!)YCXVk!>v>1_cLC zT!4N$8u?>oXQ#8Ty+BE0fk>#9gBpkD?WNKmN-MdO^P;3%1>)*H4#|%DJEdjPu(j%C z!SiCukTf6%Y2ne`++Fc+;3rqSk&$(9&TYo~VP==6%j;a+;)2A_%Ih@*52v2jIxEzA zK!Y{oKZP>rhWu+v>%l$1r%tH${L$g^PuG}iMyP6UbEF?h+3$xc)IYxa?%UQNF7oOg z#6?nm9MVlf#q3cBUE@G&xGhc=V)$~g$u7GnIr-zaUCpn7`agEf5QV(&&5f-inq>N6 zc{zaC_F0$~BP3uemL;R4>{}Tb342rSE0Yqi1ryB!N0Xon0H(C5%C*+_LGj!x#SFfG zF)b}$2CovLK?j+tn3#Wje&p!c;5HEB>koI!$8F20dVz3@w=ePVb{O{g6dt<>e6wRy zQt$^YF{id|fQs}VcrJ*TZQjJq2>U$VSAY~?(S0j=>9(gi>WPW;pcBNdHbnq4c$5*? zdJ}SluV^?Bm^=GbVzr9f-FH3>{dVb=aIkrJb2#Pkcj}0_rvEiAHzCyZB`y&|h|aeK zrGA$8IH(%R;>zVYHCBh*lj25_BJO{^Mm@X2hwn~LXHpk4TRmunNx!73Jnr}mEpBCb z{2(~(%&YXDfnYe1VC#eG`S)Q$&s}N-VziQw_ob~9ynX~%7TN|Xrn9HhI%g0@8Kf9O zyK&GVd*p~Opc$Mo_S`;~$MG%ca1cE~k{my{)wLW{wuU?Sb6>$%DZgv;%s-qVgUZ- zdzZGS^Sx;h4BzNAGT*#L^P;?djZr8-wgG>YBlMg%UT-M@ru2M~7sgtsZVcFuHCVXe z_XIEFWI`xdG>(BGh_umsGVfxBD>Ysh$ay#bsAUBF_rv14Q&UscpnTI@F^%O@^GVUk z$q8xbJ?e!JDTCs0ds)B;YkKq9*%>+$Gc)7D`3=8bb?u>wTC!*NB|jPn{*)1L+K#|W z#)th6NZPfIp?+1J;I>!?njeniP_@a>&zWnhvTC_E<#^D3N}pl?>Hj z`kiKCOTMYC!ZCjHRMl4>Iy>Jo{ue?vwVt;0`UqbW6LB40sg2KL1u9}f?X%!B7)7JG z2g*GyRDwpuiBoS3!XzT<6HLh7zWsF!8k>K?&bh#Gl;D_{7;fN7Z{ObAyMgpo6EZlk zad(-3Ld@xBQ0TW3{g-iv05|jiXu;P|*XRTb5hBome#eT?soH*zAY*?d)N(I)??DdB z>t_I@Rg0sdzoq3_0W2 z*C0szY6v;its!XKdI=WB`{$lAJA0!Z&F2E&>e-DU?7sM`Z=ps>9Y4M^mb#HWv$U#2 zh`3N*7Nf`ta2I}C1=y#TKf>dp_v~6M*U7Zi<5|jBg8TKn+#GOTHn9yO#OsK)pp=G- zD5wdMuZUIz{Y{;x{LGL`hNT|}r>FNP00dp0G^&kf`#dfdf>`7U4RR>IC>jJ(soSwM z3i`eUl8uc`O!x)(_>3Qk`aob%s}HCd&0qR673l+vf{dW#LGz(K{|%x`ZSY3(C724e zi}LfE3knNcLAhQttl9%b4Fkv*TtTU`zz}HOng9J;s0_@)tU!EyIkmFVo>5xiGs@W@ z?FB+;{Xl;yUlo&3=RWvX_`oWi{|}B-#+kkog)gcv@@=o4I?k1dE+<%`dq6@CZ|wYP zFqO*R7YviRb4%G~2VmW{H7@liIjIO?>YoJc5{`s(EQXtROM~h4+^NABxPFMCZ!%g! 
z(t2SZo-iwwrS|ilw&epzO~_1Ha&irYlAdW!@uz1fG6ex#F6IWGy?>!4`vv%Op%D8J zHrc-k4YT!402!UY5i5+inY1{ooVR6nY+qg z=^*kXRzAnf%*@Tr!^5(j)&*LZ&-do5{%}`1oJ=C-){Wt3pJB2iCFq-^j~;8PNCxXC)M*WHJqQb;3Ae) zvqExcZu4gU;I^r%H6!C4)y{B}lOnrw?^ZWjdlZdSTZxv9pP$zVQ>V+caeujN1Kzh|R8W;VqsZH=D!pJbGRivwuHryq^`*(}fcf+yPn} zfX$}k^<32X- zTn7jdye3L*6C|B-+^i&L4kGDiLmT%Zdq)^2iguFWca3^8dP%*7^*Cjn!}tutTnKOC zqYD3L4t|W^CUbG@{-iov$L0pP$rA5VFjAkjciUh8_$%FTZ(%VBZ?3O5wmbzF`if|F ziZJGr*ot&fjYPcg<#m5U!2mr+-U}#r@^Y)xQ0VLO14<<|Ck#f?jK11N^E#1ZV@1V;udE`1EYCa)n$O&iH%f9!Kyp zRR+$|q2gUKO$gYNr%M3gd3Do00Oq^Sty1brj(GAGg<0XB=W1ezQX^KWF+LRu++g zlk8s}Ca9z?Y3UQAy5gSZz6DMDLVBG$9+O%5zS2&Lo}B!@&v?MmW6b9Ri&Wm1d`?F4 z`W8$ySnsY7estd*YLUgRiz|OuS8yQ&;hY=Uq40a{PkllYQ(;SvMilAt`nXL}=~5AR zR|e^8^8>>H8s%u^oKexj^C3zvkl6-d8m>g)*(4b<1}ftf?sb5*?9X^zM4w;)V% zn%=UlRD54Eke-}8-vUCX)ra6c8F=aytSk6V&v>Dh*}J_CpNuU1|HL;1+AYO=VPZpr zU}EFtP^wSNX;Zswk+pH)ec$BZaE^`ZVMD90`}^6#o-v!`*QeIZKDG#Zw}b=(ssMZu zwCcg`6qBmDv#@=eP+|cMODI&wKy~=Vfd8>imS1_VyOOf}H9tAgn{EMX|0Wmw^iK+W z>vt3lR4x3mig-qn`o$(9RD%SCSM(>>R~j?mxg8H`IX>@fZZ*M6fFO6 zok6%jT5AUC5Hi%JP7?6~!o3hb+oNb=_f-d}zk&f>cW#XDUJ*yOPe$pn#`QxrLn=3xntVn6fDW>d(GSvp*UqO)l4pUwM4@{{@} zC_f8r!(A!m6yj@B7R&kGlfS_#{<0v7gq*ID5rI#^PGMx^r$9%OWY>n*${q2)YU?O> zg}R6LWP6wx3M>$i7>0r`bx@#1NO(bkOoKH_#ZjaBj$tZBirbQV)9_PRI6FJdES!Ma z((Tgh?Zs^(sOfCL51Nf}4(x~T73tcW*mC4oxK3TSt%CMbh_U;LN9ub=`QU07#alHT3fx9bP(YdfT+#b$LHy-XUI#N za}{kcZf%IpfHX27XfG_7p;!$*2PTF%V8-gRP|E<>p2GnF@LY-zu{5|HI_G{f+R>)t?;SHz^-oSoz)8|bo_)2LG?%EEJcFZ|JfEPM zG?%22Fqf37Hj_Z~aZU~Y<6JV4+KgJ7s%&Wl6$dht1r8hp$33`PrB=W&lfkA&ArdSh z1&xIYkx}3)wem}6WT#DQVvSB~q@PP#Vy8`ML9R(_qMsTI=OksBGn#ZS{8`R`fS{mWnKj&zh1CzqNNV%?KrfsQ>mjfohW_#5)ajAsc?4v% zydP4^_Yq3|e&1R&x_aq~auuQdzO_{&3{0_5ShFUOeU~mr^e!FE47sp?j_!d5Q^!!83;>bZ^{>O9Msk)6GY2{O)}o8A# zlAC*lXs?$khI0U3YXUCED>vl-8AbHN{o>-VuvwO~2L=Zf@P)naMJsvEqQI+ZWPbh% zU)z&Xc)t%aJ#h{+K%GXl+!UnhD6TwkkQa^2`p^Sd@(l()+o~W@XTTwJh^RH{`wiCD zn6ehKV7V^I@8#HO2GV06zI8kuJLq)}Ibd%$HEARx%D#S0i1oh{PuMYE1HT91ej-{uf9T7Jeq^9nLnn%O}I!_~y z2ilR$g5FL|){iKCY}*_*g4(*DPg|9eY_e5!H#Jma{}kuYG%1UI>GaZ7>$DLy>C`b$ z>-4I~AKcZ${m5{2r33BTCroW=w=|txmsUyNRwUHs?fNs8x9Eh{m*VsM%cYz>^3G#j zEgZQr>2sgqsp&aLWVv!)_$V+AG^qxC-DK+h$YEPd%fSDjg6h<0FDgu%MUvTcbg3x( z@`h`U2VUVlhPYvS%Tpy4?u-;NKN5Rq@KSRcH?^GH84a5sl;89Eq!^5KLucF(VE5^5 zq*?PE2(~X_g?_TluK+oZTi;_TF?p@#4w)qx$$Hr$c++c%9eh`&#zefmJ0lAUOfbex zpGJwxt9=t+$=$AgwPE?A_!QxDbGikKSi$W@Qn-#F=i!-~n3!Nq!o&wfC}OZhG7eJ% zrw5jkY-eLLej~a8Rw>cyz&r}};&=?b-)hC)FMx(Kk z)KkIi9$}HI!LT*{_53|X)FDGDGvdu`pa?^9jUUk?R!At9IEUoMdpjMbm8bJ+a}dQo z_7EDZR%f>DzMX3An=^_OwXxJv(9#%q5$GC-`y~>k422k_?2H(dG2Hr5Mt>1k9d`k| zD)ot|_{b{Ay&;g28%O80&63O@KL;&}3N1I(Xfyc=?Z>6xxf_WrKYp=(YfK)iT$pf^ zrFq|P^5RD<+Y}!3jg!1-581)JJ_Kpi#dLhA38%-!#Ej7ftuI72HUurU-G_7Q_WKr| zKeFOT--H`4Hf@}G3!W^CJhhr;hmpD5P*Ni^sZ+GZ#mjb|r{8_Evx#MqjM5#|8}le6Be9Hj*T6^9ug9=IK!2g98`C@IwrWqOpY4j_hjlD zIcEc?Sbq1-?=SV#cDw-(ybO|&%}&S_>izDw)VdvrFUPC4`?N&Sstp&%#>yjDKNqim zzC6^N1`JzAUa4eLyFk5k!Ph#NYDNfvIQjzC^hVG}h0WvRtnvu-{BW>SKxCXB!s&^J zxM@kR?zmf4<*jO|wDwY=L|zwJ^A~oNo6TrAfPTqK!%|}1scasUns3gJb@=9Hdi$zn z`!ZI+ISyv7gT4|Qj{+IHoL}r8#RjZyuM+~x3!+j9NJzT zX;M;B{upgivl++G`>yjUKA|g&j>UWdCM=ZWAz8?7(T%T&P3bHseTV0Rbvo%@o>!NrnZklLH{hzCgeE#w!(?H<0%jY1_ zVaA5h$ZL`&%W(ah|~)?685@wzW23 zInmoBG?I`AM2Nrp{Aoxs`3X$POggp3!j)Z%K&NAx>TR6b+MKSk-?Vsol*USb zyAt%HF~`^5;B?FEP!mj~d(Mb0uWuTl)q(XdpW$lS@c?l54m$!@yZgov%q_a1?Yp-)(1u!frB5noVOFEQ#I(Qf#hEA&Q%t)D#*U(2+jdO2HxU!s z9OaHyymN{?Zm8Ku4oCLSNfqq-JxJ-)^A^+*R8{P)bq+tyY=9g zNq7cF``DO=8ELPKZfp>uxxw!MW(}D;Y35OLJ`FMZVP1@t^@m8Od9WBxdJParNlD=g zfe4_DFl~KxClF;y4%As2_s+C+$`l8s-QA!+3=1IRTNNlZF`;la&teUbhv`Nj8QB*b 
z(ZaT>s@dhg0c7Tn$E?-gOj`RnC8p1TI0{|gL;?Ep5h&!TL3Xs|i`b0TU%{DaAfmnD zJ?;8OwN8(ouzFDUY$E=T!;92QH_nI;r04g}%{BUFaMgW)C1I}s8r5ygU3Bgxl+^4l z=KvdRl`m>sG0owqX-u-?J61m9oJGn`u3xT$wB9-zO-tCph=_v=yYDcoGdW_jsYpI~ zoSVb3tEqWe>>K3*=*8>;PBMYVKU;Q#ywM+@Q1Nrc@m%~7N@KJ1kV>rr(|3E>(0`7I z@r72_pUNreN`977eLd&X7h?CzJsWG+Xux9uBpk8;iwnIVQrNcRKTp3G?a^qUZx8_< z5%GN)loW)79?yM~#GJ$MDux!(4L+IbgJ_Y1`3h}G_g5_(_a%Ka=img6CIzV$%^ZT0 z1&rq{iq8K|6yE?!O1BBf93OkiLa+tkHQItTt(e66xki)v> zGiUxg|4bMR3?|-fLFo~(;__DCiV_Ix-T$-zoxr@dBMKn+*T~4oeK}{bl#*GeXJ_FT zr>8O>hBU|MCNBadtMl@D9ebs-g~5?&JuFl*xD?bq{N1m-d1l|8pS4D^MF+Desiw(P(4XuZ7B8 zi>nh8J(o1TTNudhvA3m}Qy+DbNUb;LmZNZw`v*t%CfXdYy@7ec)F{QBqfQ$_j?(`UElp1@$ zc)58XZw6K7ej zsZI>LcqN}?T4{3&viPfKn?4lm1pL$3iMQh+O&Tujz>2}Smc7Lh< zmaAQg4Z=qJI5G6}mHVC%aW#XdxwASBb7)H>XaimN#(U3lKgq-WJeqXQbXVbHUE#!a zRI)6L$DqEuzmRWqAzu!y{Lnt*{PRcWfN_eI$zuK5TbcmI4hqzSIV_r1O6i*z+m|K!~6+qLA)M4*H$Y8i411dcMnH^F?J;-STWrWGnVvC<}viNi@^2c(U^} z@s6&wy0fyhb3?tk%;;TIq}mf8iAF-S@kV-Y?eIgj@TR&bK>Wi$LN0S9tcRq%nr2Ws z$fsfeMz$E-euxKJwPs+iuIkfYi~f+8MS!tnPeA?iy~v%t8r-+kut&U4eFlJN){A)( z;ds_J8@-sMJP%J%cYxN=x>JaU2ZMo>a)XCkW>(XYf z55etk+r0PhZ!6)I!hAQ#hSp5T`|LLDL-#wFk zhsH$|r*RTZ^CDba1bh1a`VV28=6cX3?jP1`G*3DA*{_5a%wVtewX|SfK4STIT-l*; z>z49r-Ei&yarKr_Rj$$6FpW}5gMfsDf~2&xgaXptARr*!T>?^~hzQaljdXWO3n<;) z2uMi7H`hMrygxq2aO~j^x4PEz-1nSU9wh7~m4?}=>a7JtxJ7gko=JYk4kJ_fenEcb zk{0eRO}ZUcM9|P-xQo7;J~ol6kSlT zjd^e{>3M#?rHR7CGX;THG>;x_VzRRrmnR_`P5cG8Ocx4smKfS?KYDZ^O+Xq=Ka?9t zl}QCd`^p(;|L{!vs7ZYh^D%U}#*ngZhTxDp{sW8Tx08aJg$T=3key*^S6OS54f}Ra zy~E@&(tS~O;2qcQpofF>(f&Q6At?a5!*ZEJ+ay|e3rc30(TO`Hfp6sc!fs(%3LNdWr{UhaV3@bC2G+7l? ztE@^~KdWyp)=War(@Ztc7*&jyuoz1!;B!u(j?vj~4imX{JCzwN7Sx{g`(!YxawWUs2!utE}i{>|%R{^ETSM58;J5R`581F8N{xIv*tI?I`|2)JHvIo7R(fZC#p+@5*`oSqpJUvW~MFoBTQVhqBr}&V!SOfLBqI1a4{M9}M z0QdeLAzS0*eOnJA6o14ZYdXx7*mUYTBsDvwdgb(ed3o8VyT8Er@a!QQ8!s=PX7bm0 zu=r?zG~dJ8@$``))XF}Al#d~KK`d+s1nLes&)!!0c7V+sHs?3U`XVrXbMoIWB+ZtJJ+zYYqg;nW4Y2|}j zS4a!5@s}8`3xbWFp=&&&!@ef+xjfT!A-UY!I5)&FdeU*v68B+o0gBl%ahifo7uie1 zj#&KVSte6OO4(F*cWx-B3<^iXw&d>G^7z3zfp{f1398ep&!FQaV>-lh*O^xY3R> zI{v(k2H&eXgv5`3&$=AKiasI)LD_-&vu0pzeR=kI20}b0rx2Tm`iaY#$|Zl)HX_~> zVf8Y_a97z>poRJ}i_aUD16RdTYrGKIJ^k;!Pk=%UobQ1Cw?3w-QdNMf>V zr_S41R-8aloZ3HJmVX#eX?$?bKaEv&YvlCJ9I-X|>50(e30Wl^$&xIicI(G4#oA9< z7~Anb+r;lY{$7kk>_UvI?k}$gR7jSH%@tjz+rHrk<9Lqp0$Bp5CE3=Q4OM}}u>Saa z;Q^J8WS#L5OU1nD2oe|imoANJE?&N_-pTMp=M3y&r0vjMSHR=afyr2;=HH# zeP(rl#ZjaW-6edoe>f<5a&vvL$9`AN@c}+1B~FYzcJ1G&20Dz5SN?w{C5~us8nRki zT4q7bfMwK9dJsV74?`(lb5QwpeJ@B+NZOv!=GS2X?ePRbIK5~8s}J=WiVzM|^cVfk zFhA%X^gw{^?q(!LOc;gmgZ>8fV7+|tqv$Jm0<7nN6ZHm0@TQrZ4Bo&zV5sjbVzYBN zOu?v9?cLhWM2V1AfRJ`CzKt7hqyJES^Inm0SLF=30Al{5?WT{7cQPLxUylxtyosO8 z3MeDP;ut6lm&a2zIS9d>{2=-_MAYx5I>hf{`{oq~Q%I}Rwcpj@c=NL$dmEkw(fu5Z z%SR|2J;7|g9KeIE9>?Ow8pOJ&S1|D>%P0HY{CYzFoQm(GpNHC3IV0oM!(Xx#8&*h| zayE3F&W7k@9~Y4t`e@|q77|QwH-l|hD<^QszifL^9XzH#xP6DIf`e2kqktZ_g#Eyq~OzqO=5(C(Zq0($XP~Z{I#+9Q63z!4WxbL+mj?9oR*P;d^yFUJQ@k35DXq z5Xu9{8&^i!yQ^a&C4JhCq&X@BR_ZDZ4UO7M<3JI5_(cc_2}vQ<1l?Oold`h1eEJ70 zre8-!)DUH*YE3_o`0|8>gys%$M&E;;4WCdC(xG^%4@sw45R$Ea<>XERy)x$=f1&k$J?Lr!*av3mw0#N7!UFu5xg}w zFO+x?c=Y4KPEO_C8WDm1|001t()90utwN^lUk!W~F*`H!`|3`ch^eTAV_E$qec?z( z)FNl&_uM|ahOOfs*e=`W`fnFCD`bgXC0UB9?I7w7?SM-zQu&1+zVlzv5^z>` zQkIB~$vO*6QS^n9g*+U)K_smB%S`dbQ$ey2{Ohv3Z+Bu<*I%)0AgVPGJ+Sz(4-zHB z?9B`6>z^))>tQjzh=5=%L4H9&^JtB`{xl#_DSZytD2eLesc=tTx9c~P&j0ZK2^D4W zvQW^FwBxjR7)bFjq}5arb=!M3CH}MdebZ7Hh3(4?m4hZ^A~|cuOU-sePD|_wWJoV5 zoskN?pT$`{fe2Uh+8;qLNMBS}b>>K*CP|m}bS_t1p!!~VjCs(dV(6UXvc3y;ywX0c zOaICC%SCYGQy96q#(&RkLI6y2UQlovS4|<4tAZw6o8Q|4!*SOK3cg+cq2*_&6TwY^ 
zZJl=%BTKqSY6JvT=K)8*n>oB93G~DJ7=eS;D;aYMc@b@CN^3+awNxr-+Af= zHZ)`^hrAfv3F)!6Ho8PR&eW%FwrO#3q>Fp&-t8|jQ{v;}Zxb@gFVqF6-48JidUAIo zG!B=-qFVXI7g3_dHzHTgEwkZuScJ^k@-FiJnY6O5%8NKXbQlvxNVY_z<0Nk{x9E^` zDVe>#ZX}ov?)+W86xp4}A1F~7BaJ(sb23!Z9Pf1w?FMQaB%tX;@i*j`j3}xg|a^ExA$6E-dWt@8|$E#Qje) zI72^>tREm+P)eOAGfiGYc%xk0W9_mp;ZaJa;LP}+rFwoJG9k34CMWR+vR5f~6&GLp zA3)^Y;R)6OTd)_^^vPH|8$){3y%pFsLfE`|C&b@L6TP<@Z>T)};ZJ+xd zFn>R1$*3&l97bcrFhN{L3pn20-%)vvgJPr0CNux!xV!MZK1LhMJ!yB4ZS#g4qt?)Q;_lO#kFSPZS?4t8xH?v{{A6CbGzDacvX!SGc^uaabzFu zKp%p$UuqI4+W@DQeicl+JZjD#3rooSpD2Mvupg{5h2>>s)>v3r)*xrKmW7dug_#+r zudna@!XOOFxS0f#zy}X(Zf|RXW>Irc=;>f5?~!nPR6yBO?c=lAQ}Iu<&mfG3bn5NpJGJFeR8>Cr2&un46BgC$>}^ z;k+&}j`v<4$M^MFHO@&|qz6B}w7|N^j~xxD{?SoPEJK*X=kQ8_ zTYVz>9!p4YFzcP$x3zmTu^=H0L6$8torT|7l)xEkZTTR2eX8p2&R?M3;=FJ@9Wun1 zxVa+HU1j952R7`PVVK)gRaN!poBj5{9V0F((uqcB8Wj~q4nH^da~d0c>R@z9K&S@d z+&PK=ylV2hxdwZa!ptB;b?PDO$fQ3MNg`~`-l?nw0mftwH})bTWJDao-lru#piHJ# zRWep{hG-pCM#YqdIAx|(j+EHgReq2v@1LHY-Xu`ysHm8jm@`KH+xHgXM|ej5!xXyy z)s-#o0qaKv)$T2m;xX($+(GIn0$Pz}RXW{82H-)0^DYd3Z0{9C>baOr;tQ~mDsy5_ zKj7gilj&jmpNXXpXQYn-_jd88DU~dtZA)?@^b;N4tepCF%PgF~@@KyrGhQsdD|1Cf zYC63h{?+DR{vK~|=mEboml8{>xDMXmMZwZrF?thYzy1I4zL6WeIXz4sNt`GAv14R` z*$)vk9qtazvMRW9alSMizuU5p-zK;Jb73TsM7jCV^1pi3Wky5G+gMoPQQ_fq@^c5o zqHVzlWgA3+Tpa(IJ|WQd5b2QuR13Zi1GH2Q+Q)|GNL>z%paHISzrOU?hUD7HImktM z+(9mk)6vxh@-9zjA>pypSHXLu^9bk`epp@_@AN%C9bw*cdJeDTV>O@qC-bFHW&Ar6 zPy@TOqsx+?H=qJ#g8Y@eGfGT}7fXj8V^k|+U0v&vP?FaS8gzlw6h5g^dM2iN==5^; zk&r;iva*%BQXZ{$Se6Fd+H*E7zf02tHd7C?PXnP()~V1j0l%MAQ}u$d~l_yMx2jo{!#9BNG9A+Ys%u zko~GQL0Z~Ys2gSqkd?RYgH*^d7XZLT5i<+NbW`4Gz1NBDI&7Q5X$Uaz{$RdYLk6C& zNKIWGK?<{w>rVV0cq-616R+M;`<|Xbqf^j3(OPeFh*RK2V6qKKeKH}W6ZvqLe?J6a zI*Fuo3O2cW8h|`04c2CP4`C_Idgx?0^z>-DmBEYJI%mdDUs3fAKgBdEXUN((N6F)E zdJ}&Q46HY@Vz%lcAD4riLPfT#gI{hNwqaQBmk*V9XAo0jb}roR3@A4o8IfBF^0}Q{ z2wf7qvRh6i@`W}1tVNhvu|b)U^r-dmf0DP?Th8N}0=&FpK}d6tAo|xxKb1w;LJj)0 zAq-D61zd4=H{4!g-q&6t7YqSy9XAKk>+eWNM1DQxEz1x>Pz?dr+(|{jInRw*2-1@n z%>q&e>y#?Yu*YZGR-t3{IW+=njJtH$)csYtOaPTJ&I_<1x;lg*pIsYb0H|*>kSaRA zcd*)r3iZ#0T+b$g%f{gIn1k;q?~}Ttx1S<3p-|01QsdqEd-`V2QnoFj4d;QkJ?{RnYRmw@cJaj>Af6M)d`>SY&mNi zKM+z*f^__GzP!;tp$qZkmMAQQpN9Z#$vufFw+c{`p2%*p{bzAuX)ycq)kI`@2Hp79 zJFQ|NE)iXAZq#_OCWckD)wh9LbZsTx&Htt;Cp^ZyCB*56T3P@o3>-DC;n%S>=HzhQ znRvC;@TeJp6olM(mcCFORfjC(1zL#q>O$MgDFm_ju6;MSB)Z5Edq2z{6r^MMfN=Ze z>lA{2>JNlEc_i;Sv`Q|+n(kUUD}6cNu)xnY2j4^oD5YC?PaB%s{zom*I$ir+o}hc2 zz6f$KCA_^a-9bihu^vNni;)GVw4)r)U6yQw4o5Pwx%{d_>}a zgy;^qm5a7#Y7d_gEEPsT8=?-Ch`0L=0P?;jv$ZIA5CS-!g;1yxE3Gz$i&fdX4=yty z@A_4Ejkb1qeNmQTAG?eC`W0px(^yu1_zFI1C^8-yLf0n{dnU%nRA_QWFBl#2oVR?t zLExFrhnWFn?&Y7VlqY+h%7<$&flGvs~*Q3)DJOxfa>dIh5n_du%|9eS= zn-YzmEw}b!?ZIGe*l;nyzCpt<=jLbsA?5RESTCQLVeaQnWHlhXELpzmOb)H%XD zowpU|SH&;s8*fZQPj5PlkTY|Cs8n=5XwsrI=F&S@&DDwji+jzw7dGss-B{hYKUlO*aY1;0NuT&|j zfRSyc{@Q$Hi)PH=pZ-PsnMB621y#b3sNm^=KgzB+eFmRG>d;%DL_=U=8eU zE&cw}F@>CHJn6$DL;G-tsSt03hUpW<=T>F!`-5UZ z2z?1wDUnByERhr!X|=2d{L6A;)dQMfio8G5?03W8)a3{wR5%ari1xT8@xU@96pgI6NC z+Io`i=&SXVUbV-{`Cd1y-C#8r(N~(Kh4h105af68^XJdQA10-RH8m3%B_%@*xw+j! 
z;Jc+KOoBKbmm4?)FTa+&tbW-vV4%u9K+K|12*HFsEMRo!CMAvI#>9L_$=qW}+2sOD zP$bmUCEY+az?k3VUztS2ZW6w<7-L{=p8a+R&*)(r!Z*{FfFj6r!!O8mFrbL#v&lQo zgYJ6H8~Bwt14;NfI%k>+&PeZ83nGZ0>A`5*A3u|m5^GPtpC^eSS`=C_2@D-f2ND^- z2j=`V6ehBt8OosY>sTJ%7HrF)Mv zXeaIp%xpnvS+2III?Arr)ZpS50`>Su-AL^mKi*+|I-s*oPxU>wOi}VIz)rLbGEl}a zO8naSo+9UAeleCOf>mB!UM6Js*Fxj`Xn-W-i%)0GLjm~NHb6B^5+9`W|6;<&0rc7p3i=`h{XjQWRrv->0f$>_i z)+5Z>k=|KX!t;LY>OF?K&t@M3ufK{a%FjE#S?1-R_!y}3Hn%C8&BAmCiJMamSv>YB zT+IuC$O3GbwQ2c6u)FRLjBrciZN{dzDO zHq8-&?cU;NQt7n9A>{t7*v8uuPz0!&`EUI@YyU(FDqwpW^ZNRI3|zO3Lq4qk`B5?XtImG0?PT8X<_g`lAulHm@#2RNGi{MKZb{*?_{{(as?J>J^ajU8dF z9wC>!l$V=ZBF6e%w5TRu{f6U=vKRok|4L23m7p*UZku#?mbJulUYbUDVPN3iJ92{b ztmm|@kt&~sE824(I$>?;dwET|%^nY-5`2}6$iMwEy2jWxE=4OYpBYdV8Vr7uw32MH z6}484kP&1u3kss|SH8m^d0qe5s*bNoXEsn)lT~dD&NMfn`QZlzmok=cNcVWzYJ~e1 zKY~LfrN$<`@D)pD`J1;%?}k1s^pPNt5uyJyYt1D#_HuoUNY_Md>e`0rD!rj6!3kPC z7cM8E6L=D#Vc3yyY)YG+pE?ELo=M_d#vrHT@W1vPJr&8p{!Zwm*aC?=m^fq{{>S)?eC`13A4ck%Tb$ zCyvxr&sp>lWi5W9_XI1`n%{8#0?h?vKS$*iRp=4u== zFF;uD6@kAjdrzw8Im7MCI6>xYCc1j%iIk2N?~OA9Ob z?K1z4k9?z=0j=Fi?0r$+8T_B;4+h8M=p87>^?+(=fauNdIbC#RbZ8jB4I*TVNz7=4 zYMdFoJ!<5y3`ICN>=m7zkGNr>P~0!Q^MUWa@`c2KoCDB|1Az2;i5a~0>Vat+Lm`p( z{mF&G%#$CYX#Y=;a`~+DKHcXHlMma9p3H3#ks1uFPfxfH$9KcU0>0ze;1(hY1w^rd z^-PQmhe(YyASvHFxMg5N@S+RfrtA9KN3ICjH?KrG^!?O$Ln7rgb#2nJ!tzyn?0?97 zy;QmVkS*?00Hgks_SJej9Wn!^2l6@ni4A-<*(qcfPstO1_6s2;g+2PbJ=n%47S(rq zarflC%^&lz9f9<;gxm5!iL4_AlsxT;hk5~AB~V6igBVsxA9_z21L@SRdV*@nSD4lc zb5ANHv>jEIY7|pwe|H(1yH{W3M%KrewGV1yVlBHH8y2xiiQUv+HlUE^Hz_zX%q;SQukoX?}0 z+!b6MFRYbrD8@f449C5<;WNffbkBp}{Gz(UhMzqr-5afA0Q1t;1$Ft$N4-7Kt`v(R zd{tfj+tK-tY+&d5Sb;1$qtPk= z$_go5X1yZF?^>9hNALsg&1!3FM**hrb)=#uHsoRivNG!*RQHMXUO-GTZ}7W!>=LoL z)SD7vY$_jcpGt%cD)fgooDB2kMUM2G-XO!__EtK?wM9%Wm8zv18(!NqB2r6g`HQwG{H|ny;Mp_<@6VsZO0+la&Y8=XbVbtm5=11UYdVlI+kM>`}QW-3M zY+p0mznDUjR3R4cBDnC6dQ!0t-{E!sD-=px8vrMrnj4xj7O5nh1;D-$e*Drxfj-Cf zM0uLnMb}}mEAh*hwZM>|n;Ss{q!r7axQK9rB9wx%r;6lm_@=R#VsT|4F*B*S8`Ohz}~%%nTGf!3^)XPew)t`SrwUp+$9@j{%-Xl%b;W zv5f zNF0nc)W79efEC(?(l~RGf^%id%2fXAR;vR&(#5J95EN>ok7Vvf>vpu{kPU&y#LX$M zMvu9Pz7XPRX1^go)>^>Tn-Czye}r8yC2E_(Q&dl|{|(v3%3uIVdTj$Js_p)*PgCL$ z1?m8S*K(TmEta=V!Q}NGLMF~bFjTipBtGtOQ+6%RxWxLBVjm;-4-D7_;oZ90+k%Dr zPU=R+7Tvq7_4Q=m*psrxjC~`QxW)injxk=f5wzdWU_#R}LDF!}`fTQ}W65gvAx01* zBx6mK!anm#D{E5IHhPH{xDY>1%VZSZkI#r zy@YgnJ`yTfNc#Njb!8M{5k{2TIMy~m{ud``c2YYSFlaAAVu2tK`705HY@CxC@c*EX zbJoMYCCDUg-W>Zrjq01O;=>K|s`36EtK+O0ox{Z^TD7+(hJls9OM*U2wtqsu*Kc<@ z*dQSt|71R=p^>{WLLe+Z^VJK!ydskp%GJgF`mFi!U*}(N%S_q(j22-R4=N~OWRzCn zm++NfaQqEMocaFEINc89&shi2)Q?B74Nis1w-+WGhMuh-KDQ}p|DWUPbWki=5>Lfx z>pAid4*9w_c`|aEUQ`MSm)~p1yw5u?wu5ft3csXUuw9a1W^B6E$n1$pPp^|&be~M5 zqTXq6*t_*7PZY#OJ$Uk@oU6!z<|Y(5AK~V@;c|twmli$&VX0U-!Bya4k|AhQbjVVb z%DUA<4>7BQx2gB8OQ;(J9lt9qEX0CJ6f6i2q3~@J1$7xZh*K*Q5)ugDx8=z*#=M{> z$ji!ND4hG;_TIgl7)gwson5iHpHS;Sr`Zb0 zb$)QLUO%oe=|G>!grYH#=+B>5euSc$ekMY#ww>KN|5H7mQABTOgw0U*Tv&GIcJjsH z(MWfF;)CsPl~{Y%;oOof11arl(Jyj(k~VuTn@A5r%5c z)`x#(wl>8jyp4@ut$n9%c9@``0Q@cvl+pKYR=oIu=rI6P3Cbdfspv1Fk<|;($at+M z$lcsd-2ARQwig$pcNP{tz6UPs!)pv`tW*7m0s@n~-~zbM*b2fOL|4~d)0=TN1s2+# zprn|XSC+3|Yd)9O*x*83Q&aE=Yf5~0J9rB1p9y5p?)?iva}V+dhp<9bcqigdzm3b6 z=nmS8I$Llx^aU9Nyj`xbfVq4d;p!~up*}!wLFkylY2JX=&Y`9)voDq=8HaS;>HYA0 z$a{74hs-=_)0-?DJs(zKqZ~V2=N~0MnCtEWVk*Mrj~c(zi?T{T6@5i<({GEthGX}{ zk!5~K(6`p;x@cG+v3ue7P>X-WdtL-&$He9BK&z_3fG!}lBHdvT&a-tAR97buzis9i za2&T%kO_wf<0xKF`-qfM>IZ~tJyR&+qlS}DM87H_8$SV7el*fdJY?u56-m=(DyjPi zAn+ClNYgDsNguk0GUFVe&R%gZ%cw9dtz`@vnBxMm2+)xb4Q1O9waag5DL;<#*s#gl z1-QmNL@LUi_mPp|8)bb}$q?Fm6&V@X3Cgnf@O4ce!SV7EAwiz({QJlc5wgPxafF24 
zEo~SfQ04g+wBlX#^z>=9`P_tTD^(8Ms^ayv&CCit$^PGFpWfi6fZt=I#JF8;%YPP9 zZQlViaY=tOsfin8U&U?ZAm_{aOM%X#h(9*SEMt1tcFOVKLy%=J3g zyCf+Bh~vXBDq|@ZqG)+y&9c;Z%D(=G9zLm~gYnY$lV*3qvvC5;!0i7Z>AF|S8~yE{{kD&z`VFo{`wDfzyJh}7%y>=gsFqw|R8CG#&`Bl`W&V@AjPQrhJAV)W zTCllm!`E+^H})jcicNb6Zs1egCackWVkjvWI)qy1n@2~gXQrvCz}+iC?J(Bs zW=$BhU_X~akN#1>?8tE&C#E%@;7*)Fi)AD60Knx)SapsF5dx6+0oXI(89?8bJ#DyItQtRm zV_cfWlf@_-k4z_@Njqs)^ah2)|30c&jbvRYG9#IPPf_FnrIL<}O=KiSHZsWx?(z0I zCr#9Cz3{2aGmT#>p{IqyrpJH!EB&M4f{R&=V68oB5LD8a*=1tuC}6sD;i~CvTneLg`J&`kDGg?!X>S)fAjq0&;urXJE(Fv5{q6Z+4MGDKf9CC+D_;3EX7l0dG+ zJqd&Caysf5?)L6X3?IWmNmgsn5TB*?+41}CA^Jd89Gl7jtss%??)@Jj@(#g7D{&v+ zXx~C=VY*Y9%$HxhO2S*>)7#bBbs=#g8g3xo**r8w((54Lu6#<;;)d25yhmA8?S!4k zdnXKSbW=puId9miK%o$!G}x+}yslN(D>eDiJ2a%EFeL!;hhT}D{KFtl>K7&^8P(O* zZght!pASonS_5B-i;GYHny>yK+CZ*AYetuLMOluZ{WF zm2-$S5cY(|tHc-7`L+~)Xt3Oa2QNKL&WkRl`P9cX9|=2S5A2owDn~m{@I*036PLls zWke+>)=vBOMMpU@`?+e_+QmzQdG0O8F!n+r57o_Dd%0dSB!3KY`MRllY0D+HWrk5LoILL7J_ zWVQF5x7Sx!wVebg3IH9Sn3+kMihEM`;_k;rXmRlZLc3}8$qKyonDWVjJb3LTRwB!$ z6ckCgWCR3l+A1dqxN@tYxamCkPtDoN-v4uxD#o9NnnAtDQ=`$`u&~<>c6K5dX>{>R zAe@I_LKiiuHd?Aqu=mgbcweTaFZ#^LWgLR3#%}psGj5{iXz#pZmO02Ba6wa>ik-Vgh=;{!|vF_*Y34b7fLH4o7VV|}bZ$$1^!Mp2! z2Bah#+%<9(%Mmy&v}Olu7u(O`3~?BFXvTF0QM9lNZH#SQkh_cNcIiicR~^q%Jv(uE zAeE4CD_uj(s%(gvEW561?|eoD%F=gWa%H^o2^^3yfyhv72xD&FtjO!FvZowJN80E0 zKvT^4K*qWBnYlQC3lb=t^Xvc_SrF$@D9V)i5EWYn5QYzsK}D3?-WJMO z_DgH|TbCgt&O5XEEU4QF5dZ+Zc_3k2z2o^KZ$RznLbUy4w8!#jLxYSAeO zEqDm96rZj;HBlE5h}4$R)B3Jr`PRE^>Iw0+HFT}y-@SZbgKG5s^=)V6wg^1IXxt9} z-#9I%nw}mc4+}DR!zB#+(oDD0W>fBK+*gdn5PuusP01HuxpI8brV>W1?Zj)Y)6#+T zbDOS@^t&TJ5;?j{TK<MlX~Okzx_S5Aa)i$r_8XCj{C4&+)}?c#Cs zbVz12)TQcu!pP6!35xfgikhVyarp=QHV=V6g*jq*up;8ZQkNnV3oE)Y?1lrD$9Ae> zlL9YJ!%$z*nx*XwkWQ03_k}icLfK*jDR`}K!NxupZmdNW;~Qj4p}`I_^A|_rE;vH} zOP9jcygpm2h5^GByhiDufluh=v$kFixV;70mqjWk;i6R@Oxt@fA0PnXz6jx4W$}5v z3=9mo|9Pg=h1KKU&8p*pGupN-=r(8m{eq0Ytyo0>+AyV)6ohk~dI*DRCRKIdv4)zO z)|Io}Y?R@rVw@c8AkaaUjFgKJ8#se5mImXq5mMEAxq@!+28@^|By1$CN#lIj-LzWi zUCkcyqien?v=)0l^6t11L zIPWDBCB<1dj>ReYx~b&E?8nGQW2UAxg~lSJU*BB$|C~$KuTN!oP{76ox)a?7NHnb6 zB47_!l5jEs!eX$=fpB%mmOpz28Sr@3$Pb2?#hy>JH` z-lFOe+$XmoSnG+kjSco`P{g2CP0}zA@Mdpe&LBMPze74<)b<7rPN#N6@W^7STq=I7^cXhMuv{FF6#2%dMK5*i?UpE6OD zdH>XFKeA~h3xQE!A2_@8a6W>;4;gg-$z+7a>SG*-y)I@$yORzcGu0WjP3nK~Tx)sW z=g$_?jX+A%kWfp?z@0^?Hlix$s!gVqWpg+;JUlQGGG(;CetK*<-4Qyh@m1H(CQ|VD(w*jOjsD9d z|9}>ARY;}@IC5N?C;dU^Fgxn+wMG`A9=#}`Z(3vRmd!?)$-nuU2DUlpAK}DFKfBXK z>3*<+KC9XZruyASw&IpAA%+P1BLH#2j^cB!AYx2_Lc&I<53A!# zA%KgGX!kqNJ0YMX2FAEFP_7qvkLsG5t|Y;f+Lb}Brzd3Ddp1-4`oj&xp`9?l4)*#3 zqnvyahp~iEK3aAY3Bp?kNpvkvBRs*y$n*LjUlj_(nM5aqKbMj+E2_5CvAr1O<{K@$zW2k@ z6BscXMq+on9Iilyw5~R)d5_@hIKJiEPV*wrT-2v3N8`#`_1$vtF&p1K|3dpKB`DB} zv1QdfIYIe-TUK-c)H3~-j{Q9IMQg&+!Ja)l&);C$Oy?D%Pfhthx%>0*Cv5)^eB1_6 zr3p7CoYUb^QEP?j_pN4S8*J@wn0R?zPvJDd!N4y5@*TlsOVV

6#9t2tUmLC2G=ra4@_!Y{Po({-2NqMuqi!S^3oPR|vm?(Xk~A ze<^v6N#2pTe@G4-)Ga#HwDc{gU0zRT7wipu$oTd6u4SlyaMUA?VIJg!dkM}7(p88R zjQ}7Ef#;7dkn$Nu@OtYK>TjwhU%#eSjjT7g@OyW{RarND`7>{vL4}-BMPV!O1sl^F z8zh#7p&vM?;q3TEKBDWo2En`V>4-)l;0jz;T!7wy^7+%E|09Uek?uz8+pzxp5Uc-iQtAumfuq?d9LQRVSs$^gPs1eWeTW7zBKK7xIyCKQ4*Tx(0 zN^hV|HctSK%=O{Ur$q`>kh}4Yjm>Dja_Yg3THZIMdd-T8=wrg>GpJn=d)@?9Vx(T6 z;Qh^-|22}8oqcesvy(9uF_k|3zhs*Ne(+K`tU)(56&#v8r|>{FDuA6=%|Wi;3Wi+Y z7qBI>gB5Pq4X1sj-}BknSxXDxb`(@X?)C?Wn9);zOcVc}??C)fFAt03HrYeYp959PVu5i51dfmCc*69;3lD#rghsV`@{RI`hIv@8M(7 z!2}7{1}cm7(zV`0Z|t%NnY89bc5}=#tR!z5^WDRCu+x!Y@;N;J3?>e&<=zdTtZG2kFcNbv#Vy6Cd(D!l$lW9KHj9atA^Hq(K`mNvs zTvMfYpOO|EmD?%uXc*B@3|e-}hKW`2CX?F~Tx%MCBvUs}oK(K+xE`TAdGyD!uM97B zuyFg4X=%+XoR|#tZ^~!mMU=EoN9xnPI+(F!=&6Dhse;UZOIxd@><-jZ{P^(t%BH;L zsXwYKYYr56jtXXw=Mh_#4wDF7ul7DRc*=iKS-XBU>DI(yu#dEXgnUa(ED!}k?9yR0 z;W5do0%b)jT%}hkg@3PX^mYxxL8KLN9kv6fJd8V~%i4a2F(yr5@7UxsYVr3mSq+hh zJ6WksCL(3ivv37j9m$L)$h;1rl~H&x48QilE!|2hms*(0$!9)*e7(D)-B__3!!;64 zMZTvXd3*^~7&}!aKf*0j5?D`slDw$FNq^k&lW*Re8;TlQR7>%@E>LDRF<=<50=4`P z9|u2;$_WsGKU91V-mC>-fV4b7`dtggy&m8adRCiMR8_Hsc`%kvrICW!=K19uiFR8T z#tcQ`|NYIolk&YRp{+5R-tDt-sH_Zz3mLguddHvhHf)r;`wjj-wH3c+>xsCXTy`y> zPT^?Z%p@P8{mDM?{wK-9W4)7EY;ayVW4f-Md-?#A{MqNb-x(W8ez3WX1z%Wr=9Q)} zzex_Pzf=FjYBT1+Cv&XbjbJ?L6wkf0jRp}aHTq9br z!yMZSlZBBnS+>GG?FZP3*l9znmdc`$U1ioA7fgKpmpgHHQu8WDL)@LPaHZH|o6`5Q zB!BfKj)YPLp^)UnmdV$yINyFc<}KNU8K+q5a{k05#%yJ3>ZeYkM@9Y^KJ7!&kcXXV z-*P#Keh1eG=t-iR@s>>lH5%4%+FBIfcrOyZ43S%8$7LKN;`D6aPr9f3k}UO%kMo;S zJo9Ho)fbhIIOCLG%vOH5*61cWo@U&8A7Ml%|0x0KQvebg%|Zx7n`%pdFw`P7`w9;3 za{x?F(kSvU&EkN}7ltOQkywKGhCpzwr!GZ{Evd*af()#2w`Mu2-DHIqq|x6&-@f?P zB=S8cE#fspg2#&;nW*maO=ClX4TCQE^^d`6^Bw+^$rwgCvu~Q159r=YJWH3Q3eI;X z${32)3weOoXcKiBz^?SVTFF(Ntyar;{CVUa@87K3dpx?SWf#gs)nPlJi; zLmvvMiI?KbF1dOv9uJ~uBt{(cZTBn^{S#TtsItkv@EvVVIEm@aNt?TN7(Mu)VE1+4 z^-mWp*2++eww%0`seR$9{mzk;i!V4jJYtT7ViI@#kGJG)`H8hy{+ntkBtuXjxwaUU;sOhQ_3pY5of+!z{Hd#du z3#0zY>6F7CEt4Lp$0sp2-kg$LOL);8c5bvO*E3a}266)G9`r}b?W)xS@eYPv$}Alz z7r{w&Y~`Bkl=niD@gnm3~y0ZOK%sA?HhzjZ9bX;V#a=Ylvpc$76S9r+9ubi?3rF`zc<#+fiCrqMS1Ru z(4a9nYf1L$*YA+FHJqou5s)>UfEWFn;kR6q&uXJEsq6do!~4GqB=YiTgB4YKDj)I0 zsXFLS@*Jvk6CY1A{fW5qx{alHk_;E|0X)|;8cGwtXgXbr@HNQ_o7HPu0UI*6D?rD4 z*xRd7Kr4e)5yI?Q}T$$72iDY9=}@o_POr`t7f!x5pAW!@x|aMuGEu;Z;zCWzkdr% zFcI%@*cQkcl)E!Knm;cSDCn8~cami-rF`7f$L70Rt&3Zl&Z9RIvsudF|75<9s?FT$ zAs~N4rM9*P;_D7Jn`19hMN3Eab`A|bu6^0Kor9h2`)Af80y%v>+cVQ&qF=sX8FyzO zwZnW_+)tO-?)uZ%-g=QIYzT3{5C(v7czz1}+Yd~#ykP!f1AJ1Hb?o$-e-}18H$<&_ zNqj9V+*DT}Ic_=}c)BNlq;TXH<+(AG4mJvNqkIb#j zseXh`VwH$Eo2FLiv}sj)TVGAZk}2Znm1wH11@*tPHm2XwTsW$G1NYnu20v4M@Nui! 
zII{6xdiMtBb3VkNsyW&`#$z#4Ge7Ko**@o zuKC;EexbGIdifyg_3+#p$tyt5e!nJ!)}c2IK$=DAhmIBTAjbdw_-;KHqd84Ee}N>d zq;kq|(?R9Mw&S@}zhj3vP06dC){lK?(SYP8Pv^F-lPf!JKd%$YB{?$d`aU(Q@=aeHbosfBsfZl(Xk@2RlpU<_y_0=C4GleCDjG4+P(F=un7< zrT-P_8;n79cpA>GX8JLr@!w(AeM`PQZSTQeFChUD6LG{_R*BSCHsePV{%M|9|a$bySv1_qHM`ASfv< zE#08fNOyNii2~B_(4i6%(jkp>OG$^)9nuZb-4D$-4<0=4dtT2U-~T_?axIqJ-gD2) zo*mb|_MRU3yC`@i5Wv*C)r5m@AOK!4?NFjd3c zQPerkB@t=CC3k{}wo@6kvehp~je|5y+lrc<#Q&`C%hb0}Up!dX-?{ctGo3C4oz-l@mg#1v!DinI15 zy^B`w9WVnArQ5(SY$D_o^bHOD@2*zBoHBpi}!Yb8PxV=+?EXK+J#h*wH>WM+x3fx=FgO zi^JhEr1KUTLuA}m`;)_!9HqzJ+}sSrQEg%c1B&G$vQEkf%#HEEo>3XM)%2;$vOTib zOyRwTsO<(N@7DE|X}fTVWAfIXBwK$Ulf9>+xu%``_VF%uU@M%;^A(nBJncK*Kf~zqX(etCviB$wl*ZeW_dy$GiPV6z17T0 zw)Lb+zn!Fb(z@&UzDdna2cWt$BOBDbG+=}6FR{EQ;DfpXSMrPOyn`hJtO|MDqUK=8 z@PRjSj2tMSac~D|5QCcW(<4m~;zBED&LGR_vQXc;RUCv|CFimz65Ftt7?pMuNlzsj#*Dn8oQ%R%gPbiF z%1q_txEolHRR#JYXQtQ6$c@O^y;|rev6vWqoZcn=oUmr#S;4^A?t%0%kxuIq2y_6I zGce%0ASUMLkPxl!zBfrDU#5ZLhifTIrTkdI&^0n=cO{+yqg$v0wLuL*@1oZyjb_<(|OKD8AYKoNP3b zK6d`wcM!x7xXZ=jhs6S*t*8Zxvm;Gfv2$17+X&!~_kL&j|_Bp-qv?3kXND^3T(v?$Ec9k8cuZe@SW8g6Xu~ z^jIjTC;>PAS*^|5mn|Ng;h)I~7cTu}u&;@?m|2{Cjna~GvX+zi^?f^?h|Y=yL1~4- zUh5NMM|Kf1*ik^oobi(aVXdQn+7saHfS%g?DQt#9t(q8YYYxXuZC>^y)U-|$riIsyG%W$JLK<*4+BvB| z5o2HLpH%vp$qc*x1g@g6FZ{`AO!OIS#e(+2FPA#~I=eZVbHA$678ewAZQO(=DATgp zziXCzn9huI5c5T%MniXS+a_WP=W{>4;GkYQs(S)ETqe!%COE$S4qi}A9$r@AJwd%b3ZSWwK>+Txv`zopt?_ABC`a?KaRS&)Td?lE>@9;&a1Tq$>>VM&O$elzn0%CyKu& zDs8UMg)Sj2Zn?_u_J4hc^<#BIMn`lUF=jsz&qi1^(2t~4V#u-}N`QncEkKOK*n!Bg zH6jmKPlEfA1Pqbu_an^?w5&RWC;HweFq)pXn8j!9#Oxc3QU+G#JGLT12weuZT>6gj z(>I*ftDKB^3bdUs+zd&UH1x7k-;U|)fJaG1P)1c*nOBE>ZPQaio1*M-g zu=<_~{XimwpMKiQpp0+I6b7^rB19^DY5*n3fc;mfAES0WLnU80&&+TgE!~F8e>GDl z>DgI8QD2nHT}E1_oW5hdG&lD7VRL2Ce1BzsxL@V&QBh^Tkb$;vT!Y<)Y@Mzt0mMCC z4+lx!YhXqBjzRR3+)!E-+<`j=R@Rdmp z;xPfdvaq=OoSv~-jCJ%On_`E~p-*X0#vd^X}I+cE% z{a{Oh`|gw{tO=_s?t8uw7*(Z))3S*t9)nAAwY+NA+NR%|u>#E|xm?vIT4Fa^JW|m70<>`*F#fj)UhAVa7*xkaZ+)y$cIyM*TBh zQymr6$Y(3Xg&DM~?FPs9SGPnTTJK3ceHj|qvL&9Upjorkw-t)xowcy`&K{;IQy&xC zhcp6c33L&SPFP^*qAO+-n(R-_$x7$~8cA-`X>;lEErZN~wS$NWdCzYp&)C`73vo+v-{eOc+qJX5EUnVAS~=P8myBPRsp{F% z`a#21x3Q~$FXpYS^L7fv^=p3AQLB#JYL_-I0u>D0jAnL8Cc`R;P}!0m;2AFrL!6vN zzwL%$n{LjmO7Y?-vg3pd~Rm^o~Ibj0`%AqQDR!mF4{#RRYtVz@wxR<#L_54CxrR@Y|u zgC7qzrU-W#oIW+AUf9pTW&{aeXUG%dUfY5lnoRO(w+LWaN0gHDHT-;?iMouv6(9JTTYMy-eez|1Z;i2}Z&25Yyt)emk4@n8`Q#XBX@9gdH|wjK zkxk*yb+r4oj{2fLTJaNz62*B9Kq3Gr6IT9U?VIoiB^7u?*x0IbIu`qHuR=@&equFX zrI3aqTB=EA$Zl?$e<@Bgu1f6Svs+>m&Tx*W<+d@1uX)lr-e6NzSw2bFH%6t9G}Gi? 
zl{}*SN%DX}uZh7+YBtVssT%QIa<0&!*0s27=e#eV$w)r57916?xCAf3lX zzSRn*{*$5sinS+&yM>(oticTSKN-kvo`?_VR4;wqwi7h>zW|Lki^GTKwyaMfQ$go1 zHxhQzek{zN<3B#m&rXVY+8xF4n$D5QL4Ku}`G%WzSXLFMbUPdv$ zHNvQp=BS%NX6D;kOYM`aCB@>j{BK3M_C0;=JGuna?wmRGV;ZZ+7t(H8R{rudhC`a1 zD`hCPJd0iT>23K=RTCA9JO5V~Cy5XJeZ*19}-IpR&{etW#cM0bjq z^s^?P#^AVbggWj}6-#J01A|Utq$^)EWSBOH1=%2MS#yK0zn!u8E2mSD(@0@4djrEl zw1s7b8nwHHEgD*ma!053_$n0os$hDw)Tl9vnHjn64L`b0a8YGZ<7+NT*F?sonJ%n6 zO%XN<;TxC*d)fCg`&GJ3y)NvQs$1@q;_lurXnq9`JJ0%X5fKry;7W=w&-e+TD;bG8rJyV!OTT9WH2HHs%`2OVp|gK|{7t2ViqCeJ`UgGn75|e0f+6zCHsQKY+llB#>ME|xE!O901UAF8 zs`NYzx}~;Vv|qKC_Rl^tZc2B#wq4-pUCZ7AJ1xaN-`Qz8friv5F&kwmmVf>>bq|#H`JfQ- zy7*wGJO9QTy|jXFT`PwRY7n})=i?W7^UER4D=(CeIJ>oAI5TeGnieUEIHHqC5lVa@ z*+~ucukdUMW8KxFyStV|pve1)@Zq=Tf`iG;*r$xRqdZxsH^0SGjCcZr-9#X{w6A3Y z{mkMlX`t+?FC0PkO>IMrNoCZ4{z=qkPZj9&fW#n}`I)9YiOQFS%AXWv_cPVIY+?=_ zyRXuUlON`B4kBJ037Nir^ZJZcm)B}Po4L#@c;RE8u!4YCTrflE8&fMa>J*k(RX4n} z=ChTQDn;agZ>>4Eei@ADdJv!SP5_ALoqk#==sV+_w);Yt?v_P>Gq}=a4fOLO{`BC< zY(j=|I&yvTp`hP=sJDD3RgB7aB8q3+{P{rGV02w6M( z;u%LOL!_mW@s~2V1Lo!A7cBG+aB!SJ7`tdoDLB`Q(PG~b^ikeNlIT%Rpg^;*X6k@C2SBLzqHTKO;~G0iD z@{RyJNLN_qo@k1r-c7Zyvu=uQ>!HqTd1VO>XFDy^``?w$dC5D9b~6&)oSH!4M?z@3 z;cq~R=9u`wb!2v?2-3QjT2Bp8fD|$WzwTWBa@<(O_^87ARkXjI<9rXBm(+8_WGl?% z`|tJ=;kD=_RdeoTR@OGtM?HVzQ+v?4VUx~`Om;>fR3;)snS~TS5ZTyuX1&^{WuPKS z!jX+dV}dpnUe0w0H*54&bJ>sDXIRNLJ2)nYa?|APq*ir#F9OUJxUsgfmvOs$2XK5> zhXL|;4p)`TA$zlM7+UuOmY9wQ;rel;@yYYP+{{d>$Gp6?N)lm!x3>9NRoB(-W4N^d z!(aY1!cm5Y=p&@lr2&|(L=}TmH&V~z<37LepMJSXv;r;++BJ@So|j?h3fG3~NG#88 z!a!FTuB#{&p%SKP71L6}z0*JGPb9^hYFLhSBmPXsW4w)huOp+jgZX7CIty-vlnDLz z*kFzPhyBw1Z;GWVlsasp=F9VgYIiMzS`wo6-v<+bBj{O&Hu4vOd9EQvOkljEeGYoD z1QLRFJf~BH!fN|~GjF>BI2f<>YT%05A#5`s=LwN`V-NbwoQ#a9xoM84MRM=L5~whN z&3HEFWWa9#@dQMVm*BX8SD?B(jRPG5e{@X?x** zDjVe_PprD=g!P%CL~O|(rHW%>D0lrnfgU4OuAu8uKgf#ch>|y>%dJ@6_6KFYZLWz( zhw<7ldne0xz8?ci(3K2KS%O;j>KxD}<)&3o!GKxFgI|*&#({aM&B`rtB&hIkUJ+ie zHQim^r)nj2KbCQA_%+LL2w;eONS@ftuYbmpSNKTlQoFn{7K`}lpP`poUQe_O*TMC zIhY6R=ln&+g{sJBMZJ(A9K>JPAu1hkH*a@H7$l$~D0sp$pW`hmj2E1))9(Un@C2w; z*$9j>RlV0n}8K508WUViyMV#3NA+j*diTlwKAB@ofZnQ`Q6u0^<{4Yd|V;>2*ydMDKa#dqS zk|9oWO&!0Oi@w-{YcSLbUTzt^g)jx+>AVli<<3llBKYWZV|84TO$~fL4tOgHydr2r ztg3n@8Jm+}Ep=>YGdA%VMcymsU|W97{jE8 z73&OKY52vhO_dHI+*W%(F7ZYk_gQoT`@r${@MwgOq*LEr2W)(7|A--Q5b8z%g?io| z=flG6hL!sz|Ghd3%PE;fi#bq!fA$HO#9WjT44BS-0|?sR03EfC5}kfa@7}$8ieaIl zyABY3!D|giTrUVz+#|79a5oX)Kh{r4RuQ>DM=FJ}7I#OQJ0&Povm4W%ez&)>rKV;)0(Bm zMCXTCpX*tcrUy$6(ddxxoV=$c3K|_u(8Gadw^oZhzk(j8&@!{L{gCh7%h{H(b(8Oj z#A}8sCR>SnX0mWr8?2V zePyP)hh{CHPaq6&o-}IIJef9~V2Ye^U(;nezH2M|q))JV>76&K2frmE3^{EAJfidu z>}z+4T(*D0vRvqG0eYV}O1<2dv<30^fOfo^Ts~2Q=CPCJJ zL^=>@{%1E}>NvQ;G8jh=YRIsd7#7*_2O|JGO@1%%H4w{7kgtgtww~-;pbc7Z<4a=Z zMO$T(K$fhR_pT$u%X>_dNly~F)Mw^yMB)> z%exd@-LAW-y{F~n@P+{aF?b48LRouayb4I)vEH&usT?ql;p#zz-GdGqc-?|HGkmrn1@}ttk2-Hw7^{NXD2CIQ zLKi8Pbs4qCi?LOQThn|m0M%dc7#c3pk&)nJ(gkvV84_gphBvQgON<0&>zm`p0UYMH z@E;8yRYFD~{zI{J>0-|bF9+1}EWmT)tkr#_pbw(3G?g+d}o%aYOke1r3e{>Ene-Xf!1fh$u&FO{~^0| zpz04icz4?tLWs*aP?KSxZwA*^2UiU^nsHo^`~}d}&FLIG;tY~BnnA}oWKq!NzLYX^ z&}4$dTTjhB;jZ;{ki$O)^r^VBYcL^*amYMD3V4KKx~}>{Y)G$+k#5O8g?#NNwbq+$ z`Jw(t&EgC+nbeD}M19j;i-|v@K-# z#D#?CQ#&4^UJ$B_$>6;2ML))b#Yl)U>n((5OKb zbUMYy^)5vCWNKw)<=bW`!9YbNMg!Pk-GCid>2hqnb_J%Rx5$oVgQfwe;`w5~NdkOW zAflJWzW*9v8dGR6+-Rg!G%7!@aBz8&kVFJu9Jpee_7BS4Cgi=nN}J~Aj)N}?ZeX%_ znZEKl9SYpqAAzgUA(DF_edhSB2P>}Dv2RxO$wF-YS4+qvK+j8C#t?6Y-*gYPgj!oc zZFU@2*!8f8rpi(xzP(`DeI4`!=0pKB^YB~>C@1Zzmt8R#sf*|V$1dQ8taqSLYFC)4 zo6zQh?`C{2De?N1X97E-9$avqr%G*vDElyfgLYCTF zXkz?;E2&8@ZDJKJ8&c^WPJ%6wGcYtcyy98fVBzpWo*YAcVe=$O_a{6^MZ5lNi6}5T 
zJuooGjv62TGAF1{rGhZZd>a9R%mfLTe@`4ulYXQN_sh>3(Wt*tGT?uM_z@_Y zg?mN;;4B`c)nhM}$vW-%4AOSm7Z1fFzQP3kOT^4@B@veVk3J8;RE`hBgF$GAuRn%$jp5E$_{J;mDu<-}2 z$5}cgXVHdGVA(h!Lz@)Z%A>6M_lOu%?prm`$1KP{w|`I*zqvLDXL7*u@oBN8B7+IZ zB#kId>^I#i=B-e(uC3xqp)o`1~rjE9r>Vk{y&BmfblvpUza6`p&df6Ta= zx&He@P58K=UEyT}xZ8t!D{#aY@pqViAr9tJ%?A@XEkTd*rlQs5wH!#oxmgjH+Qg)w z_=%98oKM9M9^%sx^}AR@;c^jqJHSy%0u6@`kIXT!4^3HLu_EatxONb9@Nh22|FozL zRgdgT@HGtl@F;tL9kXISpqg3l?6bwEino5N5{^qJFQN)1A5ZQN*i>^RU14sBFn7z< z$#dH1y-vF`sEcRp1Rra}=p{NLf;2xr&n7J?*5kVv=#LNqP zWkyxEY*DDWBBA`Gkgm=)tU-~{%+XR4+gs1F1cQ&I1bDvWkH3JztkL3^T~5lKCG8y@ z4f^`}CXE11y)3Tp;k$Hk1$`D|zEN+(*Pg8AxPex+(?kxe4FT6jej6Lczcfk6|$Y9mciie%&Mo7*tYfuLGp=jv3MB#pk(71)4G?C$o<%B8$tT^N|JK_T^iF zo77J?_S9x>JZRj|;SMfzZ^T}L$bh-5-XT?ZYTr&G@utC5i-rRtEILzH4z&?6dNLY! zaQ3)^DIXZ+zDBfcxxIRDV$EFa1TH@@<~?06mV`mWdl+J}vUT}@ zW{Q0bJS}(B){YKI32ABieIKn5vR~224LDC)4*}JgeEOGnZX!5{gQGi1dpAMXeP;K< zX9D+o(vYGb?}u_Sl%11HxR+I4yQcoFtJ~}8k*V9^9Svjmr+drxk{~Dn-w^gLv8b}? zOp2Cv58J93T{vp{%>pPsi_~#|bH4+Sxs*U;n%(Lcf$S2MorJyMLug)n-&#e+wYQ3n z;jo&OF_a5p4!_pdD*$mjSQZLd+EQ&;T3Vu{q|jTc(WM{LB|jZAE$I3LDpB;k^|UT{ zvF9KBbxJ%g@3EackG#ZqaR(-G6BtL!^NYsAM7mYc3(mZwiQb6vtHAbq%}b@^b6Yc7 zAwuViSf`r1(o*FE$Sa>2( zpDS0>A(Lijr1U!HF$C$u@LPmok?@JBv+ka)S5r%2GUD0~Sdo8@W{;JkqwWl|(y)x3 zN88NTx?8n`6aPZ}btmu%wqy96#GNuKbh}0rRWxJBZBaL; zKEL!Ss%CilAF759gPXbaGE$ZXInDMIQjXne}eW%|%96MSrZiSFvbtnpd|@+Ia?sS6VzF2vKJ0ZqA>h$r`q7%M=N5avd^QFU}GD$cULX~Y@0p< zH-Q3)>y8)TV9Nt-v2j-L@(pjOo!B-SFZkvduZ&O?h9me!{?xN^j=@oSB`sA6020bB z<^|Id&>LtZuHpZnSSVZKwT$r!s(VA{v0NEm8AdxB*V*VVph^&I z%>4=ampUNYsB#AvEGB;VbgTKWx*j0w-1_saA&E}BETl+S(J%}R6tmUf;p4AO%Ell{ zBX3-D>^?-!72kVeURUc>5LfLm6a)2Jo#vMRy? zJE-&rwOw<%JUv5+uen|RYoQ-|A7 zYZS$06rKU01OCL}sX6_obLDVdflbSsW*;HN^066aMRyb>gMu_O#e}4NW2mE*S>4br zKGXrkK-gD`iGk(cbPkkU56AJ|Ie~H)F5%?W5<0>88wLT$KWSDpd-JWuVPJRgD-fiXLeRRm@U1d2d0J{)29-H-Uz=kXA z>Y%5wVDskyUZ=GE8gZ(JVOu1c&;5cHMykthY;4@Ek0pEwOJrRI;Yl#Ce3>@j7=)#t z5nUo`jXT$TbVipgIWPr6UzEpd>$PEKBcb)vXO44f$z+$rWPUgK>}2dJr|ICd3xcoD zC9?oSro`pto~~chkGD-@iOQA1x7aik+--_@kXOg-{bAPq6ZY2fD+DlP{ZqIJ8GO>+ zuH$d+tZBb9h0~olDPqrOKyENk$Nl_39UxiJ#9l6N^gf_e_S_T{RayM>{m!qc-gy@+ z0ywa!S}=@m*va7Q)tY=CJ}Jq}kH5c35FexrdmgAC zg<8fpHdM8m^PI~4^I=hXRr%B>aey;JZan||sXH%5t%kN3FwBYRm&1!* zZUs{i=sTZjkXqrq0Y?b`_Iz~Ai z;1RjFfr}+#!JQ&*$}d#eu0wZFY6Vi!lKma;<^lB0k0!<_zO_Fwm35c4tC@<#X3pL| z?LCD;Ij5A^(~|SB8)dgLy3NhYTXM!!B<~O1Oyonf+$v3n!2c7;k0JLEA z9k8kke{5Mqnt&A08pxsTP0^Q4?1q)7VvNGL2QbmNaT5wqXzS_wJg}pWfZkfrZNdI# zDb=FGL=wZx))2=#^Z@ZHYv=vdounSC#$?0JuxN;l;LWn-ZR0+FXIU>AJCxf>HJss? z0ZFFwZ+B~yf3dWl+tiU9q)$q2s{s3rQ?RFu08yojv|puwLP0Crum+q2t2>D-`a@B(DgZdeuIWjx=R zkS`^Ld17wA{d+Fch#++3G7W&8R}ViL|Cr8^y`wTO<=fKlt;yFgs1@GnN6D4Bn^;(> z;UnoxM2+fEM50VMy%%_#A!j5esOMjGD&5`6Eson;2y7+*l^9=r6!)IrFMaT)oH_IZ zrhRvEl6)@?Oto0RmkX$Z+oEZ`PgCDV=^!_h+j444gYZEdw9G6nzuZboMty<+4^rHU zbr&aV7ho!c`=CGv8F`;N2#RW?_QMwHaX)`urU8st=Q{MFwgbDnlmcS<|!5_Pa`Jzo2n8;OvmqnarhNYTq zi$#+5!%-UfkK+{tInyEqFf|lt2O1~qyrMvb=A`+kvg&QDig}iby}=8~yUc46+*QN_DIIvBJW*;*QVpZN`mAZ>Ub1_I8k zu8K2{dYnoT$68kHY{%3#Ss7X9I3q3@WrWmo39rx+Ym_W3n*P?oRQXI_5(436arffM zez|${%KXbpn?r+lW616;VHLy=uNE9ET`dfTbp_X&r2~o&J-Gf0y}i9%zXSXQ7o^R7 zHJ{|{|Dg^N--`{v44tK>rowHuegl^}xooz8C|iF*dg=9>#%q6!I2e{pnjdVM0NdGi z4@W&kZ}ZUjP?w4WlG1C4A)rq!f=vOoz*)S0#X59yff$r{`G9h-sCVV=OE2P4xZakN ze)3s6eN6-iCVFk~d5wLs#_UkC(=@7=Z&Fu2~MGmr*R->jPp2S^i5ph!x7c6R^3@Nmb0 zlWUC@!&6Zakud=O9dE#gJiA^PP;~?gs|}Nb9|PGc2Q(pXz*8HkWfiWQAhnIO0xm#? 
diff --git a/docs/src/assets/logo.png b/docs/src/assets/logo.png
index 5e8d4920fb6d1768c88ce1f855f3cdb1780e3ec3..a4289cc9f7735f47ce70f6dd7298763e93c655b0 100644
GIT binary patch
literal 107081
z>^jwaUyQKRDp?3ov`C);XxA54eqW0@Gc&?4E4)!?I7e{0i*8HG zVthbEL^_njwXp>KmSmsVu6$D?BT9r+=Mt!+p6q)p1NWp!q z8E#h}2A#t?u5w>l3$o||Gj(8YWojx`3|Q%k`kaRW%kO=TuA zAQ!SAi9*!8P+4|7kCpR*i*g=%-{#T}8lX@3&wIR3>BR=-!;X1jLlj2k4Sh!dmb>=pQ2K%4f+j5Xd4K%>h)s&JKGuT%WDLwTtEOR63f% z=Pj%@_79uFzm!3Rkns)yQ|po&zy%6-3WHy3Yabnf?DY$Q5g?EQC<@&7pixnQ_=1g= zwv*I7eH~a^g>GP{yA2j{wKZQ?NX786-H`$)f?{mnR?t@Q;mq>3_qXlj;4-t8;EP#% z+=~CHiG&`XPYq#+3KQ??!ERgWy3&-0Hjbl*)_Vp2_S1O_v2MpcxniJ8@? zro_IJW@3o*mzCOg?U;&TrFdl8q|z6AtqCoigX`^jF_$jhb$6UPYY)~kN8#Gto~?@3 zp59)k_B4OWaS$%bz8<}j0)o>alybgp9DEr=iKRe+<%()ZvV~qv39tP^=@Dx*;42VH zyF#FpL;P=#^JA+fFwU+(2wLxpKMxjssRs25fA_0jO=d3OGkiTo*KG;Z7(tU)uTbDB zC0%G#@{R$_@((tSw;oRr9ztDV4}Fd9b?sG8@IpV; zke82mH0Z3FQUYLx8jvT_5%tCMF#Ro>V78D$cij4i!q?i{Y*7aGzWpl8I;_qX`=A<16)hE{Y)5_n8by(pXBo-)xNX_5CFV3_?_V|+KSO*j zUWHt534tpXU5i{1m-m%0TYFjPp*nhVX9(j;<6SDYbLuA zxU<*cFsmp=GYV}bhO6;<5^}gif(H+DmOps`pHc7ATsdwi6o2<%sSHB}yG1x4OJ_H1 zej^7iihZB}_X0zvkeXGlsej&YlUiGEeDseBa&zAR_3_EJLUPEziwpW@&y(USu&2cx z&lr&&ro_XPgf4b^9K`5y;B&Y;Y*@X3acTR|u=Hes>tp^pr@;{#VGVFLmLShDpV%L^ z{TU>lwF+m*gqoVV3qt+c%3y~J7*;FgIW&4NkDhXnn(^Acz( z8Fupf8XBex4xTwKADx*Y&}~9!yW~Llcu{|}$%z|^3JtQ^H0V?C0-OJ(?&zSL0VkCE ziy*xcc;ES|rf^>*yfXm8KKz>`J8(fH4HQ&TL-Z1-p)FVgJb?Kh=2d#*i!F38(Wp5L zx>_d(zd95l2c-&E7rD>q!ZAfJY~p^ydsu4Wo|q1qb!D*`V6MIXEQh3pD%JnyyU?=K z$mDn0=W0IPm_!E_-T4ZTztX^9lbB(fC^o?iGTjmjwcM5Cj^>QT-7&QSNZ31B!&$f|7*$9sTM33 &^W|tCq4%XE zoJ?PS0XQ)JA%IA1GsxZU&hGf@3ObE(;?jRtDcHX|f0Ne`DA>i(&9GK;uMm!rvwvU! z+;zj%SOR%9u~(F?;N=?0_}H>h^xu4Kt($b;dmM>_Pth=0?)2A?%d9+B^~@lh)vc2o zb|UBk5;|2dTYw|PG0VDZyW5DKsVkUmusinKnV6_G18dG!Fd+$Hq~-Mz))9K2A}dE{ zA3){jlk)N7gf-+|tKPA`OB)-fNK`>~*I>7H*EVh&FDM2IKyDT;uBr(z)cE`R_wSlb zXFZ6`z^ME?2evqZhF`rsKEuYBn@f6k^AJ=!>cF#M_wu?Bn$xMygCD?%sI9?R^a_6c zuqgKgW|7AqKvvjb$ZzTlbjV%cL8*v}wza_Rr2?6vVTRA&>D!~}npMxUN|H7gt$Sik zR)###J&vP?zpo&W1;bbz{*NA}p|Gwsq55TR=X0E1wt>HIKb$^!H>mwJ zox?$g%uZ&Mfi^FQM2fYnlH5*E%7zc;D2Chs4IgHAg5zSQ+*j~LD#Hp6h&0&)LS(tk z59ZnlS2)0LWunN=$Y5=O!oi;g9b}N|eHp-{(#0^WHH9YUD|dHy+aWO!0H)Wk|C9n^ zA3C5EDM`Q!LJa!wP5ShS`@-4px*uk|o`ZoVg}kETYmhH-> z5O+M9_m@q4`t<24jDx=^SzR^FfKl*!KlzPiN;SQ%_+} zh`b7}dav%d7el}>KPGbh0eM?cfgv~v=^0l&c3POY4D+^Nl~N1$6&z#sX-a;SCzzhS zP%{|E+;icVudb4}@LC`!#^uL`Tb#opQuDzdd4~B)6jW?G>5(v| zZsSz2E{_u@`t}sEu-st{c46U#8<@o2;m{8zN%4FT8y(1;xgdmlf(^n4SivC#_bdUW00_@i52#q%!NGwQAhHp!o2G%#oeLsj#esnVSKzwO zQ~-?VmX8dHABZg!>FMZd!I^*bb3*-58H9CqL==K-v1=VDJXCP+`bG$_iYto~9jG}lfk2gS$g2tBr*XTQs`gWu0 zi^{&`#SMMd4+J_kjLEv@KFd#D8g;Q22pVSPE-6&rS;hJ@H%3rw9k2XYb>FgJL@dFW zT=#ILn&3TQSaw8to*_(mEEU$f8!>VD3?eh9ryaIW*=#pWkglY?w=1wFW^mkZ1t=$m zhxb7m?9vJ5MjZ|V0CUVtU}h1{v&oYqf^`ziuEQT{fcAzxB>#IYWnq^!0*;9#`09}Y@saHABjL+4@EI<5dYWqPh-yfQ_YUD+3Da*IPvJZ2!qBvk zSg!g{RtU5Wz=IiJdY@3H44C&1(WeaLcSVtVu|acr2?F~raIZYy?V!C*HSP$xdJ7GN zJ-8jWE(iD)ZR=`k0P%mJ2NsHp;00>bjewm*;40>W@D;)G*-GGFGAq79c6I*v^`c`_ z?VNloS@w@^v#KehB70oITjp(Mjyz|>Xkx>e+3GR0gZ?B9o@G+=XcWdCLQNUzZ!B#YLsQIYDFd;8cJkQedAlzeav8p z9hrJ6_M{h1>;(+NTI@_it%EQdjo@PtlR0~Nc$fkC&ol19+;Ow_*=QAPwI7^GcP=ob zV0OYZ|^$xMsfZ|~F8*-4h?Hul~OccOGtRO`sEKpEzGBH{A13g0oL5zFBVHx{H zloBrzfMU+ak5^4$VeM&xNW917p2$IevIJW2S8sri%E9#UwVUnX$%zlof7}mnHlTty zuRrIJOcR!^jp()uz+dDdkmHilQttM&qDnmR;XJuSdbrLb$Y31B8G=O8mR3YdRU3Y7 zHh!H|uh)-^r2oIIUN2<7zdEEsY$)_pjBZwe_0qS4^Eh3t^^I(M%BLE}=ZAjiKGM@8 zMM45fSX@-J!VmlwtIzMgiL+SR{l0A*VrN^SF)rqbUM4ls8<;_{yK4R`Eq7O5o(tHm zHo)n-HU4t%{PGOXC@i}tPL75u=;2MvMcYt_u5W&;;f*_qr+ygd9XK|j>NNp`EM>P> zO8`h?o*o`~VA5$aP2~c)p^}8SxZ?7sl=pN>>E4n=gX=&k?1_M-H-@`8LJ&KS9LzS3 z5D@b>4tKZ^;_&hDeH{Ad$yV3a_z|MMILNAIY$C{BI77Yl@*9?9s$kg3B)u8I=XzN zSFf_8Rg1nm5i*mL;mSP7{gp$vx}s z?J9G749B7ej2+v=7YX2r`FD4HeG^Toiw2A*;%u@Q#)<9Rm5uSM<<^j0vOq5kT%@+B 
zl`}84&V)hr4U%VU*kr*%sym~Ujy3nOdM9Robehwgg;b?_f4E=Q^9o zJt+uHO(I0!-?t{h#k#b$mCTyh$N$v%Dd2lBFXhz5-Q72kOU4-wE>?5Eyzf0-2Sid) z>BWmfI4jQazzjy{{{&%u+-+@bGXTU&%ugnzLIX3+Bq(8SBM>XZw#ERk@J$$=YK7G8 zkx~F`8-Q<=0Vn|u;vzW6xAxcu4#q4*xhBX>!*(Yt7VW?#HzNO2qNO%Sg?<2e#uIGe zZgl~0EbQ?o*97MD?y`v{f8RJNfh6h!@GRoI5ij@}3a&@n?b(jY9X;gjgzvH$3|Eih zoTG$=n@gMX9rw@{@9!b-WK5xNth4Wn3n%Zg5RO@8R8*8bPg2ToIdDJ`Qk+vrj}fnE1ip!Q z0f|jO9o_~^(x7pjp*?yd1b1M=RC{|iqygFeb92r8BO@G>)6@LjotIoo~MHG+Z zJ-XVAV;}Jc40E%EW53QHgd#4>*=^`1e6XJUOVWi2EmC1kiY&Ba{uDD=k00YOzR#oP z-?ig)LPxg`%Qdno!l1jAJ2ml;0Er;6FH=1G8W^Yn=*Er8K-YMBeLVcCQHsqZ{T@xY zzG_~CTZ*bk!JZc2$CQ}aKXw3pAugxZLOdT(U;0qT_RK9Tyd~KqTZG{R)~c(cWF{si zww<>DgmLA6SBXuv_)PW-j9K5eSP(!PM--Q8!Lu6-H=C97gQwZO1 z>F^CF4NVS{a0WXEC+CNU#KbT92L}3pSi`xyy9*=j3L!c6_GMVkxgFdtj>LfG_pg-{ zXsQL^;iFunP7>DnDhKq!sL+k!e)1%SCdd@t-r&mNDySoBFtfJcturuLkEDG|p#}a4 zTqO~AMT9@E_E(vN19e36E)q998O9+r`ICQbv{LK=!-cwL4{j7a}brJkQcBz7F|XE=;%Ko4U)X zOEmNJJwxnSE^5w8trsQ#T)LV*37}+5Zc6!25lqC3svAp_eX5(ziW*wl7 zCWfc00W6B`ot;!EcNzbDAXBz1A_x<|;QoQl2n<^Z_oaS>9X$px{`URxRDB=ZJrqM2 zDc`}xjq2h!1$az}3@!{VGG~1FerVrVOm7Q8p8JEyAiwNvx5OFu<@tcNy}6NLd07Jj zLttkN&@%fSqZx~H_JO)EUc+BiN;nkndpD=iHo_W2`^Nk0)y>m57w=!+f?8~9;=+WL zELv7skLE8Pl73+xZ;U}HnBC(|h`0~CKh zf=0y04#*5X_MpDT5Pyz^)QZ%`emKoRqa=J`XQg_rR_1G21nE#&Hm>Zid2`%nwG^Aq zZ!(8QFs{yhH!3~+l6ATj@?>vgDF?_Fp`hoz(MnZi6noKfqv{`~>aUuILYs&7AP*%h zd+X++=?|gQ%3B^3G|ZNZS72@Aht7g9jYlSO^y~1v=`6ol{(3>bT-YM60_c4UdoMFTCKb~UX?O;y;&1!{i>l?z^$^On-mbh7p(eu z$+>MbVzxh>RX9psQOr1)%zBHYuBV|T@o<0@TVG~tlxh^?{&kCCG0f<4=!2xPOJ4$N z`!HE6+A>EdI)CP98099c;30Ad7*lAk#q?2|&>XC!|LMZTvx#4adqxloDO(lW@L$?3_m{DxwoagO?HMqvXc@nUV2jI==6a zdi34Dbh2;sG@f6Ogk>Aa9d{j&T4~vwd*KL_cfP^-Y0kkGugUZ`t$a5-t@xDw6kO9 z#udAWvT9#u>aBB%dSnOwt}zv%gEraH#J(cvR@=mP+yxWyu&0!7T6IqNzKXbv5g{v| zpfgNduqz{b8{Z1qAnv z%gf93<>ZVa#1cCUihy8%zw7P_ zY4_>=lYeLumHL_!rs`i$j3?Y_nc}+b-=$)FlK^QX=&8zh*?vkl|tgUBK&jXBxlY`JD30myd`kC=5HyLqLq+ zj;MmDQiw0_<;BwwhU1u5H4H3142z5`gYm~Bb_Rg7;fa5}P2U*Peg6Hg2sV-hk2<~! z@Ba6y?$?jbZ2b3V z$Dfxvl5A!+B}z~u)Dpj=ZEG;E9r9`FOW&9-G*zQ zQmPMPGz4uYR(MX46Og#_4wI0zid1eUVmhJjH@_EEM|+6q28<{2efrD$0`CDv}P;n>1F|86*imxY*gQ;&ZHRr?_4HLLZLlfniHk36CnQ*F5vV;;tO(zimYAd64V|@I6*PeDZs){0?Ql zvFjCIcp{meH%kTEK61#bh6ub@+AWl?=i=}E+DESU=$W&h+99w`E>%`FxmB}(iz{+g zVo{@3I60MBKXYd$;U`Z6f&XP=B>Uq#83ztMiu<(3={y#bd)%}7UQ6w?WSEIToaFC0 zbW3nk$7@>mI`AKvn4Ke21vOg%ayta-qwXq|l%WklLugp@O%?8L1K{sjsnZI4s``n? z5-<@d|BFTw{AqHkI1)ZN)nXvcgPxLjr!@m4ZFHm(>zW$!p@g)zf5b(q>Q~|io%Gu^NovIlMn}&KG?iuVa-OKH_rA)Y<2(T z%%#dRYOl-hM$ICUm{~{%%kGOdPGhabGq$w?s6s816_HueWjL$ILYP=f{xSO|G=>DC z_D+8Jow(9fT7#G_ZtoK>>tBhre}7Kwcx6<_>tf&6PE}rhHx}*U__%Opb;d72ofA_6jOdN<|7(vqsrrJsh@|7S+V4iDYtp&GQakrgfiR zPv4Pt`LT)dAVc$K3VTbH+{sy)>WbgbxxN?3+@6A#$8It2p5i@T^H=|l%vN{B+fvGl zH6V?#sk>uzH2R!`>buY$Rp(!#{B9O1@K6Usm5oe=sz`pc{N}sIV^s%%K z8R3%|dEaI{BxyGfZ_so0J3Iy-vvFvt-~61Kde|>*7yI1wYIgCu_8eRK;&v7gRna#%xc3p}AH0v8@fKd>2PZi-+i_`3s-PJAc4;DO{HzbdOh{8`NGS|? 
z0dvW7V*C?Y()tyRygQn93!>rkEJlJf){j!-0|v7((xmP()lVMZNB2u*M`a7`bAvge zexmxB_hnrl7=0!-_*&jMn*HXa=ZO9_-yFpRMY2J!!-#?G?gEJHMBQI zdoY?!ywF_ZUiR#?OEGTXv72j)D&g)}q?1+w(1*HC9ya;hYi>6pfwf7H%?neImDTOn zyn@2sH&dOdWU6?7+eIMF>%zoK2Z(d1DJNGNo}TA@wiAfS#%D1xc$2tHQm`g?D7G$C zgmWo`^>6l%Zso;iy!7!|s!85a;VX6ynJ;hRX*06B_l0b1cZ0py;KWU3M9^9g7g!v9=emt}v6HIJGm~XyR_s&g_p5`GLaSOIt zx{0RU678Xca3NsV8nQ&F4s)5_Ed~wsrx2o{T6!-0~dVk@q+M3P;V^T z#r^o+dwS#NhqMBEoylgHL1!C(iVb~*Qr<=uLb;NcZ^UmhV4V@78u{9-g8Y06Ay+Jn zMnuqA6@WGTc~9IH>ENnm2yo4b{H^*g=gX_3`OCtc3Y|W^AkB;md#rnpJyVDJ!$T>d zSbv)JY6ekbC$ka`S;mTcB%RrWvD!lu;i+x~J*HN;mYjt&HH5C!U71npDb*?@aRTx- z>Y;qZDec_muJwb3ICggxzPx;NY280Sxr{}nO@@Z0g6H7#=2f5qS$lAlm@LE3guv<7 zj%SU8a^cH)E7(e_lk3su7yacgT~)E18S`3%uk!cGDwOzGYiG#Vl&|uXZfLu3nNoRqk7{aMa$_g{E0#Fn-}}`y1>X?_472NcB$eMy)>g56{CNW z5;CFBRla@D;TUpJh%rnXvMtkW(s6cP3YS?gb9Qa<>kEBu^)xQ_VF8EDH^p7HGV(>& z_QBoH;nSj+RNpfp@1lVAs#2wfFYqh&fqVE%Qki-4#FIGXL{C>&+phX72Zxlm*|ut0 zdsfi|oVP;9Mn|WZq7hH)(W<)w>S`-P-rS%3KZBiH?ZP#zJnyX~By3x<45v*kBW$0H z%aq_K+;c0ixqB-~6cf2f)H>$&>XxWuCY|zQt-fF<*O;e;Dr|R(?-F3}puIpL&83#g zNoBXk#KnA#c`uZ^=JW-YETa`Suk=T&zDBJ=InP9%kH;(%@opu`iEblFHDb2l9g2!& zOo*nK8TQV-WW)T^8G1v+vgkI5qWBz@x$ZAi99Zso|sqaPz(tr?Bk zULmR>vr?^rNJxote%-W4qW+^V-wu56YPDm`Gjd0H+U!!BkV1xhzkKgUY}R(Z2;uc! zOX8iGnp+YQ$IYlp8{EVs27CInXq{)r;N2_g-}sP)?J|YnsB<96_t_%}Oe?EEZCK)W z;JlRD)89|_LwXlRyBk1B0LR4k9a87;=VDv-R~&x6`5kTP|#E(B_9^XVdd)b$J&^3Cao`MTE7j(6GzTG zU)Z?WeUvr1-LYRgx2rj#J=&yiFq-rEx8h>JxnK*N&Xm*!=_34bbJoHEUA!a7R(X%x zYr;NkN_s1fZ-yBeaKozCj9hc3%|6%mBvR;oa?RW74MOHXof-H_ot5>03F(tT_Lr|s z&vLBQ{@PtBTXrN0T=N&53`r9*Lw0zeaw#iDNkQ~&+)pG4U1P{ey3)kGSRy8Jnlc8{&S6Tv$N_YQgxzc)o&z>^8J+;y@u92g0<_$Qff}V+EKY zcL>Z18Q?ZbXS3K|q z4%`btVG@3!I29t_Hx8(%;Q8{v^KD$%`65u-&R3N{c3!^puPUc0V!p#jGCJ1x3wFdhihs*;*}LRq?gLUf_nD zqC%^-6K{>-OQKHGXSlya-#tgp8Z}zN-`TynSJH8dR`)d70I4-q<@6Gjy_vISs5J79 z)KOTkr}J_p_Ez=&+u_UTZ>t9df%6PVPnUB#>$;!auXb&x$?}5hvR1-ld87A{bgSD= zUJhNlxMtg?PRaQHznLYPYMY(=MDa--D+ev3M{gJ6`oI!leV~u7aOc zM4*eKo2N=!eOl$@6RcWP3iAF4b1n&>d+NeA*5u{-$0^I`4yGgp0PkEdr0^rserPvr zd`W`h08!j!oKpwQjJG)Vvn^b(FcdcMFWF8fiM(VG=Bt#|njV=wNgcn9lTT;KSwf|q z>-XdZ<^lh|oPCjd4rjwJ?qSf*VAT!oWXFr2gG<%abw|Rm$6XfvE~D?7Dk0%vNJboa zF4KqM$`XKppb_ ztr#cQjxV7uiWXNn6Vz$!*zn^4(cB>&Zv(tx+p&BF>}hN8oB{i*Wma1!4<%*gn1o&F zN$_Q(M@8ao6sm>>RcqD!Dv5y0rpNz2SGE71E1&GRyJs4+!085d=LTIuaX2x*+Iu{d zZ`qXZ;zN)Vml8s5-WOQ7AtOaRC#l&uiM7A^i1*5vGI_Fyq(6<_oKLAawIJh7LI2a? 
z-?_VI?m75xr;~?2jT$8I)EDe1%YIn4)PCq#CNUbt^N=K)=uxu!i#6$vl%Zc@AF^o< zE;Bt0ruETD`jTe-gj?_=X3qsDpo>}dgS&w+VC|rV^xe8e^xX~a zR&Ac<#yp_yH?n!*pzUI;+%U?sr4m9ITHhtb-=ePgvAjdsXA42`;uEX#dt5eUmgv$i z1q54a-o0M5-G#mn$Q(Lx6t@S`A8(hY6Qt6Tm@*8BGzZ?U(Jwc!1Rq=L;c?NdXe;+l zbch8r?Fk)%?{2P2rL*ABwaj-k9)} zOI`gPZMG<)tJ<6FclpS?y_Sypp;3%Y@rMMYH>FDWiT~3V#P%3v5-pX9)y8^S-L?V4 z{BUDcs5LK1@PYYHmYb=i!iw3;Zn4>FPbwXyRNiwaIFodf*@iQkg>l4OkjX9*<_P-vm6*G9%r+lOREBI|-0g<)a&JgNIH@j~!)}&@_V!6KEDK4MUQX@3dK9PyO*1dscv9(GA3nXO)2F;Weo0mE1TvjY zz&w!s)0nTyhaS_WulNbSEDh9n29o}Bc#+iSVMwVs4JM&zaUfy}`_QVTG@8jNuU>v&5 zt@nXviUvvsO}X+$6)y`o6`Ynt=+q!VPiWpMou%EeEs0HjrnlG!4U)>4x{P_a{kRKA zofhDu5^bAr5;?R{UV($pf z88Eh5?y{jOswt@R1VrSh|ICDbg};@#)zoGxMYpqlc9d35fJ*W*UPLMyAoaTN=Z#kN zRv*St`^>znR^j9fz}$XY1n&o_;w*tb)irL`J0XWx4u%%M**zSwaS}h8ejiiR)|>~} zb2iLG8w+g&sVo2pQ69~c8?(W_(F}6@8o(RzDHa!~1mSEL8NjCloXC|6NqFT#vZ41P z*AMr6G4;P2c-6)GUfs^Z`hoa?7?sZ#Lp|VW-8~T3mZ#wb(SI4bxCr$m>}6+7Piv`h zOP$@$w;t}@Oq8)|6$6o=Cq3PCA8)xWK z5+G8k9B6QSv)dPtX8s8NoK9lTYL1NsT zO>tYc>CLvX3Ru$L$!cTRn$|%Fa-W-KIeVU$O!5gCM*;cG1(3ac{&T5hFg5GsfAj<4?WF%s+i+(VF-o~s!bRSaKJY^ozdYkZOP)!+cZKt0q zZLo8l=ugUUNAGjdoi??x%Q~jhC^Ljcd7Zlh4)*K48c092tnB8u7EfJCSQq}~1V@gy&fOfLaJjYTnFVsQfHMt=Y|(EV4Uty>fT za<&bitVbhRSim^HKqap;28J<@~r>Liq8_U?|V$Q0^0Hm6X|EUhR-ONkT$t&jhfo5Q(R<|JK%mM3!ps09OP6icI%t+Q`nPrCrl8wA(e57Uo{oW#eF z8CfaDJ!hCGeQ`Y}>rXLhDQ|z`O5Yf5@Mx8MQ6&3MFV1iL@VN=ckdsKw($yW_X&SK6SlvPxq@yCM2M3 zG$8bTzfszTe9l~gO+f8I9It=1KLpY5KA}@r>ul2<)}c2fNI})8=L`-{5XT?)h<4>B zKP~oCt70Qow2Z$~ir2N88nnO<&i;}cMUqsG2KN#v<%AB{Mg-#9BCtbXmK~PG~!2A>bV-tio&fTZfaa`&QZFJaG zwKf1DsB3$Aema0ADpE}p6MnxzeW?O`G$2A9+Gz3$T= z_aAdA-pIidNEWmbuagIH5%^3aR~#O03!J{{{)~br_e$E6`O(jqOKBtXAlE;G^a+|{);OrV#Ud@qidP-qIHti~B*(MTn2P?+@!lwR2v zUt7=%9AH6HPPKu{Znf5|vSq+ozvltlOe4A}?P5&o;3xz-<2{creLkRpjd)X6JC{u0 zD{?U_)6thos%|!^gfuBW&BP0@ZbIQGl|pBHLe^z!8heOT+9@YZ-`KLhBg9T<9YSPgc6C=J** zJY~)mrhs2^C=p<@&Osf!Xdhq^kOm8riw>@UgMC6LyP-w{~8*=gDu%EV~K#7on7F0u-&KtTxtC`R=C3fxXgJZCmfQYnsBlYs%;pH6B+s@VYkf2SNPp;6T6P=kM74x!OIxFwV?`So;QS9NvIGEcpU> z)ifgjISW7+8P*pR6lg8YmSKzCC#(Xs9>V}xSW{6^(Fe*5zqkUG$qv*hFEeu%i=d#k z$}OHsXB4_2mv(MweooG(Y(T@Sm95DGPE7`Y8?ToOWRe4#Uu_5H)1#{r>G}AaslmLR z&!SU5AN2h~Ie`uh-MU@pBXU0L?@}wrwYzuUWY#$FcHm!g_M{9!SV|R=>>H=V{H3@U z0V+05%xrCa^0FAp7$xArbKm-7yp}Lwf%xG8XlqX2+wb-JAz#C~a?1Lpe-d!S|L^!9r%g#p{y5y1c14|n&H z{fF_^0JwI53{T+!(aU4+I)ev9!;h)M8f^rFl^LUYz>hBT;@_71ik;@gYPRCbK|pz3 zdK~X8m~0B(xcWe`nR>No;10^Dv}6pIW=@SeB4=A$#cJh~rEV&E@+XeDhPT=I?<}%@ z7lD?q1%KrdR9-n>+Ec*1tP%YBU%8zoo@a;#<=+Ovt=jYb&~ub%cK;kt-ZQIvAC#nP zDe@<*__M+MbT8}&-}kiYO`l(hrw9S=XNWMsPA`KZ1M?qRM)VP10fpM!Lwb7s%ZA{4 z0-){zz$P}mzid!v*@_6*IT z$iSE1nvlHOUzc+EWo2{mnTDr3ZUz?+wF1yul8E*mE^(cFEpL4~5UUI%0X3(@*`@tE z2D*XblPx!WkctE5so$9K4)Q4@7Bux?nQ|M%X@TyiwB_*p^?oF00`<}wo9!Cr~t@D zzZuN-6ktVgr3T!Y;eg9z#>V2wWb1+x4Vr9@8&vCu4|l%BEf>zwW<(P2mF5+PkNqjMTAvlhj^V z%d^}FoR0dV`Q_Vm{t-IN(@CZUG(%RenLiS@KtsXcoK?;9Bp2?7-`y z1T1j9G{~`msOx7cMJlE0I?vhC_hxa#;oV6fD)WHxLEl)BcU+W6%HVUtP{z=q%{Dsf zQ_O8bpO`qYh4(kwCC%P!w7|nO+9h3uV5>sM0gGsy#VyFM!O0ix7iUb8 z$@Q0k3d>7s?IF$wO~>$6Y?%|;!}=y0ULY(_ot>Qn!QA(*xa5Tv07jgP0T3g%G0v56 zG5~Ti1Q;P+YiVn{83ReL1yw2uQ-C}n1kk8!7XSMKp{p+d>EOt6wfv)HO_M=owSpL2 zvt&dwBw#vxdWv^c8SQ=+PgKir@H}b%6*R%*!<&OTqH%h+LuuX?dKkM&DGo|rh}C=aA_3F3Kza^)#rw1Ajy&ibfz7cdL>G| zeb8CPIyX00^p3Q$jhWOY5Tqh-3T+46!(ZgN!srgRJ;Yi8OQufE!6cKf{W0}^rOYp;8U!mj#!h zpP7E(E8VNK_vrgtdoH>s=~I8HCX7Vr7gX>c#Mgu@!E=sG5K$tYlU3y(xNaiZZ+{rq zCj4+rnlFcax2vGOhnh2(`C>IY&F2;!2VM)XKP{j-*KHM?ecujP?~iT~xdRk2AT&VQ zn&nFDgn&I80-BHwqqPlK11B{QCu|4&+?Vr!EvfMP^%^YeT;g7%qpV!C7HV6!FP;6r z(K$;mzRmW-8+e(Hr^5kFcEHlhHLfMqv}V&DWep3|@IpeR-rUX8VjcZh7RD~hE&R$+ 
z%9`el`U6>#TR0R16sWI!;)fp%)=qv|yFC_NdgLux=n*jmhr=4$M=UN!P>spB5Y=Ca zRTEQ{r^qBpije>Clg1W)2%CAheH><2@DfD)__ZBaaw6W>+t1-c@dzGEtz*}*7qN7? zQWL?KD3N9}3scN);)+bQ9<^?%cYTdw5G6cwHF@Da_w0_saLGU_f;5x-H<1oJl~dUG zgXtaNadjp~nzaEG8*=XiSE&2zONnExsN5TVR%Gli|E}|)#GDm=4AKhPy!0_Jm|#X~ zavcWCvGzj8>?;?(R0GtLd+}hL1qn;{tD{vd zB}^9z7RJQu26@v)_+fD&zY7rjxf3DQn1*WX>|OA1m;JAY1|mZVOR@UG`yW|kP?PY* z-^GT%)rEsZ+1ary^J%vD*TuK;M~CXq7kh`eEceS0fzN(r=IVywu_$nfElkc5AMfcG zgo}pX8+F_LQOE%;c3D-k7bqfwe6bB2s$;tH19|j89Y!cXO2Wy3MP?x*+^;mJ~+|{Xddpgoojy)So zn_o9L>(hGBJ*HP~v007pD=A5eQ_1kw-p)t6YJ!7d<>PEwb!WwypF>IYld3m0wHL?z zpISG3maFR}by)6Rea-5)Gs?#lly_c64r9HyJGmmKWgp6RzN+_d=V`4@?2BWCdiF`BjVkfe zXN|2|0viHR65EB)C<*L_Up5HCxitVcy_})A5fBPj8v_Ct6k{0xa}$8oyNR(N5SDQC3Jtdi4cdEbCaIk_S$5~J^NLmPAmDr% z^k1pdjXBk0lu1XFAQJA8K~@=2ErcF#4; zVae--g`HMx{7k1r&G1>PY9>wo@$vrPeNy%%5`LtwR-B>WctX_X_&UB;vwioex5kiX z=X3Ms5K(f}edmTbMlB3Tzg7MKC>|jMMAgR1KSaJccMIFs06_NABbd)uCD;Zav>31j zye_qsnVA6`geMUl3#-Qsgz)d4z}1AIyVmfJ_<#j_6AZrX>vY@U{rZKOa-oa%<;uI- zW)UePcOy;QT;ewhneT&H; zc6U|1I==dmh%aPBW}&*#O?jh~B=G{~ckY(6!a83SHRVH79g*xCZ^Kf%rMw)yvhuSY zI;4J|m{RYdu;H>KEV+wC|5o`Xi`7bD|eM#pAsH3cb85_dVw$ato?fbEX(zKAHU#o`)0eC~A;lkZ)V{yKp@EJ97u^ zofL!0+TgkE$3nD>?Ok0PP*51%0xPIoya0zyk}Hj3Oez!%F^)7C7$eX3a@|ks_4s4{ zC=(&)ZxOBZ6k4S_a5KSDjFBcq)e6^D@v-@+l2+!U%r2JW0_rx``9L|xu?O_TLp;Y7 zb6Fo=iPv@`EMjPD>}N9CuyT?Q13Yg?ed>i)1FCjwQ0`;+Esuaue6ME!&FF&217`?; z`F{KcfZ)m{wwp+%2m=}qULR5@gZ~~^0ATj}Xb$PG9Jg;keoD2FutB(c>mJ%0I<(@J zOH7gAQ=DiN$hP&Y^R_8=@aK*uFW{ff%YK)sF{#TvjFw|*b9=2&$EGtbSk`yO&H-Jm z3Jc;|-*Rm+b3Ya|tnqBmI#aLlff!5aff_MDf&f1?XoLC!w0V8iQlghO5T{2vpk(71 z1YBZ351ke3`YF@_wof`D5@azhH`>_>$7?_kGP7&^4FCfr*K;ppK%#sc&fXX6Y*xTC z3tMAMz$`v9Gp~Ftde3cN?(zLEWyYB$0U``!;SAN?e$(GG6cX9wcfJX~(i(g{q@b`r z(TXh2?0CMoRJ>I;e-^=49;b-|Igta*ObwuFA69Evm~t8`3%HejfCB00@(H)GiSt@P z4K+PIy+L3$xO~=&THzG{o2<~b$@e^jQXuDGr-RFP{YMAq7@E@J(zM6{76L#h0fa=R^{&z&FVs?ct%;fLse3Tk1 znj&5pAgPneLCAj>lx-)AyYK^IC}%+B!PO1kcnFYt80BSUSt`3RK_Gq?Ex=Pci3Rq! 
zc806NP!Qi*ts*ywLN&H;^-F~M;3E7{6B_!`0Y}OUb+b4@(B(KE4#K5 zi@=UWW$@x{Essh3>EIbNab%*$BeSU!rohATXZ0MUNPQDM7h|_T zGu{Fyi>vi!E%@TUSdnl{Y)EE~>O`#9Zn2~%a@2WQIc=mbgq~uyoiB~7&hJab2YM5n z7U@0`T&2fkUmV!!XI{R1HF`$#g}t5CiY$KE*3D5jVfH&RsDG6xlS2jZ{jLc;7X6d# zwH%?d!#8usY;b=2b$4G24Gsvzm_EPM6jUHL!eB6Y0U@CnbcdiGQ1A&Pgw#_k@!jCy z;H&V~7t_1PeT_5U#i_pVc=egoja8vOC&hd;0r{@`Z| z;1p%W?u5s_RV|RTs^qiu{85Ld?KDY2h^chztE(YL7}N(hAS}`> zcdOht?H5%GzLoV`4wBf!JhhLaDD``S8k>u%;+(T^!bi~-`JS&&b((Uc%Gw9w!+fBeR8M0*~uzroL#$9HUE=IY2AD|gmE#hr4# zIikW^!K!(RbN%?&Vpv#yq{z~;i&xvM z910%g<{i8ZrWJM%^m>+tQ+jnh#P~DE2>6mDHI}>mP$TmfWxtgI+&TFp_dlL%jA?N- z7PB^47sYAax#utoRZ2UoaM^RA>LxQI8@hZ0@KJmqQ;HtXXwcLDkp(#mU6T_G=J>JO zV^qi;i7)$ZM`1{vBGG`w4Yz*tt)&>Nd==FrA{@$YGIKJTF^8G04DuyXT7eYG>$f=x z%*VZ9>{nrrikp(CzpO+)b+#g_#zSS31_*S8K!5rXecJyW(QQf`CXZ*q=EO=^SZUc9 z!s|Z5Ly`mRE*R5~fRo^NW4=LX%jj|sTa8^Pu2?sZm%UnlT(BH0C1Q}~K*7g7t;u!V z!A-N_>!GjsX#+^^>b4N>U4M(;3NS4{A-ls&KM6Sc1S9}JC1 zrXEI>i$Mu96m!B+xoDm9Pz{FGw2H$&F@AIlSk-%eiEl2qZLrRWUzl61g@vx5fS?GnH@drPDQMZ>JQ9HKZ?v(l*^SYODxn&Zhta5cW5tVBi09@1N2}t z0X6`0d~DwyRpmU?sX+2n(<9E%Nps-A?6cUHkD3Z4x}Qdl zm|i$5z018`iJ!7SCX9))>`ol|R74AWE7|_Un(@Qb$yhqrBlH7im_LI}nM}PasQ8#{ z<%L9RbpLjTJ=nh~>zzU*`arK=y$RYHy_dCh-}tJRbUBjCQMcGvHxC%Hk&kMhk<#Md zdr)F&b>P)RV!5^};J*-hWZCuYbm{IA=@7&B|H%}N8s80LBCGvUvZ=b=4BLTxY{mnI zBs+H{Gq>2`XpY_izYn+NywWGfm{Tm(k_ve$okqL$77I=c>)N+ejsGpn-b0{;xqn;c z8|1o8oXsbW4%JP>)J(jJ+UeA2Q&v)Q5qLl+_Er#VHd)w2W{rko-C=3-T0x@sM!sUU z$Gf%bIZ3AF0n+*887%+4_gdYh=i+TmxLg?vmes@BEmHBKsw zSn1rE6g=;~j1%1XW{Z<|^J+@`9?h&UK=5Aqp8TzZ3Dv;2FpE6$)I0+?R9N}io6Ou9 zD=2!r9BE<}f8to-mK`2*aNddF`Dl8%9ZUgRjG6yJt)MS@J-Lra(4sa!DO?69Cb-5A zz7@7EZ7LZCMW*i2X-s#MH@DtA(RCkY3#~H`YD#5tO$cVx-^ORS z#<(E7jyS<=^cHB)5huXuus$3VU!tNB=U^#0%T|PE*LH&aSn08m37%UgN_kVAs4z$_hf#~HNXzlT+m!X}{c2h(r z4lD7%+`6+^^*%58QT3a2;cBe}jsdpbJI$iDkczy|?C>dv$* zF{GtZzD(RrA%lNPqqjfkmG>3~yoAsqTwkjhTF{%qzTQ zNU}#EA-w;YcG9!E995sEA5G{Xc<=gla-mUAAwS9AWS%B`EEo4`G>Q``rl3hgM^EEKrxJWSaC7bL zmxrs^X?GhhJ|jku8s>6M{GHzHZ+$*H(F6&$kaz75NBHs>=5eoyI@AFjcY}pNsZ5AVmFAXuz%w7;X z-!%KF5I85|smDYa(xUyRux2H%QF|g4LD@bgeSO1w&S()mRfgHxjMr^`^Q|db{4`NS zxVlpsrlF|V`57!KanY|=#~zd1Mv<{56wg}Ceo`Px;@j4sz0)7+_|{OB+k1`LlNkub zMOU8ddxj{W<>XF3l#0qNPtt1x1}m@$F1?S~+Q*PkqSp@kkWY+rs?_xSX*)X0$kA3hfuT$v)H#25F_8?{H*_HIiBWL0YM^>&R7Pb3x#}<=r zed`ObXpQ6Jo_`C}a#t1O1|+c`nw#k+?m2jFKkhYBkVxZvxHRgoLCkN+I{Cr-=S^yhNsceh|Hz@%hnlbVKldWg zN06P>gl>N%p}p7gP`taXNLQN()^Wv`g<<8`9xH(3M$T7M|Vy`sHOi7d(l%RF|ZmY!T-rr_asn~8> zUcA8Vgr$COe>Dt$26z(7V5_f6Ba_4e$&1)oK5yD{R{K;~@9d(!qjSOQ=IFe&Pq_wd zt9h@-^U^0u(w%kNDy-U8e#Ph}X{+-V?S2^P7)s<_!uE}ainz+B>|Q_G_$(R3z(|!X zQj48-0TWpw+0Of-HKH(@DWLorB=;uUieGefFukcQ&Hh4+s6OI27e6g2-7?oD-7 zEg@^ODmO;qhJBb?iec#gB3|dcneEPzExhW8)Z9KNxqb%o8?>ydr_|Qh9*hiTb`RH> z_J7)#Pzk{C`jbThRC4o+7%@-f3HQrVhrwBSHqB<^6u#@}H};+*x&z@}b=h;LKH57~ zODy$V%IJ4@v6n3A@oeE6vtJ@ea{E(dYJUF^HMdrG7^q>L)f0RDSif5Iwo8M0{qPxG zqw^+{V$uI11V*al~EOn7*7-B}AubQ~8!{&D!7`TN3>2bmrhP&yVs&u!Hv;`9 zp}%8|a+Q@}9I7jr|1lnV{gwCB(2I5SUw-KXv3>T&C)WYD)-ekgJU3NZmse5P8kh@? 
zOk_05_^N@KH<|F#Z?b-~VO<`mnvwE&@G53Mdc^G9%&kPoI%Z$)9kN!vIu)i-@dL@q zi>zdX?!rC4pUJedUq5L2ArMB$`onzJWTuf9i7FPD1zcA67EW--er=JM((!Lp4ka)J z#x?T#*doNtSN{aT=-Asw7*=B?FEY$B^79rz69qUDp|^}CjX)1(LG%s(4Q9pv=f7Nn z)AQx!#>fryf!r_ovT*FqGCq)_tDyuY9zFaRfTl)|pOIi)(Zg5)s6q7j{s>GYdb}k5 o_g?Hl|3vAF?Ek;b|2;QYb@U<$oTMA*{1QY#PF1!*+W6i70aEY?O8@`> literal 67670 zcmeEuWm}b97wtAcrMrF(}sIE&}#`xDNmgIwyy zJ66mZW6WiMjFbpG3=Rwg0)ZD36_kTOpz9%!XIHPEgP$bw=Cgu7UK)Rt{0M=Rg~8tG zLV?eO`l51@5Qqyo1oG7z0=WS{`nm;yIM72NJ30^uXFLRgX_H(p%MJbkT2EX=5c2r+ zE4}Gw6u1&1Ciqd|%go-qu?Cv*Bdd*T2o9=bu$Y8I$wE&GW#%8(!zN6qXAOF6)h1(D z`q2xnWo&y*GTGnJKFXpAZR5wTJL{(2HK8&!{2ELspS-kNAV0VsNSICul7~J6FZ}y; z_Vfk!;myC#kR_;gpZ|S0qWJ$G{$H*E;j`x%>CHvO_1D{3zP-0q>)AtruxT5c+=KmJ z;uFvXRdus?kTzDjcQI6# z2|PUZYBNQQq~1AjdOaiS_Rk<9kuX~qdCYPfwLHk)k=S=-(~5^Ws|exdU-eyZAnh45 z!HwGRZC#B{k3aO>4x**dI4gO>A^*MmE3ZFnwx+wfduLna3l3W@I!jxwt#o9{E(JzP zBl(k3YZTKlP&vU9bpzrunUe{|0o$<`3(fSygbZ zw_TSuSg7}?N)}uLo?DiHtQZnTZ}D%g574$mH7ridY1F(;tfqA?zwp++Hd$J|!dofN z%*P?n|K9@}zt)CF)%(oX9B6Ah&8UxiVyF*d9mmL?CuMK&iSy`$8huSxN>6r7_p%CK z3Ch9GO-H9J!6y8POfSNtQ=Rx|f!>l3*vzDIvOJH-Np2vrYJ&D}4DCL6r|HG5e9mdM zwHV7EBzv5E!8qm#g;r%8f?iuDiMroXl5Ap)I@@BLt`;?+>|>D5TxK|vPStm))^)~n zbz;p5VXFvjCHk7}yrMRT>ZZOOAe|FYd{j*uDjIN34$y`E`zkqDTOzhf$Li#F+#AkQ zKJDLHd6~vuY;3(Mi1#y}7QwD>PaF!XIZ(%$8rn$EVS7K($}rRZj(?I}P?K8bV~?`$ z2dKT(e#CWhW+!I-rN3+aweq7zw__O2yUY2hBJ>}kyPSISgk+=ilCz=$lPagY(&(h4 zb1Ox7nISa9mGIvc$F?+rijXO5PERH>clhD>|BfyEtv~EE_3p@il~|u?qV&&YzFA7a zadJ6Nt7NF8wvQ2(9*tG*(GjZgIgaU#C_G6-_4xL+S{_g@Q2svP$FmjwiS4}uZcY1E zvfsX;{==ja^F%XKB)@$}pDeLNkSh+0Dd;tR3ZYOyTGmRUiR-m8UoCXTMt-<$_Nzu15$?>`*?FiS;}|{qD)6a8GFb87 z{nYI~g~Jo$7;)%^NcnVm3gQEvv8M4k;=$N`Y{aCM^Sa3~4&JF!>FqgqgM3@j7lhcW z7=Q1*dSy%0rq-aQ&)5dfKTe9GpLaXN06UtDYaZ%<6f;2byOWb&noIN}ab3oyZrohH zMX>I_x1zTD1e+e3g*D?B6J(MH)OlA7v{u!6n7mnjAy-IpX8#=hM%7tp5xYfWPGeEY z?-=gyjd#GFpS8YSPRNYPLFT+4%r}4&P)N>_M1DN4tLsuv@G;N3>ABeC-}22(reyc} z2LJayZg5}aY>a8~o9cTwn5yS%MI`4u&MvklhtvSTu1 z=ipbq*w6dQC*Pw2SPu_uRehVEy=0e1;siO|llz%@-j;A^nPv>K|y$M-@A_WXAXM!OGlzQvc?y;|e6@BF|x zHnEu69fSRRS!Hj`9x85Zpzv2|`pkQ6NdDdd@gt=BA*Af2EGS&+@_|6PVh5V4j*!wo z-2`hoZdis#XE37iMeV<bB?X zqP$p4ZDLp}O#NT5n?khBAa!tbx>>vt=uytG77x?ZW5^1B@#F*_OUguyG!Q)`9?zmFaD6P9H9)I{>!!FU`ttCoM8eZOC&6eULO1lIyy|?`eE9= zP+&JRw;}rq_Xc1iZFU%(*qAF{Ps;AC8~mZ&oN#zOV>|h8+FV3*dnLWTeyM(%F1Tp# zt_SH6M=wMxg|<4+H!zH|F$>2E3iuAcx;vc8&vE^|=@-90?Cm=LwrbG_AFVWFlplL< zjYHByqz5?VT9ilOLRy(m3qISv`WLom3F-E-1`h^#l%398&dXW~1uBtSS+uG_=)-wt z=fZ?#ltRTBp#HCZQh~D7)mC8PO?3Lzto$(}^AIkIUUKMVA@9bul`nY)P7@B+NKKoK zz`w`4g2z{>Ul#BB=AOMn`2!Q~6v8BuNKQ6DG$Yz!opaTue^{m2`j+T`=I`)zUbOo( zfn~aoUii7im8Bd7L)+#Mo-U@IL||QnnoIe>e!bnFKX@QDr}uBd?Cm})W(Ou(wtBiH zzbhOj6dWf(?jp##cc&I8f;cnyg$w<7$nyO(=ko9R z*X=$EUUF_Ow)p`=aJ!;Bg8C*U7Fq$0H@{cj=31f@-=`NpXCwT3b+}*c(fCXL-xH^} zE)@{xYV$%4eg@_pBb0oi4Sz<2(fr&dKdyf^`dUEa3I^XRbo^&*U>f=k6ty#{&NIeD zl>uT6k$zi4v>fM_YP`#TeiQCpdvvJAnS}Y)7L|{>5^nP2OW%nCBUID$Es_b_9$#IY zOS3*3p|i9VUOEvi7_r+-;c4+--j z?Wd%)g}y|`eBrS5#2hN2pCa1f zj}lG7H77FCGr`F?tzAYwQ2P6Zbo7sl{!{}-#`E2@o$ptVx}5XGvl0WBT|ipHaT{|RFO2y`~{I63tct~ z&u)Q%bGQEA5kNka|6rg)dksY`MT15WA$yUujbk1P2ODyv+VgHDC1%gIJG-g?U7F$V z<>7>;!LXinwW@`M!^+UAL^vb^EE5{4jqNtTaS%Roy*2oIAt99`_xvc)K-3xdmci+De5dWa221~{ z^DU63MYo4NVt)bAQmZf;fP^bWL`2m7x2{N-e<-gX;B7I!fPndHn!JSDtBtPr7q>5-&_wSrmN`R5IuDK~axb(8I64);KCl~VfRd9?Q13N_ z+rZK?WK6;=J11x5zv|(GhG^VLVngD$lNAFWDF$-UyhfBg&DDNOlJgLO@v7(O?GlBg z;|gwu4qj-I0xT9J=bHKh*>2F8^1%U_VJhc~UmPWCN^uccAWI3V6-GoXQEKq;@c$co zW?s_>=Zt9buMXZ`Q8Rt2%P-WR9t8_C@PR|9{5n1NG|%wND+c6_jSlrso7uL(j7n$d z1xo5e)<_qxk2PLEc%3&>oWc{ueu_nr>)t&jIFLPkN=0f{+WBoWF~GV~B@1og>6_u? 
zZL%B&8=H`Y5Ij1l7(tcB({c6I??=BinGS=MZZ&OiU6m zQ!6X0f6w@|u->rQ5ugm`rI<}*{W++D8Fq4|SCF3}Nh&>4pfVzQ&)Nh(kHSjti!I61 zNWN7V4-tb$wA^3dTK(r2Afb|P9Fuc?47@`~;zqTwB!;hifqoT@@=%m_)2HUW+aKp8 z4_2yO63(-^^x0F0nS^@zk`O#ZOib*5+fB?!V~Z|HsIQ=Jxvi5GK15Z@++iNpt#@`kI>K`IQNR_;O18*9w1a0VEOGh zo=4VDVu2`j9wv$iN(+oQ?1Xq!S4y^JlZW3?J|fbXDWdgu+;mKT9w!I@BGpQ%99s+V*-GS4%!Gvk(_1k|(+YW+Kyfe^5v2fIw!HYwG!kvSw{ax^3oDE#J&Se0@ z0jyFIr;Q*E3*2h<-Hc56v&`~E34jcSUUH?9`_AZ1=q@AI#10y9A40y`kxM3eFG+B! zs;KDvHQ#8sk2SyczkcsN6Xa7#;KM}|PTCnpLd(A%N_G16FiAZdiit<(qBiwtAR|$f zLVK-P*L8xp-`F_L%r8RhE_<1_kETz&9m3^Rh_!C7gp$OTZ{Mcj2)q6^>tY<75ADd9!Po)l6N^Gxl3 z6?inp$MMZ|Za+Oz!9At7RVJJ5jOVrb;{Ee2#70O~7FmudFD5xTSp8}BAhsDV4Y(yx zvENZ$53c?0sk@LBPW<*c^3xkjJ&A|fX0G$!MF<~XnZLJ^+&u+*H%U%Axh2vCmUcf2 z?m7-j&7Y@hm|h}l%arJ?8|2rUpVDc-@=|zjwn&ogp0e`STAAl+TSEzYI??rolN}#H+|Q*X8j$|K;~kzyh#2**q)-&E@X_ zt1q}2+8M3uVcP$~HZaV{uY8B96S_{&x{3%eE zVO3aI_~t^(#Do&&H2|cPo3=UKhTQ=1@u1Yz8)isGHv>T8u|J2Iv+Qbj*fhh~%rmNP zejjytZs}8Q>tC;&Nt=cu0P3dW27>s#?-YcP(gYtLsB$JMG*r~@zrlM*(J?V`v9ZrL z`*sUTn)$Jx*%IyAzvi^ilgN!i7FygcX4uex`?}GoW^%d8jf&1U%_7!kHP`SC3FZrT zsJXiN1%TOa>3`OokGzg&VjGN<4#=?lk&ziz+tq5pUQrIR>Ci`r4h1$gwt?{WtX33aS z9o97j@}Ylw^yh6FIsz8`uhCJ|j+gB|dvY4?B5Ut70k}d)b+^b}p_r{nA7+^*xct!1$f< z!0L!?&*=({3s;N-NG}wUicjyEY7an-$T@xrIKG?^NVwDSiqIR66k3&1$kQA8cabX9 z1$X3M;XxDS7Y@N#o_j^R#>^B-s)ddT%h5$A%OZxOR-E*DFlp)yd;vR8I<*5$eQvgA z`ics_Hw5!_nK(HF>CfC;FlERk9zH(97?=q1?nF`465Gtde8YeH-{`#u$QD2}bn>K* z!;12(SXMq?cP%4jm7&`nd;_P{^Uhhx;O$fKvqim%X4#t5Zu2p-$+_0ix~9)Qu%_Q5 zIw0Z&a&#J#l5{1EW@#X!J&BD(<7 zC6r>5Ij|Am$+Pf@M@Pz<5vL$<#uY2iKG(WH{Y$&#-l^A5Ky4mTG1&XFqBv^}^UZG9 znWwQR>jMKj`rgpMbzNxF?vKpD!9fWE0)1v)p6S+b2FjgQgZnLOKFnQPSuOoL5`{G> zyVN(1d-EK06a3@aN_Qq3AV+h)+{el}L$-&#(3mB9Kragz+mzSadTU*T`(tXh9Uwg- zR;K5!CeI-@^Pu9yQuW0{L-Sq%^{aejWF+Zrlaji6Q2z5r8!9@B1Ct4O*$f26y{upr z+=1Hm#6Z3TaW2tqxou}S8Ag-}eKhwDp0t^F{BhC&%U*)60CJEmVmHCVTk1=Uz9oH{ zjg5k;>U6E`YPYe^#>U2fvoX0oSqD?WWio>M1k>1_T}#i*@|Ck?_>F?jajUfcF)DV= z3j|QgFmQ_1tGGywd&b6Nb~9@D83!x_Kr2}`;q*8f#8jfj2y3q@kI{7 z@I73u`uakTfwX{iw)ps65yzjS{kY7^h=;*6mVH9cTI5;?&Pw%LO zg2fQMfVCP*<@x&#|6rXRZ+=h*#&iv;Fs^mk*lQ_$vA-3vN$@Q)E2+sI_Qodd3ye{ zNo3XY2SO@z)Mr+Zt+U?JizMrG0D|*Ol>t*k+R5B}w#^S-RdwjS#uq#S?d3z|<%&n4 zh8EAo`}^C2!`Vwh`lFr<0T$(A=JoX|J`b9`ttG)p;DDF}4sj>{xia9TuezKcE3kb{ zUy|PfLaj9m1M(G~)D`1Lye|TNBB&XkEPpkX*ZtjbNnK$f4I0LmfETvQJ*`<|5#AsD z-`z@ul#U}rV^T<$g|fQL{R*KMnwp+=+Z-I#7wL06=yz}s8yypkjoTFvpPcBuJgTj^ zyfikJs)>!ew0EkgXu<*8O&Ht=nXUD*VP^ z3iWhJ-K4fBz9Lk!o~y=}-3N!keKixbd~zeDshR24!Ez^F*Nd~Ovdox;JhOazL2oov zL~%B27mw}CbRXIn$|x>TS9kdYh1Sc8AkvlP6!yv& z65Dr(NmyitsIr&H4F5SY%FByLyBKPN%in?M3QqJf5_4<)jggTVFu6=JjFKF*WJ(;P4Tei7g~ zjSbwU^p|pBC~Q!{p+CC}yG*L4Ur%Q|)lHy7^_$rXY6~hVs>LcMNr9B(^rOR5cn61f zcXu-d`B?8q4ksN06TDQtcaOLjG{Q{|6LgHxGYZ}X49omBu73g%Q9+>_1nrz4xCL-| zKHQ~${NCN=Gh1UVattAj}vle=z)`k#nRHo)f%qF2S`kh<-x=2?!Y3GqySr? 
z*rJN^240(a^5twgTH4sFno@;psBbdE3*!rarssyWv}e^$3+rygZ8NyXy#69bYPNkpj4-^&@IK` zth4j(4bwtU*npE*DJUkHUn|NZ#*;ksTBYcaor>}RY*i2ydr*I8Cz}5V)D7icx(y{m zWmax9`&AV)NZ#oDd{JSc1TS?mv+1AFpYgN^5Xj!ig6pB!9rfW9+Wm%44{9b+vQ!;OnLDEog38iquI_7+#X=T(D3zN zj)|WvxKK_`j=!r$OG*mC3>K(Sc3evz&UbdHcVkTYBGV153hUU_G2Zru7&qpJOi%A= zyU-X>tF|=nFAE6wQhrXj(|%Uw9-VPU>g##1gl%qym6l+Yr=qO0X3khBk8;SZ47st&&Ns8BeK+_2J>c-ffqKnK`{0K$P5uA@N*od%*ckRiMZx zV=K03&-Pe|K}kv)a(P7982uzn%g>C?$%rj3ipeO6Gu4YXHA;T^Hv8|}DDvqq2Jsm` zlZuNH@{40L3rUxG;A3JA9?NPPLeA>yfp~Zkl+-Q`H(k|6-c!U~L~PG;XN(HlQt5@Z ziC+qc#er0FE`GUt#I^PmTB%RXo>@y`T88)EK~~lcleycS{WE*r zDK|Zx5J_hj#6BH-J3G4o&)ufuPTc=YPI8T^5|rF3C_(W)rh#sVPEw^tj@JhJA-FXm zA*7LrIi|UWTf2s;@$2kU>q8EMX`6o-zyUS{}#d#^;@${Ef)m!Gq|%k%L0=PQ769yk%m+$WAY`oGqI{*Uwwx6PMIF1%65!$f%f1^v5fTe>8rcei_Px2v>^8V%xjYENZS#V1$-;3H$I^EKQ%Ls zy&sR2g&x2kSTmoRpHM2-bx*^Qp_C}(o0QrIVdj{xtT7#W>*pdy+ z60kn4VrCh)z=c)%;r(bb9lMd2--G~vNTW_}LLN6>?NH_X))AS)K=w3g{cTwLk>V{b z`3$EZZj!H=*~_KUh!#si%B(dm`DyE%z_h~rmcqnE8i&VeAge3h8^pL9M#qP$0v}mn zBmu7EI@so`@gM@@1l++hi)B>&z7hP+E#wG_QsYxaj6O}ZN z9H5CN0#S}2M32DG03|;IJvY65RMiha=!sZl8s_l$# z=uoYvP;n)n3T%nV7ik=5WEbDWlE3{=KB}OXY(rvc(jg0ha6D&GIF6eKp)chp)Rh{P zj-Z*@&TxhR$Sr+w@Bb@I1|0XMK90ue$jhVlJ|&iH&3*VW>ImA_s_Ae;lUMI#S~zCA z5@5Zbw~m{4cZ=>0a1c=i!X#|uMDp#$HxCbwZ&ycrEV*FrAD%RSM;NC)Rx{4kuIe9E zWb(5AShVyb4W$xNtj-~dxe{I_1wjinf>!GCj(|^PZtn;hDPr`Ks};ia%08 zJB@g7f_h_&GVwON%KQo~er5>Jx8VfFm|v_%yKlc+^&&a7T4?ZnuMCOY+1cT}I}$ii zT?Whu7Z;bTCy2L9Kr7;Gen}w_|Mkh~K1-p$&GJA_!8Pvr@?{yxlIFyR56-JZ(jd+`~F z?E63%!p}3t$W zBZ!0Lo0|tjjxUBF_JSt110?$;b&RhNeSLi}0f4i6`}&^x>~tyy2M0m09X;l`0^m6F zsmm*QodQEfx?8V}tGc^FLm7mTTB|BHD}PT?(ZERJTV;YEiv;q{6=^1k$q_3MyBs-E znH3X+b^=lJOrUyQi5a4Y;o6;TW5I}kh_8g~vY|Tzh6(-q9gv0q!w!r;+RgZa^_9YL zl;UYx5!fKy9|l;3dU*L(Z*vU6t`S(S;puaN?kkd1zEgqu)uO#jxL}YvNOACSyz~8j zzFLMegs9ygcM$|zsD3$G`iKUgyvo7N`7X0NVq#+Af71x>ynsgM%a7^apiv31z%(R0 z@s0|tza`yOvOE8jUmiInzt4QCfmfG`{=hLX{F_x#bM8$Hwo84Hprx*Gs}cUX`_|eJ zJ%jplAJlP@RH>Yxu#LGfY_sh%QaUCkyQ`z+wTf1Nq0^r#7LO!W%gpTpn~p$q%J(2h zu7N(09j8G+L++)i)gE?QMG6&2OG_hjQTH>mE{Vuooq)D6_%k4&!JnxgMu~s!Ge6kz z$qhu!*@C8uTU#AQ3%xB`Y6Hx&Y9JgJhibV5nZlo^HPwZL>Z@Z_bQ3c9 zV`Dgm%3E91ViN-5VWX^ZaUFPFh4-GL^U#jL$e~e)jljxXeEPua)4A|HAIN|9?YRA8 z1MCvJGFEQJY=gp(aEQ3LI55C?2K`T;KK++PTP7xyZZ?+EI2~Znp}_#K{7lQ-$jZCN zodIKwM6IkWAP^gqt&)VE|8A3W>X?PvZRE2>A;2i@dBV*NN#RQjp><&upsHw3bVV-= z=I}M4u?9P1q?Ol3j-sg z3(`0L_NgASF>zNS({1V=iutGeH~1?wKvhim{HCEE27?BP0$*HSetE#ihq#*2z7qAc zF|HiHB^+;d8&R&E+j)Xs1?n3b&MU`n5|wuTtbbsY~B`Ul@afaD+G_t+})HfUxl7sb}B+SSqC-e6mvsp@Cxht$&K-ukKbA6rB3ZnbVp*A|S zlF$uZwf^QyAePFwaf24+@d0VEMvCZ|0PZdlp^tR46F92NL9vFqGCkuM5+{9kob#!$2fIZf0_n*^9)&b}jPhMW0$z+)D zrkOxGt#h#&82CL9pn(n`-^k3_5zly451{1FHE{?2jvaY%v1l>EZU;vcY!6WW5tF&q z8Deb((5Ovc?63K`pY7AzqPFRvgSM5I(k643gA6(Dv!gPAXEa&(=qCLFoGEB;Z>I;& zZDT_ph)CFcaBz^5lS3h$8XXg}zOg}0L7`m>{M`qZ4eXz^^iW@4gX^W0fWQkFG9IvM z)bHK_jf0ViDJm-Jy>_$P!CamBWbu5ndqZw+Zh3imadB})1*%VEt$-iG$j$wi%|U#$ zgrKl3z*K{(0KA5omDSO~A%ol^3>G$ZO|Fsd-A(_IgbpLMtv_pyM@ExFSi9K-@L2!g zCdRY#aKwlE^fhNXQyd&(apknwPabQ2*-(KEtV}A?)X(xzacYwEE6ZFyXiG_JInGt& ztaCIMeM|(ZY0Jfo`C0|Pxw$zS!BUzxzpkO-`gj3|kOGs%n&zga*C2&ApHHZhh=ihf z0{l$iMz7xqCoex*Wj+aH-JI-fBH^`4KmfRk{^h|u;Fkyb`piMZ(7o*+9gS)}>;h6ibyZbHw}rg2a$HJ^``y(spr2h` zUDegpK+}cCNe{(g^Xyy6s z+qZ#$144P%56!~DvN@PCTWy(CR>pu|vwXDNW@uo5L8F3%j;^SoLE{r4Fx))8kXaU;DCDR2F1^5LA!}`kFVtpQG%4r9LFiLOiQAk#`@7iU!M1PZv zU__weOwQhY!CghF;+0^!TfHesnO7-#A=foOp8{Ob?dqt%U!s*q^3x~AIA&N_SYu;j zYwJTm88=#7esXbfk&zjWBoim~kOkQ2R}Ar7gVTF%?zx_xZD2*@p194YgIsiUbRwa6 zzf}eHh>%`e&DU3%j*@e5)MW_xp}a*&bH7Lk3hDzN=u0GjbcGR&y3m585QVK+2^&WIzO3(&_!!1u!Y}=6J}!87hGj+FmS$wwd7aPx|B&+zyrBfSS&dm@cht|G<*j7eC=C-au0YV|r 
z*){Uwc$`n#`}>0@dP}sLa2&{2e}8|K#Af}l79~C9jM#j+;0kDE&xhoi*+M2fOrS|f z9INH-yzkp9C{&A=be-XGSCq%pJ zfrdJ;w2$P@CDC6f2)d}vgE=C}nHQKryCi5{0lNr@OrY7W#z0@00ckF{9m&+x6z+nP zh9+1-8Wk0lq4Qz$aMaTB<-|hdB9i^ecM?geFYik4Ql8P6gmB6G-JmGgu6$IM#`v|47#4F!Tch%qZ!RF)rjJ z^esq86_qq+3CGQtpd9Ra%$}8WM)IdEzkv?a#oG?q8Ah_Gc$9JR7&r6aS~gQv)$@n z1qB8IUB8Ebm{b+$pW5XHZ>(b}fR%#oUfD(%#w6b@C@OikYItL%Kbpz;?SeZN)B@Uw zZ*;8H6Mdl_6wOPtjW2T8ljCx?xCHuZ!D$~vFs-yi`AV^p9BgpX?Ck)Te;nRY)a~UV zQ@M0(b91u{r9tCu`(WHJD{ar7!NDBY%`31~i*WcN+>+mXr1cxlAn5&7o;W5<8IM~?k_ve#f;38<02`vNLE@Fs`0qY zcWRFY%MF7UV}Axo`FlQmx6K9ZSonJ6%x@)`Do0?IcGi`A)$8k>J?ODT13-*cRaHG{ zF<%5nI};OeSQT`Dofyx29u2xs8Py40T0bbfv= z9!Ix+V?iERP--^*P73d!J@tEcozE+ZVl)g4GjsFP^XH}}CN}pr@5onML7U!~IW4xE zb_%BX$9qS$`LpPWm9gv}{+w|F1k>7!YW39^Lb~#3YqJDAdUu13YK~$tCqGi+lB~ib zLH{QhJ}(RC7!XRsUZL;>3XL_8|IGPp<^$2GyC0MB84bDzlGs$`o*Ggkx6dn0N9h-U z%NJ`l2oqN5+-V?u`)~xp0@LTQTKk)`QE64)6R79kT3Q}KU2GTu%=)*iUDYp7uq400seAJlwsqNh%hIZ(o_U$uVbKVSSNpWM{H6f`F zlCS)e#7WohP%d&b+4aV^nwT6*>FTC@c|*cAybzg^!bs^Uw9_uYU<0$7nNxuU6eXC5 z)Nw2*x?Lgc)KpZaOTWIYdG@~+2RBu8EAC~V_Wzl z;8Gl&l%#HKOtI3sa4fwbQsw8Ki}SrC&zDGn>%bax7Z=5B7v_n{hX3S5QdLLI({6F3 zSCG$`L6ehKp6Uqn34xM6ITILb=cQAfAnSyCnoFW`W)W!k1MP6C-d4^7nvIVyFcp=Q zny)vKf(xT{LE-WqU;vWQOHeh7UFSA<+;h$gy=BmAC)vQTw70jXQ7Qczfu_~#?Ce}- z*B=)b2bu!KHibavxb9|s_uZY_9HVZybDfM@LmY2PuusvxSYJ zGEvK(rTpFt`R*$$0!~q3dg_(~l19l8x}0JOGd4H}tNcthzTuMJ z+kJa8eWHOUkCwM#y>NT)bTOAblvt3mLfet1lTA>#lAse5{05vJ0=@p@+HQ?!|U2ceKIVo0B3{p^g~xB^vVp@clin)|&qVA2$XSlRT)LuTei z8t>YlAEOGeJWku`(}pu-9OuBX13v(wd?%T!?#WsY85x;BtdPmRS+>VCI76p@kc|2w5OKp{DrTa0 zOrt4KKPXH9HO*^UOY@W+&!33Y28kGuGRn1GNF zGWo#k(@pWssnOx#^oHs+={b*+wSY=|r#`8}mPcM(XlEfIsB(QXB_$2 zs^?&;;q6AMbQ&+2UE+S6wg;8Zi}z$?J_oH*@$?yWbqO}CXWKv1R?ws2K_bH@Sc`kF z{g%&zyVyt6*Vh-6oBdieb9L?&R`Z;!o+Eqn4NicLs+B+s4i28Gpn&YIv}QTKqFOkB z<5$3PLsmRZ5;~qPCoue@96ND)nWx;nF*QrL5jEvvAa0jWVi=9qFkKe8lJ+SLGf8MG zFbxYFbt!ER=rL*Jx)bb>o@bm*Ep5NGm)WigxBHZAq&0@Ej1MwPM3ps_q3ol%=f&2I*NoI6_El6E=RKz}Br|Y} zEU={H{dbZ( z&wP*&X_ ze|<$dYlXWdk9K!ZZMpY2@0ouTpZK_^<>L#^c53JigfnLxuhp&zsskyF;ngGfKM}g~ zzuE%iMe*or4@v|;G_A?RaB^~Lfk6H|=6bJU=!fwh;1q^6hr;+CYAY(DrkG=g_#W}B zlL{GaR|JJS9bH`;fYIzM8j}bIi;9U6c`gGG?7MO)si}!y(5wv(5rG28Yqzy%yJ;^3 zmIr}DzHx-3E1vOu!xY!_!|`wMKNIlp5_J4w9?N5;t8DA^L9tSep2gf?=d@Xw^c)%G zuMnMIkYZwnA3c*O6`1nw3~C>i;!{y=bmuk%EjMAcFj)4ho$$h}k6LAVP@85z&lJgF z4|~fK8M*wkpg`P-L0?bLAFB7{AEfHa_*mIK5UFc(m&YH4Yy zsm)AR7=QQj0@h@$tIOc9qp;9&NvB%wefO&vz{WP6rFwA8is#GG*P|*}P`9n$E`bcK zKOQIU;&S8t2GvIM#t3xC{+9|K{b<)oo9oJg_e+hCiv^Bjv zW=@W8#0%^vcHN>i?&bb$O_+qcKbb~jn%dO>+oHXb6Y5rw3}t0?wUN!+*y?J>4<9}d zcs=omS65$PVS@l!c66BS)aw0a&6pdscz_~gQWTGrUr8yppY|^9sn>JQ{RmK0C1#nFY)&=s#TTE763Y0?XMj(QUUR zmw=LEjY;0Z{=9>#^QAK?4kC)ccO=EKiPcxV>&ojFkZ>|Im^c0VH$Q>j-vi*LQK%v& z_9g*^!6TeHV{zf;#$NdZ#4m%Jau9lj7q*%KG3f8_516(Gr;MT^T2fL25aHbd=>hP0 z6SmXYDXjN1S3E$o!4cWrnZy1y%i#+L2ZvaYVwtGwoxTzCP6u$H>P@ zkv>z3L^4DQ`rH+Y|LxniSBP)9T`%^)O!V~ifyH0! 
zO;>vl9*k6<;f>rAqwA3sslCG;>w7MhQ`1R{sfR5WhNt; z;FLp6O^x^ud5|aoM`i#{@b>@y{mgbbEpY~y>M}O*_fkO)-Fob`av$A@a_sHxh3uMA z0=cmN#g+-S3#Wn~2?0-UowCPq1#KOV(quB7z5t)~tzhfx^_-e<)g`$#@ zp-5FVCgVZUI@0O3win0R*7pG31$!1n$HhH0M*Sv!%CH$3z^sbw&p>#fW@H2itA%tT z1q@P2S($`xZ+m;&c}zy^80IHnvzAYY^1X&1VV`fG>l)rIHUB>}y#-X3>-N2^Y*b1RB}Gu_ z4pF)l=`LyM?rsrD6-2s0y1N_ckOt|L?r#3;J-_eU9tz!mnHCIf1*1v3d$N#Rd^=2HG8c%xXV3cT73r;>Yi~4*)9&m)*Y%Cf&8|EJ zy{;C(0r+ zf#p@2N`Xd~IWgF)^8fWj;p5>AjWojrNDt$g<<3wHMln#BDP5%C7rnl`e3W#1wfen* z^vC+Qr*AeMI#vP1=rczH&!6OW?FH!BS`F?o)?V<#Vuo*j!_V^npG$p*hauV7Wf*1- zdlX?8X#tSN^ZI6cygind(Y{EyOhlxoq9mExE%X$vppiVBkIqr5Xj>hD`)kg4RTz9J z0H@~TEI#%SWc5mp-0I?$KZ6;NkOE#m05ZlxT_dq;0L}nQdxlOdrOpPWPArV2R~9DW zA1Y&W0jtUudQlO2-%-vFyoZ2G6cn!<#&F*cvg>1QmWIHYctMf~>>wWWwYOW&bB!+> z*?v%hyP8zCAX_2xzyWz>d3oR(FF!k5ESU`EZ>jIUliFQU)6jsJ`z3U9bYvv$*RPgI z_a6_9@>t8Uw$PW_zh44g-z7Qid39EtnMtL<`cO(zvg$aGY|kA94Xvf{X^L!T=X1eK z$(YAap1d{mpZ0PEVu32jT_`zjVqyZmG~3(~n%U{H|NarW|5iMXGN5IY93DyT%!j5G zdwX!ub}LJMQUjk)lKm0NJ&2yTv^GBI;SPH03;ce$0LpRq3W?P=pgBMu;bt4*Wf%Y# z`Xnt2a)LIfa@V-)`wecRMi*H}u8d3`&=Mf$M*EpSu+M}DQFUwdOEGt0St6@cK~9~k z!k=0+wE=Tp0{-y4Kus5aRcH$B*X)Qj7&-KVl;z}_djySjb%mVPhlYkw?B7XC2mjp$ zcNsyQKFC)X1>%M}d}k#v4ADjZ@78c2S@3YBkF2p892@OW*tLm94G#}jA0+1%6i~|k zfD?4dzYp+|E-Ya>n*EP+Qo(}I#G)=$`-T32W{U#t&Ak;aIu|BWgE_zd%=UwnhJaw-M)S zM(CFX6cjj=e>%VTyPB~`l}nUn{5ZqpNqUG~!L6k2lRTKA1||vuxVPa;c4*Kc2R&Gv??LXD1L`ds-k)EE;JH)`k61w*S0XGLe zq53d%y}YbnQ;7oX$UBX{VJ9zQ7R#mBMFX3iB*3H{)PItUY4dVUtstzd)t5349RV!X&u4C%w@|4udBq!rcwv&h7hM7OyMGlkUuU z4k3C!t$Q!_@O|Nns(>5wurL-TXu8}&b;*T=w^!$O%DCP;REKa}1tb)<%~tJ=)o&_3 zphu=*J=4u@?XF@L7R0P3VYNw#ptwyuMoO7~w}2gHZGmba&kBY@V3;5Us;a7fW|~8y z!&om@ly_M6Vf_mO=yXT0?gq2LeKIMChKD^_U^B$AIRJ*AK_|aX+I+COeL*MpU5>$E z`Wg@*urkX2+ZdVx15v( zKP+f!O6wM64JE|5mfLszdl@oGe*zbZ&BhsPBv@Py^i6J*6tuN(7yO_9eC7r~sg7x; z5E|Sv2x=ss*y!XLk#egqR0nc(Me7e(iSYkG7mw?3x=tG;8&LJg=A{_%=aav^C^H`o z#2EdKBcRQHjXqo!^2*jo!F4Ud_*9p;*dndL9QJ zMi^7^2Bt`*d2_S{_5=z2xpy5E7ssvJigI98@yzvbHv;hHSNG_dm{t{L))MEqocBH= zNgRR|hYQTFHB9v&UzJ|sHpp98FwB@)&3Rs0;5GcXRR9#=oGK?cSXxTz;goo2e7vNy z-}u6B{|SDfy1Kfxg5S`=rk(pg?7H^^>ZpL=U_%QU=#Ac+ovOY{i0P`@0~jl)@{P|& zTuOYh%VHMBR$cw}MIi}vPyUOzNg{AOXFup1ERC3zgtRX}Ag4qxLY@ko?QjcZq{Ln{)2iJ)pdEUO9%SeJ?d=G0Iwd=FO*lrk9bncb`Q!;a%FWeb ze^=uKLh6KPw2s~cbe5Q>etrk5@kcRR~R4afg@$p&!c>iX`ufacUYHa*t^3j*; z4%KD24(hf4@<%0DPE~v_7u*1@Z@r;bo05sC|As0N^!^!#qLRfaJf%hShYu;(W)XL5 zcsNcZ{xbjVIhF^mwVmBtjn3JsAFlo-;6r?UQ;cHsl1wB))Jfy4rF3Njp*G<>y*d10 z074#@H`@K?9Jtsb7iyHTYsNw^*bq0_{gc7>n1k(*-~o5x9~MI}-N1i=yA5u*|LL1R#y1-!F`=YMkl1`LGmg5MhPZLm^nD2=X^EG zjIq66A!fN()G82EFhe-}OZY2_z5eyZvAyx6i*(0(Q%1Gr}k+C_h@X-+;t*d%f!wXy6Nf#ZyawCgb672G3VIFQa6G zmrpqrS?+`0lrwXKev|266q+J&q?O%vjsj&O5*o&4!VyD%oT4hM?&*Xur!Qulb_p*& zd+6QX12C->zCqJCZZ|yLi@m~alzsybVEXu_GTgO!ax(U`oY!FkCq@?Phe+@Z zc_dSv#fDK8_4lOY=>!rNOVjz2ZC-i=^Nzl*eC9^<4!T)$3D%$z{6clQ^4)rHk9t_% zFiw=prrBo~-kQT{-NDfVH*^7Cx&2{vIfTYtK?K0b+!+J&1Qic2H5=>VM`?FmX>&b*2>kHJt|p*>ypZ@LD64lov02C`AY^r=wus`q!<%#ur$(8SBh5AnIm zT6UEW1k9l&LF9MA8X z)>=G1=mFF*;vtqgR~&_#@Y5-xZnK>XD+1Aos}21KwmHIf~;CvcSyYx6Wtym!m4$V&@m# zja4-@LuzP_?fUko7gm?JpzOwfJ*~~oripp%aoES6$It=?J*r}IdAU-K=P1}r2VIjp zdc3Nutrw71H}nh*-)jq|@iNl8$I4V=N(VPPiT-}16x5IB!5!aFGJWkrH%jTcl7VP| zD<2-ybZZe3dOOYIZ>X7ttUT9nAJG&G!onTIl(&T#)3`GC+wXIjE3Mj0jq(S1Sq=G# z5Am~V*4N?dfDliJeHy`%3p=augEk-$wW2N%9TNE<7w4TL5A(!>Q@#K)N=iS_geQ1r z-Yz20_~bNrVOH!Tpd?#M#3(W7!9ybEbUUHXuP5MkGvzT*5I;MoW` ztX~uxt;1A|!?g!7gzmj6y!+q%wDk1iQWQ`SUKZsimR%Q3Gqro08As>g;t)>Yv} zMa>~aCZzXxHxOEUW5?xJ-84=$=%{zpo*;=LbV=XR?JE5Rwh&CsjGXokFv;B~-vvWm zsG@54wV|5uh6}|WIg5QrrSg6l?!>>V9F6H?0Vs)_;VlQQ4W@TOr2v|NhC96Rnyi?n#Gn#GI+vlu;>Li?Lw 
zi&qs}+PwDu51#&-0yf0c9~(<6JhAyRY@br{SQ8><_~=}hCf-2G#WSh<$3lu=@w#2# z+W5454T#Qkfn=_!`v}9<%CLd!kfH-MyAGW5xLy-CDw5DDdX!=FKYp7@@FeuQJebXN z)AV;A=50oO{WUcEWZ-y8;3{uu(8>{Kz(GUQFG%J8cJ`y7@CJb(rJSSIX_wMY{W~#t z6YP0%#0 zk`m!y;&NQBLdj# z2B`?$lfpV>zuZ(B@aY*CIMtquh=|mvm{yjRJzDDo4a|5OM%?(Tj0Fu1d>;2;AZs8a z)6>%fnvmFv>IJBwKVujq*umZK3KvG1dOD}5SA&11rqo>Kb}(o9)5^Z8~t+unY7#!t*^R2wU_X?2!*Os^z{6z<{+I9Gw&&`Ords;tx=P4 za>6J`&Z4Je=NSG3E&70UB%_=jMz}C_3C_nw;zI?~hoO+%o_nyBL5nm{&X5#s zHB;+Je=7@g*KIyOgz_N5av*Z*;L!IWe}X0=O&ID12yCY3;2B@~(X*JkGfRb+&%Ip_ zmn3l%+irEu@74CidBw)Fyfw&IDvZ>G^knKnnq0Gd+!GWzC{HJk$eAZPUMS}CU-EES zyIUMgf3OqQ#-6d4D*89^*kP>^pg1tX0cH(7eR{k!#Q$4oHqk0Z&W!age^+A6 z>xrOm>AUI>egCWeAx^s$`$IES?3i#P7-pJZVs`iSQ4H(@b8m9yhxkzt%GViJ0FZ$Z z5{G6Ms2am6&tJJAm+~^gH=6EDXBYTJZ?9f1;^6hCJdw(Ex*neSx8PS zns9Sc0;i9P?hjhPK`IjAEP1n(b5jb#;kv)SpSJ<>a0lv9O|>d*1$U-fGaeDxljw04TcL#+k zlH@QAKFT*XL-;6EqM?hW6#62=Dv>ao&Z=NiRP0RJ{Ps;oo}s@h(Y3U5%x@RFY;R|Gm9_=$zY!<)}C;Jcd&3tO57^q1b6quSLAPnUKN%|Tw4UKtsI zW~g999~k(u$VC9BsMt{xvBtvR#&v2$plwY9=oGVDpK#wzwqep~=z10qU4?KC!bGL78zHSBE$8JXh&?i{7tV6L?qnypb$$Z z{(SagFFkU9)`ENSHCiL-cMo6hUS-5&qh#XAe z$5{NUmM{tT`^%T*%6T6I!E(&wya%M9>{a6xPyxu@mcai8npRa;52`(YGWq9>YPl({ zhX;Z^ujbx@KjK<$FHC-#&39pN)Wsv65FhV+1^0$Ic~Nw@MOY!~Gwup^&fC*p2w1rG z8%$Gps7K%|FwtBrayc|?MO+8Gs}j-*b>G(S)3BV?tc4m&To*)NQ-?Klzt%UDFBJJ> z1Eb#Y+eix&X}M*bd*^>v`}f7~@8NsPTEE^+7R*vBeEb%Cb65ol(dN%ce}%x<4B}G> z9?OF`{Xf=iNvPu5QGXiRT9j}PsPDG4udjik*ZJ)?sOE3C`X_X25XN2Wn_uD5ck;i6 z2%7cW-pnM=?vLZpJ2nEg`)YWxio0G>QvbvY0}IPYCJJG}>{_`FhX-wbwa*tF-&r;n z{ywCZURL({|2~w95+@Fm-qV}X7!OUw(g6+-dOBHjNd?Rl!+@eRABGzG*Cn z!`W}4g>~oZt)|)bWZFNN`GV%!|Fv{pR%0#nBi%uC6o0{P`^O(_5fKr{vtl=5!oul9 zMC&)d5kBbTGY}r=3lZ^5GrvQuL*S{G(sp|M_;D5Yipfl^^-6CHh9d(PS8VSRP!IWa zUToktc#@R{g1p4V)bzAuawDP%J}qK-^CrF45BdNeYDHVySNTtz;h%R_G%CEp!{;+A z$k5oM`@OXZc2?R^6ROqmVYT=nDWYu3h6VR!&q@tBBT|+3?-va)=I7!Wxl)MJ$USwl zw%CVh+P}OFOwkO#>ktCbc66K1Kaz`LdH?@GDxi0O_$zzsWou*eQuemq?X;vC1qAkI zeC{0ixT4b1loMoXR09tQn3JojqHFc|7#O}cbg!+gG5>w@N&y`v2Q6ndjL6`%dqPd= zrW}EXgY#(33Rtv|4oOc>KJRQ_pwky!Q^0kD@G7b0>g(zKuFzt|!-w24xJszC+E+T% zSQA|hJfV-f`uMnK6Adga>2QcfPn_81`{jF%G1Q}FeO9|ItQewG_4U#ETjW-FajV6 zAymWKaTWw0%%;-D*4D`2U`nklTvHI39O6)h-4+)ukw@sm6a-V95vCxx71whmoy_Qt zQ|e8gng(25%xWaahvQ@qeK4b|3%nb!Bx_mc`YHY_1uEqGDvLspuN*qlQr%(A5KB!+qT;HHY=dvC!L>1DuiKJI; zQEa5Ar$^zDo`al1lW4WSFxMCu5Lh7!qCkxWLdTm4$1q~4@6;h>hXHQE>X%tj8DLy)QPoJP5XYt>I zm_{UgQo66{UbuS)2fxafz>&1fU2fR-^ul#{aj|9nZlmY5>qXEZq^YqEWp8Yl*7R?L zCTgPJA_x~G3WKhN&e_>n`_rSw+iRu}ksg?et~1Vq%EY1htD=Gp04b!?ksPJm4}U!X zMox&Tl&|xH9D`Svzp{wZMECcV<3EU+*{JQzrU#dSvGdW@aXy4c$ly zKX0>Qqka;+lWVA1KwwTT9EWrjT59SSb4(D@alus%z?L+YGdRgFf}Rld^lEoILH9q; z=($}!4&{9M>I5}hX~{W|D{A=G2(?%G4OA5SA@PW^VF!tX%RZo%0x_L~st0vi7LnZI zb<4Nda*LgWE47cAL6q0J_4SuS8p*M0Qod$>*%;NFUPAz?pCeo6jVo{YKzDpi4g{ez za}x+WhXn_u2LcPX5*Tl*LkVL2Oyr&P_vM^4TuG)7w4FL$BDizg@YjLUM`rYg77YEe z0S5ePZrjVtt)h*_2`o>aJ?mY+%Ww4O*+q~71jjUp2&L>Hd_>mxOqw-{If>uPD0{q6 zQBXd@N0DSTvd-q434X>Y~-#-F8V4+;B_(hN_D zI%UV0u?rq>q-yX|Kl;qq$ z8axvKZmlF-q1$SAcEJ*S3ISSld|#vGbfu z5!kWyf>|IM{5By3f|KZ($t|+<7 z!K(>=gsCyKmmirj@M7a+$64oo|Ht%EAStq9=Yh;*>DM@>j8Te2uN9}g{r;C&Z1t}uvY0Dxj^YwO*0o4&sOyZK)~`1gG#L3%dcE<_B7 zHRE0xpm|(FC-HAbmWhroweS_VslM5FgF&I`x~G#dzIU?Gi{HBiP$HJuVPFwpb~ZFK z421xMF9JMqVylG!eN>WwczUMOtCo9p9^S~jo?|xu96bg?X?N@Yz3)L&TwOh> z&URVmyi&Zf>-1?o1Sks}g@3t=8;k38;0b1d7-eRQ$PQnKE6=$h#Zz5XHqa|O41}ne zkrDDFTtIUZ6S&8VA|Yefd-DJyc1tpQfD%YK#_0jCwxY7~tI=A-IonVW5fx?Ul=Ym$ z`cp%FpHWc`uep z>ivhU@Wvv~%^HK^XllmaA~Z9x;H-YS$m=}0^Jri{b1~^HNqmE<`gm)N4c!`^>~q6o z+ftPAl?&wG1|7UC2j}`Q;P@}v8hTxvMim&0qH5s7XyC-&rRygJj}|&V7~w8zPJlt( 
z!8`^l>99%+^e_j!h;a$(bRCq-ku78^|7&(CK$&7#;ssj|y?5f+`1ql1y!*}hs(sYX z!M`4j?Dw9Y4IsdJcje^84)%LjS;LwU9Hx(y^nWLUTvu;ECbN%PvvKEf=#2Q4_G5MH z)9W8K5IO`ias`!=l?|mjSHU@ZM^uGFqSJQX5;Z&j2E=ls8l%}?+C7W?%KXkle{xE` zfK4`rKuu(=n3EBB!zV655?!UF(Q4?H zm~oC|$sHZrgU|@Qg5)vKKwrl;{|*c^&DijnjnOCe)n38@9~*a?lVOxxC$Ia2vG z_}>V2O@LdOnL&{BE}Wu36Ra`LjASbiPT-VRTFy0Cn3#y!#F-;(wQpu6zE`HX%QdP{cXFXcqVczx7N8Y_H5FOXUkfS5I#O-qmi35Ix=5zq} z=Lb*;%zE8=95mv4oh|qW2V;&jV)5|uqG}{V)yrn@I8kCy=4c1wFpLwB&JxFFZBFsb z%+1qwtq8&>rAX&eL0kOkQU-Uf_<}-lNy$$Ei#d-|&33mMxPWVF&u5(G_HBP4oEker z7So*@&)Z>g0Z0{ec+Z>S;TzXz^i25o)_0+bxscX>74E1&*a+{_xakRj zX1kl3rl!>9NDJJ$lvNG2oR4!?FgO_tdCErg1>`*ZtGE5 zuB_$D{xcIQt-y+)hF2I?suYj(Rn-I4e&z*GbQNm(nHw?K658(9lshjWu^8a9TRrql zUXSRQOy2uvSiT|+AH1EN9elf%Hjm%JIsx#5j8XbU{&Hl-kV0>I*kS^|8^h&OT_GW) zpgE)O-@hYcN)8MS>DFb^Dd#1ZhcsRtpqCQkp!t^V5GeuhGAM|+lrOq%AwlT3g0KG26vW{ zAlv=Fwy7wCD#oS40KaD15rr4fCk$w&UmV$o9xlm`jv}0_&)uv4Rdc0h(S8+Gi-?#c z;aR);=-){KWxw{0Eh)v7`moxMMzX7gR8z`voomNqeE5_wWntU>2wh0n;%|uBc5-xD zlwQah8Bu)IVSn=-8m6HBn>8JHNdZ2Hj_vE0a!IUfa(QO0n(^ zr+Gc9kAP%P|K(+86Y@AS&zx>GdH=`hQ6=RyQ2%gEn_wjT zlW_|@F(VLcAR+ezoa&Z$;*3p992{+zBD~LG@;w&lS@a4%HSpUI$-u`h)=S1Pwg0uV zmHDj<>~rf;$ICD?38J3_WvtHZZh6Z=q=8#^H33e;^y~VA6DmGSV?*-{MATxd7)JgF z_pLzHXuM;8nHoE!Q*?YddZfS#bhX=NGMoW3yp)^kcPjy-FoVB;xwgG6Wwr};Ei5*T zZqz4~1LGawk+imPo~v^SYlwx3*Uw!!eSLCoBFBrP^`R?8ay!en5CZMv0oOPAKp$8L z53QWt;S{hR--a3(!?6hF3s>idAjy!~rO8`?Oi`Pj?n{^IdtSAtyUv+2lYFH=u@{uJ z!0*5K7Gh)^QH+x+$c#;EEX>JYU3I@=7nD8w^}iSfm4dL;AA{#eO}76+CdV&lR$U>e zkoAMwBe#s2)G%*;6m+*QQ;b*Se~PK3&3R8+AX_hvbm=`*;_;17skQW^^CA+M7@FyDmNcnx$)jc2oEfBxeF3Fe$6CN}nQ zl@7bbBzd1q8{Eo|s1FWTB`qf;SDNC1&q@6Ly{?X@<}1Ra#QcW*WOuF*xbjW?D^Gc4 zWlK}j#eq*ce2ngsM_(>R2O0hZowKr#3C9PzWUP^UtBuZ9y?k{mF88CS7~;gD4J=7r z&ntp#B~%Hu%H|_tr)s>u7AT(fh8@++^VGMUzn4IltVd^1nJHFx>ag($UGiJR?^_U7 z*VZpx32w^~3j24@PvXD$mU5htH(YPb}LH9*|S*F?v}H0(;hg7N+O zCJdfAR@*BCUN}RqZbI7$nrK&{9|W~$zPY@S0HX3WKW-OJbxuxDGoMzKwYR`?$cu1c zV?zYkISnf3s|n|E!>NXjh2>;xd$E=pa=!?{hOCZ-V|@JW&-{HL#re1-@b$;$NF_G$ z@CS*`&ul3bav@dk9hIMW5a&t7P&}^T)Q2FRS#nja9XZL@vnBHpKkbMZROUiOnKSE| z`<-39k7K$25PjwKWZa+Bnyg%@NN?5g2yN>Yx~4dP`WTG`|5okfjBx$o!_BFUUWlVv zX)A_P)STy}m16AMMc8gt8KKi7fA9liBG5jW_Yhrxqy0HAFAt^nzJY#b3e@8vgsgrI zo*s}lp_!*@$O4_mmf%SV6fYNOB_ErS4tPN~A>I4r0fP>LIpq*l{f6;qUI|`u@-I7> z!SL^T>FJH-soIY-fJdvLe{Ny|9bEIFOu zbiZr@xQ^*`o5OgJ$~STf`(NYoS~04Mz3dPafAag8dM=LepsG$o#}$N9)af?Qy=FtN z@a0=G!pK~RFe<5Q((xF)vPk#%^B{Ywj)7t|g|-_eMBB;0Kru%KfFx374p_v)`5$+=N3pmm{|2UUeP`{jCYs8g}<*?3VHRZ&|zret`Ifhqto6PD4iayEDo85|J%__wHQ{kLaR>x6gV5 zGu0widV;pmAzfObae|SLElILE_G=K>0EPQm6qS1&io)7me1p(PzlIg0PiUzIp3Cm!+91-O#80*N4Km4PJv{1P^J8poZlaF307%&DsGy~lxWfni>pmo9X;l5#`E~3# zYoWa8}ov7%FVY|p830KMTeHpN!g?dd3uz5chn`K zAEMok#e31dQd3Ue7>pX&3po>+Nprkh$8%)jTzUD5r7UPbPe&JsSh4Hk=VL1Ke{@SB zi1Yo87A%G;EnP1$pLkWnGzW?v7gy)@Dd>!d9T^ujhme_2WZ}hL)6ZCCzy4UI{Sah+ zkOw;4<5i(p#XUKohT|?2wLqr7mqr5Z1JzZVrm0lnX z@IkuDP`ZStr{`BZhM|FhXhfTg#bt;HUIha)4*{!rAi9&*toZ(HI&oH05Ftfi#R#i7 zj(dhSU9Du6K9XQ_%e}=ky)Lu^RXoG1KWMB{-&$#X>!$1H9G4Q%w3KDXbQ?CLVUr;| z3_KHv9ZJsVkx%KiWa}p9<>FFd`^h&1QUtIN~X(oUs*GAuxU?Abf;rs%7V0HQU zY5u+QdUP~iyZrI#{b}XJw#B@?e***hE`>%(9R zmPdavdveM6X_mBTVUQoTI`Poap|@qq0GAekO}5q$9LN5I>!Plpz!F?VM-J}<1-*Uo zi)(A+#_FF!70t;KZIa-7+Wg>oS!E^b5EFQ5kg%o(24rbKtH8&9lEqru@d+_%5_k?y zh_lzsz;px}oE=X3oE58mFAx@h`WeK9Pl%9HRhTY~$s8q_%M^~I z(mhLg73_b0nBmGvGE|NJN*D>l2&l35Y3O?G%4C!juuCsFPaKgXH$O#f$v98vK9ZVY z?KeFp4oRxUh7xqel9o+ls8(@AUsz{WQ;4ea0@i0P|J2_U9b*uy2p&ki>8%b4eOQbg zLuP;5H9n5tmKg_C#>}{!m{_2sw&%q<)vRRL7KHxeUbyla_C|xueez%q;TH1{hFPsq zj@c7R$rpP?^>(Yl;rytjrKPANs!(!3r2)=Xf#*rQZta=g#kT*t?+UD*g4gCs$hPD0 
zdJy7#fIZOibxwp|vWk{*2j7njarRLA6j<6CeXCI6{R2)j)>jg8%H72aUCMqCvq~c# zv}ouXjH&r-&IpHGlzMA@yP48Y0{w4=h|cqq*IT2C;_wtiv5K1G&Av3Q3}nGBCwIwk zt9z5XC91;>~ zrw53DtWs}8@$tp=^)>ScfQk>}qy5yn?+O^f4c^NSNa;`q)^gBK6lN z>FBJ8zSg>Em(*$aHCXaRWfHD%a;KS@1TUm_%@9wNnS_lL6s5XVPs0Shd4+=p%Ac{mHUOX76~`AB7g44Y zp?Gk9{9Z)l33YieW*EQinx|+(=h*pc+Al&c^VNApGl8vx`{NdLJs!`*B5Fo@S2fY= z0^{cftv^-XK_u)(NDd*j_Nb`+beWRkG~4S({DKlnaw}_rU8;B)G~#HBr?0JMHSz@g zZhO#b-8?Z}UU+I5mX&?v>!8BDDdoIT%v+B@>{^L%VvTz@*>F0^7z$@wn`#lbast?X z-89{kv9%d4_6JATdD2f1HzZ_aMD_z?z~MME4sO%$WsV?G=m7C>TcWpZZ?-{2W;V7wS^rRwXDe_5!SfHWfWGOa%Kwdx;c3g(#(`+yzlV`o#90E@CXjOu*_vTDd^;O8wHE4D zVdeV5zEkaf^cWBA61G{7?89)E-9{5&GVb5p%q!nG%2G$ab@l}Il|m1BrT*#4s?X$Y zaZTn2^f2fZpK#E*z-oKa#9YCkydd=L)OUFzustuJHbfQvs36A?pKC`dWJqS`=L=t> zf|x-#BcTe9s`dI1Ma@lKxdWZYb2p%cffw9r3;^WMUb`9ALHRZGR$}Su>KYSQf*KF} z4~q}6`_UStk^#-s4yRC%eG0&pa$&5A%X&_2!@z zEdPv?osxrmohRUgu0+pMqzD2 zxL;1{sGs`he3iuLk77QojGBZ$hl7J_0zm@@im)a_Ps_K^;jlxEgjapv6qJ-MPP$4h z@(>Iq);(ucLDjD(E$%jp%wLt9>eA@xuI%W3=To?TZKDlcWpd)KUArE`kU&rK%tF-r zTkiU}K{A`31K6i8e1P6->MZOf=5|azlZ0KR4`;@m{={?IyB~lHZSXY4{~^%{tQ{%P zc&Em0eI)@%_!2M3hUwgN@oW~c0xv$+#UV3Nb33JzTS3K!oHH{t)S~zRf#E0nK$6D4 zz8}Ws=I^ekG9UyEU^$R&UU9Twq6~Bhq6PCXnYiq_xG1y7(g&iH)Yz0j9+P|@|vO=G1 z)=ra6AA;`7D+I;>sMJ8DRqms+31Y+MY_^6~pBRxx>-$Ufys~FWU&DuAkFrXAx7{)a zZB&Au!rq~~xD+OVl1eI@;~im9wHEq{Zr5BpMrUyqmFE6MaYLck`9J&%zHR7zparaQAYSLAJo81kT_He^Y_9g?#1S7KABh;q;Wi8> zc6N581jx2({*M`I0D)|1ZJR$nH8yQ_E{+~MJ^x*)KT)$q~@O${GJ-~|0eoHcM8{-l{DYHm0|ChChz#e zHQuGwdxyoUz%DV>qat4^Bip7X(J?8C=(aS%VlG(BcTe1Hi6k%rw?I(&>{f-siVnp8 zk-hF7nw0$zD@YL@Mj_tt&moJWB#=*XFF!m`p8mGXpj8)rwfhHNZtZ$k+_oscG!vN0 zKE7%K*DqA5$-Bb^porMv?@yMS>01JIaD4&*FnKM!B0jLf;qlrejQjrlyceO5IR~-4 zXo;RWIy%VW7KVoHAD_dGAmQ`?5o0kipc{XHfSSXGpi5O<-aC1WB%{5N#yK4lvz zATgnYmhnimNNGPT{iYW<(1Iv!!;(Cqg)lOI)}v!`+@WiW3EP|;@z{*_HNnd8AHU#} zTPwA7wWrypv^3WrbR`F0^+|3H%i9qBrZd1{3k<_mE1-QRe#RfT&`(jLOYWy^wo_K- zFzj?Ljq@zUGi&1>P3fw7(`Q@xI@K?XJ0dNJ@~zWqfI+z^Og0&tQ8v4?172;WI0m30Y(9>Ak+~hXwC3tm>7~)DW{zFii$&JFo!p;}@ zP#c_XHh_zGZ6`S{jtK4qaF2(Jd;vM>0zkAVM$OppaPs~7caQL2HEkBrwdF>^h9+24 zqfFt48SbWXUCrzDWAR$ePi7vktc(W(Q+)GuQM+BeCio;ZI(p~Car*b3cwhcZBrHbi zt@{>*|KjzVtnb((^kcyv!cgnu%;>dc@8M1^yg^Sfp1c8vcBFV4 z+zA-0w|OFnrhbVk{>lIx<;XP}+S(zRN#5Rfz2z^@&Y1sp^z&VQ+rfkxUxG|3q_Rn% zk+s18+AV|%fToTZ8bO8%31(dodf3jhOwVHo1kqR0KlV8@Ajm}89VdfD1q$%n>bW-F`I?Pxve4aS<2(zK4+LBiJ@R6Lrn~=IZ-KV>) z=;=n;Nm<|;**Zs)-A|9z^JJzXaiSoE(FkDAh&UTK;^L=>g69CC5I+^>cRR^@$F~8w zcWakF;01uQe0fcHYiB1Bc+@VUbMOj6Bq|cIU`j#)@l0lHojD*w`YRGaOHcqH*YE&@ z%X13K7~k!c9nxq6IOr7+!zBc|QtJZ|L{K5=XPlNaLtuD%VPPX&Igp69H1L?O!x#7x z2>e!LJx2OBfrk$(7_-j$aAF7wc&b1A{T=k*6ZPF0*bS8UHbo~V=l~QUCtEwghd=Cj za|QL4v&vss-HbhM1=p{qW@zWD$$Tc$&E$RW_q@%rZ8DL(tbN(Hx<|t?N>wafdnYG` z`k`N2u^K*kl3JIwZ8f;_d63&gTaP2To<*zV!l=d%^#v5E3Lo z=7fY1cydXn?}3(lqR|R7CnU`qC~x+rH@CK+ut4!HO|vA z=`eREvr2&>nZ2DI)H51ZnOTr(q*3QQ2szF}X=2~p4`3Wxy5W8TxBO@;+#}ExpHb^g${^WrmLO}okBRwOdw1fmV>`8-n*lByxz|0KhCv%UZXNCL5$D@=~n8Al7 zPze3f{`BGLDV3}#6&2O}O9Fg+5vK=G9x~#fxvFZg*ZXMRD+Jsh=GPCE+)3V z>cqH5i%1i(kuJ(Wn{hWRGD^CPit{Wg>1;(j#S1<^Z-wpT8RcNpv!0cx01P*ew6dYUt}qMftm#x2?rgUp>=p4sI*Z`Q`YfVaG(t@-I+ivyPB~0mVkYG{mc9xZQ!+nd8+bI}@e;4gG8m8zVpk zcxrS*MkdyseMBOPIkz1RHFbbM2V8Pr{SyIC@s?-3bP9kk&36aW^JKyZUUf)&hikPGyHmlf(Sdq%nX-NOL1XS-R{^FC{Ih#wUEUiSVW2wZKvIpZAM5Kn&t()q;BcWbG1C5!0>$A>D-)$$^B^Rv-5A`pm%n z*CIKp6&Mm?v}^-Bd$QB43n=&PU^s^0!SV4^z*o-C&pC<+Y*NU5Gw#V!S%OC$<}3K* zliZNCXSWsT+7u6alj8}QgmgBqFZ=%_);I2kaMMMua&f~%_$%i%v#{8HMX}th#`v$% zTv%C{s9p0n_@2fKxgIa3%;2(bk%-S6MQ%j{cN$Ccp1dosl*-A6o0l68vK79e5|La& z`ojooH#hQjl?+7z3@gWc3~u{+V_0K643I*Rk(%LxU!NT<03in}378WLsCkb(ir_Td 
z+x-lm6xsX5u`yW13>JLV8$EfwcdVu=q`H$3g!Bv9RcMY_rusmB{JF!2Fx9(1C4e)6 zidlW2r9~H*7-*u!Bqi${Hpc)C+#^D@d5QWazaPRhZ7TPT=DtxW9Mh^*cH36lKHe-JFC3?!stm_*8sphr7>q1# z63RaRKB5yDE0Q>cNB?%mtyGIlSFGf>OxtUVC=hsZaML<(jypl!`JZpdNJ|ORQmJcW zX9pm%m$zzs5YQ47gf^0N-P_x{MuGu)#MbWzmoTwfyA*}0rT#}Ie@!HlcJPP2ISwwa zlEWMZ&6;q5Q?JWyb{el~#3)c8AqwbUb9RyMPlcVHg9+~ZH8aZAiLt)^mpP&Ubiz_d z+S{n=qZJBCut|yTNTfcK97j{^EbMNxMqaxN8Jz3R`c3~e!WTg0VV=q;IG~EhE=P`^ z#un`E&`r^>J4UztG-^QPC9U#AdZAS2=(9N7X#q`8&RrS>6Enk&nO?mjuQDkICeVFA8z`K5H4@>k0Ok`55jdEcRnI&ElA4YmKDrvWfom&8N^0c16VKdvCGLxSGYhzOoD1sWaRtgkcg{ECU6gw+pCzv=83a<@2VeS;Fk`H{g~A=iRuTZ zrrIKfA8g&i`bYUu7U^g#FVP#)?x@|2tlNRKs5L6y|!Gnh9q~nW?Eu^{2C` zyN4POfOI-uR$lG^DIftT+9Y4{5)d%Bo~fyLG768nz@$0|e&n)WpCYO60*~qt)+57pI9)rEl^agJQ!;(&%jo72xClGCT{a`j|Xu>tAc~Q_6Y_Y zb#ARw#>fG21dQA}qto7G?RQ&k*B0$I?({fXVC(Xsvhj9XD-Y8ZR(MCI4*bDQ%J8&> z%C<;{i}TpueN5xKTg5HP*3`9lU|=9M-zr?XgS^bGbC-?ql|#pzxf@1KMJ3T6fs2By z&(@~LAGwyF>kfio*#8iit!Rw+_yT;Nmu18$fE>L-@n;GxoZ~ z|38+#0;d7xlnWw7|nERmn1#C;<@4Tn?9^c30d*j}DZjF~*{c7s# zzIZ7+tSlV+gl|FxC3XB0Hsmb^L*x=E&%)gxtOJvXX`7#EXTqxnGy#a-z=p4Y;1jj1 zro&i_aNTgQz)Zu*$Q#no!0SmQsH3BUT)YcZ2oR){J?iry75-gQGwu=E-hDqbJUn(X zhajHt8af5K&muM-w4x_g-rdH(_f8g@{zC{6>@M+@XFoU{n~m|ZJ65WoEB*S)#lo=! zL-6`}#dMoGPR1R~L6n3a88r3;0a8~1hM@nOIl4;La%m3Th{rAH_FJvBW*B)Dysth` zWp$YWiHf5g>bEP9QUY`x{zjiX3||hr1geUp7g}c!c}E3}97niNjvV*>``hnh2p>nt zmLWA2*z;7pZw_IbFS)uGpO}b2X<%rWIn1-bVmZ#2`q$dJd$$2mraU;)1LM)5rAv1r zYxquj_3z({u@4snR*bl7q`wzao%enWuGII|7vnx%@;)_8iism6-Fl&)d9QYG$Fx&W zhRbf|7bfYyD3>w~WwGyrv-LwlZOdVOLD^TxaUFjCwd!%?E&kK4t~AzDH~m9Hq_<4B zlNQC$=aO+YU189MTJxv_5H}&O5a9AJc>aTV^}Wu*YF`TEVcXJC62J)#ViefgD{rod zcIF!T4)PcO{o_lGYiw*(;Jg9h=c9WsWh5j(7zoG}|IW1L1C~8UJG;O1&zgM;__!;C zpf8v|fVyk(FH+zMT&5TL7(Hy`jFlG+o~r!U@B-PryEfO3rYQ>v5qJO;yb|}1g}>KN z%DGBk4J*s>zJ=M7;pR(aC{yOy7hpRUdbi7NYL!m?qlv0I?!%zyfEEd3m4de)JPL$6 zA=}#%;ApXOApHac1Q5_Hq;x>PgMph{4d3fCkQ`lH@kYeN7~MJ&m$hVuhle|$UPeZu zGkLRsjLGCz4oKr3xPiu=IXUzz5Xl4D??H0lahI6R&=-~uNfR@8V@@di;KhJOImlAi z;18le3D>W^S6h%;;OiEaIP_5JZDu~GNm75*_SBFh(A9t;XXL~;;)0f!V1i@vv9t9Y z1SC9z9s7!g`}P-Oq`-sTyJsg;Ho3fl5Ctnjbrd%C;d^bf)D4WAy78q>6Qr`gwA z!+A21LwyKUN!R;-7y->dFPoA!q)RuNT;wTAnroyO4BxvtKXJ?q+W3l5%=N|Xh&d-E z=ttS)foS>FOSjGQXP9W>K=!e0vGLcnJUEG6(|o`H0jLKy$B<^_2oleYzWYv2PUWaE zpuvZh?X9_+V-R*-JluoCs__Pbn4i#f0~rfguRa|Bu?Z8Qle6>fv3!nn;%LgP9^3~n z>DwSe^zpD0&?pLQ!_YCZKJ_BTO40%DPg2gT{!ej9oRa z-X!{Q)}_#94go(&Vu-C(Q+R{wq8HKHf+qjlb`_GLiC-~! z?o1zf?DS=5TQC%6yixIAZ3WdcVG@n3z4zDbU&ND!If3hKH_N6_5JIHW+ z0Jp(-pTn{Fy4Z`76 z!OY5P0@fOWiL}(zKMnlo1Oy>*FEeTAEKQ3MZMj|s?~NNH-|tUBrR;vVq}KqHOg0VP z)BLT$7{Jd#ShW{IlCN;uemw0b6h3R*5N6I@njrc?+aR{N*d6K%k3%e1ao)8OtwZD^ zX3CyNhuL()LRWV;!WDQP>j9qtGsOgPf@rjb)YM$i+-hrJ_ZNcWMpZ=$vUWkK+!ccR zZVfdIC<-X!s)QXG-uIxsLm*k;I)1uJq*3p|PC+fJYrXs5-TQFS%(M~%VlNDjIf&2d zmYwn2i-!*%I-CxJMA>`s4K#!7)PxeSqw6pRpiGcKC5d6`3c5vSTJuM*+EAxd0q~!K z=S)N|q3r0agT4a;kBZp zvxXrL;d>swD*?bv5&RZv95+F1H9ko_I(Kl{kQN6X6AENVGJSckZyWT#;!|Se3>KC! 
z&y_yRzjMP6LN%~)aY50RIdqW_NQS(vshMc>hCDunaCdC5-Pb_whKyL!?qE z1~@38e+)X=JJOUjCUhTsYq-n1j$v0^Q?vbr5GrrC!}|e;9CR>ndpRIwQb$LJl$2C` z{5w*?9K`EEqYG@HI)4=Ey*A9vAKU^GWq*l9i8S#m^hA06r$RbtOxS7(E;2({=t%i% zj)?`$v$bri1>h-(8Cd0_bLHsT1W#v71pQ?|6Ld?qf!XXY6$CK?NE#i7oP7Jom*cPT zFMxHo`R5O4#}9yiLZ|{~CLTE4!7ZZ`dmhbS9+}b2r&&dVzez?J`?LFDrRbq~;+L zDSv%!NKX1`GQ{6u;zE!gz~Gy6Kk0KML^{JNEs1(v)qnUF8Xi0s1^uC7w^neIu-N?z zf1n~nQ7A_bPtWLw!b`T!;`OifZsuVFwS%*d^No$;GS|@s# z2C1g$20MC}f8z~hFJAlwnOuOzWfS{&ZG04RHCh#sgCIMwlprJxv;yCnW(E9CzBJ2g zmS)tBR9!Ootu6*=Y@lGk|I%8hDE~Vmm9)Z#)W)@2+U2+!TMeWJ97y8*JyqwNO z2*D7;#4I@a4@(DN6dTk1I-tXzMR!EUXtkyZ8)F zHfS=uU{j%`i8HMJL_ddalo_65jIf&%9Am@_F&>os3~>&OS+dhh^R=KK`QHoOmiKRc zCyulKWVtQZPII9a9nmDpym`S(HvMYWer+dt{pr;xd1SghOzgJK>Mv&^A6+l-3>}^% za(;U0HVCquCL;uDcBgq(ydi&mb^{iGbHNyo|Mt8+X?SR6WyyfPsO~M-A1<^@3XzG} zQKl~&RLPtUcT0Kip1o_79K37j=IrxdSfP|oDQPWT*!E;!&AwtL@e8ZQnx$_Mib**6 zh1~>i3L2Ulx^MQXpgNF&{5!c~--d>}%tE-(tA!kg&gg87SFiW1eFVIvI@TeI2W^S_ z)XJT4#=Q_BlC6Ac;3_4fsol{7uIL4KyoMs0hXV9K9L) zc=rO)O>Th@PQ#z(*Lvxv8ue)o8t#34L=Q6?8V?WBkuEAu5oQ01Vht)`y*Lj1iUinY zz_+_HfD4Emd#V~Q`PoaJ$A_hW|9tDYE&g66Rx{i=$I_+CC9Q`|A%+ zR*A?%&-xJ>hV_ddrz-O{aEC$@0C4^Gtohof?z; zFPD-sdTO!l`(YQ>D<5wqvhYlny>|E&AdsWYv zxvI1?t9J#ss1UEPcDtsCM08-?p^Kbw9GPkA6*|;WRz_@Bpn@vdpTc>@C&hlYx9{QpVMI$hG+oW1+Op|fJje*qR&u=VV z%bvYRHS+E;6UCqS66Uup-`IymfWO@@ZSJ_G`9^TYafh9CBv$)mV`c}?eMKYy56u^O z70=Qv_QmIo+w3$m3ak4s8tIHIZ{c07zw?oNTbOlDVHF(4@SA7zPDTjkolJ?Q%3{QD z*?#I3R|Z>gmW;?4q8i?2Y+D}@rYElZ_4|h0vXWqPYp4lEAapWl*zlZS;-BwW_~KT+ z%j#We79!BFae^Et54Bo>n3caPme-s0S1RPQgWN0$;HFC25& zIH{Sp47g_MSYk)-M!`>nurf$&@fe=of+|KPS4vUIGuQFK@S{hGB1s~c;kiw8&`nkemNLf&ca%9LdJ8QfCgShwRO+<&>!fJe<1iUEh0dB83glagq2VG?{wjdpw$x?(x)m zLuvTMDh93X`+~-==nJGaoFl+}a7BH9G*A8n=k)6QbI8Zw&UxHV%MD&bKaGpsQiern zsCKb^tA+O>BmXnj3JjcZNOcWts(V+_cuA7>^uAzbNw6yEkDHZ+jjdn*&v`6-yV$Y-@V=)P<-@ zNg2H0{G#~OFuT~N_0zR%pZOn=vwJSVwu!pyxaxEO835Q6;)Et> z0OZP{IKbJ>XZSN-X1(&(2LTpfj#)PM|0t_~U!@)(nbQ23=#hW<>?}mn!iP5_&M@*S zU4PPu4pg?_fc3p)7x0#`7ezG6<`)jm4lXsY?CSk1=-DcPlI+=3wWO)~qVS6G+2It- zhK%@D*Ie~2g-*#Z0=ZNN;LlKQz4@b2SD^v^4a%{i-1LD$Z(?5pXR>W{eO9o*$+A*Kj_tPE9V^~-$eLp;Of4X7J}_l;vu3$D%Vs3zY3kwD$U75v&qa+NTB8P zfTtBYGQ z71SsjD@|0wI=CBPaYuj88UXK?*rz}JULU@Z3=>Fn$NVnD`uSYVsVm=x1AWKyFbgCB zAX8sX2~ZDcW|GMMc-P^c!;ljTXV{l-|g* z+J?5h15jFZAZ@!Tvg_xrS-U@dn5JgE2$0AU4jE#y1reUzejc&x z-zP4odEJ)aY`Nq6%jNj^$!e0c(U03z?m6Mz2dZ?6zT^y z@(uoN>zJ@T{MURC~Q_!+)8lZS$#hn2a9wV0)wHT;F* Date: Wed, 21 Sep 2022 18:05:17 +0200 Subject: [PATCH 364/490] Unify and simplify adjoints for `pairwise(::Euclidean, ...)` (#1310) * Unify and simplify adjoints for `pairwise(::Euclidean, ...)` * Base threshold on result instead of input arguments * Apply suggestions --- Project.toml | 2 +- src/lib/distances.jl | 35 ++++++++++++++--------------------- test/gradcheck.jl | 11 +++++++++++ 3 files changed, 26 insertions(+), 22 deletions(-) diff --git a/Project.toml b/Project.toml index 599325e12..7d277f688 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.48" +version = "0.6.49" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/src/lib/distances.jl b/src/lib/distances.jl index ee39e9de6..1adf30d01 100644 --- a/src/lib/distances.jl +++ b/src/lib/distances.jl @@ -64,31 +64,24 @@ end end end -@adjoint function pairwise(::Euclidean, X::AbstractMatrix, Y::AbstractMatrix; dims=2) +_sqrt_if_positive(d, δ) = d > δ ? 
sqrt(d) : zero(d) +@adjoint function pairwise(dist::Euclidean, X::AbstractMatrix, Y::AbstractMatrix; dims=2) # Modify the forwards-pass slightly to ensure stability on the reverse. - function _pairwise_euclidean(X, Y) - δ = eps(promote_type(eltype(X), eltype(Y)))^2 - return sqrt.(max.(pairwise(SqEuclidean(), X, Y; dims=dims), δ)) - end - D, back = pullback(_pairwise_euclidean, X, Y) - - return D, function(Δ) - return (nothing, back(Δ)...) + function _pairwise_euclidean(sqdist::SqEuclidean, X, Y) + D2 = pairwise(sqdist, X, Y; dims=dims) + δ = eps(eltype(D2)) + return _sqrt_if_positive.(D2, δ) end + return pullback(_pairwise_euclidean, SqEuclidean(dist.thresh), X, Y) end -@adjoint function pairwise(::Euclidean, X::AbstractMatrix; dims=2) - - _conditional(d, δ) = d > δ ? sqrt(d) : zero(d) - - function _pairwise_euclidean(X) - δ = eps(eltype(X))^2 - D2 = pairwise(SqEuclidean(), X; dims=dims) - return _conditional.(D2, δ) +@adjoint function pairwise(dist::Euclidean, X::AbstractMatrix; dims=2) + # Modify the forwards-pass slightly to ensure stability on the reverse. + function _pairwise_euclidean(sqdist::SqEuclidean, X) + D2 = pairwise(sqdist, X; dims=dims) + δ = eps(eltype(D2)) + return _sqrt_if_positive.(D2, δ) end - D, back = pullback(_pairwise_euclidean, X) - - _pairwise_pullback(Δ) = (nothing, back(Δ)...) - return D, _pairwise_pullback + return pullback(_pairwise_euclidean, SqEuclidean(dist.thresh), X) end diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 3330d5927..3cc10ce82 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1197,6 +1197,7 @@ end Δ = randn(P, P) X = repeat(randn(rng, D), 1, P) + # Single input matrix Δ_fd = FiniteDifferences.j′vp( FiniteDifferences.central_fdm(5, 1), X -> pairwise(metric, X; dims=2), Δ, X ) @@ -1204,6 +1205,16 @@ end # This is impressively inaccurate, but at least it doesn't produce a NaN. @test first(Δ_fd) ≈ first(pb(Δ)) atol=1e-3 rtol=1e-3 + + # Two input matrices + Y = copy(X) + Δ_fd = FiniteDifferences.j′vp( + FiniteDifferences.central_fdm(5, 1), X -> pairwise(metric, X, Y; dims=2), Δ, X + ) + _, pb = Zygote.pullback(X -> pairwise(metric, X, Y; dims=2), X) + + # This is impressively inaccurate, but at least it doesn't produce a NaN. + @test first(Δ_fd) ≈ first(pb(Δ)) atol=1e-3 rtol=1e-3 end end From a766a58894c54c33ac732c63eb846021955d3689 Mon Sep 17 00:00:00 2001 From: Saransh Date: Fri, 23 Sep 2022 17:59:42 +0530 Subject: [PATCH 365/490] Dynamic README logo (#1311) --- README.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 2bff370af..0fb5c43bd 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,6 @@

[README logo image markup stripped; 1 line removed, 2 lines added]

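The `pairwise(::Euclidean, ...)` adjoint rewritten in patch 364 above guards the square root with a threshold because the derivative of `sqrt` blows up at zero distance. Below is a minimal standalone sketch of that failure mode and of the guard; it is illustrative only, not part of any patch, and assumes nothing beyond Zygote itself:

```julia
using Zygote

# The pullback of sqrt at 0 is 1/(2*sqrt(0)) = Inf, and Inf * 0 = NaN,
# which is how NaNs leak onto the zero diagonal of a pairwise distance matrix.
gradient(sqrt, 0.0)   # (Inf,)
Inf * 0.0             # NaN

# The guard used in the patch: below the threshold the constant branch is taken,
# so no Inf ever enters the reverse pass.
sqrt_if_positive(d, δ) = d > δ ? sqrt(d) : zero(d)
gradient(d -> sqrt_if_positive(d, eps()), 0.0)   # zero (structurally `nothing`) gradient, no Inf
```

Basing the threshold on `eps(eltype(D2))`, i.e. on the result of the squared-distance computation rather than on the input matrices, is what the second bullet of the commit message above refers to.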
From 58e7cffb0f3f7e079a2b48d42e77221517683a95 Mon Sep 17 00:00:00 2001 From: Saransh Date: Wed, 28 Sep 2022 00:10:29 +0530 Subject: [PATCH 366/490] Fix and update `CompatHelper` CI (#1316) --- .github/workflows/CompatHelper.yml | 24 ++++++++++++++++++++++-- 1 file changed, 22 insertions(+), 2 deletions(-) diff --git a/.github/workflows/CompatHelper.yml b/.github/workflows/CompatHelper.yml index 1696bd76e..223ed3f05 100644 --- a/.github/workflows/CompatHelper.yml +++ b/.github/workflows/CompatHelper.yml @@ -3,16 +3,37 @@ on: schedule: - cron: 0 0 * * * workflow_dispatch: + +permissions: + contents: write + pull-requests: write + jobs: CompatHelper: runs-on: ubuntu-latest steps: + - name: Check if Julia is already available in the PATH + id: julia_in_path + run: which julia + continue-on-error: true + - name: Install Julia, but only if it is not already available in the PATH + uses: julia-actions/setup-julia@v1 + with: + version: '1' + arch: ${{ runner.arch }} + if: steps.julia_in_path.outcome != 'success' + - name: "Add the General registry via Git" + run: | + import Pkg + ENV["JULIA_PKG_SERVER"] = "" + Pkg.Registry.add("General") + shell: julia --color=yes {0} - name: "Install CompatHelper" run: | import Pkg name = "CompatHelper" uuid = "aa819f21-2bde-4658-8897-bab36330d9b7" - version = "2" + version = "3" Pkg.add(; name, uuid, version) shell: julia --color=yes {0} - name: "Run CompatHelper" @@ -24,4 +45,3 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} COMPATHELPER_PRIV: ${{ secrets.DOCUMENTER_KEY }} # COMPATHELPER_PRIV: ${{ secrets.COMPATHELPER_PRIV }} - From 2edc19055d09a2f543ea49110f28dba98e4a83eb Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 16 Oct 2022 23:27:04 -0400 Subject: [PATCH 367/490] Add an implicit example for `withgradient` (#1322) * add implicit example for withgradient * also see also from implicit gradient --- src/compiler/interface.jl | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index ee2a69528..f350069f4 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -115,8 +115,17 @@ as a named tuple. julia> y, ∇ = withgradient(/, 1, 2) (val = 0.5, grad = (0.5, -0.25)) -julia> ∇ == gradient(/, 1, 2) +julia> ∇ == gradient(/, 1, 2) # explicit mode true + +julia> w = [3.0]; + +julia> res = withgradient(() -> sum(abs2, w), Params([w])) # implicit mode +(val = 9.0, grad = Grads(...)) + +julia> res.grad[w] +1-element Vector{Float64}: + 6.0 ``` """ function withgradient(f, args...) @@ -134,6 +143,8 @@ end Gradient with implicit parameters. Takes a zero-argument function, and returns a dictionary-like container, whose keys are arrays `x in ps`. +See also [`withgradient`](@ref) to keep the value `loss()`. 
+ ```jldoctest; setup=:(using Zygote) julia> x = [1 2 3; 4 5 6]; y = [7, 8]; z = [1, 10, 100]; From 1f079016d2c59d57c6c92262bdc79067828a25db Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 16 Oct 2022 23:28:20 -0400 Subject: [PATCH 368/490] Expand limitations page of docs (#1298) * add known bugs to limitations * Accessors * Apply suggestions from code review Co-authored-by: Brian Chen Co-authored-by: Brian Chen --- docs/src/limitations.md | 60 ++++++++++++++++++++++++++++++++++++----- 1 file changed, 53 insertions(+), 7 deletions(-) diff --git a/docs/src/limitations.md b/docs/src/limitations.md index 5b15afac3..bb3b4614a 100644 --- a/docs/src/limitations.md +++ b/docs/src/limitations.md @@ -1,11 +1,14 @@ -# Limitations +# Design Limitations -Zygote aims to support differentiating any code you might write in Julia, but it still has a few limitations. Notably, you might encounter errors when trying to differentiate: -- array mutation -- `try`/`catch` statements -- "foreign call" expressions +Zygote aims to support differentiating any Julia code, but it still has a few limitations. +Notably, you might encounter errors when trying to differentiate: +- array mutation, +- `try`/`catch` statements, +- "foreign call" expressions. -In this section, we will introduce examples where each of these errors occurs as well as possible work-arounds. +This section gives examples where each of these errors occurs, as well as possible work-arounds. + +Below, it also describes some known bugs in expressions Zygote ought to be able to handle. ## Array mutation @@ -123,7 +126,7 @@ Stacktrace: ``` `jclock` will multiply the result of our C function by an argument. When we try to differentiate with respect to this argument, we get an `foreigncall` error. -## Solutions +# Solutions For all of the errors above, the suggested solutions are similar. You have the following possible work arounds available (in order of preference): 1. avoid the error-inducing operation (e.g. do not use mutating functions) @@ -148,3 +151,46 @@ julia> gradient(jclock, rand()) ``` Lastly, if the code causing problems can be fixed, but it is package code instead of your code, then you should open an issue. For functions built into Julia or its standard libraries, you can open an issue with Zygote.jl or ChainRules.jl. For functions in other packages, you can open an issue with the corresponding package issue tracker. + + +# Known Issues + +Zygote's issue tracker has the current list of open [bugs](https://github.com/FluxML/Zygote.jl/issues?q=is%3Aissue+is%3Aopen+label%3Abug). There are some general principles about things you may wish to avoid if you can: + +## `mutable struct`s + +Zygote has limited support for mutation, and in particular will allow you to change a field in some `mutable struct X; a; b; end` by setting `x.a = val`. + +However, this has [many limitations](https://github.com/FluxML/Zygote.jl/issues?q=is%3Aissue+is%3Aopen+mutable+struct) and should be avoided if possible. + +The simple solution is to use only immutable `struct`s. + +If you need to modify them, using something like `@set` from [Accessors.jl](https://github.com/JuliaObjects/Accessors.jl) should work well. This returns a new object, but does not have side-effects on other copies of it. 
+ +## Re-using variable names + +It is common to accumulate values in a loop by re-binding the same variable name to a new value +many times, for example: +``` +function mysum(x::Real, n::Int) + tot = 0.0 + for i in 1:n + tot += x^n # binds symbol `tot` to new value + end + return tot +end +``` +However, sometimes such re-binding confuses Zygote, especially if the type of the value changes. Especially if the variable is "boxed", as will happen if you re-bind from within a closure (such as the function created by a `do` block). + +## Second derivatives + +In principle Zygote supports taking derivatives of derivatives. There are, however, a few problems: +* Quite a few of its rules are not written in a way that is itself differentiable. For instance they may work by making an array then writing into it, which is mutation of the sort forbidden above. +* The complexity of the code grows rapidly, as Zygote differentiates its own un-optimised output. +* Reverse mode over reverse mode is seldom the best algorithm. + +The issue tracker has a label for [second order](https://github.com/FluxML/Zygote.jl/issues?q=is%3Aissue+is%3Aopen+label%3A%22second+order%22), which will outline where the bodies are buried. + +Often using a different AD system over Zygote is a better solution. +This is what [`hessian`](@ref) does, using ForwardDiff over Zygote, but other combinations are possible. +(Note that rules defined here mean that Zygote over ForwardDiff is translated to ForwardDiff over ForwardDiff.) From 5c81bcf5d6d328120cb4d28363b3985aea52f5eb Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Wed, 3 Aug 2022 17:23:24 -0700 Subject: [PATCH 369/490] add precompilation via SnoopPrecompile --- Project.toml | 2 ++ src/Zygote.jl | 3 +++ 2 files changed, 5 insertions(+) diff --git a/Project.toml b/Project.toml index 7d277f688..0287a71a5 100644 --- a/Project.toml +++ b/Project.toml @@ -20,6 +20,7 @@ MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" Requires = "ae029012-a4dd-5104-9daa-d747884805df" +SnoopPrecompile = "66db9d55-30c0-4569-8b51-7e840670fc0c" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" @@ -41,6 +42,7 @@ MacroTools = "0.5" NaNMath = "0.3, 1" Requires = "1.1" SpecialFunctions = "1.6, 2" +SnoopPrecompile = "1" ZygoteRules = "0.2.1" julia = "1.6" diff --git a/src/Zygote.jl b/src/Zygote.jl index 8a51b14fd..fc9d13f8f 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -79,4 +79,7 @@ macro profile(ex) end end +using SnoopPrecompile +@precompile_all_calls precompile() + end # module From 81680a7b8da780aad2235e8389a8fbd4517bd8c2 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Fri, 25 Nov 2022 05:55:35 +0100 Subject: [PATCH 370/490] tests for dictionaries (#1330) * tests for issue 717 * test for issue 760 * fix tests * add .vscode to gitignore * cleanup --- .gitignore | 1 + test/features.jl | 18 ++++++++++++++++++ 2 files changed, 19 insertions(+) diff --git a/.gitignore b/.gitignore index 915632fc5..2640c66fa 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ docs/build docs/Manifest.toml Manifest.toml dev/ +.vscode/ diff --git a/test/features.jl b/test/features.jl index ce0d1bcff..e4fe61140 100644 --- a/test/features.jl +++ b/test/features.jl @@ -817,3 +817,21 @@ end @test gradient(xs -> sum(map(x->x.im^2, xs)), [1+2im,3])[1] == [4im, 0] @test gradient(xs -> mapreduce(x->x.im^2, +, xs), 
[1+2im,3])[1] == [4im, 0] end + +@testset "Dict" begin + # issue #717 + @test gradient(x -> (() -> x[:y])(), Dict(:y => 0.4)) == (Dict(:y => 1.0),) + + ntd = (; data = Dict("x" => rand(2))) + @test gradient(x -> sum(x.data["x"]), ntd)[1] == (; data = Dict("x" => ones(2))) + + # issue #760 + function f760(x) + d = Dict() + for i in 1:4 + push!(d, i=>i^x) + end + sum(values(d)) + end + @test gradient(f760, 3)[1] ≈ 123.93054835019153 +end From 573c657daa8d2ac100bb887167147df7103b7222 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Sun, 27 Nov 2022 18:26:27 +0100 Subject: [PATCH 371/490] fix collect and comprehension for NamedTuple and Dict (#1331) * fix collect for Dict and NamedTuple * add tests * support comprehension * test types * more tests * Update test/lib/array.jl Co-authored-by: Brian Chen * add test for tuple * cleanup * Update src/lib/array.jl Co-authored-by: Brian Chen Co-authored-by: Brian Chen --- src/lib/array.jl | 50 ++++++++++++++++++++++++++++++++++++++++------- test/lib/array.jl | 48 +++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 91 insertions(+), 7 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 420e3716f..4b8f90609 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -8,8 +8,26 @@ using Distributed: pmap, AbstractWorkerPool @adjoint copy(x::AbstractArray) = copy(x), ȳ -> (ȳ,) -@adjoint collect(x::Tuple) = collect(x), dy -> (Tuple(dy),) -@adjoint collect(x::AbstractArray) = collect(x), dy -> (dy,) +@adjoint function collect(x::Tuple) + collect_tuple_pullback(dy) = (Tuple(dy),) + collect(x), collect_tuple_pullback +end + +@adjoint function collect(x::NamedTuple{names}) where names + collect_namedtuple_pullback(dy) = (NamedTuple{names}(Tuple(dy)),) + collect(x), collect_namedtuple_pullback +end + +@adjoint function collect(x::AbstractArray) + collect_array_pullback(dy) = (dy,) + collect(x), collect_array_pullback +end + +@adjoint function collect(d::Dict) + _keys = collect(keys(d)) + collect_dict_pullback(Δ) = (reconstruct_if_dict(Δ, _keys),) + collect(d), collect_dict_pullback +end # Array Constructors @adjoint function (::Type{T})(x::Number, sz) where {T <: Fill} @@ -211,13 +229,31 @@ end end function _pullback(cx::AContext, ::typeof(collect), g::Base.Generator) - y, b = ∇map(cx, g.f, g.iter) - back(::Nothing) = nothing - function back(ȳ) - f̄, x̄ = b(ȳ) + giter, _keys = collect_if_dict(g.iter) # map is not defined for dictionaries + y, map_pullback = ∇map(cx, g.f, giter) + + collect_pullback(::Nothing) = nothing + + function collect_pullback(ȳ) + f̄, x̄ = map_pullback(ȳ) + x̄ = reconstruct_if_dict(x̄, _keys) # return a dictionary if needed (nothing, (f = f̄, iter = x̄),) end - y, back + y, collect_pullback +end + +collect_if_dict(x::Dict) = collect(x), collect(keys(x)) +collect_if_dict(x) = x, nothing + +reconstruct_if_dict(x̄, _keys::Nothing) = x̄ + +function reconstruct_if_dict(x̄, _keys) + # This reverses `collect_if_dict`, which returns `_keys::Nothing` if x is not a Dict + @assert x̄ isa AbstractVector{<:Union{Nothing, NamedTuple{(:first,:second)}}} + # we don't compute gradients with respect to keys + # @assert all(x -> x === nothing || x[1] == 0 || x[1] === nothing, x̄) + d̄ = Dict(k => isnothing(x) ? nothing : x[2] for (x, k) in zip(x̄, _keys)) + return d̄ end @adjoint iterate(r::UnitRange, i...) 
= iterate(r, i...), _ -> nothing diff --git a/test/lib/array.jl b/test/lib/array.jl index 1d044de99..a527f9bc6 100644 --- a/test/lib/array.jl +++ b/test/lib/array.jl @@ -18,3 +18,51 @@ test_rrule(ZygoteRuleConfig(), x->sum(sin, Diagonal(x)), rand(3); rrule_f=rrule_ @test gradient(x -> sum([y[2] * y[3] for y in Iterators.product(x, x, x, x)]), [1,2,3,4])[1] ≈ [320, 320, 320, 320] @test gradient(x -> sum(y[2] * y[3] for y in Iterators.product(x, x, x, x)), [1,2,3,4])[1] ≈ [320, 320, 320, 320] end + +@testset "collect" begin + @testset "Dict" begin + d = Dict(1 => 5, 2 => 6) + k = 2 + i = findfirst(p -> p[1] == k, collect(d)) + g = gradient(d -> collect(d)[i][2], d)[1] + @test g isa Dict{Int64, <:Union{Nothing, Int64}} + @test g[k] == 1 + + g = gradient(d -> sum(v^2 for (_,v) in collect(d)), d)[1] + @test g isa Dict{Int,Int} + @test g == Dict(1 => 10, 2 => 12) + end + + @testset "NamedTuple" begin + t = (a=1, b=2) + g = gradient(d -> sum(x^2 for x in collect(d)), t)[1] + @test g === (a = 2.0, b = 4.0) + end + + @testset "Tuple" begin + t = (1, 2) + g = gradient(d -> sum(x^2 for x in collect(d)), t)[1] + @test g === (2.0, 4.0) + end +end + +@testset "dictionary comprehension" begin + d = Dict(1 => 5, 2 => 6) + g = gradient(d -> sum([v^2 for (_,v) in d]), d)[1] + @test g isa Dict{Int, Int} + @test g == Dict(1 => 10, 2 => 12) + + + w = randn(5) + function f_generator(w) + d = Dict{Int, Float64}(i => v for (i,v) in enumerate(w)) + sum(v for (_, v) in d) + end + @test gradient(f_generator, w)[1] == ones(5) + + function f_comprehension(w) + d = Dict{Int, Float64}(i => v for (i,v) in enumerate(w)) + sum(v for (_, v) in d) + end + @test gradient(f_comprehension, w)[1] == ones(5) +end From 353bf4a83e940cd1a30d18893dd264b23c18e668 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Sun, 27 Nov 2022 19:20:57 +0100 Subject: [PATCH 372/490] Update Project.toml --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 0287a71a5..ba2d63562 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.49" +version = "0.6.50" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 93017bba11201c1c171a60f05f3aa108457db8eb Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Mon, 28 Nov 2022 16:02:22 +0100 Subject: [PATCH 373/490] cl/hotfix (#1332) --- Project.toml | 8 +++----- src/Zygote.jl | 5 +++-- 2 files changed, 6 insertions(+), 7 deletions(-) diff --git a/Project.toml b/Project.toml index ba2d63562..82d2f59f0 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.50" +version = "0.6.51" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" @@ -10,7 +10,7 @@ DiffRules = "b552c78f-8df3-52c6-915a-8e097449b14b" Distributed = "8ba89e20-285c-5b6f-9357-94700520ee1b" FillArrays = "1a297f60-69ca-5386-bcde-b61e274b549b" ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" -GPUArrays = "0c68f7d7-f131-5f86-a1c3-88cf8149b2d7" # not loaded, just a version bound +GPUArrays = "0c68f7d7-f131-5f86-a1c3-88cf8149b2d7" GPUArraysCore = "46192b85-c4d5-4398-a991-12ede77f4527" IRTools = "7869d1d1-7146-5819-86e3-90919afe41df" InteractiveUtils = "b77e0a4c-d291-57a0-90e8-8db25a27a240" @@ -20,7 +20,6 @@ MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" Requires = "ae029012-a4dd-5104-9daa-d747884805df" 
-SnoopPrecompile = "66db9d55-30c0-4569-8b51-7e840670fc0c" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" @@ -34,7 +33,7 @@ ChainRulesTestUtils = "1" DiffRules = "1.4" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12, 0.13" ForwardDiff = "0.10" -GPUArrays = "8.4.2" # not loaded, just a version bound +GPUArrays = "8.4.2" GPUArraysCore = "0.1.1" IRTools = "0.4.4" LogExpFunctions = "0.3.1" @@ -42,7 +41,6 @@ MacroTools = "0.5" NaNMath = "0.3, 1" Requires = "1.1" SpecialFunctions = "1.6, 2" -SnoopPrecompile = "1" ZygoteRules = "0.2.1" julia = "1.6" diff --git a/src/Zygote.jl b/src/Zygote.jl index fc9d13f8f..c651968ba 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -79,7 +79,8 @@ macro profile(ex) end end -using SnoopPrecompile -@precompile_all_calls precompile() +## reverted due to https://github.com/SciML/DiffEqFlux.jl/issues/783 +# using SnoopPrecompile +# @precompile_all_calls precompile() end # module From b1faa1a06873aa7664c114c8c30e0d11247acdff Mon Sep 17 00:00:00 2001 From: bertschi Date: Sat, 3 Dec 2022 17:56:47 +0100 Subject: [PATCH 374/490] test for issue 796 --- test/gradcheck.jl | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 3cc10ce82..47cd03cea 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -2045,3 +2045,23 @@ end h(x) = sum(abs2, g(x)) @test gradient(h, x)[1] isa typeof(x) end + +@testset "Zygote issue 796" begin + function foo(z::Float64) + x = 1.0 + y = 1.0 + z + j = 1 + while abs(x - y) > 1e-6 + tmp = (x + y) / 2 + x = y + y = tmp + j += 1 + end + return y + end + + @test gradcheck(foo, 0.0) + @test gradcheck(foo, 2.0) + @test gradcheck(foo, -2.0) + @test gradcheck(foo, 1024.0) +end From c328cd58414bc043627e08a8dbe26444f071ef58 Mon Sep 17 00:00:00 2001 From: bertschi Date: Sun, 4 Dec 2022 00:12:40 +0100 Subject: [PATCH 375/490] fixed test for issue 796 --- test/gradcheck.jl | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 47cd03cea..330348988 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -2050,18 +2050,14 @@ end function foo(z::Float64) x = 1.0 y = 1.0 + z - j = 1 while abs(x - y) > 1e-6 - tmp = (x + y) / 2 - x = y - y = tmp - j += 1 + y, x = (x + y) / 2, y end return y end - @test gradcheck(foo, 0.0) - @test gradcheck(foo, 2.0) - @test gradcheck(foo, -2.0) - @test gradcheck(foo, 1024.0) + @test gradcheck(foo ∘ first, [0.0]) + @test gradcheck(foo ∘ first, [2.0]) + @test gradcheck(foo ∘ first, [-1e-5]) + @test gradient(foo, 1024.0)[1] ≈ 2//3 end From 604d35f3855adf702a68b6119ef209d8acbc25b5 Mon Sep 17 00:00:00 2001 From: Nils Bertschinger Date: Tue, 6 Dec 2022 21:49:13 +0100 Subject: [PATCH 376/490] Changing test name as suggested Co-authored-by: Brian Chen --- test/gradcheck.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 330348988..540d85e92 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -2046,7 +2046,7 @@ end @test gradient(h, x)[1] isa typeof(x) end -@testset "Zygote issue 796" begin +@testset "Zygote #796" begin function foo(z::Float64) x = 1.0 y = 1.0 + z From c2fb256e0d4f96504a4cc4251e8348d2de4a5d1c Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Sun, 11 Dec 2022 19:24:34 +0100 Subject: [PATCH 377/490] Delete bors.toml (#1339) --- bors.toml | 5 ----- 1 file changed, 5 deletions(-) delete mode 100644 bors.toml diff --git 
a/bors.toml b/bors.toml deleted file mode 100644 index 6d529fdd1..000000000 --- a/bors.toml +++ /dev/null @@ -1,5 +0,0 @@ -status = [ - "buildkite/zygote-dot-jl" -] - -timeout-sec = 14400 From ae2fa0c5b86d7093c5afeb997a358e1737a27227 Mon Sep 17 00:00:00 2001 From: marius Date: Sat, 7 Jan 2023 00:52:43 -0800 Subject: [PATCH 378/490] best-effort widen Buffer on back pass if needed --- src/lib/buffer.jl | 12 +++++++----- src/lib/lib.jl | 9 +-------- test/gradcheck.jl | 12 ++++++++++++ 3 files changed, 20 insertions(+), 13 deletions(-) diff --git a/src/lib/buffer.jl b/src/lib/buffer.jl index b3aef17f0..0c1c873e9 100644 --- a/src/lib/buffer.jl +++ b/src/lib/buffer.jl @@ -1,11 +1,13 @@ -grad_mut(b::Buffer) = fill!(similar(b.data, Any), nothing) -grad_mut(b::Buffer{T}) where T<:Number = fill!(similar(b.data, float(T)), 0) +grad_mut(cx::Context, b::Buffer{T}, ::Type{S}=Union{}) where {T, S} = + get!(() -> fill!(similar(b.data, Any), nothing), cache(cx), b) +grad_mut(cx::Context, b::Buffer{T}, ::Type{S}=Union{}) where {T<:Number, S} = + get!(() -> fill!(similar(b.data, float(promote_type(T,S))), 0), cache(cx), b) @non_differentiable Buffer(::Any...) @adjoint function getindex(b::Buffer, i...) - b[i...], function (Δ) - grad = grad_mut(__context__, b) + b[i...], function (Δ::S) where {S} + grad = grad_mut(__context__, b, S) grad[i...] = accum(grad[i...], Δ) return end @@ -48,7 +50,7 @@ _pullback(cx::AContext, ::typeof(Broadcast.materialize!), b::Buffer, x::Abstract res = copy(b) function copy_sensitivity(b̄) - grad_mut(__context__, b)[:] .= vec(b̄) + grad_mut(__context__, b, eltype(b̄))[:] .= vec(b̄) return end diff --git a/src/lib/lib.jl b/src/lib/lib.jl index 52a734809..c08b96511 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -264,14 +264,7 @@ end grad_mut(x) = Ref{Any}(nt_nothing(x)) -function grad_mut(cx::Context, x) - ch = cache(cx) - if haskey(ch, x) - ch[x] - else - ch[x] = grad_mut(x) - end -end +grad_mut(cx::Context, x) = get!(() -> grad_mut(x), cache(cx), x) @adjoint! function setfield!(x, f, val) y = setfield!(x, f, val) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 540d85e92..71ac5109f 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1523,6 +1523,18 @@ using Zygote: Buffer prod(copy(b)) end == (3,) + # backwards pass Buffer widening (#1349) + @test Zygote.hessian(1.) do A + buf = Zygote.Buffer([0, 0]) + buf[:] = [1, 2] + sum(A^2 .* copy(buf)) + end == 6 + @test Zygote.hessian(1.) do A + buf = Zygote.Buffer([0, 0]) + buf[1] = 1 + A^2 * buf[1] + end == 2 + # Buffer storing arrays test W1 = ones(3, 3) W2 = ones(3, 3) From d20574fcd45f4aa958e87c552787f4d9575858b8 Mon Sep 17 00:00:00 2001 From: marius Date: Sat, 7 Jan 2023 14:08:58 -0800 Subject: [PATCH 379/490] fix Flux test --- src/lib/buffer.jl | 4 ++-- src/lib/lib.jl | 12 +++++++++++- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/src/lib/buffer.jl b/src/lib/buffer.jl index 0c1c873e9..1037ed0b2 100644 --- a/src/lib/buffer.jl +++ b/src/lib/buffer.jl @@ -1,7 +1,7 @@ grad_mut(cx::Context, b::Buffer{T}, ::Type{S}=Union{}) where {T, S} = - get!(() -> fill!(similar(b.data, Any), nothing), cache(cx), b) + _get!(() -> fill!(similar(b.data, Any), nothing), cache(cx), b) grad_mut(cx::Context, b::Buffer{T}, ::Type{S}=Union{}) where {T<:Number, S} = - get!(() -> fill!(similar(b.data, float(promote_type(T,S))), 0), cache(cx), b) + _get!(() -> fill!(similar(b.data, float(promote_type(T,S))), 0), cache(cx), b) @non_differentiable Buffer(::Any...) 
diff --git a/src/lib/lib.jl b/src/lib/lib.jl index c08b96511..eaa49ada2 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -264,7 +264,17 @@ end grad_mut(x) = Ref{Any}(nt_nothing(x)) -grad_mut(cx::Context, x) = get!(() -> grad_mut(x), cache(cx), x) +grad_mut(cx::Context, x) = _get!(() -> grad_mut(x), cache(cx), x) + +# needed for reverse-over-reverse pending rrule for Base.get! +function _get!(default::Base.Callable, ch, x) + if haskey(ch, x) + ch[x] + else + ch[x] = default() + end +end + @adjoint! function setfield!(x, f, val) y = setfield!(x, f, val) From 14b2a3f3de9145d0797f16168999d02a06e92537 Mon Sep 17 00:00:00 2001 From: Marius Millea Date: Sat, 7 Jan 2023 18:22:21 -0800 Subject: [PATCH 380/490] Update src/lib/buffer.jl Co-authored-by: Brian Chen --- src/lib/buffer.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/buffer.jl b/src/lib/buffer.jl index 1037ed0b2..424c26a7e 100644 --- a/src/lib/buffer.jl +++ b/src/lib/buffer.jl @@ -1,7 +1,7 @@ grad_mut(cx::Context, b::Buffer{T}, ::Type{S}=Union{}) where {T, S} = _get!(() -> fill!(similar(b.data, Any), nothing), cache(cx), b) grad_mut(cx::Context, b::Buffer{T}, ::Type{S}=Union{}) where {T<:Number, S} = - _get!(() -> fill!(similar(b.data, float(promote_type(T,S))), 0), cache(cx), b) + _get!(() -> fill!(similar(b.data, float(promote_type(T, S))), 0), cache(cx), b) @non_differentiable Buffer(::Any...) From 7beb46d0756e28e2c702022ee5d1f0db3175af79 Mon Sep 17 00:00:00 2001 From: marius Date: Sat, 7 Jan 2023 18:30:10 -0800 Subject: [PATCH 381/490] code comment --- src/lib/buffer.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/buffer.jl b/src/lib/buffer.jl index 424c26a7e..4faac2584 100644 --- a/src/lib/buffer.jl +++ b/src/lib/buffer.jl @@ -1,4 +1,4 @@ -grad_mut(cx::Context, b::Buffer{T}, ::Type{S}=Union{}) where {T, S} = +grad_mut(cx::Context, b::Buffer, ::Type=Union{}) = _get!(() -> fill!(similar(b.data, Any), nothing), cache(cx), b) grad_mut(cx::Context, b::Buffer{T}, ::Type{S}=Union{}) where {T<:Number, S} = _get!(() -> fill!(similar(b.data, float(promote_type(T, S))), 0), cache(cx), b) From f3857d18bba9676bef8614d0c2a61b07df3d4dcf Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 7 Jan 2023 19:29:57 -0800 Subject: [PATCH 382/490] 0.6.52 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 82d2f59f0..f94d259ca 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.51" +version = "0.6.52" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 616bf6cfb7d7cea2b755a31f36274e9fff18f678 Mon Sep 17 00:00:00 2001 From: Paul Tiede Date: Tue, 10 Jan 2023 11:02:30 -0500 Subject: [PATCH 383/490] Adding complex broadcasting for gradients on the GPU (#1324) * Added complex broadcasting support * Added tests and clean up the code * Fix up type instability * Add testing * Everything passes tests now * switch to more generic broadcast_forward * clean up submission * Remove various Val's * change to Complex{<:Dual} * add mixed complex and real to cuda testing * import not using * Add complex to _dual_safearg * Type stable on my computer * Fix Dual tagging * Add more tests * update tests * First attempt to fix real performance regression * Uncomment ldexp rules * cleanup broadcast and inline * update tests * specify more reasonable tolerance for float32 * revert testing bug * clean up the submission --- src/lib/broadcast.jl | 113 
+++++++++++++++++++++++++++++++++++++------ test/complex.jl | 1 - test/cuda.jl | 98 +++++++++++++++++++++++++++++-------- 3 files changed, 174 insertions(+), 38 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 58f7ecf99..dc7b053c1 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -120,6 +120,9 @@ end @adjoint broadcasted(::typeof(imag), x::Numeric) = imag.(x), z̄ -> (nothing, im .* real.(z̄)) +@adjoint broadcasted(::typeof(abs2), x::Numeric) = + abs2.(x), z̄ -> (nothing, 2 .* real.(z̄) .* x) + @adjoint function broadcasted(::typeof(+), a::AbstractArray{<:Number}, b::Bool) y = b === false ? a : a .+ b y, Δ -> (nothing, Δ, nothing) @@ -181,7 +184,9 @@ _dual_purefun(::Type) = false _dual_purefun(::Type{typeof(^)}) = false # avoid DomainError from negative powers _dual_safearg(x::Numeric{<:Real}) = true +_dual_safearg(x::Numeric{<:Complex}) = true _dual_safearg(x::Ref{<:Numeric{<:Real}}) = true +_dual_safearg(x::Ref{<:Numeric{<:Complex}}) = true _dual_safearg(x::Union{Type,Val,Symbol}) = true # non-differentiable types _dual_safearg(x) = false @@ -190,7 +195,7 @@ _dual_safearg(x) = false # Avoid generic broadcasting in two easy cases: if T == Bool return (f.(args...), _ -> nothing) - elseif T <: Real && isconcretetype(T) && _dual_purefun(F) && all(_dual_safearg, args) && !isderiving() + elseif T <: Union{Real, Complex} && isconcretetype(T) && _dual_purefun(F) && all(_dual_safearg, args) && !isderiving() return broadcast_forward(f, args...) end len = inclen(args) @@ -230,35 +235,112 @@ end # Forward Mode -- necessary for CUDA, also used as a fast path above import ForwardDiff -using ForwardDiff: Dual +using ForwardDiff: Dual, Partials, value, partials + + +# We do this because it ensures type stability so it compiles nicely on the gpu +# The val is needed for some type stability +@inline dual(x, i, ::Val{N}) where {N} = x +@inline dual(x::Bool, i, ::Val{N}) where {N} = x +@inline dual(x::Real, i, ::Val{N}) where {N} = Dual(x, ntuple(==(i), N)) +# For complex since ForwardDiff.jl doesn't play nicely with complex numbers we +# construct a Complex dual number and tag the real and imaginary parts separately +@inline function dual(x::Complex{T}, i, ::Val{N}) where {T,N} + re_dual = Dual(real(x), ntuple(==(i), 2N)) + im_dual = Dual(imag(x), ntuple(==(N+i), 2N)) + return Complex(re_dual, im_dual) +end -dual(x, p) = x -dual(x::Real, p) = Dual(x, p) -dual(x::Bool, p) = x +function dualize(args::Vararg{Any, N}) where {N} + ds = map(args, ntuple(identity,N)) do x, i + return dual(x, i, Val(N)) + end + return ds +end -function dual_function(f::F) where F - function (args::Vararg{Any,N}) where N - ds = map(args, ntuple(identity,Val(N))) do x, i - dual(x, ntuple(j -> i==j, Val(N))) +@inline function dual_function(f::F) where F + function (args::Vararg{Any,N}) where N + ds = dualize(args...) + return f(ds...) end - return f(ds...) end -end + @inline function broadcast_forward(f, args::Vararg{Any,N}) where N - valN = Val(N) out = dual_function(f).(args...) - eltype(out) <: Dual || return (out, _ -> nothing) - y = broadcast(x -> x.value, out) + T = eltype(out) + T <: Union{Dual, Complex{<:Dual}} || return (out, _ -> nothing) + if any(eltype(a) <: Complex for a in args) + _broadcast_forward_complex(T, out, args...) + else + _broadcast_forward(T, out, args...) 
+ end +end + +# Real input and real output pullback +@inline function _broadcast_forward(::Type{<:Dual}, out, args::Vararg{Any, N}) where {N} + valN = Val(N) + y = broadcast(x -> value(x), out) function bc_fwd_back(ȳ) dargs = ntuple(valN) do i - unbroadcast(args[i], broadcast((y1, o1) -> y1 * o1.partials[i], ȳ, out)) + unbroadcast(args[i], broadcast((y1, o1) -> y1 * partials(o1,i), ȳ, out)) end (nothing, nothing, dargs...) # nothings for broadcasted & f end return y, bc_fwd_back end +# This handles the complex output and real input pullback +@inline function _broadcast_forward(::Type{<:Complex}, out, args::Vararg{Any, N}) where {N} + valN = Val(N) + y = broadcast(x -> Complex(value(real(x)), value(imag(x))), out) + function bc_fwd_back(ȳ) + dargs = ntuple(valN) do i + unbroadcast(args[i], broadcast((y1, o1) -> (real(y1)*partials(real(o1),i) + imag(y1)*partials(imag(o1), i)), ȳ, out)) + end + (nothing, nothing, dargs...) # nothings for broadcasted & f + end + return y, bc_fwd_back + end + +# This handles complex input and real output. We use the gradient definition from ChainRules here +# since it agrees with what Zygote did for real(x). +@inline function _broadcast_forward_complex(::Type{<:Dual}, out, args::Vararg{Any, N}) where {N} + valN = Val(N) + y = broadcast(x -> value(x), out) + function bc_fwd_back(ȳ) + dargs = ntuple(valN) do i + unbroadcast(args[i], broadcast((y1, o1) -> y1 * Complex(partials(o1, i), partials(o1, i+N)), ȳ, out)) + end + (nothing, nothing, dargs...) # nothings for broadcasted & f + end + return y, bc_fwd_back +end + +# # # This is for complex input and complex output +# If we assume that +# f(x + iy) = u(x,y) + iv(x,y) +# then we do the following for the adjoint +# Δu ∂u/∂x + Δv∂v/∂x + i(Δu∂u/∂y + Δv ∂v/∂y ) +# this follows https://juliadiff.org/ChainRulesCore.jl/stable/maths/complex.html +function _adjoint_complex(N, Δz, df, i) + Δu, Δv = reim(Δz) + du, dv = reim(df) + return Complex(Δu*partials(du, i) + Δv*partials(dv, i), Δu*partials(du, i+N) + Δv*partials(dv, i+N)) +end + +@inline function _broadcast_forward_complex(::Type{<:Complex}, out, args::Vararg{Any, N}) where {N} + valN = Val(N) + y = broadcast(x -> Complex(value(real(x)), value(imag(x))), out) + function bc_fwd_back(ȳ) + dargs = ntuple(valN) do i + unbroadcast(args[i], broadcast((y1, o1) -> _adjoint_complex(N, y1, o1, i), ȳ, out)) + end + (nothing, nothing, dargs...) # nothings for broadcasted & f + end + return y, bc_fwd_back +end + using GPUArraysCore # replaces @require CUDA block, weird indenting to preserve git blame # Ordinary broadcasting calls broadcast_forward anyway when certain its' safe, @@ -287,4 +369,3 @@ using GPUArraysCore # replaces @require CUDA block, weird indenting to preserve end pull_block_vert(sz, Δ::AbstractGPUArray, A::Number) = @allowscalar Δ[sz] - diff --git a/test/complex.jl b/test/complex.jl index efb1e06dd..e50c57486 100644 --- a/test/complex.jl +++ b/test/complex.jl @@ -120,4 +120,3 @@ end end @test Zygote.hessian(fun, collect(1:9)) ≈ [14 0 0 0 0 0 2 0 0; 0 16 0 0 0 0 0 4 0; 0 0 18 0 0 0 0 0 6; 0 0 0 14 0 0 8 0 0; 0 0 0 0 16 0 0 10 0; 0 0 0 0 0 18 0 0 12; 2 0 0 8 0 0 0 0 0; 0 4 0 0 10 0 0 0 0; 0 0 6 0 0 12 0 0 0] end - diff --git a/test/cuda.jl b/test/cuda.jl index 5cb1c8cdc..171fa45db 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -2,8 +2,17 @@ using CUDA using Zygote: Grads using LinearAlgebra using Random: randn! +import FiniteDifferences CUDA.allowscalar(false) +function gradcheck_gpu(f, xs...) + grad_zygote = gradient(f, xs...) 
+ m = FiniteDifferences.central_fdm(5,1) + grad_finite_difference = FiniteDifferences.grad(m, f, collect.(xs)...) + return all(isapprox.(collect.(grad_zygote), grad_finite_difference)) +end + + # Test GPU movement inside the call to `gradient` @testset "GPU movement" begin r = rand(Float32, 3,3) @@ -26,7 +35,7 @@ end g_gpu = gradient(x -> v(x, 7), a_gpu)[1] @test g_gpu isa CuArray @test g_gpu |> collect ≈ g - + w(x) = sum(broadcast(log, x)) g = gradient(x -> w(x), a)[1] g_gpu = gradient(x -> w(x), a_gpu)[1] @@ -38,7 +47,7 @@ end @test gradient(x -> sum(x .> 3), a_gpu) == (nothing,) g3 = gradient(x -> sum(x .^ 3) / count(x .> 3), a)[1] # was Can't differentiate gc_preserve_end expression @test_skip cu(g3) ≈ gradient(x -> sum(x .^ 3) / sum(x .> 3), a_gpu)[1] # was KernelException -- not fixed by PR #1018 - @test cu(g3) ≈ gradient(x -> sum(x .^ 3) / count(x .> 3), a_gpu)[1] + @test cu(g3) ≈ gradient(x -> sum(x .^ 3) / count(x .> 3), a_gpu)[1] # Projection: eltype preservation: @test gradient(x -> 2.3 * sum(x.^4), a_gpu)[1] isa CuArray{Float32} @@ -90,40 +99,40 @@ end @testset "gradient algebra" begin w, b = rand(2) |> cu, rand(2) |> cu x1, x2 = rand(2) |> cu, rand(2) |> cu - - gs1 = gradient(() -> sum(w .* x1), Params([w])) - gs2 = gradient(() -> sum(w .* x2), Params([w])) + + gs1 = gradient(() -> sum(w .* x1), Params([w])) + gs2 = gradient(() -> sum(w .* x2), Params([w])) @test .- gs1 isa Grads - @test gs1 .- gs2 isa Grads + @test gs1 .- gs2 isa Grads @test .+ gs1 isa Grads - @test gs1 .+ gs2 isa Grads - @test 2 .* gs1 isa Grads + @test gs1 .+ gs2 isa Grads + @test 2 .* gs1 isa Grads @test (2 .* gs1)[w] ≈ 2 * gs1[w] - @test gs1 .* 2 isa Grads - @test gs1 ./ 2 isa Grads - @test (gs1 .+ gs2)[w] ≈ gs1[w] .+ gs2[w] + @test gs1 .* 2 isa Grads + @test gs1 ./ 2 isa Grads + @test (gs1 .+ gs2)[w] ≈ gs1[w] .+ gs2[w] gs12 = gs1 .+ gs2 gs1 .+= gs2 - @test gs12[w] ≈ gs1[w] + @test gs12[w] ≈ gs1[w] gs3 = gradient(() -> sum(w .* x1), Params([w, b])) # grad nothing with respect to b - gs4 = gradient(() -> sum(w .* x2 .+ b), Params([w, b])) + gs4 = gradient(() -> sum(w .* x2 .+ b), Params([w, b])) @test .- gs3 isa Grads - @test gs3 .- gs4 isa Grads + @test gs3 .- gs4 isa Grads @test .+ gs3 isa Grads - @test gs3 .+ gs4 isa Grads - @test 2 .* gs3 isa Grads - @test gs3 .* 2 isa Grads - @test gs3 ./ 2 isa Grads + @test gs3 .+ gs4 isa Grads + @test 2 .* gs3 isa Grads + @test gs3 .* 2 isa Grads + @test gs3 ./ 2 isa Grads @test (gs3 .+ gs4)[w] ≈ gs3[w] .+ gs4[w] - @test (gs3 .+ gs4)[b] ≈ gs4[b] - + @test (gs3 .+ gs4)[b] ≈ gs4[b] + @test gs3 .+ IdDict(w => similar(w), b => similar(b)) isa Grads gs3 .+= IdDict(p => randn!(similar(p)) for p in keys(gs3)) - @test gs3 isa Grads + @test gs3 isa Grads @test_throws ArgumentError gs1 .+ gs4 end @@ -140,3 +149,50 @@ end @test_skip gradient((x,y) -> sum(vcat(x,y)), 1f0, r, 2f0, r)[2] isa CUDA.CuArray{Float32} end + +@testset "CUDA complex broadcasting" begin + # Issue 961 and 1121 and 1215 + x = 2*rand(Float32, 10) .- 1f0 + y = 2*rand(ComplexF32, 10) .- 1f0 + + xgpu =cu(x) + ygpu =cu(y) + + g1 = Zygote.gradient(x->sum(abs2, x), ygpu)[1] + g2 = Zygote.gradient(x->sum(abs2.(x)), ygpu)[1] + g3 = Zygote.gradient(x->sum(abs2, x), y)[1] + @test g1 isa CUDA.CuArray{ComplexF32} + @test g2 isa CUDA.CuArray{ComplexF32} + @test collect(g1) ≈ collect(g2) + @test collect(g1) ≈ g3 + + + r3 = cu(Float32.(inv.(2:4))) + c3 = cu(ComplexF32.(inv.(5:7) .+ im ./ (8:10))) + + + # These check _broadcast_forward(::Type{<:Dual}, ...) 
+ @test gradcheck_gpu((x,y)->sum(abs2, x.^2 .+ y), xgpu, ygpu) + @test gradcheck_gpu((x,y)->sum(abs, exp.(x) .+ imag.(y)), xgpu, ygpu) + + # These check _broadcast_forward_complex(::Type{<:Complex}, ...) + @test gradcheck_gpu((x,y)->sum(abs2, cos.(x) .+ sin.(y)), xgpu, ygpu) + @test gradcheck_gpu((x,y)->sum(abs, cos.(x).*sin.(y)), xgpu, ygpu) + @test gradcheck_gpu((x,y)->sum(abs, cos.(x) .+ sin.(conj.(y))), xgpu, ygpu) + @test gradcheck_gpu((x,y)->sum(abs, cos.(x) .+ sin.(conj.(y))), xgpu, ygpu) + @test gradcheck_gpu((r,c) -> sum(abs2, sin.(conj.(c)./transpose(r) .- im) .- imag.(c .+ tanh.(r./c'))), r3, c3) + + # Painful test! + @test gradcheck_gpu(c -> sum(abs2, imag.(sqrt.(c .+ im))), c3) + @test gradcheck_gpu(r -> sum(abs2, log.(1 .+ im .* r)./2), r3) + + + # These check _broadcast_forward(::Type{<:Complex}, ...) + @test gradcheck_gpu(x->sum(real, cis.(x)), xgpu) + @test gradcheck_gpu(x->sum(real, cispi.(x)), xgpu) + + # These check _broadcast_forward_complex(::Type{<:Dual}, ...) + @test gradcheck_gpu(x->sum(imag, x.^2 .+ abs.(sinh.(conj.(x)))), ygpu) + + +end From 34ece5b8c6ee2f2269e2d384ebd813e1af0489c9 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Tue, 10 Jan 2023 18:58:40 +0100 Subject: [PATCH 384/490] Update Project.toml @JuliaRegistrator register --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index f94d259ca..4173b1ffc 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.52" +version = "0.6.53" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 277bd29b7e83ac57b7b1ad9381d6b41e76407a8e Mon Sep 17 00:00:00 2001 From: David Widmann Date: Tue, 10 Jan 2023 21:13:58 +0100 Subject: [PATCH 385/490] Fix issue #1352 (Buffer regression) --- src/lib/buffer.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/buffer.jl b/src/lib/buffer.jl index 4faac2584..385f5ecc4 100644 --- a/src/lib/buffer.jl +++ b/src/lib/buffer.jl @@ -1,6 +1,6 @@ grad_mut(cx::Context, b::Buffer, ::Type=Union{}) = _get!(() -> fill!(similar(b.data, Any), nothing), cache(cx), b) -grad_mut(cx::Context, b::Buffer{T}, ::Type{S}=Union{}) where {T<:Number, S} = +grad_mut(cx::Context, b::Buffer{T}, ::Type{S}=Union{}) where {T<:Number, S<:Number} = _get!(() -> fill!(similar(b.data, float(promote_type(T, S))), 0), cache(cx), b) @non_differentiable Buffer(::Any...) 
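Patch 378 above makes the gradient storage for a numeric `Buffer` widen via `promote_type`, and patch 385 then restricts that branch to numeric cotangents (`S<:Number`), keeping cases like issue #1352, where the cotangent is an array rather than a number, on the generic path. A standalone sketch of the dispatch rule the two patches converge on; the helper name below is illustrative, not a Zygote internal:

```julia
# Mirror of the grad_mut dispatch above: widen only when both the buffer eltype T
# and the incoming cotangent type S are Numbers; otherwise store into an Any container.
storage_eltype(::Type, ::Type) = Any
storage_eltype(::Type{T}, ::Type{S}) where {T<:Number, S<:Number} = float(promote_type(T, S))

storage_eltype(Int, Float64)              # Float64: an Int buffer widens to hold Float64 cotangents
storage_eltype(Float64, Vector{Float64})  # Any: a non-numeric cotangent never reaches promote_type
```

Falling back to the `Any` container for non-numeric cotangents trades some type stability for correctness, matching the behaviour the generic `grad_mut` method already has for buffers whose own eltype is not a `Number`.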
From ce2983a91724093b3877e575d26752644b5bc8db Mon Sep 17 00:00:00 2001 From: David Widmann Date: Tue, 10 Jan 2023 21:38:00 +0100 Subject: [PATCH 386/490] Add test with MWE --- test/gradcheck.jl | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 71ac5109f..c517e859e 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1552,6 +1552,47 @@ using Zygote: Buffer @test ∇W1 == W1 @test ∇W2 == W2 @test ∇x == 6 .* x + + @testset "incorrect promotion (#1352)" begin + u = [0.75, 0.5] + p = [-1.5, 0.05, 0.2, 0.01] + + # in-place + function g1352!(du, u, p, t) + du[1, 1] = p[3] * u[1] + p[4] * u[2] + du[1, 2] = p[3] * u[1] + p[4] * u[2] + du[2, 1] = p[4] * u[1] + p[3] * u[2] + du[2, 2] = p[4] * u[1] + p[3] * u[2] + return nothing + end + du1_inplace, back_inplace = Zygote.pullback(u, p) do u, p + du = Zygote.Buffer(Matrix{Float64}(undef, 2, 2)) + g1352!(du, u, p, 1.0) + return copy(du[:, 1]) + end + + # out-of-place + function g1352(u, p, t) + du11 = p[3] * u[1] + p[4] * u[2] + du12 = p[3] * u[1] + p[4] * u[2] + du21 = p[4] * u[1] + p[3] * u[2] + du22 = p[4] * u[1] + p[3] * u[2] + return [du11 du12 + du21 du22] + end + du1, back = Zygote.pullback(u, p) do u, p + du = g1352(u, p, 1.0) + return du[:, 1] + end + + # comparison + @test du1_inplace ≈ du1 + v = randn(2) + ∇u_inplace, ∇p_inplace = back_inplace(v) + ∇u, ∇p = back(v) + @test ∇u_inplace ≈ ∇u + @test ∇p_inplace ≈ ∇p + end end @testset "AbstractArray Addition / Subtraction / Negation" begin From c2f1794ca9da3088a2f3bfb0144c8bfc4dd89d9a Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Tue, 10 Jan 2023 17:37:59 -0800 Subject: [PATCH 387/490] Update Project.toml --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 4173b1ffc..5261e5114 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.53" +version = "0.6.54" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From d1da34a619eed5d2daf5ed662949758d334830d7 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Wed, 11 Jan 2023 11:09:32 +0100 Subject: [PATCH 388/490] Remove `show` for `Type{<:Pullback}` --- src/compiler/show.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/src/compiler/show.jl b/src/compiler/show.jl index a72ca7527..8e6797f15 100644 --- a/src/compiler/show.jl +++ b/src/compiler/show.jl @@ -10,4 +10,3 @@ end Base.show(io::IO, j::Pullback{S}) where S = print(io, "∂($(funcname(S.parameters[1])))") -Base.show(io::IO, P::Type{<:Pullback{S}}) where S<:Tuple = print(io, "typeof(∂($(funcname(@isdefined(S) ? 
S.parameters[1] : nothing))))") From f1eae033db661f0001366fb51e3f920423d1b55f Mon Sep 17 00:00:00 2001 From: Frames White Date: Fri, 13 Jan 2023 18:04:47 +0000 Subject: [PATCH 389/490] Give method error if configured rrule is ambiguous --- src/compiler/chainrules.jl | 15 ++++++++++----- test/chainrules.jl | 19 ++++++++++++++----- 2 files changed, 24 insertions(+), 10 deletions(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 7c7de8655..a19d7f230 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -18,7 +18,10 @@ such that if a suitable rule is defined later, the generated function will recom function has_chain_rrule(T) config_T, arg_Ts = Iterators.peel(T.parameters) configured_rrule_m = meta(Tuple{typeof(rrule), config_T, arg_Ts...}) - if _is_rrule_redispatcher(configured_rrule_m.method) + is_ambig = configured_rrule_m === nothing # this means there was an ambiguity error on configured_rrule + + + if !is_ambig && _is_rrule_redispatcher(configured_rrule_m.method) # The config is not being used: # it is being redispatched without config, so we need the method it redispatches to rrule_m = meta(Tuple{typeof(rrule), arg_Ts...}) @@ -33,6 +36,8 @@ function has_chain_rrule(T) no_rrule_m = meta(Tuple{typeof(ChainRulesCore.no_rrule), config_T, arg_Ts...}) end + is_ambig |= rrule_m === nothing # this means there was an ambiguity error on unconfigured rrule + # To understand why we only need to check if the sigs match between no_rrule_m and rrule_m # in order to decide if to use, one must consider the following facts: # - for every method in `no_rrule` there is a identical one in `rrule` that returns nothing @@ -51,8 +56,7 @@ function has_chain_rrule(T) # It can be seen that checking if it matches is the correct way to decide if we should use the rrule or not. - do_not_use_rrule = matching_cr_sig(no_rrule_m, rrule_m) - if do_not_use_rrule + if !is_ambig && matching_cr_sig(no_rrule_m, rrule_m) # Not ambiguous, and opted out. # Return instance for configured_rrule_m as that will be invalidated # directly if configured rule added, or indirectly if unconfigured rule added # Do not need an edge for `no_rrule` as no addition of methods to that can cause this # using the rrule, so not using more rules wouldn't change anything. return false, configured_rrule_m.instance else - # Otherwise found a rrule, no need to add any edges for `rrule`, as it will generate + # Either it is ambiguous, and we should try to use it and then error, + # or we are using a rrule, no need to add any edges for `rrule`, as it will generate # code with natural edges if a new method is defined there.
# We also do not need an edge to `no_rrule`, as any time a method is added to `no_rrule` # a corresponding method is added to `rrule` (to return `nothing`), thus we will already @@ -73,7 +78,7 @@ matching_cr_sig(t, s) = matching_cr_sig(t.method.sig, s.method.sig) matching_cr_sig(::DataType, ::UnionAll) = false matching_cr_sig(::UnionAll, ::DataType) = false matching_cr_sig(t::Type, s::Type) = type_tuple_tail(t) == type_tuple_tail(s) -matching_cr_sig(::Any, ::Nothing) = false # https://github.com/FluxML/Zygote.jl/issues/1234 +matching_cr_sig(::Any, ::Nothing) = false # ambigious https://github.com/FluxML/Zygote.jl/issues/1234 type_tuple_tail(d::DataType) = Tuple{d.parameters[2:end]...} function type_tuple_tail(d::UnionAll) diff --git a/test/chainrules.jl b/test/chainrules.jl index e9cb4afbc..51e0a80c6 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -278,11 +278,20 @@ using Zygote: ZygoteRuleConfig # https://github.com/FluxML/Zygote.jl/issues/1234 @testset "rrule lookup ambiguities" begin - f_ambig(x, y) = x + y - ChainRulesCore.rrule(::typeof(f_ambig), x::Int, y) = x + y, _ -> (0, 0) - ChainRulesCore.rrule(::typeof(f_ambig), x, y::Int) = x + y, _ -> (0, 0) - - @test_throws MethodError pullback(f_ambig, 1, 2) + @testset "unconfigured" begin + f_ambig(x, y) = x + y + ChainRulesCore.rrule(::typeof(f_ambig), x::Int, y) = x + y, _ -> (0, 0) + ChainRulesCore.rrule(::typeof(f_ambig), x, y::Int) = x + y, _ -> (0, 0) + + @test_throws MethodError pullback(f_ambig, 1, 2) + end + @testset "configured" begin + h_ambig(x, y) = x + y + ChainRulesCore.rrule(::ZygoteRuleConfig, ::typeof(h_ambig), x, y) = x + y, _ -> (0, 0) + ChainRulesCore.rrule(::RuleConfig, ::typeof(h_ambig), x::Int, y::Int) = x + y, _ -> (0, 0) + + @test_throws MethodError pullback(h_ambig, 1, 2) + end end end From 42b8bca89bc31694e9ccc7e341065df044569ff4 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Fri, 13 Jan 2023 14:47:18 -0800 Subject: [PATCH 390/490] =?UTF-8?q?improve=20type=20stability=20of=20?= =?UTF-8?q?=E2=88=87broadcasted=20for=20custom=20Number=20types?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/lib/broadcast.jl | 5 ++--- test/features.jl | 17 ++++++++++++++++- 2 files changed, 18 insertions(+), 4 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index dc7b053c1..1cafa5d6d 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -203,9 +203,8 @@ _dual_safearg(x) = false y = broadcast(first, y∂b) function ∇broadcasted(ȳ) dxs_zip = map(((_, pb), ȳ₁) -> pb(ȳ₁), y∂b, ȳ) - dxs = ntuple(len) do i - collapse_nothings(map(StaticGetter{i}(), dxs_zip)) - end + getters = ntuple(i -> StaticGetter{i}(), len) + dxs = map(g -> collapse_nothings(map(g, dxs_zip)), getters) (nothing, accum_sum(dxs[1]), map(unbroadcast, args, Base.tail(dxs))...) 
end return y, ∇broadcasted diff --git a/test/features.jl b/test/features.jl index e4fe61140..e329942c5 100644 --- a/test/features.jl +++ b/test/features.jl @@ -745,7 +745,7 @@ end loss(x) = sum(abs2, net(x)) @test gradient(loss, ones(10,10))[1] == fill(131072, 10, 10) @test 150_000_000 > @allocated gradient(loss, ones(1000,1000)) - + # https://github.com/FluxML/Zygote.jl/issues/1233 function defensiveupdate(d, a) nd = deepcopy(d) @@ -818,6 +818,21 @@ end @test gradient(xs -> mapreduce(x->x.im^2, +, xs), [1+2im,3])[1] == [4im, 0] end +@testset "broadcast fallbacks" begin + # https://github.com/FluxML/Zygote.jl/issues/1359 + struct MyFloat64 <: Number + n::Float64 + end + + Base.exp(f::MyFloat64) = MyFloat64(exp(f.n)) + Base.conj(f::MyFloat64) = MyFloat64(conj(f.n)) + Base.:*(x::MyFloat64, y::MyFloat64) = MyFloat64(x.n * y.n) + + x = MyFloat64[1., 2., 3.] + result, pb = @inferred Zygote.pullback(Base.broadcasted, Base.Broadcast.DefaultArrayStyle{1}(), exp, x) + @inferred pb(MyFloat64[1., 1., 1.]) +end + @testset "Dict" begin # issue #717 @test gradient(x -> (() -> x[:y])(), Dict(:y => 0.4)) == (Dict(:y => 1.0),) From b3dea4947561ff57e9c97f4cceee6971a12caa85 Mon Sep 17 00:00:00 2001 From: Frames White Date: Tue, 17 Jan 2023 13:37:16 +0000 Subject: [PATCH 391/490] bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 5261e5114..8d0bf8ec4 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.54" +version = "0.6.55" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 45bf883491d2b52580d716d577e2fa8577a07230 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Wed, 25 Jan 2023 00:56:33 +0100 Subject: [PATCH 392/490] test pycall adjoint (#1363) * test pycall adjoint * Update Project.toml * Update test/features.jl * Update Project.toml --- Project.toml | 3 ++- test/features.jl | 8 ++++++++ 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 8d0bf8ec4..d64b909fb 100644 --- a/Project.toml +++ b/Project.toml @@ -50,7 +50,8 @@ ChainRulesTestUtils = "cdddcdb0-9152-4a09-a978-84456f9df70a" Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" +PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" [targets] -test = ["ChainRulesTestUtils", "CUDA", "Distances", "FFTW", "FiniteDifferences", "Test"] +test = ["ChainRulesTestUtils", "CUDA", "Distances", "FFTW", "FiniteDifferences", "PyCall", "Test"] diff --git a/test/features.jl b/test/features.jl index e329942c5..77a9b7165 100644 --- a/test/features.jl +++ b/test/features.jl @@ -681,6 +681,14 @@ end end == ([8 112; 36 2004],) end +@testset "PyCall custom @adjoint" begin + import PyCall + math = PyCall.pyimport("math") + pysin(x) = math.sin(x) + Zygote.@adjoint pysin(x) = math.sin(x), (δ) -> (δ * math.cos(x), ) + @test Zygote.gradient(pysin, 1.5) == Zygote.gradient(sin, 1.5) +end + # https://github.com/JuliaDiff/ChainRules.jl/issues/257 @testset "Keyword Argument Passing" begin struct Type1{VJP} From 34495d3209f69793fc0ca3a111e91877a4d18435 Mon Sep 17 00:00:00 2001 From: skyleaworlder <870033938@qq.com> Date: Thu, 2 Feb 2023 16:23:23 +0000 Subject: [PATCH 393/490] update: actions node 12 => node 16 --- .github/workflows/Downstream.yml | 4 ++-- .github/workflows/ci.yml | 8 ++++---- 
.github/workflows/clean_preview.yml | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/.github/workflows/Downstream.yml b/.github/workflows/Downstream.yml index 47f8032a5..9e8dcb0af 100644 --- a/.github/workflows/Downstream.yml +++ b/.github/workflows/Downstream.yml @@ -27,14 +27,14 @@ jobs: - {user: SciML, repo: NeuralPDE.jl, group: NNPDE} - {user: JuliaMolSim, repo: Molly.jl, group: Zygote} steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: julia-actions/setup-julia@v1 with: version: ${{ matrix.julia-version }} arch: x64 - uses: julia-actions/julia-buildpkg@latest - name: Clone Downstream - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: repository: ${{ matrix.package.user }}/${{ matrix.package.repo }} path: downstream diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 887c985c8..0c41fddaa 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -32,12 +32,12 @@ jobs: # version: '1' # arch: x64 steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: julia-actions/setup-julia@v1 with: version: ${{ matrix.version }} arch: ${{ matrix.arch }} - - uses: actions/cache@v1 + - uses: actions/cache@v3 env: cache-name: cache-artifacts with: @@ -58,7 +58,7 @@ jobs: #continue-on-error: ${{ matrix.version == 'nightly' }} # comment out to report nightly failures - uses: julia-actions/julia-processcoverage@v1 if: matrix.version == '1' && matrix.os == 'ubuntu-latest' - - uses: codecov/codecov-action@v2 + - uses: codecov/codecov-action@v3 if: matrix.version == '1' && matrix.os == 'ubuntu-latest' with: file: lcov.info @@ -66,7 +66,7 @@ jobs: name: Documentation runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - uses: julia-actions/setup-julia@v1 with: version: '1' diff --git a/.github/workflows/clean_preview.yml b/.github/workflows/clean_preview.yml index 25946efc3..f356099ba 100644 --- a/.github/workflows/clean_preview.yml +++ b/.github/workflows/clean_preview.yml @@ -10,7 +10,7 @@ jobs: runs-on: ubuntu-latest steps: - name: Checkout gh-pages branch - uses: actions/checkout@v2 + uses: actions/checkout@v3 with: ref: gh-pages - name: Delete preview and history + push changes From 9b6fd6861985d6f96f21924cb4277b93798223e4 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Tue, 7 Feb 2023 22:14:03 -0800 Subject: [PATCH 394/490] Don't differentiate getproperty on const module fields --- src/compiler/reverse.jl | 32 +++++++++++++++++++++++++++----- test/compiler.jl | 13 ++++++++++++- 2 files changed, 39 insertions(+), 6 deletions(-) diff --git a/src/compiler/reverse.jl b/src/compiler/reverse.jl index 532644914..333323e83 100644 --- a/src/compiler/reverse.jl +++ b/src/compiler/reverse.jl @@ -37,11 +37,18 @@ is_getproperty(ex) = iscall(ex, Base, :getproperty) # argument is a literal or not. function instrument_getproperty!(ir, v, ex) if is_getproperty(ex) - if ex.args[3] isa Union{QuoteNode,Integer} - ir[v] = xcall(Zygote, :literal_getproperty, ex.args[2], Val(unwrapquote(ex.args[3]))) + obj, prop = ex.args[2], ex.args[3] + if obj isa Module && prop isa QuoteNode && isconst(obj, unwrapquote(prop)) + # Metaprogramming can generate getproperty(::Module, ...) calls. + # Like other types, these are type unstable without constprop. + # However, literal_getproperty's heuristic is also not general enough for modules. + # Thankfully, we can skip instrumenting these if they're const properties. 
+ ex + elseif prop isa Union{QuoteNode,Integer} + ir[v] = xcall(Zygote, :literal_getproperty, obj, Val(unwrapquote(prop))) else - f = insert!(ir, v, :(Val($(ex.args[3])))) - ir[v] = xcall(Zygote, :literal_getproperty, ex.args[2], f) + f = insert!(ir, v, :(Val($(prop)))) + ir[v] = xcall(Zygote, :literal_getproperty, obj, f) end else ex @@ -169,7 +176,22 @@ ignored_f(f) = f in (GlobalRef(Base, :not_int), ignored_f(ir, f) = ignored_f(f) ignored_f(ir, f::Variable) = ignored_f(get(ir, f, nothing)) -ignored(ir, ex) = isexpr(ex, :call) && ignored_f(ir, ex.args[1]) +function ignored(ir, ex) + isexpr(ex, :call) || return false + f = ex.args[1] + ignored_f(ir, f) && return true + if f isa Variable && haskey(ir, f) + f = ir[f].expr + end + if f == GlobalRef(Base, :getproperty) && length(ex.args) >= 3 + obj, prop = ex.args[2], ex.args[3] + # Metaprogramming can generate getproperty(::Module, ...) calls. + # These are type unstable without constprop, which transforming to _pullback breaks. + # However, we can skip differentiating these if they're const properties. + obj isa Module && prop isa QuoteNode && isconst(obj, unwrapquote(prop)) && return true + end + return false +end ignored(ir, ex::Variable) = ignored(ir, ir[ex]) function primal(ir::IR) diff --git a/test/compiler.jl b/test/compiler.jl index c5ddf1f38..198ca2a29 100644 --- a/test/compiler.jl +++ b/test/compiler.jl @@ -128,7 +128,7 @@ end d_two = Zygote.pullback(two_svds, X)[2](Δoutput) d_one = Zygote.pullback(one_svd, X)[2](Δoutput) @test d_one == d_two -end +end # this test fails if adjoint for literal_getproperty is added # https://github.com/FluxML/Zygote.jl/issues/922#issuecomment-804128905 @@ -157,6 +157,13 @@ function _Gaussian(suffix::Symbol) end end +module MyMod + const C = 1 + func(a, b) = a * b +end + +@eval usesmod(x) = Base.getproperty($MyMod, :func)(x, Base.getproperty($MyMod, :C)) + @testset "inference for `getproperty`" begin Gaussian = _Gaussian(:getproperty) g = Gaussian(randn(3), randn(3, 3)) @@ -204,6 +211,10 @@ end y, back = @inferred pullback(x -> x.m, g) @test y == getfield(g, :m) @test @inferred(back(1.0)) == ((m = 1.0, P = nothing),) + + + # Const properties on modules should be lowered as-is (not differentiated) + @test @inferred gradient(usesmod, 1)[1] == 1.0 end # issue 897 From 75032df0ceadfe428aaf81fdf327fd39416e7438 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Fri, 10 Feb 2023 20:20:28 -0800 Subject: [PATCH 395/490] fixup flaky rule tolerance --- test/chainrules.jl | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/test/chainrules.jl b/test/chainrules.jl index 51e0a80c6..7bd66a4d2 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -366,8 +366,10 @@ end test_rrule(ZygoteRuleConfig(), sum, cbrt, randn(5); rrule_f=rrule_via_ad) # but x -> cbrt(x) has no rule, so will be done by Zygote - test_rrule(ZygoteRuleConfig(), sum, x -> cbrt(x), randn(5)) - test_rrule(ZygoteRuleConfig(), sum, x -> cbrt(x), randn(5); rrule_f=rrule_via_ad) + # increased tolerances because these are occasionally flaky at rtol=1e-9 + test_rrule(ZygoteRuleConfig(), sum, x -> cbrt(x), randn(5); rtol=1e-8) + test_rrule(ZygoteRuleConfig(), sum, x -> cbrt(x), randn(5); rtol=1e-8, + rrule_f=rrule_via_ad) end # See https://github.com/FluxML/Zygote.jl/issues/1078 From 92cda7d0237582c2111206d009013e54e1c3600c Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 17 Feb 2023 17:48:00 -0500 Subject: [PATCH 396/490] Always call reverse in `map` (#1376) * fix 1374 * shorter 
comment --- src/lib/array.jl | 14 ++++++-------- test/gradcheck.jl | 16 ++++++++++++++++ 2 files changed, 22 insertions(+), 8 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 4b8f90609..c185048f5 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -161,16 +161,14 @@ function unzip(tuples) _unzip(tuples, Val(N)) end -# Reverse iteration order when ∇map is applied to vector, -# needed for stateful functions. -# See https://github.com/FluxML/Flux.jl/issues/1209 -# Should be generalized to abstract array, but reverse takes a dims keyword there +# Reverse iteration order in ∇map, for stateful functions. +# This is also used by comprehensions, which do guarantee iteration order. +# Not done for pmap, presumably because all is lost if you are relying on its order. _tryreverse(m, backs, Δ) = backs, Δ -function _tryreverse(m::typeof(map), backs, Δ::Union{AbstractVector, Tuple}) - return reverse(backs), reverse(Δ) -end +_tryreverse(m::typeof(map), backs, Δ) = reverse(backs), reverse(Δ) + _tryreverse(m, x) = x -_tryreverse(m::typeof(map), x::Union{AbstractVector, Tuple}) = reverse(x) +_tryreverse(m::typeof(map), x) = reverse(x) # With mismatched lengths, map stops early. With mismatched shapes, it makes a vector. # So we keep axes(x) to restore gradient dx to its full length & correct shape. diff --git a/test/gradcheck.jl b/test/gradcheck.jl index c517e859e..b170aa045 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -401,6 +401,22 @@ end @test gradient((x,y) -> sum(map(*,x,y)), (1,2,3,4,5), [1 2; 3 4]) == ((1,3,2,4,nothing), [1 3; 2 4]) end +@testset "map: issye 1374" begin + # The code to reverse iteration in map was very sloppy, could reverse fwd & not reverse, wtf. + # https://github.com/FluxML/Zygote.jl/issues/1374 + struct Affine1374 + W + b + end + (m::Affine1374)(x) = [sum(x.*r) for r in eachrow(m.W)] + m.b + m = Affine1374(zeros(3,3), zeros(3,1)) + x = [ 1.0, 2.0, 3.0] + y = [-1.0, -2.0, -3.0] + l1374(y,ŷ) = sum(abs2.(y - ŷ))/2 + grads = gradient(m -> l1374(y,m(x)), m) + @test grads[1].W ≈ [1 2 3; 2 4 6; 3 6 9] +end + @testset "sort" begin @test gradtest(sort, 5) correct = [ From 781630aa026f099d1df9046c7cdaf6c07005bba4 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Thu, 23 Feb 2023 15:26:36 -0800 Subject: [PATCH 397/490] Update to latest TagBot config (#1373) --- .github/workflows/TagBot.yml | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/.github/workflows/TagBot.yml b/.github/workflows/TagBot.yml index f49313b66..2bacdb87e 100644 --- a/.github/workflows/TagBot.yml +++ b/.github/workflows/TagBot.yml @@ -4,6 +4,22 @@ on: types: - created workflow_dispatch: + inputs: + lookback: + default: 3 +permissions: + actions: read + checks: read + contents: write + deployments: read + issues: read + discussions: read + packages: read + pages: read + pull-requests: read + repository-projects: read + security-events: read + statuses: read jobs: TagBot: if: github.event_name == 'workflow_dispatch' || github.actor == 'JuliaTagBot' From 0dd6a9d5ccda2f24b81db85d64b6d92e53ca735a Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 25 Feb 2023 16:34:03 -0800 Subject: [PATCH 398/490] Un-revert SnoopCompile changes (#1370) * Un-revert SnoopCompile changes Based on https://github.com/SciML/DiffEqFlux.jl/issues/783#issuecomment-1418280850, a newer Julia 1.8 patch version seems to have fixed things. 
--- Project.toml | 2 ++ src/Zygote.jl | 9 ++++++--- 2 files changed, 8 insertions(+), 3 deletions(-) diff --git a/Project.toml b/Project.toml index d64b909fb..93b548336 100644 --- a/Project.toml +++ b/Project.toml @@ -20,6 +20,7 @@ MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" Requires = "ae029012-a4dd-5104-9daa-d747884805df" +SnoopPrecompile = "66db9d55-30c0-4569-8b51-7e840670fc0c" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" @@ -40,6 +41,7 @@ LogExpFunctions = "0.3.1" MacroTools = "0.5" NaNMath = "0.3, 1" Requires = "1.1" +SnoopPrecompile = "1.0.3" SpecialFunctions = "1.6, 2" ZygoteRules = "0.2.1" julia = "1.6" diff --git a/src/Zygote.jl b/src/Zygote.jl index c651968ba..3467877ad 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -79,8 +79,11 @@ macro profile(ex) end end -## reverted due to https://github.com/SciML/DiffEqFlux.jl/issues/783 -# using SnoopPrecompile -# @precompile_all_calls precompile() +using SnoopPrecompile +# This caused freezes on early 1.8 patch versions, +# see https://github.com/SciML/DiffEqFlux.jl/issues/783 +@static if VERSION < v"1.8" || VERSION >= v"1.8.5" + @precompile_all_calls precompile() +end end # module From 7869f7f2b6bcfd050a3f9fa4927c4c23a6b61eb0 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 25 Feb 2023 17:06:58 -0800 Subject: [PATCH 399/490] simple fix for kron(::AbstractVector) on nightly --- src/lib/array.jl | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index c185048f5..5086d9c14 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -9,12 +9,12 @@ using Distributed: pmap, AbstractWorkerPool @adjoint copy(x::AbstractArray) = copy(x), ȳ -> (ȳ,) @adjoint function collect(x::Tuple) - collect_tuple_pullback(dy) = (Tuple(dy),) + collect_tuple_pullback(dy) = (Tuple(dy),) collect(x), collect_tuple_pullback end @adjoint function collect(x::NamedTuple{names}) where names - collect_namedtuple_pullback(dy) = (NamedTuple{names}(Tuple(dy)),) + collect_namedtuple_pullback(dy) = (NamedTuple{names}(Tuple(dy)),) collect(x), collect_namedtuple_pullback end @@ -101,7 +101,7 @@ Possible fixes: _ -> _throw_mutation_error(copyto!, xs) for f in [push!, pop!, pushfirst!, popfirst!] - @eval @adjoint! $f(x::AbstractVector, ys...) = $f(x, ys...), + @eval @adjoint! $f(x::AbstractVector, ys...) = $f(x, ys...), _ -> _throw_mutation_error($f, x) end @@ -248,10 +248,10 @@ reconstruct_if_dict(x̄, _keys::Nothing) = x̄ function reconstruct_if_dict(x̄, _keys) # This reverses `collect_if_dict`, which returns `_keys::Nothing` if x is not a Dict @assert x̄ isa AbstractVector{<:Union{Nothing, NamedTuple{(:first,:second)}}} - # we don't compute gradients with respect to keys + # we don't compute gradients with respect to keys # @assert all(x -> x === nothing || x[1] == 0 || x[1] === nothing, x̄) d̄ = Dict(k => isnothing(x) ? nothing : x[2] for (x, k) in zip(x̄, _keys)) - return d̄ + return d̄ end @adjoint iterate(r::UnitRange, i...) 
= iterate(r, i...), _ -> nothing @@ -344,7 +344,7 @@ end # ============= @adjoint parent(x::LinearAlgebra.Adjoint) = parent(x), ȳ -> (LinearAlgebra.Adjoint(ȳ),) -@adjoint parent(x::LinearAlgebra.Transpose) = parent(x), ȳ -> (LinearAlgebra.Transpose(ȳ),) +@adjoint parent(x::LinearAlgebra.Transpose) = parent(x), ȳ -> (LinearAlgebra.Transpose(ȳ),) function _kron(mat1::AbstractMatrix,mat2::AbstractMatrix) m1, n1 = size(mat1) @@ -355,8 +355,10 @@ function _kron(mat1::AbstractMatrix,mat2::AbstractMatrix) return reshape(mat1_rsh.*mat2_rsh, (m1*m2,n1*n2)) end +_kron(a::AbstractVector, b::AbstractVector) = vec(_kron(reshape(a, :, 1), reshape(b, :, 1))) @adjoint kron(a::AbstractMatrix, b::AbstractMatrix) = pullback(_kron, a, b) +@adjoint kron(a::AbstractVector, b::AbstractVector) = pullback(_kron, a, b) @adjoint logabsdet(xs::AbstractMatrix) = logabsdet(xs), Δ -> (Δ[1] * inv(xs)',) From d4faaeb63b8381532dcff9cf11de3aa51b5fc9c8 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 25 Feb 2023 20:07:39 -0800 Subject: [PATCH 400/490] Don't use zerolike fallback for GlobalRefs This was causing UndefRefErrors on nightly. --- src/forward/lib.jl | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/forward/lib.jl b/src/forward/lib.jl index a5518fd5d..fecbd90e2 100644 --- a/src/forward/lib.jl +++ b/src/forward/lib.jl @@ -9,6 +9,9 @@ end # TODO figure out why this made a test fail zerolike(x::Union{Module,Type}) = nothing +# Required to not get an UndefRefError on 1.10 +zerolike(x::GlobalRef) = nothing + # TODO: `@non_differentiable` and `@linear` @tangent zerolike(x) = zerolike(x), _ -> zerolike(x) From eda7f3862ffd6d6c69c9ca935e82d0ada5009793 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sun, 26 Feb 2023 09:10:16 -0800 Subject: [PATCH 401/490] fix now passing backtrace test on nightly --- test/compiler.jl | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/test/compiler.jl b/test/compiler.jl index 198ca2a29..a74da5433 100644 --- a/test/compiler.jl +++ b/test/compiler.jl @@ -17,8 +17,10 @@ trace_contains(st, func, file, line) = any(st) do fr end bad(x) = x +const bad_def_line = (@__LINE__) + 1 @adjoint bad(x) = x, Δ -> error("bad") +const bad_call_line = (@__LINE__) + 3 function badly(x) x = x + 1 x = bad(x) @@ -30,11 +32,11 @@ y, back = pullback(badly, 2) @test_throws Exception back(1) bt = try back(1) catch e stacktrace(catch_backtrace()) end -@test trace_contains(bt, nothing, "compiler.jl", 20) -if VERSION >= v"1.6-" - @test_broken trace_contains(bt, :badly, "compiler.jl", 24) +@test trace_contains(bt, nothing, "compiler.jl", bad_def_line) +if VERSION <= v"1.6-" || VERSION >= v"1.10-" + @test trace_contains(bt, :badly, "compiler.jl", bad_call_line) else - @test trace_contains(bt, :badly, "compiler.jl", 24) + @test_broken trace_contains(bt, :badly, "compiler.jl", bad_call_line) end # Type inference checks From 14120e94e443aeb222df28328a08b75225f48976 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sun, 26 Feb 2023 14:30:41 -0800 Subject: [PATCH 402/490] more robust test for Pullback show --- test/compiler.jl | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/test/compiler.jl b/test/compiler.jl index a74da5433..c9b091f78 100644 --- a/test/compiler.jl +++ b/test/compiler.jl @@ -83,10 +83,14 @@ y, back = @test_inferred pullback(((a,b),) -> a, (5, 10)) # testcase for issue #808 # testing that methods(Base.show) does not throw. 
Having something more specific would be too fragile -buf = IOBuffer() -Base.show(buf, methods(Base.show)) -str_repr = String(take!(buf)) -@test !isempty(str_repr) +show_err = try + buf = IOBuffer() + Base.show(buf, methods(Base.show)) + nothing +catch ex + ex +end +@test show_err === nothing struct Funky x From a908a85dd03845b882d5a0f9b6188c8ff1ae2df7 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Mon, 27 Feb 2023 20:49:01 -0800 Subject: [PATCH 403/490] Add `hessian_reverse` to docs --- docs/src/utils.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/src/utils.md b/docs/src/utils.md index 25b5954e4..3adb1d4c1 100644 --- a/docs/src/utils.md +++ b/docs/src/utils.md @@ -6,6 +6,7 @@ or a Hessian (by taking a second derivative). ```@docs Zygote.jacobian Zygote.hessian +Zygote.hessian_reverse Zygote.diaghessian ``` From 49a11849157222e9ae5c995f5886a9e19b8505aa Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 4 Mar 2023 17:42:22 -0800 Subject: [PATCH 404/490] Use `_pullback` inside rules instead of `pullback` This will give us more flexibility to implement internal changes such as https://github.com/FluxML/Zygote.jl/issues/603 without changing the user-facing API. --- src/Zygote.jl | 2 +- src/lib/array.jl | 44 ++++++++++++++++++----------------------- src/lib/base.jl | 7 +++---- src/lib/broadcast.jl | 17 +++++++++++----- src/lib/distances.jl | 18 +++++++++++++---- src/lib/forward.jl | 20 +++++++++++-------- test/cuda.jl | 1 - test/forward/forward.jl | 2 -- test/lib/array.jl | 1 - test/lib/base.jl | 2 -- test/runtests.jl | 2 +- test/utils.jl | 1 - 12 files changed, 62 insertions(+), 55 deletions(-) diff --git a/src/Zygote.jl b/src/Zygote.jl index 3467877ad..05d0bd80e 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -4,7 +4,7 @@ using LinearAlgebra, Statistics using LinearAlgebra: copytri!, AbstractTriangular import ZygoteRules: @adjoint, @adjoint!, AContext, adjoint, _pullback, pullback, - literal_getproperty, literal_getfield + literal_getproperty, literal_getfield, unthunk_tangent using ChainRulesCore using ChainRules: ChainRules, rrule, unthunk, canonicalize diff --git a/src/lib/array.jl b/src/lib/array.jl index 5086d9c14..a855f8946 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -279,7 +279,10 @@ end enumerate(xs), back end -@adjoint Iterators.Filter(f, x) = pullback(filter, f, collect(x)) +function _pullback(cx::AContext, ::Type{<:Iterators.Filter}, f, x) + res, back = _pullback(cx, filter, f, collect(x)) + return res, back ∘ unthunk_tangent +end _ndims(::Base.HasShape{d}) where {d} = d _ndims(x) = Base.IteratorSize(x) isa Base.HasShape ? _ndims(Base.IteratorSize(x)) : 1 @@ -321,18 +324,12 @@ end end end -@adjoint function sum(f, xs::AbstractArray{<:AbstractArray}; kws...) - @assert !haskey(kws, :init) # TODO add init support (julia 1.6) - return pullback((f, xs) -> sum(f.(xs); kws...), __context__, f, xs) -end - @adjoint function sum(xs::AbstractArray{Bool}; dims = :) sum(xs, dims = dims), Δ -> (nothing,) end function _pullback(cx::AContext, ::typeof(prod), f, xs::AbstractArray) - y, back = pullback((f, xs) -> prod(f.(xs)), cx, f, xs) - y, ȳ -> (nothing, back(ȳ)...) 
+ return _pullback(cx, (f, xs) -> prod(f.(xs)), f, xs) end @adjoint real(x::AbstractArray) = real(x), r̄ -> (real(r̄),) @@ -357,8 +354,14 @@ function _kron(mat1::AbstractMatrix,mat2::AbstractMatrix) end _kron(a::AbstractVector, b::AbstractVector) = vec(_kron(reshape(a, :, 1), reshape(b, :, 1))) -@adjoint kron(a::AbstractMatrix, b::AbstractMatrix) = pullback(_kron, a, b) -@adjoint kron(a::AbstractVector, b::AbstractVector) = pullback(_kron, a, b) +function _pullback(cx::AContext, ::typeof(kron), a::AbstractVector, b::AbstractVector) + res, back = _pullback(cx, _kron, a, b) + return res, back ∘ unthunk_tangent +end +function _pullback(cx::AContext, ::typeof(kron), a::AbstractMatrix, b::AbstractMatrix) + res, back = _pullback(cx, _kron, a, b) + return res, back ∘ unthunk_tangent +end @adjoint logabsdet(xs::AbstractMatrix) = logabsdet(xs), Δ -> (Δ[1] * inv(xs)',) @@ -432,15 +435,6 @@ end @adjoint LinearAlgebra.UnitLowerTriangular(A) = UnitLowerTriangular(A), Δ->(UnitLowerTriangular(Δ)-I,) @adjoint LinearAlgebra.UnitUpperTriangular(A) = UnitUpperTriangular(A), Δ->(UnitUpperTriangular(Δ)-I,) -# This is basically a hack while we don't have a working `ldiv!`. -@adjoint function \(A::Cholesky, B::AbstractVecOrMat) - Y, back = Zygote.pullback((U, B)->U \ (U' \ B), A.U, B) - return Y, function(Ȳ) - Ā_factors, B̄ = back(Ȳ) - return ((uplo=nothing, info=nothing, factors=Ā_factors), B̄) - end -end - function _symmetric_back(Δ, uplo) L, U, D = LowerTriangular(Δ), UpperTriangular(Δ), Diagonal(Δ) return uplo == 'U' ? U .+ transpose(L) - D : L .+ transpose(U) - D @@ -572,14 +566,14 @@ _hermsympow(A::Hermitian, p::Integer) = A^p @adjoint function _hermsympow(A::Hermitian, p::Integer) if p < 0 - B, back = Zygote.pullback(A->Base.power_by_squaring(inv(A), -p), A) + B, back = _pullback(__context__, A -> Base.power_by_squaring(inv(A), -p), A) else - B, back = Zygote.pullback(A->Base.power_by_squaring(A, p), A) + B, back = _pullback(__context__, A -> Base.power_by_squaring(A, p), A) end Ω = Hermitian(_realifydiag!(B)) return Ω, function (Ω̄) B̄ = _hermitian_back(Ω̄, 'U') - Ā = back(B̄)[1] + Ā = last(back(B̄)) return (Ā, nothing) end end @@ -812,8 +806,8 @@ end # ======================= @adjoint function broadcasted(op, r::AbstractFill{<:Real}) - y, _back = Zygote.pullback(op, getindex_value(r)) - back(Δ::AbstractFill) = (nothing, Fill(_back(getindex_value(Δ))[1], size(r))) - back(Δ::AbstractArray) = (nothing, getindex.(_back.(Δ), 1)) + y, _back = _pullback(__context__, op, getindex_value(r)) + back(Δ::AbstractFill) = (nothing, Fill(last(_back(getindex_value(Δ))), size(r))) + back(Δ::AbstractArray) = (nothing, last.(_back.(Δ))) return Fill(y, size(r)), back end diff --git a/src/lib/base.jl b/src/lib/base.jl index e259a999d..c0efa83cf 100644 --- a/src/lib/base.jl +++ b/src/lib/base.jl @@ -178,10 +178,9 @@ end # For merge between NamedTuple and Dict, we will just convert the Dict to a NamedTuple. # and then call `pullback`, which should overall be pretty efficient code generated, # and it avoids trying to AD the problematic generic `merge(::NamedTuple, ::iter)` method which uses `push!`. 
-if VERSION >= v"1.6" - @adjoint merge(nt::NamedTuple, dict::Dict) = pullback(merge, nt, NamedTuple(dict)) -else - @adjoint merge(nt::NamedTuple, dict::Dict) = pullback(merge, nt, (;dict...)) +function _pullback(cx::AContext, ::typeof(merge), a::NamedTuple, b::Dict{Symbol}) + res, back = _pullback(cx, merge, a, NamedTuple(b)) + return res, back ∘ unthunk_tangent end # Keyword arguments pretend to be a Dict, but are secretly wrapping a NamedTuple. diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 1cafa5d6d..d21d8e54c 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -30,9 +30,13 @@ using Base.Broadcast: Broadcasted, AbstractArrayStyle, broadcasted, materialize # Utilities # ========= -# ChainRules already marks this non-differentiable, -# But inference can still give up because of the Zygote -> CR wrapper layer -@nograd Broadcast.combine_styles +# ChainRules already marks this non-differentiable,# But inference can still give up because of the Zygote -> CR wrapper layer. +# This has been desugared from the (deprecated) @nograd macro. +@inline function Zygote._pullback(::AContext, ::typeof(Broadcast.combine_styles), args...) + dargs = ntuple(_ -> nothing, length(args) + 1) + combine_styles_pullback(_) = dargs + return Broadcast.combine_styles(args...), combine_styles_pullback +end accum_sum(xs; dims = :) = reduce(accum, xs, dims = dims) @@ -358,9 +362,12 @@ using GPUArraysCore # replaces @require CUDA block, weird indenting to preserve # Make sure sum(f, ::CuArray) uses broadcase through forward-mode defined above # Not the ChainRules.rrule which will use the Zygote.Context and thus not be GPU compatible - @adjoint function sum(f, xs::AbstractGPUArray; kws...) + function _pullback(cx::AContext, ::Core.kwftype(typeof(sum)), kws, ::typeof(sum), f, + xs::AbstractGPUArray) @assert !haskey(kws, :init) # TODO add init support (julia 1.6) - return pullback((f, xs) -> sum(f.(xs); kws...), __context__, f, xs) + res, back = _pullback(cx, (f, xs) -> sum(f.(xs); kws...), f, xs) + sum_gpuarray_pullback(Δ) = last(back(unthunk_tangent(Δ))) + return res, sum_gpuarray_pullback end @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:AbstractGPUArray} diff --git a/src/lib/distances.jl b/src/lib/distances.jl index 1adf30d01..e4ecceeb6 100644 --- a/src/lib/distances.jl +++ b/src/lib/distances.jl @@ -66,22 +66,32 @@ end _sqrt_if_positive(d, δ) = d > δ ? sqrt(d) : zero(d) -@adjoint function pairwise(dist::Euclidean, X::AbstractMatrix, Y::AbstractMatrix; dims=2) +function _pullback(cx::AContext, ::Core.kwftype(typeof(pairwise)), + kws::@NamedTuple{dims::Int}, ::typeof(pairwise), dist::Euclidean, + X::AbstractMatrix, Y::AbstractMatrix) # Modify the forwards-pass slightly to ensure stability on the reverse. + dims = kws.dims function _pairwise_euclidean(sqdist::SqEuclidean, X, Y) D2 = pairwise(sqdist, X, Y; dims=dims) δ = eps(eltype(D2)) return _sqrt_if_positive.(D2, δ) end - return pullback(_pairwise_euclidean, SqEuclidean(dist.thresh), X, Y) + res, back = _pullback(cx, _pairwise_euclidean, SqEuclidean(dist.thresh), X, Y) + pairwise_Euclidean_pullback(Δ) = (nothing, nothing, back(unthunk_tangent(Δ))...) + return res, pairwise_Euclidean_pullback end -@adjoint function pairwise(dist::Euclidean, X::AbstractMatrix; dims=2) +function _pullback(cx::AContext, ::Core.kwftype(typeof(pairwise)), + kws::@NamedTuple{dims::Int}, ::typeof(pairwise), dist::Euclidean, + X::AbstractMatrix) # Modify the forwards-pass slightly to ensure stability on the reverse. 
+ dims = kws.dims function _pairwise_euclidean(sqdist::SqEuclidean, X) D2 = pairwise(sqdist, X; dims=dims) δ = eps(eltype(D2)) return _sqrt_if_positive.(D2, δ) end - return pullback(_pairwise_euclidean, SqEuclidean(dist.thresh), X) + res, back = _pullback(cx, _pairwise_euclidean, SqEuclidean(dist.thresh), X) + pairwise_Euclidean_pullback(Δ) = (nothing, nothing, back(unthunk_tangent(Δ))...) + return res, pairwise_Euclidean_pullback end diff --git a/src/lib/forward.jl b/src/lib/forward.jl index 3ee3f7d1c..e33cbf499 100644 --- a/src/lib/forward.jl +++ b/src/lib/forward.jl @@ -137,32 +137,36 @@ end # Use this to allow second derivatives -- this is forward-over-forward, # see https://github.com/FluxML/Zygote.jl/issues/769 for a forward-over-reverse proposal -@adjoint function ForwardDiff.gradient(f, x) +function _pullback(cx::AContext, ::typeof(ForwardDiff.gradient), f, x) F = typeof(f) Base.issingletontype(F) || @warn """`ForwardDiff.gradient(f, x)` within Zygote cannot track gradients with respect to `f`, and `f` appears to be a closure, or a struct with fields (according to `issingletontype(typeof(f))`). typeof(f) = $F""" maxlog=1 _id=hash(F) - pullback(forwarddiff, x -> ForwardDiff.gradient(f, x), x) + res, back = _pullback(cx, forwarddiff, x -> ForwardDiff.gradient(f, x), x) + return res, back ∘ unthunk_tangent end -@adjoint function ForwardDiff.jacobian(f::F, x) where F +function _pullback(cx::AContext, ::typeof(ForwardDiff.jacobian), f::F, x) where F Base.issingletontype(F) || @warn """`ForwardDiff.jacobian(f, x)` within Zygote cannot track gradients with respect to `f`, and `f` appears to be a closure, or a struct with fields (according to `issingletontype(typeof(f))`). typeof(f) = $F""" maxlog=1 _id=hash(F) - pullback(forwarddiff, x -> ForwardDiff.jacobian(f, x), x) + res, back = _pullback(cx, forwarddiff, x -> ForwardDiff.jacobian(f, x), x) + return res, back ∘ unthunk_tangent end -@adjoint function ForwardDiff.derivative(f::F, x) where F +function _pullback(cx::AContext, ::typeof(ForwardDiff.derivative), f::F, x) where F Base.issingletontype(F) || @warn """`ForwardDiff.derivative(f, x)` within Zygote cannot track gradients with respect to `f`, and `f` appears to be a closure, or a struct with fields (according to `issingletontype(typeof(f))`). typeof(f) = $F""" maxlog=1 _id=hash(F) - pullback(forwarddiff, x -> ForwardDiff.derivative(f, x), x) + res, back = _pullback(cx, forwarddiff, x -> ForwardDiff.derivative(f, x), x) + return res, back ∘ unthunk_tangent end -@adjoint function ForwardDiff.hessian(f::F, x) where F +function _pullback(cx::AContext, ::typeof(ForwardDiff.hessian), f::F, x) where F Base.issingletontype(F) || @warn """`ForwardDiff.hessian(f, x)` within Zygote cannot track gradients with respect to `f`, and `f` appears to be a closure, or a struct with fields (according to `issingletontype(typeof(f))`). typeof(f) = $F""" maxlog=1 _id=hash(F) - pullback(forwarddiff, x -> ForwardDiff.hessian(f, x), x) + res, back = _pullback(cx, forwarddiff, x -> ForwardDiff.hessian(f, x), x) + return res, back ∘ unthunk_tangent end diff --git a/test/cuda.jl b/test/cuda.jl index 171fa45db..43d7b5bcd 100644 --- a/test/cuda.jl +++ b/test/cuda.jl @@ -1,6 +1,5 @@ using CUDA using Zygote: Grads -using LinearAlgebra using Random: randn! 
import FiniteDifferences CUDA.allowscalar(false) diff --git a/test/forward/forward.jl b/test/forward/forward.jl index 6aa9173ef..3b9e10187 100644 --- a/test/forward/forward.jl +++ b/test/forward/forward.jl @@ -39,8 +39,6 @@ end == 0 @test D(x -> abs(x+2im), 1) == gradient(x -> abs(x+2im), 1+0im)[1] @test real(D(x -> abs(x+2im), 1)) == gradient(x -> abs(x+2im), 1)[1] # ProjectTo means gradient here is real -using LinearAlgebra - @test D(3) do x A = zeros(5, 5) B = zeros(5, 5) diff --git a/test/lib/array.jl b/test/lib/array.jl index a527f9bc6..d02e9f9d3 100644 --- a/test/lib/array.jl +++ b/test/lib/array.jl @@ -1,5 +1,4 @@ using ChainRulesTestUtils -using LinearAlgebra using Zygote: ZygoteRuleConfig, _pullback # issue 897 diff --git a/test/lib/base.jl b/test/lib/base.jl index 4d4c54626..2a161a58a 100644 --- a/test/lib/base.jl +++ b/test/lib/base.jl @@ -1,5 +1,3 @@ -using LinearAlgebra; - @testset "base.jl" begin @testset "Dict getindex with implicit params" begin d = Dict{String, Vector{Float64}}("key"=>ones(4)) diff --git a/test/runtests.jl b/test/runtests.jl index 565ad182f..672960944 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,4 +1,4 @@ -using Zygote, Test +using Zygote, Test, LinearAlgebra using Zygote: gradient, ZygoteRuleConfig using CUDA using CUDA: has_cuda diff --git a/test/utils.jl b/test/utils.jl index 40b2e85b7..cb11437cf 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -1,4 +1,3 @@ -using LinearAlgebra using ForwardDiff using Zygote: hessian_dual, hessian_reverse From 683f5542d09e832d0a27733e76db57dc259692f9 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 4 Mar 2023 18:39:19 -0800 Subject: [PATCH 405/490] GPU rule fixes --- src/lib/broadcast.jl | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index d21d8e54c..4508c3ca2 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -362,12 +362,16 @@ using GPUArraysCore # replaces @require CUDA block, weird indenting to preserve # Make sure sum(f, ::CuArray) uses broadcase through forward-mode defined above # Not the ChainRules.rrule which will use the Zygote.Context and thus not be GPU compatible + function _pullback(cx::AContext, ::typeof(sum), f, xs::AbstractGPUArray) + res, back = _pullback(cx, (f, xs) -> sum(f.(xs)), f, xs) + return res, back ∘ unthunk_tangent + end function _pullback(cx::AContext, ::Core.kwftype(typeof(sum)), kws, ::typeof(sum), f, xs::AbstractGPUArray) @assert !haskey(kws, :init) # TODO add init support (julia 1.6) res, back = _pullback(cx, (f, xs) -> sum(f.(xs); kws...), f, xs) - sum_gpuarray_pullback(Δ) = last(back(unthunk_tangent(Δ))) - return res, sum_gpuarray_pullback + sum_gpuarray_kw_pullback(Δ) = (nothing, nothing, back(unthunk_tangent(Δ))...) 
+ return res, sum_gpuarray_kw_pullback end @adjoint function Base.convert(::Type{T}, xs::Array) where {T<:AbstractGPUArray} From 02eaa679f16dab5902bb69a900b0a694bf5620a7 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Tue, 7 Mar 2023 19:45:49 -0800 Subject: [PATCH 406/490] Make sure conda env dir is set on Buildkite CI --- .buildkite/pipeline.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 6d3a048a2..8b7f20dac 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -4,6 +4,8 @@ steps: - JuliaCI/julia#v1: version: "1.6" - JuliaCI/julia-test#v1: ~ + command: + - mkdir -p "${JULIA_DEPOT_PATH}/conda/3/x86_64" agents: queue: "juliagpu" cuda: "*" @@ -14,6 +16,8 @@ steps: - JuliaCI/julia#v1: version: "1" - JuliaCI/julia-test#v1: ~ + command: + - mkdir -p "${JULIA_DEPOT_PATH}/conda/3/x86_64" agents: queue: "juliagpu" cuda: "*" From 942ca6dbbf3604d01df203c4029d3b0b24ca1875 Mon Sep 17 00:00:00 2001 From: trahflow Date: Wed, 8 Mar 2023 23:11:17 +0100 Subject: [PATCH 407/490] drop adjoints for [i,r,b]fft() (#1386) * drop adjoints for [i,r,b]fft() Partially addresses https://github.com/FluxML/Zygote.jl/issues/1377 ChainRules for these have been added in https://github.com/JuliaMath/AbstractFFTs.jl/pull/58 * add back gradient test for *fft without dims argument * increase compat constraint for AbstractFFTs to 1.3.1 * fix typo Co-authored-by: Brian Chen --------- Co-authored-by: Brian Chen --- Project.toml | 2 +- src/lib/array.jl | 123 ---------------------------------------------- test/gradcheck.jl | 102 ++++++++++++++++++-------------------- 3 files changed, 48 insertions(+), 179 deletions(-) diff --git a/Project.toml b/Project.toml index 93b548336..f78a49654 100644 --- a/Project.toml +++ b/Project.toml @@ -27,7 +27,7 @@ Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" [compat] -AbstractFFTs = "0.5, 1.0" +AbstractFFTs = "1.3.1" ChainRules = "1.44.1" ChainRulesCore = "1.9" ChainRulesTestUtils = "1" diff --git a/src/lib/array.jl b/src/lib/array.jl index a855f8946..1c6e09916 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -665,12 +665,6 @@ AbstractFFTs.brfft(x::Fill, d, dims...) = AbstractFFTs.brfft(collect(x), d, dims # the adjoint jacobian of an FFT with respect to its input is the reverse FFT of the # gradient of its inputs, but with different normalization factor -@adjoint function fft(xs) - return AbstractFFTs.fft(xs), function(Δ) - return (AbstractFFTs.bfft(Δ),) - end -end - @adjoint function *(P::AbstractFFTs.Plan, xs) return P * xs, function(Δ) N = prod(size(xs)[[P.region...]]) @@ -685,123 +679,6 @@ end end end -# all of the plans normalize their inverse, while we need the unnormalized one. -@adjoint function ifft(xs) - return AbstractFFTs.ifft(xs), function(Δ) - N = length(xs) - return (AbstractFFTs.fft(Δ)/N,) - end -end - -@adjoint function bfft(xs) - return AbstractFFTs.bfft(xs), function(Δ) - return (AbstractFFTs.fft(Δ),) - end -end - -@adjoint function fftshift(x) - return fftshift(x), function(Δ) - return (ifftshift(Δ),) - end -end - -@adjoint function ifftshift(x) - return ifftshift(x), function(Δ) - return (fftshift(Δ),) - end -end - - -# to actually use rfft, one needs to insure that everything -# that happens in the Fourier domain could've been done in -# the space domain with real numbers. This means enforcing -# conjugate symmetry along all transformed dimensions besides -# the first. 
Otherwise this is going to result in *very* weird -# behavior. -@adjoint function rfft(xs::AbstractArray{<:Real}) - return AbstractFFTs.rfft(xs), function(Δ) - N = length(Δ) - originalSize = size(xs,1) - return (AbstractFFTs.brfft(Δ, originalSize),) - end -end - -@adjoint function irfft(xs, d) - return AbstractFFTs.irfft(xs, d), function(Δ) - total = length(Δ) - fullTransform = AbstractFFTs.rfft(real.(Δ))/total - return (fullTransform, nothing) - end -end - -@adjoint function brfft(xs, d) - return AbstractFFTs.brfft(xs, d), function(Δ) - fullTransform = AbstractFFTs.rfft(real.(Δ)) - return (fullTransform, nothing) - end -end - - -# if we're specifying the dimensions -@adjoint function fft(xs, dims) - return AbstractFFTs.fft(xs, dims), function(Δ) - # dims can be int, array or tuple, - # convert to collection for use as index - dims = collect(dims) - return (AbstractFFTs.bfft(Δ, dims), nothing) - end -end - -@adjoint function bfft(xs, dims) - return AbstractFFTs.ifft(xs, dims), function(Δ) - dims = collect(dims) - return (AbstractFFTs.fft(Δ, dims),nothing) - end -end - -@adjoint function ifft(xs, dims) - return AbstractFFTs.ifft(xs, dims), function(Δ) - dims = collect(dims) - N = prod(collect(size(xs))[dims]) - return (AbstractFFTs.fft(Δ, dims)/N,nothing) - end -end - -@adjoint function rfft(xs, dims) - return AbstractFFTs.rfft(xs, dims), function(Δ) - dims = collect(dims) - N = prod(collect(size(xs))[dims]) - return (N * AbstractFFTs.irfft(Δ, size(xs,dims[1]), dims), nothing) - end -end - -@adjoint function irfft(xs, d, dims) - return AbstractFFTs.irfft(xs, d, dims), function(Δ) - dims = collect(dims) - N = prod(collect(size(xs))[dims]) - return (AbstractFFTs.rfft(real.(Δ), dims)/N, nothing, nothing) - end -end -@adjoint function brfft(xs, d, dims) - return AbstractFFTs.brfft(xs, d, dims), function(Δ) - dims = collect(dims) - return (AbstractFFTs.rfft(real.(Δ), dims), nothing, nothing) - end -end - - -@adjoint function fftshift(x, dims) - return fftshift(x), function(Δ) - return (ifftshift(Δ, dims), nothing) - end -end - -@adjoint function ifftshift(x, dims) - return ifftshift(x), function(Δ) - return (fftshift(Δ, dims), nothing) - end -end - # FillArray functionality # ======================= diff --git a/test/gradcheck.jl b/test/gradcheck.jl index b170aa045..6e9b954fc 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1621,16 +1621,15 @@ end @testset "AbstractFFTs" begin - # Many of these tests check a complex gradient to a function with real input. This is now - # clamped to real by ProjectTo, but to run the old tests, use here the old gradient function: - function oldgradient(f, args...) - y, back = Zygote.pullback(f, args...) - back(Zygote.sensitivity(y)) - end - # Eventually these rules and tests will be moved to ChainRules.jl, at which point the tests - # can be updated to use real / complex consistently. + # Eventually these rules and tests will be moved to AbstractFFTs.jl + # Rules for direct invocation of [i,r,b]fft have already been defined in # https://github.com/JuliaMath/AbstractFFTs.jl/pull/58 + # ChainRules involving AbstractFFTs.Plan are not yet part of AbstractFFTs, + # but there is a WIP PR: + # https://github.com/JuliaMath/AbstractFFTs.jl/pull/67 + # After the above is merged, this testset can probably be removed entirely. + findicateMat(i,j,n1,n2) = [(k==i) && (l==j) ? 1.0 : 0.0 for k=1:n1, l=1:n2] mirrorIndex(i,N) = i - 2*max(0,i - (N>>1+1)) @@ -1643,45 +1642,41 @@ end indicateMat = [(k==i) && (l==j) ? 
1.0 : 0.0 for k=1:size(X, 1), l=1:size(X,2)] # gradient of ifft(fft) must be (approximately) 1 (for various cases) - @test oldgradient((X)->real.(ifft(fft(X))[i, j]), X)[1] ≈ indicateMat + @test gradient((X)->real.(ifft(fft(X))[i, j]), X)[1] ≈ indicateMat # same for the inverse - @test oldgradient((X̂)->real.(fft(ifft(X̂))[i, j]), X̂)[1] ≈ indicateMat + @test gradient((X̂)->real.(fft(ifft(X̂))[i, j]), X̂)[1] ≈ indicateMat # same for rfft(irfft) - @test oldgradient((X)->real.(irfft(rfft(X), size(X,1)))[i, j], X)[1] ≈ real.(indicateMat) - # rfft isn't actually surjective, so rffft(irfft) can't really be tested this way. + @test gradient((X)->real.(irfft(rfft(X), size(X,1)))[i, j], X)[1] ≈ real.(indicateMat) + # rfft isn't actually surjective, so rfft(irfft) can't really be tested this way. # the gradients are actually just evaluating the inverse transform on the # indicator matrix mirrorI = mirrorIndex(i,sizeX[1]) FreqIndMat = findicateMat(mirrorI, j, size(X̂r,1), sizeX[2]) - listOfSols = [(fft, bfft(indicateMat), bfft(indicateMat*im), - plan_fft(X), i, X), - (ifft, 1/N*fft(indicateMat), 1/N*fft(indicateMat*im), - plan_fft(X), i, X), - (bfft, fft(indicateMat), fft(indicateMat*im), nothing, i, - X), - (rfft, real.(brfft(FreqIndMat, sizeX[1])), - real.(brfft(FreqIndMat*im, sizeX[1])), plan_rfft(X), - mirrorI, X), - ((K)->(irfft(K,sizeX[1])), 1/N * rfft(indicateMat), - zeros(size(X̂r)), plan_rfft(X), i, X̂r)] - for (trans, solRe, solIm, P, mI, evalX) in listOfSols - @test oldgradient((X)->real.(trans(X))[mI, j], evalX)[1] ≈ + listOfSols = [(X -> fft(X, (1, 2)), real(bfft(indicateMat)), real(bfft(indicateMat*im)), + plan_fft(X), i, X, true), + (K -> ifft(K, (1, 2)), 1/N*real(fft(indicateMat)), 1/N*real(fft(indicateMat*im)), + plan_fft(X), i, X, false), + (X -> bfft(X, (1, 2)), real(fft(indicateMat)), real(fft(indicateMat*im)), nothing, i, + X, false), + ] + for (trans, solRe, solIm, P, mI, evalX, fft_or_rfft) in listOfSols + @test gradient((X)->real.(trans(X))[mI, j], evalX)[1] ≈ solRe - @test oldgradient((X)->imag.(trans(X))[mI, j], evalX)[1] ≈ + @test gradient((X)->imag.(trans(X))[mI, j], evalX)[1] ≈ solIm - if typeof(P) <:AbstractFFTs.Plan && maximum(trans .== [fft,rfft]) - @test oldgradient((X)->real.(P * X)[mI, j], evalX)[1] ≈ + if typeof(P) <:AbstractFFTs.Plan && fft_or_rfft + @test gradient((X)->real.(P * X)[mI, j], evalX)[1] ≈ solRe - @test oldgradient((X)->imag.(P * X)[mI, j], evalX)[1] ≈ + @test gradient((X)->imag.(P * X)[mI, j], evalX)[1] ≈ solIm elseif typeof(P) <: AbstractFFTs.Plan - @test oldgradient((X)->real.(P \ X)[mI, j], evalX)[1] ≈ + @test gradient((X)->real.(P \ X)[mI, j], evalX)[1] ≈ solRe # for whatever reason the rfft_plan doesn't handle this case well, # even though irfft does if eltype(evalX) <: Real - @test oldgradient((X)->imag.(P \ X)[mI, j], evalX)[1] ≈ + @test gradient((X)->imag.(P \ X)[mI, j], evalX)[1] ≈ solIm end end @@ -1692,47 +1687,44 @@ end x = [-0.353213 -0.789656 -0.270151; -0.95719 -1.27933 0.223982] # check ffts for individual dimensions for trans in (fft, ifft, bfft) - @test oldgradient((x)->sum(abs.(trans(x))), x)[1] ≈ - oldgradient( (x) -> sum(abs.(trans(trans(x,1),2))), x)[1] + @test gradient((x)->sum(abs.(trans(x, (1, 2)))), x)[1] ≈ + gradient( (x) -> sum(abs.(trans(trans(x,1),2))), x)[1] # switch sum abs order - @test oldgradient((x)->abs(sum((trans(x)))),x)[1] ≈ - oldgradient( (x) -> abs(sum(trans(trans(x,1),2))), x)[1] + @test gradient((x)->abs(sum((trans(x)))),x)[1] ≈ + gradient( (x) -> abs(sum(trans(trans(x,1),2))), x)[1] # dims parameter for 
the function - @test oldgradient((x, dims)->sum(abs.(trans(x,dims))), x, (1,2))[1] ≈ - oldgradient( (x) -> sum(abs.(trans(x))), x)[1] - # (1,2) should be the same as no index - @test oldgradient( (x) -> sum(abs.(trans(x,(1,2)))), x)[1] ≈ - oldgradient( (x) -> sum(abs.(trans(trans(x,1),2))), x)[1] - @test gradcheck(x->sum(abs.(trans(x))), x) + @test gradient((x, dims)->sum(abs.(trans(x,dims))), x, (1,2))[1] ≈ + gradient( (x) -> sum(abs.(trans(x, (1, 2)))), x)[1] + @test gradcheck(x->sum(abs.(trans(x, (1, 2)))), x) @test gradcheck(x->sum(abs.(trans(x, 2))), x) end - @test oldgradient((x)->sum(abs.(rfft(x))), x)[1] ≈ - oldgradient( (x) -> sum(abs.(fft(rfft(x,1),2))), x)[1] - @test oldgradient((x, dims)->sum(abs.(rfft(x,dims))), x, (1,2))[1] ≈ - oldgradient( (x) -> sum(abs.(rfft(x))), x)[1] + @test gradient((x)->sum(abs.(rfft(x, (1, 2)))), x)[1] ≈ + gradient( (x) -> sum(abs.(fft(rfft(x,1),2))), x)[1] + @test gradient((x, dims)->sum(abs.(rfft(x,dims))), x, (1,2))[1] ≈ + gradient( (x) -> sum(abs.(rfft(x, (1, 2)))), x)[1] # Test type stability of fft x = randn(Float64,16) P = plan_fft(x) - @test typeof(oldgradient(x->sum(abs2,ifft(fft(x))),x)[1]) == Array{Complex{Float64},1} - @test typeof(oldgradient(x->sum(abs2,P\(P*x)),x)[1]) == Array{Complex{Float64},1} - @test typeof(oldgradient(x->sum(abs2,irfft(rfft(x),16)),x)[1]) == Array{Float64,1} + @test typeof(gradient(x->sum(abs2,ifft(fft(x, 1), 1)),x)[1]) == Array{Float64,1} + @test typeof(gradient(x->sum(abs2,P\(P*x)),x)[1]) == Array{Float64,1} + @test typeof(gradient(x->sum(abs2,irfft(rfft(x, 1),16, 1)),x)[1]) == Array{Float64,1} x = randn(Float64,16,16) - @test typeof(oldgradient(x->sum(abs2,ifft(fft(x,1),1)),x)[1]) == Array{Complex{Float64},2} - @test typeof(oldgradient(x->sum(abs2,irfft(rfft(x,1),16,1)),x)[1]) == Array{Float64,2} + @test typeof(gradient(x->sum(abs2,ifft(fft(x,1),1)),x)[1]) == Array{Float64,2} + @test typeof(gradient(x->sum(abs2,irfft(rfft(x,1),16,1)),x)[1]) == Array{Float64,2} x = randn(Float32,16) P = plan_fft(x) - @test typeof(oldgradient(x->sum(abs2,ifft(fft(x))),x)[1]) == Array{Complex{Float32},1} - @test typeof(oldgradient(x->sum(abs2,P\(P*x)),x)[1]) == Array{Complex{Float32},1} - @test typeof(oldgradient(x->sum(abs2,irfft(rfft(x),16)),x)[1]) == Array{Float32,1} + @test typeof(gradient(x->sum(abs2,ifft(fft(x, 1), 1)),x)[1]) == Array{Float32,1} + @test typeof(gradient(x->sum(abs2,P\(P*x)),x)[1]) == Array{Float32,1} + @test typeof(gradient(x->sum(abs2,irfft(rfft(x, 1),16, 1)),x)[1]) == Array{Float32,1} x = randn(Float32,16,16) - @test typeof(oldgradient(x->sum(abs2,ifft(fft(x,1),1)),x)[1]) == Array{Complex{Float32},2} - @test typeof(oldgradient(x->sum(abs2,irfft(rfft(x,1),16,1)),x)[1]) == Array{Float32,2} + @test typeof(gradient(x->sum(abs2,ifft(fft(x,1),1)),x)[1]) == Array{Float32,2} + @test typeof(gradient(x->sum(abs2,irfft(rfft(x,1),16,1)),x)[1]) == Array{Float32,2} end @testset "FillArrays" begin From 108e5a19d8fa7187f6eaece7a142c48d71dfd0d2 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Wed, 8 Mar 2023 23:12:11 +0100 Subject: [PATCH 408/490] Update Project.toml --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index f78a49654..f3811db1b 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.55" +version = "0.6.56" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 1aec78f6c7e3fbccda1d2fdd703b7853e6e03345 Mon Sep 17 00:00:00 2001 From: Kristoffer Carlsson 
Date: Tue, 14 Mar 2023 13:41:46 +0100 Subject: [PATCH 409/490] move require usage to extensions on 1.9+ (#1390) * move require usage to extensions on 1.9+ * remove extra loads in tracker extension * fix an unexprted function --- Project.toml | 15 ++++++++++++ ext/ZygoteColorsExt.jl | 13 ++++++++++ .../distances.jl => ext/ZygoteDistancesExt.jl | 24 +++++++++++++++---- ext/ZygoteTrackerExt.jl | 17 +++++++++++++ src/Zygote.jl | 10 ++++---- src/flux.jl | 7 ------ 6 files changed, 68 insertions(+), 18 deletions(-) create mode 100644 ext/ZygoteColorsExt.jl rename src/lib/distances.jl => ext/ZygoteDistancesExt.jl (83%) create mode 100644 ext/ZygoteTrackerExt.jl delete mode 100644 src/flux.jl diff --git a/Project.toml b/Project.toml index f3811db1b..64b79bca0 100644 --- a/Project.toml +++ b/Project.toml @@ -26,12 +26,24 @@ SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" +[weakdeps] +Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" +Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" +Tracker= "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" + +[extensions] +ZygoteColorsExt = "Colors" +ZygoteDistancesExt = "Distances" +ZygoteTrackerExt = "Tracker" + [compat] AbstractFFTs = "1.3.1" ChainRules = "1.44.1" ChainRulesCore = "1.9" ChainRulesTestUtils = "1" +Colors = "0.12" DiffRules = "1.4" +Distances = "0.10" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12, 0.13" ForwardDiff = "0.10" GPUArrays = "8.4.2" @@ -43,10 +55,12 @@ NaNMath = "0.3, 1" Requires = "1.1" SnoopPrecompile = "1.0.3" SpecialFunctions = "1.6, 2" +Tracker = "0.2" ZygoteRules = "0.2.1" julia = "1.6" [extras] +Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" ChainRulesTestUtils = "cdddcdb0-9152-4a09-a978-84456f9df70a" Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" @@ -54,6 +68,7 @@ FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" [targets] test = ["ChainRulesTestUtils", "CUDA", "Distances", "FFTW", "FiniteDifferences", "PyCall", "Test"] diff --git a/ext/ZygoteColorsExt.jl b/ext/ZygoteColorsExt.jl new file mode 100644 index 000000000..83a743439 --- /dev/null +++ b/ext/ZygoteColorsExt.jl @@ -0,0 +1,13 @@ +module ZygoteColorsExt + +if isdefined(Base, :get_extension) + using Zygote + using Colors +else + using ..Zygote + using ..Colors +end + +Zygote.@non_differentiable Colors.ColorTypes._parameter_upper_bound(::Any...) + +end diff --git a/src/lib/distances.jl b/ext/ZygoteDistancesExt.jl similarity index 83% rename from src/lib/distances.jl rename to ext/ZygoteDistancesExt.jl index e4ecceeb6..d1342ac5f 100644 --- a/src/lib/distances.jl +++ b/ext/ZygoteDistancesExt.jl @@ -1,4 +1,16 @@ -using .Distances +module ZygoteDistancesExt + +if isdefined(Base, :get_extension) + using Zygote + using Distances + using LinearAlgebra +else + using ..Zygote + using ..Distances + using ..LinearAlgebra +end + +using Zygote: @adjoint, @adjoint, AContext, _pullback @adjoint function (::SqEuclidean)(x::AbstractVector, y::AbstractVector) δ = x .- y @@ -66,7 +78,7 @@ end _sqrt_if_positive(d, δ) = d > δ ? 
sqrt(d) : zero(d) -function _pullback(cx::AContext, ::Core.kwftype(typeof(pairwise)), +function Zygote._pullback(cx::AContext, ::Core.kwftype(typeof(pairwise)), kws::@NamedTuple{dims::Int}, ::typeof(pairwise), dist::Euclidean, X::AbstractMatrix, Y::AbstractMatrix) # Modify the forwards-pass slightly to ensure stability on the reverse. @@ -77,11 +89,11 @@ function _pullback(cx::AContext, ::Core.kwftype(typeof(pairwise)), return _sqrt_if_positive.(D2, δ) end res, back = _pullback(cx, _pairwise_euclidean, SqEuclidean(dist.thresh), X, Y) - pairwise_Euclidean_pullback(Δ) = (nothing, nothing, back(unthunk_tangent(Δ))...) + pairwise_Euclidean_pullback(Δ) = (nothing, nothing, back(Zygote.unthunk_tangent(Δ))...) return res, pairwise_Euclidean_pullback end -function _pullback(cx::AContext, ::Core.kwftype(typeof(pairwise)), +function Zygote._pullback(cx::AContext, ::Core.kwftype(typeof(pairwise)), kws::@NamedTuple{dims::Int}, ::typeof(pairwise), dist::Euclidean, X::AbstractMatrix) # Modify the forwards-pass slightly to ensure stability on the reverse. @@ -92,6 +104,8 @@ function _pullback(cx::AContext, ::Core.kwftype(typeof(pairwise)), return _sqrt_if_positive.(D2, δ) end res, back = _pullback(cx, _pairwise_euclidean, SqEuclidean(dist.thresh), X) - pairwise_Euclidean_pullback(Δ) = (nothing, nothing, back(unthunk_tangent(Δ))...) + pairwise_Euclidean_pullback(Δ) = (nothing, nothing, back(Zygote.unthunk_tangent(Δ))...) return res, pairwise_Euclidean_pullback end + +end diff --git a/ext/ZygoteTrackerExt.jl b/ext/ZygoteTrackerExt.jl new file mode 100644 index 000000000..7d1a12df6 --- /dev/null +++ b/ext/ZygoteTrackerExt.jl @@ -0,0 +1,17 @@ +module ZygoteTrackerExt + +if isdefined(Base, :get_extension) + using Zygote + using Tracker: Tracker, TrackedArray, TrackedReal +else + using ..Zygote + using ..Tracker: Tracker, TrackedArray, TrackedReal +end + +Zygote.unwrap(x::Union{TrackedArray,TrackedReal}) = Tracker.data(x) + +Zygote.pullback(f, ps::Tracker.Params) = pullback(f, ZygtParams(ps)) +Tracker.forward(f, ps::Params) = Tracker.forward(f, Tracker.Params(ps)) +Tracker.gradient_(f, ps::Params) = Tracker.gradient_(f, Tracker.Params(ps)) + +end diff --git a/src/Zygote.jl b/src/Zygote.jl index 05d0bd80e..fddb30e5a 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -43,7 +43,6 @@ include("lib/forward.jl") include("lib/utils.jl") include("lib/range.jl") include("lib/logexpfunctions.jl") -@init @require Distances="b4f34e82-e78d-54a5-968a-f98e89d6e8f7" include("lib/distances.jl") # we need to define this late, so that the genfuncs see lib.jl # Move using statements out of this file to help with sysimage building @@ -53,12 +52,11 @@ include("compiler/interface2.jl") include("profiler/Profile.jl") -@init @require Tracker="9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" begin - include("flux.jl") -end -@init @require Colors="5ae59095-9a9b-59fe-a467-6f913c188581" begin - @non_differentiable Colors.ColorTypes._parameter_upper_bound(::Any...) 
+if !isdefined(Base, :get_extension) + @init @require Distances="b4f34e82-e78d-54a5-968a-f98e89d6e8f7" include("../ext/ZygoteDistancesExt.jl") + @init @require Tracker="9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" include("../ext/ZygoteTrackerExt.jl") + @init @require Colors="5ae59095-9a9b-59fe-a467-6f913c188581" include("../ext/ZygoteColorsExt.jl") end using InteractiveUtils diff --git a/src/flux.jl b/src/flux.jl deleted file mode 100644 index 0fd53a1c5..000000000 --- a/src/flux.jl +++ /dev/null @@ -1,7 +0,0 @@ -using .Tracker: TrackedArray, TrackedReal - -unwrap(x::Union{TrackedArray,TrackedReal}) = Tracker.data(x) - -pullback(f, ps::Tracker.Params) = pullback(f, Params(ps)) -Tracker.forward(f, ps::Params) = Tracker.forward(f, Tracker.Params(ps)) -Tracker.gradient_(f, ps::Params) = Tracker.gradient_(f, Tracker.Params(ps)) From 20660ffe99cb531cb114c315652c24501bc7ca94 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Tue, 14 Mar 2023 15:07:27 +0100 Subject: [PATCH 410/490] Fix duplicate import (#1391) --- ext/ZygoteDistancesExt.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ext/ZygoteDistancesExt.jl b/ext/ZygoteDistancesExt.jl index d1342ac5f..2fe7c56ad 100644 --- a/ext/ZygoteDistancesExt.jl +++ b/ext/ZygoteDistancesExt.jl @@ -10,7 +10,7 @@ else using ..LinearAlgebra end -using Zygote: @adjoint, @adjoint, AContext, _pullback +using Zygote: @adjoint, AContext, _pullback @adjoint function (::SqEuclidean)(x::AbstractVector, y::AbstractVector) δ = x .- y From 756dd378b07539bab5a6bc45d183223467c5d3f7 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Tue, 14 Mar 2023 15:59:51 +0100 Subject: [PATCH 411/490] Update Project.toml --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 64b79bca0..c8598a3b0 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.56" +version = "0.6.57" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From bb560f223a2b12e36bc1c48914683424012d286d Mon Sep 17 00:00:00 2001 From: chengchingwen Date: Wed, 15 Mar 2023 00:20:10 +0800 Subject: [PATCH 412/490] generated z2d --- src/compiler/chainrules.jl | 35 +++++++++++++++++++++++++++-------- 1 file changed, 27 insertions(+), 8 deletions(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index a19d7f230..d2f46a1c3 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -313,15 +313,34 @@ end z2d(dx::NamedTuple, primal::AbstractDict) = dx function z2d(delta::NamedTuple, primal::T) where T # arbitrart struct - fnames = fieldnames(T) - deltas = map(n -> get(delta, n, nothing), fnames) - primals = map(n -> getfield(primal, n), fnames) - inner = map(z2d, deltas, primals) # recurse into fields - if inner isa Tuple{Vararg{AbstractZero}} - return NoTangent() # collapse all-zero case + if @generated + fnames = fieldnames(T) + N = length(fnames) + deltas = [ :($(Symbol(:delta_, fname)) = get(delta, $(QuoteNode(fname)), nothing)) for fname in fnames ] + primals = [ :($(Symbol(:primal_, fname)) = getfield(primal, $(QuoteNode(fname)))) for fname in fnames ] + inner = Expr(:tuple, [ :(z2d($(Symbol(:delta_, fname)), $(Symbol(:primal_, fname)))) for fname in fnames ]...) + return quote + $(deltas...) + $(primals...) 
+ inner = $inner + if inner isa Tuple{Vararg{AbstractZero}} + return NoTangent() # collapse all-zero case + else + backing = NamedTuple{$fnames}(inner) + return canonicalize(Tangent{T, typeof(backing)}(backing)) + end + end else - backing = NamedTuple{fnames}(inner) - return canonicalize(Tangent{T, typeof(backing)}(backing)) + fnames = fieldnames(T) + deltas = map(n -> get(delta, n, nothing), fnames) + primals = map(n -> getfield(primal, n), fnames) + inner = map(z2d, deltas, primals) # recurse into fields + if inner isa Tuple{Vararg{AbstractZero}} + return NoTangent() # collapse all-zero case + else + backing = NamedTuple{fnames}(inner) + return canonicalize(Tangent{T, typeof(backing)}(backing)) + end end end From d7cdea3cc3eff3fe03974766ce679537533b567c Mon Sep 17 00:00:00 2001 From: chengchingwen Date: Wed, 15 Mar 2023 00:50:26 +0800 Subject: [PATCH 413/490] remove canonicalize --- src/compiler/chainrules.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index d2f46a1c3..8a3f4c4fb 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -327,7 +327,7 @@ function z2d(delta::NamedTuple, primal::T) where T # arbitrart struct return NoTangent() # collapse all-zero case else backing = NamedTuple{$fnames}(inner) - return canonicalize(Tangent{T, typeof(backing)}(backing)) + return Tangent{T, typeof(backing)}(backing) end end else @@ -339,7 +339,7 @@ function z2d(delta::NamedTuple, primal::T) where T # arbitrart struct return NoTangent() # collapse all-zero case else backing = NamedTuple{fnames}(inner) - return canonicalize(Tangent{T, typeof(backing)}(backing)) + return Tangent{T, typeof(backing)}(backing) end end end From 2490c792b3cb62fadc5837d259be383bbc8f1302 Mon Sep 17 00:00:00 2001 From: Simone Carlo Surace <51025924+simsurace@users.noreply.github.com> Date: Wed, 15 Mar 2023 11:16:31 +0100 Subject: [PATCH 414/490] Fix `reverse` failure (#1396) * Add internal function `_reverse` and overloads * Add unit tests * Correct issue number * Label testset * Add missing wrappers * Avoid `collect` in `_reverse` for `Hermitian` and `Symmetric` Co-authored-by: David Widmann * Use `_reverse` instead of `reverse` Co-authored-by: David Widmann * Fix wrong names :) Co-authored-by: David Widmann * Add end user test case * Add `using Zygote: _reverse` --------- Co-authored-by: David Widmann --- src/lib/array.jl | 17 +++++++++++++++-- test/lib/array.jl | 33 ++++++++++++++++++++++++++++++++- 2 files changed, 47 insertions(+), 3 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 1c6e09916..b7c6d45ee 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -2,6 +2,8 @@ using Random, FillArrays, AbstractFFTs using FillArrays: AbstractFill, getindex_value using Base.Broadcast: broadcasted, broadcast_shape using Distributed: pmap, AbstractWorkerPool +using LinearAlgebra: Diagonal, Hermitian, LowerTriangular, UpperTriangular +using LinearAlgebra: UnitLowerTriangular, UnitUpperTriangular @adjoint Array(xs::AbstractArray) = Array(xs), ȳ -> (ȳ,) @adjoint Array(xs::Array) = Array(xs), ȳ -> (ȳ,) @@ -165,10 +167,21 @@ end # This is also used by comprehensions, which do guarantee iteration order. # Not done for pmap, presumably because all is lost if you are relying on its order. 
_tryreverse(m, backs, Δ) = backs, Δ -_tryreverse(m::typeof(map), backs, Δ) = reverse(backs), reverse(Δ) +_tryreverse(m::typeof(map), backs, Δ) = _reverse(backs), _reverse(Δ) _tryreverse(m, x) = x -_tryreverse(m::typeof(map), x) = reverse(x) +_tryreverse(m::typeof(map), x) = _reverse(x) + +# Fallback +_reverse(x) = reverse(x) + +# Known cases in the standard library on which `reverse` errors (issue #1393) +_reverse(x::LowerTriangular) = UpperTriangular(_reverse(parent(x))) +_reverse(x::UpperTriangular) = LowerTriangular(_reverse(parent(x))) +_reverse(x::UnitLowerTriangular) = UnitUpperTriangular(_reverse(parent(x))) +_reverse(x::UnitUpperTriangular) = UnitLowerTriangular(_reverse(parent(x))) +_reverse(x::Hermitian) = Hermitian(_reverse(x.data), x.uplo == 'U' ? :L : :U) +_reverse(x::Symmetric) = Symmetric(_reverse(x.data), x.uplo == 'U' ? :L : :U) # With mismatched lengths, map stops early. With mismatched shapes, it makes a vector. # So we keep axes(x) to restore gradient dx to its full length & correct shape. diff --git a/test/lib/array.jl b/test/lib/array.jl index d02e9f9d3..889301c1e 100644 --- a/test/lib/array.jl +++ b/test/lib/array.jl @@ -1,5 +1,7 @@ using ChainRulesTestUtils -using Zygote: ZygoteRuleConfig, _pullback +using LinearAlgebra: Diagonal, Hermitian, LowerTriangular, UpperTriangular +using LinearAlgebra: UnitLowerTriangular, UnitUpperTriangular +using Zygote: ZygoteRuleConfig, _pullback, _reverse # issue 897 @@ -65,3 +67,32 @@ end end @test gradient(f_comprehension, w)[1] == ones(5) end + +@testset "_reverse" begin + m = [1 2 3; 4 5 6; 7 8 9] + @testset "$wrapper" for wrapper in [ + Hermitian, Symmetric, LowerTriangular, UpperTriangular, + UnitLowerTriangular, UnitUpperTriangular, + ] + M = wrapper(m) + @test collect(_reverse(M)) == _reverse(collect(M)) + end +end + +@testset "rrule for `map`" begin + @testset "MWE from #1393" begin + # https://github.com/FluxML/Zygote.jl/issues/1393#issuecomment-1468496804 + struct Foo1393 x::Float64 end + (f::Foo1393)(x) = f.x * x + x = randn(5, 5) + out, pb = Zygote.pullback(x -> map(Foo1393(5.0), x), x) + @testset "$wrapper" for wrapper in [ + Hermitian, Symmetric, LowerTriangular, UpperTriangular, + UnitLowerTriangular, UnitUpperTriangular, + ] + m = wrapper(rand(5, 5)) + res = only(pb(m)) + @test res == 5m + end + end +end From 55fea5efe6a7e78c95d621cabe1393a7db8e57de Mon Sep 17 00:00:00 2001 From: David Widmann Date: Wed, 15 Mar 2023 11:37:31 +0100 Subject: [PATCH 415/490] Update Project.toml (#1397) --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index c8598a3b0..41c476eee 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.57" +version = "0.6.58" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From a9f56a72b620b59fa79dacac2d2abfb677a0da51 Mon Sep 17 00:00:00 2001 From: chengchingwen Date: Thu, 16 Mar 2023 00:05:01 +0800 Subject: [PATCH 416/490] split fallback as another function and test --- src/compiler/chainrules.jl | 24 ++++++++++++++---------- test/chainrules.jl | 7 +++++++ 2 files changed, 21 insertions(+), 10 deletions(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 8a3f4c4fb..9b8b60552 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -312,6 +312,19 @@ end # Right now it uses a NamedTuple but not for fields of the AbstractDict struct z2d(dx::NamedTuple, primal::AbstractDict) = dx +function 
_z2d_struct_fallback(delta::NamedTuple, primal::T) where T + fnames = fieldnames(T) + deltas = map(n -> get(delta, n, nothing), fnames) + primals = map(n -> getfield(primal, n), fnames) + inner = map(z2d, deltas, primals) # recurse into fields + if inner isa Tuple{Vararg{AbstractZero}} + return NoTangent() # collapse all-zero case + else + backing = NamedTuple{fnames}(inner) + return Tangent{T, typeof(backing)}(backing) + end +end + function z2d(delta::NamedTuple, primal::T) where T # arbitrart struct if @generated fnames = fieldnames(T) @@ -331,16 +344,7 @@ function z2d(delta::NamedTuple, primal::T) where T # arbitrart struct end end else - fnames = fieldnames(T) - deltas = map(n -> get(delta, n, nothing), fnames) - primals = map(n -> getfield(primal, n), fnames) - inner = map(z2d, deltas, primals) # recurse into fields - if inner isa Tuple{Vararg{AbstractZero}} - return NoTangent() # collapse all-zero case - else - backing = NamedTuple{fnames}(inner) - return Tangent{T, typeof(backing)}(backing) - end + return _z2d_struct_fallback(delta, primal) end end diff --git a/test/chainrules.jl b/test/chainrules.jl index 7bd66a4d2..7e55720de 100644 --- a/test/chainrules.jl +++ b/test/chainrules.jl @@ -412,4 +412,11 @@ end @test Zygote.z2d((; x=(; re=1)), Ref(3.0+im)) == nested @test Zygote.z2d((; x=(; re=nothing)), Ref(3.0+im)) === NoTangent() end + + x = (c = (a = randn(3,3), b = rand(3)), d = randn(5)) + z2d_compiled = Zygote.z2d(x, x) + z2d_fallback = Zygote._z2d_struct_fallback(x, x) + @test z2d_compiled.d === z2d_fallback.d + @test z2d_compiled.c.a === z2d_fallback.c.a + @test z2d_compiled.c.b === z2d_fallback.c.b end From d2b3b02a882f3418b2bb6dcc4cc338c7b927729c Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Wed, 15 Mar 2023 19:20:50 -0700 Subject: [PATCH 417/490] Actually make sure conda env dir is set on Buildkite CI (#1392) Actually make sure conda env dir is set on Buildkite CI The original fix was incorrect due to https://buildkite.com/docs/pipelines/environment-variables#runtime-variable-interpolation, but happened to pass by coincidence. 
--- .buildkite/pipeline.yml | 4 ++-- Project.toml | 3 ++- test/features.jl | 4 ++++ 3 files changed, 8 insertions(+), 3 deletions(-) diff --git a/.buildkite/pipeline.yml b/.buildkite/pipeline.yml index 8b7f20dac..5e2ae545d 100644 --- a/.buildkite/pipeline.yml +++ b/.buildkite/pipeline.yml @@ -5,7 +5,7 @@ steps: version: "1.6" - JuliaCI/julia-test#v1: ~ command: - - mkdir -p "${JULIA_DEPOT_PATH}/conda/3/x86_64" + - mkdir -p "$${JULIA_DEPOT_PATH}/conda/3/x86_64" agents: queue: "juliagpu" cuda: "*" @@ -17,7 +17,7 @@ steps: version: "1" - JuliaCI/julia-test#v1: ~ command: - - mkdir -p "${JULIA_DEPOT_PATH}/conda/3/x86_64" + - mkdir -p "$${JULIA_DEPOT_PATH}/conda/3/x86_64" agents: queue: "juliagpu" cuda: "*" diff --git a/Project.toml b/Project.toml index 41c476eee..040360094 100644 --- a/Project.toml +++ b/Project.toml @@ -61,6 +61,7 @@ julia = "1.6" [extras] Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" +Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d" CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" ChainRulesTestUtils = "cdddcdb0-9152-4a09-a978-84456f9df70a" Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" @@ -71,4 +72,4 @@ Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" [targets] -test = ["ChainRulesTestUtils", "CUDA", "Distances", "FFTW", "FiniteDifferences", "PyCall", "Test"] +test = ["ChainRulesTestUtils", "Conda", "CUDA", "Distances", "FFTW", "FiniteDifferences", "PyCall", "Test"] diff --git a/test/features.jl b/test/features.jl index 77a9b7165..78f496721 100644 --- a/test/features.jl +++ b/test/features.jl @@ -682,6 +682,10 @@ end end @testset "PyCall custom @adjoint" begin + # Trigger Python install if required. Required for Buildkite CI! + import Conda + Conda.list() + import PyCall math = PyCall.pyimport("math") pysin(x) = math.sin(x) From 0c150fff80f14844ef1a680fc4e18b3171307dc9 Mon Sep 17 00:00:00 2001 From: chengchingwen Date: Fri, 17 Mar 2023 06:46:48 +0800 Subject: [PATCH 418/490] bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 040360094..346c63e9f 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.58" +version = "0.6.59" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 5d3150b9d45abaa7f5143989c532d26d097fd22e Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 19 Mar 2023 21:46:19 -0400 Subject: [PATCH 419/490] fix broadcasted 3-arg * --- src/lib/broadcast.jl | 11 ++++++++++- test/gradcheck.jl | 30 ++++++++++++++++++++++++++++++ 2 files changed, 40 insertions(+), 1 deletion(-) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 4508c3ca2..60c905cd8 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -162,6 +162,14 @@ end @adjoint broadcasted(::Type{T}, x::Numeric) where {T<:Number} = T.(x), ȳ -> (nothing, _project(x, ȳ),) + +# Fix https://github.com/FluxML/Zygote.jl/issues/1399 by ensuring we avoid a lazier CR rule +# https://github.com/JuliaDiff/ChainRules.jl/blob/5855c10bdbe691fc07822752f5b5865b9cea44d3/src/rulesets/Base/broadcast.jl#L199 +@adjoint function broadcasted(::typeof(*), x::Numeric, y::Numeric, zs::Numeric...) + y, back = _broadcast_generic(__context__, *, x, y, zs...) 
+ return y, Base.tail∘back +end + # General Fallback # ================ @@ -194,7 +202,8 @@ _dual_safearg(x::Ref{<:Numeric{<:Complex}}) = true _dual_safearg(x::Union{Type,Val,Symbol}) = true # non-differentiable types _dual_safearg(x) = false -@adjoint function broadcasted(::AbstractArrayStyle, f::F, args...) where {F} +@adjoint broadcasted(::AbstractArrayStyle, f::F, args...) where {F} = _broadcast_generic(__context__, f, args...) +@inline function _broadcast_generic(__context__, f::F, args...) where {F} T = Broadcast.combine_eltypes(f, args) # Avoid generic broadcasting in two easy cases: if T == Bool diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 6e9b954fc..5037d4c88 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -2122,3 +2122,33 @@ end @test gradcheck(foo ∘ first, [-1e-5]) @test gradient(foo, 1024.0)[1] ≈ 2//3 end + +@testset "Zygote #1399" begin + function f1(t) # this works + r = 5.0 # (changed to make answers the same) + sum(@. exp(-t*r)) + end + @test gradient(f1, [1.0, 0.2])[1] ≈ [-0.03368973499542734, -1.8393972058572117] + + function f2(t) # this works, too + sum(@. exp(-t*5)) + end + @test gradient(f2, [1.0, 0.2])[1] ≈ [-0.03368973499542734, -1.8393972058572117] + + function f3(t) # but this didn't work + r = 1.0 + sum(@. exp(-t*r*5)) + end + @test gradient(f3, [1.0, 0.2])[1] ≈ [-0.03368973499542734, -1.8393972058572117] + + # Also test 4-arg case + function f4(t) + r = -0.5 + sum(@. exp(t*r*5*2)) + end + @test gradient(f4, [1.0, 0.2])[1] ≈ [-0.03368973499542734, -1.8393972058572117] + + # Check that trivial scalar broadcast hasn't gone weird: + @test gradient(x -> @.(x * x * x), 2.0) == gradient(x -> x * (x * x), 2.0) + @test gradient(x -> @.(3.0*x*2.0*x), 2.0) == gradient(x -> 6(x^2), 2.0) +end From 413728b65ff0d9a891afe2670ffac2d3a7ccf742 Mon Sep 17 00:00:00 2001 From: Alexander Seiler Date: Thu, 30 Mar 2023 04:49:09 +0200 Subject: [PATCH 420/490] Fix some typos (#1407) * Fix some typos Signed-off-by: Alexander Seiler * Update test/features.jl Co-authored-by: Brian Chen --------- Signed-off-by: Alexander Seiler Co-authored-by: Brian Chen --- docs/src/adjoints.md | 2 +- docs/src/index.md | 2 +- src/compiler/chainrules.jl | 6 +++--- src/compiler/interface.jl | 2 +- src/lib/array.jl | 2 +- src/lib/broadcast.jl | 2 +- src/lib/grad.jl | 2 +- test/features.jl | 4 ++-- 8 files changed, 11 insertions(+), 11 deletions(-) diff --git a/docs/src/adjoints.md b/docs/src/adjoints.md index 45f0662b3..fab185d20 100644 --- a/docs/src/adjoints.md +++ b/docs/src/adjoints.md @@ -7,7 +7,7 @@ To define custom sensitivities using ChainRulesCore, define a `ChainRulesCore.rrule(f, args...; kwargs...)`. Head to [ChainRules project's documentation](https://www.juliadiff.org/ChainRulesCore.jl/stable/) for more information. **If you are defining your custom adjoints using ChainRulesCore then you do not need to read this page**, and can consider it as documenting a legacy feature. - This page exists to descibe how Zygote works, and how adjoints can be directly defined for Zygote. + This page exists to describe how Zygote works, and how adjoints can be directly defined for Zygote. Defining adjoints this way does not make them accessible to other AD systems, but does let you do things that directly depend on how Zygote works. It allows for specific definitions of adjoints that are only defined for Zygote (which might work differently to more generic definitions defined for all AD). 
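For reference, a minimal sketch of the ChainRulesCore route recommended in the documentation text above; the function `myrelu` and its rule are illustrative assumptions, not part of this patch:

using Zygote, ChainRulesCore

myrelu(x::Real) = max(x, zero(x))          # hypothetical user function

function ChainRulesCore.rrule(::typeof(myrelu), x::Real)
    y = myrelu(x)
    # the pullback returns a tangent for the function itself (none) and one for x
    myrelu_pullback(ȳ) = (NoTangent(), ȳ * (x > 0))
    return y, myrelu_pullback
end

Zygote.gradient(myrelu, 2.0)   # (1.0,)  -- Zygote picks up the rrule
Zygote.gradient(myrelu, -2.0)  # (0.0,)

Because the rule lives in ChainRulesCore rather than in a Zygote-only `@adjoint`, the same definition is visible to other ChainRules-aware AD systems, which is why the documentation above prefers that route.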
diff --git a/docs/src/index.md b/docs/src/index.md index ca45d5da8..b3ea8d776 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -131,7 +131,7 @@ julia> gradient(colordiff, RGB(1, 0, 0), RGB(0, 1, 0)) ## Explicit and Implicit Parameters -It's easy to work with even very large and complex models, and there are few ways to do this. Autograd-style models pass around a collection of weights. Depending on how you write your model, there are multiple ways to *explicity* take gradients with respect to parameters. For example, the function `linear` accepts the parameters as an argument to the model. So, we directly pass in the parameters, `θ`, as an argument to the function being differentiated. +It's easy to work with even very large and complex models, and there are few ways to do this. Autograd-style models pass around a collection of weights. Depending on how you write your model, there are multiple ways to *explicitly* take gradients with respect to parameters. For example, the function `linear` accepts the parameters as an argument to the model. So, we directly pass in the parameters, `θ`, as an argument to the function being differentiated. ```@docs gradient(f, args...) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 9b8b60552..223d3867c 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -56,7 +56,7 @@ function has_chain_rrule(T) # It can be seen that checking if it matches is the correct way to decide if we should use the rrule or not. - if !is_ambig && matching_cr_sig(no_rrule_m, rrule_m) # Not ambigious, and opted-out. + if !is_ambig && matching_cr_sig(no_rrule_m, rrule_m) # Not ambiguous, and opted-out. # Return instance for configured_rrule_m as that will be invalidated # directly if configured rule added, or indirectly if unconfigured rule added # Do not need an edge for `no_rrule` as no addition of methods to that can cause this @@ -64,7 +64,7 @@ function has_chain_rrule(T) # using the rrule, so not using more rules wouldn't change anything. return false, configured_rrule_m.instance else - # Either is ambigious, and we should try to use it, and then error + # Either is ambiguous, and we should try to use it, and then error # or we are uses a rrule, no need to add any edges for `rrule`, as it will generate # code with natural edges if a new method is defined there. # We also do not need an edge to `no_rrule`, as any time a method is added to `no_rrule` @@ -78,7 +78,7 @@ matching_cr_sig(t, s) = matching_cr_sig(t.method.sig, s.method.sig) matching_cr_sig(::DataType, ::UnionAll) = false matching_cr_sig(::UnionAll, ::DataType) = false matching_cr_sig(t::Type, s::Type) = type_tuple_tail(t) == type_tuple_tail(s) -matching_cr_sig(::Any, ::Nothing) = false # ambigious https://github.com/FluxML/Zygote.jl/issues/1234 +matching_cr_sig(::Any, ::Nothing) = false # ambiguous https://github.com/FluxML/Zygote.jl/issues/1234 type_tuple_tail(d::DataType) = Tuple{d.parameters[2:end]...} function type_tuple_tail(d::UnionAll) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index f350069f4..cce7c4d6d 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -168,7 +168,7 @@ gradient Params([A, B]) Container for implicit parameters, used when differentiating -a zero-argument funtion `() -> loss(A, B)` with respect to `A, B`. +a zero-argument function `() -> loss(A, B)` with respect to `A, B`. 
""" struct Params{B <: Buffer} order::B diff --git a/src/lib/array.jl b/src/lib/array.jl index b7c6d45ee..d65fea2c1 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -608,7 +608,7 @@ end # ChainRules has this also but does not use FillArrays, so we have our own definition # for improved performance. See https://github.com/JuliaDiff/ChainRules.jl/issues/46 Zygote.@adjoint function LinearAlgebra.tr(x::AbstractMatrix) - # x is a squre matrix checked by tr, + # x is a square matrix checked by tr, # so we could just use Eye(size(x, 1)) # to create a Diagonal tr(x), function (Δ::Number) diff --git a/src/lib/broadcast.jl b/src/lib/broadcast.jl index 60c905cd8..504ef614d 100644 --- a/src/lib/broadcast.jl +++ b/src/lib/broadcast.jl @@ -369,7 +369,7 @@ using GPUArraysCore # replaces @require CUDA block, weird indenting to preserve sum(xs, dims = dims), Δ -> (placeholder .= Δ,) end - # Make sure sum(f, ::CuArray) uses broadcase through forward-mode defined above + # Make sure sum(f, ::CuArray) uses broadcast through forward-mode defined above # Not the ChainRules.rrule which will use the Zygote.Context and thus not be GPU compatible function _pullback(cx::AContext, ::typeof(sum), f, xs::AbstractGPUArray) res, back = _pullback(cx, (f, xs) -> sum(f.(xs)), f, xs) diff --git a/src/lib/grad.jl b/src/lib/grad.jl index 38347b312..6b9002f73 100644 --- a/src/lib/grad.jl +++ b/src/lib/grad.jl @@ -11,7 +11,7 @@ Use gradient checkpointing on the call `f(xs...)`. This means that `checkpointed(f, xs...) === f(xs...)`, but when computing the derivative intermediate results from the forward pass of `f` will not be stored. Instead the forward pass will be repeated, when computing the derivative. -This saves memory at the cost of increasing exectution time. +This saves memory at the cost of increasing execution time. !!! warning If `f` is not a pure function, `checkpointed` will likely give wrong results. diff --git a/test/features.jl b/test/features.jl index 78f496721..112c5b937 100644 --- a/test/features.jl +++ b/test/features.jl @@ -322,7 +322,7 @@ end[1] == 5 @test gradient(x -> one(eltype(x)), rand(10))[1] === nothing -# Thre-way control flow merge +# Three-way control flow merge @test gradient(1) do x if x > 0 x *= 2 @@ -486,7 +486,7 @@ end @test gradient(x -> (getindex.(x).^2)[1], Ref.(1:3))[1][1] == (x=2.0,) # rest are (x = 0.0,), but nothing would be OK too @test gradient(x -> (prod.(getindex.(x)))[1], Ref.(eachcol([1 2; 3 4])))[1][1] == (x = [3.0, 1.0],) - # Broadcasting over Ref is handled specially. Tested elsehwere too. + # Broadcasting over Ref is handled specially. Tested elsewhere too. 
@test gradient(x -> sum(sum, x .* [1,2,3]), Ref([4,5])) == ((x = [6.0, 6.0],),) @test gradient(x -> sum(sum, Ref(x) .* [1,2,3]), [4,5]) == ([6.0, 6.0],) end From dc16b2e35cdfa9dda62f636e95271490548cc574 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Fri, 31 Mar 2023 07:49:40 +0200 Subject: [PATCH 421/490] CompatHelper: bump compat for FillArrays to 1, (keep existing compat) (#1409) Co-authored-by: CompatHelper Julia --- Project.toml | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/Project.toml b/Project.toml index 346c63e9f..003b1fc29 100644 --- a/Project.toml +++ b/Project.toml @@ -26,16 +26,6 @@ SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" -[weakdeps] -Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" -Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" -Tracker= "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" - -[extensions] -ZygoteColorsExt = "Colors" -ZygoteDistancesExt = "Distances" -ZygoteTrackerExt = "Tracker" - [compat] AbstractFFTs = "1.3.1" ChainRules = "1.44.1" @@ -44,7 +34,7 @@ ChainRulesTestUtils = "1" Colors = "0.12" DiffRules = "1.4" Distances = "0.10" -FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12, 0.13" +FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12, 0.13, 1" ForwardDiff = "0.10" GPUArrays = "8.4.2" GPUArraysCore = "0.1.1" @@ -59,11 +49,16 @@ Tracker = "0.2" ZygoteRules = "0.2.1" julia = "1.6" +[extensions] +ZygoteColorsExt = "Colors" +ZygoteDistancesExt = "Distances" +ZygoteTrackerExt = "Tracker" + [extras] -Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" -Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d" CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" ChainRulesTestUtils = "cdddcdb0-9152-4a09-a978-84456f9df70a" +Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" +Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d" Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" @@ -73,3 +68,8 @@ Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" [targets] test = ["ChainRulesTestUtils", "Conda", "CUDA", "Distances", "FFTW", "FiniteDifferences", "PyCall", "Test"] + +[weakdeps] +Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" +Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" +Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" From 31811c3f909c82e20d0b4b0d39411750eec9d6ba Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Fri, 7 Apr 2023 08:47:02 -0700 Subject: [PATCH 422/490] v0.6.60 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 003b1fc29..aec621906 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.59" +version = "0.6.60" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From e562b5fa79fc0c634fffedfa4cb4332019e7e71f Mon Sep 17 00:00:00 2001 From: Jeffrey Sun Date: Sun, 9 Apr 2023 18:32:56 -0400 Subject: [PATCH 423/490] Replace "may" with "might" to avoid ambiguity As written, "Non-mutating functions may also use mutation under the hood" might imply that non-mutating functions are _allowed to_ use mutation under the hood --- docs/src/limitations.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/limitations.md b/docs/src/limitations.md index bb3b4614a..f27f74305 100644 --- a/docs/src/limitations.md +++ 
b/docs/src/limitations.md @@ -48,7 +48,7 @@ We got an error message and a long stacktrace. The error informs us that our cod !!! warning - Non-mutating functions may also use mutation under the hood. This can be done for performance reasons or code re-use. + Non-mutating functions might also use mutation under the hood. This can be done for performance reasons or code re-use. ```julia function g!(x, y) From a0f2327eb4337f13de1355ee69d432259e52661d Mon Sep 17 00:00:00 2001 From: Tim Holy Date: Mon, 24 Apr 2023 16:31:46 -0500 Subject: [PATCH 424/490] Migrate from SnoopPrecompile to PrecompileTools --- Project.toml | 4 ++-- src/Zygote.jl | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Project.toml b/Project.toml index aec621906..0853dcd98 100644 --- a/Project.toml +++ b/Project.toml @@ -20,7 +20,7 @@ MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" Requires = "ae029012-a4dd-5104-9daa-d747884805df" -SnoopPrecompile = "66db9d55-30c0-4569-8b51-7e840670fc0c" +PrecompileTools = "aea7be01-6a6a-4083-8856-8a6e6704d82a" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" @@ -43,7 +43,7 @@ LogExpFunctions = "0.3.1" MacroTools = "0.5" NaNMath = "0.3, 1" Requires = "1.1" -SnoopPrecompile = "1.0.3" +PrecompileTools = "1.0.3" SpecialFunctions = "1.6, 2" Tracker = "0.2" ZygoteRules = "0.2.1" diff --git a/src/Zygote.jl b/src/Zygote.jl index fddb30e5a..64564ad7f 100644 --- a/src/Zygote.jl +++ b/src/Zygote.jl @@ -77,11 +77,11 @@ macro profile(ex) end end -using SnoopPrecompile +using PrecompileTools # This caused freezes on early 1.8 patch versions, # see https://github.com/SciML/DiffEqFlux.jl/issues/783 @static if VERSION < v"1.8" || VERSION >= v"1.8.5" - @precompile_all_calls precompile() + @compile_workload precompile() end end # module From 869f8b4065371fe06057b8d2f9b9dea14dbc798b Mon Sep 17 00:00:00 2001 From: Tim Holy Date: Tue, 25 Apr 2023 02:28:50 -0500 Subject: [PATCH 425/490] Fix version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 0853dcd98..0e79be05f 100644 --- a/Project.toml +++ b/Project.toml @@ -43,7 +43,7 @@ LogExpFunctions = "0.3.1" MacroTools = "0.5" NaNMath = "0.3, 1" Requires = "1.1" -PrecompileTools = "1.0.3" +PrecompileTools = "1" SpecialFunctions = "1.6, 2" Tracker = "0.2" ZygoteRules = "0.2.1" From 6ed242e63bddd58a566784102388167867cb7919 Mon Sep 17 00:00:00 2001 From: Tim Besard Date: Wed, 3 May 2023 12:44:20 +0200 Subject: [PATCH 426/490] Fixes for Julia master. --- src/compiler/chainrules.jl | 10 ++++---- src/compiler/emit.jl | 4 +-- src/compiler/interface2.jl | 52 ++++++++++++++++++++++++++++++++++---- 3 files changed, 54 insertions(+), 12 deletions(-) diff --git a/src/compiler/chainrules.jl b/src/compiler/chainrules.jl index 223d3867c..10e7d8abb 100644 --- a/src/compiler/chainrules.jl +++ b/src/compiler/chainrules.jl @@ -15,25 +15,25 @@ The first return value is `true` if the `rrule` exists, `false` otherwise. If it does not, then the second argument is a list of edges to attach to the CodeInfo for a generated function, such that if a suitable rule is defined later, the generated function will recompile. 
""" -function has_chain_rrule(T) +function has_chain_rrule(T, world) config_T, arg_Ts = Iterators.peel(T.parameters) - configured_rrule_m = meta(Tuple{typeof(rrule), config_T, arg_Ts...}) + configured_rrule_m = meta(Tuple{typeof(rrule), config_T, arg_Ts...}; world) is_ambig = configured_rrule_m === nothing # this means there was an ambiguity error, on configured_rrule if !is_ambig && _is_rrule_redispatcher(configured_rrule_m.method) # The config is not being used: # it is being redispatched without config, so we need the method it redispatches to - rrule_m = meta(Tuple{typeof(rrule), arg_Ts...}) + rrule_m = meta(Tuple{typeof(rrule), arg_Ts...}; world) # Thus any no_rrule that might apply must also not have a config because if there was a # no_rrule with a config that applied then there would also be a rrule with config that applied - no_rrule_m = meta(Tuple{typeof(ChainRulesCore.no_rrule), arg_Ts...}) + no_rrule_m = meta(Tuple{typeof(ChainRulesCore.no_rrule), arg_Ts...}; world) else # Not being redispatched: it does have a config rrule_m = configured_rrule_m # Thus any no_rrule that might apply must also have a config because if it applied # it will be identical, and if it doesn't we don't care what it is. - no_rrule_m = meta(Tuple{typeof(ChainRulesCore.no_rrule), config_T, arg_Ts...}) + no_rrule_m = meta(Tuple{typeof(ChainRulesCore.no_rrule), config_T, arg_Ts...}; world) end is_ambig |= rrule_m === nothing # this means there was an ambiguity error on unconfigured rrule diff --git a/src/compiler/emit.jl b/src/compiler/emit.jl index 1c82a44f1..ca79f11ce 100644 --- a/src/compiler/emit.jl +++ b/src/compiler/emit.jl @@ -95,8 +95,8 @@ end varargs(m::Method, n) = m.isva ? n - m.nargs + 1 : nothing -function _generate_pullback_via_decomposition(T) - (m = meta(T)) === nothing && return +function _generate_pullback_via_decomposition(T, world) + (m = meta(T; world)) === nothing && return va = varargs(m.method, length(T.parameters)) forw, back = stacks!(Adjoint(IR(m), varargs = va, normalise = false), T) m, forw, back diff --git a/src/compiler/interface2.jl b/src/compiler/interface2.jl index bf3692a30..732b572d0 100644 --- a/src/compiler/interface2.jl +++ b/src/compiler/interface2.jl @@ -6,7 +6,7 @@ function edge!(m::IRTools.Meta, edge::Core.MethodInstance) return end -@generated function _pullback(ctx::AContext, f, args...) +function _generate_pullback(ctx, world, f, args...) # Try using ChainRulesCore if is_kwfunc(f, args...) # if it is_kw then `args[1]` are the keyword args, `args[2]` is actual function @@ -17,7 +17,7 @@ end chain_rrule_f = :chain_rrule end - hascr, cr_edge = has_chain_rrule(cr_T) + hascr, cr_edge = has_chain_rrule(cr_T, world) hascr && return :($chain_rrule_f(ZygoteRuleConfig(ctx), f, args...)) # No ChainRule, going to have to work it out. 
@@ -25,7 +25,7 @@ end ignore_sig(T) && return :(f(args...), Pullback{$T}(())) g = try - _generate_pullback_via_decomposition(T) + _generate_pullback_via_decomposition(T, world) catch e rethrow(CompileError(T,e)) end @@ -40,10 +40,10 @@ end return update!(meta.code, forw) end -@generated function (j::Pullback{T})(Δ) where T +function _generate_callable_pullback(j::Type{<:Pullback{T}}, world, Δ) where T ignore_sig(T) && return :nothing g = try - _generate_pullback_via_decomposition(T) + _generate_pullback_via_decomposition(T, world) catch e rethrow(CompileError(T,e)) end @@ -57,3 +57,45 @@ end back = slots!(inlineable!(back)) return update!(meta.code, back) end + +if VERSION >= v"1.10.0-DEV.873" + +# on Julia 1.10, generated functions need to keep track of the world age + +function _pullback_generator(world::UInt, source, self, ctx, f, args) + ret = _generate_pullback(ctx, world, f, args...) + ret isa Core.CodeInfo && return ret + + stub = Core.GeneratedFunctionStub(identity, Core.svec(:methodinstance, :ctx, :f, :args), Core.svec()) + stub(world, source, ret) +end + +@eval function _pullback(ctx::AContext, f, args...) + $(Expr(:meta, :generated, _pullback_generator)) + $(Expr(:meta, :generated_only)) +end + +function _callable_pullback_generator(world::UInt, source, self, Δ) + ret = _generate_callable_pullback(self, world, Δ) + ret isa Core.CodeInfo && return ret + + stub = Core.GeneratedFunctionStub(identity, Core.svec(:methodinstance, :Δ), Core.svec()) + stub(world, source, ret) +end + +@eval function (j::Pullback)(Δ) + $(Expr(:meta, :generated, _callable_pullback_generator)) + $(Expr(:meta, :generated_only)) +end + +else + +@generated function _pullback(ctx::AContext, f, args...) + _generate_pullback(ctx, nothing, f, args...) +end + +@generated function (j::Pullback)(Δ) + _generate_callable_pullback(j, nothing, Δ) +end + +end From e70f5ed478702cb44597ef94c156c944b6af2aca Mon Sep 17 00:00:00 2001 From: Tim Besard Date: Wed, 3 May 2023 12:53:18 +0200 Subject: [PATCH 427/490] Disable broken optimization. --- src/lib/literal_getproperty.jl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/lib/literal_getproperty.jl b/src/lib/literal_getproperty.jl index c13f7a89b..3ed4c541e 100644 --- a/src/lib/literal_getproperty.jl +++ b/src/lib/literal_getproperty.jl @@ -41,6 +41,8 @@ end # ugly hack to make differentiating `getproperty` infer a lot better +if VERSION < v"1.10.0-DEV.873" +# XXX: still needed on 1.10? if so, make this generator propagate world ages @generated function _pullback(cx::AContext, ::typeof(literal_getproperty), x, ::Val{f}) where f sig(x) = Tuple{x, typeof(f)} rrule_sig(x) = Tuple{typeof(getproperty), x, typeof(f)} @@ -84,3 +86,5 @@ end end end end + +end From 06867361dd7e3d7d9848ca05c5d5d3a588bc84d5 Mon Sep 17 00:00:00 2001 From: Tim Besard Date: Wed, 10 May 2023 11:33:19 +0200 Subject: [PATCH 428/490] Don't throw during generator expansion, but return an error instead. --- src/compiler/interface2.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/compiler/interface2.jl b/src/compiler/interface2.jl index 732b572d0..0d46061d2 100644 --- a/src/compiler/interface2.jl +++ b/src/compiler/interface2.jl @@ -27,7 +27,7 @@ function _generate_pullback(ctx, world, f, args...) 
g = try _generate_pullback_via_decomposition(T, world) catch e - rethrow(CompileError(T,e)) + return :(throw($(CompileError(T,e)))) end g === nothing && return :(f(args...), Pullback{$T}((f,))) meta, forw, _ = g @@ -45,7 +45,7 @@ function _generate_callable_pullback(j::Type{<:Pullback{T}}, world, Δ) where T g = try _generate_pullback_via_decomposition(T, world) catch e - rethrow(CompileError(T,e)) + return :(throw($(CompileError(T,e)))) end if g === nothing Δ == Nothing && return :nothing From aa6a0eb1a221266d7ef90d08b9bcf94c8965d961 Mon Sep 17 00:00:00 2001 From: Tim Besard Date: Wed, 10 May 2023 13:34:32 +0200 Subject: [PATCH 429/490] Restore literal_getproperty optimization. --- src/lib/literal_getproperty.jl | 61 +++++++++++++++++++++++++--------- 1 file changed, 46 insertions(+), 15 deletions(-) diff --git a/src/lib/literal_getproperty.jl b/src/lib/literal_getproperty.jl index 3ed4c541e..c50cab171 100644 --- a/src/lib/literal_getproperty.jl +++ b/src/lib/literal_getproperty.jl @@ -1,6 +1,6 @@ # Mostly copied over from Cassette in `src/overdub.jl` # Return `Reflection` for signature `sigtypes` and `world`, if possible. Otherwise, return `nothing`. -function reflect(@nospecialize(sigtypes::Tuple), world::UInt = typemax(UInt)) +function reflect(@nospecialize(sigtypes::Tuple), world::UInt) if length(sigtypes) > 2 && sigtypes[1] === typeof(invoke) @assert sigtypes[3] <: Type{<:Tuple} sigtypes = (sigtypes[2], sigtypes[3].parameters[1].parameters...) @@ -41,25 +41,32 @@ end # ugly hack to make differentiating `getproperty` infer a lot better -if VERSION < v"1.10.0-DEV.873" -# XXX: still needed on 1.10? if so, make this generator propagate world ages -@generated function _pullback(cx::AContext, ::typeof(literal_getproperty), x, ::Val{f}) where f +function _generate_literal_getproperty(ctx, world, x, ::Type{Val{f}}) where f + world = something(world, typemax(UInt)) + sig(x) = Tuple{x, typeof(f)} rrule_sig(x) = Tuple{typeof(getproperty), x, typeof(f)} - pb_sig(x) = Tuple{cx, typeof(getproperty), x, typeof(f)} + pb_sig(x) = Tuple{ctx, typeof(getproperty), x, typeof(f)} + @static if VERSION >= v"1.10.0-DEV.65" + which(f, t) = Base._which(Base.signature_type(f, t); world).method + else + which(f, t) = Base.which(f, t) + end - # either `getproperty` has a custom implementation or `_pullback(cx, getproperty, x, f)` + # either `getproperty` has a custom implementation or `_pullback(ctx, getproperty, x, f)` # / `rrule(getproperty, x, f) is overloaded directly is_getfield_fallback = which(getproperty, sig(x)) == which(getproperty, sig(Any)) && which(_pullback, pb_sig(x)) == which(_pullback, pb_sig(Any)) && which(rrule, rrule_sig(x)) == which(rrule, rrule_sig(Any)) - #ccall(:jl_safe_printf, Cvoid, (Cstring,), "$is_getfield_fallback: $x\n") - if is_getfield_fallback # just copy pullback of `literal_getfield` - mi, _sig, sparams = reflect((typeof(_pullback), cx, typeof(literal_getfield), x, Val{f})) - ci = copy(Core.Compiler.retrieve_code_info(mi)) + mi, _sig, sparams = reflect((typeof(_pullback), ctx, typeof(literal_getfield), x, Val{f}), world) + ci = if VERSION >= v"1.10.0-DEV.873" + copy(Core.Compiler.retrieve_code_info(mi, world)) + else + copy(Core.Compiler.retrieve_code_info(mi)) + end # we need to change the second arg to `_pullback` from `literal_getproperty` to # `literal_getfield` @@ -71,20 +78,44 @@ if VERSION < v"1.10.0-DEV.873" # backedge for `_pullback`, see https://docs.julialang.org/en/v1/devdocs/ast/#MethodInstance # this will cause a backedge to this particular MethodInstance to be 
attached to - # `_pullback(cx, getproperty, x, f)` - mi_pb_getproperty, _, _ = reflect((typeof(_pullback), pb_sig(x).parameters...)) - mi_getproperty, _, _ = reflect((typeof(getproperty), sig(x).parameters...)) - mi_rrule, _, _ = reflect((typeof(rrule), rrule_sig(x).parameters...)) + # `_pullback(ctx, getproperty, x, f)` + mi_pb_getproperty, _, _ = reflect((typeof(_pullback), pb_sig(x).parameters...), world) + mi_getproperty, _, _ = reflect((typeof(getproperty), sig(x).parameters...), world) + mi_rrule, _, _ = reflect((typeof(rrule), rrule_sig(x).parameters...), world) ci.edges = Core.MethodInstance[mi, mi_pb_getproperty, mi_getproperty, mi_rrule] + # XXX: on 1.10, we should also set metadata like min-world and max-world return ci else # nothing to optimize here, need to recurse into `getproperty` return quote Base.@_inline_meta - _pullback(cx, getproperty, x, $(QuoteNode(f))) + _pullback(ctx, getproperty, x, $(QuoteNode(f))) end end end +if VERSION >= v"1.10.0-DEV.873" + +# on Julia 1.10, generated functions need to keep track of the world age + +function _literal_getproperty_pullback_generator(world::UInt, source, self, ctx, literal_getproperty, x, f) + ret = _generate_literal_getproperty(ctx, world, x, f) + ret isa Core.CodeInfo && return ret + + stub = Core.GeneratedFunctionStub(identity, Core.svec(:methodinstance, :ctx, :literal_getproperty, :x, :f), Core.svec()) + stub(world, source, ret) +end + +@eval function _pullback(ctx::AContext, ::typeof(literal_getproperty), x, f) + $(Expr(:meta, :generated, _literal_getproperty_pullback_generator)) + $(Expr(:meta, :generated_only)) +end + +else + +@generated function _pullback(ctx::AContext, ::typeof(literal_getproperty), x, f) + _generate_literal_getproperty(ctx, nothing, x, f) +end + end From 3101698b61043345a77b44829bd36ac912862f7f Mon Sep 17 00:00:00 2001 From: Tim Besard Date: Wed, 10 May 2023 17:05:13 +0200 Subject: [PATCH 430/490] Work around Julia bug. --- src/compiler/interface2.jl | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/src/compiler/interface2.jl b/src/compiler/interface2.jl index 0d46061d2..31ae7eaf4 100644 --- a/src/compiler/interface2.jl +++ b/src/compiler/interface2.jl @@ -27,6 +27,10 @@ function _generate_pullback(ctx, world, f, args...) g = try _generate_pullback_via_decomposition(T, world) catch e + if VERSION < v"1.8" + # work around Julia bug + rethrow(CompileError(T,e)) + end return :(throw($(CompileError(T,e)))) end g === nothing && return :(f(args...), Pullback{$T}((f,))) @@ -45,6 +49,10 @@ function _generate_callable_pullback(j::Type{<:Pullback{T}}, world, Δ) where T g = try _generate_pullback_via_decomposition(T, world) catch e + if VERSION < v"1.8" + # work around Julia bug + rethrow(CompileError(T,e)) + end return :(throw($(CompileError(T,e)))) end if g === nothing From d1bce984f5fe4867bce4be46b1dd84ff6f1fc38a Mon Sep 17 00:00:00 2001 From: Tim Besard Date: Thu, 11 May 2023 09:20:44 +0200 Subject: [PATCH 431/490] Fix test for 1.9. 
--- test/features.jl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/features.jl b/test/features.jl index 112c5b937..0499987d8 100644 --- a/test/features.jl +++ b/test/features.jl @@ -401,7 +401,11 @@ global_param = 3 y, back = Zygote._pullback(cx, x -> x*global_param, 2) @test y == 6 @test back(1) == (nothing, 3) - Zygote.cache(cx)[GlobalRef(Main, :global_param)] == 2 + ref = first(keys(Zygote.cache(cx))) + @test ref isa GlobalRef + @test ref.mod == Main + @test ref.name == :global_param + @test Zygote.cache(cx)[ref] == 2 end function pow_try(x) From e2d7839a476fc1cda407500d9a2ca125bd6e2aa5 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Fri, 12 May 2023 18:27:35 -0400 Subject: [PATCH 432/490] v0.6.61 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 0e79be05f..f1a730e18 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.60" +version = "0.6.61" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From b03590b0b59c660050592eaab649c2498fee3f51 Mon Sep 17 00:00:00 2001 From: Sam Date: Wed, 31 May 2023 17:19:42 -0700 Subject: [PATCH 433/490] Remove @adjoint for logabsdet Duplicated by rrule in ChainRules, should be unnecessary --- src/lib/array.jl | 2 -- 1 file changed, 2 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index d65fea2c1..d4b81e3e1 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -376,8 +376,6 @@ function _pullback(cx::AContext, ::typeof(kron), a::AbstractMatrix, b::AbstractM return res, back ∘ unthunk_tangent end -@adjoint logabsdet(xs::AbstractMatrix) = logabsdet(xs), Δ -> (Δ[1] * inv(xs)',) - @adjoint function inv(A::Union{Number, AbstractMatrix}) Ainv = inv(A) return Ainv, function (Δ) From 785574c700d7e3cfcf6a292dc03c713c804a0e39 Mon Sep 17 00:00:00 2001 From: Frames White Date: Thu, 1 Jun 2023 16:36:48 +0800 Subject: [PATCH 434/490] Update Project.toml --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index f1a730e18..4016c7a48 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.61" +version = "0.6.62" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From ffd2db8d253cb72eb9f4c4ac05f492f0493c878a Mon Sep 17 00:00:00 2001 From: Lilith Orion Hafner Date: Fri, 2 Jun 2023 14:30:15 -0500 Subject: [PATCH 435/490] Fix broken example in documentation --- docs/src/index.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/src/index.md b/docs/src/index.md index b3ea8d776..40e4b0a0b 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -172,6 +172,8 @@ julia> (l::Linear)(x) = l.W * x .+ l.b julia> model = Linear(rand(2, 5), rand(2)) Linear([0.267663 … 0.334385], [0.0386873, 0.0203294]) +julia> x = rand(5); + julia> dmodel = gradient(model -> sum(model(x)), model)[1] (W = [0.652543 … 0.683588], b = [1.0, 1.0]) ``` From 113c0976808d1e790a881f3572f123dd2724ec54 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 30 Apr 2023 11:00:15 -0400 Subject: [PATCH 436/490] allow multiple returns in withgradient --- src/compiler/interface.jl | 30 +++++++++++++++++++++++++++++- test/features.jl | 18 ++++++++++++++++++ 2 files changed, 47 insertions(+), 1 deletion(-) diff --git a/src/compiler/interface.jl 
b/src/compiler/interface.jl index cce7c4d6d..7e72ce4b5 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -119,7 +119,27 @@ julia> ∇ == gradient(/, 1, 2) # explicit mode true julia> w = [3.0]; +``` + +If `f` returns a Tuple or NamedTuple, then it calculates +`gradient(first∘f, args...)` but returns the whole `f(args...)`: + +```jldoctest; setup=:(using Zygote) +julia> withgradient([1,2,4]) do x + z = 1 ./ x + sum(z), z + end +(val = (1.75, [1.0, 0.5, 0.25]), grad = ([-1.0, -0.25, -0.0625],)) + +julia> withgradient(3.0, 4.0) do x, y + (div = x/y, mul = x*y) + end +(val = (div = 0.75, mul = 12.0), grad = (0.25, -0.1875)) +``` + +Also supports implicit mode: +```jldoctest; setup=:(using Zygote) julia> res = withgradient(() -> sum(abs2, w), Params([w])) # implicit mode (val = 9.0, grad = Grads(...)) @@ -130,7 +150,15 @@ julia> res.grad[w] """ function withgradient(f, args...) y, back = pullback(f, args...) - grad = back(sensitivity(y)) + grad = if y isa Tuple + dy = (sensitivity(first(y)), map(_ -> nothing, Base.tail(y))...) + back(dy) + elseif y isa NamedTuple + dy = (sensitivity(first(y)), map(_ -> nothing, Base.tail(y))...) + back(NamedTuple{propertynames(y), typeof(dy)}(dy)) + else + back(sensitivity(y)) + end results = isnothing(grad) ? map(_ -> nothing, args) : map(_project, args, grad) (val=y, grad=results) end diff --git a/test/features.jl b/test/features.jl index 0499987d8..908ae5815 100644 --- a/test/features.jl +++ b/test/features.jl @@ -866,3 +866,21 @@ end end @test gradient(f760, 3)[1] ≈ 123.93054835019153 end + +@testset "withgradient" begin + @test withgradient([1,2,4]) do x + z = 1 ./ x + sum(z), z + end == (val = (1.75, [1.0, 0.5, 0.25]), grad = ([-1.0, -0.25, -0.0625],)) + + @test withgradient(3.0, 4.0) do x, y + (div = x/y, mul = x*y) + end == (val = (div = 0.75, mul = 12.0), grad = (0.25, -0.1875)) + + f3(x) = sum(sin, x), sum(cos, x), sum(tan, x) + g1 = gradient(first∘f3, [1,2,3.0]) + y2, g2 = withgradient(first∘f3, [1,2,3.0]) + y3, g3 = withgradient(f3, [1,2,3.0]) + @test g1[1] ≈ g2[1] ≈ g3[1] +end + From c7ed3fdbb4b870cd622dcfd2fe75c34208ff22b3 Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Sun, 30 Apr 2023 12:13:50 -0400 Subject: [PATCH 437/490] fix doctest --- src/compiler/interface.jl | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 7e72ce4b5..43e47c6c3 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -115,10 +115,8 @@ as a named tuple. 
julia> y, ∇ = withgradient(/, 1, 2) (val = 0.5, grad = (0.5, -0.25)) -julia> ∇ == gradient(/, 1, 2) # explicit mode +julia> ∇ == gradient(/, 1, 2) true - -julia> w = [3.0]; ``` If `f` returns a Tuple or NamedTuple, then it calculates @@ -140,7 +138,9 @@ julia> withgradient(3.0, 4.0) do x, y Also supports implicit mode: ```jldoctest; setup=:(using Zygote) -julia> res = withgradient(() -> sum(abs2, w), Params([w])) # implicit mode +julia> w = [3.0]; + +julia> res = withgradient(() -> sum(abs2, w), Params([w])) (val = 9.0, grad = Grads(...)) julia> res.grad[w] From e0d3d8b1a785ec291f0a41da3f12cad51d80eb6b Mon Sep 17 00:00:00 2001 From: Michael Abbott <32575566+mcabbott@users.noreply.github.com> Date: Mon, 19 Jun 2023 18:31:14 -0400 Subject: [PATCH 438/490] better words --- src/compiler/interface.jl | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 43e47c6c3..c09d6db31 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -119,13 +119,15 @@ julia> ∇ == gradient(/, 1, 2) true ``` -If `f` returns a Tuple or NamedTuple, then it calculates -`gradient(first∘f, args...)` but returns the whole `f(args...)`: +Allows you to capture auxillary outputs, in addition to the scalar +used by `gradient`. To do this, `f` must return a Tuple or NamedTuple. +Then it calculates `grad = gradient(first∘f, args...) +but returns the whole `val = f(args...)`: ```jldoctest; setup=:(using Zygote) julia> withgradient([1,2,4]) do x z = 1 ./ x - sum(z), z + sum(z), z # here z is an auxillary output end (val = (1.75, [1.0, 0.5, 0.25]), grad = ([-1.0, -0.25, -0.0625],)) From 612961353e4a81f9861fbca9db714e86f30ad0a3 Mon Sep 17 00:00:00 2001 From: Tor Erlend Fjelde Date: Mon, 7 Aug 2023 16:30:20 +0100 Subject: [PATCH 439/490] added parent adjoint for LowerTriangular and UpperTriangular --- src/lib/array.jl | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/lib/array.jl b/src/lib/array.jl index d4b81e3e1..37884cded 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -355,6 +355,8 @@ end @adjoint parent(x::LinearAlgebra.Adjoint) = parent(x), ȳ -> (LinearAlgebra.Adjoint(ȳ),) @adjoint parent(x::LinearAlgebra.Transpose) = parent(x), ȳ -> (LinearAlgebra.Transpose(ȳ),) +@adjoint parent(x::LinearAlgebra.UpperTriangular) = parent(x), ȳ -> (LinearAlgebra.UpperTriangular(ȳ),) +@adjoint parent(x::LinearAlgebra.LowerTriangular) = parent(x), ȳ -> (LinearAlgebra.LowerTriangular(ȳ),) function _kron(mat1::AbstractMatrix,mat2::AbstractMatrix) m1, n1 = size(mat1) From f0e0cafca94f3a7b76783ab2f9c0dfbfd27290da Mon Sep 17 00:00:00 2001 From: Tor Erlend Fjelde Date: Mon, 7 Aug 2023 17:27:22 +0100 Subject: [PATCH 440/490] added test for parent --- test/lib/array.jl | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/test/lib/array.jl b/test/lib/array.jl index 889301c1e..a3b73aff9 100644 --- a/test/lib/array.jl +++ b/test/lib/array.jl @@ -96,3 +96,13 @@ end end end end + +@testset "parent" begin + @testset "$constructor" for constructor in [LowerTriangular, UpperTriangular] + x = randn(2, 2) + y, pb = Zygote.pullback(x) do x + sum(parent(constructor(2 .* x))) + end + @test first(pb(one(y))) ≈ constructor(2 * ones(2, 2)) + end +end From 547be707a59e079ff1cc8f079bd8fc9298b93fa3 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Thu, 10 Aug 2023 15:27:22 -0700 Subject: [PATCH 441/490] Bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 4016c7a48..15f342bf4 
100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.62" +version = "0.6.63" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 8418647cff7ffd92b7e9d87981c3b9cd145723f7 Mon Sep 17 00:00:00 2001 From: marius Date: Tue, 10 Jan 2023 23:56:09 -0800 Subject: [PATCH 442/490] full fix to #1352 --- src/lib/buffer.jl | 5 +++-- test/gradcheck.jl | 45 ++++++--------------------------------------- 2 files changed, 9 insertions(+), 41 deletions(-) diff --git a/src/lib/buffer.jl b/src/lib/buffer.jl index 385f5ecc4..83fceb713 100644 --- a/src/lib/buffer.jl +++ b/src/lib/buffer.jl @@ -1,13 +1,14 @@ grad_mut(cx::Context, b::Buffer, ::Type=Union{}) = _get!(() -> fill!(similar(b.data, Any), nothing), cache(cx), b) +# S is the eltype we are about to set into the buffer accumulator, so allocte wide enough grad_mut(cx::Context, b::Buffer{T}, ::Type{S}=Union{}) where {T<:Number, S<:Number} = _get!(() -> fill!(similar(b.data, float(promote_type(T, S))), 0), cache(cx), b) @non_differentiable Buffer(::Any...) @adjoint function getindex(b::Buffer, i...) - b[i...], function (Δ::S) where {S} - grad = grad_mut(__context__, b, S) + b[i...], function (Δ) + grad = grad_mut(__context__, b, eltype(Δ)) grad[i...] = accum(grad[i...], Δ) return end diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 5037d4c88..f44cd1cca 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1569,46 +1569,13 @@ using Zygote: Buffer @test ∇W2 == W2 @test ∇x == 6 .* x - @testset "incorrect promotion (#1352)" begin - u = [0.75, 0.5] - p = [-1.5, 0.05, 0.2, 0.01] - - # in-place - function g1352!(du, u, p, t) - du[1, 1] = p[3] * u[1] + p[4] * u[2] - du[1, 2] = p[3] * u[1] + p[4] * u[2] - du[2, 1] = p[4] * u[1] + p[3] * u[2] - du[2, 2] = p[4] * u[1] + p[3] * u[2] - return nothing - end - du1_inplace, back_inplace = Zygote.pullback(u, p) do u, p - du = Zygote.Buffer(Matrix{Float64}(undef, 2, 2)) - g1352!(du, u, p, 1.0) - return copy(du[:, 1]) - end - - # out-of-place - function g1352(u, p, t) - du11 = p[3] * u[1] + p[4] * u[2] - du12 = p[3] * u[1] + p[4] * u[2] - du21 = p[4] * u[1] + p[3] * u[2] - du22 = p[4] * u[1] + p[3] * u[2] - return [du11 du12 - du21 du22] - end - du1, back = Zygote.pullback(u, p) do u, p - du = g1352(u, p, 1.0) - return du[:, 1] - end + # reduced mwe of #1352 + @test Zygote.gradient([0,0]) do x + buf = Zygote.Buffer(similar(x)) + buf[:] = x + sum(copy(buf[1:2])) + end == ([1,1],) - # comparison - @test du1_inplace ≈ du1 - v = randn(2) - ∇u_inplace, ∇p_inplace = back_inplace(v) - ∇u, ∇p = back(v) - @test ∇u_inplace ≈ ∇u - @test ∇p_inplace ≈ ∇p - end end @testset "AbstractArray Addition / Subtraction / Negation" begin From 12156369e984aa52211557cefbee2a81a9dea635 Mon Sep 17 00:00:00 2001 From: Anton Smirnov Date: Fri, 1 Sep 2023 20:50:54 +0300 Subject: [PATCH 443/490] Allow GPUArrays 9 --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 15f342bf4..b784d0cc7 100644 --- a/Project.toml +++ b/Project.toml @@ -36,7 +36,7 @@ DiffRules = "1.4" Distances = "0.10" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12, 0.13, 1" ForwardDiff = "0.10" -GPUArrays = "8.4.2" +GPUArrays = "8.4.2, 9" GPUArraysCore = "0.1.1" IRTools = "0.4.4" LogExpFunctions = "0.3.1" From 5c3dfc7084b18b4fd5b14426f9c423ab4c5980b8 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Fri, 1 Sep 2023 11:44:29 -0700 Subject: [PATCH 444/490] Mark AbstractFFT test broken --- test/gradcheck.jl | 2 +- 1 file changed, 1 
insertion(+), 1 deletion(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index f44cd1cca..b7fd5391f 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -1613,7 +1613,7 @@ end # same for the inverse @test gradient((X̂)->real.(fft(ifft(X̂))[i, j]), X̂)[1] ≈ indicateMat # same for rfft(irfft) - @test gradient((X)->real.(irfft(rfft(X), size(X,1)))[i, j], X)[1] ≈ real.(indicateMat) + @test_broken gradient((X)->real.(irfft(rfft(X), size(X,1)))[i, j], X)[1] ≈ real.(indicateMat) # rfft isn't actually surjective, so rfft(irfft) can't really be tested this way. # the gradients are actually just evaluating the inverse transform on the From ad930c3c8764e4cef72bbec4c1d97ae48795cfbb Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Mon, 4 Sep 2023 10:13:41 -0700 Subject: [PATCH 445/490] bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index b784d0cc7..dd390100c 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.63" +version = "0.6.64" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 6edfce9d1e8322a53a5f42e4bf527aa8a42307f6 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sun, 14 May 2023 17:37:57 -0700 Subject: [PATCH 446/490] Excise getindex adjoint We have a better rule in Chainrules now --- src/lib/array.jl | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 37884cded..a182c037d 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -41,24 +41,6 @@ end @adjoint (::Type{T})(sz) where {T<:Zeros} = T(sz), Δ->(nothing,) @adjoint (::Type{T})(sz) where {T<:Ones} = T(sz), Δ->(nothing,) -@adjoint getindex(x::AbstractArray, inds...) = x[inds...], ∇getindex(x, inds) - -@adjoint view(x::AbstractArray, inds...) = view(x, inds...), ∇getindex(x, inds) - -∇getindex(x::AbstractArray{T,N}, inds) where {T,N} = dy -> begin - if inds isa NTuple{N,Int} && T <: Number - dx = OneElement(dy, inds, axes(x)) - elseif inds isa NTuple{<:Any, Integer} - dx = _zero(x, typeof(dy)) - dx[inds...] = dy - else - dx = _zero(x, eltype(dy)) - dxv = view(dx, inds...) - dxv .= accum.(dxv, _droplike(dy, dxv)) - end - return (_project(x, dx), map(_->nothing, inds)...) 
-end - """ OneElement(val, ind, axes) <: AbstractArray From cf7f7d08705d2787fa31bcf45bcca5447fd9a9a7 Mon Sep 17 00:00:00 2001 From: Will Tebbutt Date: Wed, 27 Sep 2023 08:14:17 +0100 Subject: [PATCH 447/490] Extend kron support (#1458) * Bump patch * Generalise kron implementation --- Project.toml | 2 +- src/lib/array.jl | 8 +++----- test/gradcheck.jl | 2 ++ 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Project.toml b/Project.toml index dd390100c..5105e6a06 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.64" +version = "0.6.65" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" diff --git a/src/lib/array.jl b/src/lib/array.jl index 37884cded..7740a80d4 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -368,12 +368,10 @@ function _kron(mat1::AbstractMatrix,mat2::AbstractMatrix) return reshape(mat1_rsh.*mat2_rsh, (m1*m2,n1*n2)) end _kron(a::AbstractVector, b::AbstractVector) = vec(_kron(reshape(a, :, 1), reshape(b, :, 1))) +_kron(a::AbstractVector, b::AbstractMatrix) = _kron(reshape(a, :, 1), b) +_kron(a::AbstractMatrix, b::AbstractVector) = _kron(a, reshape(b, :, 1)) -function _pullback(cx::AContext, ::typeof(kron), a::AbstractVector, b::AbstractVector) - res, back = _pullback(cx, _kron, a, b) - return res, back ∘ unthunk_tangent -end -function _pullback(cx::AContext, ::typeof(kron), a::AbstractMatrix, b::AbstractMatrix) +function _pullback(cx::AContext, ::typeof(kron), a::AbstractVecOrMat, b::AbstractVecOrMat) res, back = _pullback(cx, _kron, a, b) return res, back ∘ unthunk_tangent end diff --git a/test/gradcheck.jl b/test/gradcheck.jl index b7fd5391f..bc22a2016 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -275,6 +275,8 @@ end @test gradtest(kron, rand(5,1), rand(3,1)) @test gradtest(kron, rand(5,1), rand(3,1), rand(8,1)) @test gradtest(kron, rand(5,2), rand(3,2), rand(8,2)) +@test gradtest(kron, rand(5), rand(3, 2)) +@test gradtest(kron, rand(3, 2), rand(5)) for mapfunc in [map,pmap] @testset "$mapfunc" begin From 4cf706d5dd368ba9a149e5668e8ed50ea6966faa Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Thu, 5 Oct 2023 15:55:00 -0700 Subject: [PATCH 448/490] Allow collapsed zeros in getindex tests --- test/gradcheck.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index b7fd5391f..e519b8cc0 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -174,11 +174,11 @@ end # Ensure that nothings work with numeric types. _, back = Zygote.pullback(getindex, randn(4), [1]) - @test back([nothing]) == (zeros(4), nothing) + @test back([nothing]) == nothing # Ensure that nothings work with non-numeric types. 
_, back = Zygote.pullback(getindex, [randn(2) for _ in 1:3], [1]) - @test back([nothing]) == (nothing, nothing) + @test back([nothing]) == nothing end @testset "view" begin From e1b5e0ce101f2c444a66ac83de2a2c32e1b4aa5a Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Thu, 5 Oct 2023 17:03:18 -0700 Subject: [PATCH 449/490] Mark `hessian_reverse` tests unbroken --- test/utils.jl | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/test/utils.jl b/test/utils.jl index cb11437cf..4e7f4929b 100644 --- a/test/utils.jl +++ b/test/utils.jl @@ -2,14 +2,8 @@ using ForwardDiff using Zygote: hessian_dual, hessian_reverse @testset "hessian: $hess" for hess in [hessian_dual, hessian_reverse] - - if hess == hessian_dual - @test hess(x -> x[1]*x[2], randn(2)) ≈ [0 1; 1 0] - @test hess(((x,y),) -> x*y, randn(2)) ≈ [0 1; 1 0] # original docstring version - else - @test_broken hess(x -> x[1]*x[2], randn(2)) ≈ [0 1; 1 0] # can't differentiate ∇getindex - @test_broken hess(((x,y),) -> x*y, randn(2)) ≈ [0 1; 1 0] - end + @test hess(x -> x[1]*x[2], randn(2)) ≈ [0 1; 1 0] + @test hess(((x,y),) -> x*y, randn(2)) ≈ [0 1; 1 0] # original docstring version @test hess(x -> sum(x.^3), [1 2; 3 4]) ≈ Diagonal([6, 18, 12, 24]) @test hess(sin, pi/2) ≈ -1 From 12bdca99b6370d0af7233f6aec08ac3f31eaab0f Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Thu, 5 Oct 2023 20:52:07 -0700 Subject: [PATCH 450/490] =?UTF-8?q?Keep=20`=E2=88=87getindex`=20function?= =?UTF-8?q?=20stub=20to=20avoid=20downstream=20breakage?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/deprecated.jl | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/src/deprecated.jl b/src/deprecated.jl index 6fe88b5b1..c57fa0d7e 100644 --- a/src/deprecated.jl +++ b/src/deprecated.jl @@ -65,3 +65,9 @@ macro nograd(ex) end return blk end + +# Internal function used by some downstream packages. +# Removing this completely would require some tricky registry changes, +# but leaving it as a vestigial function is much easier. +# See https://github.com/FluxML/Zygote.jl/pull/1328 for more context. +function ∇getindex end From efcc64bbbe7534f71998d2371c4a89b0e95bbcdb Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Tue, 10 Oct 2023 19:39:23 -0700 Subject: [PATCH 451/490] Handle `Expr(:boundscheck)` --- src/compiler/reverse.jl | 4 ++++ test/compiler.jl | 9 +++++++++ 2 files changed, 13 insertions(+) diff --git a/src/compiler/reverse.jl b/src/compiler/reverse.jl index 333323e83..c72e1a5a6 100644 --- a/src/compiler/reverse.jl +++ b/src/compiler/reverse.jl @@ -132,6 +132,10 @@ function instrument(ir::IR) elseif isexpr(ex, :(=)) @assert ex.args[1] isa GlobalRef pr[v] = xcall(Zygote, :global_set, QuoteNode(ex.args[1]), ex.args[2]) + elseif isexpr(ex, :boundscheck) + # Expr(:boundscheck) now appears in common Julia code paths, so we need to handle it. + # For correctness sake, fix to true like https://github.com/dfdx/Umlaut.jl/issues/34. 
+ pr[v] = true else ex = instrument_new!(pr, v, ex) ex = instrument_literals!(pr, v, ex) diff --git a/test/compiler.jl b/test/compiler.jl index c9b091f78..381c4bc14 100644 --- a/test/compiler.jl +++ b/test/compiler.jl @@ -225,3 +225,12 @@ end # issue 897 @test gradient(x -> sum(norm, collect(eachcol(x))), ones(3, 400))[1] ≈ fill(0.5773502691896258, 3, 400) + +# Tests adapted from https://github.com/dfdx/Umlaut.jl/pull/35 +@eval _boundscheck_foo(x) = ifelse($(Expr(:boundscheck)), 2x, x) + +@testset "Meta Expr handling" begin + y, (dx,) = withgradient(_boundscheck_foo, 1) + @test y == 2 + @test dx == 2 +end From b029bcbc8f6450f51851bb1d7af7fa819a2767c2 Mon Sep 17 00:00:00 2001 From: Paul Date: Sun, 15 Oct 2023 15:18:11 +0200 Subject: [PATCH 452/490] Handle unreachable blocks in the adjoint CFG --- src/compiler/reverse.jl | 4 ++-- test/compiler.jl | 20 ++++++++++++++++++++ 2 files changed, 22 insertions(+), 2 deletions(-) diff --git a/src/compiler/reverse.jl b/src/compiler/reverse.jl index 333323e83..9303b8e3a 100644 --- a/src/compiler/reverse.jl +++ b/src/compiler/reverse.jl @@ -244,7 +244,6 @@ Variable(a::Alpha) = Variable(a.id) sig(b::IRTools.Block) = unique([arg for br in branches(b) for arg in br.args if arg isa Variable]) sig(pr::Primal) = Dict(b.id => sig(b) for b in blocks(pr.ir)) -# TODO unreachables? function adjointcfg(pr::Primal) ir = empty(pr.ir) return!(ir, nothing) @@ -257,7 +256,8 @@ function adjointcfg(pr::Primal) push!(rb, xcall(Base, :(!==), alpha(pr.branches[b.id]), BranchNumber(i))) branch!(rb, preds[i].id, unless = cond) end - if !isempty(branches(b)) && branches(b)[end] == IRTools.unreachable + if isempty(preds) || (!isempty(branches(b)) && branches(b)[end] == IRTools.unreachable) + # An unreachable block in the primal will also be unreachable in the adjoint branch!(rb, 0) end end diff --git a/test/compiler.jl b/test/compiler.jl index c9b091f78..4f8776c90 100644 --- a/test/compiler.jl +++ b/test/compiler.jl @@ -225,3 +225,23 @@ end # issue 897 @test gradient(x -> sum(norm, collect(eachcol(x))), ones(3, 400))[1] ≈ fill(0.5773502691896258, 3, 400) + +# issue 1118 & 1380 +function f_1380(x) + if rand(Bool) + return x + else + return 2x + end + + # unreachable + return nothing +end + +@testset "unreachable block" begin + y, back = Zygote.pullback(f_1380, 1.) + # There should not be a compiler error + local g + @test_nowarn g = back(1.) + @test only(g) ∈ (1., 2.) 
+end From e09f40706a292eba96581c078187b7a2e1229d46 Mon Sep 17 00:00:00 2001 From: Paul Berg Date: Tue, 17 Oct 2023 13:15:11 +0200 Subject: [PATCH 453/490] Bump compat to `IRTools@0.4.11` --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 5105e6a06..e4426b59d 100644 --- a/Project.toml +++ b/Project.toml @@ -38,7 +38,7 @@ FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12, 0.13, 1" ForwardDiff = "0.10" GPUArrays = "8.4.2, 9" GPUArraysCore = "0.1.1" -IRTools = "0.4.4" +IRTools = "0.4.11" LogExpFunctions = "0.3.1" MacroTools = "0.5" NaNMath = "0.3, 1" From bcf996ad45d1faf95e0bcb8433b2c9c0e09683a0 Mon Sep 17 00:00:00 2001 From: Paul Berg Date: Tue, 17 Oct 2023 14:30:29 +0200 Subject: [PATCH 454/490] Clarify comment and add Core.throw anyway --- src/compiler/reverse.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/compiler/reverse.jl b/src/compiler/reverse.jl index 9303b8e3a..0583b3da6 100644 --- a/src/compiler/reverse.jl +++ b/src/compiler/reverse.jl @@ -257,7 +257,8 @@ function adjointcfg(pr::Primal) branch!(rb, preds[i].id, unless = cond) end if isempty(preds) || (!isempty(branches(b)) && branches(b)[end] == IRTools.unreachable) - # An unreachable block in the primal will also be unreachable in the adjoint + # If `b` is unreachable, then no context produced by the primal should end up branching to `rb` + push!(rb, xcall(Core, :throw, "unreachable")) # `throw` is necessary for inference not to hit the `unreachable` branch!(rb, 0) end end From f7551273080569316f5127cc6a537b62ef50f592 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Wed, 18 Oct 2023 20:04:30 +0200 Subject: [PATCH 455/490] Update Project.toml --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index e4426b59d..eea6cdb90 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.65" +version = "0.6.66" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 08e0cd8bb21d58c509c43f32fa00335dec495b5c Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Wed, 25 Oct 2023 12:06:50 -0700 Subject: [PATCH 456/490] Use === nothing in tests --- test/gradcheck.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index e519b8cc0..06533ba78 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -174,11 +174,11 @@ end # Ensure that nothings work with numeric types. _, back = Zygote.pullback(getindex, randn(4), [1]) - @test back([nothing]) == nothing + @test back([nothing]) === nothing # Ensure that nothings work with non-numeric types. 
_, back = Zygote.pullback(getindex, [randn(2) for _ in 1:3], [1]) - @test back([nothing]) == nothing + @test back([nothing]) === nothing end @testset "view" begin From 340d3bf34429f90460031cd2d20fc2477f27b986 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Wed, 25 Oct 2023 13:23:05 -0700 Subject: [PATCH 457/490] bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index eea6cdb90..dcc25d2aa 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.66" +version = "0.6.67" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 0ac130f47c3f38b04f6f5d57a1a01fa6ad59cef3 Mon Sep 17 00:00:00 2001 From: David Widmann Date: Thu, 26 Oct 2023 10:57:49 +0200 Subject: [PATCH 458/490] Fix tests for ZygoteRule change (#1467) * Fix tests for ZygoteRule change * Apply suggestions from code review --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index dcc25d2aa..77bc44905 100644 --- a/Project.toml +++ b/Project.toml @@ -46,7 +46,7 @@ Requires = "1.1" PrecompileTools = "1" SpecialFunctions = "1.6, 2" Tracker = "0.2" -ZygoteRules = "0.2.1" +ZygoteRules = "0.2.4" julia = "1.6" [extensions] From 7590bf7f251c5847f835c4884d7fe7f7751a037d Mon Sep 17 00:00:00 2001 From: CompatHelper Julia Date: Sun, 29 Oct 2023 00:14:08 +0000 Subject: [PATCH 459/490] CompatHelper: add new compat entry for Statistics at version 1, (keep existing compat) --- Project.toml | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/Project.toml b/Project.toml index 77bc44905..9101c516c 100644 --- a/Project.toml +++ b/Project.toml @@ -18,14 +18,24 @@ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" LogExpFunctions = "2ab3a3ac-af41-5b50-aa03-7779005ae688" MacroTools = "1914dd2f-81c6-5fcd-8719-6d5c9610ff09" NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" +PrecompileTools = "aea7be01-6a6a-4083-8856-8a6e6704d82a" Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c" Requires = "ae029012-a4dd-5104-9daa-d747884805df" -PrecompileTools = "aea7be01-6a6a-4083-8856-8a6e6704d82a" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" SpecialFunctions = "276daf66-3868-5448-9aa4-cd146d93841b" Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2" ZygoteRules = "700de1a5-db45-46bc-99cf-38207098b444" +[weakdeps] +Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" +Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" +Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" + +[extensions] +ZygoteColorsExt = "Colors" +ZygoteDistancesExt = "Distances" +ZygoteTrackerExt = "Tracker" + [compat] AbstractFFTs = "1.3.1" ChainRules = "1.44.1" @@ -42,18 +52,14 @@ IRTools = "0.4.11" LogExpFunctions = "0.3.1" MacroTools = "0.5" NaNMath = "0.3, 1" -Requires = "1.1" PrecompileTools = "1" +Requires = "1.1" SpecialFunctions = "1.6, 2" +Statistics = "1" Tracker = "0.2" ZygoteRules = "0.2.4" julia = "1.6" -[extensions] -ZygoteColorsExt = "Colors" -ZygoteDistancesExt = "Distances" -ZygoteTrackerExt = "Tracker" - [extras] CUDA = "052768ef-5323-5732-b1bb-66c8b64840ba" ChainRulesTestUtils = "cdddcdb0-9152-4a09-a978-84456f9df70a" @@ -68,8 +74,3 @@ Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" [targets] test = ["ChainRulesTestUtils", "Conda", "CUDA", "Distances", "FFTW", "FiniteDifferences", "PyCall", "Test"] - -[weakdeps] -Colors = "5ae59095-9a9b-59fe-a467-6f913c188581" -Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" -Tracker = 
"9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" From f68d40d41d985d89eea63db3d0bd2162ac86c2de Mon Sep 17 00:00:00 2001 From: CompatHelper Julia Date: Wed, 20 Dec 2023 00:10:39 +0000 Subject: [PATCH 460/490] CompatHelper: bump compat for GPUArrays to 10, (keep existing compat) --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 9101c516c..f42d61c26 100644 --- a/Project.toml +++ b/Project.toml @@ -46,7 +46,7 @@ DiffRules = "1.4" Distances = "0.10" FillArrays = "0.8, 0.9, 0.10, 0.11, 0.12, 0.13, 1" ForwardDiff = "0.10" -GPUArrays = "8.4.2, 9" +GPUArrays = "8.4.2, 9, 10" GPUArraysCore = "0.1.1" IRTools = "0.4.11" LogExpFunctions = "0.3.1" From 223f85620749bb96e68e57b13f1161034c5b528c Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Wed, 20 Dec 2023 16:59:55 -0800 Subject: [PATCH 461/490] bump version mostly for compat changes --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index f42d61c26..b9a7f703b 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.67" +version = "0.6.68" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 0bd7f55f2c3a2d757b10f63551cde4d738e74b8f Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 30 Dec 2023 23:20:39 -0500 Subject: [PATCH 462/490] Swap PyCall to PythonCall The former was giving us setup-related headaches on CI, and we can use any Python FFI for the one test which needs it. --- Project.toml | 4 ++-- test/features.jl | 10 +++------- 2 files changed, 5 insertions(+), 9 deletions(-) diff --git a/Project.toml b/Project.toml index b9a7f703b..d2357cb26 100644 --- a/Project.toml +++ b/Project.toml @@ -68,9 +68,9 @@ Conda = "8f4d0f93-b110-5947-807f-2305c1781a2d" Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7" FFTW = "7a1cc6ca-52ef-59f5-83cd-3a7055c09341" FiniteDifferences = "26cc04aa-876d-5657-8c51-4c34ba976000" -PyCall = "438e738f-606a-5dbb-bf0a-cddfbfd45ab0" +PythonCall = "6099a3de-0909-46bc-b1f4-468b9a2dfc0d" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" [targets] -test = ["ChainRulesTestUtils", "Conda", "CUDA", "Distances", "FFTW", "FiniteDifferences", "PyCall", "Test"] +test = ["ChainRulesTestUtils", "Conda", "CUDA", "Distances", "FFTW", "FiniteDifferences", "PythonCall", "Test"] diff --git a/test/features.jl b/test/features.jl index 908ae5815..84f875c8f 100644 --- a/test/features.jl +++ b/test/features.jl @@ -685,13 +685,9 @@ end end == ([8 112; 36 2004],) end -@testset "PyCall custom @adjoint" begin - # Trigger Python install if required. Required for Buildkite CI! 
- import Conda - Conda.list() - - import PyCall - math = PyCall.pyimport("math") +@testset "PythonCall custom @adjoint" begin + using PythonCall: pyimport + math = pyimport("math") pysin(x) = math.sin(x) Zygote.@adjoint pysin(x) = math.sin(x), (δ) -> (δ * math.cos(x), ) @test Zygote.gradient(pysin, 1.5) == Zygote.gradient(sin, 1.5) From cbcb7a9f8b4cf7c5a9b388da92847edeb03e741e Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 30 Dec 2023 23:20:59 -0500 Subject: [PATCH 463/490] Don't load CUDA.jl on GHA --- test/runtests.jl | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/test/runtests.jl b/test/runtests.jl index 672960944..9fea7b2d8 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -1,17 +1,17 @@ using Zygote, Test, LinearAlgebra using Zygote: gradient, ZygoteRuleConfig -using CUDA -using CUDA: has_cuda @testset "all" begin # Overall testset ensures it keeps running after failure - - if has_cuda() - @testset "CUDA tests" begin - include("cuda.jl") + if !haskey(ENV, "GITHUB_ACTION") + using CUDA + if CUDA.has_cuda() + @testset "CUDA tests" begin + include("cuda.jl") + end + @info "CUDA tests have run" + else + @warn "CUDA not found - Skipping CUDA Tests" end - @info "CUDA tests have run" - else - @warn "CUDA not found - Skipping CUDA Tests" end @testset "deprecated.jl" begin From 7252a93de3aa1b88777996fcb3ada5f540b3641e Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 30 Dec 2023 21:24:22 -0800 Subject: [PATCH 464/490] add needed type conversion This was implicit for PyCall but needs to be explicit for PythonCall. --- test/features.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/features.jl b/test/features.jl index 84f875c8f..3c6766e44 100644 --- a/test/features.jl +++ b/test/features.jl @@ -686,10 +686,10 @@ end end @testset "PythonCall custom @adjoint" begin - using PythonCall: pyimport + using PythonCall: pyimport, pyconvert math = pyimport("math") pysin(x) = math.sin(x) - Zygote.@adjoint pysin(x) = math.sin(x), (δ) -> (δ * math.cos(x), ) + Zygote.@adjoint pysin(x) = pyconvert(Float64, math.sin(x)), δ -> (δ * math.cos(x),) @test Zygote.gradient(pysin, 1.5) == Zygote.gradient(sin, 1.5) end From 0f5d958149b94daa687ac59c86b0091d29024144 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Sat, 30 Dec 2023 22:05:11 -0800 Subject: [PATCH 465/490] One more `pyconvert` --- test/features.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/features.jl b/test/features.jl index 3c6766e44..c8640b098 100644 --- a/test/features.jl +++ b/test/features.jl @@ -689,7 +689,7 @@ end using PythonCall: pyimport, pyconvert math = pyimport("math") pysin(x) = math.sin(x) - Zygote.@adjoint pysin(x) = pyconvert(Float64, math.sin(x)), δ -> (δ * math.cos(x),) + Zygote.@adjoint pysin(x) = pyconvert(Float64, math.sin(x)), δ -> (pyconvert(Float64, δ * math.cos(x)),) @test Zygote.gradient(pysin, 1.5) == Zygote.gradient(sin, 1.5) end From 4de0cf20a1e88f77fc6f516b57aa282509cffb85 Mon Sep 17 00:00:00 2001 From: lkdvos Date: Wed, 3 Jan 2024 18:06:02 +0100 Subject: [PATCH 466/490] Remove `@adjoint` rule for `sort` and `filter` --- src/lib/array.jl | 14 -------------- 1 file changed, 14 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 2d9479e62..06ce860d2 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -251,20 +251,6 @@ end @adjoint iterate(r::UnitRange, i...) 
= iterate(r, i...), _ -> nothing -@adjoint function sort(x::AbstractArray; by=identity) - p = sortperm(x, by=by) - return x[p], x̄ -> (x̄[invperm(p)],) -end - -@adjoint function filter(f, x::AbstractVector) - t = map(f, x) - x[t], Δ -> begin - dx = _zero(x, eltype(Δ)) - dx[t] .= Δ - (nothing, dx) - end -end - # Iterators @adjoint function enumerate(xs) From 03a8ef7cf8d5c35ea13056ccb73d1a049d33d9ba Mon Sep 17 00:00:00 2001 From: lkdvos Date: Wed, 3 Jan 2024 22:48:49 +0100 Subject: [PATCH 467/490] Add `sort(x; rev=true)` tests --- test/gradcheck.jl | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/test/gradcheck.jl b/test/gradcheck.jl index 6cc814481..8cb7e6e1a 100644 --- a/test/gradcheck.jl +++ b/test/gradcheck.jl @@ -425,13 +425,17 @@ end [2,3,1], [1, 2, 3], [1,2,3], - [2,1,3] + [2,1,3], + [1,3,2], + [3,2,1] ] for i = 1:3 @test gradient(v->sort(v)[i], [3.,1,2])[1][correct[1][i]] == 1 @test gradient(v->sort(v)[i], [1.,2,3])[1][correct[2][i]] == 1 @test gradient(v->sort(v,by=x->x%10)[i], [11,2,99])[1][correct[3][i]] == 1 @test gradient(v->sort(v,by=x->x%10)[i], [2,11,99])[1][correct[4][i]] == 1 + @test gradient(v->sort(v,rev=true)[i], [3.,1,2])[1][correct[5][i]] == 1 + @test gradient(v->sort(v,rev=true)[i], [1.,2,3])[1][correct[6][i]] == 1 end end From 4229760fc913624ff291b3187107b2d4f5c3d9bc Mon Sep 17 00:00:00 2001 From: lxvm Date: Tue, 2 Jan 2024 16:34:36 -0800 Subject: [PATCH 468/490] initial commit --- src/lib/lib.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/lib.jl b/src/lib/lib.jl index eaa49ada2..4559237e7 100644 --- a/src/lib/lib.jl +++ b/src/lib/lib.jl @@ -22,7 +22,7 @@ accum(x, y) = accum(x, y, zs...) = accum(accum(x, y), zs...) accum(x::Tuple, ys::Tuple...) = map(accum, x, ys...) -accum(x::AbstractArray, ys::AbstractArray...) = accum.(x, ys...) +accum(x::AbstractArray, ys::AbstractArray...) = Base.broadcast_preserving_zero_d(accum, x, ys...) 
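# A minimal sketch of why `broadcast_preserving_zero_d` is used in the line above,
# assuming the Zygote built from this series is loaded (`Zygote.accum` is an internal
# helper shown in the hunk): dot-broadcasting two 0-dimensional arrays collapses the
# result to a plain scalar, which would change the gradient's container type during
# accumulation, while `Base.broadcast_preserving_zero_d` keeps the 0-d wrapper.
using Zygote

fill(1.0) .+ fill(2.0)                                      # == 3.0, collapses to a scalar
Base.broadcast_preserving_zero_d(+, fill(1.0), fill(2.0))   # == fill(3.0), still 0-dimensional
Zygote.accum(fill(0.0), fill(0.0))                          # == fill(0.0), as the test added below checks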
@generated function accum(x::NamedTuple, y::NamedTuple) # assumes that y has no keys apart from those also in x From 65a32f516e21fa5dadf054a1ff5445e9cd436669 Mon Sep 17 00:00:00 2001 From: lxvm Date: Tue, 2 Jan 2024 16:40:11 -0800 Subject: [PATCH 469/490] add test --- test/lib/lib.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/test/lib/lib.jl b/test/lib/lib.jl index 0886b9969..11e64cba9 100644 --- a/test/lib/lib.jl +++ b/test/lib/lib.jl @@ -4,5 +4,6 @@ t2 = (a=1, b=2) @test Zygote.accum(t1, t2) == (a = 2, b = 4, c = 3) @test_throws ArgumentError Zygote.accum(t2, t1) + @test Zygote.accum(fill(0.0), fill(0.0)) == fill(0.0) end end From 38ebc73d4f9b25ca871b780da9b01ef8e3c2eead Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Thu, 11 Jan 2024 19:04:02 -0800 Subject: [PATCH 470/490] un-collapse nothings in `gradient` --- Project.toml | 2 +- src/compiler/interface.jl | 11 +++++++++-- test/lib/number.jl | 4 ++-- test/structures.jl | 2 +- 4 files changed, 13 insertions(+), 6 deletions(-) diff --git a/Project.toml b/Project.toml index d2357cb26..20895dd9d 100644 --- a/Project.toml +++ b/Project.toml @@ -57,7 +57,7 @@ Requires = "1.1" SpecialFunctions = "1.6, 2" Statistics = "1" Tracker = "0.2" -ZygoteRules = "0.2.4" +ZygoteRules = "0.2.5" julia = "1.6" [extras] diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index c09d6db31..2daf5e4cd 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -67,6 +67,10 @@ sensitivity(y::Complex) = error("Output is complex, so the gradient is not defin sensitivity(y::AbstractArray) = error("Output is an array, so the gradient is not defined. Perhaps you wanted jacobian.") sensitivity(y) = error("Output should be scalar; gradients are not defined for output $(repr(y))") +# Preserves output as tuple when gradients are collapsed +_project_all(::NTuple{N}, ::Nothing) where {N} = ntuple(_ -> nothing, N) +_project_all(x::Tuple, dx::Tuple) = map(_project, x, dx) + """ gradient(f, args...) @@ -95,7 +99,7 @@ julia> gradient([7, 11], 0, 1) do x, y, d function gradient(f, args...) y, back = pullback(f, args...) grad = back(sensitivity(y)) - isnothing(grad) ? nothing : map(_project, args, grad) + return _project_all(args, grad) end # Base.adjoint(f::Function) = x -> gradient(f, x)[1] # piracy! @@ -161,7 +165,7 @@ function withgradient(f, args...) else back(sensitivity(y)) end - results = isnothing(grad) ? 
map(_ -> nothing, args) : map(_project, args, grad) + results = _project_all(args, grad) (val=y, grad=results) end @@ -421,6 +425,9 @@ function pullback(f, ps::Params) end end +# No conversion required here +_project_all(_, dx::Grads) = dx + # Code Reflection function code_ir(f, T) diff --git a/test/lib/number.jl b/test/lib/number.jl index ce0a64bef..77756387d 100644 --- a/test/lib/number.jl +++ b/test/lib/number.jl @@ -3,8 +3,8 @@ @test gradient(floor, 1) === (0.0,) @test gradient(ceil, 1) === (0.0,) @test gradient(round, 1) === (0.0,) - @test gradient(hash, 1) === nothing - @test gradient(div, 1, 2) === nothing + @test gradient(hash, 1) === (nothing,) + @test gradient(div, 1, 2) === (nothing, nothing) end @testset "basics" begin diff --git a/test/structures.jl b/test/structures.jl index 5a951a621..cdba138c4 100644 --- a/test/structures.jl +++ b/test/structures.jl @@ -64,5 +64,5 @@ end end m, b = Zygote._pullback(Zygote.Context(), nameof, M) - @test b(m) == (nothing, nothing) + @test b(m) === nothing end From 070466d95dfaa2dc8649d127e045960524519088 Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Mon, 15 Jan 2024 21:34:10 -0800 Subject: [PATCH 471/490] Add docs Also adds a docstring for `pullback`, which we've been missing for some time. --- src/compiler/interface.jl | 55 ++++++++++++++++++++++++++++++++++++--- 1 file changed, 51 insertions(+), 4 deletions(-) diff --git a/src/compiler/interface.jl b/src/compiler/interface.jl index 2daf5e4cd..80fd9b477 100644 --- a/src/compiler/interface.jl +++ b/src/compiler/interface.jl @@ -39,6 +39,52 @@ _pullback(f, args...) = _pullback(Context(), f, args...) tailmemaybe(::Nothing) = nothing tailmemaybe(x::Tuple) = Base.tail(x) +""" + pullback(f, args...) + pullback(f, ::Params) + +Returns the value of the function `f` and a back-propagator function, +which can be called to obtain a tuple containing `∂f/∂x` for each argument `x`, +the derivative (for scalar `x`) or gradient. + +```julia +y, back = pullback(f, args...) +∇ = back(seed) +``` + +`back` must be called with a start value `seed` matching the output of `f(args...)`. +If `f(args...)` returns a number, `seed` should be a number. +If `f(args...)` returns an array, `seed` should be an equally-sized array. + +See also [`withgradient`](@ref) to obtain the value and gradients in one call, +and [`gradient`](@ref) for obtaining just the gradients. + +```jldoctest; setup=:(using Zygote) +julia> y, back = pullback(*, 2.0, 3.0, 5.0); + +julia> y +30.0 + +julia> back(1.0) +(15.0, 10.0, 6.0) + +julia> back(2.0) +(30.0, 20.0, 12.0) + +julia> y, back = pullback(x -> [x, x], 1.0); + +julia> y +2-element Vector{Float64}: + 1.0 + 1.0 + +julia> back([1.0, 1.0]) +(2.0,) + +julia> back([2.0, nothing]) +(2.0,) +``` +""" @inline pullback(f, args...) = pullback(f, Context(), args...) function pullback(f, cx::AContext, args...) y, back = _pullback(cx, f, args...) @@ -76,6 +122,7 @@ _project_all(x::Tuple, dx::Tuple) = map(_project, x, dx) Returns a tuple containing `∂f/∂x` for each argument `x`, the derivative (for scalar `x`) or the gradient. +If no gradient is defined, `∂f/∂x` will be `nothing`. `f(args...)` must be a real number, see [`jacobian`](@ref) for array output. @@ -113,7 +160,7 @@ end withgradient(f, ::Params) Returns both the value of the function and the [`gradient`](@ref), -as a named tuple. +as a named tuple. ```jldoctest; setup=:(using Zygote) julia> y, ∇ = withgradient(/, 1, 2) @@ -308,7 +355,7 @@ end Grads(...) 
Dictionary-like container returned when taking gradients with -respect to implicit parameters. For an array `W`, appearing +respect to implicit parameters. For an array `W`, appearing within `Params([W, A, B...])`, the gradient is `g[W]`. """ struct Grads @@ -325,7 +372,7 @@ const ADictOrGrads = Union{AbstractDict, Grads} # Dictionary interface. # Don't use the IdDict directly since it may contain some spurious pairs. -Base.haskey(gs::Grads, x) = x ∈ gs.params +Base.haskey(gs::Grads, x) = x ∈ gs.params Base.keys(gs::Grads) = gs.params Base.values(gs::Grads) = (gs.grads[p] for p in gs.params) @@ -385,7 +432,7 @@ broadcasted(f, a::Numeric, gs::Grads) = map(x -> f(a, x), gs) broadcasted(f, gs::Grads, a::Numeric) = map(x -> f(x, a), gs) function materialize!(gs1::Grads, gs2::Grads) - issetequal(gs1.params, gs2.params) || + issetequal(gs1.params, gs2.params) || throw(ArgumentError("Expected Grads objects with the same Params.")) for p in gs1.params gs1[p] = gs2[p] From 9914d59c72028fb12c5e17617aeb6b6660bd4817 Mon Sep 17 00:00:00 2001 From: lxvm Date: Tue, 2 Jan 2024 15:05:38 -0800 Subject: [PATCH 472/490] initial commit --- src/lib/array.jl | 20 +++++++++++--------- test/lib/array.jl | 5 +++++ 2 files changed, 16 insertions(+), 9 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 06ce860d2..f733a57cc 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -272,15 +272,17 @@ _ndims(x) = Base.IteratorSize(x) isa Base.HasShape ? _ndims(Base.IteratorSize(x) back(::AbstractArray{Nothing}) = nothing back(dy::NamedTuple{(:iterators,)}) = dy.iterators function back(dy::AbstractArray) - d = 1 - ntuple(length(xs)) do n - nd = _ndims(xs[n]) - dims = ntuple(i -> i StaticGetter{n}(), Val(length(xs))) + dims = Vector{Int}(undef, length(xs)) + map(first(dy), xs, cdim, ndim, getters) do dyn, x, cd, nd, getter + dyn === nothing && return nothing + append!(empty!(dims), 1:cd, cd+nd+1:ndims(dy)) + init = map(zero, dyn) # allows for tuples, which accum can add: + red = mapreduce(getter, accum, dy; dims=_ndims(x) == 0 ? (:) : dims, init=init) + return _project(x, _ndims(x) == 0 ? red : reshape(red, axes(x))) end end Iterators.product(xs...), back diff --git a/test/lib/array.jl b/test/lib/array.jl index a3b73aff9..2adf484e6 100644 --- a/test/lib/array.jl +++ b/test/lib/array.jl @@ -18,6 +18,11 @@ test_rrule(ZygoteRuleConfig(), x->sum(sin, Diagonal(x)), rand(3); rrule_f=rrule_ # This was wrong before https://github.com/FluxML/Zygote.jl/pull/1170 @test gradient(x -> sum([y[2] * y[3] for y in Iterators.product(x, x, x, x)]), [1,2,3,4])[1] ≈ [320, 320, 320, 320] @test gradient(x -> sum(y[2] * y[3] for y in Iterators.product(x, x, x, x)), [1,2,3,4])[1] ≈ [320, 320, 320, 320] + + for p in (1.0, fill(1.0), [1.0]) + @test gradient(p -> sum([x*q for q in p, x in 1:3]), p) == (6p,) + # @test gradient(p -> sum(x*q for (q, p) in Iterators.product(p, 1:3)), p) == (6.0,) + end end @testset "collect" begin From 4616eaf2cefac5398e5c02958259c322a59b6e0b Mon Sep 17 00:00:00 2001 From: lxvm Date: Tue, 2 Jan 2024 21:07:55 -0800 Subject: [PATCH 473/490] add adjoint for collect ProductIterator --- src/lib/array.jl | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/lib/array.jl b/src/lib/array.jl index f733a57cc..14a59d426 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -288,6 +288,11 @@ _ndims(x) = Base.IteratorSize(x) isa Base.HasShape ? 
_ndims(Base.IteratorSize(x) Iterators.product(xs...), back end +function _pullback(cx::AContext, ::typeof(collect), p_::Base.Iterators.ProductIterator) + p, back = _pullback(cx, Iterators.product, p_.iterators...) + collect(p), y -> (nothing, (iterators=back(y),)) +end + @adjoint function Iterators.Zip(xs) axs = map(_tryaxes, xs) # same function used for map back(dy::NamedTuple{(:is,)}) = tuple(dy.is) From a88bb3fad843b79c0bb4d112de1b960f0de64496 Mon Sep 17 00:00:00 2001 From: lxvm Date: Tue, 2 Jan 2024 21:58:20 -0800 Subject: [PATCH 474/490] tests and fixes for collect adjoint --- src/lib/array.jl | 35 ++++++++++++++++++----------------- test/lib/array.jl | 5 +++++ 2 files changed, 23 insertions(+), 17 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 14a59d426..745acae47 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -268,29 +268,30 @@ end _ndims(::Base.HasShape{d}) where {d} = d _ndims(x) = Base.IteratorSize(x) isa Base.HasShape ? _ndims(Base.IteratorSize(x)) : 1 +function prodfunc(xs, dy) + @assert length(first(dy)) == length(xs) + ndim = map(Zygote._ndims, xs) + cdim = cumsum((0, ndim[begin:end-1]...)) + getters = ntuple(n -> StaticGetter{n}(), Val(length(xs))) + dims = Vector{Int}(undef, length(xs)) + map(first(dy), xs, cdim, ndim, getters) do dyn, x, cd, nd, getter + dyn === nothing && return nothing + append!(empty!(dims), 1:cd, cd+nd+1:ndims(dy)) + init = map(zero, dyn) # allows for tuples, which accum can add: + red = mapreduce(getter, accum, dy; dims=_ndims(x) == 0 ? (:) : dims, init=init) + return _project(x, _ndims(x) == 0 ? red : reshape(red, axes(x))) + end +end + @adjoint function Iterators.product(xs...) back(::AbstractArray{Nothing}) = nothing back(dy::NamedTuple{(:iterators,)}) = dy.iterators - function back(dy::AbstractArray) - @assert length(first(dy)) == length(xs) - ndim = map(Zygote._ndims, xs) - cdim = cumsum((0, ndim[begin:end-1]...)) - getters = ntuple(n -> StaticGetter{n}(), Val(length(xs))) - dims = Vector{Int}(undef, length(xs)) - map(first(dy), xs, cdim, ndim, getters) do dyn, x, cd, nd, getter - dyn === nothing && return nothing - append!(empty!(dims), 1:cd, cd+nd+1:ndims(dy)) - init = map(zero, dyn) # allows for tuples, which accum can add: - red = mapreduce(getter, accum, dy; dims=_ndims(x) == 0 ? (:) : dims, init=init) - return _project(x, _ndims(x) == 0 ? red : reshape(red, axes(x))) - end - end + back(dy::AbstractArray) = prodfunc(xs, dy) Iterators.product(xs...), back end -function _pullback(cx::AContext, ::typeof(collect), p_::Base.Iterators.ProductIterator) - p, back = _pullback(cx, Iterators.product, p_.iterators...) 
- collect(p), y -> (nothing, (iterators=back(y),)) +@adjoint function Base.collect(p::Base.Iterators.ProductIterator) + collect(p), dy -> ((iterators=prodfunc(p.iterators, dy),),) end @adjoint function Iterators.Zip(xs) diff --git a/test/lib/array.jl b/test/lib/array.jl index 2adf484e6..b05b99dee 100644 --- a/test/lib/array.jl +++ b/test/lib/array.jl @@ -23,6 +23,11 @@ test_rrule(ZygoteRuleConfig(), x->sum(sin, Diagonal(x)), rand(3); rrule_f=rrule_ @test gradient(p -> sum([x*q for q in p, x in 1:3]), p) == (6p,) # @test gradient(p -> sum(x*q for (q, p) in Iterators.product(p, 1:3)), p) == (6.0,) end + + @test gradient(x -> sum(broadcast(prod, Iterators.product(x,x))), ones(4)) == (2*4ones(4),) + @test gradient(x -> sum(broadcast(prod, Iterators.product(x .^ 2, x))), ones(4)) == (3*4ones(4),) + @test gradient(x -> sum(broadcast(prod, Iterators.product(x, x .^ 2))), ones(4)) == (3*4ones(4),) + @test gradient(x -> sum(broadcast(prod, Iterators.product(x .^ 2, x .^ 2))), ones(4)) == (4*4ones(4),) end @testset "collect" begin From 3ac1fb802ff4f82066394820ea37d8bb302e47c4 Mon Sep 17 00:00:00 2001 From: lxvm Date: Mon, 15 Jan 2024 23:49:09 -0500 Subject: [PATCH 475/490] rebase and make dims a tuple again --- src/lib/array.jl | 12 ++++++------ test/lib/array.jl | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 745acae47..3f9d4a25b 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -271,15 +271,15 @@ _ndims(x) = Base.IteratorSize(x) isa Base.HasShape ? _ndims(Base.IteratorSize(x) function prodfunc(xs, dy) @assert length(first(dy)) == length(xs) ndim = map(Zygote._ndims, xs) - cdim = cumsum((0, ndim[begin:end-1]...)) + cdim = cumsum((1, ndim[begin:end-1]...)) getters = ntuple(n -> StaticGetter{n}(), Val(length(xs))) - dims = Vector{Int}(undef, length(xs)) - map(first(dy), xs, cdim, ndim, getters) do dyn, x, cd, nd, getter + map(first(dy), xs, cdim, getters) do dyn, x, cd, getter dyn === nothing && return nothing - append!(empty!(dims), 1:cd, cd+nd+1:ndims(dy)) + nd = _ndims(x) + dims = nd == 0 ? 
(:) : ntuple(i -> isum(sin, Diagonal(x)), rand(3); rrule_f=rrule_ for p in (1.0, fill(1.0), [1.0]) @test gradient(p -> sum([x*q for q in p, x in 1:3]), p) == (6p,) - # @test gradient(p -> sum(x*q for (q, p) in Iterators.product(p, 1:3)), p) == (6.0,) + @test gradient(p -> sum(x*q for (q, x) in Iterators.product(p, 1:3)), p) == (6p,) end @test gradient(x -> sum(broadcast(prod, Iterators.product(x,x))), ones(4)) == (2*4ones(4),) From 8903ed41892f8040e8082bc436b3d385edafc41a Mon Sep 17 00:00:00 2001 From: Lorenzo Van Munoz <66997677+lxvm@users.noreply.github.com> Date: Tue, 16 Jan 2024 01:05:11 -0500 Subject: [PATCH 476/490] Update src/lib/array.jl Co-authored-by: Brian Chen --- src/lib/array.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 3f9d4a25b..34e4c481f 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -291,7 +291,8 @@ end end @adjoint function Base.collect(p::Base.Iterators.ProductIterator) - collect(p), dy -> ((iterators=prodfunc(p.iterators, dy),),) + collect_product_pullback(dy) = ((iterators=prodfunc(p.iterators, dy),),) + return collect(p), collect_product_pullback end @adjoint function Iterators.Zip(xs) From ab95858f9ebcdf96008c0a2f943df25841e160e7 Mon Sep 17 00:00:00 2001 From: lxvm Date: Tue, 16 Jan 2024 01:09:15 -0500 Subject: [PATCH 477/490] rename productfunc --- src/lib/array.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 34e4c481f..3363298e3 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -268,7 +268,7 @@ end _ndims(::Base.HasShape{d}) where {d} = d _ndims(x) = Base.IteratorSize(x) isa Base.HasShape ? _ndims(Base.IteratorSize(x)) : 1 -function prodfunc(xs, dy) +function productfunc(xs, dy) @assert length(first(dy)) == length(xs) ndim = map(Zygote._ndims, xs) cdim = cumsum((1, ndim[begin:end-1]...)) @@ -286,7 +286,7 @@ end @adjoint function Iterators.product(xs...) back(::AbstractArray{Nothing}) = nothing back(dy::NamedTuple{(:iterators,)}) = dy.iterators - back(dy::AbstractArray) = prodfunc(xs, dy) + back(dy::AbstractArray) = productfunc(xs, dy) Iterators.product(xs...), back end From 1a4b275c4458d508b9a5bc08eaca7c01a93b8662 Mon Sep 17 00:00:00 2001 From: lxvm Date: Tue, 16 Jan 2024 01:18:43 -0500 Subject: [PATCH 478/490] update adjoints for zip --- src/lib/array.jl | 26 ++++++++++++++++++-------- 1 file changed, 18 insertions(+), 8 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 3363298e3..d4e373560 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -295,14 +295,24 @@ end return collect(p), collect_product_pullback end -@adjoint function Iterators.Zip(xs) - axs = map(_tryaxes, xs) # same function used for map - back(dy::NamedTuple{(:is,)}) = tuple(dy.is) - back(dy::AbstractArray) = ntuple(length(xs)) do d - dx = map(StaticGetter{d}(), dy) - _project(xs[d], _restore(dx, axs[d])) - end |> tuple - Iterators.Zip(xs), back +function zipfunc(xs, dy) + getters = ntuple(n -> StaticGetter{n}(), Val(length(xs))) + map(xs, getters) do x, getter + dx = map(getter, dy) + _project(x, _restore(dx, _tryaxes(x))) + end +end + +@adjoint function Iterators.zip(xs...) 
+ back(::AbstractArray{Nothing}) = nothing + back(dy::NamedTuple{(:is,)}) = dy.is + back(dy::AbstractArray) = zipfunc(xs, dy) + Iterators.zip(xs...), back +end + +@adjoint function Base.collect(z::Base.Iterators.Zip) + collect_zip_pullback(dy) = ((is=zipfunc(z.is, dy),),) + collect(z), collect_zip_pullback end # Reductions From c9f222c357cf61d62f84b16b185b277a7612120b Mon Sep 17 00:00:00 2001 From: lxvm Date: Tue, 16 Jan 2024 01:19:05 -0500 Subject: [PATCH 479/490] typo --- src/lib/array.jl | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index d4e373560..6f4f8e965 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -291,7 +291,7 @@ end end @adjoint function Base.collect(p::Base.Iterators.ProductIterator) - collect_product_pullback(dy) = ((iterators=prodfunc(p.iterators, dy),),) + collect_product_pullback(dy) = ((iterators=productfunc(p.iterators, dy),),) return collect(p), collect_product_pullback end From 2efd29d4bf639d7091f1be7644a28ffaba0dda05 Mon Sep 17 00:00:00 2001 From: lxvm Date: Tue, 16 Jan 2024 14:27:52 -0500 Subject: [PATCH 480/490] inference tests for product --- test/lib/array.jl | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/test/lib/array.jl b/test/lib/array.jl index a9f0a10c6..a8e020d80 100644 --- a/test/lib/array.jl +++ b/test/lib/array.jl @@ -19,15 +19,15 @@ test_rrule(ZygoteRuleConfig(), x->sum(sin, Diagonal(x)), rand(3); rrule_f=rrule_ @test gradient(x -> sum([y[2] * y[3] for y in Iterators.product(x, x, x, x)]), [1,2,3,4])[1] ≈ [320, 320, 320, 320] @test gradient(x -> sum(y[2] * y[3] for y in Iterators.product(x, x, x, x)), [1,2,3,4])[1] ≈ [320, 320, 320, 320] + # Numbers failed before https://github.com/FluxML/Zygote.jl/pull/1489 for p in (1.0, fill(1.0), [1.0]) @test gradient(p -> sum([x*q for q in p, x in 1:3]), p) == (6p,) @test gradient(p -> sum(x*q for (q, x) in Iterators.product(p, 1:3)), p) == (6p,) end - @test gradient(x -> sum(broadcast(prod, Iterators.product(x,x))), ones(4)) == (2*4ones(4),) - @test gradient(x -> sum(broadcast(prod, Iterators.product(x .^ 2, x))), ones(4)) == (3*4ones(4),) - @test gradient(x -> sum(broadcast(prod, Iterators.product(x, x .^ 2))), ones(4)) == (3*4ones(4),) - @test gradient(x -> sum(broadcast(prod, Iterators.product(x .^ 2, x .^ 2))), ones(4)) == (4*4ones(4),) + # inference would also fail before #1489 + y, back = _pullback(Iterators.product, 1:5, fill(1)) + @test @inferred back(collect(y)) == (nothing, [1.0, 2.0, 3.0, 4.0, 5.0], fill(5.0)) end @testset "collect" begin @@ -55,6 +55,17 @@ end g = gradient(d -> sum(x^2 for x in collect(d)), t)[1] @test g === (2.0, 4.0) end + + @testset "Iterators.Product" begin + p = Iterators.product(1:3, 1:2) + g = gradient(p -> sum(prod, collect(p)), p)[1] + @test g == (iterators=(3ones(3), 6ones(2)),) + + @test gradient(x -> sum(broadcast(prod, Iterators.product(x,x))), ones(4)) == (2*4ones(4),) + @test gradient(x -> sum(broadcast(prod, Iterators.product(x .^ 2, x))), ones(4)) == (3*4ones(4),) + @test gradient(x -> sum(broadcast(prod, Iterators.product(x, x .^ 2))), ones(4)) == (3*4ones(4),) + @test gradient(x -> sum(broadcast(prod, Iterators.product(x .^ 2, x .^ 2))), ones(4)) == (4*4ones(4),) + end end @testset "dictionary comprehension" begin From 4e36d7be634fc60b6b1c46a808b8b5c81f31842a Mon Sep 17 00:00:00 2001 From: lxvm Date: Tue, 16 Jan 2024 15:30:02 -0500 Subject: [PATCH 481/490] add tests for zip --- src/lib/array.jl | 4 +++- test/lib/array.jl | 33 ++++++++++++++++++++++++++++++++- 2 
files changed, 35 insertions(+), 2 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 6f4f8e965..7e3604861 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -169,8 +169,10 @@ _reverse(x::Symmetric) = Symmetric(_reverse(x.data), x.uplo == 'U' ? :L : :U) # So we keep axes(x) to restore gradient dx to its full length & correct shape. _tryaxes(x) = axes(x) _tryaxes(x::Tuple) = Val(length(x)) -_restore(dx, ax::Tuple) = axes(dx) == ax ? dx : reshape(vcat(dx, falses(prod(length, ax) - length(dx))), ax) +_tryaxes(::Number) = Val(-1) +_restore(dx, ax::Tuple) = axes(dx) == ax ? dx : reshape(vcat(dx, falses(prod(map(length, ax)) - length(dx))), ax) _restore(dx, ::Val{N}) where {N} = ntuple(i -> get(dx,i,nothing), N) +_restore(dx, ::Val{-1}) = only(dx) # Sometimes a pullback doesn't return a Tuple, but rather returns only a # single nothing to say "all arguments have zero cotangent". This function is needed to diff --git a/test/lib/array.jl b/test/lib/array.jl index a8e020d80..7b11e635a 100644 --- a/test/lib/array.jl +++ b/test/lib/array.jl @@ -30,6 +30,26 @@ test_rrule(ZygoteRuleConfig(), x->sum(sin, Diagonal(x)), rand(3); rrule_f=rrule_ @test @inferred back(collect(y)) == (nothing, [1.0, 2.0, 3.0, 4.0, 5.0], fill(5.0)) end +@testset "adjoints of Iterators.zip" begin + y, back = _pullback(Iterators.zip, 1:5, 1:3, 1:2) + @test back(collect(y)) == (nothing, [1.0, 2.0, 0.0, 0.0, 0.0], [1.0, 2.0, 0.0], [1.0, 2.0]) + @test back([(nothing, j, k) for (i,j,k) in zip(1:5, 1:3, 1:2)]) == (nothing, Union{Nothing, Float64}[nothing, nothing, 0.0, 0.0, 0.0], [1.0, 2.0, 0.0], [1.0, 2.0]) + @test back([(i, nothing, k) for (i,j,k) in zip(1:5, 1:3, 1:2)]) == (nothing, [1.0, 2.0, 0.0, 0.0, 0.0], Union{Nothing, Float64}[nothing, nothing, 0.0], [1.0, 2.0]) + @test back([(i, j, nothing) for (i,j,k) in zip(1:5, 1:3, 1:2)]) == (nothing, [1.0, 2.0, 0.0, 0.0, 0.0], [1.0, 2.0, 0.0], nothing) + + + @test gradient(x -> sum([y[2] * y[3] for y in Iterators.zip(x, x, x, x)]), [1,2,3,4])[1] ≈ [2, 4, 6, 8] + @test gradient(x -> sum(y[2] * y[3] for y in Iterators.zip(x, x, x, x)), [1,2,3,4])[1] ≈ [2, 4, 6, 8] + + for p in (1.0, fill(1.0), [1.0]) + @test gradient(p_ -> sum(map(prod, Iterators.zip(p_, p))), p) == (p,) + @test gradient(p_ -> sum(x*q for (q, x) in Iterators.zip(p_, p)), p) == (p,) + end + + y, back = _pullback(Iterators.zip, 1:5, fill(1)) + @test @inferred back(collect(y)) == (nothing, [1.0, 0.0, 0.0, 0.0, 0.0], fill(1.0)) +end + @testset "collect" begin @testset "Dict" begin d = Dict(1 => 5, 2 => 6) @@ -56,7 +76,7 @@ end @test g === (2.0, 4.0) end - @testset "Iterators.Product" begin + @testset "Iterators.ProductIterator" begin p = Iterators.product(1:3, 1:2) g = gradient(p -> sum(prod, collect(p)), p)[1] @test g == (iterators=(3ones(3), 6ones(2)),) @@ -66,6 +86,17 @@ end @test gradient(x -> sum(broadcast(prod, Iterators.product(x, x .^ 2))), ones(4)) == (3*4ones(4),) @test gradient(x -> sum(broadcast(prod, Iterators.product(x .^ 2, x .^ 2))), ones(4)) == (4*4ones(4),) end + + @testset "Iterators.Zip" begin + z = Iterators.zip(1:3, 1:2) + g = gradient(z -> sum(prod, collect(z)), z)[1] + @test g == (is=([1.0, 2.0, 0.0], [1.0, 2.0]),) + + @test gradient(x -> sum(broadcast(prod, Iterators.zip(x,x))), ones(4)) == (2ones(4),) + @test gradient(x -> sum(broadcast(prod, Iterators.zip(x.^2,x))), ones(4)) == (3ones(4),) + @test gradient(x -> sum(broadcast(prod, Iterators.zip(x,x.^2))), ones(4)) == (3ones(4),) + @test gradient(x -> sum(broadcast(prod, Iterators.zip(x.^2,x.^2))), ones(4)) == (4ones(4),) + end 
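# A minimal usage sketch, assuming the Zygote built from this series: with the
# `Iterators.product` / `Iterators.zip` adjoints added in the preceding patches,
# gradients flow through generator expressions over these iterators. The calls follow
# the pattern of the new tests above; the particular numbers are illustrative, not
# further assertions taken from the patch.
using Zygote

gradient(x -> sum(a * b for (a, b) in zip(x, x)), [1.0, 2.0])                # ([2.0, 4.0],)
gradient(x -> sum(y[1] * y[2] for y in Iterators.product(x, x)), [1.0, 2.0]) # ([6.0, 6.0],)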
end @testset "dictionary comprehension" begin From 8adfd297955500b4b3cfcd05d533e58540948bac Mon Sep 17 00:00:00 2001 From: lxvm Date: Tue, 16 Jan 2024 15:54:48 -0500 Subject: [PATCH 482/490] better pullback names --- src/lib/array.jl | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 7e3604861..88a576bc2 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -286,10 +286,10 @@ function productfunc(xs, dy) end @adjoint function Iterators.product(xs...) - back(::AbstractArray{Nothing}) = nothing - back(dy::NamedTuple{(:iterators,)}) = dy.iterators - back(dy::AbstractArray) = productfunc(xs, dy) - Iterators.product(xs...), back + product_pullback(::AbstractArray{Nothing}) = nothing + product_pullback(dy::NamedTuple{(:iterators,)}) = dy.iterators + product_pullback(dy::AbstractArray) = productfunc(xs, dy) + Iterators.product(xs...), product_pullback end @adjoint function Base.collect(p::Base.Iterators.ProductIterator) @@ -306,10 +306,10 @@ function zipfunc(xs, dy) end @adjoint function Iterators.zip(xs...) - back(::AbstractArray{Nothing}) = nothing - back(dy::NamedTuple{(:is,)}) = dy.is - back(dy::AbstractArray) = zipfunc(xs, dy) - Iterators.zip(xs...), back + zip_pullback(::AbstractArray{Nothing}) = nothing + zip_pullback(dy::NamedTuple{(:is,)}) = dy.is + zip_pullback(dy::AbstractArray) = zipfunc(xs, dy) + Iterators.zip(xs...), zip_pullback end @adjoint function Base.collect(z::Base.Iterators.Zip) From 3087003910967ebdfdc3496990af799f89ff0c5a Mon Sep 17 00:00:00 2001 From: lxvm Date: Wed, 17 Jan 2024 01:46:03 -0500 Subject: [PATCH 483/490] rewrite unnecesarily generated function --- src/lib/array.jl | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index 88a576bc2..695039f3e 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -137,8 +137,9 @@ end struct StaticGetter{i} end (::StaticGetter{i})(v) where {i} = v[i] (::StaticGetter{i})(::Nothing) where {i} = nothing -@generated function _unzip(tuples, ::Val{N}) where {N} - Expr(:tuple, (:(map($(StaticGetter{i}()), tuples)) for i ∈ 1:N)...) +function _unzip(tuples, ::Val{N}) where {N} + getters = ntuple(n -> StaticGetter{n}(), Val(N)) + map(g -> map(g, tuples), getters) end function unzip(tuples) N = length(first(tuples)) @@ -313,7 +314,7 @@ end end @adjoint function Base.collect(z::Base.Iterators.Zip) - collect_zip_pullback(dy) = ((is=zipfunc(z.is, dy),),) + collect_zip_pullback(dy::AbstractArray) = ((is=zipfunc(z.is, dy),),) collect(z), collect_zip_pullback end From 2bf76338dfff5e8c6e09fbe68182ef961c814819 Mon Sep 17 00:00:00 2001 From: lxvm Date: Wed, 17 Jan 2024 14:04:57 -0500 Subject: [PATCH 484/490] restore arrays with nothing elements --- src/lib/array.jl | 1 + 1 file changed, 1 insertion(+) diff --git a/src/lib/array.jl b/src/lib/array.jl index 695039f3e..b152d06d6 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -171,6 +171,7 @@ _reverse(x::Symmetric) = Symmetric(_reverse(x.data), x.uplo == 'U' ? :L : :U) _tryaxes(x) = axes(x) _tryaxes(x::Tuple) = Val(length(x)) _tryaxes(::Number) = Val(-1) +_restore(dx::AbstractArray{Nothing}, ax::Tuple) = similar(dx, ax) _restore(dx, ax::Tuple) = axes(dx) == ax ? 
dx : reshape(vcat(dx, falses(prod(map(length, ax)) - length(dx))), ax) _restore(dx, ::Val{N}) where {N} = ntuple(i -> get(dx,i,nothing), N) _restore(dx, ::Val{-1}) = only(dx) From 734a6c46c9d8522bd238f40bf8e9cc442bc6973e Mon Sep 17 00:00:00 2001 From: lxvm Date: Wed, 17 Jan 2024 14:07:13 -0500 Subject: [PATCH 485/490] update zip test for nothings --- test/lib/array.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/test/lib/array.jl b/test/lib/array.jl index 7b11e635a..8016c9541 100644 --- a/test/lib/array.jl +++ b/test/lib/array.jl @@ -33,8 +33,8 @@ end @testset "adjoints of Iterators.zip" begin y, back = _pullback(Iterators.zip, 1:5, 1:3, 1:2) @test back(collect(y)) == (nothing, [1.0, 2.0, 0.0, 0.0, 0.0], [1.0, 2.0, 0.0], [1.0, 2.0]) - @test back([(nothing, j, k) for (i,j,k) in zip(1:5, 1:3, 1:2)]) == (nothing, Union{Nothing, Float64}[nothing, nothing, 0.0, 0.0, 0.0], [1.0, 2.0, 0.0], [1.0, 2.0]) - @test back([(i, nothing, k) for (i,j,k) in zip(1:5, 1:3, 1:2)]) == (nothing, [1.0, 2.0, 0.0, 0.0, 0.0], Union{Nothing, Float64}[nothing, nothing, 0.0], [1.0, 2.0]) + @test back([(nothing, j, k) for (i,j,k) in zip(1:5, 1:3, 1:2)]) == (nothing, nothing, [1.0, 2.0, 0.0], [1.0, 2.0]) + @test back([(i, nothing, k) for (i,j,k) in zip(1:5, 1:3, 1:2)]) == (nothing, [1.0, 2.0, 0.0, 0.0, 0.0], nothing, [1.0, 2.0]) @test back([(i, j, nothing) for (i,j,k) in zip(1:5, 1:3, 1:2)]) == (nothing, [1.0, 2.0, 0.0, 0.0, 0.0], [1.0, 2.0, 0.0], nothing) From 8c61928e8d004fa352cda035cb87e48bdd7fe42c Mon Sep 17 00:00:00 2001 From: Lorenzo Van Munoz <66997677+lxvm@users.noreply.github.com> Date: Fri, 19 Jan 2024 13:20:11 -0500 Subject: [PATCH 486/490] Apply suggestions from code review Remove `Val` from `ntuple`s where constant propagation occurs Co-authored-by: Brian Chen --- src/lib/array.jl | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index b152d06d6..fe0438b5a 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -138,7 +138,7 @@ struct StaticGetter{i} end (::StaticGetter{i})(v) where {i} = v[i] (::StaticGetter{i})(::Nothing) where {i} = nothing function _unzip(tuples, ::Val{N}) where {N} - getters = ntuple(n -> StaticGetter{n}(), Val(N)) + getters = ntuple(n -> StaticGetter{n}(), N) map(g -> map(g, tuples), getters) end function unzip(tuples) @@ -276,7 +276,7 @@ function productfunc(xs, dy) @assert length(first(dy)) == length(xs) ndim = map(Zygote._ndims, xs) cdim = cumsum((1, ndim[begin:end-1]...)) - getters = ntuple(n -> StaticGetter{n}(), Val(length(xs))) + getters = ntuple(n -> StaticGetter{n}(), length(xs)) map(first(dy), xs, cdim, getters) do dyn, x, cd, getter dyn === nothing && return nothing nd = _ndims(x) @@ -300,7 +300,7 @@ end end function zipfunc(xs, dy) - getters = ntuple(n -> StaticGetter{n}(), Val(length(xs))) + getters = ntuple(n -> StaticGetter{n}(), length(xs)) map(xs, getters) do x, getter dx = map(getter, dy) _project(x, _restore(dx, _tryaxes(x))) From 18e48dba2c43ede5fb7a0511318c40a638049a8a Mon Sep 17 00:00:00 2001 From: lxvm Date: Fri, 19 Jan 2024 13:23:22 -0500 Subject: [PATCH 487/490] use number as sentinel --- src/lib/array.jl | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/lib/array.jl b/src/lib/array.jl index fe0438b5a..6d914d272 100644 --- a/src/lib/array.jl +++ b/src/lib/array.jl @@ -170,11 +170,11 @@ _reverse(x::Symmetric) = Symmetric(_reverse(x.data), x.uplo == 'U' ? :L : :U) # So we keep axes(x) to restore gradient dx to its full length & correct shape. 
_tryaxes(x) = axes(x) _tryaxes(x::Tuple) = Val(length(x)) -_tryaxes(::Number) = Val(-1) +_tryaxes(x::Number) = x _restore(dx::AbstractArray{Nothing}, ax::Tuple) = similar(dx, ax) _restore(dx, ax::Tuple) = axes(dx) == ax ? dx : reshape(vcat(dx, falses(prod(map(length, ax)) - length(dx))), ax) _restore(dx, ::Val{N}) where {N} = ntuple(i -> get(dx,i,nothing), N) -_restore(dx, ::Val{-1}) = only(dx) +_restore(dx, ::Number) = only(dx) # Sometimes a pullback doesn't return a Tuple, but rather returns only a # single nothing to say "all arguments have zero cotangent". This function is needed to From c0daccded5b9f91d31ceb889e4a97e74dd722a4e Mon Sep 17 00:00:00 2001 From: Brian Chen Date: Fri, 19 Jan 2024 13:59:46 -0800 Subject: [PATCH 488/490] Bump version --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 20895dd9d..8ca9b5c1d 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.68" +version = "0.6.69" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c" From 2beaa07fed406f30b00e4cc5f523b8c08331dc45 Mon Sep 17 00:00:00 2001 From: Keno Fischer Date: Wed, 8 May 2024 01:03:35 -0400 Subject: [PATCH 489/490] Adjust to upcoming julia#54341 (#1511) Corresponding IRTools changes are in https://github.com/FluxML/IRTools.jl/pull/125. --- src/compiler/interface2.jl | 16 ++++++++++++++-- src/lib/literal_getproperty.jl | 5 +++++ 2 files changed, 19 insertions(+), 2 deletions(-) diff --git a/src/compiler/interface2.jl b/src/compiler/interface2.jl index 31ae7eaf4..571227d84 100644 --- a/src/compiler/interface2.jl +++ b/src/compiler/interface2.jl @@ -72,7 +72,13 @@ if VERSION >= v"1.10.0-DEV.873" function _pullback_generator(world::UInt, source, self, ctx, f, args) ret = _generate_pullback(ctx, world, f, args...) 
- ret isa Core.CodeInfo && return ret + if ret isa Core.CodeInfo + if isdefined(Base, :__has_internal_change) && Base.__has_internal_change(v"1.12-alpha", :codeinfonargs) + ret.nargs = 4 + ret.isva = true + end + return ret + end stub = Core.GeneratedFunctionStub(identity, Core.svec(:methodinstance, :ctx, :f, :args), Core.svec()) stub(world, source, ret) @@ -85,7 +91,13 @@ end function _callable_pullback_generator(world::UInt, source, self, Δ) ret = _generate_callable_pullback(self, world, Δ) - ret isa Core.CodeInfo && return ret + if ret isa Core.CodeInfo + if isdefined(Base, :__has_internal_change) && Base.__has_internal_change(v"1.12-alpha", :codeinfonargs) + ret.nargs = 2 + ret.isva = false + end + return ret + end stub = Core.GeneratedFunctionStub(identity, Core.svec(:methodinstance, :Δ), Core.svec()) stub(world, source, ret) diff --git a/src/lib/literal_getproperty.jl b/src/lib/literal_getproperty.jl index c50cab171..cf7d08068 100644 --- a/src/lib/literal_getproperty.jl +++ b/src/lib/literal_getproperty.jl @@ -85,6 +85,11 @@ function _generate_literal_getproperty(ctx, world, x, ::Type{Val{f}}) where f ci.edges = Core.MethodInstance[mi, mi_pb_getproperty, mi_getproperty, mi_rrule] # XXX: on 1.10, we should also set metadata like min-world and max-world + if isdefined(Base, :__has_internal_change) && Base.__has_internal_change(v"1.12-alpha", :codeinfonargs) + ci.nargs = 5 + ci.isva = false + end + return ci else # nothing to optimize here, need to recurse into `getproperty` From 3c3325d9987931f15bd478c932332be19c316de4 Mon Sep 17 00:00:00 2001 From: Carlo Lucibello Date: Thu, 9 May 2024 17:10:08 +0200 Subject: [PATCH 490/490] Update Project.toml --- Project.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Project.toml b/Project.toml index 8ca9b5c1d..c927c2de9 100644 --- a/Project.toml +++ b/Project.toml @@ -1,6 +1,6 @@ name = "Zygote" uuid = "e88e6eb3-aa80-5325-afca-941959d7151f" -version = "0.6.69" +version = "0.6.70" [deps] AbstractFFTs = "621f4979-c628-5d54-868e-fcf4e3e8185c"
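The renamed `zip_pullback` from PATCH 482 and the `nothing`-collapsing behaviour added in PATCHes 483-485 can be driven by hand exactly the way the updated `@testset "adjoints of Iterators.zip"` in test/lib/array.jl does. Below is a minimal sketch, assuming a Zygote build that already carries this series (i.e. the v0.6.70 bump in the final commit); `Zygote._pullback` is the same internal entry point the test file calls, and the expected tuples are copied from the `@test` lines above.

    using Zygote  # assumes this patch series is applied (Zygote v0.6.70)

    # Same pullback the updated zip test exercises:
    y, back = Zygote._pullback(Iterators.zip, 1:5, 1:3, 1:2)

    # Entries beyond the shortest iterator receive a zero cotangent:
    @assert back(collect(y)) == (nothing, [1.0, 2.0, 0.0, 0.0, 0.0], [1.0, 2.0, 0.0], [1.0, 2.0])

    # An argument slot that only ever sees `nothing` now collapses to `nothing`
    # (the expectation PATCH 485 updated):
    @assert back([(i, nothing, k) for (i, j, k) in zip(1:5, 1:3, 1:2)]) ==
            (nothing, [1.0, 2.0, 0.0, 0.0, 0.0], nothing, [1.0, 2.0])

Because the asserted tuples mirror the test expectations verbatim, the sketch doubles as a quick smoke test after rebasing the series onto a newer Julia.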