Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Auto upgrade to autodiff_deferred in nested AD #1839

Merged
merged 8 commits into from
Sep 16, 2024
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions examples/autodiff.jl
Original file line number Diff line number Diff line change
Expand Up @@ -98,7 +98,7 @@ dby = [0.0]

Enzyme.autodiff(
Forward,
(x,y) -> Enzyme.autodiff_deferred(Reverse, f, x, y),
(x,y) -> Enzyme.autodiff(Reverse, f, x, y),
Duplicated(Duplicated(x, bx), Duplicated(dx, dbx)),
Duplicated(Duplicated(y, by), Duplicated(dy, dby)),
)
Expand All @@ -121,7 +121,7 @@ dbx[2] == 1.0
# \end{aligned}
# ```
# NOTE(review): this span is a diff hunk — the two autodiff calls below are the
# removed (pre-PR, `autodiff_deferred`) and added (`autodiff`) versions of the
# same line; only one exists in the real file. The PR makes plain `autodiff`
# auto-upgrade to deferred mode when nested.
function grad(x, dx, y, dy)
# Reverse-mode AD of `f` (`f` is captured from the example's enclosing
# scope — not visible in this hunk); derivatives accumulate into the
# shadows `dx`/`dy`.
Enzyme.autodiff_deferred(Reverse, f, Duplicated(x, dx), DuplicatedNoNeed(y, dy))
Enzyme.autodiff(Reverse, f, Duplicated(x, dx), DuplicatedNoNeed(y, dy))
nothing
end

Expand Down
45 changes: 2 additions & 43 deletions src/Enzyme.jl
Original file line number Diff line number Diff line change
Expand Up @@ -1052,31 +1052,6 @@ grad = gradient(Reverse, only ∘ f, (a = 2.0, b = [3.0], c = "str"))
end
end

"""
gradient_deferred(::ReverseMode, f, x)

Like [`gradient`](@ref), except it uses deferred mode.
"""
@inline function gradient_deferred(rm::ReverseMode{ReturnPrimal,RuntimeActivity,ABI,Holomorphic,ErrIfFuncWritten}, f::F, x::X) where {F, X, ReturnPrimal, RuntimeActivity, ABI, Holomorphic, ErrIfFuncWritten}
    # Compute the gradient of `f` at `x` via deferred reverse-mode AD.
    # Returns `(primal, dx)` when `ReturnPrimal`, otherwise just `dx`.
    if Compiler.active_reg_inner(X, #=seen=#(), #=world=#nothing, #=justActive=#Val(true)) == Compiler.ActiveState
        # Active-state (immutable, register-like) inputs: box the shadow in a
        # Ref so the reverse pass can write through it via MixedDuplicated.
        dx = Ref(make_zero(x))
        # Bug fix: capture the autodiff result — `res` was previously never
        # assigned, so the ReturnPrimal branch threw an UndefVarError.
        res = autodiff_deferred(rm, f, Active, MixedDuplicated(x, dx))
        if ReturnPrimal
            return (res[2], only(dx))
        else
            return only(dx)
        end
    else
        # Mutable/duplicable inputs: accumulate directly into a zeroed shadow.
        dx = make_zero(x)
        res = autodiff_deferred(rm, f, Active, Duplicated(x, dx))
        if ReturnPrimal
            (res[2], dx)
        else
            dx
        end
    end
end

"""
gradient!(::ReverseMode, dx, f, x)

Expand Down Expand Up @@ -1109,22 +1084,6 @@ gradient!(Reverse, dx, f, [2.0, 3.0])
end
end


"""
gradient_deferred!(::ReverseMode, f, x)

Like [`gradient!`](@ref), except it uses deferred mode.
"""
@inline function gradient_deferred!(rm::ReverseMode{ReturnPrimal,RuntimeActivity,ABI,Holomorphic,ErrIfFuncWritten}, dx::X, f::F, x::X) where {X<:Array, F, ReturnPrimal, RuntimeActivity, ABI, Holomorphic, ErrIfFuncWritten}
    # In-place gradient of `f` at `x`, written into the shadow buffer `dx`.
    # Returns `(primal, dx)` when `ReturnPrimal`, otherwise `dx`.
    # Zero the shadow first so reverse-mode accumulation starts clean.
    make_zero!(dx)
    # Bug fix: capture the autodiff result — `res` was previously never
    # assigned, so the ReturnPrimal branch threw an UndefVarError.
    res = autodiff_deferred(rm, f, Active, Duplicated(x, dx))
    return if ReturnPrimal
        (res[2], dx)
    else
        dx
    end
end

"""
gradient(::ForwardMode, f, x; shadow=onehot(x))

Expand Down Expand Up @@ -1565,7 +1524,7 @@ res
"""
# NOTE(review): this span is a diff hunk — the two `Enzyme.autodiff` lines are
# the removed (`gradient_deferred!`) and added (`gradient!`) versions of the
# same call; only one exists in the real file.
@inline function hvp!(res::X, f::F, x::X, v::X) where {F, X}
# Forward-over-reverse: differentiating the reverse-mode gradient along
# direction `v` writes the Hessian-vector product into `res` (the forward
# shadow of the scratch gradient buffer). The primal gradient is discarded
# (DuplicatedNoNeed).
grad = make_zero(x)
Enzyme.autodiff(Forward, gradient_deferred!, Const(Reverse), DuplicatedNoNeed(grad, res), Const(f), Duplicated(x, v))
Enzyme.autodiff(Forward, gradient!, Const(Reverse), DuplicatedNoNeed(grad, res), Const(f), Duplicated(x, v))
return nothing
end

Expand Down Expand Up @@ -1600,7 +1559,7 @@ grad
```
"""
# NOTE(review): this span is a diff hunk — the two `Enzyme.autodiff` lines are
# the removed (`gradient_deferred!`) and added (`gradient!`) versions of the
# same call; only one exists in the real file.
@inline function hvp_and_gradient!(res::X, grad::X, f::F, x::X, v::X) where {F, X}
# Same forward-over-reverse pattern as `hvp!`, but `Duplicated` (instead of
# DuplicatedNoNeed) keeps the primal gradient, so both the Hessian-vector
# product (`res`) and the gradient (`grad`) are produced in one sweep.
Enzyme.autodiff(Forward, gradient_deferred!, Const(Reverse), Duplicated(grad, res), Const(f), Duplicated(x, v))
Enzyme.autodiff(Forward, gradient!, Const(Reverse), Duplicated(grad, res), Const(f), Duplicated(x, v))
return nothing
end

Expand Down
32 changes: 31 additions & 1 deletion src/compiler/interpreter.jl
Original file line number Diff line number Diff line change
Expand Up @@ -212,4 +212,34 @@ let # overload `inlining_policy`
end
end

end # module Interpreter
import Core.Compiler: abstract_call, abstract_call_known, ArgInfo, StmtInfo, AbsIntState, get_max_methods,
CallMeta, Effects, NoCallInfo, widenconst, mapany

# Marker `CallInfo` for `autodiff` calls rewritten by the Enzyme abstract
# interpreter, wrapping the info of the underlying call.
# NOTE(review): not constructed anywhere in the visible hunk — presumably used
# elsewhere in the interpreter; confirm before relying on it.
struct AutodiffCallInfo <: CallInfo
# ...
info::CallInfo
end

function abstract_call_known(interp::EnzymeInterpreter, @nospecialize(f),
arginfo::ArgInfo, si::StmtInfo, sv::AbsIntState,
max_methods::Int = get_max_methods(interp, f, sv))

(; fargs, argtypes) = arginfo

if f === Enzyme.autodiff && length(argtypes) >= 4
if widenconst(argtypes[2]) <: Enzyme.Mode && widenconst(argtypes[3]) <: Enzyme.Annotation && widenconst(argtypes[4]) <: Type{<:Enzyme.Annotation}
arginfo2 = ArgInfo(
fargs isa Nothing ? nothing : [:(Enzyme.autodiff_deferred), fargs[2:end]...],
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

You must actually change the IR, I believe.
A simple version would be to just add an "autodiff" -> "autodiff_deferred" entry to the Overlay Table

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah, I agree.

Base.Experimental.@MethodTable ENZYME_TABLE
Base.Experimental.@overlay ENZYME_TABLE function autodiff(...)
    [... the implementation for the deferred autodiff ...]
end
Core.Compiler.method_table(interp::EnzymeInterpreter) = 
    Core.Compiler.OverlayMethodTable(get_inference_world(interp), ENZYME_TABLE)

This allows EnzymeInterpreter to use the deferred autodiff implementation instead of the usual autodiff implementation automatically. I think that would work, since when a user calls autodiff(...) it dispatches to the usual autodiff implementation, which then kicks off the entire Enzyme compilation.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

so one issue here is that we already receive a method table from GPUCompiler [e.g. for CUDA/etc]

Also I agree methodtable is cleaner, but the current code here does work [as also confirmed by CI]

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I am still surprised that this works... You are presenting conflicting information to the abstract interpreter. the CallInfo says one thing, and the IR says another. This may currently "work", but you might also encounter a situation where the code uses the IR as the source of truth.

That's why in my various attempts at something similar, during inlining I replace the IR with something else.

[Core.Const(Enzyme.autodiff_deferred), argtypes[2:end]...]
)
return abstract_call_known(
interp, Enzyme.autodiff_deferred, arginfo2,
si, sv, max_methods)
end
end
return Base.@invoke abstract_call_known(
interp::AbstractInterpreter, f, arginfo::ArgInfo,
si::StmtInfo, sv::AbsIntState, max_methods::Int)
end

end
8 changes: 8 additions & 0 deletions test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -486,6 +486,14 @@ end

end

@testset "Deferred upgrade" begin
    # Second-order AD: the outer `gradient` call must auto-upgrade the
    # nested `gradient` inside `gradsin` to deferred mode.
    gradsin(x) = gradient(Reverse, sin, x)
    # d/dx cos(x) = -sin(x), evaluated at 3.1.
    outer = Enzyme.gradient(Reverse, gradsin, 3.1)
    @test outer ≈ -sin(3.1)
end

@testset "Simple Complex tests" begin
mul2(z) = 2 * z
square(z) = z * z
Expand Down
Loading