Skip to content

Commit

Permalink
Merge branch 'main' into test_for_Be and fix tests
Browse files Browse the repository at this point in the history
  • Loading branch information
CheukHinHoJerry committed Nov 26, 2023
2 parents f551bf0 + 11661dc commit bab1b27
Show file tree
Hide file tree
Showing 4 changed files with 22 additions and 20 deletions.
6 changes: 4 additions & 2 deletions src/backflowpooling.jl
Original file line number Diff line number Diff line change
@@ -1,11 +1,13 @@
using ACEpsi.AtomicOrbitals: AtomicOrbitalsBasisLayer
using LuxCore: AbstractExplicitLayer
using Random: AbstractRNG
using ChainRulesCore
using ChainRulesCore: NoTangent

using Polynomials4ML: _make_reqfields, @reqfields, POOL, TMP, META
using ObjectPools: acquire!

import ChainRulesCore: rrule

mutable struct BackflowPooling
basis::AtomicOrbitalsBasisLayer
@reqfields
Expand Down Expand Up @@ -86,7 +88,7 @@ function evaluate(pooling::BackflowPooling, ϕnlm::AbstractArray, Σ::AbstractVe
end

# --------------------- connect with ChainRule
function ChainRulesCore.rrule(::typeof(evaluate), pooling::BackflowPooling, ϕnlm, Σ::AbstractVector)
function rrule(::typeof(evaluate), pooling::BackflowPooling, ϕnlm, Σ::AbstractVector)
A = pooling(ϕnlm, Σ)
function pb(∂A)
return NoTangent(), NoTangent(), _pullback_evaluate(∂A, pooling, ϕnlm, Σ), NoTangent()
Expand Down
24 changes: 13 additions & 11 deletions src/bflow3d.jl
Original file line number Diff line number Diff line change
@@ -1,25 +1,24 @@
using Polynomials4ML, Random
using Polynomials4ML, Random, ACEpsi
using Polynomials4ML: OrthPolyBasis1D3T, LinearLayer, PooledSparseProduct, SparseSymmProdDAG, SparseSymmProd, release!
using Polynomials4ML.Utils: gensparse
using LinearAlgebra: qr, I, logabsdet, pinv, mul!, dot , tr, det
import ForwardDiff
using ACEpsi.AtomicOrbitals: make_nlms_spec
using ACEpsi.TD: No_Decomposition, Tucker
using ACEpsi: , , ∅, spins, extspins, Spin, spin2idx, idx2spin
using ACEpsi
using LuxCore: AbstractExplicitLayer
using LuxCore
using Lux
using Lux: Chain, WrappedFunction, BranchLayer
using ChainRulesCore
using ChainRulesCore: NoTangent
# ----------------------------------------
# some quick hacks that we should take care in P4ML later with careful thoughts
using ObjectPools: acquire!
using StrideArrays
using ObjectPools: unwrap
using Lux

using ACEpsi.AtomicOrbitals: make_nlms_spec
using ACEpsi.TD: No_Decomposition, Tucker
using ACEpsi: , , ∅, spins, extspins, Spin, spin2idx, idx2spin
# ----------------- custom layers ------------------
import ChainRulesCore: rrule
import ForwardDiff

# Parameter-free Lux layer that zeroes the entries Φ[j, i] of an nX × nX
# matrix for electron pairs (i, j) whose spin labels in `st.Σ` differ
# (see the apply/rrule methods below, which build the boolean mask
# [st.Σ[i] == st.Σ[j]] and multiply elementwise).
struct MaskLayer <: AbstractExplicitLayer
nX::Int64  # number of electrons; fixes the mask dimensions to nX × nX
end
Expand All @@ -28,16 +27,18 @@ end
T = eltype(Φ)
A::Matrix{Bool} = [st.Σ[i] == st.Σ[j] for j = 1:l.nX, i = 1:l.nX]
val::Matrix{T} = Φ .* A
release!(Φ)
return val, st
end

function ChainRulesCore.rrule(::typeof(LuxCore.apply), l::MaskLayer, Φ, ps, st)
# Reverse-mode rule for applying a `MaskLayer`.
#
# Forward pass: multiply `Φ` elementwise by the boolean same-spin mask
# built from `st.Σ`, returning `(masked, st)` as `Lux.apply` does.
# Pullback: the mask is constant data, so the only nonzero cotangent is
# the masked incoming gradient for `Φ`; the function, layer, parameters,
# and state all receive `NoTangent()`.
function rrule(::typeof(Lux.apply), l::MaskLayer, Φ, ps, st)
    T = eltype(Φ)
    n = l.nX
    # mask[j, i] is true iff electrons i and j carry the same spin label.
    mask::Matrix{Bool} = [st.Σ[i] == st.Σ[j] for j = 1:n, i = 1:n]
    masked::Matrix{T} = Φ .* mask
    # The closure captures only `mask`, never `Φ` itself.
    pullback_mask(dΦ) =
        (NoTangent(), NoTangent(), dΦ[1] .* mask, NoTangent(), NoTangent())
    # NOTE(review): Φ is handed back to its ObjectPools pool here; presumably
    # no caller reads Φ after this rrule returns — confirm against call sites.
    release!(Φ)
    return (masked, st), pullback_mask
end

Expand All @@ -51,7 +52,7 @@ end
return reshape(unwrap(x), r.dims), st
end

function ChainRulesCore.rrule(::typeof(LuxCore.apply), l::myReshapeLayer{N}, X, ps, st) where {N}
function rrule(::typeof(LuxCore.apply), l::myReshapeLayer{N}, X, ps, st) where {N}
val = l(X, ps, st)
function pb(dϕnlm) # dA is of a tuple (dAmat, st), dAmat is of size (Nnuc, Nel, Nnlm)
A = reshape(unwrap(dϕnlm[1]), size(X))
Expand Down Expand Up @@ -80,6 +81,7 @@ function get_spec(nuclei, spec1p)
return spec[:]
end


function displayspec(spec, spec1p)
nicespec = []
for k = 1:length(spec)
Expand Down
5 changes: 3 additions & 2 deletions test/test_atorbbasis.jl
Original file line number Diff line number Diff line change
Expand Up @@ -60,16 +60,17 @@ spec = ACEpsi.AtomicOrbitals.get_spec(aobasis_layer, spec1p)
@info("Test evaluation by manual construction")
using LinearAlgebra: norm
bYlm_ = RYlmBasis(totdegree)
Nnlm = length(aobasis_layer.prodbasis.layers.ϕnlms.basis.spec)
Nnlm = length(aobasis_layer.prodbasis.sparsebasis)
Nnuc = length(aobasis_layer.nuclei)

for I = 1:Nnuc
local Rnl
XI = X .- Ref(aobasis_layer.nuclei[I].rr)
xI = norm.(XI)
Rnl = evaluate(bRnl, xI)
Ylm = evaluate(bYlm_, XI)
for k = 1:Nnlm
nlm = aobasis_layer.prodbasis.layers.ϕnlms.basis.spec[k]
nlm = aobasis_layer.prodbasis.sparsebasis.spec[k]
iR = nlm[1]
iY = nlm[2]

Expand Down
7 changes: 2 additions & 5 deletions test/test_bflow.jl
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,8 @@
using Polynomials4ML, ACEbase, Printf, ACEpsi
using ACEpsi: BFwf1, gradient, laplacian
using LinearAlgebra
using Test
using Printf


function lap_test(f, Δf, X)
Expand Down Expand Up @@ -95,7 +97,6 @@ function _fdtest(F, Σ, dF, x::AbstractVector; h0 = 1.0, verbose=true)
end
##

const , , ∅ = '','',''
Nel = 5
polys = legendre_basis(8)
wf = BFwf1(Nel, polys; ν = 3)
Expand All @@ -106,10 +107,6 @@ wf(X, Σ)
g = gradient(wf, X, Σ)

##

using LinearAlgebra
using Printf

@info("Fd test of gradient w.r.t. X")
@test _fdtest(wf, Σ, g, X)

Expand Down

0 comments on commit bab1b27

Please sign in to comment.