Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
…rks.jl into nn-lux
  • Loading branch information
rbSparky committed Aug 4, 2024
2 parents 32ee61d + fb0bb1d commit 2585cb6
Showing 1 changed file with 2 additions and 3 deletions.
5 changes: 2 additions & 3 deletions GNNLux/src/layers/conv.jl
Original file line number Diff line number Diff line change
Expand Up @@ -656,7 +656,7 @@ function NNConv(ch::Pair{Int, Int}, nn, σ = identity;
return NNConv(nn, aggr, in_dims, out_dims, use_bias, add_self_loops, use_edge_weight, init_weight, init_bias, σ)
end

function (l::GCNConv)(g, x, edge_weight, ps, st)
function (l::NNConv)(g, x, edge_weight, ps, st)
nn = StatefulLuxLayer{true}(l.nn, ps, st)

# what would be the order of args here?
Expand All @@ -680,8 +680,7 @@ end
# Number of trainable parameters owned directly by the NNConv layer:
# the in_dims×out_dims weight, plus an out_dims bias when `use_bias` is set.
# NOTE(review): this deliberately excludes any parameters of the wrapped `l.nn`
# network — the original author was unsure too ("nn wont affect this right?");
# confirm against how LuxCore.parameterlength is expected to compose for
# container layers.
LuxCore.parameterlength(l::NNConv) = l.use_bias ? l.in_dims * l.out_dims + l.out_dims : l.in_dims * l.out_dims
# Output size of the layer as a 1-tuple of the feature dimension,
# matching the LuxCore.outputsize convention used by the other layers.
LuxCore.outputsize(d::NNConv) = (d.out_dims,)


function Base.show(io::IO, l::NNConv)
function Base.show(io::IO, l::GINConv)
print(io, "NNConv($(l.nn)")
print(io, ", $(l.ϵ)")
l.σ == identity || print(io, ", ", l.σ)
Expand Down

0 comments on commit 2585cb6

Please sign in to comment.