Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add SuiteSparseGraphBLAS backend #222

Draft
wants to merge 2 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions Project.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ Reexport = "189a3867-3050-52da-a836-e630ba90ab69"
SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf"
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
StatsBase = "2913bbd2-ae8a-5f71-8c99-4fb6c76f3a91"
SuiteSparseGraphBLAS = "c2e53296-7b14-11e9-1210-bddfa8111e1d"

[compat]
Adapt = "3"
Expand Down
Binary file added docs/.DS_Store
Binary file not shown.
1 change: 1 addition & 0 deletions src/GNNGraphs/GNNGraphs.jl
Original file line number Diff line number Diff line change
Expand Up @@ -15,6 +15,7 @@ using ChainRulesCore
using LinearAlgebra, Random, Statistics
import MLUtils
using MLUtils: getobs, numobs
using SuiteSparseGraphBLAS

include("gnngraph.jl")
export GNNGraph,
Expand Down
56 changes: 50 additions & 6 deletions src/GNNGraphs/convert.jl
Original file line number Diff line number Diff line change
Expand Up @@ -89,12 +89,12 @@ function to_dense(A::ADJMAT_T, T=nothing; dir=:out, num_nodes=nothing, weighted=
if dir == :in
A = A'
end
if !weighted
A = binarize(A, T)
end
if T != eltype(A)
A = T.(A)
end
if !weighted
A = map(x -> ifelse(x > 0, T(1), T(0)), A)
end
return A, num_nodes, num_edges
end

Expand Down Expand Up @@ -154,15 +154,15 @@ function to_sparse(A::ADJMAT_T, T=nothing; dir=:out, num_nodes=nothing, weighted
if dir == :in
A = A'
end
if !weighted
A = binarize(A, T)
end
if T != eltype(A)
A = T.(A)
end
if !(A isa AbstractSparseMatrix)
A = sparse(A)
end
if !weighted
A = map(x -> ifelse(x > 0, T(1), T(0)), A)
end
return A, num_nodes, num_edges
end

Expand All @@ -187,3 +187,47 @@ function to_sparse(coo::COO_T, T=nothing; dir=:out, num_nodes=nothing, weighted=
end
return A, num_nodes, num_edges
end

# GBMatrix

"""
    to_graphblas(A::ADJMAT_T, T=nothing; dir=:out, num_nodes=nothing, weighted=true)

Convert a square adjacency matrix `A` into a `GBMatrix` with fill value `T(0)`,
returning `(graph, num_nodes, num_edges)`.

- `T`: requested element type; defaults to `eltype(A)`.
- `dir`: `:in` transposes `A` before conversion.
- `num_nodes`: accepted for interface uniformity but always recomputed from `size(A, 1)`.
- `weighted=false` binarizes the matrix (entries become `T(0)`/`T(1)`).
"""
function to_graphblas(A::ADJMAT_T, T=nothing; dir=:out, num_nodes=nothing, weighted=true)
    @assert dir ∈ [:out, :in]
    n = size(A, 1)
    @assert n == size(A, 2)
    Tout = isnothing(T) ? eltype(A) : T
    # Count structural edges before conversion: stored entries for sparse input,
    # nonzeros for dense input.
    nedges = A isa AbstractSparseMatrix ? nnz(A) : count(!=(0), A)
    dir == :in && (A = A')
    B = GBMatrix(A, fill=Tout(0))
    weighted || (B = binarize(B, Tout))
    eltype(B) == Tout || (B = Tout.(B))
    return B, n, nedges
end

"""
    to_graphblas(adj_list::ADJLIST_T, T=nothing; dir=:out, num_nodes=nothing, weighted=true)

Convert an adjacency list into a `GBMatrix` by first building a COO representation,
returning `(graph, num_nodes, num_edges)`.
"""
function to_graphblas(adj_list::ADJLIST_T, T=nothing; dir=:out, num_nodes=nothing, weighted=true)
    # `dir` is consumed here: to_coo orients the (s, t) pairs.
    coo, num_nodes, num_edges = to_coo(adj_list; dir, num_nodes)
    # Forward T and weighted to the COO method. Previously they were dropped,
    # silently ignoring the caller's requested eltype and unweighted conversion.
    return to_graphblas(coo, T; num_nodes, weighted)
end

"""
    to_graphblas(coo::COO_T, T=nothing; dir=:out, num_nodes=nothing, weighted=true)

Build a `GBMatrix` from COO data `(s, t, eweight)`, returning
`(graph, num_nodes, num_edges)`.

- `T`: requested element type; defaults to `eltype(eweight)` (or `eltype(s)` when
  no weights are given).
- `num_nodes`: inferred as `max(maximum(s), maximum(t))` when not supplied.
"""
function to_graphblas(coo::COO_T, T=nothing; dir=:out, num_nodes=nothing, weighted=true)
    s, t, eweight = coo
    Tout = isnothing(T) ? (isnothing(eweight) ? eltype(s) : eltype(eweight)) : T

    # Use unit weights when none are provided or an unweighted graph is requested.
    if isnothing(eweight) || !weighted
        eweight = fill!(similar(s, Tout), 1)
    end

    # NOTE(review): `dir` is accepted but never used in this method — confirm
    # whether :in should swap s and t, as the matrix-based converters transpose.
    n::Int = isnothing(num_nodes) ? max(maximum(s), maximum(t)) : num_nodes
    A = GBMatrix(s, t, eweight, n, n, fill=Tout(0))
    # nnz after construction, since duplicate (s, t) pairs may have been combined.
    nedges::Int = nnz(A)
    A = eltype(A) == Tout ? A : Tout.(A)
    return A, n, nedges
end
4 changes: 3 additions & 1 deletion src/GNNGraphs/gnngraph.jl
Original file line number Diff line number Diff line change
Expand Up @@ -127,7 +127,7 @@ function GNNGraph(data::D;
gdata = (;),
) where D <: Union{COO_T, ADJMAT_T, ADJLIST_T}

@assert graph_type ∈ [:coo, :dense, :sparse] "Invalid graph_type $graph_type requested"
@assert graph_type ∈ [:coo, :dense, :sparse, :graphblas] "Invalid graph_type $graph_type requested"
@assert dir ∈ [:in, :out]

if graph_type == :coo
Expand All @@ -136,6 +136,8 @@ function GNNGraph(data::D;
graph, num_nodes, num_edges = to_dense(data; num_nodes, dir)
elseif graph_type == :sparse
graph, num_nodes, num_edges = to_sparse(data; num_nodes, dir)
elseif graph_type == :graphblas
graph, num_nodes, num_edges = to_graphblas(data; num_nodes, dir)
end

num_graphs = !isnothing(graph_indicator) ? maximum(graph_indicator) : 1
Expand Down
4 changes: 2 additions & 2 deletions src/GNNGraphs/query.jl
Original file line number Diff line number Diff line change
Expand Up @@ -157,7 +157,7 @@ function Graphs.adjacency_matrix(g::GNNGraph{<:ADJMAT_T}, T::DataType=eltype(g);
@assert dir ∈ [:in, :out]
A = g.graph
if !weighted
A = binarize(A)
A = binarize(A, T)
end
A = T != eltype(A) ? T.(A) : A
return dir == :out ? A : A'
Expand Down Expand Up @@ -232,7 +232,7 @@ function Graphs.degree(g::GNNGraph{<:ADJMAT_T}, T::TT=nothing; dir=:out, edge_we
end
A = adjacency_matrix(g)
if edge_weight === false
A = binarize(A)
A = binarize(A, T)
end
A = eltype(A) != T ? T.(A) : A
return dir == :out ? vec(sum(A, dims=2)) :
Expand Down
9 changes: 8 additions & 1 deletion src/GNNGraphs/utils.jl
Original file line number Diff line number Diff line change
Expand Up @@ -172,7 +172,14 @@ function edge_decoding(idx, n; directed=true)
return s, t
end

"""
    binarize(x, T=Bool)

Map every positive entry of `x` to `one(T)` and every other entry to `zero(T)`.
"""
binarize(x) = binarize(x, Bool)
binarize(x, T::Type{Bool}) = x .> 0
binarize(x, T) = T.(x .> 0)

# GBMatrix-specific methods. The Bool method is required to resolve the dispatch
# ambiguity between `binarize(x, ::Type{Bool})` (more specific in the 2nd argument)
# and `binarize(::GBMatrix, T)` (more specific in the 1st). The non-Bool method
# binarizes in GBMatrix space first, then converts the element type.
binarize(x::GBMatrix, T::Type{Bool}) = x .> 0
binarize(x::GBMatrix, T) = T.(binarize(x, Bool))


@non_differentiable binarize(x...)
@non_differentiable edge_encoding(x...)
Expand Down
1 change: 1 addition & 0 deletions src/layers/conv.jl
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,7 @@ function (l::GCNConv)(g::GNNGraph, x::AbstractMatrix{T}, edge_weight::EW=nothing
else
x = propagate(copy_xj, g, +, xj=x)
end
@show x c
x = x .* c'
if Dout >= Din
x = l.weight * x
Expand Down
2 changes: 1 addition & 1 deletion test/runtests.jl
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ tests = [

!CUDA.functional() && @warn("CUDA unavailable, not testing GPU support")

@testset "GraphNeuralNetworks: graph format $graph_type" for graph_type in (:coo, :dense, :sparse)
@testset "GraphNeuralNetworks: graph format $graph_type" for graph_type in (:graphblas, :coo, :dense, :sparse)
global GRAPH_T = graph_type
global TEST_GPU = CUDA.functional() && (GRAPH_T != :sparse)

Expand Down
13 changes: 13 additions & 0 deletions test_graphblas.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
using GraphNeuralNetworks
using SuiteSparseGraphBLAS
using LinearAlgebra, SparseArrays

# Smoke test for the :graphblas backend: build a random graph stored as a
# GBMatrix, check that basic matrix operations stay in GBMatrix land, and run
# a GCN layer forward pass on it.
graph = rand_graph(10, 20, graph_type=:graphblas)
feats = rand(2, 10)
layer = GCNConv(2 => 3)

adj = adjacency_matrix(graph)
@assert adj isa GBMatrix
@assert adj + I isa GBMatrix
@assert Float32.(adj) isa GBMatrix

layer(graph, feats)