Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/multidocs.yml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ jobs:
- uses: actions/checkout@v6
- uses: julia-actions/setup-julia@v2
with:
version: '1'
version: '1.12.2' # set to '1' after 1.12.4 release, see https://github.com/JuliaLang/Pkg.jl/pull/4568
- uses: julia-actions/cache@v2
# Build individual docs
- run: julia --project=GNNGraphs/docs/ -e 'using Pkg; Pkg.instantiate()'
Expand Down
17 changes: 4 additions & 13 deletions GraphNeuralNetworks/docs/src/dev.md
Original file line number Diff line number Diff line change
Expand Up @@ -74,22 +74,13 @@ Each PR should update the version number in the Porject.toml file of each involv
the compat bounds, e.g. GraphNeuralNetworks might require a newer version of GNNGraphs.

## Generate Documentation Locally
For generating the documentation locally
Each package has its own documentation folder, e.g. `GNNGraphs/docs`. To generate the docs locally, run the following command from the root of the repository:

```
cd docs
julia
# example for GNNGraphs
julia --project=GNNGraphs/docs GNNGraphs/docs/make.jl
```
```julia
(@v1.10) pkg> activate .
Activating project at `~/.julia/dev/GraphNeuralNetworks/docs`

(docs) pkg> dev ../ ../GNNGraphs/
Resolving package versions...
No Changes to `~/.julia/dev/GraphNeuralNetworks/docs/Project.toml`
No Changes to `~/.julia/dev/GraphNeuralNetworks/docs/Manifest.toml`

julia> include("make.jl")
```
## Benchmarking

You can benchmark the effect on performance of your commits using the script `perf/perf.jl`.
Expand Down
108 changes: 55 additions & 53 deletions GraphNeuralNetworks/test/layers/conv.jl
Original file line number Diff line number Diff line change
Expand Up @@ -365,35 +365,36 @@ end
end
end

@testitem "CGConv" setup=[TolSnippet, TestModule] begin
using .TestModule

edim = 10
l = CGConv((D_IN, edim) => D_OUT, tanh, residual = false, bias = true)
for g in TEST_GRAPHS
g = GNNGraph(g, edata = rand(Float32, edim, g.num_edges))
@test size(l(g, g.x, g.e)) == (D_OUT, g.num_nodes)
test_gradients(l, g, g.x, g.e, rtol = RTOL_HIGH)
end

# no edge features
l1 = CGConv(D_IN => D_OUT, tanh, residual = false, bias = true)
g1 = TEST_GRAPHS[1]
@test l1(g1, g1.ndata.x) == l1(g1).ndata.x
@test l1(g1, g1.ndata.x, nothing) == l1(g1).ndata.x
end

@testitem "CGConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin
using .TestModule
edim = 10
l = CGConv((D_IN, edim) => D_OUT, tanh, residual = false, bias = true)
for g in TEST_GRAPHS
g.graph isa AbstractSparseMatrix && continue
g = GNNGraph(g, edata = rand(Float32, edim, g.num_edges))
@test size(l(g, g.x, g.e)) == (D_OUT, g.num_nodes)
test_gradients(l, g, g.x, g.e, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false)
end
end
## TODO segfault on julia v1.12
# @testitem "CGConv" setup=[TolSnippet, TestModule] begin
# using .TestModule

# edim = 10
# l = CGConv((D_IN, edim) => D_OUT, tanh, residual = false, bias = true)
# for g in TEST_GRAPHS
# g = GNNGraph(g, edata = rand(Float32, edim, g.num_edges))
# @test size(l(g, g.x, g.e)) == (D_OUT, g.num_nodes)
# test_gradients(l, g, g.x, g.e, rtol = RTOL_HIGH)
# end

# # no edge features
# l1 = CGConv(D_IN => D_OUT, tanh, residual = false, bias = true)
# g1 = TEST_GRAPHS[1]
# @test l1(g1, g1.ndata.x) == l1(g1).ndata.x
# @test l1(g1, g1.ndata.x, nothing) == l1(g1).ndata.x
# end

# @testitem "CGConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin
# using .TestModule
# edim = 10
# l = CGConv((D_IN, edim) => D_OUT, tanh, residual = false, bias = true)
# for g in TEST_GRAPHS
# g.graph isa AbstractSparseMatrix && continue
# g = GNNGraph(g, edata = rand(Float32, edim, g.num_edges))
# @test size(l(g, g.x, g.e)) == (D_OUT, g.num_nodes)
# test_gradients(l, g, g.x, g.e, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false)
# end
# end

@testitem "AGNNConv" setup=[TolSnippet, TestModule] begin
using .TestModule
Expand Down Expand Up @@ -457,30 +458,31 @@ end
end
end

@testitem "GMMConv" setup=[TolSnippet, TestModule] begin
using .TestModule
ein_channel = 10
K = 5
l = GMMConv((D_IN, ein_channel) => D_OUT, K = K)
for g in TEST_GRAPHS
g = GNNGraph(g, edata = rand(Float32, ein_channel, g.num_edges))
y = l(g, g.x, g.e)
test_gradients(l, g, g.x, g.e, rtol = RTOL_HIGH)
end
end

@testitem "GMMConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin
using .TestModule
ein_channel = 10
K = 5
l = GMMConv((D_IN, ein_channel) => D_OUT, K = K)
for g in TEST_GRAPHS
g.graph isa AbstractSparseMatrix && continue
g = GNNGraph(g, edata = rand(Float32, ein_channel, g.num_edges))
y = l(g, g.x, g.e)
test_gradients(l, g, g.x, g.e, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false)
end
end
## TODO segfault on julia v1.12
# @testitem "GMMConv" setup=[TolSnippet, TestModule] begin
# using .TestModule
# ein_channel = 10
# K = 5
# l = GMMConv((D_IN, ein_channel) => D_OUT, K = K)
# for g in TEST_GRAPHS
# g = GNNGraph(g, edata = rand(Float32, ein_channel, g.num_edges))
# y = l(g, g.x, g.e)
# test_gradients(l, g, g.x, g.e, rtol = RTOL_HIGH)
# end
# end

# @testitem "GMMConv GPU" setup=[TolSnippet, TestModule] tags=[:gpu] begin
# using .TestModule
# ein_channel = 10
# K = 5
# l = GMMConv((D_IN, ein_channel) => D_OUT, K = K)
# for g in TEST_GRAPHS
# g.graph isa AbstractSparseMatrix && continue
# g = GNNGraph(g, edata = rand(Float32, ein_channel, g.num_edges))
# y = l(g, g.x, g.e)
# test_gradients(l, g, g.x, g.e, rtol = RTOL_HIGH, test_gpu = true, compare_finite_diff = false)
# end
# end

@testitem "SGConv" setup=[TolSnippet, TestModule] begin
using .TestModule
Expand Down
3 changes: 2 additions & 1 deletion Project.toml
Original file line number Diff line number Diff line change
@@ -1,2 +1,3 @@
[workspace]
projects = ["GNNGraphs", "GNNlib", "GraphNeuralNetworks", "GNNLux"]
projects = ["docs", "GNNGraphs", "GNNlib", "GraphNeuralNetworks", "GNNLux"]

Loading