Remove old example for now
avik-pal committed Jun 14, 2024
1 parent 08b555a commit 2b40022
Showing 7 changed files with 20 additions and 196 deletions.
2 changes: 2 additions & 0 deletions LocalPreferences.toml
@@ -0,0 +1,2 @@
+[LuxTestUtils]
+target_modules = ["LuxNeuralOperators", "Lux", "LuxLib"]
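For orientation only (not part of this commit): LuxTestUtils presumably uses this preference to scope its checks to the listed modules. A minimal, hypothetical sketch of reading such a preference with Preferences.jl, using a placeholder UUID rather than the package's real one:

using Preferences, UUIDs

# Placeholder UUID for illustration; substitute the real package UUID.
const PKG_UUID = UUID("00000000-0000-0000-0000-000000000000")

# Returns the list from LocalPreferences.toml, or an empty list if the key is unset.
target_modules = load_preference(PKG_UUID, "target_modules", String[])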
51 changes: 0 additions & 51 deletions examples/Burgers/main.jl

This file was deleted.

14 changes: 0 additions & 14 deletions examples/Project.toml

This file was deleted.

117 changes: 0 additions & 117 deletions examples/common.jl

This file was deleted.

4 changes: 2 additions & 2 deletions src/transform.jl
@@ -28,7 +28,7 @@ end

 @inline truncate_modes(ft::FourierTransform, x_fft::AbstractArray) = low_pass(ft, x_fft)

-function inverse(ft::FourierTransform, x_fft::AbstractArray{T, N},
-        M::NTuple{N, Int64}) where {T, N}
+function inverse(
+        ft::FourierTransform, x_fft::AbstractArray{T, N}, M::NTuple{N, Int64}) where {T, N}
     return real(irfft(x_fft, first(M), 1:ndims(ft)))
 end
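A minimal sketch (assuming FFTW.jl provides the rfft/irfft used here, and not part of the diff) of the round trip that `inverse` relies on: irfft needs the original length of the transformed axis, which is why `first(M)` is passed through.

using FFTW

x = rand(Float32, 64, 8, 4)              # (spatial, channels, batch)
x_fft = rfft(x, 1:1)                     # real FFT along the spatial dimension
x_rec = irfft(x_fft, size(x, 1), 1:1)    # needs the original length of that axis
x ≈ x_rec                                # true up to floating-point error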
19 changes: 11 additions & 8 deletions test/fno_tests.jl
@@ -1,20 +1,23 @@
@testitem "Fourier Neural Operator" setup=[SharedTestSetup] begin
@testset "BACKEND: $(mode)" for (mode, aType, dev, ongpu) in MODES
rng = get_default_rng(mode)
rng = get_stable_rng()

setups = [
(modes=(16,), chs=(2, 64, 64, 64, 64, 64, 128, 1), x_size=(2, 1024, 5),
y_size=(1, 1024, 5), permuted=Val(false)),
(modes=(16,), chs=(2, 64, 64, 64, 64, 64, 128, 1), x_size=(1024, 2, 5),
y_size=(1024, 1, 5), permuted=Val(true))]
(modes=(16,), chs=(2, 64, 64, 64, 64, 64, 128, 1),
x_size=(2, 1024, 5), y_size=(1, 1024, 5), permuted=Val(false)),
(modes=(16,), chs=(2, 64, 64, 64, 64, 64, 128, 1),
x_size=(1024, 2, 5), y_size=(1024, 1, 5), permuted=Val(true))]

@testset "$(length(setup.modes))D: permuted = $(setup.permuted)" for setup in setups
fno = FourierNeuralOperator(; setup.chs, setup.modes, setup.permuted)

x = rand(rng, Float32, setup.x_size...)
y = rand(rng, Float32, setup.y_size...)
x = rand(rng, Float32, setup.x_size...) |> aType
y = rand(rng, Float32, setup.y_size...) |> aType

ps, st = Lux.setup(rng, fno)
ps, st = Lux.setup(rng, fno) |> dev

@inferred fno(x, ps, st)
@jet fno(x, ps, st)

@test size(first(fno(x, ps, st))) == setup.y_size

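For context, a hedged sketch (not code from this commit) of the pattern the test above exercises for the non-permuted setup, run on the CPU:

using Lux, LuxNeuralOperators, Random

rng = Random.default_rng()
fno = FourierNeuralOperator(;
    chs=(2, 64, 64, 64, 64, 64, 128, 1), modes=(16,), permuted=Val(false))

x = rand(rng, Float32, 2, 1024, 5)   # (channels, spatial, batch) when permuted = Val(false)
ps, st = Lux.setup(rng, fno)
y, _ = fno(x, ps, st)
size(y)                              # the test expects (1, 1024, 5)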
9 changes: 5 additions & 4 deletions test/layers_tests.jl
@@ -1,6 +1,6 @@
@testitem "SpectralConv & SpectralKernel" setup=[SharedTestSetup] begin
@testset "BACKEND: $(mode)" for (mode, aType, dev, ongpu) in MODES
rng = get_default_rng(mode)
rng = get_stable_rng()

opconv = [SpectralConv, SpectralKernel]
setups = [
Expand All @@ -11,7 +11,7 @@
(; m=(10, 10), permuted=Val(true),
x_size=(22, 22, 1, 5), y_size=(22, 22, 64, 5))]

@testset "$(op) $(length(setup.modes))D: permuted = $(setup.permuted)" for setup in setups,
@testset "$(op) $(length(setup.m))D: permuted = $(setup.permuted)" for setup in setups,
op in opconv

p = Lux.__unwrap_val(setup.permuted)
@@ -22,11 +22,12 @@
             l1 = p ? Conv(ntuple(_ -> 1, length(setup.m)), in_chs => first(ch)) :
                  Dense(in_chs => first(ch))
             m = Chain(l1, op(ch, setup.m; setup.permuted))
-            ps, st = Lux.setup(rng, m)
+            ps, st = Lux.setup(rng, m) |> dev

-            x = rand(rng, Float32, setup.x_size...)
+            x = rand(rng, Float32, setup.x_size...) |> aType
             @test size(first(m(x, ps, st))) == setup.y_size
             @inferred m(x, ps, st)
+            @jet m(x, ps, st)

             data = [(x, rand(rng, Float32, setup.y_size...))]
             l2, l1 = train!(m, ps, st, data; epochs=10)
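For context, a hedged sketch (not from this commit) of the permuted 2D case listed in the setups above. The collapsed context defines `ch` and `in_chs`, so the values 64 => 64 and 1 below are assumptions chosen to match the test's x_size and y_size:

using Lux, LuxNeuralOperators, Random

rng = Random.default_rng()
ch = 64 => 64                             # assumed; defined in the collapsed context
in_chs = 1                                # assumed from x_size = (22, 22, 1, 5)

l1 = Conv((1, 1), in_chs => first(ch))    # permuted layout keeps channels before batch
m = Chain(l1, SpectralConv(ch, (10, 10); permuted=Val(true)))
ps, st = Lux.setup(rng, m)

x = rand(rng, Float32, 22, 22, 1, 5)
y, _ = m(x, ps, st)
size(y)                                   # the test expects (22, 22, 64, 5)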
