ERROR: LoadError: MethodError: no method matching realfloat(::Array{ComplexF32, 4})
The function `realfloat` exists, but no method is defined for this combination of argument types.
Closest candidates are:
realfloat(::StridedArray{<:Union{Float32, Float64}})
@ AbstractFFTs ~/.julia/packages/AbstractFFTs/4iQz5/src/definitions.jl:42
realfloat(::AbstractArray{T}) where T<:Real
@ AbstractFFTs ~/.julia/packages/AbstractFFTs/4iQz5/src/definitions.jl:49
Stacktrace:
[1] plan_rfft(x::Array{ComplexF32, 4}, region::UnitRange{Int64}; kws::@Kwargs{})
@ AbstractFFTs ~/.julia/packages/AbstractFFTs/4iQz5/src/definitions.jl:221
[2] rfft(x::Array{ComplexF32, 4}, region::UnitRange{Int64})
@ AbstractFFTs ~/.julia/packages/AbstractFFTs/4iQz5/src/definitions.jl:67
[3] transform(ft::FourierTransform{ComplexF32, Tuple{Int64, Int64}}, x::Array{ComplexF32, 4})
@ NeuralOperators ~/.julia/packages/NeuralOperators/WUDL3/src/transform.jl:25
[4] operator_conv(x::Array{ComplexF32, 4}, tform::FourierTransform{ComplexF32, Tuple{Int64, Int64}}, weights::Base.ReshapedArray{ComplexF32, 3, SubArray{ComplexF32, 1, Vector{ComplexF32}, Tuple{UnitRange{Int64}}, true}, Tuple{}})
@ NeuralOperators ~/.julia/packages/NeuralOperators/WUDL3/src/layers.jl:74
[5] (::OperatorConv{Static.True, FourierTransform{ComplexF32, Tuple{Int64, Int64}}, typeof(glorot_uniform)})(x::Array{ComplexF32, 4}, ps::ComponentVector{ComplexF32, SubArray{ComplexF32, 1, Vector{ComplexF32}, Tuple{UnitRange{Int64}}, true}, Tuple{Axis{(weight = ViewAxis(1:65536, ShapedAxis((64, 64, 16))),)}}}, st::@NamedTuple{})
@ NeuralOperators ~/.julia/packages/NeuralOperators/WUDL3/src/layers.jl:62
[6] apply
@ ~/.julia/packages/LuxCore/q0Mrq/src/LuxCore.jl:155 [inlined]
[7] macro expansion
@ ~/.julia/packages/Lux/lRugP/src/layers/containers.jl:0 [inlined]
[8] applyparallel(layers::@NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::OperatorConv{Static.True, FourierTransform{ComplexF32, Tuple{Int64, Int64}}, typeof(glorot_uniform)}}, connection::NeuralOperators.Fix1{typeof(NeuralOperators.add_act), typeof(NNlib.gelu_tanh)}, x::Array{ComplexF32, 4}, ps::ComponentVector{ComplexF32, SubArray{ComplexF32, 1, Vector{ComplexF32}, Tuple{UnitRange{Int64}}, true}, Tuple{Axis{(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:69696, Axis(weight = ViewAxis(1:65536, ShapedAxis((64, 64, 16))),)))}}}, st::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}})
@ Lux ~/.julia/packages/Lux/lRugP/src/layers/containers.jl:180
[9] (::Parallel{NeuralOperators.Fix1{typeof(NeuralOperators.add_act), typeof(NNlib.gelu_tanh)}, @NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::OperatorConv{Static.True, FourierTransform{ComplexF32, Tuple{Int64, Int64}}, typeof(glorot_uniform)}}, Nothing})(x::Array{ComplexF32, 4}, ps::ComponentVector{ComplexF32, SubArray{ComplexF32, 1, Vector{ComplexF32}, Tuple{UnitRange{Int64}}, true}, Tuple{Axis{(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:69696, Axis(weight = ViewAxis(1:65536, ShapedAxis((64, 64, 16))),)))}}}, st::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}})
@ Lux ~/.julia/packages/Lux/lRugP/src/layers/containers.jl:178
[10] apply(model::Parallel{NeuralOperators.Fix1{typeof(NeuralOperators.add_act), typeof(NNlib.gelu_tanh)}, @NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::OperatorConv{Static.True, FourierTransform{ComplexF32, Tuple{Int64, Int64}}, typeof(glorot_uniform)}}, Nothing}, x::Array{ComplexF32, 4}, ps::ComponentVector{ComplexF32, SubArray{ComplexF32, 1, Vector{ComplexF32}, Tuple{UnitRange{Int64}}, true}, Tuple{Axis{(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:69696, Axis(weight = ViewAxis(1:65536, ShapedAxis((64, 64, 16))),)))}}}, st::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}})
@ LuxCore ~/.julia/packages/LuxCore/q0Mrq/src/LuxCore.jl:155
[11] (::OperatorKernel{Parallel{NeuralOperators.Fix1{typeof(NeuralOperators.add_act), typeof(NNlib.gelu_tanh)}, @NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::OperatorConv{Static.True, FourierTransform{ComplexF32, Tuple{Int64, Int64}}, typeof(glorot_uniform)}}, Nothing}})(x::Array{ComplexF32, 4}, ps::ComponentVector{ComplexF32, SubArray{ComplexF32, 1, Vector{ComplexF32}, Tuple{UnitRange{Int64}}, true}, Tuple{Axis{(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:69696, Axis(weight = ViewAxis(1:65536, ShapedAxis((64, 64, 16))),)))}}}, st::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}})
@ LuxCore ~/.julia/packages/LuxCore/q0Mrq/src/LuxCore.jl:269
[12] apply
@ ~/.julia/packages/LuxCore/q0Mrq/src/LuxCore.jl:155 [inlined]
[13] macro expansion
@ ~/.julia/packages/Lux/lRugP/src/layers/containers.jl:0 [inlined]
[14] applychain(layers::@NamedTuple{layer_1::OperatorKernel{Parallel{NeuralOperators.Fix1{typeof(NeuralOperators.add_act), typeof(NNlib.gelu_tanh)}, @NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::OperatorConv{Static.True, FourierTransform{ComplexF32, Tuple{Int64, Int64}}, typeof(glorot_uniform)}}, Nothing}}}, x::Array{ComplexF32, 4}, ps::ComponentVector{ComplexF32, SubArray{ComplexF32, 1, Vector{ComplexF32}, Tuple{UnitRange{Int64}}, true}, Tuple{Axis{(layer_1 = ViewAxis(1:69696, Axis(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:69696, Axis(weight = ViewAxis(1:65536, ShapedAxis((64, 64, 16))),)))),)}}}, st::@NamedTuple{layer_1::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}}})
@ Lux ~/.julia/packages/Lux/lRugP/src/layers/containers.jl:511
[15] (::Chain{@NamedTuple{layer_1::OperatorKernel{Parallel{NeuralOperators.Fix1{typeof(NeuralOperators.add_act), typeof(NNlib.gelu_tanh)}, @NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::OperatorConv{Static.True, FourierTransform{ComplexF32, Tuple{Int64, Int64}}, typeof(glorot_uniform)}}, Nothing}}}, Nothing})(x::Array{ComplexF32, 4}, ps::ComponentVector{ComplexF32, SubArray{ComplexF32, 1, Vector{ComplexF32}, Tuple{UnitRange{Int64}}, true}, Tuple{Axis{(layer_1 = ViewAxis(1:69696, Axis(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:69696, Axis(weight = ViewAxis(1:65536, ShapedAxis((64, 64, 16))),)))),)}}}, st::@NamedTuple{layer_1::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}}})
@ Lux ~/.julia/packages/Lux/lRugP/src/layers/containers.jl:509
[16] apply
@ ~/.julia/packages/LuxCore/q0Mrq/src/LuxCore.jl:155 [inlined]
[17] macro expansion
@ ~/.julia/packages/Lux/lRugP/src/layers/containers.jl:0 [inlined]
[18] applychain(layers::@NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::Chain{@NamedTuple{layer_1::OperatorKernel{Parallel{NeuralOperators.Fix1{typeof(NeuralOperators.add_act), typeof(NNlib.gelu_tanh)}, @NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::OperatorConv{Static.True, FourierTransform{ComplexF32, Tuple{Int64, Int64}}, typeof(glorot_uniform)}}, Nothing}}}, Nothing}, layer_3::Chain{@NamedTuple{layer_1::Conv{typeof(NNlib.gelu_tanh), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}}, Nothing}}, x::Array{Float32, 4}, ps::ComponentVector{ComplexF32, Vector{ComplexF32}, Tuple{Axis{(layer_1 = ViewAxis(1:192, Axis(weight = ViewAxis(1:128, ShapedAxis((1, 1, 2, 64))), bias = ViewAxis(129:192, Shaped1DAxis((64,))))), layer_2 = ViewAxis(193:69888, Axis(layer_1 = ViewAxis(1:69696, Axis(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:69696, Axis(weight = ViewAxis(1:65536, ShapedAxis((64, 64, 16))),)))),)), layer_3 = ViewAxis(69889:74178, Axis(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:4290, Axis(weight = ViewAxis(1:128, ShapedAxis((1, 1, 64, 2))), bias = ViewAxis(129:130, Shaped1DAxis((2,))))))))}}}, st::@NamedTuple{layer_1::@NamedTuple{}, 
layer_2::@NamedTuple{layer_1::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}}}, layer_3::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}}})
@ Lux ~/.julia/packages/Lux/lRugP/src/layers/containers.jl:511
[19] (::Chain{@NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::Chain{@NamedTuple{layer_1::OperatorKernel{Parallel{NeuralOperators.Fix1{typeof(NeuralOperators.add_act), typeof(NNlib.gelu_tanh)}, @NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::OperatorConv{Static.True, FourierTransform{ComplexF32, Tuple{Int64, Int64}}, typeof(glorot_uniform)}}, Nothing}}}, Nothing}, layer_3::Chain{@NamedTuple{layer_1::Conv{typeof(NNlib.gelu_tanh), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}}, Nothing}}, Nothing})(x::Array{Float32, 4}, ps::ComponentVector{ComplexF32, Vector{ComplexF32}, Tuple{Axis{(layer_1 = ViewAxis(1:192, Axis(weight = ViewAxis(1:128, ShapedAxis((1, 1, 2, 64))), bias = ViewAxis(129:192, Shaped1DAxis((64,))))), layer_2 = ViewAxis(193:69888, Axis(layer_1 = ViewAxis(1:69696, Axis(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:69696, Axis(weight = ViewAxis(1:65536, ShapedAxis((64, 64, 16))),)))),)), layer_3 = ViewAxis(69889:74178, Axis(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:4290, Axis(weight = ViewAxis(1:128, ShapedAxis((1, 1, 64, 2))), bias = ViewAxis(129:130, Shaped1DAxis((2,))))))))}}}, st::@NamedTuple{layer_1::@NamedTuple{}, 
layer_2::@NamedTuple{layer_1::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}}}, layer_3::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}}})
@ Lux ~/.julia/packages/Lux/lRugP/src/layers/containers.jl:509
[20] apply(model::Chain{@NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::Chain{@NamedTuple{layer_1::OperatorKernel{Parallel{NeuralOperators.Fix1{typeof(NeuralOperators.add_act), typeof(NNlib.gelu_tanh)}, @NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::OperatorConv{Static.True, FourierTransform{ComplexF32, Tuple{Int64, Int64}}, typeof(glorot_uniform)}}, Nothing}}}, Nothing}, layer_3::Chain{@NamedTuple{layer_1::Conv{typeof(NNlib.gelu_tanh), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}}, Nothing}}, Nothing}, x::Array{Float32, 4}, ps::ComponentVector{ComplexF32, Vector{ComplexF32}, Tuple{Axis{(layer_1 = ViewAxis(1:192, Axis(weight = ViewAxis(1:128, ShapedAxis((1, 1, 2, 64))), bias = ViewAxis(129:192, Shaped1DAxis((64,))))), layer_2 = ViewAxis(193:69888, Axis(layer_1 = ViewAxis(1:69696, Axis(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:69696, Axis(weight = ViewAxis(1:65536, ShapedAxis((64, 64, 16))),)))),)), layer_3 = ViewAxis(69889:74178, Axis(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:4290, Axis(weight = ViewAxis(1:128, ShapedAxis((1, 1, 64, 2))), bias = ViewAxis(129:130, Shaped1DAxis((2,))))))))}}}, st::@NamedTuple{layer_1::@NamedTuple{}, 
layer_2::@NamedTuple{layer_1::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}}}, layer_3::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}}})
@ LuxCore ~/.julia/packages/LuxCore/q0Mrq/src/LuxCore.jl:155
[21] (::FourierNeuralOperator{Chain{@NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::Chain{@NamedTuple{layer_1::OperatorKernel{Parallel{NeuralOperators.Fix1{typeof(NeuralOperators.add_act), typeof(NNlib.gelu_tanh)}, @NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::OperatorConv{Static.True, FourierTransform{ComplexF32, Tuple{Int64, Int64}}, typeof(glorot_uniform)}}, Nothing}}}, Nothing}, layer_3::Chain{@NamedTuple{layer_1::Conv{typeof(NNlib.gelu_tanh), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}}, Nothing}}, Nothing}})(x::Array{Float32, 4}, ps::ComponentVector{ComplexF32, Vector{ComplexF32}, Tuple{Axis{(layer_1 = ViewAxis(1:192, Axis(weight = ViewAxis(1:128, ShapedAxis((1, 1, 2, 64))), bias = ViewAxis(129:192, Shaped1DAxis((64,))))), layer_2 = ViewAxis(193:69888, Axis(layer_1 = ViewAxis(1:69696, Axis(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:69696, Axis(weight = ViewAxis(1:65536, ShapedAxis((64, 64, 16))),)))),)), layer_3 = ViewAxis(69889:74178, Axis(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:4290, Axis(weight = ViewAxis(1:128, ShapedAxis((1, 1, 64, 2))), bias = ViewAxis(129:130, Shaped1DAxis((2,))))))))}}}, st::@NamedTuple{layer_1::@NamedTuple{}, 
layer_2::@NamedTuple{layer_1::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}}}, layer_3::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}}})
@ LuxCore ~/.julia/packages/LuxCore/q0Mrq/src/LuxCore.jl:269
[22] apply(model::FourierNeuralOperator{Chain{@NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::Chain{@NamedTuple{layer_1::OperatorKernel{Parallel{NeuralOperators.Fix1{typeof(NeuralOperators.add_act), typeof(NNlib.gelu_tanh)}, @NamedTuple{layer_1::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::OperatorConv{Static.True, FourierTransform{ComplexF32, Tuple{Int64, Int64}}, typeof(glorot_uniform)}}, Nothing}}}, Nothing}, layer_3::Chain{@NamedTuple{layer_1::Conv{typeof(NNlib.gelu_tanh), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}, layer_2::Conv{typeof(identity), Int64, Int64, Tuple{Int64, Int64}, Tuple{Int64, Int64}, NTuple{4, Int64}, Tuple{Int64, Int64}, Int64, Nothing, Nothing, Static.True, Static.False}}, Nothing}}, Nothing}}, x::Array{Float32, 4}, ps::ComponentVector{ComplexF32, Vector{ComplexF32}, Tuple{Axis{(layer_1 = ViewAxis(1:192, Axis(weight = ViewAxis(1:128, ShapedAxis((1, 1, 2, 64))), bias = ViewAxis(129:192, Shaped1DAxis((64,))))), layer_2 = ViewAxis(193:69888, Axis(layer_1 = ViewAxis(1:69696, Axis(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:69696, Axis(weight = ViewAxis(1:65536, ShapedAxis((64, 64, 16))),)))),)), layer_3 = ViewAxis(69889:74178, Axis(layer_1 = ViewAxis(1:4160, Axis(weight = ViewAxis(1:4096, ShapedAxis((1, 1, 64, 64))), bias = ViewAxis(4097:4160, Shaped1DAxis((64,))))), layer_2 = ViewAxis(4161:4290, Axis(weight = ViewAxis(1:128, ShapedAxis((1, 1, 64, 2))), bias = ViewAxis(129:130, Shaped1DAxis((2,))))))))}}}, 
st::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{layer_1::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}}}, layer_3::@NamedTuple{layer_1::@NamedTuple{}, layer_2::@NamedTuple{}}})
@ LuxCore ~/.julia/packages/LuxCore/q0Mrq/src/LuxCore.jl:155
[23] top-level scope
@ ~/Dropbox/eScience_projects/DEEPDIEP-repos/test/bug.jl:26
in expression starting at /home/simone/Dropbox/eScience_projects/DEEPDIEP-repos/test/bug.jl:26
If the parameters are not converted, I am not able to use the fno for sensitivity analysis via SciMLSensitivity.jl and I get the following error
Describe the bug 🐞
When the parameters of a `fno` are converted to a SciML structure using `ComponentArray()`, type promotion breaks the model.

Expected behavior
I was expecting to be able to use `ComponentArray()` so that the fno's parameters can be used in `SciMLSensitivity`.

Minimal Reproducible Example 👇
Error & Stacktrace⚠️
Environment (please complete the following information):
Output of `using Pkg; Pkg.status()`, `using Pkg; Pkg.status(; mode = PKGMODE_MANIFEST)`, and `versioninfo()`.

Additional context
If the parameters are not converted, I am not able to use the fno for sensitivity analysis via `SciMLSensitivity.jl` and I get the following error. Is there maybe another way to get a SciMLStructure?