NeuralPDE.jl

How can I run with bcs = [], i.e. without any initial or boundary conditions?

Open · Runfa-Zhang opened this issue 1 year ago · 4 comments

I am using the example, but without any initial or boundary conditions. The only change is setting "bcs = []" (a minimal sketch of the setup follows), after which I run

res = Optimization.solve(prob, Adam(0.01); callback = callback, maxiters = 2500)

which fails with:

MethodError: no method matching zero(::Type{Any})
Closest candidates are:
  zero(::Type{Union{Missing, T}}) where T at missing.jl:105
  zero(::Union{Type{P}, P}) where P<:Dates.Period at /buildworker/worker/package_linux64/build/usr/share/julia/stdlib/v1.6/Dates/src/periods.jl:53
  zero(::Union{AbstractAlgebra.Generic.LaurentSeriesFieldElem{T}, AbstractAlgebra.Generic.LaurentSeriesRingElem{T}} where T<:AbstractAlgebra.RingElement) at /home/inspur/.julia/packages/AbstractAlgebra/GyvKf/src/generic/LaurentSeries.jl:466
  ...

Stacktrace:
  [1] zero(#unused#::Type{Any})
    @ Base ./missing.jl:106
  [2] reduce_empty(#unused#::typeof(+), #unused#::Type{Any})
    @ Base ./reduce.jl:311
  [3] reduce_empty(#unused#::typeof(Base.add_sum), #unused#::Type{Any})
    @ Base ./reduce.jl:320
  [4] mapreduce_empty(#unused#::typeof(identity), op::Function, T::Type)
    @ Base ./reduce.jl:343
  [5] reduce_empty(op::Base.MappingRF{typeof(identity), typeof(Base.add_sum)}, #unused#::Type{Any})
    @ Base ./reduce.jl:329
  [6] reduce_empty_iter
    @ ./reduce.jl:355 [inlined]
  [7] mapreduce_empty_iter(f::Function, op::Function, itr::Vector{Any}, ItrEltype::Base.HasEltype)
    @ Base ./reduce.jl:351
  [8] _mapreduce(f::typeof(identity), op::typeof(Base.add_sum), #unused#::IndexLinear, A::Vector{Any})
    @ Base ./reduce.jl:400
  [9] _mapreduce_dim
    @ ./reducedim.jl:318 [inlined]
 [10] #mapreduce#672
    @ ./reducedim.jl:310 [inlined]
 [11] mapreduce
    @ ./reducedim.jl:310 [inlined]
 [12] #_sum#682
    @ ./reducedim.jl:878 [inlined]
 [13] _sum
    @ ./reducedim.jl:878 [inlined]
 [14] #_sum#681
    @ ./reducedim.jl:877 [inlined]
 [15] _sum
    @ ./reducedim.jl:877 [inlined]
 [16] #sum#679
    @ ./reducedim.jl:873 [inlined]
 [17] sum
    @ ./reducedim.jl:873 [inlined]
 [18] #adjoint#541
    @ ~/.julia/packages/Zygote/xGkZ5/src/lib/array.jl:305 [inlined]
 [19] adjoint
    @ ./none:0 [inlined]
 [20] _pullback(__context__::Zygote.Context{false}, 537::typeof(sum), xs::Vector{Any})
    @ Zygote ~/.julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:65
 [21] _pullback
    @ ~/.julia/packages/NeuralPDE/Qjcw1/src/discretize.jl:607 [inlined]
 [22] _pullback(::Zygote.Context{false}, ::NeuralPDE.var"#full_loss_function#326"{NeuralPDE.var"#null_nonadaptive_loss#127", Vector{NeuralPDE.var"#74#75"}, Vector{NeuralPDE.var"#74#75"{NeuralPDE.var"#221#222"{RuntimeGeneratedFunctions.RuntimeGeneratedFunction{(:cord, Symbol("##θ#257"), :phi, :derivative, :integral, :u, :p), NeuralPDE.var"#_RGF_ModTag", NeuralPDE.var"#_RGF_ModTag", (0xfa607619, 0x86dea960, 0xd449cb44, 0xf406ef4d, 0x6958b285)}, NeuralPDE.var"#12#13", NeuralPDE.var"#287#294"{NeuralPDE.var"#287#288#295"{typeof(NeuralPDE.numeric_derivative)}, Dict{Symbol, Int64}, Dict{Symbol, Int64}, GridTraining{Float64}}, typeof(NeuralPDE.numeric_derivative), NeuralPDE.Phi{Chain{NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), Tuple{Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(identity), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}}}}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), NTuple{5, NamedTuple{(), Tuple{}}}}}, Nothing}, CuArray{Float64, 2, CUDA.Mem.DeviceBuffer}}}, NeuralPDE.PINNRepresentation, Bool, Vector{Int64}, Int64, NeuralPDE.Phi{Chain{NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), Tuple{Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(identity), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}}}}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), NTuple{5, NamedTuple{(), Tuple{}}}}}, Nothing, Bool, Nothing}, ::ComponentArrays.ComponentVector{Float64, CuArray{Float64, 1, CUDA.Mem.DeviceBuffer}, Tuple{ComponentArrays.Axis{(layer_1 = ViewAxis(1:100, Axis(weight = ViewAxis(1:75, ShapedAxis((25, 3), NamedTuple())), bias = ViewAxis(76:100, ShapedAxis((25, 1), NamedTuple())))), layer_2 = ViewAxis(101:750, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_3 = ViewAxis(751:1400, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_4 = ViewAxis(1401:2050, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_5 = ViewAxis(2051:2076, Axis(weight = ViewAxis(1:25, ShapedAxis((1, 25), NamedTuple())), bias = ViewAxis(26:26, ShapedAxis((1, 1), NamedTuple())))))}}}, ::SciMLBase.NullParameters)
    @ Zygote ~/.julia/packages/Zygote/xGkZ5/src/compiler/interface2.jl:0
 [23] _apply(::Function, ::Vararg{Any, N} where N)
    @ Core ./boot.jl:804
 [24] adjoint
    @ ~/.julia/packages/Zygote/xGkZ5/src/lib/lib.jl:203 [inlined]
 [25] _pullback
    @ ~/.julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:65 [inlined]
 [26] _pullback
    @ ~/.julia/packages/SciMLBase/3fOCs/src/scimlfunctions.jl:2920 [inlined]
 [27] _pullback(::Zygote.Context{false}, ::OptimizationFunction{true, Optimization.AutoZygote, NeuralPDE.var"#full_loss_function#326"{NeuralPDE.var"#null_nonadaptive_loss#127", Vector{NeuralPDE.var"#74#75"}, Vector{NeuralPDE.var"#74#75"{NeuralPDE.var"#221#222"{RuntimeGeneratedFunctions.RuntimeGeneratedFunction{(:cord, Symbol("##θ#257"), :phi, :derivative, :integral, :u, :p), NeuralPDE.var"#_RGF_ModTag", NeuralPDE.var"#_RGF_ModTag", (0xfa607619, 0x86dea960, 0xd449cb44, 0xf406ef4d, 0x6958b285)}, NeuralPDE.var"#12#13", NeuralPDE.var"#287#294"{NeuralPDE.var"#287#288#295"{typeof(NeuralPDE.numeric_derivative)}, Dict{Symbol, Int64}, Dict{Symbol, Int64}, GridTraining{Float64}}, typeof(NeuralPDE.numeric_derivative), NeuralPDE.Phi{Chain{NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), Tuple{Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(identity), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}}}}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), NTuple{5, NamedTuple{(), Tuple{}}}}}, Nothing}, CuArray{Float64, 2, CUDA.Mem.DeviceBuffer}}}, NeuralPDE.PINNRepresentation, Bool, Vector{Int64}, Int64, NeuralPDE.Phi{Chain{NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), Tuple{Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(identity), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}}}}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), NTuple{5, NamedTuple{(), Tuple{}}}}}, Nothing, Bool, Nothing}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, ::ComponentArrays.ComponentVector{Float64, CuArray{Float64, 1, CUDA.Mem.DeviceBuffer}, Tuple{ComponentArrays.Axis{(layer_1 = ViewAxis(1:100, Axis(weight = ViewAxis(1:75, ShapedAxis((25, 3), NamedTuple())), bias = ViewAxis(76:100, ShapedAxis((25, 1), NamedTuple())))), layer_2 = ViewAxis(101:750, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_3 = ViewAxis(751:1400, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_4 = ViewAxis(1401:2050, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_5 = ViewAxis(2051:2076, Axis(weight = ViewAxis(1:25, ShapedAxis((1, 25), NamedTuple())), bias = ViewAxis(26:26, ShapedAxis((1, 1), NamedTuple())))))}}}, ::SciMLBase.NullParameters)
    @ Zygote ~/.julia/packages/Zygote/xGkZ5/src/compiler/interface2.jl:0
 [28] _apply(::Function, ::Vararg{Any, N} where N)
    @ Core ./boot.jl:804
 [29] adjoint
    @ ~/.julia/packages/Zygote/xGkZ5/src/lib/lib.jl:203 [inlined]
 [30] _pullback
    @ ~/.julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:65 [inlined]
 [31] _pullback
    @ ~/.julia/packages/Optimization/6nIwk/src/function/zygote.jl:30 [inlined]
 [32] _pullback(ctx::Zygote.Context{false}, f::Optimization.var"#124#134"{OptimizationFunction{true, Optimization.AutoZygote, NeuralPDE.var"#full_loss_function#326"{NeuralPDE.var"#null_nonadaptive_loss#127", Vector{NeuralPDE.var"#74#75"}, Vector{NeuralPDE.var"#74#75"{NeuralPDE.var"#221#222"{RuntimeGeneratedFunctions.RuntimeGeneratedFunction{(:cord, Symbol("##θ#257"), :phi, :derivative, :integral, :u, :p), NeuralPDE.var"#_RGF_ModTag", NeuralPDE.var"#_RGF_ModTag", (0xfa607619, 0x86dea960, 0xd449cb44, 0xf406ef4d, 0x6958b285)}, NeuralPDE.var"#12#13", NeuralPDE.var"#287#294"{NeuralPDE.var"#287#288#295"{typeof(NeuralPDE.numeric_derivative)}, Dict{Symbol, Int64}, Dict{Symbol, Int64}, GridTraining{Float64}}, typeof(NeuralPDE.numeric_derivative), NeuralPDE.Phi{Chain{NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), Tuple{Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(identity), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}}}}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), NTuple{5, NamedTuple{(), Tuple{}}}}}, Nothing}, CuArray{Float64, 2, CUDA.Mem.DeviceBuffer}}}, NeuralPDE.PINNRepresentation, Bool, Vector{Int64}, Int64, NeuralPDE.Phi{Chain{NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), Tuple{Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(identity), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}}}}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), NTuple{5, NamedTuple{(), Tuple{}}}}}, Nothing, Bool, Nothing}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}, args::ComponentArrays.ComponentVector{Float64, CuArray{Float64, 1, CUDA.Mem.DeviceBuffer}, Tuple{ComponentArrays.Axis{(layer_1 = ViewAxis(1:100, Axis(weight = ViewAxis(1:75, ShapedAxis((25, 3), NamedTuple())), bias = ViewAxis(76:100, ShapedAxis((25, 1), NamedTuple())))), layer_2 = ViewAxis(101:750, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_3 = ViewAxis(751:1400, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_4 = ViewAxis(1401:2050, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_5 = ViewAxis(2051:2076, Axis(weight = ViewAxis(1:25, ShapedAxis((1, 25), NamedTuple())), bias = ViewAxis(26:26, ShapedAxis((1, 1), NamedTuple())))))}}})
    @ Zygote ~/.julia/packages/Zygote/xGkZ5/src/compiler/interface2.jl:0
 [33] _apply(::Function, ::Vararg{Any, N} where N)
    @ Core ./boot.jl:804
 [34] adjoint
    @ ~/.julia/packages/Zygote/xGkZ5/src/lib/lib.jl:203 [inlined]
 [35] _pullback
    @ ~/.julia/packages/ZygoteRules/AIbCs/src/adjoint.jl:65 [inlined]
 [36] _pullback
    @ ~/.julia/packages/Optimization/6nIwk/src/function/zygote.jl:32 [inlined]
 [37] _pullback(ctx::Zygote.Context{false}, f::Optimization.var"#127#137"{Tuple{}, Optimization.var"#124#134"{OptimizationFunction{true, Optimization.AutoZygote, NeuralPDE.var"#full_loss_function#326"{NeuralPDE.var"#null_nonadaptive_loss#127", Vector{NeuralPDE.var"#74#75"}, Vector{NeuralPDE.var"#74#75"{NeuralPDE.var"#221#222"{RuntimeGeneratedFunctions.RuntimeGeneratedFunction{(:cord, Symbol("##θ#257"), :phi, :derivative, :integral, :u, :p), NeuralPDE.var"#_RGF_ModTag", NeuralPDE.var"#_RGF_ModTag", (0xfa607619, 0x86dea960, 0xd449cb44, 0xf406ef4d, 0x6958b285)}, NeuralPDE.var"#12#13", NeuralPDE.var"#287#294"{NeuralPDE.var"#287#288#295"{typeof(NeuralPDE.numeric_derivative)}, Dict{Symbol, Int64}, Dict{Symbol, Int64}, GridTraining{Float64}}, typeof(NeuralPDE.numeric_derivative), NeuralPDE.Phi{Chain{NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), Tuple{Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(identity), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}}}}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), NTuple{5, NamedTuple{(), Tuple{}}}}}, Nothing}, CuArray{Float64, 2, CUDA.Mem.DeviceBuffer}}}, NeuralPDE.PINNRepresentation, Bool, Vector{Int64}, Int64, NeuralPDE.Phi{Chain{NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), Tuple{Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(identity), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}}}}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), NTuple{5, NamedTuple{(), Tuple{}}}}}, Nothing, Bool, Nothing}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}}, args::ComponentArrays.ComponentVector{Float64, CuArray{Float64, 1, CUDA.Mem.DeviceBuffer}, Tuple{ComponentArrays.Axis{(layer_1 = ViewAxis(1:100, Axis(weight = ViewAxis(1:75, ShapedAxis((25, 3), NamedTuple())), bias = ViewAxis(76:100, ShapedAxis((25, 1), NamedTuple())))), layer_2 = ViewAxis(101:750, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_3 = ViewAxis(751:1400, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_4 = ViewAxis(1401:2050, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_5 = ViewAxis(2051:2076, Axis(weight = ViewAxis(1:25, ShapedAxis((1, 25), NamedTuple())), bias = ViewAxis(26:26, ShapedAxis((1, 1), NamedTuple())))))}}})
    @ Zygote ~/.julia/packages/Zygote/xGkZ5/src/compiler/interface2.jl:0
 [38] pullback(f::Function, cx::Zygote.Context{false}, args::ComponentArrays.ComponentVector{Float64, CuArray{Float64, 1, CUDA.Mem.DeviceBuffer}, Tuple{ComponentArrays.Axis{(layer_1 = ViewAxis(1:100, Axis(weight = ViewAxis(1:75, ShapedAxis((25, 3), NamedTuple())), bias = ViewAxis(76:100, ShapedAxis((25, 1), NamedTuple())))), layer_2 = ViewAxis(101:750, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_3 = ViewAxis(751:1400, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_4 = ViewAxis(1401:2050, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_5 = ViewAxis(2051:2076, Axis(weight = ViewAxis(1:25, ShapedAxis((1, 25), NamedTuple())), bias = ViewAxis(26:26, ShapedAxis((1, 1), NamedTuple())))))}}})
    @ Zygote ~/.julia/packages/Zygote/xGkZ5/src/compiler/interface.jl:44
 [39] pullback
    @ ~/.julia/packages/Zygote/xGkZ5/src/compiler/interface.jl:42 [inlined]
 [40] gradient(f::Function, args::ComponentArrays.ComponentVector{Float64, CuArray{Float64, 1, CUDA.Mem.DeviceBuffer}, Tuple{ComponentArrays.Axis{(layer_1 = ViewAxis(1:100, Axis(weight = ViewAxis(1:75, ShapedAxis((25, 3), NamedTuple())), bias = ViewAxis(76:100, ShapedAxis((25, 1), NamedTuple())))), layer_2 = ViewAxis(101:750, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_3 = ViewAxis(751:1400, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_4 = ViewAxis(1401:2050, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_5 = ViewAxis(2051:2076, Axis(weight = ViewAxis(1:25, ShapedAxis((1, 25), NamedTuple())), bias = ViewAxis(26:26, ShapedAxis((1, 1), NamedTuple())))))}}})
    @ Zygote ~/.julia/packages/Zygote/xGkZ5/src/compiler/interface.jl:96
 [41] (::Optimization.var"#125#135"{Optimization.var"#124#134"{OptimizationFunction{true, Optimization.AutoZygote, NeuralPDE.var"#full_loss_function#326"{NeuralPDE.var"#null_nonadaptive_loss#127", Vector{NeuralPDE.var"#74#75"}, Vector{NeuralPDE.var"#74#75"{NeuralPDE.var"#221#222"{RuntimeGeneratedFunctions.RuntimeGeneratedFunction{(:cord, Symbol("##θ#257"), :phi, :derivative, :integral, :u, :p), NeuralPDE.var"#_RGF_ModTag", NeuralPDE.var"#_RGF_ModTag", (0xfa607619, 0x86dea960, 0xd449cb44, 0xf406ef4d, 0x6958b285)}, NeuralPDE.var"#12#13", NeuralPDE.var"#287#294"{NeuralPDE.var"#287#288#295"{typeof(NeuralPDE.numeric_derivative)}, Dict{Symbol, Int64}, Dict{Symbol, Int64}, GridTraining{Float64}}, typeof(NeuralPDE.numeric_derivative), NeuralPDE.Phi{Chain{NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), Tuple{Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(identity), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}}}}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), NTuple{5, NamedTuple{(), Tuple{}}}}}, Nothing}, CuArray{Float64, 2, CUDA.Mem.DeviceBuffer}}}, NeuralPDE.PINNRepresentation, Bool, Vector{Int64}, Int64, NeuralPDE.Phi{Chain{NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), Tuple{Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(identity), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}}}}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), NTuple{5, NamedTuple{(), Tuple{}}}}}, Nothing, Bool, Nothing}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, SciMLBase.NullParameters}})(::ComponentArrays.ComponentVector{Float64, CuArray{Float64, 1, CUDA.Mem.DeviceBuffer}, Tuple{ComponentArrays.Axis{(layer_1 = ViewAxis(1:100, Axis(weight = ViewAxis(1:75, ShapedAxis((25, 3), NamedTuple())), bias = ViewAxis(76:100, ShapedAxis((25, 1), NamedTuple())))), layer_2 = ViewAxis(101:750, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_3 = ViewAxis(751:1400, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_4 = ViewAxis(1401:2050, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_5 = ViewAxis(2051:2076, Axis(weight = ViewAxis(1:25, ShapedAxis((1, 25), NamedTuple())), bias = ViewAxis(26:26, ShapedAxis((1, 1), NamedTuple())))))}}}, ::ComponentArrays.ComponentVector{Float64, CuArray{Float64, 1, CUDA.Mem.DeviceBuffer}, Tuple{ComponentArrays.Axis{(layer_1 = ViewAxis(1:100, Axis(weight = ViewAxis(1:75, ShapedAxis((25, 3), NamedTuple())), bias = ViewAxis(76:100, ShapedAxis((25, 1), NamedTuple())))), layer_2 = ViewAxis(101:750, Axis(weight = ViewAxis(1:625, 
ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_3 = ViewAxis(751:1400, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_4 = ViewAxis(1401:2050, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_5 = ViewAxis(2051:2076, Axis(weight = ViewAxis(1:25, ShapedAxis((1, 25), NamedTuple())), bias = ViewAxis(26:26, ShapedAxis((1, 1), NamedTuple())))))}}})
    @ Optimization ~/.julia/packages/Optimization/6nIwk/src/function/zygote.jl:32
 [42] macro expansion
    @ ~/.julia/packages/OptimizationOptimisers/XLPqT/src/OptimizationOptimisers.jl:35 [inlined]
 [43] macro expansion
    @ ~/.julia/packages/Optimization/6nIwk/src/utils.jl:35 [inlined]
 [44] __solve(prob::OptimizationProblem{true, OptimizationFunction{true, Optimization.AutoZygote, NeuralPDE.var"#full_loss_function#326"{NeuralPDE.var"#null_nonadaptive_loss#127", Vector{NeuralPDE.var"#74#75"}, Vector{NeuralPDE.var"#74#75"{NeuralPDE.var"#221#222"{RuntimeGeneratedFunctions.RuntimeGeneratedFunction{(:cord, Symbol("##θ#257"), :phi, :derivative, :integral, :u, :p), NeuralPDE.var"#_RGF_ModTag", NeuralPDE.var"#_RGF_ModTag", (0xfa607619, 0x86dea960, 0xd449cb44, 0xf406ef4d, 0x6958b285)}, NeuralPDE.var"#12#13", NeuralPDE.var"#287#294"{NeuralPDE.var"#287#288#295"{typeof(NeuralPDE.numeric_derivative)}, Dict{Symbol, Int64}, Dict{Symbol, Int64}, GridTraining{Float64}}, typeof(NeuralPDE.numeric_derivative), NeuralPDE.Phi{Chain{NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), Tuple{Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(identity), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}}}}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), NTuple{5, NamedTuple{(), Tuple{}}}}}, Nothing}, CuArray{Float64, 2, CUDA.Mem.DeviceBuffer}}}, NeuralPDE.PINNRepresentation, Bool, Vector{Int64}, Int64, NeuralPDE.Phi{Chain{NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), Tuple{Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(NNlib.sigmoid_fast), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}, Dense{true, typeof(identity), typeof(Lux.glorot_uniform), typeof(Lux.zeros32)}}}}, NamedTuple{(:layer_1, :layer_2, :layer_3, :layer_4, :layer_5), NTuple{5, NamedTuple{(), Tuple{}}}}}, Nothing, Bool, Nothing}, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing, Nothing}, ComponentArrays.ComponentVector{Float64, CuArray{Float64, 1, CUDA.Mem.DeviceBuffer}, Tuple{ComponentArrays.Axis{(layer_1 = ViewAxis(1:100, Axis(weight = ViewAxis(1:75, ShapedAxis((25, 3), NamedTuple())), bias = ViewAxis(76:100, ShapedAxis((25, 1), NamedTuple())))), layer_2 = ViewAxis(101:750, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_3 = ViewAxis(751:1400, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_4 = ViewAxis(1401:2050, Axis(weight = ViewAxis(1:625, ShapedAxis((25, 25), NamedTuple())), bias = ViewAxis(626:650, ShapedAxis((25, 1), NamedTuple())))), layer_5 = ViewAxis(2051:2076, Axis(weight = ViewAxis(1:25, ShapedAxis((1, 25), NamedTuple())), bias = ViewAxis(26:26, ShapedAxis((1, 1), NamedTuple())))))}}}, SciMLBase.NullParameters, Nothing, Nothing, Nothing, Nothing, Base.Iterators.Pairs{Union{}, Union{}, Tuple{}, NamedTuple{(), Tuple{}}}}, opt::Adam{Float64}, data::Base.Iterators.Cycle{Tuple{Optimization.NullData}}; maxiters::Int64, callback::Function, progress::Bool, save_best::Bool, kwargs::Base.Iterators.Pairs{Union{}, Union{}, Tuple{}, NamedTuple{(), Tuple{}}})
    @ OptimizationOptimisers ~/.julia/packages/OptimizationOptimisers/XLPqT/src/OptimizationOptimisers.jl:33
 [45] #solve#506
    @ ~/.julia/packages/SciMLBase/3fOCs/src/solve.jl:71 [inlined]
 [46] top-level scope
    @ In[5]:1
 [47] eval
    @ ./boot.jl:360 [inlined]
 [48] include_string(mapexpr::typeof(REPL.softscope), mod::Module, code::String, filename::String)
    @ Base ./loading.jl:1116

Runfa-Zhang · Aug 23 '22

What did you run?

ChrisRackauckas · Aug 31 '22

What did you run?

An additional loss.

Runfa-Zhang · Aug 31 '22
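For reference, NeuralPDE's PhysicsInformedNN accepts an additional_loss keyword with signature additional_loss(phi, θ, p) (see the NeuralPDE docs). A rough sketch of attaching a pure data-fitting loss, reusing the names from the sketch above; the data arrays here are made up for illustration, and a single dependent variable u is assumed so that phi(coords, θ) evaluates the network directly.

# Hypothetical measurements: 100 (x, y) points and "observed" values of u
data_xy = rand(2, 100)   # coordinates, one column per point
data_u  = rand(1, 100)   # observed values of u at those points

function additional_loss(phi, θ, p)
    # Mean squared mismatch between the network prediction and the data
    return sum(abs2, phi(data_xy, θ) .- data_u) / size(data_u, 2)
end

discretization = PhysicsInformedNN(chain, GridTraining(0.05);
                                   additional_loss = additional_loss)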

Use something like "bcs = [u(x) ~ u(x)]" as a placeholder. But we should probably add the ability to use an empty bcs = [].

KirillZubov · Sep 01 '22
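Spelling that workaround out against the sketch above: only the placeholder boundary condition is the actual suggestion; everything else is illustrative.

# Trivially satisfied placeholder so the BC loss term is well defined (and identically zero)
bcs = [u(x, y) ~ u(x, y)]

@named pde_system = PDESystem(eq, bcs, domains, [x, y], [u(x, y)])
prob = discretize(pde_system, discretization)
res = Optimization.solve(prob, Adam(0.01); callback = callback, maxiters = 2500)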

Use something like "bcs = [u(x) ~ u(x)]" as a placeholder. But we should probably add the ability to use an empty bcs = [].

Thanks so much!

Runfa-Zhang · Sep 07 '22