NeuralPDE.jl

Single output with parameter estimation

Open YichengDWu opened this issue 1 year ago • 1 comments

# Minimal reproduction: 1-D wave equation whose wave speed C is an unknown
# PDE parameter to be estimated jointly with the network (param_estim=true).
using NeuralPDE, Lux, Optimization, OptimizationOptimJL
import ModelingToolkit: Interval

# t, x are independent variables; C is an estimable PDE parameter.
@parameters t, x, C
@variables u(..)
Dxx = Differential(x)^2   # second derivative in space
Dtt = Differential(t)^2   # second derivative in time
Dt = Differential(t)      # first derivative in time
# Wave equation: u_tt = C^2 * u_xx
eq  = Dtt(u(t,x)) ~ C^2*Dxx(u(t,x))

# Boundary and initial conditions
bcs = [u(t,0) ~ 0.,# fixed end at x = 0, for all t > 0
       u(t,1) ~ 0.,# fixed end at x = 1, for all t > 0
       u(0,x) ~ x*(1. - x), # initial displacement, for all 0 < x < 1
       Dt(u(0,x)) ~ 0. ] # zero initial velocity, for all 0 < x < 1

# Space and time domains
domains = [t ∈ Interval(0.0,1.0),
           x ∈ Interval(0.0,1.0)]
# C is passed in the parameter slot with an initial guess of 1.0.
@named pde_system = PDESystem(eq,bcs,domains,[t,x],[u(t,x)], [C], defaults=Dict(C => 1.0))

# 2 inputs (t, x) -> 1 output u(t,x); two hidden sigmoid layers of width 16.
chain = Lux.Chain(Dense(2,16,Lux.σ),Dense(16,16,Lux.σ),Dense(16,1))
# With param_estim=true the trainable vector θ carries both the network
# weights (θ.depvar) and the PDE parameters (θ.p) — see the output below.
discretization = NeuralPDE.PhysicsInformedNN(chain,GridTraining(0.1), param_estim=true)
# Inspect the symbolically generated loss expressions instead of solving.
sym_prob = symbolic_discretize(pde_system, discretization)

Running this, `symbolic_discretize` generates the following expression for one of the boundary-condition losses:

    begin
        (C,) = ((var"##θ#313").p[1:1],)
        let (t, x) = (cord[[1], :], cord[[2], :])
            begin
                cord1 = vcat(t, x)
            end
            derivative(phi, u, cord1, [[6.0554544523933395e-6, 0.0]], 1, var"##θ#313") .- 0.0
        end
    end

which is not correct: `derivative` is called with the full parameter object `var"##θ#313"` — which also contains the PDE parameter `C` in its `p` field — instead of only the neural-network (`depvar`) parameters. It should be something like

    begin
        (C, var"##θ#555") = ((var"##θ#313").p[1:1], var"##θ#313".depvar)
        let (t, x) = (cord[[1], :], cord[[2], :])
            begin
                cord1 = vcat(t, x)
            end
            derivative(phi, u, cord1, [[6.0554544523933395e-6, 0.0]], 1, var"##θ#555") .- 0.0
        end
    end

YichengDWu avatar Aug 21 '22 23:08 YichengDWu

#573 is probably related

YichengDWu avatar Aug 22 '22 00:08 YichengDWu