moose icon indicating copy to clipboard operation
moose copied to clipboard

Multiapp Restoring crashes when number of dofs changes during the timestep

Open GiudGiud opened this issue 3 years ago • 0 comments

Steps to reproduce

See the linked discussions post; reproducing requires parallel execution + a subdomain modification + a block-restricted aux variable

Bug description

Simulation hangs. Very likely the changed number of dofs is the problem.

Impact

Decreased user experience

Discussed in https://github.com/idaholab/moose/discussions/22330

Originally posted by maxnezdyur October 7, 2022 I have two input files below that are a minimal working example of an error I am running into. The error occurs with multiapp restoring. I have a child multiapp that changes the subdomain depending on the current location of the parent multiapp. I have a block-restricted auxvariable and auxkernel that changes based on a CoupledVarThresholdElementSubdomainModifier. This is causing the restoring problem. The restore will hang for hours even with small meshes until I stop it, so I can't get a backtrace. Using a debugger, I found that within MultiApp.C line 627 the restore is called and it seems to get hung up at SystemBase.C at line 94 within that for loop. All that occurs in the terminal is "Restoring Application............................................................................" with the dots ever increasing. Let me know if there are any questions I can answer.

fluid.i

# fluid.i (sub-app): 2D incompressible Navier-Stokes (INSAD) channel whose
# element subdomains are switched at runtime by the 'indicator' aux variable
# transferred in from the parent app.
[Mesh]
  # 100x25 QUAD4 channel on [-1,3] x [0,1].
  [gen]
    type = GeneratedMeshGenerator
    dim = 2
    xmin = -1.0
    xmax = 3.0
    ymin = 0.0
    ymax = 1.0
    nx = 100
    ny = 25
    elem_type = QUAD4
  []
  # Carve the unit box [0,1] x [0,1] out as subdomain 1 so the mesh starts
  # with two blocks (0 and 1).
  [create_subdomain_1]
    type = SubdomainBoundingBoxGenerator
    block_id = 1
    input = gen
    bottom_left = ' 0 0 0'
    top_right = '1 1 1 '
  []

[]
[Problem]
  verbose_multiapps = true
[]

[AuxVariables]
  # Receives 'solid_indicator' from the parent app (see the Transfers block
  # in parent.i); drives the subdomain modifier in [UserObjects].
  [indicator]
    order = FIRST
    family = LAGRANGE
    initial_condition = 0.0
  []
  # Block-restricted aux variable + aux kernel: restricting this to block 0
  # while the subdomain modifier moves elements between blocks is what
  # triggers the reported restore hang (number of dofs changes mid-timestep).
  [this_is_the_cause]
    block = 0
    [AuxKernel]
      type = ConstantAux
      variable = this_is_the_cause
      block = 0
    []
  []

[]

[Variables]
  [velocity]
    family = LAGRANGE_VEC
  []
  [p]
  []
[]
# Incompressible Navier-Stokes (AD) kernels. The momentum kernels are
# restricted to block 0; the mass/PSPG kernels have no block restriction.
[Kernels]
  [mass]
    type = INSADMass
    variable = p
  []
  [mass_pspg]
    type = INSADMassPSPG
    variable = p
  []
  [momentum_convection]
    type = INSADMomentumAdvection
    variable = velocity
    block = 0
  []
  [momentum_time]
    type = INSADMomentumTimeDerivative
    variable = velocity
    block = 0
  []
  [momentum_viscous]
    type = INSADMomentumViscous
    variable = velocity
    viscous_form = traction
    block = 0
  []
  [momentum_pressure]
    type = INSADMomentumPressure
    variable = velocity
    pressure = p
    integrate_p_by_parts = true
    block = 0
  []
  [momentum_supg]
    type = INSADMomentumSUPG
    variable = velocity
    velocity = velocity
    block = 0
  []
[]

[BCs]

  # No-slip on the channel walls.
  [noslip]
    type = ADVectorFunctionDirichletBC
    variable = velocity
    boundary = 'top bottom'
    function_x = 0
    function_y = 0
  []
  # Inlet: uniform x-velocity of 10 on the left boundary.
  [bc]
    type = ADVectorFunctionDirichletBC
    variable = velocity
    boundary = 'left'
    function_x = 10
    function_y = 0
  []
[]
[Materials]
  # Constant viscosity and density.
  [mu]
    type = ADGenericConstantMaterial
    prop_names = 'mu rho'
    prop_values = '2.0e-4 0.001225'
  []
  # Stabilization parameter (tau) material for the SUPG/PSPG kernels.
  [ins_mat]
    type = INSADTauMaterial
    velocity = velocity
    pressure = p
  []
[]

[Executioner]
  type = Transient
  solve_type = 'NEWTON'
  # Run for 100+ timesteps to reach steady state.

  dt = 1e-2
  end_time = 1
  dtmin = 1.0e-7
  # Note: -snes_ksp_ew seems to lead to more nonlinear iterations, which isn't ideal
  # when compute_jacobian() is so expensive for this problem.
  petsc_options = '-snes_converged_reason -ksp_converged_reason'


  petsc_options_iname = '-pc_type -pc_hypre_type'
  petsc_options_value = 'hypre boomeramg'
  # residual_and_jacobian_together = true
  line_search = 'bt'
  nl_rel_tol = 1e-6
  nl_abs_tol = 2e-7
  nl_max_its = 1
  l_max_its = 1

[]

[Outputs]
  [exo]
    type = Exodus
    file_base = test_results/parent
  []
  print_linear_residuals = false
[]

[UserObjects]
  # Moves elements into subdomain 1 wherever 'indicator' exceeds 0.95 and
  # into subdomain 0 otherwise, changing the dof layout each timestep.
  [indicator]
    type = CoupledVarThresholdElementSubdomainModifier
    coupled_var = indicator
    criterion_type = ABOVE
    threshold = 0.95
    subdomain_id = 1
    complement_subdomain_id = 0
    execute_on = "INITIAL TIMESTEP_BEGIN"
    apply_initial_conditions = false
  []
[]

parent.i

# parent.i (parent app): finite-strain dynamic solid mechanics problem that
# drives the fluid.i sub-app via the MultiApps/Transfers blocks below.
# Newmark-beta time-integration parameters (used in [Executioner]).
beta = 0.25
gamma = 0.5

[GlobalParams]
  displacements = 'disp_x disp_y'
[]

[Mesh]
  # 50x150 QUAD4 mesh on [0,0.2] x [0,0.5].
  [gen]
    type = GeneratedMeshGenerator
    dim = 2
    xmin = 0
    ymin = 0
    xmax = 0.2
    ymax = 0.5
    nx = 50
    ny = 150
    elem_type = QUAD4
  []
[]
[Variables]
  [disp_x]
    order = FIRST
  []
  [disp_y]
    order = FIRST
  []
[]

[AuxVariables] # variables that are calculated for output

  # Initialized to 1 everywhere, then zeroed on the 'left right top'
  # boundaries each timestep; transferred to the sub-app's 'indicator'
  # variable (see [Transfers]).
  [solid_indicator]
    order = FIRST
    family = LAGRANGE
    [AuxKernel]
      type = ConstantAux
      variable = solid_indicator
      value = 0.0
      boundary = 'left right top'
      execute_on = 'INITIAL TIMESTEP_END'
    []
    initial_condition = 1.0
  []
[]

# Sets up the dynamic tensor-mechanics kernels/strain calculators with
# finite (incremental) strain.
[Modules/TensorMechanics/DynamicMaster]
  [all]
    # displacements = 'disp_x disp_y'
    add_variables = true
    # new_system = true
    incremental = true
    strain = FINITE
    decomposition_method = EigenSolution
    # hht_alpha = 0.25
  []
[]

[Materials]
  [elastic_tensor]
    type = ComputeIsotropicElasticityTensor
    youngs_modulus = 10000.0
    poissons_ratio = 0.3
    use_displaced_mesh = true
  []
  [stress]
    type = ComputeFiniteStrainElasticStress
  []
  [density]
    type = GenericConstantMaterial
    prop_names = 'density'
    prop_values = '1'
    use_displaced_mesh = true
  []
  [constant_stress]
    type = GenericConstantRankTwoTensor
    tensor_values = '100'
    tensor_name = test_tensor
  []
[]

[BCs]
  # Pin the bottom boundary in both directions.
  [hold_x]
    type = DirichletBC
    boundary = bottom
    variable = disp_x
    value = 0
    use_displaced_mesh = true
  []
  [hold_y]
    type = DirichletBC
    boundary = bottom
    variable = disp_y
    value = 0
    use_displaced_mesh = true
  []
  # Pressure load of 100 on the left boundary.
  [Pressure]
    [push_left]
      boundary = left
      factor = 100
    []
  []
[]

[Preconditioning]
  [SMP]
    type = SMP
    full = true
  []
[]
[Executioner]
  type = Transient
  end_time = 10
  dt = 1e-2
  solve_type = 'NEWTON'
  petsc_options = '-snes_converged_reason -ksp_converged_reason -snes_ksp_ew'
  petsc_options_iname = '-pc_type -pc_factor_mat_solver_type -pc_factor_shift_type -pc_factor_shift_amount'
  petsc_options_value = 'lu       superlu_dist                  NONZERO               1e-15'
  nl_max_its = 2
  l_max_its = 15
  line_search = 'none'
  nl_abs_tol = 1e-5
  nl_rel_tol = 1e-4
  automatic_scaling = true
  fixed_point_max_its = 10
  # Newmark-beta integrator; beta/gamma come from the substitutions at the
  # top of this file.
  [TimeIntegrator]
    type = NewmarkBeta
    beta = ${beta}
    gamma = ${gamma}
  []
[]

[Outputs]
  # exodus = true
  [exo]
    # NOTE: indentation normalized — the sub-block parameters were previously
    # flush with '[exo]', unlike every other sub-block in this file.
    type = Exodus
    file_base = test_results/child
  []
  print_linear_residuals = false
  print_linear_converged_reason = false
[]

[MultiApps]
  # The fluid.i sub-app, run as a transient multiapp at the origin.
  [fluid_domain]
    type = TransientMultiApp
    execute_on = "INITIAL TIMESTEP_END"
    positions = '0 0 0'
    input_files = fluid.i
    use_displaced_mesh = true
  []
[]

[Transfers]
  # Interpolates 'solid_indicator' from this (displaced) mesh onto the
  # sub-app's 'indicator' aux variable, which in turn drives the sub-app's
  # subdomain modifier.
  [push_indicator]
    type = MultiAppGeometricInterpolationTransfer
    # Transfer from the sub-app from this app
    to_multi_app = fluid_domain
    # The name of the variable in this app
    source_variable = solid_indicator
    # The name of the auxiliary variable in the sub-app
    variable = indicator
    displaced_source_mesh = true
    use_displaced_mesh = true
    num_points = 1
  []
[]


GiudGiud avatar Oct 10 '22 15:10 GiudGiud