Turing.jl

Chain construction errors when no variables are recorded

Open · phipsgabler opened this issue on Oct 10, 2020 · 3 comments

The example does not make much sense on its own, but it shows that flatten_namedtuple is missing an init argument in its mapreduce call:

julia> @model function test(x)
           # no prior!
           x ~ Bernoulli(0.5)
       end
ModelGen{var"###generator#898",(:x,),(),Tuple{}}(##generator#898, NamedTuple())

julia> c1 = sample(test(false), Prior(), 100) # give me the prior!

Sampling 100%|███████████████████████████████████████████████████████████████████████████████████████████████████████████████████████| Time: 0:00:00
ERROR: ArgumentError: reducing over an empty collection is not allowed
Stacktrace:
 [1] _empty_reduce_error() at ./reduce.jl:212
 [2] mapreduce_empty(::Function, ::Function, ::Type) at ./reduce.jl:246
 [3] mapreduce_empty_iter(::Function, ::Function, ::Tuple{}, ::Base.HasEltype) at ./reduce.jl:254
 [4] mapfoldl_impl at ./tuple.jl:200 [inlined]
 [5] #mapfoldl#186 at ./reduce.jl:72 [inlined]
 [6] mapfoldl at ./reduce.jl:72 [inlined]
 [7] #mapreduce#194 at ./reduce.jl:200 [inlined]
 [8] mapreduce(::Function, ::Function, ::Tuple{}) at ./reduce.jl:200
 [9] flatten_namedtuple(::NamedTuple{(),Tuple{}}) at /home/philipp/.julia/packages/Turing/WGENS/src/inference/Inference.jl:332
 [10] (::Turing.Inference.var"#15#18"{Array{String,1}})(::VarInfo{DynamicPPL.Metadata{Dict{VarName,Int64},Array{Distribution,1},Array{VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64}) at /home/philipp/.julia/packages/Turing/WGENS/src/inference/Inference.jl:315
 [11] iterate at ./generator.jl:47 [inlined]
 [12] _collect(::Array{VarInfo{DynamicPPL.Metadata{Dict{VarName,Int64},Array{Distribution,1},Array{VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64},1}, ::Base.Generator{Array{VarInfo{DynamicPPL.Metadata{Dict{VarName,Int64},Array{Distribution,1},Array{VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64},1},Turing.Inference.var"#15#18"{Array{String,1}}}, ::Base.EltypeUnknown, ::Base.HasShape{1}) at ./array.jl:635
 [13] collect_similar(::Array{VarInfo{DynamicPPL.Metadata{Dict{VarName,Int64},Array{Distribution,1},Array{VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64},1}, ::Base.Generator{Array{VarInfo{DynamicPPL.Metadata{Dict{VarName,Int64},Array{Distribution,1},Array{VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64},1},Turing.Inference.var"#15#18"{Array{String,1}}}) at ./array.jl:564
 [14] map at ./abstractarray.jl:2073 [inlined]
 [15] _params_to_array(::Array{VarInfo{DynamicPPL.Metadata{Dict{VarName,Int64},Array{Distribution,1},Array{VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64},1}) at /home/philipp/.julia/packages/Turing/WGENS/src/inference/Inference.jl:314
 [16] #bundle_samples#31(::Bool, ::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}, ::typeof(AbstractMCMC.bundle_samples), ::Random._GLOBAL_RNG, ::Model{var"###evaluator#897",(:x,),Tuple{Bool},(),ModelGen{var"###generator#898",(:x,),(),Tuple{}}}, ::SampleFromPrior, ::Int64, ::Array{VarInfo{DynamicPPL.Metadata{Dict{VarName,Int64},Array{Distribution,1},Array{VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64},1}, ::Type{Chains}) at /home/philipp/.julia/packages/Turing/WGENS/src/inference/Inference.jl:415
 [17] bundle_samples(::Random._GLOBAL_RNG, ::Model{var"###evaluator#897",(:x,),Tuple{Bool},(),ModelGen{var"###generator#898",(:x,),(),Tuple{}}}, ::SampleFromPrior, ::Int64, ::Array{VarInfo{DynamicPPL.Metadata{Dict{VarName,Int64},Array{Distribution,1},Array{VarName,1},Array{Real,1},Array{Set{DynamicPPL.Selector},1}},Float64},1}, ::Type{Chains}) at /home/philipp/.julia/packages/Turing/WGENS/src/inference/Inference.jl:415
 [18] #mcmcsample#17(::Bool, ::String, ::AbstractMCMC.var"#20#23", ::Type, ::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}, ::typeof(AbstractMCMC.mcmcsample), ::Random._GLOBAL_RNG, ::Model{var"###evaluator#897",(:x,),Tuple{Bool},(),ModelGen{var"###generator#898",(:x,),(),Tuple{}}}, ::SampleFromPrior, ::Int64) at /home/philipp/.julia/packages/AbstractMCMC/iOkTf/src/sample.jl:109
 [19] (::AbstractMCMC.var"#kw##mcmcsample")(::NamedTuple{(:chain_type, :progress),Tuple{UnionAll,Bool}}, ::typeof(AbstractMCMC.mcmcsample), ::Random._GLOBAL_RNG, ::Model{var"###evaluator#897",(:x,),Tuple{Bool},(),ModelGen{var"###generator#898",(:x,),(),Tuple{}}}, ::SampleFromPrior, ::Int64) at ./none:0
 [20] #sample#4(::Type, ::Nothing, ::Bool, ::Base.Iterators.Pairs{Union{},Union{},Tuple{},NamedTuple{(),Tuple{}}}, ::typeof(sample), ::Random._GLOBAL_RNG, ::Model{var"###evaluator#897",(:x,),Tuple{Bool},(),ModelGen{var"###generator#898",(:x,),(),Tuple{}}}, ::Prior, ::Int64) at /home/philipp/.julia/packages/Turing/WGENS/src/inference/Inference.jl:196
 [21] sample at /home/philipp/.julia/packages/Turing/WGENS/src/inference/Inference.jl:195 [inlined]
 [22] #sample#1 at /home/philipp/.julia/packages/Turing/WGENS/src/inference/Inference.jl:154 [inlined]
 [23] sample(::Model{var"###evaluator#897",(:x,),Tuple{Bool},(),ModelGen{var"###generator#898",(:x,),(),Tuple{}}}, ::Prior, ::Int64) at /home/philipp/.julia/packages/Turing/WGENS/src/inference/Inference.jl:154
 [24] top-level scope at REPL[51]:1
 [25] eval(::Module, ::Any) at ./boot.jl:330
 [26] eval_user_input(::Any, ::REPL.REPLBackend) at /buildworker/worker/package_linux64/build/usr/share/julia/stdlib/v1.3/REPL/src/REPL.jl:86
 [27] run_backend(::REPL.REPLBackend) at /home/philipp/.julia/packages/Revise/BqeJF/src/Revise.jl:1184
 [28] top-level scope at REPL[2]:0
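
For reference, this is plain base-Julia behaviour, independent of Turing: mapreduce over an empty collection throws unless an init value is supplied, which is presumably what flatten_namedtuple needs here (minimal illustration only, not the actual Turing code):

julia> mapreduce(identity, vcat, ())                      # what flatten_namedtuple runs into
ERROR: ArgumentError: reducing over an empty collection is not allowed

julia> mapreduce(identity, vcat, (); init = Float64[])    # with a seed, the empty case is fine
0-element Array{Float64,1}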

phipsgabler · Oct 10 '20

I'll take this one.

cpfiffer · Oct 10 '20

After looking at this, it's not clear to me what the proper init would be here. I would prefer to catch this ahead of time and return early if there are no keys in the NamedTuple, but that causes other problems in MCMCChains, because there are then no symbols in the parameter field.
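
Rough sketch of what I mean by the early return (the actual flatten_namedtuple in Inference.jl is more involved, and the empty return value here is just a placeholder):

function flatten_namedtuple(nt::NamedTuple)
    # Hypothetical early return: bail out when no variables were recorded,
    # instead of letting the mapreduce below fail on an empty collection.
    isempty(keys(nt)) && return Float64[]
    # ... existing mapreduce over the entries of nt ...
end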

Is this actually a bug or an edge case we don't need to care about?

cpfiffer · Oct 11 '20

This seems like an edge case to me. I guess we can actually spit out a warning in DynamicPPL for those cases.
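
Something like the following (hypothetical wording; where exactly DynamicPPL would detect the empty case is open):

@warn "The model does not contain any random variables; the resulting chain will have no parameters."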

trappmartin · Oct 14 '20