Kernels not copied when model cloned
Describe the bug
When a model is cloned, any kernel attached to the graph is not copied. This means that cloned graphs with GP modules don't work.
To Reproduce
This is a unit test I added to the gpregression_test.py file; it currently fails (a sketch of the model built by gen_mxfusion_model follows after the test):
def test_module_clone(self):
    D, X, Y, noise_var, lengthscale, variance = self.gen_data()
    dtype = 'float64'

    # Predict from original model
    m = self.gen_mxfusion_model(dtype, D, noise_var, lengthscale, variance)
    observed = [m.X, m.Y]
    infr = Inference(MAP(model=m, observed=observed), dtype=dtype)
    loss, _ = infr.run(X=mx.nd.array(X, dtype=dtype), Y=mx.nd.array(Y, dtype=dtype), max_iter=1)
    infr2 = TransferInference(ModulePredictionAlgorithm(m, observed=[m.X], target_variables=[m.Y]),
                              infr_params=infr.params, dtype=np.float64)
    infr2.inference_algorithm.model.Y.factor.gp_predict.diagonal_variance = False
    infr2.inference_algorithm.model.Y.factor.gp_predict.noise_free = False
    res = infr2.run(X=mx.nd.array(X, dtype=dtype))[0]
    mu_mf, var_mf = res[0].asnumpy()[0], res[1].asnumpy()[0]

    # Clone model
    cloned_model = m.clone()

    # Predict from cloned model
    observed = [cloned_model.X, cloned_model.Y]
    infr = Inference(MAP(model=cloned_model, observed=observed), dtype=dtype)
    loss, _ = infr.run(X=mx.nd.array(X, dtype=dtype), Y=mx.nd.array(Y, dtype=dtype), max_iter=1)
    infr2_clone = TransferInference(ModulePredictionAlgorithm(cloned_model, observed=[cloned_model.X],
                                                              target_variables=[cloned_model.Y]),
                                    infr_params=infr.params, dtype=np.float64)
    infr2_clone.inference_algorithm.model.Y.factor.gp_predict.diagonal_variance = False
    infr2_clone.inference_algorithm.model.Y.factor.gp_predict.noise_free = False
    res = infr2_clone.run(X=mx.nd.array(X, dtype=dtype))[0]
    mu_mf_clone, var_mf_clone = res[0].asnumpy()[0], res[1].asnumpy()[0]

    assert np.allclose(mu_mf, mu_mf_clone)
    assert np.allclose(var_mf, var_mf_clone)
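For reference, gen_mxfusion_model in that test file builds a GP regression model roughly like the sketch below. This is only my reconstruction to make the report self-contained; the shapes, RBF hyperparameters and exact arguments are assumptions, not a verbatim copy of the helper:

import mxnet as mx
from mxfusion import Model, Variable
from mxfusion.components.variables import PositiveTransformation
from mxfusion.components.distributions.gp.kernels import RBF
from mxfusion.modules.gp_modules import GPRegression

def gen_mxfusion_model(dtype, D, noise_var, lengthscale, variance):
    m = Model()
    m.N = Variable()
    m.X = Variable(shape=(m.N, 3))
    m.noise_var = Variable(transformation=PositiveTransformation(),
                           initial_value=mx.nd.array(noise_var, dtype=dtype))
    kernel = RBF(input_dim=3, ARD=True,
                 variance=mx.nd.array(variance, dtype=dtype),
                 lengthscale=mx.nd.array(lengthscale, dtype=dtype),
                 dtype=dtype)
    # The kernel is attached to the graph through the GP module here;
    # it is this kernel that does not survive m.clone().
    m.Y = GPRegression.define_variable(X=m.X, kernel=kernel, noise_var=m.noise_var,
                                       shape=(m.N, D), dtype=dtype)
    return m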
Error message:
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
../../mxfusion/inference/inference.py:171: in run
    return executor(mx.nd.zeros(1, ctx=self.mxnet_context), *data)
../../../../miniconda3/lib/python3.6/site-packages/mxnet/gluon/block.py:540: in __call__
    out = self.forward(*args)
../../../../miniconda3/lib/python3.6/site-packages/mxnet/gluon/block.py:917: in forward
    return self.hybrid_forward(ndarray, x, *args, **params)
../../mxfusion/inference/inference_alg.py:83: in hybrid_forward
    obj = self._infr_method.compute(F=F, variables=variables)
../../mxfusion/inference/map.py:83: in compute
    logL = self.model.log_pdf(F=F, variables=variables)
../../mxfusion/models/factor_graph.py:234: in log_pdf
    F=F, variables=variables, targets=module_targets)))
../../mxfusion/modules/module.py:321: in log_pdf
    result = alg.compute(F, variables)
_ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
self = <mxfusion.modules.gp_modules.gp_regression.GPRegressionLogPdf object at 0x122e384e0>
F = <module 'mxnet.ndarray' from '/Users/marpulli/miniconda3/lib/python3.6/site-packages/mxnet/ndarray/__init__.py'>
variables = {'271f2355_72ae_48a2_b46a_ff58815c1f07':
[[[0.26455561 0.77423369]
[0.45615033 0.56843395]
[0.0187898 0.6176355 ....06185717 -0.01682754
0.03680881 0.0380233 -0.02780953 0.03818348 -0.04858983]]]
<NDArray 1x10x10 @cpu(0)>, ...}
    def compute(self, F, variables):
        has_mean = self.model.F.factor.has_mean
        X = variables[self.model.X]
        Y = variables[self.model.Y]
        noise_var = variables[self.model.noise_var]
        D = Y.shape[-1]
        N = X.shape[-2]
>       kern = self.model.kernel
E       AttributeError: 'Model' object has no attribute 'kernel'

../../mxfusion/modules/gp_modules/gp_regression.py:49: AttributeError
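The failing access is self.model.kernel inside GPRegressionLogPdf.compute, where self.model is the GP module's internal graph. On the original model that graph carries the kernel, but on the clone it apparently does not, which is why the AttributeError only shows up after clone(). A quick way to see the difference without running inference is sketched below; the attribute names _module_graph and kernel are my assumptions about MXFusion internals based on this traceback, not a public API:

# Hypothetical check of the GP module's internal graph after cloning.
# '_module_graph' and 'kernel' are assumed attribute names, not public API.
original_gp = m.Y.factor
cloned_gp = cloned_model.Y.factor
print(hasattr(original_gp._module_graph, 'kernel'))  # expected: True
print(hasattr(cloned_gp._module_graph, 'kernel'))    # expected: False on current master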
Expected behavior
Cloning a model should copy the kernels attached to its GP modules, so that inference and prediction on the cloned model give the same results as on the original (the assertions at the end of the test above should pass).
Desktop (please complete the following information):
- OS: macOS
- Python version: 3.6
- MXNet version: 1.3
- MXFusion version: master
- MXNet context: cpu
- MXNet dtype: float64
Additional context
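Not a fix, but as a stop-gap the error can be avoided by building a fresh model with the same helper instead of calling clone(), so the kernel is constructed again rather than copied. A rough sketch in the context of the test above:

# Stop-gap sketch (not a fix): rebuild the model instead of cloning it,
# so the kernel is created fresh rather than relying on clone() to copy it.
rebuilt_model = self.gen_mxfusion_model(dtype, D, noise_var, lengthscale, variance)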