MPI not supported for a class of sparse functions
MFE (minimal failing example):
import devito

# Problem sizes
shape = (12, 13, 14)
nfreq = 5
nrec = 6
space_order = 8

grid = devito.Grid(shape=shape)

# Extra frequency dimension appended to the grid dimensions
freq_dim = devito.DefaultDimension(name="freq_dim", default_value=nfreq)

# Dense function with one slice per frequency
p0re = devito.Function(name="p0re", grid=grid, dimensions=(*grid.dimensions, freq_dim), shape=(*shape, nfreq), space_order=space_order)

# SparseFunction subclass whose sparse (receiver) dimension sits in position 0
class CoordSlowSparseFunction(devito.SparseFunction):
    _sparse_position = 0

recdim = devito.Dimension(name="recdim")
p0recre = CoordSlowSparseFunction(name="p0recre", grid=grid, dimensions=(recdim, freq_dim), shape=(nrec, nfreq), npoint=nrec)

# Interpolate the dense function onto the sparse receivers
interp_eq_re = p0recre.interpolate(expr=p0re)

op = devito.Operator([interp_eq_re], name="interpolator")
op.apply()
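For comparison, here is a minimal sketch of the plain, coordinate-based SparseFunction path, which the issue title suggests is the case that does work under MPI. The function p0, the receiver name rec, and the explicit coordinate assignment are illustrative assumptions added here and are not part of the original MFE.

# Hedged comparison sketch (not in the original report): plain coordinate-based
# SparseFunction interpolation, assumed to be the MPI-supported code path.
p0 = devito.Function(name="p0", grid=grid, space_order=space_order)
rec = devito.SparseFunction(name="rec", grid=grid, npoint=nrec)
rec.coordinates.data[:] = 0.5  # place all points inside the (assumed unit-extent) domain
interp_ref = rec.interpolate(expr=p0)
devito.Operator([interp_ref], name="interpolator_ref").apply()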
Results in:
cvx@cbox-lukedecker-test:~/.julia/dev/JetPackDevitoPSD/test$ DEVITO_MPI=1 mpirun -n 2 python mfe.py
Traceback (most recent call last):
File "/home/cvx/.conda/envs/conda_jl/lib/python3.10/site-packages/devito/ir/stree/algorithms.py", line 189, in preprocess
assert not queue
AssertionError
During handling of the above exception, another exception occurred:
Traceback (most recent call last):
File "/home/cvx/.julia/dev/JetPackDevitoPSD/test/mfe.py", line 21, in <module>
op = devito.Operator([interp_eq_re], name="interpolator")
File "/home/cvx/.conda/envs/conda_jl/lib/python3.10/site-packages/devito/operator/operator.py", line 158, in __new__
op = cls._build(expressions, **kwargs)
File "/home/cvx/.conda/envs/conda_jl/lib/python3.10/site-packages/devito/operator/operator.py", line 180, in _build
irs, byproduct = cls._lower(expressions, profiler=profiler, **kwargs)
File "/home/cvx/.conda/envs/conda_jl/lib/python3.10/site-packages/devito/operator/operator.py", line 258, in _lower
stree = cls._lower_stree(clusters, **kwargs)
File "/home/cvx/.conda/envs/conda_jl/lib/python3.10/site-packages/devito/tools/timing.py", line 76, in __call__
retval = self.func(*args, **kwargs)
File "/home/cvx/.conda/envs/conda_jl/lib/python3.10/site-packages/devito/operator/operator.py", line 405, in _lower_stree
stree = stree_build(clusters, **kwargs)
File "/home/cvx/.conda/envs/conda_jl/lib/python3.10/site-packages/devito/ir/stree/algorithms.py", line 21, in stree_build
clusters = preprocess(clusters, **kwargs)
File "/home/cvx/.conda/envs/conda_jl/lib/python3.10/site-packages/devito/ir/stree/algorithms.py", line 192, in preprocess
raise RuntimeError("Unsupported MPI for the given equations")
RuntimeError: Unsupported MPI for the given equations
(the same traceback is printed by the second MPI rank)
--------------------------------------------------------------------------
Primary job terminated normally, but 1 process returned
a non-zero exit code. Per user-direction, the job has been aborted.
--------------------------------------------------------------------------
--------------------------------------------------------------------------
mpirun detected that one or more processes exited with non-zero status, thus causing
the job to be terminated. The first process to do so was:
Process name: [[59720,1],1]
Exit code: 1
--------------------------------------------------------------------------