
indexing error for CUDA Tensor

Open x66ccff opened this issue 1 year ago • 4 comments

(base) kent@kent-Super-Server:~/_Project/PTSjl/PTS.jl$ julia --project=.
               _
   _       _ _(_)_     |  Documentation: https://docs.julialang.org
  (_)     | (_) (_)    |
   _ _   _| |_  __ _   |  Type "?" for help, "]?" for Pkg help.
  | | | | | | |/ _` |  |
  | | |_| | | | (_| |  |  Version 1.11.1 (2024-10-16)
 _/ |\__'_|_|_|\__'_|  |  Official https://julialang.org/ release
|__/                   |

julia> using THArrays

julia> A = [1 2 3
       4 5 6
       7 8 9]
3×3 Matrix{Int64}:
 1  2  3
 4  5  6
 7  8  9

julia> t = Tensor(A)
PyTorch.Tensor{Int64, 2}:
 1  2  3
 4  5  6
 7  8  9
[ CPULongType{3,3} ]

julia> t = to(t, CUDA(0))
PyTorch.Tensor{Int64, 2}:
 1  2  3
 4  5  6
 7  8  9
[ CUDALongType{3,3} ]

julia> t[1,:]
ERROR: Expected all tensors to be on the same device, but found at least two devices, cuda:0 and cpu! (when checking argument for argument index in method wrapper_CUDA__index_select)
Exception raised from common_device_check_failure at ../aten/src/ATen/core/adaption.cpp:10 (most recent call first):
frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x6b (0x797ecd46e38b in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libc10.so)
frame #1: c10::detail::torchCheckFail(char const*, char const*, unsigned int, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0xbf (0x797ecd468f3f in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libc10.so)
frame #2: c10::impl::common_device_check_failure(c10::optional<c10::Device>&, at::Tensor const&, char const*, char const*) + 0x422 (0x797eb7607cb2 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #3: <unknown function> + 0x2eca7eb (0x797e676ca7eb in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cuda.so)
frame #4: <unknown function> + 0x2eca8a6 (0x797e676ca8a6 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cuda.so)
frame #5: at::_ops::index_select::redispatch(c10::DispatchKeySet, at::Tensor const&, long, at::Tensor const&) + 0x98 (0x797eb7fe0758 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #6: <unknown function> + 0x3a729ef (0x797eb9c729ef in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #7: <unknown function> + 0x3a72ef9 (0x797eb9c72ef9 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #8: at::_ops::index_select::call(at::Tensor const&, long, at::Tensor const&) + 0x172 (0x797eb8077a72 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #9: at::Tensor::index_select(long, at::Tensor const&) const + 0x43 (0x797ecdb946c5 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/deps/lib/libtorch_capi.so)
frame #10: tensor_method_index_select_int64 + 0xbb (0x797ecdb8fbac in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/deps/lib/libtorch_capi.so)
frame #11: [0x79811c70a453]
frame #12: [0x79811c70a926]
frame #13: [0x79811c70abe1]
frame #14: <unknown function> + 0x70365 (0x798123470365 in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #15: <unknown function> + 0x6fe2d (0x79812346fe2d in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #16: <unknown function> + 0x70f98 (0x798123470f98 in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #17: <unknown function> + 0x71b3e (0x798123471b3e in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #18: <unknown function> + 0x8dfde (0x79812348dfde in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #19: <unknown function> + 0x8e92a (0x79812348e92a in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #20: ijl_toplevel_eval_in + 0xa6 (0x79812348f996 in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #21: <unknown function> + 0x2536d8 (0x797ecf6536d8 in /usr/local/julia-1.11.1/share/julia/compiled/v1.11/REPL/u0gqU_GYsA8.so)

Stacktrace:
 [1] opt_index_select(self::Tensor{Int64, 2}, dim::Int64, index::Int64)
   @ THArrays.THC ~/_Project/PTSjl/PTS.jl/THArrays.jl/src/thc/thc-opt.jl:10
 [2] getindex(::Tensor{Int64, 2}, ::Int64, ::Function)
   @ THArrays ~/_Project/PTSjl/PTS.jl/THArrays.jl/src/tensor.jl:171
 [3] top-level scope
   @ REPL[5]:1

julia> 
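The error shows that getindex on a CUDA tensor reaches opt_index_select (src/thc/thc-opt.jl:10) with an index tensor created on the CPU, while the data lives on cuda:0. Until the index tensor is placed on the tensor's own device, a minimal workaround sketch (untested, and assuming indexing a CPU tensor works, since the failure above is only the cross-device check):

# Workaround sketch (untested): index on the CPU tensor first, then move the
# slice to the GPU; getindex on a CUDA tensor currently builds its index
# tensor on the CPU and trips the device check inside index_select.
t_cpu = Tensor(A)           # CPU tensor built from the Julia matrix, as above
row   = t_cpu[1, :]         # indexing on the CPU tensor, no device mismatch
row_g = to(row, CUDA(0))    # move only the resulting slice to cuda:0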

x66ccff commented on Dec 14, 2024

However, I can run all the example code successfully:

julia> t = Tensor( -rand(3, 3) )
PyTorch.Tensor{Float64, 2}:
-0.4873 -0.9448 -0.3749
-0.0476 -0.7437 -0.5665
-0.4632 -0.6791 -0.4475
[ CPUDoubleType{3,3} ]

julia> abs(t)
PyTorch.Tensor{Float64, 2}:
 0.4873  0.9448  0.3749
 0.0476  0.7437  0.5665
 0.4632  0.6791  0.4475
[ CPUDoubleType{3,3} ]

julia> sin(t)^2 + cos(t)^2
PyTorch.Tensor{Float64, 2}:
 1.0000  1.0000  1.0000
 1.0000  1.0000  1.0000
 1.0000  1.0000  1.0000
[ CPUDoubleType{3,3} ]

julia> t
PyTorch.Tensor{Float64, 2}:
-0.4873 -0.9448 -0.3749
-0.0476 -0.7437 -0.5665
-0.4632 -0.6791 -0.4475
[ CPUDoubleType{3,3} ]

julia> THC.abs!(t)
PyTorch.Tensor{Float64, 2}:
 0.4873  0.9448  0.3749
 0.0476  0.7437  0.5665
 0.4632  0.6791  0.4475
[ CPUDoubleType{3,3} ]

julia> t
PyTorch.Tensor{Float64, 2}:
 0.4873  0.9448  0.3749
 0.0476  0.7437  0.5665
 0.4632  0.6791  0.4475
[ CPUDoubleType{3,3} ]

julia> THAD.gradient(x->sum(sin(x)+x^2), rand(3,3))
(PyTorch.Tensor{Float64, 2}:
 1.1412  1.7706  2.0321
 2.1909  1.0919  2.3021
 1.2763  1.9108  2.2036
[ CPUDoubleType{3,3} ]
,)

julia> 

x66ccff commented on Dec 14, 2024

This fails too, even with the index itself on the GPU:

julia> t
PyTorch.Tensor{Float64, 2}:
 0.4873  0.9448  0.3749
 0.0476  0.7437  0.5665
 0.4632  0.6791  0.4475
[ CUDADoubleType{3,3} ]

julia> index
PyTorch.Tensor{Int64, 0}:
1
[ CUDALongType{} ]

julia> t[index]
ERROR: Expected all tensors to be on the same device, but found at least two devices, cuda:0 and cpu! (when checking argument for argument index in method wrapper_CUDA__index_select)
Exception raised from common_device_check_failure at ../aten/src/ATen/core/adaption.cpp:10 (most recent call first):
frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x6b (0x797ecd46e38b in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libc10.so)
frame #1: c10::detail::torchCheckFail(char const*, char const*, unsigned int, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0xbf (0x797ecd468f3f in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libc10.so)
frame #2: c10::impl::common_device_check_failure(c10::optional<c10::Device>&, at::Tensor const&, char const*, char const*) + 0x422 (0x797eb7607cb2 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #3: <unknown function> + 0x2eca7eb (0x797e676ca7eb in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cuda.so)
frame #4: <unknown function> + 0x2eca8a6 (0x797e676ca8a6 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cuda.so)
frame #5: at::_ops::index_select::redispatch(c10::DispatchKeySet, at::Tensor const&, long, at::Tensor const&) + 0x98 (0x797eb7fe0758 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #6: <unknown function> + 0x3a729ef (0x797eb9c729ef in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #7: <unknown function> + 0x3a72ef9 (0x797eb9c72ef9 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #8: at::_ops::index_select::call(at::Tensor const&, long, at::Tensor const&) + 0x172 (0x797eb8077a72 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #9: at::Tensor::index_select(long, at::Tensor const&) const + 0x43 (0x797ecdb946c5 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/deps/lib/libtorch_capi.so)
frame #10: tensor_method_index_select_int64 + 0xbb (0x797ecdb8fbac in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/deps/lib/libtorch_capi.so)
frame #11: [0x79811c732c13]
frame #12: [0x79811c739e18]
frame #13: [0x79811c73a0c7]
frame #14: <unknown function> + 0x70365 (0x798123470365 in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #15: <unknown function> + 0x6fe2d (0x79812346fe2d in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #16: <unknown function> + 0x70f98 (0x798123470f98 in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #17: <unknown function> + 0x71b3e (0x798123471b3e in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #18: <unknown function> + 0x8dfde (0x79812348dfde in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #19: <unknown function> + 0x8e92a (0x79812348e92a in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #20: ijl_toplevel_eval_in + 0xa6 (0x79812348f996 in /usr/local/julia-1.11.1/bin/../lib/julia/libjulia-internal.so.1.11)
frame #21: <unknown function> + 0x2536d8 (0x797ecf6536d8 in /usr/local/julia-1.11.1/share/julia/compiled/v1.11/REPL/u0gqU_GYsA8.so)

Stacktrace:
 [1] opt_index_select(self::Tensor{Float64, 2}, dim::Int64, index::Int64)
   @ THArrays.THC ~/_Project/PTSjl/PTS.jl/THArrays.jl/src/thc/thc-opt.jl:10
 [2] getindex(t::Tensor{Float64, 2}, I::Tensor{Int64, 0})
   @ THArrays ~/_Project/PTSjl/PTS.jl/THArrays.jl/src/tensor.jl:171
 [3] top-level scope
   @ REPL[34]:1

julia> 
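For comparison, the same pattern with both operands on the CPU should pass the device check, which narrows this down to how the index tensor's device is chosen inside opt_index_select. A hedged sketch (the transcript does not show how index was constructed, so the 0-dim constructor below is an assumption):

# Hedged sketch: the same indexing pattern with everything on the CPU.
# Tensor(1) as a 0-dim Long index is an assumption; the transcript above
# does not show how the CUDA index tensor was built.
t_cpu   = Tensor(-rand(3, 3))   # CPU tensor, as in the earlier examples
idx_cpu = Tensor(1)             # hypothetical 0-dim CPU index tensor
t_cpu[idx_cpu]                  # both operands on the CPU, so the device check passes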

x66ccff commented on Dec 14, 2024

Here is my libtorch version: libtorch-cxx11-abi-shared-with-deps-2.1.0+cu121.zip

wget https://download.pytorch.org/libtorch/cu121/libtorch-cxx11-abi-shared-with-deps-2.1.0%2Bcu121.zip

Driver Version: 560.35.03 CUDA Version: 12.6

nvcc:

Cuda compilation tools, release 12.0, V12.0.140 Build cuda_12.0.r12.0/compiler.32267302_0

x66ccff commented on Dec 14, 2024

Also, it is not possible to concatenate two CUDA tensors (even just putting them in a Vector fails), whereas PyTorch can do this.

julia> A
PyTorch.Tensor{Float64, 2}:
 0.9373  0.6318
 0.4080  0.5238
 0.8977  0.2095
[ CUDADoubleType{3,2} ]

julia> [A,A]
2-element Vector{Tensor{Float64, 2}}:
 Error showing value of type Vector{Tensor{Float64, 2}}:
ERROR: Expected all tensors to be on the same device, but found at least two devices, cuda:0 and cpu! (when checking argument for argument index in method wrapper_CUDA__index_select)
Exception raised from common_device_check_failure at ../aten/src/ATen/core/adaption.cpp:10 (most recent call first):
frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x6b (0x7c520ce6e38b in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libc10.so)
frame #1: c10::detail::torchCheckFail(char const*, char const*, unsigned int, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0xbf (0x7c520ce68f3f in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libc10.so)
frame #2: c10::impl::common_device_check_failure(c10::optional<c10::Device>&, at::Tensor const&, char const*, char const*) + 0x422 (0x7c51f7007cb2 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #3: <unknown function> + 0x2eca7eb (0x7c51a70ca7eb in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cuda.so)
frame #4: <unknown function> + 0x2eca8a6 (0x7c51a70ca8a6 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cuda.so)
frame #5: at::_ops::index_select::redispatch(c10::DispatchKeySet, at::Tensor const&, long, at::Tensor const&) + 0x98 (0x7c51f79e0758 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #6: <unknown function> + 0x3a729ef (0x7c51f96729ef in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #7: <unknown function> + 0x3a72ef9 (0x7c51f9672ef9 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #8: at::_ops::index_select::call(at::Tensor const&, long, at::Tensor const&) + 0x172 (0x7c51f7a77a72 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #9: at::Tensor::index_select(long, at::Tensor const&) const + 0x43 (0x7c520d5946c5 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/deps/lib/libtorch_capi.so)
frame #10: tensor_method_index_select_int64 + 0xbb (0x7c520d58fbac in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/deps/lib/libtorch_capi.so)
frame #11: [0x7c545b748a73]
frame #12: [0x7c545b7490a7]
frame #13: [0x7c545b74947b]
frame #14: [0x7c545b749514]
frame #15: <unknown function> + 0x3de7d (0x7c5210a3de7d in /usr/local/julia-1.11.1/share/julia/compiled/v1.11/StyledStrings/UcVoM_GYsA8.so)

Stacktrace:
  [1] opt_index_select(self::Tensor{Float64, 2}, dim::Int64, index::Int64)
    @ THArrays.THC ~/_Project/PTSjl/PTS.jl/THArrays.jl/src/thc/thc-opt.jl:10
  [2] getindex(::Tensor{Float64, 2}, ::Int64, ::Int64)
    @ THArrays ~/_Project/PTSjl/PTS.jl/THArrays.jl/src/tensor.jl:171
  [3] isassigned(::Tensor{Float64, 2}, ::Int64, ::Int64)
    @ Base ./multidimensional.jl:1612
  [4] alignment(io::IOContext{IOBuffer}, X::AbstractVecOrMat, rows::Vector{Int64}, cols::Vector{Int64}, cols_if_complete::Int64, cols_otherwise::Int64, sep::Int64, ncols::Int64)
    @ Base ./arrayshow.jl:68
  [5] _print_matrix(io::IOContext{…}, X::AbstractVecOrMat, pre::String, sep::String, post::String, hdots::String, vdots::String, ddots::String, hmod::Int64, vmod::Int64, rowsA::UnitRange{…}, colsA::UnitRange{…})
    @ Base ./arrayshow.jl:207
  [6] print_matrix(io::IOContext{IOBuffer}, X::Tensor{Float64, 2}, pre::String, sep::String, post::String, hdots::String, vdots::String, ddots::String, hmod::Int64, vmod::Int64)
    @ Base ./arrayshow.jl:171
  [7] print_matrix
    @ ./arrayshow.jl:171 [inlined]
  [8] print_array
    @ ./arrayshow.jl:358 [inlined]
  [9] show(io::IOContext{IOBuffer}, ::MIME{Symbol("text/plain")}, X::Tensor{Float64, 2})
    @ Base ./arrayshow.jl:399
 [10] show(io::IOContext{IOBuffer}, m::String, x::Tensor{Float64, 2})
    @ Base.Multimedia ./multimedia.jl:123
 [11] sprint(::Function, ::String, ::Vararg{Any}; context::IOContext{Base.TTY}, sizehint::Int64)
    @ Base ./strings/io.jl:112
 [12] sprint
    @ ./strings/io.jl:107 [inlined]
 [13] print_matrix_row(io::IOContext{Base.TTY}, X::AbstractVecOrMat, A::Vector{Tuple{Int64, Int64}}, i::Int64, cols::Vector{Int64}, sep::String, idxlast::Int64)
    @ Base ./arrayshow.jl:108
 [14] _print_matrix(io::IOContext{…}, X::AbstractVecOrMat, pre::String, sep::String, post::String, hdots::String, vdots::String, ddots::String, hmod::Int64, vmod::Int64, rowsA::UnitRange{…}, colsA::UnitRange{…})
    @ Base ./arrayshow.jl:213
 [15] print_matrix(io::IOContext{Base.TTY}, X::Vector{Tensor{Float64, 2}}, pre::String, sep::String, post::String, hdots::String, vdots::String, ddots::String, hmod::Int64, vmod::Int64)
    @ Base ./arrayshow.jl:171
 [16] print_matrix
    @ ./arrayshow.jl:171 [inlined]
 [17] print_array
    @ ./arrayshow.jl:358 [inlined]
 [18] show(io::IOContext{Base.TTY}, ::MIME{Symbol("text/plain")}, X::Vector{Tensor{Float64, 2}})
    @ Base ./arrayshow.jl:399
 [19] (::REPL.var"#68#69"{REPL.REPLDisplay{REPL.LineEditREPL}, MIME{Symbol("text/plain")}, Base.RefValue{Any}})(io::Any)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:367
 [20] with_repl_linfo(f::Any, repl::REPL.LineEditREPL)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:661
 [21] display(d::REPL.REPLDisplay, mime::MIME{Symbol("text/plain")}, x::Any)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:353
 [22] display
    @ /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:372 [inlined]
 [23] display(x::Any)
    @ Base.Multimedia ./multimedia.jl:340
 [24] #invokelatest#2
    @ ./essentials.jl:1055 [inlined]
 [25] invokelatest
    @ ./essentials.jl:1052 [inlined]
 [26] print_response(errio::IO, response::Any, show_value::Bool, have_color::Bool, specialdisplay::Union{Nothing, AbstractDisplay})
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:409
 [27] (::REPL.var"#70#71"{REPL.LineEditREPL, Pair{Any, Bool}, Bool, Bool})(io::Any)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:378
 [28] with_repl_linfo(f::Any, repl::REPL.LineEditREPL)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:661
 [29] print_response(repl::REPL.AbstractREPL, response::Any, show_value::Bool, have_color::Bool)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:376
 [30] (::REPL.var"#do_respond#96"{Bool, Bool, REPL.var"#112#130"{…}, REPL.LineEditREPL, REPL.LineEdit.Prompt})(s::REPL.LineEdit.MIState, buf::Any, ok::Bool)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:1003
 [31] #invokelatest#2
    @ ./essentials.jl:1055 [inlined]
 [32] invokelatest
    @ ./essentials.jl:1052 [inlined]
 [33] run_interface(terminal::REPL.Terminals.TextTerminal, m::REPL.LineEdit.ModalInterface, s::REPL.LineEdit.MIState)
    @ REPL.LineEdit /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/LineEdit.jl:2755
 [34] run_frontend(repl::REPL.LineEditREPL, backend::REPL.REPLBackendRef)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:1471
 [35] (::REPL.var"#75#81"{REPL.LineEditREPL, REPL.REPLBackendRef})()
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:480
Some type information was truncated. Use `show(err)` to see complete types.

julia> 
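Judging from the stack trace, the Vector [A, A] itself is built fine; the error fires when the REPL tries to display it, because show walks the CUDA tensors through isassigned/getindex and hits the same CPU-index problem. A hedged way to confirm that in the same session:

# Hedged sketch: suppress display to skip the show -> isassigned -> getindex path.
v = [A, A];        # trailing semicolon, so the REPL does not print the CUDA tensors
length(v)          # expected to return 2: the Vector holds both CUDA tensors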

Trying THC.cat:

julia> begin lll = THC.cat([A,A],dims=1); end; u=1
1

julia> lll
2-element Vector{Tensor{Float64, 2}}:
 Error showing value of type Vector{Tensor{Float64, 2}}:
ERROR: Expected all tensors to be on the same device, but found at least two devices, cuda:0 and cpu! (when checking argument for argument index in method wrapper_CUDA__index_select)
Exception raised from common_device_check_failure at ../aten/src/ATen/core/adaption.cpp:10 (most recent call first):
frame #0: c10::Error::Error(c10::SourceLocation, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> >) + 0x6b (0x7c520ce6e38b in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libc10.so)
frame #1: c10::detail::torchCheckFail(char const*, char const*, unsigned int, std::__cxx11::basic_string<char, std::char_traits<char>, std::allocator<char> > const&) + 0xbf (0x7c520ce68f3f in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libc10.so)
frame #2: c10::impl::common_device_check_failure(c10::optional<c10::Device>&, at::Tensor const&, char const*, char const*) + 0x422 (0x7c51f7007cb2 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #3: <unknown function> + 0x2eca7eb (0x7c51a70ca7eb in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cuda.so)
frame #4: <unknown function> + 0x2eca8a6 (0x7c51a70ca8a6 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cuda.so)
frame #5: at::_ops::index_select::redispatch(c10::DispatchKeySet, at::Tensor const&, long, at::Tensor const&) + 0x98 (0x7c51f79e0758 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #6: <unknown function> + 0x3a729ef (0x7c51f96729ef in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #7: <unknown function> + 0x3a72ef9 (0x7c51f9672ef9 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #8: at::_ops::index_select::call(at::Tensor const&, long, at::Tensor const&) + 0x172 (0x7c51f7a77a72 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/csrc/libtorch/lib/libtorch_cpu.so)
frame #9: at::Tensor::index_select(long, at::Tensor const&) const + 0x43 (0x7c520d5946c5 in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/deps/lib/libtorch_capi.so)
frame #10: tensor_method_index_select_int64 + 0xbb (0x7c520d58fbac in /home/kent/_Project/PTSjl/PTS.jl/THArrays.jl/deps/lib/libtorch_capi.so)
frame #11: [0x7c545b748a73]
frame #12: [0x7c545b7490a7]
frame #13: [0x7c545b74947b]
frame #14: [0x7c545b749514]
frame #15: <unknown function> + 0x3de7d (0x7c5210a3de7d in /usr/local/julia-1.11.1/share/julia/compiled/v1.11/StyledStrings/UcVoM_GYsA8.so)

Stacktrace:
  [1] opt_index_select(self::Tensor{Float64, 2}, dim::Int64, index::Int64)
    @ THArrays.THC ~/_Project/PTSjl/PTS.jl/THArrays.jl/src/thc/thc-opt.jl:10
  [2] getindex(::Tensor{Float64, 2}, ::Int64, ::Int64)
    @ THArrays ~/_Project/PTSjl/PTS.jl/THArrays.jl/src/tensor.jl:171
  [3] isassigned(::Tensor{Float64, 2}, ::Int64, ::Int64)
    @ Base ./multidimensional.jl:1612
  [4] alignment(io::IOContext{IOBuffer}, X::AbstractVecOrMat, rows::Vector{Int64}, cols::Vector{Int64}, cols_if_complete::Int64, cols_otherwise::Int64, sep::Int64, ncols::Int64)
    @ Base ./arrayshow.jl:68
  [5] _print_matrix(io::IOContext{…}, X::AbstractVecOrMat, pre::String, sep::String, post::String, hdots::String, vdots::String, ddots::String, hmod::Int64, vmod::Int64, rowsA::UnitRange{…}, colsA::UnitRange{…})
    @ Base ./arrayshow.jl:207
  [6] print_matrix(io::IOContext{IOBuffer}, X::Tensor{Float64, 2}, pre::String, sep::String, post::String, hdots::String, vdots::String, ddots::String, hmod::Int64, vmod::Int64)
    @ Base ./arrayshow.jl:171
  [7] print_matrix
    @ ./arrayshow.jl:171 [inlined]
  [8] print_array
    @ ./arrayshow.jl:358 [inlined]
  [9] show(io::IOContext{IOBuffer}, ::MIME{Symbol("text/plain")}, X::Tensor{Float64, 2})
    @ Base ./arrayshow.jl:399
 [10] show(io::IOContext{IOBuffer}, m::String, x::Tensor{Float64, 2})
    @ Base.Multimedia ./multimedia.jl:123
 [11] sprint(::Function, ::String, ::Vararg{Any}; context::IOContext{Base.TTY}, sizehint::Int64)
    @ Base ./strings/io.jl:112
 [12] sprint
    @ ./strings/io.jl:107 [inlined]
 [13] print_matrix_row(io::IOContext{Base.TTY}, X::AbstractVecOrMat, A::Vector{Tuple{Int64, Int64}}, i::Int64, cols::Vector{Int64}, sep::String, idxlast::Int64)
    @ Base ./arrayshow.jl:108
 [14] _print_matrix(io::IOContext{…}, X::AbstractVecOrMat, pre::String, sep::String, post::String, hdots::String, vdots::String, ddots::String, hmod::Int64, vmod::Int64, rowsA::UnitRange{…}, colsA::UnitRange{…})
    @ Base ./arrayshow.jl:213
 [15] print_matrix(io::IOContext{Base.TTY}, X::Vector{Tensor{Float64, 2}}, pre::String, sep::String, post::String, hdots::String, vdots::String, ddots::String, hmod::Int64, vmod::Int64)
    @ Base ./arrayshow.jl:171
 [16] print_matrix
    @ ./arrayshow.jl:171 [inlined]
 [17] print_array
    @ ./arrayshow.jl:358 [inlined]
 [18] show(io::IOContext{Base.TTY}, ::MIME{Symbol("text/plain")}, X::Vector{Tensor{Float64, 2}})
    @ Base ./arrayshow.jl:399
 [19] (::REPL.var"#68#69"{REPL.REPLDisplay{REPL.LineEditREPL}, MIME{Symbol("text/plain")}, Base.RefValue{Any}})(io::Any)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:367
 [20] with_repl_linfo(f::Any, repl::REPL.LineEditREPL)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:661
 [21] display(d::REPL.REPLDisplay, mime::MIME{Symbol("text/plain")}, x::Any)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:353
 [22] display
    @ /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:372 [inlined]
 [23] display(x::Any)
    @ Base.Multimedia ./multimedia.jl:340
 [24] #invokelatest#2
    @ ./essentials.jl:1055 [inlined]
 [25] invokelatest
    @ ./essentials.jl:1052 [inlined]
 [26] print_response(errio::IO, response::Any, show_value::Bool, have_color::Bool, specialdisplay::Union{Nothing, AbstractDisplay})
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:409
 [27] (::REPL.var"#70#71"{REPL.LineEditREPL, Pair{Any, Bool}, Bool, Bool})(io::Any)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:378
 [28] with_repl_linfo(f::Any, repl::REPL.LineEditREPL)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:661
 [29] print_response(repl::REPL.AbstractREPL, response::Any, show_value::Bool, have_color::Bool)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:376
 [30] (::REPL.var"#do_respond#96"{Bool, Bool, REPL.var"#112#130"{…}, REPL.LineEditREPL, REPL.LineEdit.Prompt})(s::REPL.LineEdit.MIState, buf::Any, ok::Bool)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:1003
 [31] #invokelatest#2
    @ ./essentials.jl:1055 [inlined]
 [32] invokelatest
    @ ./essentials.jl:1052 [inlined]
 [33] run_interface(terminal::REPL.Terminals.TextTerminal, m::REPL.LineEdit.ModalInterface, s::REPL.LineEdit.MIState)
    @ REPL.LineEdit /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/LineEdit.jl:2755
 [34] run_frontend(repl::REPL.LineEditREPL, backend::REPL.REPLBackendRef)
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:1471
 [35] (::REPL.var"#75#81"{REPL.LineEditREPL, REPL.REPLBackendRef})()
    @ REPL /usr/local/julia-1.11.1/share/julia/stdlib/v1.11/REPL/src/REPL.jl:480
Some type information was truncated. Use `show(err)` to see complete types.

julia> 

x66ccff commented on Dec 14, 2024