Traceback (most recent call last):
File "E:\PyCharmProject\OOTDiffusion\run\run_ootd.py", line 10, in
from preprocess.humanparsing.aigc_run_parsing import Parsing
File "E:\PyCharmProject\OOTDiffusion\preprocess\humanparsing\aigc_run_parsing.py", line 7, in
from parsing_api import load_atr_model, load_lip_model, inference
File "E:\PyCharmProject\OOTDiffusion\preprocess\humanparsing\parsing_api.py", line 11, in
import networks
File "E:\PyCharmProject\OOTDiffusion\preprocess\humanparsing\networks_init_.py", line 2, in
from networks.AugmentCE2P import resnet101
File "E:\PyCharmProject\OOTDiffusion\preprocess\humanparsing\networks\AugmentCE2P.py", line 22, in
from modules import InPlaceABNSync
File "E:\PyCharmProject\OOTDiffusion\preprocess\humanparsing\modules_init_.py", line 1, in
from .bn import ABN, InPlaceABN, InPlaceABNSync
File "E:\PyCharmProject\OOTDiffusion\preprocess\humanparsing\modules\bn.py", line 10, in
from .functions import *
File "E:\PyCharmProject\OOTDiffusion\preprocess\humanparsing\modules\functions.py", line 11, in
_backend = load(name="inplace_abn",
File "D:\soft\Miniconda\envs\ootd\lib\site-packages\torch\utils\cpp_extension.py", line 1284, in load
return _jit_compile(
File "D:\soft\Miniconda\envs\ootd\lib\site-packages\torch\utils\cpp_extension.py", line 1509, in _jit_compile
_write_ninja_file_and_build_library(
File "D:\soft\Miniconda\envs\ootd\lib\site-packages\torch\utils\cpp_extension.py", line 1593, in _write_ninja_file_and_build_library
verify_ninja_availability()
File "D:\soft\Miniconda\envs\ootd\lib\site-packages\torch\utils\cpp_extension.py", line 1649, in verify_ninja_availability
raise RuntimeError("Ninja is required to load C++ extensions")
RuntimeError: Ninja is required to load C++ extensions
pip install Ninja
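If the error persists after installing it, a quick way to confirm that PyTorch's extension loader can actually see ninja (run inside the same ootd environment) is to call the same check that raised the error above:

from torch.utils.cpp_extension import is_ninja_available, verify_ninja_availability

# Raises "Ninja is required to load C++ extensions" if ninja is still not on PATH.
verify_ninja_availability()
print("ninja available:", is_ninja_available())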
Traceback (most recent call last):
File "/home/wyw/anaconda3/envs/ootd/lib/python3.10/site-packages/torch/utils/cpp_extension.py", line 1893, in _run_ninja_build
subprocess.run(
File "/home/wyw/anaconda3/envs/ootd/lib/python3.10/subprocess.py", line 524, in run
raise CalledProcessError(retcode, process.args,
subprocess.CalledProcessError: Command '['ninja', '-v']' returned non-zero exit status 1.
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/Data2/wyw/OOTDiffusion/run/graido_ootd.py", line 20, in
from preprocess.humanparsing.aigc_run_parsing import Parsing
File "/Data2/wyw/OOTDiffusion/preprocess/humanparsing/aigc_run_parsing.py", line 7, in
from parsing_api import load_atr_model, load_lip_model, inference
File "/Data2/wyw/OOTDiffusion/preprocess/humanparsing/parsing_api.py", line 11, in
import networks
File "/Data2/wyw/OOTDiffusion/preprocess/humanparsing/networks/init.py", line 2, in
from networks.AugmentCE2P import resnet101
File "/Data2/wyw/OOTDiffusion/preprocess/humanparsing/networks/AugmentCE2P.py", line 22, in
from modules import InPlaceABNSync
File "/Data2/wyw/OOTDiffusion/preprocess/humanparsing/modules/init.py", line 1, in
from .bn import ABN, InPlaceABN, InPlaceABNSync
File "/Data2/wyw/OOTDiffusion/preprocess/humanparsing/modules/bn.py", line 10, in
from .functions import *
File "/Data2/wyw/OOTDiffusion/preprocess/humanparsing/modules/functions.py", line 11, in
_backend = load(name="inplace_abn",
File "/home/wyw/anaconda3/envs/ootd/lib/python3.10/site-packages/torch/utils/cpp_extension.py", line 1284, in load
return _jit_compile(
File "/home/wyw/anaconda3/envs/ootd/lib/python3.10/site-packages/torch/utils/cpp_extension.py", line 1509, in jit_compile
write_ninja_file_and_build_library(
File "/home/wyw/anaconda3/envs/ootd/lib/python3.10/site-packages/torch/utils/cpp_extension.py", line 1624, in write_ninja_file_and_build_library
run_ninja_build(
File "/home/wyw/anaconda3/envs/ootd/lib/python3.10/site-packages/torch/utils/cpp_extension.py", line 1909, in run_ninja_build
raise RuntimeError(message) from e
RuntimeError: Error building extension 'inplace_abn': [1/3] /usr/bin/nvcc -DTORCH_EXTENSION_NAME=inplace_abn -DTORCH_API_INCLUDE_EXTENSION_H -DPYBIND11_COMPILER_TYPE="_gcc" -DPYBIND11_STDLIB="_libstdcpp" -DPYBIND11_BUILD_ABI="_cxxabi1011" -isystem /home/wyw/anaconda3/envs/ootd/lib/python3.10/site-packages/torch/include -isystem /home/wyw/anaconda3/envs/ootd/lib/python3.10/site-packages/torch/include/torch/csrc/api/include -isystem /home/wyw/anaconda3/envs/ootd/lib/python3.10/site-packages/torch/include/TH -isystem /home/wyw/anaconda3/envs/ootd/lib/python3.10/site-packages/torch/include/THC -isystem /home/wyw/anaconda3/envs/ootd/include/python3.10 -D_GLIBCXX_USE_CXX11_ABI=0 -D__CUDA_NO_HALF_OPERATORS__ -D__CUDA_NO_HALF_CONVERSIONS__ -D__CUDA_NO_BFLOAT16_CONVERSIONS__ -D__CUDA_NO_HALF2_OPERATORS__ --expt-relaxed-constexpr -gencode=arch=compute_86,code=compute_86 -gencode=arch=compute_86,code=sm_86 --compiler-options '-fPIC' --expt-extended-lambda -std=c++17 -c /Data2/wyw/OOTDiffusion/preprocess/humanparsing/modules/src/inplace_abn_cuda.cu -o inplace_abn_cuda.cuda.o
FAILED: inplace_abn_cuda.cuda.o
It seems like you're having trouble building a PyTorch extension. The error message indicates that the ninja build system failed to compile the inplace_abn extension.
Here are a few things you could try:
1. Check your CUDA installation: the error shows that the nvcc compiler failed to compile the CUDA code. Make sure CUDA is installed correctly and that nvcc is on your PATH.
2. Check the PyTorch version: make sure your PyTorch version is compatible with the code you're trying to run; you may need to downgrade or upgrade PyTorch.
3. Check the GCC version: PyTorch extensions are often sensitive to the GCC version; try GCC 7 or 8.
4. Check the code's compatibility with your GPU: the extension is being compiled for compute_86/sm_86, which corresponds to the CUDA architecture of NVIDIA Ampere GPUs (such as the RTX 30 series). If you're running on a different GPU, change these flags to match its compute capability (see the sketch after this list).
Remember to clean the build cache before trying again (for extensions built with torch.utils.cpp_extension.load this is usually under ~/.cache/torch_extensions rather than a local ./build directory). If none of these suggestions help, reach out to the maintainers of the code or open an issue in the relevant GitHub repository. Good luck!
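For item 4, here is a minimal sketch (not the repository's own code) of checking your GPU's compute capability and steering the JIT build toward it via the TORCH_CUDA_ARCH_LIST environment variable, which torch.utils.cpp_extension reads when generating the -gencode flags:

import os
import torch

# Report the compute capability of the first visible GPU, e.g. (8, 6) on an RTX 30-series card.
major, minor = torch.cuda.get_device_capability(0)
print(f"GPU compute capability: {major}.{minor}")

# Must be set before the extension is (re)built; torch.utils.cpp_extension uses this
# variable to pick the -gencode=arch=compute_XX,code=sm_XX flags.
os.environ["TORCH_CUDA_ARCH_LIST"] = f"{major}.{minor}"

# After clearing the stale build cache, importing the humanparsing modules again
# will retrigger the inplace_abn build with flags matching your GPU.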
We now support an ONNX model for human parsing; please refer to our latest model files and environment requirements (a usage sketch follows below).
T-Gu, Mar 08 '24 06:03
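For anyone switching to the ONNX path, a minimal sketch of loading a parsing model with onnxruntime is shown here; the file name checkpoints/humanparsing/parsing_atr.onnx, the 1x3x512x512 input shape, and the absence of preprocessing are assumptions for illustration only, so check the released model files and environment requirements for the real names, sizes, and normalization:

import numpy as np
import onnxruntime as ort

# Hypothetical model path; use the actual ONNX file shipped with the latest release.
session = ort.InferenceSession(
    "checkpoints/humanparsing/parsing_atr.onnx",
    providers=["CUDAExecutionProvider", "CPUExecutionProvider"],
)

input_name = session.get_inputs()[0].name
# Dummy NCHW input just to exercise the session; real use needs the model's
# expected resize and normalization, which this assumed shape may not match.
dummy = np.zeros((1, 3, 512, 512), dtype=np.float32)
outputs = session.run(None, {input_name: dummy})
print([o.shape for o in outputs])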