mmrazor
mmrazor copied to clipboard
Error ModuleNotFoundError: No module named 'mmcv._ext'
Checklist
- I have searched related issues but cannot get the expected help. 👍
- I have read related documents and don't know what to do. 👍
Describe the question you meet
I am trying to run mmrazor==1.0.0 with mmcv==2.0.0 for MGD knowledge distillation with mmdet for RetinaNet. I am getting this error when running train.py with the MGD configuration:
ext = importlib.import_module('mmcv.' + name) File "/usr/lib64/python3.7/importlib/__init__.py", line 127, in import_module return _bootstrap._gcd_import(name[level:], package, level) ModuleNotFoundError: No module named 'mmcv._ext'
Post related information
-
The output of
pip list | grep "mmcv\|mmrazor\|^torch"
mmcv 2.0.0 mmdet 3.1.0 /mnt/efs/sashabehrouzi/mmrazor/mmdetection mmrazor 1.0.0 /mnt/efs/sashabehrouzi/mmrazor torch 1.10.2+cu113 torchvision 0.11.3+cu113 -
Your config file if you modified it or created a new one.
_base_ = ['mmdet::/home/rrschch/mmdetection/configs/mgd/uc1_retinanet_r50_2x.py']
teacher_ckpt = '/home/rrschch/mmdetection/work_dirs/retinanet_x101/best_bbox_mAP_epoch_29.pth' # noqa: E501
student = _base_.model student.neck.init_cfg = dict( type='Pretrained', prefix='neck.', checkpoint=teacher_ckpt) student.bbox_head.init_cfg = dict( type='Pretrained', prefix='bbox_head.', checkpoint=teacher_ckpt) launcher = 'pytorch' env_cfg = dict(dist_cfg=dict(backend='nccl')) model = dict( _scope_='mmrazor', _delete_=True, type='FpnTeacherDistill', architecture=student, teacher=dict( cfg_path='mmdet::/home/rrschch/mmrazor/mmdetection/configs/mgd/uc1_retinanet_x101_64x4d.py', pretrained=False), teacher_ckpt=teacher_ckpt, distiller=dict( type='ConfigurableDistiller', student_recorders=dict( fpn0=dict(type='ModuleOutputs', source='neck.fpn_convs.0.conv'), fpn1=dict(type='ModuleOutputs', source='neck.fpn_convs.1.conv'), fpn2=dict(type='ModuleOutputs', source='neck.fpn_convs.2.conv'), fpn3=dict(type='ModuleOutputs', source='neck.fpn_convs.3.conv'), fpn4=dict(type='ModuleOutputs', source='neck.fpn_convs.4.conv')), teacher_recorders=dict( fpn0=dict(type='ModuleOutputs', source='neck.fpn_convs.0.conv'), fpn1=dict(type='ModuleOutputs', source='neck.fpn_convs.1.conv'), fpn2=dict(type='ModuleOutputs', source='neck.fpn_convs.2.conv'), fpn3=dict(type='ModuleOutputs', source='neck.fpn_convs.3.conv'), fpn4=dict(type='ModuleOutputs', source='neck.fpn_convs.4.conv')), connectors=dict( s_fpn0_connector=dict( type='MGDConnector', student_channels=256, teacher_channels=256, lambda_mgd=0.65), s_fpn1_connector=dict( type='MGDConnector', student_channels=256, teacher_channels=256, lambda_mgd=0.65), s_fpn2_connector=dict( type='MGDConnector', student_channels=256, teacher_channels=256, lambda_mgd=0.65), s_fpn3_connector=dict( type='MGDConnector', student_channels=256, teacher_channels=256, lambda_mgd=0.65), s_fpn4_connector=dict( type='MGDConnector', student_channels=256, teacher_channels=256, lambda_mgd=0.65)), distill_losses=dict( loss_mgd_fpn0=dict(type='MGDLoss', alpha_mgd=0.00002), loss_mgd_fpn1=dict(type='MGDLoss', alpha_mgd=0.00002), loss_mgd_fpn2=dict(type='MGDLoss', 
alpha_mgd=0.00002), loss_mgd_fpn3=dict(type='MGDLoss', alpha_mgd=0.00002), loss_mgd_fpn4=dict(type='MGDLoss', alpha_mgd=0.00002)), loss_forward_mappings=dict( loss_mgd_fpn0=dict( preds_S=dict( from_student=True, recorder='fpn0', connector='s_fpn0_connector'), preds_T=dict(from_student=False, recorder='fpn0')), loss_mgd_fpn1=dict( preds_S=dict( from_student=True, recorder='fpn1', connector='s_fpn1_connector'), preds_T=dict(from_student=False, recorder='fpn1')), loss_mgd_fpn2=dict( preds_S=dict( from_student=True, recorder='fpn2', connector='s_fpn2_connector'), preds_T=dict(from_student=False, recorder='fpn2')), loss_mgd_fpn3=dict( preds_S=dict( from_student=True, recorder='fpn3', connector='s_fpn3_connector'), preds_T=dict(from_student=False, recorder='fpn3')), loss_mgd_fpn4=dict( preds_S=dict( from_student=True, recorder='fpn4', connector='s_fpn4_connector'), preds_T=dict(from_student=False, recorder='fpn4')))))
find_unused_parameters = True
val_cfg = dict(_delete_=True, type='mmrazor.SingleTeacherDistillValLoop')
optimizer_config = dict( _delete_=True, grad_clip=dict(max_norm=35, norm_type=2))
param_scheduler = [ dict( type='LinearLR', start_factor=0.001, by_epoch=False, begin=0, end=500), dict( type='MultiStepLR', begin=0, end=24, by_epoch=True, milestones=[16, 22], gamma=0.1) ]
optim_wrapper = dict( optimizer=dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001))
3. Your train log file if you meet the problem during training.
My training does not start
4. Other code you modified in the `mmrazor` folder.
No other code is modified