
Training issue: Cannot find field 'gt_bo_masks' in the given Instances

C-Ll-l opened this issue 2 years ago · 2 comments

```
WARNING [10/15 22:01:14 fvcore.common.checkpoint]: The checkpoint state_dict contains keys that are not used by the model:
  fc1000.{bias, weight}
  stem.conv1.bias
[10/15 22:01:14 d2.engine.train_loop]: Starting training from iteration 0
ERROR [10/15 22:01:15 d2.engine.train_loop]: Exception during training:
Traceback (most recent call last):
  File "/home/chuan/code/detectron2/detectron2/engine/train_loop.py", line 149, in train
    self.run_step()
  File "/home/chuan/code/detectron2/detectron2/engine/defaults.py", line 494, in run_step
    self._trainer.run_step()
  File "/home/chuan/code/detectron2/detectron2/engine/train_loop.py", line 273, in run_step
    loss_dict = self.model(data)
  File "/home/chuan/anaconda3/envs/bcnet2/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1102, in _call_impl
    return forward_call(*input, **kwargs)
  File "/home/chuan/code/detectron2/detectron2/modeling/meta_arch/fcos.py", line 172, in forward
    return self._forward_train(
  File "/home/chuan/code/detectron2/detectron2/modeling/meta_arch/fcos.py", line 199, in _forward_train
    loss_mask, loss_mask_bo, loss_boundary, loss_boundary_bo = self._forward_mask(features_list, proposals)
  File "/home/chuan/code/detectron2/detectron2/modeling/meta_arch/fcos.py", line 251, in _forward_mask
    return mask_rcnn_loss(mask_logits, boundary, proposals, bo_masks, bo_bound)
  File "/home/chuan/code/detectron2/detectron2/modeling/roi_heads/mask_head.py", line 77, in mask_rcnn_loss
    gt_bo_masks_per_image = instances_per_image.gt_bo_masks.crop_and_resize(
  File "/home/chuan/code/detectron2/detectron2/structures/instances.py", line 65, in __getattr__
    raise AttributeError("Cannot find field '{}' in the given Instances!".format(name))
AttributeError: Cannot find field 'gt_bo_masks' in the given Instances!
[10/15 22:01:15 d2.engine.hooks]: Total training time: 0:00:00 (0:00:00 on hooks)
[10/15 22:01:15 d2.utils.events]: iter: 0  lr: N/A  max_mem: 962M
/home/chuan/anaconda3/envs/bcnet2/lib/python3.8/site-packages/torch/functional.py:445: UserWarning: torch.meshgrid: in an upcoming release, it will be required to pass the indexing argument. (Triggered internally at ../aten/src/ATen/native/TensorShape.cpp:2157.)
  return _VF.meshgrid(tensors, **kwargs)  # type: ignore[attr-defined]
Traceback (most recent call last):
  File "tools/train_net.py", line 156, in <module>
    launch(
  File "/home/chuan/code/detectron2/detectron2/engine/launch.py", line 82, in launch
    main_func(*args)
  File "tools/train_net.py", line 150, in main
    return trainer.train()
  File "/home/chuan/code/detectron2/detectron2/engine/defaults.py", line 484, in train
    super().train(self.start_iter, self.max_iter)
  File "/home/chuan/code/detectron2/detectron2/engine/train_loop.py", line 149, in train
    self.run_step()
  File "/home/chuan/code/detectron2/detectron2/engine/defaults.py", line 494, in run_step
    self._trainer.run_step()
  File "/home/chuan/code/detectron2/detectron2/engine/train_loop.py", line 273, in run_step
    loss_dict = self.model(data)
  File "/home/chuan/anaconda3/envs/bcnet2/lib/python3.8/site-packages/torch/nn/modules/module.py", line 1102, in _call_impl
    return forward_call(*input, **kwargs)
  File "/home/chuan/code/detectron2/detectron2/modeling/meta_arch/fcos.py", line 172, in forward
    return self._forward_train(
  File "/home/chuan/code/detectron2/detectron2/modeling/meta_arch/fcos.py", line 199, in _forward_train
    loss_mask, loss_mask_bo, loss_boundary, loss_boundary_bo = self._forward_mask(features_list, proposals)
  File "/home/chuan/code/detectron2/detectron2/modeling/meta_arch/fcos.py", line 251, in _forward_mask
    return mask_rcnn_loss(mask_logits, boundary, proposals, bo_masks, bo_bound)
  File "/home/chuan/code/detectron2/detectron2/modeling/roi_heads/mask_head.py", line 77, in mask_rcnn_loss
    gt_bo_masks_per_image = instances_per_image.gt_bo_masks.crop_and_resize(
  File "/home/chuan/code/detectron2/detectron2/structures/instances.py", line 65, in __getattr__
    raise AttributeError("Cannot find field '{}' in the given Instances!".format(name))
AttributeError: Cannot find field 'gt_bo_masks' in the given Instances!
```

I used process.sh to generate coco.json. A sample annotation looks like this:

```json
{
  "id": 2364,
  "image_id": 990,
  "bbox": [21.0, 21.0, 282.0, 256.0],
  "area": 72192.0,
  "iscrowd": 0,
  "category_id": 1,
  "segmentation": [[257.0, 276.5, 190.0, 270.5, 115.0, 272.5, 106.0, 268.5, 96.5, 258.0, 80.5, 219.0, 43.5, 177.0, 31.5, 159.0, 20.5, 124.0, 21.5, 97.0, 71.0, 38.5, 101.0, 39.5, 134.0, 25.5, 155.0, 20.5, 175.0, 20.5, 186.0, 27.5, 197.0, 42.5, 215.0, 30.5, 223.0, 29.5, 238.0, 34.5, 253.0, 28.5, 262.0, 32.5, 286.5, 55.0, 299.5, 85.0, 298.5, 122.0, 302.5, 145.0, 291.5, 171.0, 294.5, 189.0, 284.5, 206.0, 284.5, 227.0, 288.5, 237.0, 284.5, 259.0, 270.0, 271.5, 257.0, 276.5]],
  "bg_object_segmentation": [[302.0, 271.0, 297.0, 271.0, 292.0, 268.0, 287.0, 264.0, 282.0, 263.0, 277.0, 264.0, 272.0, 264.0, 267.0, 263.0, 262.0, 260.0, 257.0, 258.0, 252.0, 256.0, 247.0, 256.0, 242.0, 256.0, 237.0, 256.0, 232.0, 255.0, 229.0, 250.0, 226.0, 245.0, 226.0, 240.0, 226.0, 235.0, 225.0, 230.0, 224.0, 225.0, 222.0, 220.0, 220.0, 215.0, 218.0, 210.0, 214.0, 205.0, 210.0, 200.0, 206.0, 195.0, 202.0, 190.0, 198.0, 185.0, 200.0, 180.0, 203.0, 175.0, 207.0, 170.0, 212.0, 166.0, 217.0, 162.0, 222.0, 158.0, 227.0, 154.0, 232.0, 151.0, 237.0, 148.0, 242.0, 145.0, 247.0, 142.0, 251.0, 138.0, 254.0, 133.0, 258.0, 128.0, 261.0, 123.0, 264.0, 118.0, 269.0, 114.0, 274.0, 110.0, 279.0, 107.0, 284.0, 103.0, 288.0, 98.0, 292.0, 93.0, 296.0, 88.0, 301.0, 83.0, 303.0, 84.0, 303.0, 89.0, 303.0, 94.0, 303.0, 99.0, 303.0, 104.0, 303.0, 109.0, 303.0, 114.0, 303.0, 119.0, 303.0, 124.0, 303.0, 129.0, 303.0, 134.0, 303.0, 139.0, 303.0, 144.0, 303.0, 149.0, 303.0, 154.0, 303.0, 159.0, 303.0, 164.0, 303.0, 169.0, 303.0, 174.0, 303.0, 179.0, 303.0, 184.0, 303.0, 189.0, 303.0, 194.0, 303.0, 199.0, 303.0, 204.0, 303.0, 209.0, 303.0, 214.0, 303.0, 219.0, 303.0, 224.0, 303.0, 229.0, 303.0, 234.0, 303.0, 239.0, 303.0, 244.0, 303.0, 249.0, 303.0, 254.0, 303.0, 259.0, 303.0, 264.0, 303.0, 269.0]]
}
```

The required fields appear to be included. How can I solve this problem? Thanks.
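For reference, here is a minimal sketch (the "coco.json" path is an assumption) that checks whether every annotation in the generated file actually carries the bg_object_segmentation field, using only the standard library:

```python
import json

# Load the annotation file produced by process.sh (path is assumed).
with open("coco.json") as f:
    coco = json.load(f)

# Collect annotation ids that lack the field BCNet's loss expects.
missing = [a["id"] for a in coco["annotations"] if "bg_object_segmentation" not in a]

print(f"{len(coco['annotations'])} annotations, {len(missing)} missing 'bg_object_segmentation'")
```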

C-Ll-l — Oct 15 '22 14:10

Have you resolved this issue? I ran into the same problem...

li-pengcheng — Apr 18 '23 14:04

This error occurs because detectron2 does not read the 'bg_object_segmentation' field from the JSON file generated by process.sh when it loads the data. You can fix it by modifying the annotations_to_instances function in detectron2/data/detection_utils.py:

```python
if len(annos) and "segmentation" in annos[0]:
    segms = [obj["segmentation"] for obj in annos]
    # also collect BCNet's occluder polygons written by process.sh
    if len(annos) and "bg_object_segmentation" in annos[0]:
        bo_segms = [obj["bg_object_segmentation"] for obj in annos]

    if mask_format == "polygon":
        try:
            masks = PolygonMasks(segms)
            bo_masks = PolygonMasks(bo_segms)
        except ValueError as e:
            raise ValueError(
                "Failed to use mask_format=='polygon' from the given annotations!"
            ) from e
    else:
        assert mask_format == "bitmask", mask_format
        masks = []
        for segm in segms:
            if isinstance(segm, list):
                # polygon
                masks.append(polygons_to_bitmask(segm, *image_size))
            elif isinstance(segm, dict):
                # COCO RLE
                masks.append(mask_util.decode(segm))
            elif isinstance(segm, np.ndarray):
                assert segm.ndim == 2, "Expect segmentation of 2 dimensions, got {}.".format(
                    segm.ndim
                )
                # mask array
                masks.append(segm)
            else:
                raise ValueError(
                    "Cannot convert segmentation of type '{}' to BitMasks!"
                    "Supported types are: polygons as list[list[float] or ndarray],"
                    " COCO-style RLE as a dict, or a binary segmentation mask "
                    " in a 2D numpy array of shape HxW.".format(type(segm))
                )
        # torch.from_numpy does not support array with negative stride.
        masks = BitMasks(
            torch.stack([torch.from_numpy(np.ascontiguousarray(x)) for x in masks])
        )
        # note: this snippet only builds bo_masks on the polygon path; a bitmask
        # dataset would need the same conversion applied to bo_segms as well
    target.gt_masks = masks
    target.gt_bo_masks = bo_masks
```
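To see why this resolves the AttributeError: detectron2's Instances object only exposes fields that were explicitly set on it, so once annotations_to_instances attaches gt_bo_masks, the lookup in mask_rcnn_loss succeeds. A minimal sketch (the triangle polygon is purely illustrative):

```python
from detectron2.structures import Instances, PolygonMasks

# One instance with a single toy polygon, standing in for an occluder mask.
inst = Instances(image_size=(480, 640))
inst.gt_bo_masks = PolygonMasks([[[0.0, 0.0, 10.0, 0.0, 10.0, 10.0]]])

# Once the field is set, the attribute lookup that previously raised
# "Cannot find field 'gt_bo_masks'" now resolves.
print(inst.has("gt_bo_masks"))  # True
print(inst.gt_bo_masks)
```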

li-pengcheng — Apr 28 '23 03:04