BertGCN

TypeError: GraphConv.forward() got an unexpected keyword argument 'edge_weights'

Open NitishOritro opened this issue 1 year ago • 3 comments

ERROR:ignite.engine.engine.Engine:Current run is terminating due to exception: GraphConv.forward() got an unexpected keyword argument 'edge_weights'
ERROR:ignite.engine.engine.Engine:Engine run is terminating due to exception: GraphConv.forward() got an unexpected keyword argument 'edge_weights'

TypeError                                 Traceback (most recent call last)
in <cell line: 2>()
      1 #bertgcn
----> 2 trainer.run(idx_loader, max_epochs=nb_epochs)

13 frames
/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in run(self, data, max_epochs, epoch_length, seed)
    890
    891         if self.interrupt_resume_enabled:
--> 892             return self._internal_run()
    893         else:
    894             return self._internal_run_legacy()

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _internal_run(self)
    933         self._internal_run_generator = self._internal_run_as_gen()
    934         try:
--> 935             return next(self._internal_run_generator)
    936         except StopIteration as out:
    937             self._internal_run_generator = None

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _internal_run_as_gen(self)
    991             self._dataloader_iter = None
    992             self.logger.error(f"Engine run is terminating due to exception: {e}")
--> 993             self._handle_exception(e)
    994
    995         self._dataloader_iter = None

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _handle_exception(self, e)
    636             self._fire_event(Events.EXCEPTION_RAISED, e)
    637         else:
--> 638             raise e
    639
    640     @property

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _internal_run_as_gen(self)
    957                 self._setup_engine()
    958
--> 959             epoch_time_taken += yield from self._run_once_on_dataset_as_gen()
    960
    961             # time is available for handlers but must be updated after fire

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _run_once_on_dataset_as_gen(self)
   1085         except Exception as e:
   1086             self.logger.error(f"Current run is terminating due to exception: {e}")
-> 1087             self._handle_exception(e)
   1088
   1089         return time.time() - start_time

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _handle_exception(self, e)
    636             self._fire_event(Events.EXCEPTION_RAISED, e)
    637         else:
--> 638             raise e
    639
    640     @property

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _run_once_on_dataset_as_gen(self)
   1066             yield from self._maybe_terminate_or_interrupt()
   1067
-> 1068             self.state.output = self._process_function(self, self.state.batch)
   1069             self._fire_event(Events.ITERATION_COMPLETED)
   1070             yield from self._maybe_terminate_or_interrupt()

in train_step(engine, batch)
      8     optimizer.zero_grad()
      9     train_mask = g.ndata['train'][idx].type(th.BoolTensor)
---> 10     y_pred = model(g, idx)[train_mask]
     11     y_true = g.ndata['label_train'][idx][train_mask]
     12     loss = F.nll_loss(y_pred, y_true)

/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py in _call_impl(self, *args, **kwargs)
   1499                 or _global_backward_pre_hooks or _global_backward_hooks
   1500                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1501             return forward_call(*args, **kwargs)
   1502         # Do not call functions when jit is used
   1503         full_backward_hooks, non_full_backward_hooks = [], []

in forward(self, g, idx)
    138         cls_logit = self.classifier(cls_feats)
    139         cls_pred = th.nn.Softmax(dim=1)(cls_logit)
--> 140         gcn_logit = self.gcn(g.ndata['cls_feats'], g, g.edata['edge_weight'])[idx]
    141         gcn_pred = th.nn.Softmax(dim=1)(gcn_logit)
    142         pred = (gcn_pred+1e-10) * self.m + cls_pred * (1 - self.m)

/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py in _call_impl(self, *args, **kwargs)
   1499                 or _global_backward_pre_hooks or _global_backward_hooks
   1500                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1501             return forward_call(*args, **kwargs)
   1502         # Do not call functions when jit is used
   1503         full_backward_hooks, non_full_backward_hooks = [], []

in forward(self, features, g, edge_weight)
    107             if i != 0:
    108                 h = self.dropout(h)
--> 109             h = layer(g, h, edge_weights=edge_weight)
    110         return h
    111

/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py in _call_impl(self, *args, **kwargs)
   1499                 or _global_backward_pre_hooks or _global_backward_hooks
   1500                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1501             return forward_call(*args, **kwargs)
   1502         # Do not call functions when jit is used
   1503         full_backward_hooks, non_full_backward_hooks = [], []

TypeError: GraphConv.forward() got an unexpected keyword argument 'edge_weights'

NitishOritro avatar May 01 '23 14:05 NitishOritro

Try just edge_weight, without the "s", as the argument name in the GraphConvEdgeWeight class's forward method. Line 109 in the traceback passes edge_weights=edge_weight, but DGL's GraphConv.forward only accepts a keyword named edge_weight.
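For illustration, a minimal sketch of the renamed call, assuming DGL >= 0.6 (where the stock dgl.nn.GraphConv already accepts an edge_weight keyword). The layer loop mirrors the GCN forward in the traceback; the class name and sizes here are made up:

import torch.nn as nn
from dgl.nn import GraphConv

class GCN(nn.Module):
    # Illustrative two-layer GCN; not BertGCN's exact configuration.
    def __init__(self, in_feats, n_hidden, n_classes, dropout=0.5):
        super().__init__()
        self.layers = nn.ModuleList([
            GraphConv(in_feats, n_hidden),
            GraphConv(n_hidden, n_classes),
        ])
        self.dropout = nn.Dropout(dropout)

    def forward(self, features, g, edge_weight):
        h = features
        for i, layer in enumerate(self.layers):
            if i != 0:
                h = self.dropout(h)
            # GraphConv.forward(graph, feat, weight=None, edge_weight=None):
            # the keyword is edge_weight, with no trailing "s".
            h = layer(g, h, edge_weight=edge_weight)
        return h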

padshahrohan avatar May 05 '23 04:05 padshahrohan

ERROR:ignite.engine.engine.Engine:Current run is terminating due to exception: 'NoneType' object is not subscriptable
ERROR:ignite.engine.engine.Engine:Engine run is terminating due to exception: 'NoneType' object is not subscriptable

TypeError                                 Traceback (most recent call last)
in <cell line: 2>()
      1 #bertgcn
----> 2 trainer.run(idx_loader, max_epochs=nb_epochs)

10 frames
/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in run(self, data, max_epochs, epoch_length, seed)
    890
    891         if self.interrupt_resume_enabled:
--> 892             return self._internal_run()
    893         else:
    894             return self._internal_run_legacy()

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _internal_run(self)
    933         self._internal_run_generator = self._internal_run_as_gen()
    934         try:
--> 935             return next(self._internal_run_generator)
    936         except StopIteration as out:
    937             self._internal_run_generator = None

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _internal_run_as_gen(self)
    991             self._dataloader_iter = None
    992             self.logger.error(f"Engine run is terminating due to exception: {e}")
--> 993             self._handle_exception(e)
    994
    995         self._dataloader_iter = None

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _handle_exception(self, e)
    636             self._fire_event(Events.EXCEPTION_RAISED, e)
    637         else:
--> 638             raise e
    639
    640     @property

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _internal_run_as_gen(self)
    957                 self._setup_engine()
    958
--> 959             epoch_time_taken += yield from self._run_once_on_dataset_as_gen()
    960
    961             # time is available for handlers but must be updated after fire

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _run_once_on_dataset_as_gen(self)
   1085         except Exception as e:
   1086             self.logger.error(f"Current run is terminating due to exception: {e}")
-> 1087             self._handle_exception(e)
   1088
   1089         return time.time() - start_time

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _handle_exception(self, e)
    636             self._fire_event(Events.EXCEPTION_RAISED, e)
    637         else:
--> 638             raise e
    639
    640     @property

/usr/local/lib/python3.10/dist-packages/ignite/engine/engine.py in _run_once_on_dataset_as_gen(self)
   1066             yield from self._maybe_terminate_or_interrupt()
   1067
-> 1068             self.state.output = self._process_function(self, self.state.batch)
   1069             self._fire_event(Events.ITERATION_COMPLETED)
   1070             yield from self._maybe_terminate_or_interrupt()

in train_step(engine, batch)
    102     optimizer.zero_grad()
    103     train_mask = g.ndata['train'][idx].type(th.BoolTensor)
--> 104     y_pred = model(g, idx)[train_mask]
    105     y_true = g.ndata['label_train'][idx][train_mask]
    106     loss = F.nll_loss(y_pred, y_true)

/usr/local/lib/python3.10/dist-packages/torch/nn/modules/module.py in _call_impl(self, *args, **kwargs)
   1499                 or _global_backward_pre_hooks or _global_backward_hooks
   1500                 or _global_forward_hooks or _global_forward_pre_hooks):
-> 1501             return forward_call(*args, **kwargs)
   1502         # Do not call functions when jit is used
   1503         full_backward_hooks, non_full_backward_hooks = [], []

in forward(self, g, idx)
    140         cls_logit = self.classifier(cls_feats)
    141         cls_pred = th.nn.Softmax(dim=1)(cls_logit)
--> 142         gcn_logit = self.gcn(g.ndata['cls_feats'], g, g.edata['edge_weight'])[idx]
    143         gcn_pred = th.nn.Softmax(dim=1)(gcn_logit)
    144         pred = (gcn_pred+1e-10) * self.m + cls_pred * (1 - self.m)

TypeError: 'NoneType' object is not subscriptable

NitishOritro avatar May 15 '23 14:05 NitishOritro

[Solved] Type the parameter as edge_weight=None in the forward signature, and make sure forward actually returns its result. The 'NoneType' object is not subscriptable error above most likely means the patched forward fell off the end and returned None, so indexing self.gcn(...)[idx] failed. Full working class:

import torch as th
import torch.nn as nn
import dgl.function as fn
from dgl.base import DGLError
from dgl.nn import GraphConv
from dgl.utils import expand_as_pair

class GraphConvEdgeWeight(GraphConv):

    def forward(self, graph, feat, weight=None, edge_weight=None):
        with graph.local_scope():
            if not self._allow_zero_in_degree:
                if (graph.in_degrees() == 0).any():
                    raise DGLError('There are 0-in-degree nodes in the graph, '
                                   'output for those nodes will be invalid. '
                                   'This is harmful for some applications, '
                                   'causing silent performance regression. '
                                   'Adding self-loop on the input graph by '
                                   'calling `g = dgl.add_self_loop(g)` will resolve '
                                   'the issue. Setting ``allow_zero_in_degree`` '
                                   'to be `True` when constructing this module will '
                                   'suppress the check and let the code run.')

            # (BarclayII) For RGCN on heterogeneous graphs we need to support GCN on bipartite.
            feat_src, feat_dst = expand_as_pair(feat, graph)
            if self._norm == 'both':
                # Symmetric normalization: scale source features by D^{-1/2}.
                degs = graph.out_degrees().float().clamp(min=1)
                norm = th.pow(degs, -0.5)
                shp = norm.shape + (1,) * (feat_src.dim() - 1)
                norm = th.reshape(norm, shp)
                feat_src = feat_src * norm

            if weight is not None:
                if self.weight is not None:
                    raise DGLError('External weight is provided while at the same time the'
                                   ' module has defined its own weight parameter. Please'
                                   ' create the module with flag weight=False.')
            else:
                weight = self.weight

            if self._in_feats > self._out_feats:
                # Multiply by W first to reduce the feature size before aggregation.
                if weight is not None:
                    feat_src = th.matmul(feat_src, weight)
                graph.srcdata['h'] = feat_src
                if edge_weight is None:
                    # fn.copy_u replaces the fn.copy_src alias removed in DGL >= 1.0.
                    graph.update_all(fn.copy_u('h', 'm'),
                                     fn.sum(msg='m', out='h'))
                else:
                    # Scale each message by its edge weight before summing.
                    graph.edata['a'] = edge_weight
                    graph.update_all(fn.u_mul_e('h', 'a', 'm'),
                                     fn.sum(msg='m', out='h'))
                rst = graph.dstdata['h']
            else:
                # Aggregate first, then multiply by W.
                graph.srcdata['h'] = feat_src
                if edge_weight is None:
                    graph.update_all(fn.copy_u('h', 'm'),
                                     fn.sum(msg='m', out='h'))
                else:
                    graph.edata['a'] = edge_weight
                    graph.update_all(fn.u_mul_e('h', 'a', 'm'),
                                     fn.sum(msg='m', out='h'))
                rst = graph.dstdata['h']
                if weight is not None:
                    rst = th.matmul(rst, weight)

            if self._norm != 'none':
                degs = graph.in_degrees().float().clamp(min=1)
                if self._norm == 'both':
                    norm = th.pow(degs, -0.5)
                else:
                    norm = 1.0 / degs
                shp = norm.shape + (1,) * (feat_dst.dim() - 1)
                norm = th.reshape(norm, shp)
                rst = rst * norm

            if self.bias is not None:
                rst = rst + self.bias

            if self._activation is not None:
                rst = self._activation(rst)

            return rst
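For a quick sanity check, hypothetical usage on a toy graph (the graph, sizes, and random features are made up for illustration; the point is that the keyword is edge_weight):

import dgl
import torch as th

g = dgl.graph(([0, 1, 2], [1, 2, 0]))            # toy 3-node directed cycle
g.edata['edge_weight'] = th.rand(g.num_edges())  # one scalar weight per edge
feat = th.rand(g.num_nodes(), 16)                # random node features

conv = GraphConvEdgeWeight(16, 4)
out = conv(g, feat, edge_weight=g.edata['edge_weight'])  # keyword: edge_weight
print(out.shape)  # torch.Size([3, 4])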

NitishOritro avatar May 21 '23 15:05 NitishOritro