
cogvlm2 issue

Open · BruceJust opened this issue on May 31, 2024 · 0 comments

code

    class Catcher(nn.Module):
        def __init__(self, module):
            super().__init__()
            self.module = module

        def forward(self, *args, **kwargs):
            # assume first input to forward is hidden states
            if len(args) > 0:
                hidden_states = args[0]
                del args
            else:
                first_key = list(kwargs.keys())[0]
                hidden_states = kwargs.pop(first_key)
            print("hidden_states: ", hidden_states.size())
            inps.append(hidden_states)
            layer_kwargs.update(kwargs)
            raise ValueError  # early exit to break later inference

    # patch layer 0 to catch input and kwargs
    modules[0] = Catcher(modules[0])
    print("1111")
    try:
        self.model(samples["input_ids"].to(next(self.model.parameters()).device),  samples["images"].to(next(self.model.parameters()).device), samples["token_type_ids"].to(next(self.model.parameters()).device))
        print("2222")
    except ValueError:  # work with early exit
        pass
    modules[0] = modules[0].module  # restore

    # Update the layer kwargs with `prepare_inputs_for_generation` method
    # that takes care of everything to avoid unexpected errors.
    print("samples: ", len(samples))
    layer_kwargs = self.model.prepare_inputs_for_generation(**samples, **layer_kwargs)
    # Pop the input_ids as they are not needed at all.
    layer_kwargs.pop("input_ids")

    del samples
    inps = inps[0]
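
For reference, this `Catcher` trick only records an input when the patched module is actually invoked during the forward pass. A minimal, self-contained sketch of the pattern (the toy `layers`/`run` model below is made up for illustration):

    import torch
    import torch.nn as nn

    inps, layer_kwargs = [], {}

    class Catcher(nn.Module):
        def __init__(self, module):
            super().__init__()
            self.module = module

        def forward(self, *args, **kwargs):
            # record the first input as hidden states, then abort the pass
            inps.append(args[0] if args else kwargs.pop(list(kwargs)[0]))
            layer_kwargs.update(kwargs)
            raise ValueError  # early exit

    # toy "model": two linear layers walked in sequence
    layers = nn.ModuleList([nn.Linear(8, 8), nn.Linear(8, 8)])

    def run(x):
        for layer in layers:
            x = layer(x)
        return x

    layers[0] = Catcher(layers[0])  # patch layer 0
    try:
        run(torch.randn(2, 8))
    except ValueError:  # expected early exit
        pass
    layers[0] = layers[0].module  # restore

    print(len(inps), inps[0].shape)  # 1 torch.Size([2, 8])

If `inps` stays empty in the real model, the patched `modules[0]` was never called before the forward pass ended or failed.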

issue

I rewrote the code to support cogvlm2, but I get an empty `inps` when running `init_quant`: the `print("hidden_states: ", hidden_states.size())` line never executes. Do you know what the reason might be? Thanks.
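
If the print inside `Catcher.forward` never fires, one way to narrow it down (a hedged diagnostic sketch, reusing `modules`, `samples`, and `self.model` from the snippet above) is to run the forward pass with only a plain pre-hook on the unpatched layer 0:

    # Diagnostic sketch: if nothing prints, self.model(...) never routes
    # through `modules[0]` (cogvlm2 may keep its decoder layers under a
    # different attribute), or the call fails before reaching the layers.
    device = next(self.model.parameters()).device
    hook = modules[0].register_forward_pre_hook(
        lambda module, args: print("layer 0 reached, positional args:", len(args))
    )
    try:
        self.model(
            samples["input_ids"].to(device),
            samples["images"].to(device),
            samples["token_type_ids"].to(device),
        )
    except Exception as e:
        print("forward raised:", type(e).__name__, e)
    hook.remove()

If the hook prints, the layer is reached and the problem is in how `Catcher.forward` reads its inputs; if it does not, the printed exception usually shows where the call diverges.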
