
Compatibility with PyTorch

CaffreyR opened this issue · 0 comments

Hi, here is another problem. I use OpenDelta together with PyTorch Lightning to fine-tune my model with LoRA. But when I try to load the saved checkpoint, it fails: there are state-dict keys missing, and apparently the LoRA weights were never saved. @ShengdingHu


from opendelta import LoraModel

def opendelta_modify_with_lora(transformer, config):
    # Each LoraModel call injects LoRA modules into the backbone in place,
    # so the handles of the first two calls can be discarded.
    LoraModel(backbone_model=transformer, modified_modules=[r'[r](\d).SelfAttention.[q,v,o,k]'])
    LoraModel(backbone_model=transformer, modified_modules=[r'[r](\d).EncDecAttention.[q,v,o,k]'])
    delta_model = LoraModel(backbone_model=transformer, modified_modules=[r'[r](\d).DenseReluDense.w[o,i]'])

    # Freeze everything except the layer norms and the injected LoRA weights.
    # Note: depending on the OpenDelta version, freeze_module may also change
    # what the backbone's state_dict() returns (see its set_state_dict argument).
    delta_model.freeze_module(exclude=["layer_norm", "lora_A", "lora_B"])
    # delta_model.log(delta_ratio=True, trainable_ratio=True, visualization=True)
    # Visualization(transformer).structure_graph()
    return transformer
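
Incidentally, OpenDelta also has a delta-only save/load path that bypasses the Lightning checkpoint entirely. A minimal sketch, assuming the save_finetuned / AutoDeltaModel.from_finetuned helpers from the OpenDelta docs and a hypothetical "./lora_delta" directory:

from opendelta import AutoDeltaModel

# Persist only the delta (LoRA) weights; "./lora_delta" is a made-up path.
delta_model.save_finetuned("./lora_delta")

# Later: rebuild the plain backbone, then re-attach the trained deltas.
delta_model = AutoDeltaModel.from_finetuned("./lora_delta", backbone_model=transformer)

If that round-trips correctly, the missing keys are a question of what Lightning writes into its checkpoint rather than of the LoRA modules themselves.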

class EncoderDecoder(LightningModule):
    """
    Encoder Decoder
    """

    def __init__(self, config, tokenizer, transformer, dataset_reader):
        """
        :param config
        """
        super().__init__()
        self.config = config
        self.tokenizer = tokenizer
        self.model = transformer
        self.dataset_reader = dataset_reader

        self.use_deepspeed = self.config.compute_strategy.startswith("deepspeed")
        self.use_ddp = self.config.compute_strategy.startswith("ddp")
        self.load_model()

        self._last_global_step_saved = -1

        if self.config.fishmask_mode is not None:
            fishmask_plugin_on_init(self)

# Note: the Lightning method is load_from_checkpoint (no trailing "s").
model = EncoderDecoder.load_from_checkpoint("my file path")
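
As a quick sanity check, it may help to confirm whether the LoRA tensors were ever written to the checkpoint. A minimal sketch, assuming a standard Lightning checkpoint (weights stored under the "state_dict" key) and the same placeholder path as above:

import torch

ckpt = torch.load("my file path", map_location="cpu")  # placeholder path from the post
lora_keys = [k for k in ckpt["state_dict"] if "lora_A" in k or "lora_B" in k]
print(f"{len(lora_keys)} LoRA tensors found in the checkpoint")

If the count is zero, the weights were lost on the saving side (e.g., what state_dict() returns after freeze_module), and loading can only fail.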

[screenshot: traceback showing missing state_dict keys when loading the checkpoint]

CaffreyR · Jul 21, 2022, 12:07