Commit: Update modeling_moss_vl.py

Changed file: modeling_moss_vl.py (+5 −1)
@@ -2094,7 +2094,11 @@ class MossVLModel(MossVLPreTrainedModel):
     """
 )
 class MossVLForConditionalGeneration(MossVLPreTrainedModel, GenerationMixin):
-    _tied_weights_keys = ["lm_head.weight"]
+    # transformers 5.x expects a dict[target, source]; MossVL does not tie
+    # lm_head to the embeddings (config.tie_word_embeddings is False), so the
+    # mapping is empty. The legacy list format ["lm_head.weight"] breaks
+    # save_pretrained in transformers>=5.
+    _tied_weights_keys: dict[str, str] = {}
     config: MossVLConfig
     _checkpoint_conversion_mapping = {}
     accepts_loss_kwargs = False