"git@developer.sourcefind.cn:OpenDAS/pytorch-encoding.git" did not exist on "c2cb2aab69d5d276fbcb847fb8277c1a52947661"
Unverified commit d8310a8f authored by Sayak Paul, committed by GitHub
Browse files

[lora] factor out the overlaps in `save_lora_weights()`. (#12027)

* factor out the overlaps in save_lora_weights().

* remove comment.

* remove comment.

* up

* fix-copies
parent 78031c29
...@@ -1064,6 +1064,41 @@ class LoraBaseMixin:
        save_function(state_dict, save_path)
        logger.info(f"Model weights saved in {save_path}")
@classmethod
def _save_lora_weights(
    cls,
    save_directory: Union[str, os.PathLike],
    lora_layers: Dict[str, Dict[str, Union[torch.nn.Module, torch.Tensor]]],
    lora_metadata: Dict[str, Optional[dict]],
    is_main_process: bool = True,
    weight_name: Optional[str] = None,
    save_function: Optional[Callable] = None,
    safe_serialization: bool = True,
):
    """
    Pack and save LoRA weights and adapter metadata.

    Helper that centralizes the saving logic shared by all pipeline types:
    every component's layers are packed under its prefix into a single flat
    state dict, optional per-component metadata is packed the same way, and
    the result is handed to `cls.write_lora_layers`.

    Args:
        save_directory: Directory the serialized LoRA weights are written to.
        lora_layers: Mapping of component prefix (e.g. "unet") to that
            component's LoRA layer state (modules or tensors).
        lora_metadata: Mapping of component prefix to an optional metadata
            dict; falsy entries (None/empty) are skipped.
        is_main_process: Whether the calling process is the main one; passed
            through to `write_lora_layers` for distributed saves.
        weight_name: Optional filename override for the serialized weights.
        save_function: Optional custom serialization function; when None,
            `write_lora_layers` picks its default.
        safe_serialization: Whether to serialize with safetensors.
    """
    state_dict = {}
    final_lora_adapter_metadata = {}

    # Namespace each component's weights under its prefix so one flat state
    # dict can hold multiple components (e.g. unet + text encoder).
    for prefix, layers in lora_layers.items():
        state_dict.update(cls.pack_weights(layers, prefix))

    # Only non-empty metadata entries are packed; None/empty dicts are skipped.
    for prefix, metadata in lora_metadata.items():
        if metadata:
            final_lora_adapter_metadata.update(_pack_dict_with_prefix(metadata, prefix))

    cls.write_lora_layers(
        state_dict=state_dict,
        save_directory=save_directory,
        is_main_process=is_main_process,
        weight_name=weight_name,
        save_function=save_function,
        safe_serialization=safe_serialization,
        # Pass None (not {}) when no metadata was collected, matching the
        # write_lora_layers contract for "no adapter metadata".
        lora_adapter_metadata=final_lora_adapter_metadata if final_lora_adapter_metadata else None,
    )
@classmethod
def _optionally_disable_offloading(cls, _pipeline):
    """Temporarily disable model offloading on `_pipeline` if it is active.

    Thin delegator to the module-level helper, which holds the actual logic.
    """
    return _func_optionally_disable_offloading(_pipeline=_pipeline)
This diff is collapsed.
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment