Unverified Commit 5783286d authored by Jongho Choi, committed by GitHub

[peft] simple update when unscale (#9689)



Update peft_utils.py
Co-authored-by: Sayak Paul <spsayakpaul@gmail.com>
parent ee4ab238
@@ -134,14 +134,14 @@ def unscale_lora_layers(model, weight: Optional[float] = None):
     """
     from peft.tuners.tuners_utils import BaseTunerLayer
 
-    if weight == 1.0:
+    if weight is None or weight == 1.0:
         return
 
     for module in model.modules():
         if isinstance(module, BaseTunerLayer):
-            if weight is not None and weight != 0:
+            if weight != 0:
                 module.unscale_layer(weight)
-            elif weight is not None and weight == 0:
+            else:
                 for adapter_name in module.active_adapters:
                     # if weight == 0 unscale should re-set the scale to the original value.
                     module.set_scale(adapter_name, 1.0)
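
For context, `unscale_lora_layers` is the counterpart of `scale_lora_layers` in diffusers' `peft_utils`: a pipeline scales the active LoRA layers before a forward pass and unscales them afterwards. The sketch below shows that pairing; the wrapper function and its name are illustrative assumptions and not part of this commit, while the `diffusers.utils` import path reflects what recent diffusers versions expose.

```python
# Minimal usage sketch: scale LoRA layers for one forward pass, then undo it.
# Only scale_lora_layers / unscale_lora_layers come from diffusers; the
# forward_with_lora_scale helper is hypothetical.
from diffusers.utils import scale_lora_layers, unscale_lora_layers


def forward_with_lora_scale(model, inputs, lora_scale=None):
    """Run `model` with its active LoRA layers temporarily scaled by `lora_scale`."""
    if lora_scale is not None:
        scale_lora_layers(model, lora_scale)
    try:
        return model(**inputs)
    finally:
        # With this patch, weight=None or weight == 1.0 returns early (no-op),
        # and weight == 0 re-sets every active adapter's scale to 1.0.
        unscale_lora_layers(model, lora_scale)
```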