renzhc / diffusers_dcu · Commits

Commit 5ef74fd5 (unverified)
Authored Jul 02, 2025 by Luo Yihang, committed by GitHub Jul 01, 2025
Parent: 64a92103

fix norm not training in train_control_lora_flux.py (#11832)
Showing 1 changed file with 5 additions and 5 deletions

examples/flux-control/train_control_lora_flux.py  (+5, -5)  view file @ 5ef74fd5
@@ -837,11 +837,6 @@ def main(args):
         assert torch.all(flux_transformer.x_embedder.weight[:, initial_input_channels:].data == 0)
         flux_transformer.register_to_config(in_channels=initial_input_channels * 2, out_channels=initial_input_channels)
 
-    if args.train_norm_layers:
-        for name, param in flux_transformer.named_parameters():
-            if any(k in name for k in NORM_LAYER_PREFIXES):
-                param.requires_grad = True
-
     if args.lora_layers is not None:
         if args.lora_layers != "all-linear":
             target_modules = [layer.strip() for layer in args.lora_layers.split(",")]
@@ -879,6 +874,11 @@ def main(args):
     )
     flux_transformer.add_adapter(transformer_lora_config)
 
+    if args.train_norm_layers:
+        for name, param in flux_transformer.named_parameters():
+            if any(k in name for k in NORM_LAYER_PREFIXES):
+                param.requires_grad = True
+
     def unwrap_model(model):
         model = accelerator.unwrap_model(model)
         model = model._orig_mod if is_compiled_module(model) else model
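Taken together, the two hunks move the `if args.train_norm_layers:` block from before `flux_transformer.add_adapter(...)` to after it. The likely reason: diffusers' add_adapter delegates to PEFT's adapter injection, which marks only the adapter (LoRA) parameters as trainable and therefore resets any requires_grad=True flags that were set on the base model's norm layers beforehand. Below is a minimal, self-contained sketch of that ordering effect; the toy TinyBlock module, the NORM_LAYER_PREFIXES stand-in, and the direct use of peft.inject_adapter_in_model in place of the real Flux transformer and add_adapter are illustrative assumptions, not the script's code.

    # Sketch of the ordering bug, reproduced with a toy module and PEFT directly.
    # Assumes (as in current PEFT releases) that adapter injection marks only
    # adapter params as trainable, freezing everything else.
    import torch
    from peft import LoraConfig, inject_adapter_in_model

    NORM_LAYER_PREFIXES = ["norm_q", "norm_k"]  # stand-in for the script's constant

    class TinyBlock(torch.nn.Module):
        """Toy stand-in for the transformer: one LoRA-targeted linear plus a norm."""
        def __init__(self):
            super().__init__()
            self.to_q = torch.nn.Linear(8, 8)
            self.norm_q = torch.nn.LayerNorm(8)

    def enable_norms(model):
        # Same pattern as the training script's train_norm_layers block.
        for name, param in model.named_parameters():
            if any(k in name for k in NORM_LAYER_PREFIXES):
                param.requires_grad = True

    lora_config = LoraConfig(r=4, lora_alpha=4, target_modules=["to_q"])

    # Old ordering: enable norms first, then inject the adapter.
    buggy = TinyBlock()
    enable_norms(buggy)
    buggy = inject_adapter_in_model(lora_config, buggy)
    print(buggy.norm_q.weight.requires_grad)  # False: injection froze the norm again

    # New ordering (this commit): inject the adapter first, then enable norms.
    fixed = TinyBlock()
    fixed = inject_adapter_in_model(lora_config, fixed)
    enable_norms(fixed)
    print(fixed.norm_q.weight.requires_grad)  # True: norm layers will train

If that behavior holds, enabling the norm flags only after add_adapter is what keeps the norm layers trainable, which is exactly the reordering this commit makes in the real script.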