OpenDAS / text-generation-inference · Commits · 133015f4
"src/git@developer.sourcefind.cn:renzhc/diffusers_dcu.git" did not exist on "01c056f09441a8670d0a88f24e2d4fb4a2956ae8"
Unverified commit 133015f4, authored Aug 06, 2024 by drbh, committed by GitHub on Aug 06, 2024

fix: prefer original layernorm names for 180B (#2365)
parent a64d407d
Showing 1 changed file with 10 additions and 1 deletion (+10 -1)

server/text_generation_server/models/custom_modeling/flash_rw_modeling.py (view file @ 133015f4)
@@ -382,8 +382,13 @@ class FlashRWLayer(nn.Module):
         prefix = f"{prefix}.h.{layer_id}"
 
+        # NOTE: Falcon 180B uses the ln_attn prefix
+        ln_prefix = "input_layernorm"
+        if config.num_hidden_layers == 80:
+            ln_prefix = "ln_attn"
+
         self.input_layernorm = FastLayerNorm.load(
-            prefix=f"{prefix}.input_layernorm",
+            prefix=f"{prefix}.{ln_prefix}",
             weights=weights,
             eps=config.layer_norm_epsilon,
         )
 
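For reference, a minimal standalone sketch of the prefix selection introduced in the hunk above; the resolve_ln_prefix helper is illustrative only and does not exist in the repository.

def resolve_ln_prefix(num_hidden_layers: int) -> str:
    # Falcon 180B checkpoints (80 hidden layers) store the first layer norm
    # under "ln_attn"; other Falcon checkpoints use "input_layernorm".
    if num_hidden_layers == 80:
        return "ln_attn"
    return "input_layernorm"

# Hypothetical usage mirroring the change in FlashRWLayer.__init__:
assert resolve_ln_prefix(80) == "ln_attn"          # Falcon 180B
assert resolve_ln_prefix(32) == "input_layernorm"  # smaller Falcon configs
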
@@ -477,6 +482,10 @@ class FlashRWLayerNorm(nn.Module):
         # in the case no number of layer norms is provided, we default to 1
         self.num_ln = getattr(config, "num_ln_in_parallel_attn", 1)
 
+        # Falcon 180B uses the ln_attn prefix and has 2 layer norms
+        if config.num_hidden_layers == 80:
+            self.num_ln = 2
+
         if self.num_ln == 1:
             self.input_ln = FastLayerNorm.load(
                 prefix=f"{prefix}.input_layernorm",
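Similarly, a minimal sketch of how the number of layer norms is resolved after this change; resolve_num_ln and the SimpleNamespace config stand-in are illustrative, not part of the commit.

from types import SimpleNamespace

def resolve_num_ln(config) -> int:
    # Default to a single layer norm when the config does not declare it.
    num_ln = getattr(config, "num_ln_in_parallel_attn", 1)
    # Falcon 180B (80 hidden layers) uses two layer norms per block.
    if config.num_hidden_layers == 80:
        num_ln = 2
    return num_ln

assert resolve_num_ln(SimpleNamespace(num_hidden_layers=80)) == 2
assert resolve_num_ln(SimpleNamespace(num_hidden_layers=32)) == 1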