chenpangpang / ComfyUI · Commits · ff63893d

Commit ff63893d, authored Jul 06, 2024 by comfyanonymous (parent 40404911)

Support other types of T5 models.
Showing 1 changed file with 3 additions and 3 deletions: comfy/t5.py (+3 −3)
comfy/t5.py (view file @ ff63893d)

@@ -190,11 +190,11 @@ class T5Block(torch.nn.Module):
         return x, past_bias

 class T5Stack(torch.nn.Module):
-    def __init__(self, num_layers, model_dim, inner_dim, ff_dim, ff_activation, gated_act, num_heads, dtype, device, operations):
+    def __init__(self, num_layers, model_dim, inner_dim, ff_dim, ff_activation, gated_act, num_heads, relative_attention, dtype, device, operations):
         super().__init__()

         self.block = torch.nn.ModuleList(
-            [T5Block(model_dim, inner_dim, ff_dim, ff_activation, gated_act, num_heads, relative_attention_bias=(i == 0), dtype=dtype, device=device, operations=operations) for i in range(num_layers)]
+            [T5Block(model_dim, inner_dim, ff_dim, ff_activation, gated_act, num_heads, relative_attention_bias=((not relative_attention) or (i == 0)), dtype=dtype, device=device, operations=operations) for i in range(num_layers)]
         )
         self.final_layer_norm = T5LayerNorm(model_dim, dtype=dtype, device=device, operations=operations)
         # self.dropout = nn.Dropout(config.dropout_rate)
@@ -223,7 +223,7 @@ class T5(torch.nn.Module):
         self.num_layers = config_dict["num_layers"]
         model_dim = config_dict["d_model"]

-        self.encoder = T5Stack(self.num_layers, model_dim, model_dim, config_dict["d_ff"], config_dict["dense_act_fn"], config_dict["is_gated_act"], config_dict["num_heads"], dtype, device, operations)
+        self.encoder = T5Stack(self.num_layers, model_dim, model_dim, config_dict["d_ff"], config_dict["dense_act_fn"], config_dict["is_gated_act"], config_dict["num_heads"], config_dict["model_type"] == "t5", dtype, device, operations)
         self.dtype = dtype
         self.shared = torch.nn.Embedding(config_dict["vocab_size"], model_dim, device=device)
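Net effect of the change: T5Stack gains a relative_attention flag, which T5.__init__ derives from config_dict["model_type"] == "t5". For classic T5 only block 0 allocates the relative attention bias table (later blocks reuse it via past_bias); for any other model_type every block gets its own table, which is presumably what umT5-style configs need, though the commit itself only branches on the string. A minimal sketch of that selection logic, with a hypothetical helper name that is not part of the commit:

# Hypothetical helper (not in the commit) showing which blocks own a
# relative-attention-bias table under the new flag, assuming the
# semantics of the diff above.
def blocks_with_relative_bias(model_type, num_layers):
    relative_attention = model_type == "t5"  # as computed in T5.__init__
    # "t5": bias only in block 0; any other type: bias in every block.
    return [(not relative_attention) or (i == 0) for i in range(num_layers)]

print(blocks_with_relative_bias("t5", 4))    # [True, False, False, False]
print(blocks_with_relative_bias("umt5", 4))  # [True, True, True, True]

Because the flag defaults to the old behavior whenever model_type is "t5", existing T5 checkpoints load exactly as before; only non-"t5" configs take the new per-block path.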