chenpangpang / transformers / Commits / b8aee2e9

Unverified commit b8aee2e9, authored May 15, 2024 by David, committed by GitHub on May 15, 2024
Remove unused module DETR based models (#30823)
* removing heads for classification from DETR models
* quality fix
Parent: be3aa43e
Showing 5 changed files with 0 additions and 94 deletions:

* src/transformers/models/conditional_detr/modeling_conditional_detr.py (+0, -19)
* src/transformers/models/deformable_detr/modeling_deformable_detr.py (+0, -19)
* src/transformers/models/deta/modeling_deta.py (+0, -19)
* src/transformers/models/detr/modeling_detr.py (+0, -18)
* src/transformers/models/table_transformer/modeling_table_transformer.py (+0, -19)
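All five deletions are the same classification head. Only DetrClassificationHead in modeling_detr.py is an original definition; the other four are mechanical copies tracked by the repository's "# Copied from" markers, which transformers enforces with its copy-consistency check (utils/check_copies.py), so removing the source class means removing every copy in the same commit. The marker looks like this; the comment line is verbatim from the first diff below, while the stub body is an illustration:

from torch import nn

# A class annotated with a "# Copied from" marker must stay identical to its
# source class after the stated renaming, so deleting the source class forces
# deleting all of the generated copies as well.
# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead with Detr->ConditionalDetr
class ConditionalDetrClassificationHead(nn.Module):
    """Head for sentence-level classification tasks."""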
src/transformers/models/conditional_detr/modeling_conditional_detr.py
@@ -1091,25 +1091,6 @@ class ConditionalDetrDecoderLayer(nn.Module):
         return outputs


-# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead with Detr->ConditionalDetr
-class ConditionalDetrClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 # Copied from transformers.models.detr.modeling_detr.DetrMLPPredictionHead with DetrMLPPredictionHead->MLP
 class MLP(nn.Module):
     """
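As a self-contained sketch, the deleted head can be re-created and exercised in isolation; the shapes and hyperparameters below (hidden size 256, 100 queries, 91 classes, dropout 0.1) are illustrative assumptions, not values from the commit:

import torch
from torch import nn


# Re-creation of the class removed above, for illustration only; it no
# longer exists in transformers after this commit.
class ConditionalDetrClassificationHead(nn.Module):
    """Head for sentence-level classification tasks."""

    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
        super().__init__()
        self.dense = nn.Linear(input_dim, inner_dim)
        self.dropout = nn.Dropout(p=pooler_dropout)
        self.out_proj = nn.Linear(inner_dim, num_classes)

    def forward(self, hidden_states: torch.Tensor):
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.dense(hidden_states)      # project to inner_dim
        hidden_states = torch.tanh(hidden_states)      # tanh pooler nonlinearity
        hidden_states = self.dropout(hidden_states)
        hidden_states = self.out_proj(hidden_states)   # map to class logits
        return hidden_states


# Hypothetical usage: batch of 2, 100 object queries, hidden size 256, 91 classes.
head = ConditionalDetrClassificationHead(input_dim=256, inner_dim=256, num_classes=91, pooler_dropout=0.1)
logits = head(torch.randn(2, 100, 256))  # -> shape (2, 100, 91)

Per the commit title these heads were unused, and the 0-additions stat is consistent with that: no call sites elsewhere in the modeling files needed updating.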
src/transformers/models/deformable_detr/modeling_deformable_detr.py
@@ -1066,25 +1066,6 @@ class DeformableDetrDecoderLayer(nn.Module):
         return outputs


-# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead
-class DeformableDetrClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 class DeformableDetrPreTrainedModel(PreTrainedModel):
     config_class = DeformableDetrConfig
     base_model_prefix = "model"
src/transformers/models/deta/modeling_deta.py
@@ -1032,25 +1032,6 @@ class DetaDecoderLayer(nn.Module):
         return outputs


-# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead
-class DetaClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 class DetaPreTrainedModel(PreTrainedModel):
     config_class = DetaConfig
     base_model_prefix = "model"
src/transformers/models/detr/modeling_detr.py
@@ -875,24 +875,6 @@ class DetrDecoderLayer(nn.Module):
         return outputs


-class DetrClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 class DetrPreTrainedModel(PreTrainedModel):
     config_class = DetrConfig
     base_model_prefix = "model"
src/transformers/models/table_transformer/modeling_table_transformer.py
@@ -782,25 +782,6 @@ class TableTransformerDecoderLayer(nn.Module):
         return outputs


-# Copied from transformers.models.detr.modeling_detr.DetrClassificationHead with Detr->TableTransformer
-class TableTransformerClassificationHead(nn.Module):
-    """Head for sentence-level classification tasks."""
-
-    def __init__(self, input_dim: int, inner_dim: int, num_classes: int, pooler_dropout: float):
-        super().__init__()
-        self.dense = nn.Linear(input_dim, inner_dim)
-        self.dropout = nn.Dropout(p=pooler_dropout)
-        self.out_proj = nn.Linear(inner_dim, num_classes)
-
-    def forward(self, hidden_states: torch.Tensor):
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.dense(hidden_states)
-        hidden_states = torch.tanh(hidden_states)
-        hidden_states = self.dropout(hidden_states)
-        hidden_states = self.out_proj(hidden_states)
-        return hidden_states
-
-
 class TableTransformerPreTrainedModel(PreTrainedModel):
     config_class = TableTransformerConfig
     base_model_prefix = "model"