Unverified commit 389f6615, authored Apr 07, 2022 by Sylvain Gugger, committed by GitHub on Apr 07, 2022
Remove parent/child tests in auto model tests (#16653)
parent 080e42d0
Showing 3 changed files with 0 additions and 85 deletions (+0 -85)
tests/auto/test_modeling_auto.py      +0 -37
tests/auto/test_modeling_tf_auto.py   +0 -35
tests/auto/test_tokenization_auto.py  +0 -13
tests/auto/test_modeling_auto.py

@@ -74,12 +74,9 @@ if is_torch_available():
         MODEL_FOR_MASKED_LM_MAPPING,
         MODEL_FOR_PRETRAINING_MAPPING,
         MODEL_FOR_QUESTION_ANSWERING_MAPPING,
-        MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
         MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
-        MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING,
         MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
         MODEL_MAPPING,
-        MODEL_WITH_LM_HEAD_MAPPING,
     )
     from transformers.models.bert.modeling_bert import BERT_PRETRAINED_MODEL_ARCHIVE_LIST
     from transformers.models.gpt2.modeling_gpt2 import GPT2_PRETRAINED_MODEL_ARCHIVE_LIST

@@ -251,40 +248,6 @@ class AutoModelTest(unittest.TestCase):
             model = AutoModel.from_pretrained(tmp_dir)
             self.assertIsInstance(model, FunnelBaseModel)

-    def test_parents_and_children_in_mappings(self):
-        # Test that the children are placed before the parents in the mappings, as the `instanceof` will be triggered
-        # by the parents and will return the wrong configuration type when using auto models
-
-        mappings = (
-            MODEL_MAPPING,
-            MODEL_FOR_PRETRAINING_MAPPING,
-            MODEL_FOR_QUESTION_ANSWERING_MAPPING,
-            MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING,
-            MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
-            MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
-            MODEL_WITH_LM_HEAD_MAPPING,
-            MODEL_FOR_CAUSAL_LM_MAPPING,
-            MODEL_FOR_MASKED_LM_MAPPING,
-            MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
-        )
-
-        for mapping in mappings:
-            mapping = tuple(mapping.items())
-            for index, (child_config, child_model) in enumerate(mapping[1:]):
-                for parent_config, parent_model in mapping[: index + 1]:
-                    assert not issubclass(
-                        child_config, parent_config
-                    ), f"{child_config.__name__} is child of {parent_config.__name__}"
-
-                    # Tuplify child_model and parent_model since some of them could be tuples.
-                    if not isinstance(child_model, (list, tuple)):
-                        child_model = (child_model,)
-                    if not isinstance(parent_model, (list, tuple)):
-                        parent_model = (parent_model,)
-
-                    for child, parent in [(a, b) for a in child_model for b in parent_model]:
-                        assert not issubclass(child, parent), f"{child.__name__} is child of {parent.__name__}"
-
     def test_from_pretrained_dynamic_model_local(self):
         try:
             AutoConfig.register("custom", CustomConfig)
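The deleted test enforced the invariant stated in its own comment: when an ordered mapping is scanned and matched with isinstance/issubclass, a parent config listed before its child shadows the child, and the lookup returns the wrong entry. A minimal sketch of that failure mode, using hypothetical ParentConfig/ChildConfig classes rather than the real transformers mappings:

# Minimal sketch (hypothetical classes, not the transformers mappings) of why
# children had to precede parents in an ordered mapping scanned with isinstance.

class ParentConfig:
    pass


class ChildConfig(ParentConfig):
    pass


# Parent listed before child: the problematic ordering the removed test rejected.
ORDERED_MAPPING = [
    (ParentConfig, "ParentModel"),
    (ChildConfig, "ChildModel"),
]


def lookup_by_isinstance(config):
    # First isinstance match wins, so ParentConfig also catches ChildConfig instances.
    for config_cls, model_name in ORDERED_MAPPING:
        if isinstance(config, config_cls):
            return model_name
    raise KeyError(type(config).__name__)


print(lookup_by_isinstance(ChildConfig()))  # prints "ParentModel", not "ChildModel"

Running the sketch resolves a ChildConfig instance to "ParentModel", which is exactly the mix-up the removed test guarded against.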
tests/auto/test_modeling_tf_auto.py

@@ -58,12 +58,9 @@ if is_tf_available():
         TF_MODEL_FOR_MASKED_LM_MAPPING,
         TF_MODEL_FOR_PRETRAINING_MAPPING,
         TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING,
-        TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
         TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
-        TF_MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING,
         TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
         TF_MODEL_MAPPING,
-        TF_MODEL_WITH_LM_HEAD_MAPPING,
     )
     from transformers.models.bert.modeling_tf_bert import TF_BERT_PRETRAINED_MODEL_ARCHIVE_LIST
     from transformers.models.gpt2.modeling_tf_gpt2 import TF_GPT2_PRETRAINED_MODEL_ARCHIVE_LIST

@@ -218,38 +215,6 @@ class TFAutoModelTest(unittest.TestCase):
             model = TFAutoModel.from_pretrained(tmp_dir)
             self.assertIsInstance(model, TFFunnelBaseModel)

-    def test_parents_and_children_in_mappings(self):
-        # Test that the children are placed before the parents in the mappings, as the `instanceof` will be triggered
-        # by the parents and will return the wrong configuration type when using auto models
-        mappings = (
-            TF_MODEL_MAPPING,
-            TF_MODEL_FOR_PRETRAINING_MAPPING,
-            TF_MODEL_FOR_QUESTION_ANSWERING_MAPPING,
-            TF_MODEL_FOR_TABLE_QUESTION_ANSWERING_MAPPING,
-            TF_MODEL_FOR_SEQUENCE_CLASSIFICATION_MAPPING,
-            TF_MODEL_FOR_TOKEN_CLASSIFICATION_MAPPING,
-            TF_MODEL_WITH_LM_HEAD_MAPPING,
-            TF_MODEL_FOR_CAUSAL_LM_MAPPING,
-            TF_MODEL_FOR_MASKED_LM_MAPPING,
-            TF_MODEL_FOR_SEQ_TO_SEQ_CAUSAL_LM_MAPPING,
-        )
-
-        for mapping in mappings:
-            mapping = tuple(mapping.items())
-            for index, (child_config, child_model) in enumerate(mapping[1:]):
-                for parent_config, parent_model in mapping[: index + 1]:
-                    with self.subTest(
-                        msg=f"Testing if {child_config.__name__} is child of {parent_config.__name__}"
-                    ):
-                        self.assertFalse(issubclass(child_config, parent_config))
-
-                        # Tuplify child_model and parent_model since some of them could be tuples.
-                        if not isinstance(child_model, (list, tuple)):
-                            child_model = (child_model,)
-                        if not isinstance(parent_model, (list, tuple)):
-                            parent_model = (parent_model,)
-
-                        for child, parent in [(a, b) for a in child_model for b in parent_model]:
-                            assert not issubclass(child, parent), f"{child.__name__} is child of {parent.__name__}"
-
     def test_new_model_registration(self):
         try:
             AutoConfig.register("new-model", NewModelConfig)
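Unlike the PyTorch version, the removed TF test wrapped each child/parent comparison in self.subTest, so every offending pair would be reported instead of only the first failing assert. A generic sketch of that pattern, with hypothetical Base/Derived classes standing in for the config classes:

# Generic sketch of the subTest pattern used in the removed TF and tokenizer tests.
# The classes here are hypothetical stand-ins, not the real auto mappings.
import unittest


class Base:
    pass


class Derived(Base):
    pass


class OrderingTest(unittest.TestCase):
    def test_no_entry_is_subclass_of_an_earlier_one(self):
        ordered = (Derived, Base)  # child first, then parent: the required order
        for index, later in enumerate(ordered[1:]):
            for earlier in ordered[: index + 1]:
                # Each pair gets its own reported result instead of aborting the test.
                with self.subTest(msg=f"{later.__name__} vs {earlier.__name__}"):
                    self.assertFalse(issubclass(later, earlier))


if __name__ == "__main__":
    unittest.main()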
tests/auto/test_tokenization_auto.py

@@ -151,19 +151,6 @@ class AutoTokenizerTest(unittest.TestCase):
         ):
             _ = tokenizer_class.from_pretrained("julien-c/herlolip-not-exists")

-    def test_parents_and_children_in_mappings(self):
-        # Test that the children are placed before the parents in the mappings, as the `instanceof` will be triggered
-        # by the parents and will return the wrong configuration type when using auto models
-        mappings = (TOKENIZER_MAPPING,)
-
-        for mapping in mappings:
-            mapping = tuple(mapping.items())
-            for index, (child_config, _) in enumerate(mapping[1:]):
-                for parent_config, _ in mapping[: index + 1]:
-                    with self.subTest(
-                        msg=f"Testing if {child_config.__name__} is child of {parent_config.__name__}"
-                    ):
-                        self.assertFalse(issubclass(child_config, parent_config))
-
     def test_model_name_edge_cases_in_mappings(self):
         # tests: https://github.com/huggingface/transformers/pull/13251
         # 1. models with `-`, e.g. xlm-roberta -> xlm_roberta
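The commit message does not spell out why the ordering checks are safe to drop. A plausible reading, stated here as an assumption, is that the auto mappings are resolved by the exact config class (a dict-style lookup keyed by type(config)) rather than by scanning entries with isinstance, so insertion order and subclass shadowing no longer affect the result. A sketch of that lookup style, again with hypothetical classes:

# Hedged sketch of an exact-type lookup that makes ordering checks unnecessary.
# Illustrative only; not the transformers API.

class ParentConfig:
    pass


class ChildConfig(ParentConfig):
    pass


# Exact-type mapping: inheritance and insertion order no longer influence the result.
EXACT_MAPPING = {
    ParentConfig: "ParentModel",
    ChildConfig: "ChildModel",
}


def lookup_by_type(config):
    # Keyed on type(config), so ChildConfig can never be shadowed by ParentConfig.
    return EXACT_MAPPING[type(config)]


print(lookup_by_type(ChildConfig()))   # "ChildModel"
print(lookup_by_type(ParentConfig()))  # "ParentModel"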