Unverified commit f1e42bc5 authored by Yih-Dar, committed by GitHub
Browse files

Some fixes regarding auto mappings and test class names (#19923)



* Add pegasus_x

* ViTMSN

* ESM
Co-authored-by: ydshieh <ydshieh@users.noreply.github.com>
parent bec78ba1
......@@ -205,6 +205,13 @@ else:
"PegasusTokenizerFast" if is_tokenizers_available() else None,
),
),
(
"pegasus_x",
(
"PegasusTokenizer" if is_sentencepiece_available() else None,
"PegasusTokenizerFast" if is_tokenizers_available() else None,
),
),
(
"perceiver",
(
......
......@@ -21,7 +21,7 @@ from ...utils import logging
logger = logging.get_logger(__name__)
ESM_PRETRAINED_CONFIG_ARCHIVE_MAP = {
"facebook/esm1b": "https://huggingface.co/facebook/esm1b/resolve/main/config.json",
"facebook/esm-1b": "https://huggingface.co/facebook/esm-1b/resolve/main/config.json",
# See all ESM models at https://huggingface.co/models?filter=esm
}
......@@ -31,7 +31,7 @@ class EsmConfig(PretrainedConfig):
This is the configuration class to store the configuration of a [`ESMModel`]. It is used to instantiate a ESM model
according to the specified arguments, defining the model architecture. Instantiating a configuration with the
defaults will yield a similar configuration to that of the ESM
[esm-base-uncased](https://huggingface.co/esm-base-uncased) architecture.
[facebook/esm-1b](https://huggingface.co/facebook/esm-1b) architecture.
Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
documentation from [`PretrainedConfig`] for more information.
......@@ -90,7 +90,7 @@ class EsmConfig(PretrainedConfig):
```python
>>> from transformers import EsmModel, EsmConfig
>>> # Initializing a ESM esm-base-uncased style configuration >>> configuration = EsmConfig()
>>> # Initializing a ESM facebook/esm-1b style configuration >>> configuration = EsmConfig()
>>> # Initializing a model from the configuration >>> model = ESMModel(configuration)
......
......@@ -40,7 +40,7 @@ if is_vision_available():
from transformers import ViTFeatureExtractor
class ViTMAEModelTester:
class ViTMSNModelTester:
def __init__(
self,
parent,
......@@ -146,7 +146,7 @@ class ViTMAEModelTester:
@require_torch
class ViTMSNModelTest(ModelTesterMixin, unittest.TestCase):
"""
Here we also overwrite some of the tests of test_modeling_common.py, as ViTMAE does not use input_ids, inputs_embeds,
Here we also overwrite some of the tests of test_modeling_common.py, as ViTMSN does not use input_ids, inputs_embeds,
attention_mask and seq_length.
"""
......@@ -158,13 +158,13 @@ class ViTMSNModelTest(ModelTesterMixin, unittest.TestCase):
test_head_masking = False
def setUp(self):
self.model_tester = ViTMAEModelTester(self)
self.model_tester = ViTMSNModelTester(self)
self.config_tester = ConfigTester(self, config_class=ViTMSNConfig, has_text_modality=False, hidden_size=37)
def test_config(self):
self.config_tester.run_common_tests()
@unittest.skip(reason="ViTMAE does not use inputs_embeds")
@unittest.skip(reason="ViTMSN does not use inputs_embeds")
def test_inputs_embeds(self):
pass
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.