Unverified commit 61abe329, authored by Jannis Vamvas, committed by GitHub

[WIP] Move X-MOD models to facebook organization (#21640)

Move X-MOD models to facebook org
parent 751f17aa
@@ -38,7 +38,7 @@ There are two ways to specify the input language:
```python
from transformers import XmodModel
model = XmodModel.from_pretrained("jvamvas/xmod-base")
model = XmodModel.from_pretrained("facebook/xmod-base")
model.set_default_language("en_XX")
```
......
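The hunk above shows the first of the two ways (a model-wide default set with `set_default_language`). For context, the second way documented for X-MOD is passing per-sample adapter indices to the forward call. A minimal sketch with the renamed checkpoint, assuming the `lang_ids` forward argument and the `config.languages` list exposed by `XmodModel`:

```python
import torch
from transformers import AutoTokenizer, XmodModel

tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-base")  # X-MOD reuses the XLM-R tokenizer
model = XmodModel.from_pretrained("facebook/xmod-base")

# One adapter index per sample; looking the indices up via config.languages is an assumption.
lang_ids = torch.LongTensor([
    model.config.languages.index("en_XX"),
    model.config.languages.index("de_DE"),
])

inputs = tokenizer(["Hello, world!", "Hallo, Welt!"], padding=True, return_tensors="pt")
outputs = model(**inputs, lang_ids=lang_ids)
print(outputs.last_hidden_state.shape)
```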
@@ -25,15 +25,15 @@ from ...utils import logging
logger = logging.get_logger(__name__)
XMOD_PRETRAINED_CONFIG_ARCHIVE_MAP = {
"jvamvas/xmod-base": "https://huggingface.co/jvamvas/xmod-base/resolve/main/config.json",
"jvamvas/xmod-large-prenorm": "https://huggingface.co/jvamvas/xmod-large-prenorm/resolve/main/config.json",
"jvamvas/xmod-base-13-125k": "https://huggingface.co/jvamvas/xmod-base-13-125k/resolve/main/config.json",
"jvamvas/xmod-base-30-125k": "https://huggingface.co/jvamvas/xmod-base-30-125k/resolve/main/config.json",
"jvamvas/xmod-base-30-195k": "https://huggingface.co/jvamvas/xmod-base-30-195k/resolve/main/config.json",
"jvamvas/xmod-base-60-125k": "https://huggingface.co/jvamvas/xmod-base-60-125k/resolve/main/config.json",
"jvamvas/xmod-base-60-265k": "https://huggingface.co/jvamvas/xmod-base-60-265k/resolve/main/config.json",
"jvamvas/xmod-base-75-125k": "https://huggingface.co/jvamvas/xmod-base-75-125k/resolve/main/config.json",
"jvamvas/xmod-base-75-269k": "https://huggingface.co/jvamvas/xmod-base-75-269k/resolve/main/config.json",
"facebook/xmod-base": "https://huggingface.co/facebook/xmod-base/resolve/main/config.json",
"facebook/xmod-large-prenorm": "https://huggingface.co/facebook/xmod-large-prenorm/resolve/main/config.json",
"facebook/xmod-base-13-125k": "https://huggingface.co/facebook/xmod-base-13-125k/resolve/main/config.json",
"facebook/xmod-base-30-125k": "https://huggingface.co/facebook/xmod-base-30-125k/resolve/main/config.json",
"facebook/xmod-base-30-195k": "https://huggingface.co/facebook/xmod-base-30-195k/resolve/main/config.json",
"facebook/xmod-base-60-125k": "https://huggingface.co/facebook/xmod-base-60-125k/resolve/main/config.json",
"facebook/xmod-base-60-265k": "https://huggingface.co/facebook/xmod-base-60-265k/resolve/main/config.json",
"facebook/xmod-base-75-125k": "https://huggingface.co/facebook/xmod-base-75-125k/resolve/main/config.json",
"facebook/xmod-base-75-269k": "https://huggingface.co/facebook/xmod-base-75-269k/resolve/main/config.json",
}
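With the archive map now pointing at the facebook namespace, a quick sanity check is to load the configuration from the new location. A minimal sketch; it assumes Hub access and relies only on standard `PretrainedConfig` attributes:

```python
from transformers import XmodConfig

config = XmodConfig.from_pretrained("facebook/xmod-base")
print(config.model_type)       # "xmod"
print(len(config.languages))   # number of language adapters defined for this checkpoint
```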
@@ -41,7 +41,7 @@ class XmodConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`XmodModel`]. It is used to instantiate an X-MOD
model according to the specified arguments, defining the model architecture. Instantiating a configuration with the
-defaults will yield a similar configuration to that of the [xmod-base](https://huggingface.co/jvamvas/xmod-base)
+defaults will yield a similar configuration to that of the [xmod-base](https://huggingface.co/facebook/xmod-base)
architecture.
Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
@@ -110,10 +110,10 @@ class XmodConfig(PretrainedConfig):
```python
>>> from transformers import XmodConfig, XmodModel
->>> # Initializing an X-MOD jvamvas/xmod-base style configuration
+>>> # Initializing an X-MOD facebook/xmod-base style configuration
>>> configuration = XmodConfig()
->>> # Initializing a model (with random weights) from the jvamvas/xmod-base style configuration
+>>> # Initializing a model (with random weights) from the facebook/xmod-base style configuration
>>> model = XmodModel(configuration)
>>> # Accessing the model configuration
......
@@ -42,15 +42,15 @@ from .configuration_xmod import XmodConfig
logger = logging.get_logger(__name__)
XMOD_PRETRAINED_MODEL_ARCHIVE_LIST = [
"jvamvas/xmod-base",
"jvamvas/xmod-large-prenorm",
"jvamvas/xmod-base-13-125k",
"jvamvas/xmod-base-30-125k",
"jvamvas/xmod-base-30-195k",
"jvamvas/xmod-base-60-125k",
"jvamvas/xmod-base-60-265k",
"jvamvas/xmod-base-75-125k",
"jvamvas/xmod-base-75-269k",
"facebook/xmod-base",
"facebook/xmod-large-prenorm",
"facebook/xmod-base-13-125k",
"facebook/xmod-base-30-125k",
"facebook/xmod-base-30-195k",
"facebook/xmod-base-60-125k",
"facebook/xmod-base-60-265k",
"facebook/xmod-base-75-125k",
"facebook/xmod-base-75-269k",
# See all X-MOD models at https://huggingface.co/models?filter=xmod
]
@@ -1069,9 +1069,9 @@ class XmodForCausalLM(XmodPreTrainedModel):
>>> import torch
>>> tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-base")
>>> config = AutoConfig.from_pretrained("jvamvas/xmod-base")
>>> config = AutoConfig.from_pretrained("facebook/xmod-base")
>>> config.is_decoder = True
>>> model = XmodForCausalLM.from_pretrained("jvamvas/xmod-base", config=config)
>>> model = XmodForCausalLM.from_pretrained("facebook/xmod-base", config=config)
>>> model.set_default_language("en_XX")
>>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
......
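The docstring example above is truncated in this hunk; a rough, self-contained variant using the renamed checkpoint is sketched below. The forward pass and logits handling are assumptions based on the standard Transformers causal-LM API, not part of this diff:

```python
import torch
from transformers import AutoConfig, AutoTokenizer, XmodForCausalLM

tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-base")
config = AutoConfig.from_pretrained("facebook/xmod-base")
config.is_decoder = True  # use X-MOD as a standalone decoder
model = XmodForCausalLM.from_pretrained("facebook/xmod-base", config=config)
model.set_default_language("en_XX")

inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)
print(outputs.logits.shape)  # (batch_size, sequence_length, vocab_size)
```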
@@ -512,7 +512,7 @@ class XmodModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
class XmodModelIntegrationTest(unittest.TestCase):
@slow
def test_xmod_base(self):
model = XmodModel.from_pretrained("jvamvas/xmod-base")
model = XmodModel.from_pretrained("facebook/xmod-base")
# language en_XX
model.set_default_language("en_XX")
@@ -545,7 +545,7 @@ class XmodModelIntegrationTest(unittest.TestCase):
@slow
def test_xmod_large_prenorm(self):
model = XmodModel.from_pretrained("jvamvas/xmod-large-prenorm")
model = XmodModel.from_pretrained("facebook/xmod-large-prenorm")
# language en_XX
model.set_default_language("en_XX")
@@ -581,7 +581,7 @@ class XmodModelIntegrationTest(unittest.TestCase):
@slow
def test_multilingual_batch(self):
model = XmodModel.from_pretrained("jvamvas/xmod-base")
model = XmodModel.from_pretrained("facebook/xmod-base")
# fmt: off
input_ids = torch.tensor([
[0, 581, 10269, 83, 99942, 136, 60742, 23, 70, 80583, 18276, 2],
@@ -608,7 +608,7 @@ class XmodModelIntegrationTest(unittest.TestCase):
@slow
def test_end_to_end_mask_fill(self):
tokenizer = XLMRobertaTokenizer.from_pretrained("xlm-roberta-base")
model = XmodForMaskedLM.from_pretrained("jvamvas/xmod-base", default_language="en_XX")
model = XmodForMaskedLM.from_pretrained("facebook/xmod-base", default_language="en_XX")
model.to(torch_device)
sentences = [
......
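The last hunk updates the end-to-end mask-fill integration test. A rough standalone sketch of that flow with the renamed checkpoint follows; the mask-token handling below is an assumption based on the usual fill-mask recipe and is not copied from the test:

```python
import torch
from transformers import XLMRobertaTokenizer, XmodForMaskedLM

tokenizer = XLMRobertaTokenizer.from_pretrained("xlm-roberta-base")
model = XmodForMaskedLM.from_pretrained("facebook/xmod-base", default_language="en_XX")

sentence = f"Paris is the {tokenizer.mask_token} of France."
inputs = tokenizer(sentence, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# Take the highest-scoring token at the masked position.
mask_index = (inputs.input_ids == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
predicted_id = logits[0, mask_index].argmax(dim=-1)
print(tokenizer.decode(predicted_id))
```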