chenpangpang / transformers

Unverified commit 61abe329, authored Feb 16, 2023 by Jannis Vamvas, committed via GitHub on Feb 16, 2023
Parent: 751f17aa

[WIP] Move X-MOD models to facebook organization (#21640)

Move X-MOD models to facebook org

Showing 4 changed files with 28 additions and 28 deletions.
docs/source/en/model_doc/xmod.mdx (+1 / -1)
src/transformers/models/xmod/configuration_xmod.py (+12 / -12)
src/transformers/models/xmod/modeling_xmod.py (+11 / -11)
tests/models/xmod/test_modeling_xmod.py (+4 / -4)
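For downstream code that still pins the old repository names, the full set of renames in this commit can be summarized as a lookup table. The sketch below is derived directly from the diffs that follow; the `XMOD_CHECKPOINT_RENAMES` and `updated_checkpoint` names are illustrative, not part of the library, and no claim is made about whether the old `jvamvas/*` locations redirect.

```python
# Old -> new Hub repo IDs renamed in this commit (taken from the diffs below).
XMOD_CHECKPOINT_RENAMES = {
    "jvamvas/xmod-base": "facebook/xmod-base",
    "jvamvas/xmod-large-prenorm": "facebook/xmod-large-prenorm",
    "jvamvas/xmod-base-13-125k": "facebook/xmod-base-13-125k",
    "jvamvas/xmod-base-30-125k": "facebook/xmod-base-30-125k",
    "jvamvas/xmod-base-30-195k": "facebook/xmod-base-30-195k",
    "jvamvas/xmod-base-60-125k": "facebook/xmod-base-60-125k",
    "jvamvas/xmod-base-60-265k": "facebook/xmod-base-60-265k",
    "jvamvas/xmod-base-75-125k": "facebook/xmod-base-75-125k",
    "jvamvas/xmod-base-75-269k": "facebook/xmod-base-75-269k",
}


def updated_checkpoint(repo_id: str) -> str:
    """Return the post-rename repo ID, leaving unknown IDs untouched."""
    return XMOD_CHECKPOINT_RENAMES.get(repo_id, repo_id)
```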
docs/source/en/model_doc/xmod.mdx

@@ -38,7 +38,7 @@ There are two ways to specify the input language:
 ```python
 from transformers import XmodModel
-model = XmodModel.from_pretrained("jvamvas/xmod-base")
+model = XmodModel.from_pretrained("facebook/xmod-base")
 model.set_default_language("en_XX")
 ```
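The documented call pattern after the rename can be exercised end to end with a short script. A minimal sketch, assuming Hub access to `facebook/xmod-base` and pairing it with the `xlm-roberta-base` tokenizer as the other examples in this commit do; the sample sentence is illustrative only.

```python
# Minimal sketch of the documented usage after the move to the facebook org.
# Assumes network access to the Hugging Face Hub checkpoints named in this commit.
from transformers import AutoTokenizer, XmodModel

tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-base")
model = XmodModel.from_pretrained("facebook/xmod-base")

# Set a default language once, then run a normal forward pass.
model.set_default_language("en_XX")
inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
outputs = model(**inputs)
print(outputs.last_hidden_state.shape)  # (batch_size, sequence_length, hidden_size)
```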
src/transformers/models/xmod/configuration_xmod.py

@@ -25,15 +25,15 @@ from ...utils import logging
 logger = logging.get_logger(__name__)
 XMOD_PRETRAINED_CONFIG_ARCHIVE_MAP = {
-    "jvamvas/xmod-base": "https://huggingface.co/jvamvas/xmod-base/resolve/main/config.json",
+    "facebook/xmod-base": "https://huggingface.co/facebook/xmod-base/resolve/main/config.json",
-    "jvamvas/xmod-large-prenorm": "https://huggingface.co/jvamvas/xmod-large-prenorm/resolve/main/config.json",
+    "facebook/xmod-large-prenorm": "https://huggingface.co/facebook/xmod-large-prenorm/resolve/main/config.json",
-    "jvamvas/xmod-base-13-125k": "https://huggingface.co/jvamvas/xmod-base-13-125k/resolve/main/config.json",
+    "facebook/xmod-base-13-125k": "https://huggingface.co/facebook/xmod-base-13-125k/resolve/main/config.json",
-    "jvamvas/xmod-base-30-125k": "https://huggingface.co/jvamvas/xmod-base-30-125k/resolve/main/config.json",
+    "facebook/xmod-base-30-125k": "https://huggingface.co/facebook/xmod-base-30-125k/resolve/main/config.json",
-    "jvamvas/xmod-base-30-195k": "https://huggingface.co/jvamvas/xmod-base-30-195k/resolve/main/config.json",
+    "facebook/xmod-base-30-195k": "https://huggingface.co/facebook/xmod-base-30-195k/resolve/main/config.json",
-    "jvamvas/xmod-base-60-125k": "https://huggingface.co/jvamvas/xmod-base-60-125k/resolve/main/config.json",
+    "facebook/xmod-base-60-125k": "https://huggingface.co/facebook/xmod-base-60-125k/resolve/main/config.json",
-    "jvamvas/xmod-base-60-265k": "https://huggingface.co/jvamvas/xmod-base-60-265k/resolve/main/config.json",
+    "facebook/xmod-base-60-265k": "https://huggingface.co/facebook/xmod-base-60-265k/resolve/main/config.json",
-    "jvamvas/xmod-base-75-125k": "https://huggingface.co/jvamvas/xmod-base-75-125k/resolve/main/config.json",
+    "facebook/xmod-base-75-125k": "https://huggingface.co/facebook/xmod-base-75-125k/resolve/main/config.json",
-    "jvamvas/xmod-base-75-269k": "https://huggingface.co/jvamvas/xmod-base-75-269k/resolve/main/config.json",
+    "facebook/xmod-base-75-269k": "https://huggingface.co/facebook/xmod-base-75-269k/resolve/main/config.json",
 }

@@ -41,7 +41,7 @@ class XmodConfig(PretrainedConfig):
     r"""
     This is the configuration class to store the configuration of a [`XmodModel`]. It is used to instantiate an X-MOD
     model according to the specified arguments, defining the model architecture. Instantiating a configuration with the
-    defaults will yield a similar configuration to that of the [xmod-base](https://huggingface.co/jvamvas/xmod-base)
+    defaults will yield a similar configuration to that of the [xmod-base](https://huggingface.co/facebook/xmod-base)
     architecture.
     Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the

@@ -110,10 +110,10 @@ class XmodConfig(PretrainedConfig):
     ```python
     >>> from transformers import XmodConfig, XmodModel
-    >>> # Initializing an X-MOD jvamvas/xmod-base style configuration
+    >>> # Initializing an X-MOD facebook/xmod-base style configuration
     >>> configuration = XmodConfig()
-    >>> # Initializing a model (with random weights) from the jvamvas/xmod-base style configuration
+    >>> # Initializing a model (with random weights) from the facebook/xmod-base style configuration
     >>> model = XmodModel(configuration)
     >>> # Accessing the model configuration
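The updated docstring example runs without downloading any checkpoint, since it builds a model with random weights from the default configuration. A minimal sketch of that flow, assuming a transformers version that ships the X-MOD classes:

```python
# Sketch of the XmodConfig docstring example from this diff; no Hub download is
# needed because the model is initialized with random weights from the defaults.
from transformers import XmodConfig, XmodModel

# Initializing an X-MOD facebook/xmod-base style configuration
configuration = XmodConfig()

# Initializing a model (with random weights) from that configuration
model = XmodModel(configuration)

# Accessing the model configuration
configuration = model.config
print(configuration.hidden_size, configuration.num_hidden_layers)
```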
src/transformers/models/xmod/modeling_xmod.py

@@ -42,15 +42,15 @@ from .configuration_xmod import XmodConfig
 logger = logging.get_logger(__name__)
 XMOD_PRETRAINED_MODEL_ARCHIVE_LIST = [
-    "jvamvas/xmod-base",
+    "facebook/xmod-base",
-    "jvamvas/xmod-large-prenorm",
+    "facebook/xmod-large-prenorm",
-    "jvamvas/xmod-base-13-125k",
+    "facebook/xmod-base-13-125k",
-    "jvamvas/xmod-base-30-125k",
+    "facebook/xmod-base-30-125k",
-    "jvamvas/xmod-base-30-195k",
+    "facebook/xmod-base-30-195k",
-    "jvamvas/xmod-base-60-125k",
+    "facebook/xmod-base-60-125k",
-    "jvamvas/xmod-base-60-265k",
+    "facebook/xmod-base-60-265k",
-    "jvamvas/xmod-base-75-125k",
+    "facebook/xmod-base-75-125k",
-    "jvamvas/xmod-base-75-269k",
+    "facebook/xmod-base-75-269k",
     # See all X-MOD models at https://huggingface.co/models?filter=xmod
 ]

@@ -1069,9 +1069,9 @@ class XmodForCausalLM(XmodPreTrainedModel):
     >>> import torch
     >>> tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-base")
-    >>> config = AutoConfig.from_pretrained("jvamvas/xmod-base")
+    >>> config = AutoConfig.from_pretrained("facebook/xmod-base")
     >>> config.is_decoder = True
-    >>> model = XmodForCausalLM.from_pretrained("jvamvas/xmod-base", config=config)
+    >>> model = XmodForCausalLM.from_pretrained("facebook/xmod-base", config=config)
     >>> model.set_default_language("en_XX")
     >>> inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
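The causal-LM docstring snippet stops after building the inputs; the sketch below completes the same flow and only inspects the shape of the next-token logits. It assumes Hub access to `facebook/xmod-base` and does not assert any particular output.

```python
# Sketch completing the XmodForCausalLM docstring flow from this diff.
# Assumes the facebook/xmod-base checkpoint is reachable on the Hugging Face Hub.
import torch
from transformers import AutoConfig, AutoTokenizer, XmodForCausalLM

tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-base")
config = AutoConfig.from_pretrained("facebook/xmod-base")
config.is_decoder = True  # run the model as a decoder with a causal-LM head
model = XmodForCausalLM.from_pretrained("facebook/xmod-base", config=config)
model.set_default_language("en_XX")

inputs = tokenizer("Hello, my dog is cute", return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# Next-token logits over the vocabulary for each input position.
print(outputs.logits.shape)  # (batch_size, sequence_length, vocab_size)
```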
tests/models/xmod/test_modeling_xmod.py

@@ -512,7 +512,7 @@ class XmodModelTest(ModelTesterMixin, GenerationTesterMixin, unittest.TestCase):
 class XmodModelIntegrationTest(unittest.TestCase):
     @slow
     def test_xmod_base(self):
-        model = XmodModel.from_pretrained("jvamvas/xmod-base")
+        model = XmodModel.from_pretrained("facebook/xmod-base")
         # language en_XX
         model.set_default_language("en_XX")

@@ -545,7 +545,7 @@ class XmodModelIntegrationTest(unittest.TestCase):
     @slow
     def test_xmod_large_prenorm(self):
-        model = XmodModel.from_pretrained("jvamvas/xmod-large-prenorm")
+        model = XmodModel.from_pretrained("facebook/xmod-large-prenorm")
         # language en_XX
         model.set_default_language("en_XX")

@@ -581,7 +581,7 @@ class XmodModelIntegrationTest(unittest.TestCase):
     @slow
     def test_multilingual_batch(self):
-        model = XmodModel.from_pretrained("jvamvas/xmod-base")
+        model = XmodModel.from_pretrained("facebook/xmod-base")
         # fmt: off
         input_ids = torch.tensor([
             [0, 581, 10269, 83, 99942, 136, 60742, 23, 70, 80583, 18276, 2],
             [0, 581, 10269, 83, 99942, 136, 60742, 23, 70, 80583, 18276, 2],

@@ -608,7 +608,7 @@ class XmodModelIntegrationTest(unittest.TestCase):
     @slow
     def test_end_to_end_mask_fill(self):
         tokenizer = XLMRobertaTokenizer.from_pretrained("xlm-roberta-base")
-        model = XmodForMaskedLM.from_pretrained("jvamvas/xmod-base", default_language="en_XX")
+        model = XmodForMaskedLM.from_pretrained("facebook/xmod-base", default_language="en_XX")
         model.to(torch_device)
         sentences = [
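Outside the test suite, the path exercised by `test_end_to_end_mask_fill` can be reproduced in a few lines. A hedged sketch, assuming Hub access to `facebook/xmod-base` and using the XLM-R tokenizer's `<mask>` token; the example sentence and decoding logic are illustrative and not taken from the test.

```python
# Sketch of an end-to-end mask fill with the renamed checkpoint, mirroring the
# updated integration test above. Assumes Hub access to facebook/xmod-base.
import torch
from transformers import XLMRobertaTokenizer, XmodForMaskedLM

tokenizer = XLMRobertaTokenizer.from_pretrained("xlm-roberta-base")
model = XmodForMaskedLM.from_pretrained("facebook/xmod-base", default_language="en_XX")
model.eval()

inputs = tokenizer(f"Paris is the {tokenizer.mask_token} of France.", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# Pick the highest-scoring token at the masked position and decode it.
mask_index = (inputs.input_ids == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
predicted_id = logits[0, mask_index].argmax(dim=-1)
print(tokenizer.decode(predicted_id))
```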