Unverified Commit c89bdfbe authored by Sylvain Gugger, committed by GitHub

Reorganize repo (#8580)

* Put models in subfolders

* Styling

* Fix imports in tests

* More fixes in test imports

* Sneaky hidden imports

* Fix imports in doc files

* More sneaky imports

* Finish fixing tests

* Fix examples

* Fix path for copies

* More fixes for examples

* Fix dummy files

* More fixes for example

* More model import fixes

* Is this why you're unhappy GitHub?

* Fix imports in convert command
parent 90150733
@@ -48,9 +48,7 @@ Evaluated on the SQuAD 2.0 dev set with the [official eval script](https://works
 ### In Transformers
 ```python
-from transformers.pipelines import pipeline
-from transformers.modeling_auto import AutoModelForQuestionAnswering
-from transformers.tokenization_auto import AutoTokenizer
+from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
 model_name = "deepset/minilm-uncased-squad2"
...
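The same three-line import block collapses to a single top-level import in every deepset model card below. For readers updating their own snippets, here is a minimal sketch of how such a card's example presumably continues (the question and context strings are illustrative, not taken from the diff):

```python
from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline

model_name = "deepset/minilm-uncased-squad2"

# a) High-level pipeline usage
nlp = pipeline("question-answering", model=model_name, tokenizer=model_name)
res = nlp(
    question="Why were the imports changed?",  # illustrative input
    context="The repository was reorganized to put each model in its own subfolder.",
)

# b) Loading model and tokenizer directly with the new flat imports
model = AutoModelForQuestionAnswering.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
```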
@@ -39,9 +39,8 @@ This model is the model obtained from the **third** fold of the cross-validation
 ### In Transformers
 ```python
-from transformers.pipelines import pipeline
-from transformers.modeling_auto import AutoModelForQuestionAnswering
-from transformers.tokenization_auto import AutoTokenizer
+from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
 model_name = "deepset/roberta-base-squad2-covid"
...
@@ -48,9 +48,7 @@ Evaluated on the SQuAD 2.0 dev set with the [official eval script](https://works
 ### In Transformers
 ```python
-from transformers.pipelines import pipeline
-from transformers.modeling_auto import AutoModelForQuestionAnswering
-from transformers.tokenization_auto import AutoTokenizer
+from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
 model_name = "deepset/roberta-base-squad2-v2"
...
@@ -54,9 +54,7 @@ Evaluated on the SQuAD 2.0 dev set with the [official eval script](https://works
 ### In Transformers
 ```python
-from transformers.pipelines import pipeline
-from transformers.modeling_auto import AutoModelForQuestionAnswering
-from transformers.tokenization_auto import AutoTokenizer
+from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
 model_name = "deepset/roberta-base-squad2"
...
@@ -63,9 +63,7 @@ Evaluated on German [XQuAD: xquad.de.json](https://github.com/deepmind/xquad)
 ### In Transformers
 ```python
-from transformers.pipelines import pipeline
-from transformers.modeling_auto import AutoModelForQuestionAnswering
-from transformers.tokenization_auto import AutoTokenizer
+from transformers import AutoModelForQuestionAnswering, AutoTokenizer, pipeline
 model_name = "deepset/xlm-roberta-large-squad2"
...
@@ -36,8 +36,7 @@ All four models are available:
 #### How to use
 ```python
-from transformers.tokenization_fsmt import FSMTTokenizer
-from transformers.modeling_fsmt import FSMTForConditionalGeneration
+from transformers import FSMTForConditionalGeneration, FSMTTokenizer
 mname = "facebook/wmt19-de-en"
 tokenizer = FSMTTokenizer.from_pretrained(mname)
 model = FSMTForConditionalGeneration.from_pretrained(mname)
...
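As with the QA cards, every FSMT card below switches to one top-level import. A hedged sketch of the full usage pattern these cards document, assuming the standard encode/generate/decode flow (the German input and the translation in the comment are illustrative):

```python
from transformers import FSMTForConditionalGeneration, FSMTTokenizer

mname = "facebook/wmt19-de-en"
tokenizer = FSMTTokenizer.from_pretrained(mname)
model = FSMTForConditionalGeneration.from_pretrained(mname)

input_text = "Maschinelles Lernen ist großartig, nicht wahr?"  # illustrative input
input_ids = tokenizer.encode(input_text, return_tensors="pt")
outputs = model.generate(input_ids)
decoded = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(decoded)  # expected along the lines of "Machine learning is great, isn't it?"
```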
@@ -36,8 +36,7 @@ All four models are available:
 #### How to use
 ```python
-from transformers.tokenization_fsmt import FSMTTokenizer
-from transformers.modeling_fsmt import FSMTForConditionalGeneration
+from transformers import FSMTForConditionalGeneration, FSMTTokenizer
 mname = "facebook/wmt19-en-de"
 tokenizer = FSMTTokenizer.from_pretrained(mname)
 model = FSMTForConditionalGeneration.from_pretrained(mname)
...
@@ -36,8 +36,7 @@ All four models are available:
 #### How to use
 ```python
-from transformers.tokenization_fsmt import FSMTTokenizer
-from transformers.modeling_fsmt import FSMTForConditionalGeneration
+from transformers import FSMTForConditionalGeneration, FSMTTokenizer
 mname = "facebook/wmt19-en-ru"
 tokenizer = FSMTTokenizer.from_pretrained(mname)
 model = FSMTForConditionalGeneration.from_pretrained(mname)
...
@@ -36,8 +36,7 @@ All four models are available:
 #### How to use
 ```python
-from transformers.tokenization_fsmt import FSMTTokenizer
-from transformers.modeling_fsmt import FSMTForConditionalGeneration
+from transformers import FSMTForConditionalGeneration, FSMTTokenizer
 mname = "facebook/wmt19-ru-en"
 tokenizer = FSMTTokenizer.from_pretrained(mname)
 model = FSMTForConditionalGeneration.from_pretrained(mname)
...
@@ -2,11 +2,7 @@
 ```python
-from transformers.configuration_bert import BertConfig
-from transformers.modeling_bert import BertForMaskedLM
-from transformers.modeling_tf_bert import TFBertForMaskedLM
-from transformers.tokenization_bert import BertTokenizer
+from transformers import BertConfig, BertForMaskedLM, BertTokenizer, TFBertForMaskedLM
 SMALL_MODEL_IDENTIFIER = "julien-c/bert-xsmall-dummy"
 DIRNAME = "./bert-xsmall-dummy"
...
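This hunk updates the dummy-model script for `julien-c/bert-xsmall-dummy`. A hypothetical continuation of that script under the new imports (the save/reload round trip is an assumption, not shown in the diff):

```python
from transformers import BertConfig, BertForMaskedLM, BertTokenizer, TFBertForMaskedLM

SMALL_MODEL_IDENTIFIER = "julien-c/bert-xsmall-dummy"
DIRNAME = "./bert-xsmall-dummy"

# Hypothetical continuation: pull the tiny checkpoint and re-save it locally,
# in both PyTorch and TensorFlow formats.
model = BertForMaskedLM.from_pretrained(SMALL_MODEL_IDENTIFIER)
tokenizer = BertTokenizer.from_pretrained(SMALL_MODEL_IDENTIFIER)
model.save_pretrained(DIRNAME)
tokenizer.save_pretrained(DIRNAME)
tf_model = TFBertForMaskedLM.from_pretrained(DIRNAME, from_pt=True)
tf_model.save_pretrained(DIRNAME)
```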
@@ -60,8 +60,7 @@ All 3 models are available:
 #### How to use
 ```python
-from transformers.tokenization_fsmt import FSMTTokenizer
-from transformers.modeling_fsmt import FSMTForConditionalGeneration
+from transformers import FSMTForConditionalGeneration, FSMTTokenizer
 mname = "allenai/{model_name}"
 tokenizer = FSMTTokenizer.from_pretrained(mname)
 model = FSMTForConditionalGeneration.from_pretrained(mname)
...
@@ -59,8 +59,7 @@ For more details, please, see [Deep Encoder, Shallow Decoder: Reevaluating the S
 #### How to use
 ```python
-from transformers.tokenization_fsmt import FSMTTokenizer
-from transformers.modeling_fsmt import FSMTForConditionalGeneration
+from transformers import FSMTForConditionalGeneration, FSMTTokenizer
 mname = "allenai/{model_name}"
 tokenizer = FSMTTokenizer.from_pretrained(mname)
 model = FSMTForConditionalGeneration.from_pretrained(mname)
...
@@ -63,8 +63,7 @@ All four models are available:
 #### How to use
 ```python
-from transformers.tokenization_fsmt import FSMTTokenizer
-from transformers.modeling_fsmt import FSMTForConditionalGeneration
+from transformers import FSMTForConditionalGeneration, FSMTTokenizer
 mname = "facebook/wmt19-{src_lang}-{tgt_lang}"
 tokenizer = FSMTTokenizer.from_pretrained(mname)
 model = FSMTForConditionalGeneration.from_pretrained(mname)
...
@@ -23,7 +23,7 @@ from typing import Callable, Optional
 from ..configuration_utils import PretrainedConfig
 from ..file_utils import is_py3nvml_available, is_torch_available
-from ..modeling_auto import MODEL_MAPPING, MODEL_WITH_LM_HEAD_MAPPING
+from ..models.auto.modeling_auto import MODEL_MAPPING, MODEL_WITH_LM_HEAD_MAPPING
 from ..utils import logging
 from .benchmark_utils import (
     Benchmark,
...
@@ -25,7 +25,7 @@ from typing import Callable, Optional
 from ..configuration_utils import PretrainedConfig
 from ..file_utils import is_py3nvml_available, is_tf_available
-from ..modeling_tf_auto import TF_MODEL_MAPPING, TF_MODEL_WITH_LM_HEAD_MAPPING
+from ..models.auto.modeling_tf_auto import TF_MODEL_MAPPING, TF_MODEL_WITH_LM_HEAD_MAPPING
 from ..utils import logging
 from .benchmark_utils import (
     Benchmark,
...
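For downstream code still using the old flat module paths, the update these two benchmark hunks illustrate is mechanical: insert `models.<model_name>.` before the module file name. A sketch of the before/after mapping, using only names that appear in the hunks above:

```python
# Before this PR (flat layout, now removed):
#   from transformers.modeling_auto import MODEL_MAPPING, MODEL_WITH_LM_HEAD_MAPPING
#   from transformers.modeling_tf_auto import TF_MODEL_MAPPING, TF_MODEL_WITH_LM_HEAD_MAPPING
# After this PR (per-model subfolders under transformers.models):
from transformers.models.auto.modeling_auto import MODEL_MAPPING, MODEL_WITH_LM_HEAD_MAPPING
from transformers.models.auto.modeling_tf_auto import TF_MODEL_MAPPING, TF_MODEL_WITH_LM_HEAD_MAPPING

print(len(MODEL_MAPPING), len(TF_MODEL_MAPPING))  # sanity check that both mappings resolve
```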
@@ -327,7 +327,7 @@ def start_memory_tracing(
     - `modules_to_trace`: (None, string, list/tuple of string) if None, all events are recorded if string or list
       of strings: only events from the listed module/sub-module will be recorded (e.g. 'fairseq' or
-      'transformers.modeling_gpt2')
+      'transformers.models.gpt2.modeling_gpt2')
     - `modules_not_to_trace`: (None, string, list/tuple of string) if None, no module is avoided if string or list
       of strings: events from the listed module/sub-module will not be recorded (e.g. 'torch')
     - `events_to_trace`: string or list of string of events to be recorded (see official python doc for
...
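The docstring fix above matters to callers of `start_memory_tracing`, since module filters are matched against the new dotted paths. A minimal sketch, assuming the benchmark utilities' public names (`start_memory_tracing` / `stop_memory_tracing`) stay where they are:

```python
from transformers import GPT2LMHeadModel, GPT2Tokenizer
from transformers.benchmark.benchmark_utils import start_memory_tracing, stop_memory_tracing

# Record only allocations made inside the relocated GPT-2 module.
trace = start_memory_tracing("transformers.models.gpt2.modeling_gpt2")

tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
model = GPT2LMHeadModel.from_pretrained("gpt2")
inputs = tokenizer("Hello world", return_tensors="pt")
model(**inputs)

summary = stop_memory_tracing(trace)
print(summary.total)  # cumulative memory increase while tracing
```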
@@ -73,7 +73,7 @@ class ConvertCommand(BaseTransformersCLICommand):
     def run(self):
         if self._model_type == "albert":
             try:
-                from transformers.convert_albert_original_tf_checkpoint_to_pytorch import (
+                from transformers.models.albert.convert_albert_original_tf_checkpoint_to_pytorch import (
                     convert_tf_checkpoint_to_pytorch,
                 )
             except ImportError:
@@ -82,7 +82,7 @@ class ConvertCommand(BaseTransformersCLICommand):
             convert_tf_checkpoint_to_pytorch(self._tf_checkpoint, self._config, self._pytorch_dump_output)
         elif self._model_type == "bert":
             try:
-                from transformers.convert_bert_original_tf_checkpoint_to_pytorch import (
+                from transformers.models.bert.convert_bert_original_tf_checkpoint_to_pytorch import (
                     convert_tf_checkpoint_to_pytorch,
                 )
             except ImportError:
@@ -91,7 +91,7 @@ class ConvertCommand(BaseTransformersCLICommand):
             convert_tf_checkpoint_to_pytorch(self._tf_checkpoint, self._config, self._pytorch_dump_output)
         elif self._model_type == "funnel":
             try:
-                from transformers.convert_funnel_original_tf_checkpoint_to_pytorch import (
+                from transformers.models.funnel.convert_funnel_original_tf_checkpoint_to_pytorch import (
                     convert_tf_checkpoint_to_pytorch,
                 )
             except ImportError:
@@ -99,14 +99,14 @@ class ConvertCommand(BaseTransformersCLICommand):
             convert_tf_checkpoint_to_pytorch(self._tf_checkpoint, self._config, self._pytorch_dump_output)
         elif self._model_type == "gpt":
-            from transformers.convert_openai_original_tf_checkpoint_to_pytorch import (
+            from transformers.models.openai.convert_openai_original_tf_checkpoint_to_pytorch import (
                 convert_openai_checkpoint_to_pytorch,
             )
 
             convert_openai_checkpoint_to_pytorch(self._tf_checkpoint, self._config, self._pytorch_dump_output)
         elif self._model_type == "transfo_xl":
             try:
-                from transformers.convert_transfo_xl_original_tf_checkpoint_to_pytorch import (
+                from transformers.models.transfo_xl.convert_transfo_xl_original_tf_checkpoint_to_pytorch import (
                     convert_transfo_xl_checkpoint_to_pytorch,
                 )
             except ImportError:
@@ -123,7 +123,7 @@ class ConvertCommand(BaseTransformersCLICommand):
             )
         elif self._model_type == "gpt2":
             try:
-                from transformers.convert_gpt2_original_tf_checkpoint_to_pytorch import (
+                from transformers.models.gpt2.convert_gpt2_original_tf_checkpoint_to_pytorch import (
                     convert_gpt2_checkpoint_to_pytorch,
                 )
             except ImportError:
@@ -132,7 +132,7 @@ class ConvertCommand(BaseTransformersCLICommand):
             convert_gpt2_checkpoint_to_pytorch(self._tf_checkpoint, self._config, self._pytorch_dump_output)
         elif self._model_type == "xlnet":
             try:
-                from transformers.convert_xlnet_original_tf_checkpoint_to_pytorch import (
+                from transformers.models.xlnet.convert_xlnet_original_tf_checkpoint_to_pytorch import (
                     convert_xlnet_checkpoint_to_pytorch,
                 )
             except ImportError:
@@ -142,13 +142,13 @@ class ConvertCommand(BaseTransformersCLICommand):
                 self._tf_checkpoint, self._config, self._pytorch_dump_output, self._finetuning_task_name
             )
         elif self._model_type == "xlm":
-            from transformers.convert_xlm_original_pytorch_checkpoint_to_pytorch import (
+            from transformers.models.xlm.convert_xlm_original_pytorch_checkpoint_to_pytorch import (
                 convert_xlm_checkpoint_to_pytorch,
             )
 
             convert_xlm_checkpoint_to_pytorch(self._tf_checkpoint, self._pytorch_dump_output)
         elif self._model_type == "lxmert":
-            from transformers.convert_lxmert_original_pytorch_checkpoint_to_pytorch import (
+            from transformers.models.lxmert.convert_lxmert_original_pytorch_checkpoint_to_pytorch import (
                 convert_lxmert_checkpoint_to_pytorch,
             )
...
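All of the conversion-command changes follow the same pattern: the standalone `convert_*` scripts now live under `transformers.models.<model_name>`. A hedged sketch of calling one of them directly, using the same function and argument order as the diff's own call sites, with hypothetical file paths:

```python
from transformers.models.bert.convert_bert_original_tf_checkpoint_to_pytorch import (
    convert_tf_checkpoint_to_pytorch,
)

# Hypothetical paths; substitute your own checkpoint, config, and output file.
convert_tf_checkpoint_to_pytorch(
    "/path/to/bert_model.ckpt",
    "/path/to/bert_config.json",
    "/path/to/pytorch_model.bin",
)
```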
@@ -9,7 +9,7 @@ from torch.utils.data.dataset import Dataset
 from filelock import FileLock
 
-from ...modeling_auto import MODEL_FOR_QUESTION_ANSWERING_MAPPING
+from ...models.auto.modeling_auto import MODEL_FOR_QUESTION_ANSWERING_MAPPING
 from ...tokenization_utils import PreTrainedTokenizer
 from ...utils import logging
 from ..processors.squad import SquadFeatures, SquadV1Processor, SquadV2Processor, squad_convert_examples_to_features
...
@@ -14,7 +14,7 @@ import math
 import re
 import string
 
-from transformers.tokenization_bert import BasicTokenizer
+from transformers import BasicTokenizer
 
 from ...utils import logging
...
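The last hunk relies on `BasicTokenizer` being re-exported at the package's top level after the move. A quick sketch of the import the metrics code now uses:

```python
from transformers import BasicTokenizer

# Lowercasing, punctuation-splitting tokenization as used by the SQuAD metrics.
tokenizer = BasicTokenizer(do_lower_case=True)
print(tokenizer.tokenize("Why is this important?"))  # ['why', 'is', 'this', 'important', '?']
```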