Commit 59fe641b, authored Sep 05, 2019 by thomwolf

also gathering file names in file_utils

parent d68a8fe4
Showing 4 changed files with 11 additions and 10 deletions (+11 -10)
pytorch_transformers/__init__.py             +5 -3
pytorch_transformers/configuration_utils.py  +1 -3
pytorch_transformers/file_utils.py           +4 -0
pytorch_transformers/modeling_utils.py       +1 -4
pytorch_transformers/__init__.py

@@ -24,7 +24,7 @@ from .tokenization_roberta import RobertaTokenizer
 from .tokenization_distilbert import DistilBertTokenizer
 
 # Configurations
-from .configuration_utils import CONFIG_NAME, PretrainedConfig
+from .configuration_utils import PretrainedConfig
 from .configuration_auto import AutoConfig
 from .configuration_bert import BertConfig, BERT_PRETRAINED_CONFIG_ARCHIVE_MAP
 from .configuration_openai import OpenAIGPTConfig, OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP
@@ -36,7 +36,7 @@ from .configuration_roberta import RobertaConfig, ROBERTA_PRETRAINED_CONFIG_ARCH
 from .configuration_distilbert import DistilBertConfig, DISTILBERT_PRETRAINED_CONFIG_ARCHIVE_MAP
 
 # Modeling
-from .modeling_utils import (WEIGHTS_NAME, TF_WEIGHTS_NAME, PreTrainedModel, prune_layer, Conv1D)
+from .modeling_utils import (PreTrainedModel, prune_layer, Conv1D)
 from .modeling_auto import (AutoModel, AutoModelForSequenceClassification,
                             AutoModelForQuestionAnswering, AutoModelWithLMHead)
@@ -70,4 +70,6 @@ from .optimization import (AdamW, ConstantLRSchedule, WarmupConstantSchedule, Wa
                            WarmupCosineWithHardRestartsSchedule, WarmupLinearSchedule)
 
 # Files and general utilities
-from .file_utils import (PYTORCH_TRANSFORMERS_CACHE, PYTORCH_PRETRAINED_BERT_CACHE, cached_path, add_start_docstrings, add_end_docstrings)
+from .file_utils import (PYTORCH_TRANSFORMERS_CACHE, PYTORCH_PRETRAINED_BERT_CACHE,
+                         cached_path, add_start_docstrings, add_end_docstrings,
+                         WEIGHTS_NAME, TF_WEIGHTS_NAME, CONFIG_NAME)
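
After this change, the three file-name constants are defined once in file_utils and re-exported from the package root. A minimal sketch of the resulting import surface, assuming an install at this revision:

from pytorch_transformers import WEIGHTS_NAME, TF_WEIGHTS_NAME, CONFIG_NAME

print(WEIGHTS_NAME)     # "pytorch_model.bin"
print(TF_WEIGHTS_NAME)  # "model.ckpt"
print(CONFIG_NAME)      # "config.json"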
pytorch_transformers/configuration_utils.py

@@ -24,12 +24,10 @@ import logging
 import os
 from io import open
 
-from .file_utils import cached_path
+from .file_utils import cached_path, CONFIG_NAME
 
 logger = logging.getLogger(__name__)
 
-CONFIG_NAME = "config.json"
-
 class PretrainedConfig(object):
     r""" Base class for all configuration classes.
         Handles a few parameters common to all models' configurations as well as methods for loading/downloading/saving configurations.
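
CONFIG_NAME is no longer defined in this module, but the re-import from file_utils keeps old import paths working. A quick compatibility check, assuming an install at this revision:

# Both import paths resolve to the single definition in file_utils.
from pytorch_transformers.file_utils import CONFIG_NAME as c1
from pytorch_transformers.configuration_utils import CONFIG_NAME as c2
assert c1 is c2  # the re-import binds the very same string object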
pytorch_transformers/file_utils.py

@@ -48,6 +48,10 @@ except (AttributeError, ImportError):
 PYTORCH_TRANSFORMERS_CACHE = PYTORCH_PRETRAINED_BERT_CACHE  # Kept for backward compatibility
 
+WEIGHTS_NAME = "pytorch_model.bin"
+TF_WEIGHTS_NAME = 'model.ckpt'
+CONFIG_NAME = "config.json"
+
 logger = logging.getLogger(__name__)  # pylint: disable=invalid-name
 
 if not six.PY2:
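
With the names centralized here, callers can build checkpoint paths against a single source of truth. A short sketch; my_checkpoint_dir is a hypothetical local directory, not part of the library:

import os

from pytorch_transformers.file_utils import WEIGHTS_NAME, CONFIG_NAME

my_checkpoint_dir = "/tmp/my-bert-checkpoint"  # hypothetical example path
weights_path = os.path.join(my_checkpoint_dir, WEIGHTS_NAME)  # .../pytorch_model.bin
config_path = os.path.join(my_checkpoint_dir, CONFIG_NAME)    # .../config.json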
pytorch_transformers/modeling_utils.py

@@ -31,13 +31,10 @@ from torch.nn import CrossEntropyLoss
 from torch.nn import functional as F
 
 from .configuration_utils import PretrainedConfig
-from .file_utils import cached_path
+from .file_utils import cached_path, WEIGHTS_NAME, TF_WEIGHTS_NAME
 
 logger = logging.getLogger(__name__)
 
-WEIGHTS_NAME = "pytorch_model.bin"
-TF_WEIGHTS_NAME = 'model.ckpt'
-
 try:
     from torch.nn import Identity
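
As in configuration_utils.py, the module-level constants give way to a re-import from file_utils, so importing WEIGHTS_NAME from modeling_utils stays valid for downstream code. A minimal check, assuming an install at this revision:

from pytorch_transformers.file_utils import WEIGHTS_NAME as from_file_utils
from pytorch_transformers.modeling_utils import WEIGHTS_NAME as from_modeling_utils
assert from_file_utils == from_modeling_utils == "pytorch_model.bin"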