chenpangpang / transformers — Commits

Commit 42f63e38
Merge remote-tracking branch 'origin/master'
Authored Nov 13, 2020 by Sylvain Gugger
Parents: bb03a14e, 4df6b593
Changes: 34 files in this merge; page 1 of 2 below shows 20 changed files with 112 additions and 6 deletions (+112 -6).
examples/adversarial/run_hans.py                     +7 -0
examples/bert-loses-patience/run_glue_with_pabee.py  +7 -1
examples/bertology/run_bertology.py                  +7 -0
examples/contrib/legacy/run_language_modeling.py     +7 -0
examples/contrib/mm-imdb/run_mmimdb.py               +7 -1
examples/contrib/run_swag.py                         +7 -0
examples/deebert/run_glue_deebert.py                 +7 -1
examples/distillation/run_squad_w_distillation.py    +7 -1
examples/language-modeling/run_clm.py                +2 -0
examples/language-modeling/run_mlm.py                +2 -0
examples/language-modeling/run_mlm_wwm.py            +2 -0
examples/language-modeling/run_plm.py                +2 -0
examples/multiple-choice/run_multiple_choice.py      +7 -0
examples/multiple-choice/run_tf_multiple_choice.py   +6 -0
examples/question-answering/run_squad.py             +7 -1
examples/question-answering/run_squad_trainer.py     +7 -0
examples/question-answering/run_tf_squad.py          +6 -0
examples/seq2seq/finetune_trainer.py                 +7 -1
examples/text-classification/run_glue.py             +2 -0
examples/text-classification/run_tf_glue.py          +6 -0
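The merge propagates one logging-setup pattern across the example scripts: import the top-level transformers package and, on the main process only, raise the library logger to INFO and enable its default handler and explicit format. Below is a minimal standalone sketch of that pattern, using only the APIs that appear verbatim in the diffs; the hard-coded local_rank is hypothetical (the real scripts take it from their parsed arguments).

    # Minimal sketch of the logging setup this merge adds to each PyTorch example
    # script; not part of the commit itself.
    import transformers
    from transformers.trainer_utils import is_main_process

    local_rank = -1  # hypothetical: -1 means "not distributed", otherwise the process rank

    # Only the main process (or any process in a non-distributed run) logs at INFO;
    # the other ranks keep the default verbosity so multi-GPU runs do not
    # duplicate every log line once per process.
    if is_main_process(local_rank):
        transformers.utils.logging.set_verbosity_info()
        transformers.utils.logging.enable_default_handler()
        transformers.utils.logging.enable_explicit_format()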
examples/adversarial/run_hans.py

@@ -23,6 +23,7 @@ from typing import Dict, List, Optional
 import numpy as np
 import torch
+import transformers
 from transformers import (
     AutoConfig,
     AutoModelForSequenceClassification,
@@ -33,6 +34,7 @@ from transformers import (
     default_data_collator,
     set_seed,
 )
+from transformers.trainer_utils import is_main_process
 from utils_hans import HansDataset, InputFeatures, hans_processors, hans_tasks_num_labels
@@ -124,6 +126,11 @@ def main():
         bool(training_args.local_rank != -1),
         training_args.fp16,
     )
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(training_args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     logger.info("Training/evaluation parameters %s", training_args)

     # Set seed
examples/bert-loses-patience/run_glue_with_pabee.py

@@ -29,6 +29,7 @@ from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset
 from torch.utils.data.distributed import DistributedSampler
 from tqdm import tqdm, trange
+import transformers
 from pabee.modeling_pabee_albert import AlbertForSequenceClassificationWithPabee
 from pabee.modeling_pabee_bert import BertForSequenceClassificationWithPabee
 from transformers import (
@@ -44,6 +45,7 @@ from transformers import glue_compute_metrics as compute_metrics
 from transformers import glue_convert_examples_to_features as convert_examples_to_features
 from transformers import glue_output_modes as output_modes
 from transformers import glue_processors as processors
+from transformers.trainer_utils import is_main_process

 try:
@@ -630,7 +632,11 @@ def main():
         bool(args.local_rank != -1),
         args.fp16,
     )
-
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     # Set seed
     set_seed(args)
examples/bertology/run_bertology.py

@@ -30,6 +30,7 @@ from torch.utils.data import DataLoader, SequentialSampler, Subset
 from torch.utils.data.distributed import DistributedSampler
 from tqdm import tqdm
+import transformers
 from transformers import (
     AutoConfig,
     AutoModelForSequenceClassification,
@@ -41,6 +42,7 @@ from transformers import (
     glue_processors,
     set_seed,
 )
+from transformers.trainer_utils import is_main_process

 logger = logging.getLogger(__name__)
@@ -368,6 +370,11 @@ def main():
     # Setup logging
     logging.basicConfig(level=logging.INFO if args.local_rank in [-1, 0] else logging.WARN)
     logger.info("device: {} n_gpu: {}, distributed: {}".format(args.device, args.n_gpu, bool(args.local_rank != -1)))
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()

     # Set seeds
     set_seed(args.seed)
examples/contrib/legacy/run_language_modeling.py

@@ -29,6 +29,7 @@ from typing import Optional
 from torch.utils.data import ConcatDataset
+import transformers
 from transformers import (
     CONFIG_MAPPING,
     MODEL_WITH_LM_HEAD_MAPPING,
@@ -47,6 +48,7 @@ from transformers import (
     TrainingArguments,
     set_seed,
 )
+from transformers.trainer_utils import is_main_process

 logger = logging.getLogger(__name__)
@@ -219,6 +221,11 @@ def main():
         bool(training_args.local_rank != -1),
         training_args.fp16,
     )
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(training_args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     logger.info("Training/evaluation parameters %s", training_args)

     # Set seed
examples/contrib/mm-imdb/run_mmimdb.py

@@ -31,6 +31,7 @@ from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
 from torch.utils.data.distributed import DistributedSampler
 from tqdm import tqdm, trange
+import transformers
 from transformers import (
     WEIGHTS_NAME,
     AdamW,
@@ -41,6 +42,7 @@ from transformers import (
     MMBTForClassification,
     get_linear_schedule_with_warmup,
 )
+from transformers.trainer_utils import is_main_process
 from utils_mmimdb import ImageEncoder, JsonlDataset, collate_fn, get_image_transforms, get_mmimdb_labels
@@ -476,7 +478,11 @@ def main():
         bool(args.local_rank != -1),
         args.fp16,
     )
-
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     # Set seed
     set_seed(args)
examples/contrib/run_swag.py

@@ -31,8 +31,10 @@ from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset
 from torch.utils.data.distributed import DistributedSampler
 from tqdm import tqdm, trange
+import transformers
 from transformers import WEIGHTS_NAME, AdamW, AutoConfig, AutoTokenizer, get_linear_schedule_with_warmup
 from transformers.modeling_auto import AutoModelForMultipleChoice
+from transformers.trainer_utils import is_main_process

 try:
@@ -620,6 +622,11 @@ def main():
         bool(args.local_rank != -1),
         args.fp16,
     )
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     # Set seed
     set_seed(args)
examples/deebert/run_glue_deebert.py

@@ -13,6 +13,7 @@ from torch.utils.data import DataLoader, RandomSampler, SequentialSampler, TensorDataset
 from torch.utils.data.distributed import DistributedSampler
 from tqdm import tqdm, trange
+import transformers
 from src.modeling_highway_bert import DeeBertForSequenceClassification
 from src.modeling_highway_roberta import DeeRobertaForSequenceClassification
 from transformers import (
@@ -28,6 +29,7 @@ from transformers import glue_compute_metrics as compute_metrics
 from transformers import glue_convert_examples_to_features as convert_examples_to_features
 from transformers import glue_output_modes as output_modes
 from transformers import glue_processors as processors
+from transformers.trainer_utils import is_main_process

 try:
@@ -580,7 +582,11 @@ def main():
         bool(args.local_rank != -1),
         args.fp16,
     )
-
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     # Set seed
     set_seed(args)
examples/distillation/run_squad_w_distillation.py

@@ -30,6 +30,7 @@ from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
 from torch.utils.data.distributed import DistributedSampler
 from tqdm import tqdm, trange
+import transformers
 from transformers import (
     WEIGHTS_NAME,
     AdamW,
@@ -57,6 +58,7 @@ from transformers.data.metrics.squad_metrics import (
     squad_evaluate,
 )
 from transformers.data.processors.squad import SquadResult, SquadV1Processor, SquadV2Processor
+from transformers.trainer_utils import is_main_process

 try:
@@ -745,7 +747,11 @@ def main():
         bool(args.local_rank != -1),
         args.fp16,
     )
-
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     # Set seed
     set_seed(args)
examples/language-modeling/run_clm.py

@@ -168,6 +168,8 @@ def main():
     # Set the verbosity to info of the Transformers logger (on main process only):
     if is_main_process(training_args.local_rank):
         transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     logger.info("Training/evaluation parameters %s", training_args)

     # Set seed before initializing model.
examples/language-modeling/run_mlm.py

@@ -179,6 +179,8 @@ def main():
     # Set the verbosity to info of the Transformers logger (on main process only):
     if is_main_process(training_args.local_rank):
         transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     logger.info("Training/evaluation parameters %s", training_args)

     # Set seed before initializing model.
examples/language-modeling/run_mlm_wwm.py

@@ -186,6 +186,8 @@ def main():
     # Set the verbosity to info of the Transformers logger (on main process only):
     if is_main_process(training_args.local_rank):
         transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     logger.info("Training/evaluation parameters %s", training_args)

     # Set seed before initializing model.
examples/language-modeling/run_plm.py

@@ -176,6 +176,8 @@ def main():
     # Set the verbosity to info of the Transformers logger (on main process only):
     if is_main_process(training_args.local_rank):
         transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     logger.info("Training/evaluation parameters %s", training_args)

     # Set seed before initializing model.
examples/multiple-choice/run_multiple_choice.py

@@ -23,6 +23,7 @@ from typing import Dict, Optional
 import numpy as np
+import transformers
 from transformers import (
     AutoConfig,
     AutoModelForMultipleChoice,
@@ -33,6 +34,7 @@ from transformers import (
     TrainingArguments,
     set_seed,
 )
+from transformers.trainer_utils import is_main_process
 from utils_multiple_choice import MultipleChoiceDataset, Split, processors
@@ -115,6 +117,11 @@ def main():
         bool(training_args.local_rank != -1),
         training_args.fp16,
     )
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(training_args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     logger.info("Training/evaluation parameters %s", training_args)

     # Set seed
examples/multiple-choice/run_tf_multiple_choice.py

@@ -33,9 +33,15 @@ from transformers import (
     TFTrainingArguments,
     set_seed,
 )
+from transformers.utils import logging as hf_logging
 from utils_multiple_choice import Split, TFMultipleChoiceDataset, processors
+
+
+hf_logging.set_verbosity_info()
+hf_logging.enable_default_handler()
+hf_logging.enable_explicit_format()

 logger = logging.getLogger(__name__)
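The TensorFlow scripts (run_tf_multiple_choice.py above, and run_tf_squad.py and run_tf_glue.py below) take a different variant of the same change: the logging utilities are imported under an alias and enabled unconditionally at module level, with no per-rank gating, presumably because these scripts do not use torch.distributed's local_rank. A sketch of that variant, using only the calls shown in the diffs:

    # Sketch of the TF-script variant: module-level logging setup, no rank gating.
    from transformers.utils import logging as hf_logging

    hf_logging.set_verbosity_info()
    hf_logging.enable_default_handler()
    hf_logging.enable_explicit_format()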
examples/question-answering/run_squad.py

@@ -29,6 +29,7 @@ from torch.utils.data import DataLoader, RandomSampler, SequentialSampler
 from torch.utils.data.distributed import DistributedSampler
 from tqdm import tqdm, trange
+import transformers
 from transformers import (
     MODEL_FOR_QUESTION_ANSWERING_MAPPING,
     WEIGHTS_NAME,
@@ -45,6 +46,7 @@ from transformers.data.metrics.squad_metrics import (
     squad_evaluate,
 )
 from transformers.data.processors.squad import SquadResult, SquadV1Processor, SquadV2Processor
+from transformers.trainer_utils import is_main_process

 try:
@@ -712,7 +714,11 @@ def main():
         bool(args.local_rank != -1),
         args.fp16,
     )
-
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     # Set seed
     set_seed(args)
examples/question-answering/run_squad_trainer.py

@@ -22,9 +22,11 @@ import sys
 from dataclasses import dataclass, field
 from typing import Optional
+import transformers
 from transformers import AutoConfig, AutoModelForQuestionAnswering, AutoTokenizer, HfArgumentParser, SquadDataset
 from transformers import SquadDataTrainingArguments as DataTrainingArguments
 from transformers import Trainer, TrainingArguments
+from transformers.trainer_utils import is_main_process

 logger = logging.getLogger(__name__)
@@ -91,6 +93,11 @@ def main():
         bool(training_args.local_rank != -1),
         training_args.fp16,
     )
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(training_args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     logger.info("Training/evaluation parameters %s", training_args)

     # Prepare Question-Answering task
examples/question-answering/run_tf_squad.py

@@ -33,6 +33,12 @@ from transformers import (
     squad_convert_examples_to_features,
 )
 from transformers.data.processors.squad import SquadV1Processor, SquadV2Processor
+from transformers.utils import logging as hf_logging
+
+
+hf_logging.set_verbosity_info()
+hf_logging.enable_default_handler()
+hf_logging.enable_explicit_format()

 logger = logging.getLogger(__name__)
examples/seq2seq/finetune_trainer.py

@@ -4,10 +4,11 @@ import sys
 from dataclasses import dataclass, field
 from typing import Optional
+import transformers
 from seq2seq_trainer import Seq2SeqTrainer
 from seq2seq_training_args import Seq2SeqTrainingArguments
 from transformers import AutoConfig, AutoModelForSeq2SeqLM, AutoTokenizer, HfArgumentParser, MBartTokenizer, set_seed
-from transformers.trainer_utils import EvaluationStrategy
+from transformers.trainer_utils import EvaluationStrategy, is_main_process
 from utils import (
     Seq2SeqDataCollator,
     Seq2SeqDataset,
@@ -131,6 +132,11 @@ def main():
         bool(training_args.local_rank != -1),
         training_args.fp16,
     )
+    # Set the verbosity to info of the Transformers logger (on main process only):
+    if is_main_process(training_args.local_rank):
+        transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     logger.info("Training/evaluation parameters %s", training_args)

     # Set seed
examples/text-classification/run_glue.py

@@ -171,6 +171,8 @@ def main():
     # Set the verbosity to info of the Transformers logger (on main process only):
     if is_main_process(training_args.local_rank):
         transformers.utils.logging.set_verbosity_info()
+        transformers.utils.logging.enable_default_handler()
+        transformers.utils.logging.enable_explicit_format()
     logger.info(f"Training/evaluation parameters {training_args}")

     # Set seed before initializing model.
examples/text-classification/run_tf_glue.py

@@ -27,6 +27,12 @@ from transformers import (
     glue_processors,
     glue_tasks_num_labels,
 )
+from transformers.utils import logging as hf_logging
+
+
+hf_logging.set_verbosity_info()
+hf_logging.enable_default_handler()
+hf_logging.enable_explicit_format()

 class Split(Enum):