Commit 3d5f2913 authored by thomwolf

updates to run_glue

parent 99b90eda
@@ -213,11 +213,12 @@ def load_and_cache_examples(args, task, tokenizer, eval=False):
             str(task)))
     if os.path.exists(cached_features_file):
         logger.info("Loading features from cached file %s", cached_features_file)
         features = torch.load(cached_features_file)
     else:
         features = convert_examples_to_features(examples, label_list, args.max_seq_length, tokenizer, output_mode)
         if args.local_rank == -1 or torch.distributed.get_rank() == 0:
-            logger.info(" Saving eval features into cached file %s", cached_features_file)
+            logger.info("Saving features into cached file %s", cached_features_file)
             torch.save(features, cached_features_file)
     return features
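For context, the pattern in this hunk is: load pre-computed features from a cache file when it exists, otherwise build them and write the cache, guarding the write so that only the main process (local_rank == -1 in single-process runs, or distributed rank 0) touches the file. Below is a minimal, standalone sketch of that pattern under those assumptions; `build_features` and `load_or_cache_features` are hypothetical stand-ins, not functions from run_glue.

```python
# Minimal sketch of the load-or-cache pattern shown in the diff above.
# `build_features` is a hypothetical placeholder for the expensive
# convert_examples_to_features step; the rank check mirrors the diff so
# only the main process writes the cache file.
import os

import torch
import torch.distributed as dist


def build_features():
    # Placeholder for tokenization / feature conversion.
    return [{"input_ids": [101, 2023, 102]}]


def load_or_cache_features(cached_features_file, local_rank=-1):
    if os.path.exists(cached_features_file):
        # Reuse previously converted features instead of rebuilding them.
        features = torch.load(cached_features_file)
    else:
        features = build_features()
        # Save only from the main process so distributed workers
        # don't race on the same cache file.
        if local_rank == -1 or dist.get_rank() == 0:
            torch.save(features, cached_features_file)
    return features


if __name__ == "__main__":
    feats = load_or_cache_features("cached_train_features.pt")
    print(len(feats), "feature records")
```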