"git@developer.sourcefind.cn:chenpangpang/transformers.git" did not exist on "66582492d35edd2cd929dad8d668c982fa617211"
Commit 788e6326 authored by Julien Chaumond, committed by thomwolf

[ner] Honor args.overwrite_cache

parent 0f9ebb0b
@@ -252,7 +252,7 @@ def load_and_cache_examples(args, tokenizer, labels, pad_token_label_id, mode):
     cached_features_file = os.path.join(args.data_dir, "cached_{}_{}_{}".format(mode,
         list(filter(None, args.model_name_or_path.split("/"))).pop(),
         str(args.max_seq_length)))
-    if os.path.exists(cached_features_file):
+    if os.path.exists(cached_features_file) and not args.overwrite_cache:
         logger.info("Loading features from cached file %s", cached_features_file)
         features = torch.load(cached_features_file)
     else:
...
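For context, here is a minimal sketch of the load-or-rebuild caching pattern this change restores: the cached features file is reused only when it exists and `--overwrite_cache` was not passed. The helper name `load_or_build_features` and the `build_fn` callable are hypothetical stand-ins; in the NER example this logic lives inside `load_and_cache_examples`.

```python
import logging
import os

import torch

logger = logging.getLogger(__name__)


def load_or_build_features(args, build_fn):
    """Sketch: reuse the cached features file unless args.overwrite_cache is set."""
    cached_features_file = os.path.join(
        args.data_dir,
        "cached_{}_{}".format(
            list(filter(None, args.model_name_or_path.split("/"))).pop(),
            str(args.max_seq_length),
        ),
    )
    if os.path.exists(cached_features_file) and not args.overwrite_cache:
        # Cache hit and the user did not ask to overwrite it: load as-is.
        logger.info("Loading features from cached file %s", cached_features_file)
        features = torch.load(cached_features_file)
    else:
        # No cache, or --overwrite_cache was passed: rebuild and re-save.
        logger.info("Creating features, saving to %s", cached_features_file)
        features = build_fn()  # hypothetical callable that featurizes the dataset
        torch.save(features, cached_features_file)
    return features
```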