Commit ed6c8d37 authored by thomwolf's avatar thomwolf
Browse files

fix merge

parent e468192e
...@@ -183,10 +183,10 @@ def evalutate(args, eval_task, eval_output_dir, dataset, model): ...@@ -183,10 +183,10 @@ def evalutate(args, eval_task, eval_output_dir, dataset, model):
nb_eval_steps += 1 nb_eval_steps += 1
if preds is None: if preds is None:
preds = logits.detach().cpu().numpy() preds = logits.detach().cpu().numpy()
out_label_ids = label_ids.detach().cpu().numpy() out_label_ids = inputs['labels'].detach().cpu().numpy()
else: else:
preds = np.append(preds, logits.detach().cpu().numpy(), axis=0) preds = np.append(preds, logits.detach().cpu().numpy(), axis=0)
out_label_ids = np.append(out_label_ids, label_ids.detach().cpu().numpy(), axis=0) out_label_ids = np.append(out_label_ids, inputs['labels'].detach().cpu().numpy(), axis=0)
eval_loss = eval_loss / nb_eval_steps eval_loss = eval_loss / nb_eval_steps
if args.output_mode == "classification": if args.output_mode == "classification":
......
...@@ -176,7 +176,7 @@ class BertConfig(PretrainedConfig): ...@@ -176,7 +176,7 @@ class BertConfig(PretrainedConfig):
initializing all weight matrices. initializing all weight matrices.
layer_norm_eps: The epsilon used by LayerNorm. layer_norm_eps: The epsilon used by LayerNorm.
""" """
pretrained_config_archive_map = PRETRAINED_CONFIG_ARCHIVE_MAP pretrained_config_archive_map = BERT_PRETRAINED_CONFIG_ARCHIVE_MAP
def __init__(self, def __init__(self,
vocab_size_or_config_json_file=30522, vocab_size_or_config_json_file=30522,
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment