"docs/vscode:/vscode.git/clone" did not exist on "17fdd35481e6b462989c1c600e6cc0987dc88621"
Unverified commit 349e1242, authored by Arthur, committed by GitHub

[NLLB-MoE] `model_type` update for auto mapping (#22470)

Edit the default `model_type` and point the test checkpoints to the `hf-internal-testing` organization.
parent 11426641
@@ -125,7 +125,7 @@ class NllbMoeConfig(PretrainedConfig):
     >>> # Accessing the model configuration
     >>> configuration = model.config
     ```"""
-    model_type = "nllb_moe"
+    model_type = "nllb-moe"
     keys_to_ignore_at_inference = ["past_key_values"]
     attribute_map = {"num_attention_heads": "encoder_attention_heads", "hidden_size": "d_model"}
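For context, a minimal sketch of why the hyphenated name matters (assuming a transformers release that ships NLLB-MoE, i.e. >= 4.28): the auto classes resolve a config class through a mapping keyed by the `model_type` string, so the value declared on `NllbMoeConfig` has to match the key registered in the auto mapping.

from transformers import AutoConfig, NllbMoeConfig

# `for_model` looks the config class up by its `model_type` key.
config = AutoConfig.for_model("nllb-moe")
assert isinstance(config, NllbMoeConfig)

# A key that is absent from the mapping (e.g. the old "nllb_moe" spelling)
# makes the lookup raise a ValueError listing the recognized model types.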
@@ -354,14 +354,14 @@ class NllbMoeModelIntegrationTests(unittest.TestCase):
     @cached_property
     def tokenizer(self):
-        return NllbTokenizer.from_pretrained("ArthurZ/random-nllb-moe-2-experts")
+        return NllbTokenizer.from_pretrained("hf-internal-testing/random-nllb-moe-2-experts")

     @cached_property
     def big_model(self):
         return NllbMoeForConditionalGeneration.from_pretrained("facebook/nllb-moe-54b")

     def inference_no_head(self):
-        model = NllbMoeModel.from_pretrained("ArthurZ/random-nllb-moe-2-experts").eval()
+        model = NllbMoeModel.from_pretrained("hf-internal-testing/random-nllb-moe-2-experts").eval()
         with torch.no_grad():
             output = model(**self.model_inputs)
         # fmt: off
@@ -382,7 +382,7 @@ class NllbMoeModelIntegrationTests(unittest.TestCase):
         and `transformers` implementation of NLLB-MoE transformers. We only check the logits
         of the second sample of the batch, as it is padded.
         """
-        model = NllbMoeForConditionalGeneration.from_pretrained("ArthurZ/random-nllb-moe-2-experts").eval()
+        model = NllbMoeForConditionalGeneration.from_pretrained("hf-internal-testing/random-nllb-moe-2-experts").eval()
         with torch.no_grad():
             output = model(**self.model_inputs)
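The same key is what lets the auto model classes dispatch on a checkpoint. A short usage sketch (a hypothetical check, assuming the tiny test checkpoint above stores "nllb-moe" as the `model_type` in its config.json):

from transformers import AutoModel

# `from_pretrained` reads `model_type` from the checkpoint's config.json and
# dispatches to the matching architecture through the auto mapping.
model = AutoModel.from_pretrained("hf-internal-testing/random-nllb-moe-2-experts").eval()
print(type(model).__name__)  # expected: NllbMoeModel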