Commit ff83e9b2 authored by jon-tow

Manually concat tokenizer revision with subfolder

parent b0b76d87
@@ -33,9 +33,9 @@ class HFLM(BaseLM):
             else torch.device("cpu")
         )
-        # TODO: update this to be less of a hack once subfolder is fixed in HF
         self.gpt2 = transformers.AutoModelForCausalLM.from_pretrained(
             pretrained,
+            # TODO: update this to be less of a hack once subfolder is fixed in HF
             revision=revision + ("/" + subfolder if subfolder is not None else ""),
         ).to(self.device)
         self.gpt2.eval()
@@ -43,8 +43,8 @@ class HFLM(BaseLM):
         # pretrained tokenizer for neo is broken for now so just hard-coding this to gpt2
         self.tokenizer = transformers.AutoTokenizer.from_pretrained(
             pretrained if tokenizer is None else tokenizer,
-            revision=revision,
-            subfolder=subfolder,
+            # TODO: update this to be less of a hack once subfolder is fixed in HF
+            revision=revision + ("/" + subfolder if subfolder is not None else ""),
         )
         assert isinstance(
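
For context, a minimal sketch of the workaround this commit applies: since the subfolder handling is broken in HF at this point, the subfolder is folded into the revision string ("main" + "/" + "my-subfolder" -> "main/my-subfolder") before calling from_pretrained. The helper function below is hypothetical; pretrained, revision, subfolder, and tokenizer are the constructor arguments shown in the diff.

import transformers

def load_model_and_tokenizer(pretrained, revision="main", subfolder=None, tokenizer=None):
    # Fold the subfolder into the revision string as a workaround,
    # e.g. revision="main", subfolder="checkpoints" -> "main/checkpoints".
    revision = revision + ("/" + subfolder if subfolder is not None else "")

    model = transformers.AutoModelForCausalLM.from_pretrained(
        pretrained,
        revision=revision,
    )
    tok = transformers.AutoTokenizer.from_pretrained(
        pretrained if tokenizer is None else tokenizer,
        revision=revision,
    )
    return model, tok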