Commit 3c805576 authored by lintangsutawika

reformat for pre-commit

parent 6b375468
@@ -23,7 +23,7 @@ class HFLM(LM):
         pretrained="gpt2",
         revision="main",
         low_cpu_mem_usage=None,
-        dtype: Optional[Union[str, torch.dtype]]="auto",
+        dtype: Optional[Union[str, torch.dtype]] = "auto",
         subfolder=None,
         tokenizer=None,
         batch_size=1,
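As a usage sketch (not part of the commit): the `dtype` kwarg above accepts either a string or a torch.dtype, defaulting to "auto". The import path below is an assumption about where HFLM lives in this repository.

    import torch
    from lm_eval.models.huggingface import HFLM  # assumed module path

    lm_str = HFLM(pretrained="gpt2", dtype="float16")       # string form, resolved later via get_dtype
    lm_obj = HFLM(pretrained="gpt2", dtype=torch.bfloat16)  # torch.dtype passed directly
    lm_auto = HFLM(pretrained="gpt2")                        # "auto": defer to the checkpoint's config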
@@ -421,9 +421,7 @@ def clear_torch_cache():
     torch.cuda.empty_cache()


-def get_dtype(
-    dtype: Union[str, torch.dtype]
-) -> torch.dtype:
+def get_dtype(dtype: Union[str, torch.dtype]) -> torch.dtype:
     """Converts `dtype` from `str` to torch.dtype when possible. Does not use an instantiated HF AutoConfig"""
     if isinstance(dtype, str) and dtype != "auto":
         # Convert `str` args torch dtype: `float16` -> `torch.float16`
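The diff is truncated after the conversion comment. A minimal sketch of how the rest of get_dtype presumably reads, assuming the string is resolved with getattr on torch; the else branch and return are assumptions, not shown in this commit.

    import torch
    from typing import Union

    def get_dtype(dtype: Union[str, torch.dtype]) -> torch.dtype:
        """Converts `dtype` from `str` to torch.dtype when possible. Does not use an instantiated HF AutoConfig"""
        if isinstance(dtype, str) and dtype != "auto":
            # Convert `str` args torch dtype: `float16` -> `torch.float16`
            _torch_dtype = getattr(torch, dtype)  # assumed: the string names a valid torch dtype attribute
        else:
            # "auto" or an already-constructed torch.dtype passes through unchanged
            _torch_dtype = dtype
        return _torch_dtype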