Commit 1d8f782f authored by gk

auto-gptq -> gptq and an import warning

parent 18d842ab
@@ -44,10 +44,10 @@ To install additional multilingual tokenization and text segmentation packages,
 pip install -e ".[multilingual]"
 ```
-To support loading GPTQ quantized models, install the package with the `auto-gptq` extra:
+To support loading GPTQ quantized models, install the package with the `gptq` extra:
 ```bash
-pip install -e ".[auto-gptq]"
+pip install -e ".[gptq]"
 ```
 ## Basic Usage
@@ -168,7 +168,13 @@ class HFLM(LM):
                 **model_kwargs,
             )
         else:
-            from auto_gptq import AutoGPTQForCausalLM
+            try:
+                from auto_gptq import AutoGPTQForCausalLM
+            except ModuleNotFoundError:
+                raise Exception(
+                    "Tried to load auto_gptq, but auto-gptq is not installed. "
+                    "Please install auto-gptq via `pip install lm-eval[gptq]` or `pip install -e .[gptq]`."
+                )
             self._model = AutoGPTQForCausalLM.from_quantized(
                 pretrained,
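For context, the guard added above can be exercised outside of `HFLM` as well. The sketch below mirrors the same optional-dependency pattern on its own, assuming the `gptq` extra from this commit; the checkpoint name and device are placeholders, not values taken from the commit.

```python
# Minimal standalone sketch of the optional-import guard introduced above.
try:
    from auto_gptq import AutoGPTQForCausalLM
except ModuleNotFoundError as err:
    raise ModuleNotFoundError(
        "auto-gptq is not installed; install it with `pip install -e .[gptq]`"
    ) from err

# Placeholder GPTQ checkpoint and device, purely for illustration.
model = AutoGPTQForCausalLM.from_quantized(
    "TheBloke/Llama-2-7B-GPTQ",
    device="cuda:0",
    use_safetensors=True,
)
```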
@@ -55,7 +55,7 @@ setuptools.setup(
         "promptsource": [
             "promptsource @ git+https://github.com/bigscience-workshop/promptsource.git#egg=promptsource"
         ],
-        "auto-gptq": ["auto-gptq[triton] @ git+https://github.com/PanQiWei/AutoGPTQ"],
+        "gptq": ["auto-gptq[triton] @ git+https://github.com/PanQiWei/AutoGPTQ"],
         "anthropic": ["anthropic"],
     },
 )