"torchvision/vscode:/vscode.git/clone" did not exist on "434271dc5b9d2cb8d1188c8436977b46c388a20c"
Unverified Commit d4ae9635 authored by Leonid Sinev's avatar Leonid Sinev Committed by GitHub
Browse files

[Fix] Replace generic exception classes with more specific ones (#1989)

* Replace generic exception classes with more specific ones

* rerun pre-commit to pass linter tests

* Revert "rerun pre-commit to pass linter tests"

This reverts commit 67f88ccf144469853217704520e613196042d859.

* reduce repetitions in errors or so

* Replace generic exception class with a more specific one
parent 389347ee
...@@ -45,8 +45,8 @@ def anthropic_completion( ...@@ -45,8 +45,8 @@ def anthropic_completion(
try: try:
import anthropic import anthropic
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"attempted to use 'anthropic' LM type, but package `anthropic` is not installed. \ "attempted to use 'anthropic' LM type, but package `anthropic` is not installed. \
please install anthropic via `pip install 'lm-eval[anthropic]'` or `pip install -e '.[anthropic]'`", please install anthropic via `pip install 'lm-eval[anthropic]'` or `pip install -e '.[anthropic]'`",
) )
...@@ -108,8 +108,8 @@ def anthropic_chat( ...@@ -108,8 +108,8 @@ def anthropic_chat(
try: try:
import anthropic import anthropic
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"attempted to use 'anthropic' LM type, but package `anthropic` is not installed. \ "attempted to use 'anthropic' LM type, but package `anthropic` is not installed. \
please install anthropic via `pip install 'lm-eval[anthropic]'` or `pip install -e '.[anthropic]'`", please install anthropic via `pip install 'lm-eval[anthropic]'` or `pip install -e '.[anthropic]'`",
) )
...@@ -168,8 +168,8 @@ class AnthropicLM(LM): ...@@ -168,8 +168,8 @@ class AnthropicLM(LM):
try: try:
import anthropic import anthropic
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"attempted to use 'anthropic' LM type, but package `anthropic` is not installed. \ "attempted to use 'anthropic' LM type, but package `anthropic` is not installed. \
please install anthropic via `pip install 'lm-eval[anthropic]'` or `pip install -e '.[anthropic]'`", please install anthropic via `pip install 'lm-eval[anthropic]'` or `pip install -e '.[anthropic]'`",
) )
...@@ -217,8 +217,8 @@ please install anthropic via `pip install 'lm-eval[anthropic]'` or `pip install ...@@ -217,8 +217,8 @@ please install anthropic via `pip install 'lm-eval[anthropic]'` or `pip install
def generate_until(self, requests, disable_tqdm: bool = False) -> List[str]: def generate_until(self, requests, disable_tqdm: bool = False) -> List[str]:
try: try:
import anthropic import anthropic
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"attempted to use 'anthropic' LM type, but package `anthropic` is not installed. \ "attempted to use 'anthropic' LM type, but package `anthropic` is not installed. \
please install anthropic via `pip install 'lm-eval[anthropic]'` or `pip install -e '.[anthropic]'`", please install anthropic via `pip install 'lm-eval[anthropic]'` or `pip install -e '.[anthropic]'`",
) )
......
...@@ -144,7 +144,7 @@ class TemplateAPI(TemplateLM): ...@@ -144,7 +144,7 @@ class TemplateAPI(TemplateLM):
self.tokenizer = tiktoken.encoding_for_model(self.model) self.tokenizer = tiktoken.encoding_for_model(self.model)
except ModuleNotFoundError as e: except ModuleNotFoundError as e:
raise Exception( raise ModuleNotFoundError(
"Attempted to use 'openai' LM type, but the package `tiktoken` is not installed. " "Attempted to use 'openai' LM type, but the package `tiktoken` is not installed. "
"Please install it via `pip install lm-eval[api]` or `pip install -e .[api]`." "Please install it via `pip install lm-eval[api]` or `pip install -e .[api]`."
) from e ) from e
......
...@@ -68,7 +68,9 @@ class GGUFLM(LM): ...@@ -68,7 +68,9 @@ class GGUFLM(LM):
logger.error(f"RequestException: {e}") logger.error(f"RequestException: {e}")
time.sleep(delay) # wait before retrying time.sleep(delay) # wait before retrying
else: else:
raise Exception(f"Failed to get a valid response after {retries} retries.") raise RuntimeError(
f"Failed to get a valid response after {retries} retries."
)
def loglikelihood(self, requests, disable_tqdm: bool = False): def loglikelihood(self, requests, disable_tqdm: bool = False):
if not requests: if not requests:
......
...@@ -579,8 +579,8 @@ class HFLM(TemplateLM): ...@@ -579,8 +579,8 @@ class HFLM(TemplateLM):
else: else:
try: try:
from auto_gptq import AutoGPTQForCausalLM from auto_gptq import AutoGPTQForCausalLM
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"Tried to load auto_gptq, but auto-gptq is not installed ", "Tried to load auto_gptq, but auto-gptq is not installed ",
"please install auto-gptq via pip install lm-eval[gptq] or pip install -e .[gptq]", "please install auto-gptq via pip install lm-eval[gptq] or pip install -e .[gptq]",
) )
......
...@@ -69,8 +69,8 @@ class MambaLMWrapper(HFLM): ...@@ -69,8 +69,8 @@ class MambaLMWrapper(HFLM):
) -> None: ) -> None:
try: try:
from mamba_ssm.utils.hf import load_config_hf # noqa: F811 from mamba_ssm.utils.hf import load_config_hf # noqa: F811
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"attempted to use 'mamba_ssm' LM type, but package `mamba_ssm` is not installed. \ "attempted to use 'mamba_ssm' LM type, but package `mamba_ssm` is not installed. \
please install mamba via `pip install lm-eval[mamba]` or `pip install -e .[mamba]`", please install mamba via `pip install lm-eval[mamba]` or `pip install -e .[mamba]`",
) )
...@@ -88,8 +88,8 @@ please install mamba via `pip install lm-eval[mamba]` or `pip install -e .[mamba ...@@ -88,8 +88,8 @@ please install mamba via `pip install lm-eval[mamba]` or `pip install -e .[mamba
) -> None: ) -> None:
try: try:
from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel # noqa: F811 from mamba_ssm.models.mixer_seq_simple import MambaLMHeadModel # noqa: F811
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"attempted to use 'mamba_ssm' LM type, but package `mamba_ssm` is not installed. \ "attempted to use 'mamba_ssm' LM type, but package `mamba_ssm` is not installed. \
please install mamba via `pip install lm-eval[mamba]` or `pip install -e .[mamba]`", please install mamba via `pip install lm-eval[mamba]` or `pip install -e .[mamba]`",
) )
......
...@@ -39,8 +39,8 @@ def _patch_pretrained_cfg( ...@@ -39,8 +39,8 @@ def _patch_pretrained_cfg(
): ):
try: try:
import omegaconf import omegaconf
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"Attempted to use 'nemo_lm' model type, but package `nemo` is not installed" "Attempted to use 'nemo_lm' model type, but package `nemo` is not installed"
"Please install nemo following the instructions in the README: either with a NVIDIA PyTorch or NeMo container, " "Please install nemo following the instructions in the README: either with a NVIDIA PyTorch or NeMo container, "
"or installing nemo following https://github.com/NVIDIA/NeMo.", "or installing nemo following https://github.com/NVIDIA/NeMo.",
...@@ -79,8 +79,8 @@ def load_model( ...@@ -79,8 +79,8 @@ def load_model(
MegatronGPTModel, MegatronGPTModel,
) )
from nemo.collections.nlp.parts.nlp_overrides import NLPSaveRestoreConnector from nemo.collections.nlp.parts.nlp_overrides import NLPSaveRestoreConnector
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"Attempted to use 'nemo_lm' model type, but package `nemo` is not installed" "Attempted to use 'nemo_lm' model type, but package `nemo` is not installed"
"Please install nemo following the instructions in the README: either with a NVIDIA PyTorch or NeMo container, " "Please install nemo following the instructions in the README: either with a NVIDIA PyTorch or NeMo container, "
"or installing nemo following https://github.com/NVIDIA/NeMo.", "or installing nemo following https://github.com/NVIDIA/NeMo.",
...@@ -140,8 +140,8 @@ def load_model( ...@@ -140,8 +140,8 @@ def load_model(
def setup_distributed_environment(trainer): def setup_distributed_environment(trainer):
try: try:
from nemo.utils.app_state import AppState from nemo.utils.app_state import AppState
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"Attempted to use 'nemo_lm' model type, but package `nemo` is not installed" "Attempted to use 'nemo_lm' model type, but package `nemo` is not installed"
"Please install nemo following the instructions in the README: either with a NVIDIA PyTorch or NeMo container, " "Please install nemo following the instructions in the README: either with a NVIDIA PyTorch or NeMo container, "
"or installing nemo following https://github.com/NVIDIA/NeMo.", "or installing nemo following https://github.com/NVIDIA/NeMo.",
...@@ -194,8 +194,8 @@ class NeMoLM(LM): ...@@ -194,8 +194,8 @@ class NeMoLM(LM):
from pytorch_lightning.trainer.trainer import Trainer from pytorch_lightning.trainer.trainer import Trainer
self.generate = generate self.generate = generate
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"Attempted to use 'nemo_lm' model type, but package `nemo` is not installed" "Attempted to use 'nemo_lm' model type, but package `nemo` is not installed"
"Please install nemo following the instructions in the README: either with a NVIDIA PyTorch or NeMo container, " "Please install nemo following the instructions in the README: either with a NVIDIA PyTorch or NeMo container, "
"or installing nemo following https://github.com/NVIDIA/NeMo.", "or installing nemo following https://github.com/NVIDIA/NeMo.",
......
...@@ -38,8 +38,8 @@ class SparseMLLM(HFLM): ...@@ -38,8 +38,8 @@ class SparseMLLM(HFLM):
) -> None: ) -> None:
try: try:
from sparseml.transformers import SparseAutoModelForCausalLM from sparseml.transformers import SparseAutoModelForCausalLM
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"Package `sparseml` is not installed. " "Package `sparseml` is not installed. "
"Please install it via `pip install sparseml[transformers]`" "Please install it via `pip install sparseml[transformers]`"
) )
...@@ -88,8 +88,8 @@ class SparseMLLM(HFLM): ...@@ -88,8 +88,8 @@ class SparseMLLM(HFLM):
def _get_config(self, pretrained: str, **kwargs) -> None: def _get_config(self, pretrained: str, **kwargs) -> None:
try: try:
from sparseml.transformers import SparseAutoConfig from sparseml.transformers import SparseAutoConfig
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"Package `sparseml` is not installed. " "Package `sparseml` is not installed. "
"Please install it via `pip install sparseml[transformers]`" "Please install it via `pip install sparseml[transformers]`"
) )
...@@ -112,8 +112,8 @@ class SparseMLLM(HFLM): ...@@ -112,8 +112,8 @@ class SparseMLLM(HFLM):
) -> None: ) -> None:
try: try:
from sparseml.transformers import SparseAutoTokenizer from sparseml.transformers import SparseAutoTokenizer
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"Package `sparseml` is not installed. " "Package `sparseml` is not installed. "
"Please install it via `pip install sparseml[transformers]`" "Please install it via `pip install sparseml[transformers]`"
) )
...@@ -171,8 +171,8 @@ class DeepSparseLM(LM): ...@@ -171,8 +171,8 @@ class DeepSparseLM(LM):
try: try:
import deepsparse import deepsparse
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"Package `deepsparse` is not installed. " "Package `deepsparse` is not installed. "
"Please install it via `pip install deepsparse[transformers]`" "Please install it via `pip install deepsparse[transformers]`"
) )
......
...@@ -144,7 +144,7 @@ class NEURON_HF(TemplateLM): ...@@ -144,7 +144,7 @@ class NEURON_HF(TemplateLM):
add_bos_token: Optional[bool] = False, add_bos_token: Optional[bool] = False,
) -> None: ) -> None:
if not NEURON_AVAILABLE: if not NEURON_AVAILABLE:
raise Exception( raise ImportError(
"Tried to load neuron model, but neuron is not installed ", "Tried to load neuron model, but neuron is not installed ",
"please install neuron via pip install transformers-neuron ", "please install neuron via pip install transformers-neuron ",
"also make sure you are running on an AWS inf2 instance", "also make sure you are running on an AWS inf2 instance",
......
...@@ -50,7 +50,7 @@ class OptimumLM(HFLM): ...@@ -50,7 +50,7 @@ class OptimumLM(HFLM):
**kwargs, **kwargs,
) -> None: ) -> None:
if not find_spec("optimum"): if not find_spec("optimum"):
raise Exception( raise ModuleNotFoundError(
"package `optimum` is not installed. Please install it via `pip install optimum[openvino]`" "package `optimum` is not installed. Please install it via `pip install optimum[openvino]`"
) )
else: else:
......
...@@ -65,7 +65,7 @@ class VLLM(TemplateLM): ...@@ -65,7 +65,7 @@ class VLLM(TemplateLM):
super().__init__() super().__init__()
if not find_spec("vllm"): if not find_spec("vllm"):
raise Exception( raise ModuleNotFoundError(
"attempted to use 'vllm' LM type, but package `vllm` is not installed. " "attempted to use 'vllm' LM type, but package `vllm` is not installed. "
"Please install vllm via `pip install lm-eval[vllm]` or `pip install -e .[vllm]`" "Please install vllm via `pip install lm-eval[vllm]` or `pip install -e .[vllm]`"
) )
......
...@@ -29,8 +29,8 @@ def get_prompt(prompt_id: str, dataset_name: str = None, subset_name: str = None ...@@ -29,8 +29,8 @@ def get_prompt(prompt_id: str, dataset_name: str = None, subset_name: str = None
if category_name == "promptsource": if category_name == "promptsource":
try: try:
from promptsource.templates import DatasetTemplates from promptsource.templates import DatasetTemplates
except ModuleNotFoundError: except ModuleNotFoundError as exception:
raise Exception( raise type(exception)(
"Tried to load a Promptsource template, but promptsource is not installed ", "Tried to load a Promptsource template, but promptsource is not installed ",
"please install promptsource via pip install lm-eval[promptsource] or pip install -e .[promptsource]", "please install promptsource via pip install lm-eval[promptsource] or pip install -e .[promptsource]",
) )
...@@ -118,7 +118,7 @@ class PromptString: ...@@ -118,7 +118,7 @@ class PromptString:
# TODO need a way to process doc_to_choice # TODO need a way to process doc_to_choice
if "doc_to_choice" in self.prompt_string: if "doc_to_choice" in self.prompt_string:
raise Exception("Not yet implemented to accept doc_to_choice") raise NotImplementedError("Not yet implemented to accept doc_to_choice")
text_string = utils.apply_template(doc_to_text, doc) text_string = utils.apply_template(doc_to_text, doc)
target_string = utils.apply_template(doc_to_target, doc) target_string = utils.apply_template(doc_to_target, doc)
......
...@@ -55,7 +55,7 @@ def yield_pile(start_offsets=None, checkpoint_offset=None): ...@@ -55,7 +55,7 @@ def yield_pile(start_offsets=None, checkpoint_offset=None):
print( print(
"We expect the pile archives to be in the 'pile' directory, but this was not found." "We expect the pile archives to be in the 'pile' directory, but this was not found."
) )
raise Exception("Pile directory not found.") raise FileNotFoundError("Pile directory not found.")
files = list(sorted(glob.glob(os.path.join(directory, "*.jsonl.zst*")))) files = list(sorted(glob.glob(os.path.join(directory, "*.jsonl.zst*"))))
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment