Unverified Commit 67683095 authored by Peter Law, committed by GitHub

Move usage of deprecated logging.warn to logging.warning (#25310)

The former spelling is deprecated and has been discouraged for a
while. The latter spelling seems to be more common in this project
anyway, so this change ought to be safe.

Fixes https://github.com/huggingface/transformers/issues/25283
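
For reference, `logging.Logger.warn` on CPython is a thin deprecated alias that emits a `DeprecationWarning` and then forwards to `Logger.warning`, so the rename preserves behavior exactly. A minimal sketch of the difference (the logger name here is arbitrary):

import logging
import warnings

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")

# DeprecationWarning is hidden by default in most contexts; surface it for the demo.
warnings.simplefilter("always", DeprecationWarning)

logger.warn("old spelling")     # same WARNING-level log record, plus a DeprecationWarning
logger.warning("new spelling")  # same WARNING-level log record, no deprecation noise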
parent 641adca5
@@ -2853,7 +2853,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
         if "special_dtypes" in inspect.signature(infer_auto_device_map).parameters:
             kwargs["special_dtypes"] = special_dtypes
         elif len(special_dtypes) > 0:
-            logger.warn(
+            logger.warning(
                 "This model has some weights that should be kept in higher precision, you need to upgrade "
                 "`accelerate` to properly deal with them (`pip install --upgrade accelerate`)."
             )
@@ -3359,7 +3359,7 @@ class PreTrainedModel(nn.Module, ModuleUtilsMixin, GenerationMixin, PushToHubMix
         if len(unexpected_keys) > 0:
             archs = [] if model.config.architectures is None else model.config.architectures
-            warner = logger.warn if model.__class__.__name__ in archs else logger.info
+            warner = logger.warning if model.__class__.__name__ in archs else logger.info
             warner(
                 f"Some weights of the model checkpoint at {pretrained_model_name_or_path} were not used when"
                 f" initializing {model.__class__.__name__}: {unexpected_keys}\n- This IS expected if you are"
...
@@ -403,7 +403,7 @@ class BlipTextEncoder(nn.Module):
     ) -> Union[Tuple[torch.Tensor], BaseModelOutputWithPastAndCrossAttentions]:
         if self.gradient_checkpointing and self.training:
             if use_cache:
-                logger.warn(
+                logger.warning(
                     "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                 )
                 use_cache = False
...
@@ -940,7 +940,7 @@ class Blip2QFormerEncoder(nn.Module):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:
                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                     )
                     use_cache = False
...
@@ -39,7 +39,7 @@ try:
     from xformers import ops as xops
 except ImportError:
     xops = None
-    logger.warn(
+    logger.warning(
         "Xformers is not installed correctly. If you want to use memory_efficient_attention to accelerate training use the following command to install Xformers\npip install xformers."
     )
...
@@ -930,7 +930,7 @@ class InstructBlipQFormerEncoder(nn.Module):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:
                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
                     )
                     use_cache = False
...
@@ -1273,7 +1273,7 @@ class Wav2Vec2PreTrainedModel(PreTrainedModel):
             raise ValueError(f"Cannot load_adapter for {target_lang} if `config.adapter_attn_dim` is not defined.")
         if target_lang == self.target_lang and not force_load:
-            logger.warn(f"Adapter weights are already set to {target_lang}.")
+            logger.warning(f"Adapter weights are already set to {target_lang}.")
             return
         cache_dir = kwargs.pop("cache_dir", None)
...
@@ -224,12 +224,12 @@ class Agent:
         self._toolbox.update(additional_tools)
         if len(replacements) > 1:
             names = "\n".join([f"- {n}: {t}" for n, t in replacements.items()])
-            logger.warn(
+            logger.warning(
                 f"The following tools have been replaced by the ones provided in `additional_tools`:\n{names}."
             )
         elif len(replacements) == 1:
             name = list(replacements.keys())[0]
-            logger.warn(f"{name} has been replaced by {replacements[name]} as provided in `additional_tools`.")
+            logger.warning(f"{name} has been replaced by {replacements[name]} as provided in `additional_tools`.")
         self.prepare_for_new_chat()
...
@@ -264,7 +264,7 @@ class Tool:
         if len(tool_class.name) == 0:
             tool_class.name = custom_tool["name"]
         if tool_class.name != custom_tool["name"]:
-            logger.warn(
+            logger.warning(
                 f"{tool_class.__name__} implements a different name in its configuration and class. Using the tool "
                 "configuration name."
             )
@@ -273,7 +273,7 @@ class Tool:
         if len(tool_class.description) == 0:
             tool_class.description = custom_tool["description"]
         if tool_class.description != custom_tool["description"]:
-            logger.warn(
+            logger.warning(
                 f"{tool_class.__name__} implements a different description in its configuration and class. Using the "
                 "tool configuration description."
             )
...