Unverified Commit 34cc7f9b authored by M. Tolga Cangöz's avatar M. Tolga Cangöz Committed by GitHub
Browse files

Fix typos (#7068)

parent 53605ed0
...@@ -31,7 +31,7 @@ EXAMPLE_DOC_STRING = """ ...@@ -31,7 +31,7 @@ EXAMPLE_DOC_STRING = """
torch_dtype=torch.float16 torch_dtype=torch.float16
).to('cuda:0') ).to('cuda:0')
>>> pipe.scheduler = UniPCMultistepScheduler.from_config(pipe_controlnet.scheduler.config) >>> pipe.scheduler = UniPCMultistepScheduler.from_config(pipe.scheduler.config)
>>> result_img = pipe(ref_image=input_image, >>> result_img = pipe(ref_image=input_image,
prompt="1girl", prompt="1girl",
......
...@@ -1192,7 +1192,7 @@ class LoraLoaderMixin: ...@@ -1192,7 +1192,7 @@ class LoraLoaderMixin:
class StableDiffusionXLLoraLoaderMixin(LoraLoaderMixin): class StableDiffusionXLLoraLoaderMixin(LoraLoaderMixin):
"""This class overrides `LoraLoaderMixin` with LoRA loading/saving code that's specific to SDXL""" """This class overrides `LoraLoaderMixin` with LoRA loading/saving code that's specific to SDXL"""
# Overrride to properly handle the loading and unloading of the additional text encoder. # Override to properly handle the loading and unloading of the additional text encoder.
def load_lora_weights( def load_lora_weights(
self, self,
pretrained_model_name_or_path_or_dict: Union[str, Dict[str, torch.Tensor]], pretrained_model_name_or_path_or_dict: Union[str, Dict[str, torch.Tensor]],
......
...@@ -215,7 +215,7 @@ class TextualInversionLoaderMixin: ...@@ -215,7 +215,7 @@ class TextualInversionLoaderMixin:
embedding = state_dict["string_to_param"]["*"] embedding = state_dict["string_to_param"]["*"]
else: else:
raise ValueError( raise ValueError(
f"Loaded state dictonary is incorrect: {state_dict}. \n\n" f"Loaded state dictionary is incorrect: {state_dict}. \n\n"
"Please verify that the loaded state dictionary of the textual embedding either only has a single key or includes the `string_to_param`" "Please verify that the loaded state dictionary of the textual embedding either only has a single key or includes the `string_to_param`"
" input key." " input key."
) )
......
...@@ -170,7 +170,7 @@ def is_safetensors_compatible(filenames, variant=None, passed_components=None) - ...@@ -170,7 +170,7 @@ def is_safetensors_compatible(filenames, variant=None, passed_components=None) -
sf_filenames.add(os.path.normpath(filename)) sf_filenames.add(os.path.normpath(filename))
for filename in pt_filenames: for filename in pt_filenames:
# filename = 'foo/bar/baz.bam' -> path = 'foo/bar', filename = 'baz', extention = '.bam' # filename = 'foo/bar/baz.bam' -> path = 'foo/bar', filename = 'baz', extension = '.bam'
path, filename = os.path.split(filename) path, filename = os.path.split(filename)
filename, extension = os.path.splitext(filename) filename, extension = os.path.splitext(filename)
...@@ -375,7 +375,7 @@ def _get_pipeline_class( ...@@ -375,7 +375,7 @@ def _get_pipeline_class(
if repo_id is not None and hub_revision is not None: if repo_id is not None and hub_revision is not None:
# if we load the pipeline code from the Hub # if we load the pipeline code from the Hub
# make sure to overwrite the `revison` # make sure to overwrite the `revision`
revision = hub_revision revision = hub_revision
return get_class_from_dynamic_module( return get_class_from_dynamic_module(
...@@ -451,7 +451,7 @@ def load_sub_model( ...@@ -451,7 +451,7 @@ def load_sub_model(
) )
load_method_name = None load_method_name = None
# retrive load method name # retrieve load method name
for class_name, class_candidate in class_candidates.items(): for class_name, class_candidate in class_candidates.items():
if class_candidate is not None and issubclass(class_obj, class_candidate): if class_candidate is not None and issubclass(class_obj, class_candidate):
load_method_name = importable_classes[class_name][1] load_method_name = importable_classes[class_name][1]
...@@ -1897,7 +1897,7 @@ class DiffusionPipeline(ConfigMixin, PushToHubMixin): ...@@ -1897,7 +1897,7 @@ class DiffusionPipeline(ConfigMixin, PushToHubMixin):
else: else:
# 2. we forced `local_files_only=True` when `model_info` failed # 2. we forced `local_files_only=True` when `model_info` failed
raise EnvironmentError( raise EnvironmentError(
f"Cannot load model {pretrained_model_name}: model is not cached locally and an error occured" f"Cannot load model {pretrained_model_name}: model is not cached locally and an error occurred"
" while trying to fetch metadata from the Hub. Please check out the root cause in the stacktrace" " while trying to fetch metadata from the Hub. Please check out the root cause in the stacktrace"
" above." " above."
) from model_info_call_error ) from model_info_call_error
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment