@@ -3638,7 +3638,7 @@ def unfuse_lora(self, components: List[str] = ["transformer"], **kwargs):
36383638 """
36393639 super ().unfuse_lora (components = components , ** kwargs )
36403640
3641-
3641+
36423642class KandinskyLoraLoaderMixin (LoraBaseMixin ):
36433643 r"""
36443644 Load LoRA layers into [`Kandinsky5Transformer3DModel`],
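
For orientation (not part of the diff itself): the new mixin only takes effect once a pipeline class inherits it. A minimal sketch, assuming the mixin is re-exported from `diffusers.loaders`; the pipeline class name below is invented for illustration:

    from diffusers import DiffusionPipeline
    from diffusers.loaders import KandinskyLoraLoaderMixin  # assumed re-export location

    class MyKandinsky5Pipeline(DiffusionPipeline, KandinskyLoraLoaderMixin):
        # Inheriting the mixin is what exposes load_lora_weights / save_lora_weights /
        # fuse_lora / unfuse_lora for the pipeline's `transformer` component.
        pass
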
@@ -3662,7 +3662,8 @@ def lora_state_dict(
                 Can be either:
                     - A string, the *model id* of a pretrained model hosted on the Hub.
                     - A path to a *directory* containing the model weights.
-                    - A [torch state dict](https://pytorch.org/tutorials/beginner/saving_loading_models.html#what-is-a-state-dict).
+                    - A [torch state
+                      dict](https://pytorch.org/tutorials/beginner/saving_loading_models.html#what-is-a-state-dict).
 
             cache_dir (`Union[str, os.PathLike]`, *optional*):
                 Path to a directory where a downloaded pretrained model configuration is cached.
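
To make the three accepted input forms above concrete, a hedged sketch (the repo id and local path are placeholders, and the exact return shape of `lora_state_dict` may include metadata depending on the kwargs):

    from diffusers.loaders import KandinskyLoraLoaderMixin  # assumed re-export location

    # 1) a Hub model id, or 2) a local directory containing the LoRA weights
    state_dict = KandinskyLoraLoaderMixin.lora_state_dict("user/kandinsky5-lora")
    state_dict = KandinskyLoraLoaderMixin.lora_state_dict("./kandinsky5-lora")

    # 3) an already-loaded torch state dict is accepted as-is
    state_dict = KandinskyLoraLoaderMixin.lora_state_dict(dict(state_dict))
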
@@ -3737,7 +3738,7 @@ def load_lora_weights(
     ):
         """
         Load LoRA weights specified in `pretrained_model_name_or_path_or_dict` into `self.transformer`
-
+
         Parameters:
             pretrained_model_name_or_path_or_dict (`str` or `os.PathLike` or `dict`):
                 See [`~loaders.KandinskyLoraLoaderMixin.lora_state_dict`].
@@ -3746,7 +3747,8 @@ def load_lora_weights(
             hotswap (`bool`, *optional*):
                 Whether to substitute an existing (LoRA) adapter with the newly loaded adapter in-place.
             low_cpu_mem_usage (`bool`, *optional*):
-                Speed up model loading by only loading the pretrained LoRA weights and not initializing the random weights.
+                Speed up model loading by only loading the pretrained LoRA weights and not initializing the random
+                weights.
             kwargs (`dict`, *optional*):
                 See [`~loaders.KandinskyLoraLoaderMixin.lora_state_dict`].
         """
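
For the `load_lora_weights` parameters documented in this hunk, usage would look roughly like the sketch below; the checkpoint and LoRA ids are placeholders, and any Kandinsky 5 pipeline that mixes in `KandinskyLoraLoaderMixin` is assumed to behave the same way:

    import torch
    from diffusers import DiffusionPipeline

    pipe = DiffusionPipeline.from_pretrained("<kandinsky5-checkpoint>", torch_dtype=torch.bfloat16)
    pipe.load_lora_weights(
        "user/kandinsky5-lora",   # Hub id, local directory, or an in-memory state dict
        adapter_name="my_lora",
        low_cpu_mem_usage=True,   # only materialize the pretrained LoRA weights, skip random init
    )
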
@@ -3827,7 +3829,6 @@ def load_lora_into_transformer(
             hotswap=hotswap,
         )
 
-
     @classmethod
     def save_lora_weights(
         cls,
@@ -3864,9 +3865,7 @@ def save_lora_weights(
             lora_metadata[cls.transformer_name] = transformer_lora_adapter_metadata
 
         if not lora_layers:
-            raise ValueError(
-                "You must pass at least one of `transformer_lora_layers`"
-            )
+            raise ValueError("You must pass at least one of `transformer_lora_layers`")
 
         cls._save_lora_weights(
             save_directory=save_directory,
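
The `if not lora_layers` guard above means at least `transformer_lora_layers` must be supplied. A minimal sketch of the call, continuing with the placeholder `pipe` from the earlier sketch and assuming a LoRA adapter is already attached to `pipe.transformer` (collecting the layers with peft's helper is an assumption, not something this diff prescribes):

    from peft import get_peft_model_state_dict

    transformer_lora_layers = get_peft_model_state_dict(pipe.transformer)
    pipe.save_lora_weights(
        save_directory="./kandinsky5-lora",
        transformer_lora_layers=transformer_lora_layers,
    )
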
@@ -3923,7 +3922,7 @@ def unfuse_lora(self, components: List[str] = ["transformer"], **kwargs):
             components (`List[str]`): List of LoRA-injectable components to unfuse LoRA from.
         """
         super().unfuse_lora(components=components, **kwargs)
-
+
 
 class WanLoraLoaderMixin(LoraBaseMixin):
     r"""
@@ -5088,4 +5087,4 @@ class LoraLoaderMixin(StableDiffusionLoraLoaderMixin):
     def __init__(self, *args, **kwargs):
         deprecation_message = "LoraLoaderMixin is deprecated and this will be removed in a future version. Please use `StableDiffusionLoraLoaderMixin`, instead."
         deprecate("LoraLoaderMixin", "1.0.0", deprecation_message)
-        super().__init__(*args, **kwargs)
+        super().__init__(*args, **kwargs)