From 92960de8d6a2778954cfb7d2d6482cd765bf6130 Mon Sep 17 00:00:00 2001
From: awsr <43862868+awsr@users.noreply.github.com>
Date: Tue, 24 Mar 2026 04:27:30 -0700
Subject: [PATCH] RUF013 updates

---
 modules/lora/extra_networks_lora.py |  2 +-
 modules/masking.py                  |  2 +-
 modules/merging/convert_sdxl.py     |  2 +-
 modules/merging/modules_sdxl.py     | 14 +++++++-------
 4 files changed, 10 insertions(+), 10 deletions(-)

diff --git a/modules/lora/extra_networks_lora.py b/modules/lora/extra_networks_lora.py
index 9ca2b3b9f..4e82e79f7 100644
--- a/modules/lora/extra_networks_lora.py
+++ b/modules/lora/extra_networks_lora.py
@@ -173,7 +173,7 @@ class ExtraNetworkLora(extra_networks.ExtraNetwork):
     def signature(self, names: list[str], te_multipliers: list, unet_multipliers: list):
         return [f'{name}:{te}:{unet}' for name, te, unet in zip(names, te_multipliers, unet_multipliers, strict=False)]
 
-    def changed(self, requested: list[str], include: list[str] = None, exclude: list[str] = None) -> bool:
+    def changed(self, requested: list[str], include: list[str] | None = None, exclude: list[str] | None = None) -> bool:
         if shared.opts.lora_force_reload:
             debug_log(f'Network check: type=LoRA requested={requested} status=forced')
             return True
diff --git a/modules/masking.py b/modules/masking.py
index 82c24a3e2..600ec87f2 100644
--- a/modules/masking.py
+++ b/modules/masking.py
@@ -378,7 +378,7 @@ def outpaint(input_image: Image.Image, outpaint_type: str = 'Edge'):
     return image, mask
 
 
-def run_mask(input_image: Image.Image, input_mask: Image.Image = None, return_type: str = None, mask_blur: int = None, mask_padding: int = None, invert=None):
+def run_mask(input_image: Image.Image, input_mask: Image.Image | None = None, return_type: str | None = None, mask_blur: int | None = None, mask_padding: int | None = None, invert=None):
     if isinstance(input_image, list) and len(input_image) > 0:
         input_image = input_image[0]
     elif isinstance(input_image, dict):
diff --git a/modules/merging/convert_sdxl.py b/modules/merging/convert_sdxl.py
index 3238cfd35..969816ae1 100644
--- a/modules/merging/convert_sdxl.py
+++ b/modules/merging/convert_sdxl.py
@@ -260,7 +260,7 @@ def calculate_model_hash(state_dict):
     return func.hexdigest()
 
 
-def convert(model_path:str, checkpoint_path:str, metadata:dict=None):
+def convert(model_path: str, checkpoint_path: str, metadata: dict | None = None):
     if metadata is None:
         metadata = {}
     unet_path = os.path.join(model_path, "unet", "diffusion_pytorch_model.safetensors")
diff --git a/modules/merging/modules_sdxl.py b/modules/merging/modules_sdxl.py
index d4fb799c5..b803e90d3 100644
--- a/modules/merging/modules_sdxl.py
+++ b/modules/merging/modules_sdxl.py
@@ -65,7 +65,7 @@ def msg(text, err:bool=False):
     return status
 
 
-def load_base(override:str=None):
+def load_base(override: str | None = None):
     global pipeline # pylint: disable=global-statement
     fn = override or recipe.base
     yield msg(f'base={fn}')
@@ -79,7 +79,7 @@ def load_base(override:str=None):
     pipeline.vae.register_to_config(force_upcast = False)
 
 
-def load_unet(pipe: diffusers.StableDiffusionXLPipeline, override:str=None):
+def load_unet(pipe: diffusers.StableDiffusionXLPipeline, override: str | None = None):
     if (recipe.unet is None or len(recipe.unet) == 0) and override is None:
         return
     fn = override or recipe.unet
@@ -99,7 +99,7 @@ def load_unet(pipe: diffusers.StableDiffusionXLPipeline, override:str=None):
         yield msg(f'unet: {e}')
 
 
-def load_scheduler(pipe: diffusers.StableDiffusionXLPipeline, override:str=None):
+def load_scheduler(pipe: diffusers.StableDiffusionXLPipeline, override: str | None = None):
     if recipe.scheduler is None and override is None:
         return
     config = pipe.scheduler.config.__dict__
@@ -114,7 +114,7 @@ def load_scheduler(pipe: diffusers.StableDiffusionXLPipeline, override:str=None)
 
 
-def load_vae(pipe: diffusers.StableDiffusionXLPipeline, override:str=None):
+def load_vae(pipe: diffusers.StableDiffusionXLPipeline, override: str | None = None):
     if (recipe.vae is None or len(recipe.vae) == 0)and override is None:
         return
     fn = override or recipe.vae
@@ -135,7 +135,7 @@ def load_vae(pipe: diffusers.StableDiffusionXLPipeline, override:str=None):
         yield msg(f'vae: {e}')
 
 
-def load_te1(pipe: diffusers.StableDiffusionXLPipeline, override:str=None):
+def load_te1(pipe: diffusers.StableDiffusionXLPipeline, override: str | None = None):
     if (recipe.te1 is None or len(recipe.te1) == 0) and override is None:
         return
     config = pipe.text_encoder.config.__dict__
@@ -156,7 +156,7 @@ def load_te1(pipe: diffusers.StableDiffusionXLPipeline, override:str=None):
         yield msg(f'te1: {e}')
 
 
-def load_te2(pipe: diffusers.StableDiffusionXLPipeline, override:str=None):
+def load_te2(pipe: diffusers.StableDiffusionXLPipeline, override: str | None = None):
     if (recipe.te2 is None or len(recipe.te2) == 0) and override is None:
         return
     config = pipe.text_encoder_2.config.__dict__
@@ -177,7 +177,7 @@ def load_te2(pipe: diffusers.StableDiffusionXLPipeline, override:str=None):
         yield msg(f'te2: {e}')
 
 
-def load_lora(pipe: diffusers.StableDiffusionXLPipeline, override: dict=None, fuse: float=None):
+def load_lora(pipe: diffusers.StableDiffusionXLPipeline, override: dict | None = None, fuse: float | None = None):
     if recipe.lora is None and override is None:
         return
     names = []
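
Note: RUF013 is the Ruff rule for PEP 484's "no implicit Optional" requirement: a parameter annotated with a plain type but defaulted to None must declare the Optional explicitly. Below is a minimal sketch of the before/after pattern every hunk in this patch applies; the scale() function is a made-up illustration, not code from the patch:

    # Before: RUF013 flags this -- the annotation says int, but the default
    # is None, so the parameter is implicitly Optional
    def scale(value: int, factor: int = None) -> int:  # noqa: RUF013
        return value * (factor if factor is not None else 1)

    # After: the Optional is explicit via PEP 604 union syntax
    def scale(value: int, factor: int | None = None) -> int:
        return value * (factor if factor is not None else 1)

The X | None form in annotations requires Python 3.10+ (or "from __future__ import annotations"); typing.Optional[X] is the equivalent spelling for older versions. Either way the fix only tightens the declared type, so runtime behavior is unchanged.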