diff --git a/html/reference.json b/html/reference.json index ad1b7e333..2c3ca1d7e 100644 --- a/html/reference.json +++ b/html/reference.json @@ -234,31 +234,36 @@ "size": 56.1, "date": "2025 August" }, - "Qwen-Image-Pruning": { - "path": "OPPOer/Qwen-Image-Pruning-13B", + "Qwen-Image Pruning-13B": { + "path": "OPPOer/Qwen-Image-Pruning", "subfolder": "Qwen-Image-13B", "preview": "vladmandic--Qwen-Lightning-Edit.jpg", "desc": "This open-source project is based on Qwen-Image and has attempted model pruning, removing 20 layers while retaining the weights of 40 layers, resulting in a model size of 13.6B parameters.", "skip": true, - "size": 56.1, "date": "2025 Ocotober" }, - "Qwen-Image-Edit-Pruning": { - "path": "OPPOer/Qwen-Image-Edit-Pruning-13B", + "Qwen-Image Pruning-12B": { + "path": "OPPOer/Qwen-Image-Pruning", + "subfolder": "Qwen-Image-12B-40steps", + "preview": "vladmandic--Qwen-Lightning-Edit.jpg", + "desc": "This open-source project is based on Qwen-Image and has attempted model pruning, removing 20 layers while retaining the weights of 40 layers, resulting in a model size of 13.6B parameters.", + "skip": true, + "date": "2025 October" + }, + "Qwen-Image-Edit Pruning-13B": { + "path": "OPPOer/Qwen-Image-Edit-Pruning", "subfolder": "Qwen-Image-Edit-13B", "preview": "vladmandic--Qwen-Lightning-Edit.jpg", "desc": "This open-source project is based on Qwen-Image-Edit and has attempted model pruning, removing 20 layers while retaining the weights of 40 layers, resulting in a model size of 13.6B parameters.", "skip": true, - "size": 56.1, "date": "2025 Ocotober" }, - "Qwen-Image-Edit-2509-Pruning": { - "path": "OPPOer/Qwen-Image-Edit-Pruning-14B", + "Qwen-Image-Edit-2509 Pruning-14B": { + "path": "OPPOer/Qwen-Image-Edit-Pruning", + "subfolder": "Qwen-Image-Edit-2509-14B", + "preview": "vladmandic--Qwen-Lightning-Edit.jpg", + "desc": "This open-source project is based on Qwen-Image-Edit and has attempted model pruning, removing 20 layers while retaining the weights of 
40 layers, resulting in a model size of 13.6B parameters.", "skip": true, - "size": 56.1, "date": "2025 Ocotober" }, diff --git a/modules/extras.py b/modules/extras.py index 2914d6c1e..4f282653c 100644 --- a/modules/extras.py +++ b/modules/extras.py @@ -39,24 +39,24 @@ def run_modelmerger(id_task, **kwargs): # pylint: disable=unused-argument return [*[gr.update() for _ in range(4)], message] kwargs["models"] = { - "model_a": sd_models.get_closet_checkpoint_match(kwargs.get("primary_model_name", None)).filename, - "model_b": sd_models.get_closet_checkpoint_match(kwargs.get("secondary_model_name", None)).filename, + "model_a": sd_models.get_closest_checkpoint_match(kwargs.get("primary_model_name", None)).filename, + "model_b": sd_models.get_closest_checkpoint_match(kwargs.get("secondary_model_name", None)).filename, } if kwargs.get("primary_model_name", None) in [None, 'None']: return fail("Failed: Merging requires a primary model.") - primary_model_info = sd_models.get_closet_checkpoint_match(kwargs.get("primary_model_name", None)) + primary_model_info = sd_models.get_closest_checkpoint_match(kwargs.get("primary_model_name", None)) if kwargs.get("secondary_model_name", None) in [None, 'None']: return fail("Failed: Merging requires a secondary model.") - secondary_model_info = sd_models.get_closet_checkpoint_match(kwargs.get("secondary_model_name", None)) + secondary_model_info = sd_models.get_closest_checkpoint_match(kwargs.get("secondary_model_name", None)) if kwargs.get("tertiary_model_name", None) in [None, 'None'] and kwargs.get("merge_mode", None) in merge_utils.TRIPLE_METHODS: return fail(f"Failed: Interpolation method ({kwargs.get('merge_mode', None)}) requires a tertiary model.") - tertiary_model_info = sd_models.get_closet_checkpoint_match(kwargs.get("tertiary_model_name", None)) if kwargs.get("merge_mode", None) in merge_utils.TRIPLE_METHODS else None + tertiary_model_info = sd_models.get_closest_checkpoint_match(kwargs.get("tertiary_model_name", None)) if 
kwargs.get("merge_mode", None) in merge_utils.TRIPLE_METHODS else None del kwargs["primary_model_name"] del kwargs["secondary_model_name"] if kwargs.get("tertiary_model_name", None) is not None: - kwargs["models"] |= {"model_c": sd_models.get_closet_checkpoint_match(kwargs.get("tertiary_model_name", None)).filename} + kwargs["models"] |= {"model_c": sd_models.get_closest_checkpoint_match(kwargs.get("tertiary_model_name", None)).filename} del kwargs["tertiary_model_name"] if kwargs.get("alpha_base", None) and kwargs.get("alpha_in_blocks", None) and kwargs.get("alpha_mid_block", None) and kwargs.get("alpha_out_blocks", None): @@ -204,7 +204,7 @@ def run_model_modules(model_type:str, model_name:str, custom_name:str, if len(custom_name) == 0: yield msg("output name is required", err=True) return - checkpoint_info = sd_models.get_closet_checkpoint_match(model_name) + checkpoint_info = sd_models.get_closest_checkpoint_match(model_name) if checkpoint_info is None: yield msg("input model not found", err=True) return diff --git a/modules/modelloader.py b/modules/modelloader.py index 0502a367c..5018d5cb9 100644 --- a/modules/modelloader.py +++ b/modules/modelloader.py @@ -227,6 +227,8 @@ def get_reference_opts(name: str, quiet=False): def load_reference(name: str, variant: str = None, revision: str = None, mirror: str = None, custom_pipeline: str = None): + if '+' in name: + name = name.split('+')[0] found = [r for r in diffuser_repos if name == r['name'] or name == r['friendly'] or name == r['path']] if len(found) > 0: # already downloaded model_opts = get_reference_opts(found[0]['name']) @@ -258,7 +260,7 @@ def load_reference(name: str, variant: str = None, revision: str = None, mirror: def load_civitai(model: str, url: str): from modules import sd_models name, _ext = os.path.splitext(model) - info = sd_models.get_closet_checkpoint_match(name) + info = sd_models.get_closest_checkpoint_match(name) if info is not None: _model_opts = get_reference_opts(info.model_name) return 
name # already downloaded @@ -268,7 +270,7 @@ def load_civitai(model: str, url: str): download_civit_model_thread(model_name=model, model_url=url, model_path='', model_type='safetensors', token=shared.opts.civitai_token) shared.log.debug(f'Reference download complete: model="{name}"') sd_models.list_models() - info = sd_models.get_closet_checkpoint_match(name) + info = sd_models.get_closest_checkpoint_match(name) if info is not None: shared.log.debug(f'Reference: model="{name}"') return name # already downloaded diff --git a/modules/modelstats.py b/modules/modelstats.py index 2e5e7f447..2ab1a6b62 100644 --- a/modules/modelstats.py +++ b/modules/modelstats.py @@ -79,7 +79,7 @@ class Model(): return self.cls = shared.sd_model.__class__.__name__ self.type = shared.sd_model_type - self.info = sd_models.get_closet_checkpoint_match(name) + self.info = sd_models.get_closest_checkpoint_match(name) if self.info is not None: self.name = self.info.name or self.name self.hash = self.info.shorthash or '' diff --git a/modules/onnx_impl/ui.py b/modules/onnx_impl/ui.py index 0af6ba345..703392d82 100644 --- a/modules/onnx_impl/ui.py +++ b/modules/onnx_impl/ui.py @@ -15,7 +15,7 @@ def create_ui(): from modules.ui_common import create_refresh_button from modules.ui_components import DropdownMulti from modules.shared import log, opts, cmd_opts, refresh_checkpoints - from modules.sd_models import checkpoint_titles, get_closet_checkpoint_match + from modules.sd_models import checkpoint_titles, get_closest_checkpoint_match from modules.paths import sd_configs_path from .execution_providers import ExecutionProvider, install_execution_provider from .utils import check_diffusers_cache @@ -74,7 +74,7 @@ def create_ui(): cache_remove_optimized.click(fn=remove_cache_optimized, inputs=[cache_state_dirname, cache_optimized_selected,]) def cache_update_menus(query: str): - checkpoint_info = get_closet_checkpoint_match(query) + checkpoint_info = get_closest_checkpoint_match(query) if 
checkpoint_info is None: log.error(f"Could not find checkpoint object for '{query}'.") return diff --git a/modules/sd_checkpoint.py b/modules/sd_checkpoint.py index 6870c740f..4b0be2e97 100644 --- a/modules/sd_checkpoint.py +++ b/modules/sd_checkpoint.py @@ -22,12 +22,12 @@ warn_once = False class CheckpointInfo: - def __init__(self, filename, sha=None): + def __init__(self, filename, sha=None, subfolder=None): self.name = None self.hash = sha self.filename = filename self.type = '' - self.subfolder = None + self.subfolder = subfolder relname = filename app_path = os.path.abspath(paths.script_path) @@ -197,13 +197,13 @@ def remove_hash(s): return re.sub(r'\s*\[.*?\]', '', s) -def get_closet_checkpoint_match(s: str) -> CheckpointInfo: +def get_closest_checkpoint_match(s: str) -> CheckpointInfo: + # direct hf url if s.startswith('https://huggingface.co/'): model_name = s.replace('https://huggingface.co/', '') checkpoint_info = CheckpointInfo(model_name) # create a virutal model info checkpoint_info.type = 'huggingface' return checkpoint_info - if s.startswith('huggingface/'): model_name = s.replace('huggingface/', '') checkpoint_info = CheckpointInfo(model_name) # create a virutal model info @@ -229,6 +229,16 @@ def get_closet_checkpoint_match(s: str) -> CheckpointInfo: # absolute path if s.endswith('.safetensors') and os.path.isfile(s): checkpoint_info = CheckpointInfo(s) + checkpoint_info.type = 'safetensors' + return checkpoint_info + + # reference search + ref = [(k, v) for k, v in shared.reference_models.items() if f"{v.get('path', '')}+{v.get('subfolder', '')}" == s] + if ref and len(ref) > 0: + _name, info = ref[0] + checkpoint_info = CheckpointInfo(s) + checkpoint_info.subfolder = info.get('subfolder', None) + checkpoint_info.type = 'reference' return checkpoint_info # huggingface search @@ -247,7 +257,8 @@ def get_closet_checkpoint_match(s: str) -> CheckpointInfo: if found is not None and len(found) == 1: checkpoint_info = CheckpointInfo(s) 
checkpoint_info.type = 'huggingface' - checkpoint_info.subfolder = subfolder + if subfolder is not None and len(subfolder) > 0: + checkpoint_info.subfolder = subfolder return checkpoint_info # civitai search @@ -281,7 +292,7 @@ def select_checkpoint(op='model', sd_model_checkpoint=None): model_checkpoint = sd_model_checkpoint or (shared.opts.data.get('sd_model_refiner', None) if op == 'refiner' else shared.opts.data.get('sd_model_checkpoint', None)) if model_checkpoint is None or model_checkpoint == 'None' or len(model_checkpoint) < 3: return None - checkpoint_info = get_closet_checkpoint_match(model_checkpoint) + checkpoint_info = get_closest_checkpoint_match(model_checkpoint) if checkpoint_info is not None: shared.log.info(f'Load {op}: select="{checkpoint_info.title if checkpoint_info is not None else None}"') return checkpoint_info diff --git a/modules/sd_models.py b/modules/sd_models.py index bb89f6df2..8e971e400 100644 --- a/modules/sd_models.py +++ b/modules/sd_models.py @@ -13,7 +13,7 @@ from installer import log from modules import timer, paths, shared, shared_items, modelloader, devices, script_callbacks, sd_vae, sd_unet, errors, sd_models_compile, sd_hijack_accelerate, sd_detect, model_quant, sd_hijack_te from modules.memstats import memory_stats from modules.modeldata import model_data -from modules.sd_checkpoint import CheckpointInfo, select_checkpoint, list_models, checkpoints_list, checkpoint_titles, get_closet_checkpoint_match, model_hash, update_model_hashes, setup_model, write_metadata, read_metadata_from_safetensors # pylint: disable=unused-import +from modules.sd_checkpoint import CheckpointInfo, select_checkpoint, list_models, checkpoints_list, checkpoint_titles, get_closest_checkpoint_match, model_hash, update_model_hashes, setup_model, write_metadata, read_metadata_from_safetensors # pylint: disable=unused-import from modules.sd_offload import disable_offload, set_diffuser_offload, apply_balanced_offload, set_accelerate # pylint: 
disable=unused-import from modules.sd_models_utils import NoWatermark, get_signature, get_call, path_to_repo, patch_diffuser_config, convert_to_faketensors, read_state_dict, get_state_dict_from_checkpoint, apply_function_to_model # pylint: disable=unused-import diff --git a/modules/sd_models_utils.py b/modules/sd_models_utils.py index 0d201779c..637f8c585 100644 --- a/modules/sd_models_utils.py +++ b/modules/sd_models_utils.py @@ -8,7 +8,7 @@ import torch import safetensors.torch from modules import paths, shared, errors -from modules.sd_checkpoint import CheckpointInfo, select_checkpoint, list_models, checkpoints_list, checkpoint_titles, get_closet_checkpoint_match, model_hash, update_model_hashes, setup_model, write_metadata, read_metadata_from_safetensors # pylint: disable=unused-import +from modules.sd_checkpoint import CheckpointInfo, select_checkpoint, list_models, checkpoints_list, checkpoint_titles, get_closest_checkpoint_match, model_hash, update_model_hashes, setup_model, write_metadata, read_metadata_from_safetensors # pylint: disable=unused-import from modules.sd_offload import disable_offload, set_diffuser_offload, apply_balanced_offload, set_accelerate # pylint: disable=unused-import @@ -46,6 +46,8 @@ def path_to_repo(checkpoint_info): repo_id = repo_id.replace('--', '/') if repo_id.count('/') != 1: shared.log.warning(f'Model: repo="{repo_id}" repository not recognized') + if '+' in repo_id: + repo_id = repo_id.split('+')[0] return repo_id diff --git a/modules/ui_extra_networks_checkpoints.py b/modules/ui_extra_networks_checkpoints.py index ae4c98422..bd7bccc87 100644 --- a/modules/ui_extra_networks_checkpoints.py +++ b/modules/ui_extra_networks_checkpoints.py @@ -32,6 +32,7 @@ class ExtraNetworksPageCheckpoints(ui_extra_networks.ExtraNetworksPage): def reference_downloaded(url): url = url.split('@')[0] if '@' in url else 'Diffusers/' + url + url = url.split('+')[0] if '+' in url else url return any(model.endswith(url) for model in existing) if not 
shared.opts.sd_checkpoint_autodownload or not shared.opts.extra_network_reference_enable: @@ -56,6 +57,7 @@ class ExtraNetworksPageCheckpoints(ui_extra_networks.ExtraNetworksPage): mtime = datetime.strptime(mtime, '%Y %B') # 2025 January except Exception: _size, mtime = modelstats.stat(preview_file) + path = f'{v.get("path", "")}+{v.get("subfolder", "")}' yield { "type": 'Model', "name": name, @@ -63,7 +65,7 @@ class ExtraNetworksPageCheckpoints(ui_extra_networks.ExtraNetworksPage): "filename": url, "preview": self.find_preview(os.path.join(paths.reference_path, preview)), "local_preview": preview_file, - "onclick": '"' + html.escape(f"selectReference({json.dumps(url)})") + '"', + "onclick": '"' + html.escape(f"selectReference({json.dumps(path)})") + '"', "hash": None, "mtime": mtime, "size": size, diff --git a/modules/ui_settings.py b/modules/ui_settings.py index 4cee32959..926b31762 100644 --- a/modules/ui_settings.py +++ b/modules/ui_settings.py @@ -24,7 +24,7 @@ def apply_setting(key, value): if key in shared.opts.disable_apply_metadata: gr.update() if key == "sd_model_checkpoint": - ckpt_info = sd_models.get_closet_checkpoint_match(value) + ckpt_info = sd_models.get_closest_checkpoint_match(value) if ckpt_info is not None: value = ckpt_info.title else: diff --git a/pipelines/model_qwen.py b/pipelines/model_qwen.py index eedefd014..6ed90bb15 100644 --- a/pipelines/model_qwen.py +++ b/pipelines/model_qwen.py @@ -55,7 +55,7 @@ def load_qwen(checkpoint_info, diffusers_load_config={}): repo_te = 'Qwen/Qwen-Image' text_encoder = generic.load_text_encoder(repo_te, cls_name=transformers.Qwen2_5_VLForConditionalGeneration, load_config=diffusers_load_config) - repo_id = qwen.check_qwen_pruning(repo_id) + repo_id, repo_subfolder = qwen.check_qwen_pruning(repo_id, repo_subfolder) pipe = cls_name.from_pretrained( repo_id, transformer=transformer, diff --git a/pipelines/qwen/qwen_pruning.py b/pipelines/qwen/qwen_pruning.py index 971589958..e2a642af1 100644 --- 
a/pipelines/qwen/qwen_pruning.py +++ b/pipelines/qwen/qwen_pruning.py @@ -1,12 +1,12 @@ -def check_qwen_pruning(repo_id): +def check_qwen_pruning(repo_id, subfolder): from modules.shared import log if 'pruning' not in repo_id.lower(): - return repo_id - if '2509' in repo_id: - repo_id = "Qwen/Qwen-Image-Edit-2509" - elif 'Edit' in repo_id: - repo_id = "Qwen/Qwen-Image-Edit" + return repo_id, subfolder + if '2509' in (repo_id or '') or '2509' in (subfolder or ''): + repo_id, subfolder = "Qwen/Qwen-Image-Edit-2509", None + elif 'Edit' in (repo_id or '') or 'Edit' in (subfolder or ''): + repo_id, subfolder = "Qwen/Qwen-Image-Edit", None else: - repo_id = "Qwen/Qwen-Image" + repo_id, subfolder = "Qwen/Qwen-Image", None log.debug(f'Load model: variant=pruning target="{repo_id}"') - return repo_id + return repo_id, subfolder diff --git a/scripts/stablevideodiffusion.py b/scripts/stablevideodiffusion.py index 76aa08b22..a0b959824 100644 --- a/scripts/stablevideodiffusion.py +++ b/scripts/stablevideodiffusion.py @@ -53,7 +53,7 @@ class Script(scripts_manager.Script): # load/download model on-demand model_path = models[model] model_name = os.path.basename(model_path) - has_checkpoint = sd_models.get_closet_checkpoint_match(model_path) + has_checkpoint = sd_models.get_closest_checkpoint_match(model_path) if has_checkpoint is None: shared.log.error(f'SVD: no checkpoint for {model_name}') modelloader.load_reference(model_path, variant='fp16') diff --git a/scripts/text2video.py b/scripts/text2video.py index b69fd7605..f6880b10f 100644 --- a/scripts/text2video.py +++ b/scripts/text2video.py @@ -62,7 +62,7 @@ class Script(scripts_manager.Script): if model['path'] in shared.opts.sd_model_checkpoint: shared.log.debug(f'Text2Video cached: model={shared.opts.sd_model_checkpoint}') else: - checkpoint = sd_models.get_closet_checkpoint_match(model['path']) + checkpoint = sd_models.get_closest_checkpoint_match(model['path']) if checkpoint is None: shared.log.debug(f'Text2Video 
downloading: model={model["path"]}') checkpoint = modelloader.download_diffusers_model(hub_id=model['path']) diff --git a/scripts/xyz/xyz_grid_shared.py b/scripts/xyz/xyz_grid_shared.py index 76d4df769..dd454d3c2 100644 --- a/scripts/xyz/xyz_grid_shared.py +++ b/scripts/xyz/xyz_grid_shared.py @@ -164,7 +164,7 @@ def apply_sdnq_quant_te(p, x, xs): def apply_checkpoint(p, x, xs): if x == shared.opts.sd_model_checkpoint: return - info = sd_models.get_closet_checkpoint_match(x) + info = sd_models.get_closest_checkpoint_match(x) if info is None: shared.log.warning(f"XYZ grid: apply checkpoint unknown checkpoint: {x}") else: @@ -178,7 +178,7 @@ def apply_refiner(p, x, xs): return if x == 'None': return - info = sd_models.get_closet_checkpoint_match(x) + info = sd_models.get_closest_checkpoint_match(x) if info is None: shared.log.warning(f"XYZ grid: apply refiner unknown checkpoint: {x}") else: diff --git a/wiki b/wiki index dfcb801c5..f0f550688 160000 --- a/wiki +++ b/wiki @@ -1 +1 @@ -Subproject commit dfcb801c592a493d48b7e22a0a46ab39340fd021 +Subproject commit f0f5506886e1478d08081ce5296eb246291fc196