update qwen pruning and allow hf models in subfolders

Signed-off-by: Vladimir Mandic <mandic00@live.com>
pull/4251/head
Vladimir Mandic 2025-10-04 15:48:42 -04:00
parent f2e12a682f
commit 8b698ed67f
16 changed files with 67 additions and 45 deletions

View File

@ -234,31 +234,36 @@
"size": 56.1,
"date": "2025 August"
},
"Qwen-Image-Pruning": {
"path": "OPPOer/Qwen-Image-Pruning-13B",
"Qwen-Image Pruning-13B": {
"path": "OPPOer/Qwen-Image-Pruning",
"subfolder": "Qwen-Image-13B",
"preview": "vladmandic--Qwen-Lightning-Edit.jpg",
"desc": "This open-source project is based on Qwen-Image and has attempted model pruning, removing 20 layers while retaining the weights of 40 layers, resulting in a model size of 13.6B parameters.",
"skip": true,
"size": 56.1,
"date": "2025 October"
},
"Qwen-Image-Edit-Pruning": {
"path": "OPPOer/Qwen-Image-Edit-Pruning-13B",
"Qwen-Image Pruning-12B": {
"path": "OPPOer/Qwen-Image-Pruning",
"subfolder": "Qwen-Image-12B-40steps",
"preview": "vladmandic--Qwen-Lightning-Edit.jpg",
"desc": "This open-source project is based on Qwen-Image and has attempted model pruning, removing 20 layers while retaining the weights of 40 layers, resulting in a model size of 13.6B parameters.",
"skip": true,
"date": "2025 October"
},
"Qwen-Image-Edit Pruning-13B": {
"path": "OPPOer/Qwen-Image-Edit-Pruning",
"subfolder": "Qwen-Image-Edit-13B",
"preview": "vladmandic--Qwen-Lightning-Edit.jpg",
"desc": "This open-source project is based on Qwen-Image-Edit and has attempted model pruning, removing 20 layers while retaining the weights of 40 layers, resulting in a model size of 13.6B parameters.",
"skip": true,
"size": 56.1,
"date": "2025 October"
},
"Qwen-Image-Edit-2509-Pruning": {
"path": "OPPOer/Qwen-Image-Edit-Pruning-14B",
"Qwen-Image-Edit-2509 Pruning-14B": {
"path": "OPPOer/Qwen-Image-Edit-Pruning",
"subfolder": "Qwen-Image-Edit-2509-14B",
"preview": "vladmandic--Qwen-Lightning-Edit.jpg",
"desc": "This open-source project is based on Qwen-Image-Edit and has attempted model pruning, removing 20 layers while retaining the weights of 40 layers, resulting in a model size of 13.6B parameters.",
"skip": true,
"size": 56.1,
"date": "2025 October"
},

View File

@ -39,24 +39,24 @@ def run_modelmerger(id_task, **kwargs): # pylint: disable=unused-argument
return [*[gr.update() for _ in range(4)], message]
kwargs["models"] = {
"model_a": sd_models.get_closet_checkpoint_match(kwargs.get("primary_model_name", None)).filename,
"model_b": sd_models.get_closet_checkpoint_match(kwargs.get("secondary_model_name", None)).filename,
"model_a": sd_models.get_closest_checkpoint_match(kwargs.get("primary_model_name", None)).filename,
"model_b": sd_models.get_closest_checkpoint_match(kwargs.get("secondary_model_name", None)).filename,
}
if kwargs.get("primary_model_name", None) in [None, 'None']:
return fail("Failed: Merging requires a primary model.")
primary_model_info = sd_models.get_closet_checkpoint_match(kwargs.get("primary_model_name", None))
primary_model_info = sd_models.get_closest_checkpoint_match(kwargs.get("primary_model_name", None))
if kwargs.get("secondary_model_name", None) in [None, 'None']:
return fail("Failed: Merging requires a secondary model.")
secondary_model_info = sd_models.get_closet_checkpoint_match(kwargs.get("secondary_model_name", None))
secondary_model_info = sd_models.get_closest_checkpoint_match(kwargs.get("secondary_model_name", None))
if kwargs.get("tertiary_model_name", None) in [None, 'None'] and kwargs.get("merge_mode", None) in merge_utils.TRIPLE_METHODS:
return fail(f"Failed: Interpolation method ({kwargs.get('merge_mode', None)}) requires a tertiary model.")
tertiary_model_info = sd_models.get_closet_checkpoint_match(kwargs.get("tertiary_model_name", None)) if kwargs.get("merge_mode", None) in merge_utils.TRIPLE_METHODS else None
tertiary_model_info = sd_models.get_closest_checkpoint_match(kwargs.get("tertiary_model_name", None)) if kwargs.get("merge_mode", None) in merge_utils.TRIPLE_METHODS else None
del kwargs["primary_model_name"]
del kwargs["secondary_model_name"]
if kwargs.get("tertiary_model_name", None) is not None:
kwargs["models"] |= {"model_c": sd_models.get_closet_checkpoint_match(kwargs.get("tertiary_model_name", None)).filename}
kwargs["models"] |= {"model_c": sd_models.get_closest_checkpoint_match(kwargs.get("tertiary_model_name", None)).filename}
del kwargs["tertiary_model_name"]
if kwargs.get("alpha_base", None) and kwargs.get("alpha_in_blocks", None) and kwargs.get("alpha_mid_block", None) and kwargs.get("alpha_out_blocks", None):
@ -204,7 +204,7 @@ def run_model_modules(model_type:str, model_name:str, custom_name:str,
if len(custom_name) == 0:
yield msg("output name is required", err=True)
return
checkpoint_info = sd_models.get_closet_checkpoint_match(model_name)
checkpoint_info = sd_models.get_closest_checkpoint_match(model_name)
if checkpoint_info is None:
yield msg("input model not found", err=True)
return

View File

@ -227,6 +227,8 @@ def get_reference_opts(name: str, quiet=False):
def load_reference(name: str, variant: str = None, revision: str = None, mirror: str = None, custom_pipeline: str = None):
if '+' in name:
name = name.split('+')[0]
found = [r for r in diffuser_repos if name == r['name'] or name == r['friendly'] or name == r['path']]
if len(found) > 0: # already downloaded
model_opts = get_reference_opts(found[0]['name'])
@ -258,7 +260,7 @@ def load_reference(name: str, variant: str = None, revision: str = None, mirror:
def load_civitai(model: str, url: str):
from modules import sd_models
name, _ext = os.path.splitext(model)
info = sd_models.get_closet_checkpoint_match(name)
info = sd_models.get_closest_checkpoint_match(name)
if info is not None:
_model_opts = get_reference_opts(info.model_name)
return name # already downloaded
@ -268,7 +270,7 @@ def load_civitai(model: str, url: str):
download_civit_model_thread(model_name=model, model_url=url, model_path='', model_type='safetensors', token=shared.opts.civitai_token)
shared.log.debug(f'Reference download complete: model="{name}"')
sd_models.list_models()
info = sd_models.get_closet_checkpoint_match(name)
info = sd_models.get_closest_checkpoint_match(name)
if info is not None:
shared.log.debug(f'Reference: model="{name}"')
return name # already downloaded

View File

@ -79,7 +79,7 @@ class Model():
return
self.cls = shared.sd_model.__class__.__name__
self.type = shared.sd_model_type
self.info = sd_models.get_closet_checkpoint_match(name)
self.info = sd_models.get_closest_checkpoint_match(name)
if self.info is not None:
self.name = self.info.name or self.name
self.hash = self.info.shorthash or ''

View File

@ -15,7 +15,7 @@ def create_ui():
from modules.ui_common import create_refresh_button
from modules.ui_components import DropdownMulti
from modules.shared import log, opts, cmd_opts, refresh_checkpoints
from modules.sd_models import checkpoint_titles, get_closet_checkpoint_match
from modules.sd_models import checkpoint_titles, get_closest_checkpoint_match
from modules.paths import sd_configs_path
from .execution_providers import ExecutionProvider, install_execution_provider
from .utils import check_diffusers_cache
@ -74,7 +74,7 @@ def create_ui():
cache_remove_optimized.click(fn=remove_cache_optimized, inputs=[cache_state_dirname, cache_optimized_selected,])
def cache_update_menus(query: str):
checkpoint_info = get_closet_checkpoint_match(query)
checkpoint_info = get_closest_checkpoint_match(query)
if checkpoint_info is None:
log.error(f"Could not find checkpoint object for '{query}'.")
return

View File

@ -22,12 +22,12 @@ warn_once = False
class CheckpointInfo:
def __init__(self, filename, sha=None):
def __init__(self, filename, sha=None, subfolder=None):
self.name = None
self.hash = sha
self.filename = filename
self.type = ''
self.subfolder = None
self.subfolder = subfolder
relname = filename
app_path = os.path.abspath(paths.script_path)
@ -197,13 +197,13 @@ def remove_hash(s):
return re.sub(r'\s*\[.*?\]', '', s)
def get_closet_checkpoint_match(s: str) -> CheckpointInfo:
def get_closest_checkpoint_match(s: str) -> CheckpointInfo:
# direct hf url
if s.startswith('https://huggingface.co/'):
model_name = s.replace('https://huggingface.co/', '')
checkpoint_info = CheckpointInfo(model_name) # create a virtual model info
checkpoint_info.type = 'huggingface'
return checkpoint_info
if s.startswith('huggingface/'):
model_name = s.replace('huggingface/', '')
checkpoint_info = CheckpointInfo(model_name) # create a virtual model info
@ -229,6 +229,16 @@ def get_closet_checkpoint_match(s: str) -> CheckpointInfo:
# absolute path
if s.endswith('.safetensors') and os.path.isfile(s):
checkpoint_info = CheckpointInfo(s)
checkpoint_info.type = 'safetensors'
return checkpoint_info
# reference search
ref = [(k, v) for k, v in shared.reference_models.items() if f"{v.get('path', '')}+{v.get('subfolder', '')}" == s]
if ref and len(ref) > 0:
_name, info = ref[0]
checkpoint_info = CheckpointInfo(s)
checkpoint_info.subfolder = info.get('subfolder', None)
checkpoint_info.type = 'reference'
return checkpoint_info
# huggingface search
@ -247,7 +257,8 @@ def get_closet_checkpoint_match(s: str) -> CheckpointInfo:
if found is not None and len(found) == 1:
checkpoint_info = CheckpointInfo(s)
checkpoint_info.type = 'huggingface'
checkpoint_info.subfolder = subfolder
if subfolder is not None and len(subfolder) > 0:
checkpoint_info.subfolder = subfolder
return checkpoint_info
# civitai search
@ -281,7 +292,7 @@ def select_checkpoint(op='model', sd_model_checkpoint=None):
model_checkpoint = sd_model_checkpoint or (shared.opts.data.get('sd_model_refiner', None) if op == 'refiner' else shared.opts.data.get('sd_model_checkpoint', None))
if model_checkpoint is None or model_checkpoint == 'None' or len(model_checkpoint) < 3:
return None
checkpoint_info = get_closet_checkpoint_match(model_checkpoint)
checkpoint_info = get_closest_checkpoint_match(model_checkpoint)
if checkpoint_info is not None:
shared.log.info(f'Load {op}: select="{checkpoint_info.title if checkpoint_info is not None else None}"')
return checkpoint_info

View File

@ -13,7 +13,7 @@ from installer import log
from modules import timer, paths, shared, shared_items, modelloader, devices, script_callbacks, sd_vae, sd_unet, errors, sd_models_compile, sd_hijack_accelerate, sd_detect, model_quant, sd_hijack_te
from modules.memstats import memory_stats
from modules.modeldata import model_data
from modules.sd_checkpoint import CheckpointInfo, select_checkpoint, list_models, checkpoints_list, checkpoint_titles, get_closet_checkpoint_match, model_hash, update_model_hashes, setup_model, write_metadata, read_metadata_from_safetensors # pylint: disable=unused-import
from modules.sd_checkpoint import CheckpointInfo, select_checkpoint, list_models, checkpoints_list, checkpoint_titles, get_closest_checkpoint_match, model_hash, update_model_hashes, setup_model, write_metadata, read_metadata_from_safetensors # pylint: disable=unused-import
from modules.sd_offload import disable_offload, set_diffuser_offload, apply_balanced_offload, set_accelerate # pylint: disable=unused-import
from modules.sd_models_utils import NoWatermark, get_signature, get_call, path_to_repo, patch_diffuser_config, convert_to_faketensors, read_state_dict, get_state_dict_from_checkpoint, apply_function_to_model # pylint: disable=unused-import

View File

@ -8,7 +8,7 @@ import torch
import safetensors.torch
from modules import paths, shared, errors
from modules.sd_checkpoint import CheckpointInfo, select_checkpoint, list_models, checkpoints_list, checkpoint_titles, get_closet_checkpoint_match, model_hash, update_model_hashes, setup_model, write_metadata, read_metadata_from_safetensors # pylint: disable=unused-import
from modules.sd_checkpoint import CheckpointInfo, select_checkpoint, list_models, checkpoints_list, checkpoint_titles, get_closest_checkpoint_match, model_hash, update_model_hashes, setup_model, write_metadata, read_metadata_from_safetensors # pylint: disable=unused-import
from modules.sd_offload import disable_offload, set_diffuser_offload, apply_balanced_offload, set_accelerate # pylint: disable=unused-import
@ -46,6 +46,8 @@ def path_to_repo(checkpoint_info):
repo_id = repo_id.replace('--', '/')
if repo_id.count('/') != 1:
shared.log.warning(f'Model: repo="{repo_id}" repository not recognized')
if '+' in repo_id:
repo_id = repo_id.split('+')[0]
return repo_id

View File

@ -32,6 +32,7 @@ class ExtraNetworksPageCheckpoints(ui_extra_networks.ExtraNetworksPage):
def reference_downloaded(url):
url = url.split('@')[0] if '@' in url else 'Diffusers/' + url
url = url.split('+')[0] if '+' in url else url
return any(model.endswith(url) for model in existing)
if not shared.opts.sd_checkpoint_autodownload or not shared.opts.extra_network_reference_enable:
@ -56,6 +57,7 @@ class ExtraNetworksPageCheckpoints(ui_extra_networks.ExtraNetworksPage):
mtime = datetime.strptime(mtime, '%Y %B') # 2025 January
except Exception:
_size, mtime = modelstats.stat(preview_file)
path = f'{v.get("path", "")}+{v.get("subfolder", "")}'
yield {
"type": 'Model',
"name": name,
@ -63,7 +65,7 @@ class ExtraNetworksPageCheckpoints(ui_extra_networks.ExtraNetworksPage):
"filename": url,
"preview": self.find_preview(os.path.join(paths.reference_path, preview)),
"local_preview": preview_file,
"onclick": '"' + html.escape(f"selectReference({json.dumps(url)})") + '"',
"onclick": '"' + html.escape(f"selectReference({json.dumps(path)})") + '"',
"hash": None,
"mtime": mtime,
"size": size,

View File

@ -24,7 +24,7 @@ def apply_setting(key, value):
if key in shared.opts.disable_apply_metadata:
gr.update()
if key == "sd_model_checkpoint":
ckpt_info = sd_models.get_closet_checkpoint_match(value)
ckpt_info = sd_models.get_closest_checkpoint_match(value)
if ckpt_info is not None:
value = ckpt_info.title
else:

View File

@ -55,7 +55,7 @@ def load_qwen(checkpoint_info, diffusers_load_config={}):
repo_te = 'Qwen/Qwen-Image'
text_encoder = generic.load_text_encoder(repo_te, cls_name=transformers.Qwen2_5_VLForConditionalGeneration, load_config=diffusers_load_config)
repo_id = qwen.check_qwen_pruning(repo_id)
repo_id, repo_subfolder = qwen.check_qwen_pruning(repo_id, repo_subfolder)
pipe = cls_name.from_pretrained(
repo_id,
transformer=transformer,

View File

@ -1,12 +1,12 @@
def check_qwen_pruning(repo_id):
def check_qwen_pruning(repo_id, subfolder):
from modules.shared import log
if 'pruning' not in repo_id.lower():
return repo_id
if '2509' in repo_id:
repo_id = "Qwen/Qwen-Image-Edit-2509"
elif 'Edit' in repo_id:
repo_id = "Qwen/Qwen-Image-Edit"
return repo_id, subfolder
if '2509' in (repo_id or '') or '2509' in (subfolder or ''):
repo_id, subfolder = "Qwen/Qwen-Image-Edit-2509", None
elif 'Edit' in (repo_id or '') or 'Edit' in (subfolder or ''):
repo_id, subfolder = "Qwen/Qwen-Image-Edit", None
else:
repo_id = "Qwen/Qwen-Image"
repo_id, subfolder = "Qwen/Qwen-Image", None
log.debug(f'Load model: variant=pruning target="{repo_id}"')
return repo_id
return repo_id, subfolder

View File

@ -53,7 +53,7 @@ class Script(scripts_manager.Script):
# load/download model on-demand
model_path = models[model]
model_name = os.path.basename(model_path)
has_checkpoint = sd_models.get_closet_checkpoint_match(model_path)
has_checkpoint = sd_models.get_closest_checkpoint_match(model_path)
if has_checkpoint is None:
shared.log.error(f'SVD: no checkpoint for {model_name}')
modelloader.load_reference(model_path, variant='fp16')

View File

@ -62,7 +62,7 @@ class Script(scripts_manager.Script):
if model['path'] in shared.opts.sd_model_checkpoint:
shared.log.debug(f'Text2Video cached: model={shared.opts.sd_model_checkpoint}')
else:
checkpoint = sd_models.get_closet_checkpoint_match(model['path'])
checkpoint = sd_models.get_closest_checkpoint_match(model['path'])
if checkpoint is None:
shared.log.debug(f'Text2Video downloading: model={model["path"]}')
checkpoint = modelloader.download_diffusers_model(hub_id=model['path'])

View File

@ -164,7 +164,7 @@ def apply_sdnq_quant_te(p, x, xs):
def apply_checkpoint(p, x, xs):
if x == shared.opts.sd_model_checkpoint:
return
info = sd_models.get_closet_checkpoint_match(x)
info = sd_models.get_closest_checkpoint_match(x)
if info is None:
shared.log.warning(f"XYZ grid: apply checkpoint unknown checkpoint: {x}")
else:
@ -178,7 +178,7 @@ def apply_refiner(p, x, xs):
return
if x == 'None':
return
info = sd_models.get_closet_checkpoint_match(x)
info = sd_models.get_closest_checkpoint_match(x)
if info is None:
shared.log.warning(f"XYZ grid: apply refiner unknown checkpoint: {x}")
else:

2
wiki

@ -1 +1 @@
Subproject commit dfcb801c592a493d48b7e22a0a46ab39340fd021
Subproject commit f0f5506886e1478d08081ce5296eb246291fc196