switch lora logic and improve ui networks filter

Signed-off-by: vladmandic <mandic00@live.com>
pull/4550/head
vladmandic 2026-01-14 10:31:21 +01:00
parent e5176ef1f3
commit 4dbdee10e3
10 changed files with 50 additions and 63 deletions

View File

@ -39,6 +39,7 @@
- zluda detection and initialization improvements
- new env variable `SD_VAE_DEFAULT` to force default vae processing
- update `nunchaku==1.1.0`
- lora switch logic from force-diffusers to allow-native
- **Fixes**
- extension tab: update checker, date handling, formatting etc., thanks @awsr
- controlnet with non-english ui locales
@ -60,6 +61,7 @@
- google-genai auth, thanks @CalamitousFelicitousness
- reduce triton test verbosity
- improve qwen i2i handling
- networks filter by model type
## Update for 2025-12-26

View File

@ -6,8 +6,9 @@
## Internal
- Update `transformers==5.0.0`
- Unify *huggingface* and *diffusers* model folders
- Feature: Move `nunchaku` models to reference instead of internal decision
- Update: `transformers==5.0.0`
- Feature: Unify *huggingface* and *diffusers* model folders
- Reimplement `llama` remover for Kanvas
- Deploy: Create executable for SD.Next
- Feature: Integrate natural language image search

View File

@ -678,6 +678,9 @@ def check_transformers():
if args.use_directml:
target_transformers = '4.52.4'
target_tokenizers = '0.21.4'
elif args.new:
target_transformers = '5.0.0rc2'
target_tokenizers = '0.22.2'
else:
target_transformers = '4.57.5'
target_tokenizers = '0.22.2'

View File

@ -326,21 +326,21 @@ function extraNetworksSearchButton(event) {
function extraNetworksFilterVersion(event) {
  // Toggle filtering of extra-network cards by the clicked model-version label.
  // Clicking the same version a second time clears the filter and shows all cards.
  const version = event.target.textContent.trim();
  const activePage = getENActivePage().toLowerCase();
  // Filter every matching card container (not just the active tab's), so the
  // same page shown under txt2img/img2img stays in sync.
  const cardContainers = gradioApp().querySelectorAll('.extra-network-cards');
  log('extraNetworksFilterVersion', { activePage, version });
  for (const cardContainer of cardContainers) {
    if (!cardContainer.id.includes(activePage)) continue;
    if (cardContainer.dataset.activeVersion === version) {
      // Same version clicked again: clear filter, show everything.
      cardContainer.dataset.activeVersion = '';
      cardContainer.querySelectorAll('.card').forEach((card) => { card.style.display = ''; });
    } else {
      // New version selected: hide cards whose version does not match.
      cardContainer.dataset.activeVersion = version;
      cardContainer.querySelectorAll('.card').forEach((card) => {
        if (card.dataset.version === version) card.style.display = '';
        else card.style.display = 'none';
      });
    }
  }
}

View File

@ -1,12 +1,14 @@
from modules import shared
maybe_diffusers = [ # forced if lora_maybe_diffusers is enabled
force_hashes_diffusers = [ # forced always
# '816d0eed49fd', # flash-sdxl
# 'c2ec22757b46', # flash-sd15
# '22c8339e7666', # spo-sdxl-10ep
# 'aaebf6360f7d', # sd15-lcm
# '3d18b05e4f56', # sdxl-lcm
# 'b71dcb732467', # sdxl-tcd
# '813ea5fb1c67', # sdxl-turbo
# not really needed, but just in case
# '5a48ac366664', # hyper-sd15-1step
# 'ee0ff23dcc42', # hyper-sd15-2step
# 'e476eb1da5df', # hyper-sd15-4step
@ -19,41 +21,14 @@ maybe_diffusers = [ # forced if lora_maybe_diffusers is enabled
# '8cca3706050b', # hyper-sdxl-1step
]
force_diffusers = [ # forced always
'816d0eed49fd', # flash-sdxl
'c2ec22757b46', # flash-sd15
'22c8339e7666', # spo-sdxl-10ep
allow_native = [
'sd',
'sdxl',
'sd3',
'f1',
'chroma',
]
force_models_diffusers = [ # forced always
# 'sd3',
'sc',
'h1',
'kandinsky5',
'kandinsky3',
'kandinsky',
'hunyuandit',
'hunyuanimage',
'auraflow',
'lumina2',
'qwen',
'bria',
'flite',
'cosmos',
'chrono',
'z_image',
'f2',
'longcat',
# video models
'hunyuanvideo',
'hunyuanvideo15'
'cogvideo',
'wanai',
'chrono',
'ltxvideo',
'mochivideo',
'allegrovideo',
]
force_classes_diffusers = [ # forced always
'FluxKontextPipeline', 'FluxKontextInpaintPipeline',
@ -65,11 +40,9 @@ fuse_ignore = [
def get_method(shorthash=''):
use_diffusers = shared.opts.lora_force_diffusers or (shared.sd_model_type in force_models_diffusers) or (shared.sd_model.__class__.__name__ in force_classes_diffusers)
if shared.opts.lora_maybe_diffusers and len(shorthash) > 4:
use_diffusers = use_diffusers or any(x.startswith(shorthash) for x in maybe_diffusers)
if shared.opts.lora_force_diffusers and len(shorthash) > 4:
use_diffusers = use_diffusers or any(x.startswith(shorthash) for x in force_diffusers)
use_diffusers = shared.opts.lora_force_diffusers or (shared.sd_model.__class__.__name__ in force_classes_diffusers) or (shared.sd_model_type not in allow_native)
if len(shorthash) > 4:
use_diffusers = use_diffusers or any(x.startswith(shorthash) for x in force_hashes_diffusers)
nunchaku_dit = hasattr(shared.sd_model, 'transformer') and 'Nunchaku' in shared.sd_model.transformer.__class__.__name__
nunchaku_unet = hasattr(shared.sd_model, 'unet') and 'Nunchaku' in shared.sd_model.unet.__class__.__name__
use_nunchaku = nunchaku_dit or nunchaku_unet

View File

@ -65,6 +65,10 @@ class NetworkOnDisk:
return 'hv'
if base.startswith("chroma"):
return 'chroma'
if base.startswith('zimage'):
return 'zimage'
if base.startswith('qwen'):
return 'qwen'
if arch.startswith("stable-diffusion-v1"):
return 'sd1'

View File

@ -14,7 +14,7 @@ from modules.timer import process as process_timer
debug = os.environ.get('SD_MOVE_DEBUG', None) is not None
verbose = os.environ.get('SD_MOVE_VERBOSE', None) is not None
debug_move = log.trace if debug else lambda *args, **kwargs: None
offload_warn = ['sc', 'sd3', 'f1', 'f2', 'h1', 'hunyuandit', 'auraflow', 'omnigen', 'omnigen2', 'cogview4', 'cosmos', 'chroma', 'x-omni', 'hunyuanimage', 'hunyuanimage3', 'longcat']
offload_allow_none = ['sd', 'sdxl']
offload_post = ['h1']
offload_hook_instance = None
balanced_offload_exclude = ['CogView4Pipeline', 'MeissonicPipeline']
@ -132,7 +132,7 @@ def apply_sequential_offload(sd_model, op:str='model', quiet:bool=False):
def apply_none_offload(sd_model, op:str='model', quiet:bool=False):
if shared.sd_model_type in offload_warn or 'video' in shared.sd_model_type:
if shared.sd_model_type not in offload_allow_none:
shared.log.warning(f'Setting {op}: offload={shared.opts.diffusers_offload_mode} type={shared.sd_model.__class__.__name__} large model')
else:
shared.log.quiet(quiet, f'Setting {op}: offload={shared.opts.diffusers_offload_mode} limit={shared.opts.cuda_mem_fraction}')

View File

@ -747,7 +747,6 @@ options_templates.update(options_section(('extra_networks', "Networks"), {
"lora_in_memory_limit": OptionInfo(1, "LoRA memory cache", gr.Slider, {"minimum": 0, "maximum": 32, "step": 1}),
"lora_add_hashes_to_infotext": OptionInfo(False, "LoRA add hash info to metadata"),
"lora_quant": OptionInfo("NF4","LoRA precision when quantized", gr.Radio, {"choices": ["NF4", "FP4"]}),
"lora_maybe_diffusers": OptionInfo(False, "LoRA load using Diffusers method for selected models", gr.Checkbox, {"visible": False}),
"extra_networks_styles_sep": OptionInfo("<h2>Styles</h2>", "", gr.HTML),
"extra_networks_styles": OptionInfo(True, "Show reference styles"),

View File

@ -157,7 +157,7 @@ class ExtraNetworksPage:
pass
def patch(self, text: str, tabname: str):
    """Substitute the active tab name into templated page HTML.

    Replaces both the explicit '~tabname' placeholder and any hard-coded
    'txt2img' identifiers so shared templates work on every tab.
    """
    # NOTE: the original span carried two return statements (diff residue);
    # the second, reachable-intent version also rewrites 'txt2img'.
    return text.replace('~tabname', tabname).replace('txt2img', tabname)
def create_xyz_grid(self):
    # Hook for subclasses to register options with the XYZ grid; base page is a no-op.
    pass

View File

@ -64,6 +64,11 @@ class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
clean_tags.pop('dataset', None)
return clean_tags
def cleanup_version(self, dct, lora):
    """Return a compact base-model version label for a LoRA card.

    Prefers the metadata 'baseModel' entry, falling back to the network's
    detected sd_version; strips ' 0.9'/' 1.0' suffixes and all spaces.
    """
    label = dct.get("baseModel", lora.sd_version)
    for fragment in (' 0.9', ' 1.0', ' '):
        label = label.replace(fragment, '')
    return label
def create_item(self, name):
l = lora_load.available_networks.get(name)
if l is None:
@ -74,7 +79,7 @@ class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
name = os.path.splitext(os.path.relpath(l.filename, shared.cmd_opts.lora_dir))[0]
size, mtime = modelstats.stat(l.filename)
info = self.find_info(l.filename)
version = self.find_version(l, info)
ver_dct = self.find_version(l, info)
item = {
"type": 'Lora',
"name": name,
@ -85,10 +90,10 @@ class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
"metadata": json.dumps(l.metadata, indent=4) if l.metadata else None,
"mtime": mtime,
"size": size,
"version": version.get("baseModel", l.sd_version),
"version": self.cleanup_version(ver_dct, l),
"info": info,
"description": self.find_description(l.filename, info),
"tags": self.get_tags(l, info, version),
"tags": self.get_tags(l, info, ver_dct),
}
return item
except Exception as e: