diff --git a/.eslintrc.json b/.eslintrc.json
index 81c196021..af846df83 100644
--- a/.eslintrc.json
+++ b/.eslintrc.json
@@ -17,10 +17,10 @@
   ],
   "env": {
     "browser": true,
-    "commonjs": false,
-    "node": false,
-    "jquery": false,
-    "es2021": true
+    "commonjs": true,
+    "node": true,
+    "jquery": true,
+    "es2024": true
   },
   "rules": {
     "max-len": [1, 275, 3],
@@ -122,12 +122,13 @@
   "ignorePatterns": [
     "node_modules",
     "extensions",
-    "extensions-builtin",
     "repositories",
     "venv",
     "panzoom.js",
     "split.js",
     "exifr.js",
+    "jquery.js",
+    "sparkline.js",
     "iframeResizer.min.js"
   ]
 }
diff --git a/.pylintrc b/.pylintrc
index 27d3e7a52..5f22da840 100644
--- a/.pylintrc
+++ b/.pylintrc
@@ -61,10 +61,10 @@ ignore-paths=/usr/lib/.*$,
   scripts/pulid,
   scripts/xadapter,
   repositories,
-  extensions-builtin/Lora,
   extensions-builtin/sd-extension-chainner/nodes,
   extensions-builtin/sd-webui-agent-scheduler,
   extensions-builtin/sdnext-modernui/node_modules,
+  extensions-builtin/sdnext-kanvas/node_modules,
 ignore-patterns=.*test*.py$,
   .*_model.py$,
   .*_arch.py$,
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 51a8c8cd1..48f1f4c4e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,7 +2,17 @@
 
 ## Update for 2025-11-09
 
+### Highlights for 2025-11-09
+
+TBD
+
+### Details for 2025-11-09
+
 - **Features**
+  - **kanvas**: new module for native canvas-based image manipulation
+    kanvas is a full replacement for *img2img, inpaint and outpaint* controls
+    see [docs](https://vladmandic.github.io/sdnext-docs/Kanvas/) for details
+    *experimental*: report any feedback in master [issue](https://github.com/vladmandic/sdnext/issues/4358)
   - **wildcards**: allow recursive inline wildcards using curly braces syntax
   - **sdnq**: simplify pre-quantization saved config
   - **attention**: additional torch attention settings
diff --git a/extensions-builtin/sd-extension-system-info b/extensions-builtin/sd-extension-system-info
index c724b86a7..90abd719f 160000
--- a/extensions-builtin/sd-extension-system-info
+++ b/extensions-builtin/sd-extension-system-info
@@ -1 +1 @@
-Subproject commit c724b86a7a35208ee1132ab06c2ae5b7ed395824
+Subproject commit 90abd719f54578664317325c83781226217d68fd
diff --git a/extensions-builtin/sdnext-kanvas b/extensions-builtin/sdnext-kanvas
index 767d94ee4..6d20e813b 160000
--- a/extensions-builtin/sdnext-kanvas
+++ b/extensions-builtin/sdnext-kanvas
@@ -1 +1 @@
-Subproject commit 767d94ee44cb215545777f8c1f7585b9c6c82055
+Subproject commit 6d20e813bf876a8994081abb967b39d76679e35c
diff --git a/modules/attention.py b/modules/attention.py
index f547a303b..e92eb95dc 100644
--- a/modules/attention.py
+++ b/modules/attention.py
@@ -51,12 +51,12 @@ def set_triton_flash_attention(backend: str):
 def set_flex_attention():
     try:
         from torch.nn.attention.flex_attention import flex_attention, create_block_mask
-        def flex_attention_causal_mask(b, h, q_idx, kv_idx):
+        def flex_attention_causal_mask(b, h, q_idx, kv_idx): # pylint: disable=unused-argument
             return q_idx >= kv_idx
         sdpa_pre_flex_atten = torch.nn.functional.scaled_dot_product_attention
 
         @wraps(sdpa_pre_flex_atten)
-        def sdpa_flex_atten(query: torch.FloatTensor, key: torch.FloatTensor, value: torch.FloatTensor, attn_mask: Optional[torch.Tensor] = None, dropout_p: float = 0.0, is_causal: bool = False, scale: Optional[float] = None, enable_gqa: bool = False, **kwargs) -> torch.FloatTensor:
+        def sdpa_flex_atten(query: torch.FloatTensor, key: torch.FloatTensor, value: torch.FloatTensor, attn_mask: Optional[torch.Tensor] = None, dropout_p: float = 0.0, is_causal: bool = False, scale: Optional[float] = None, enable_gqa: bool = False, **kwargs) -> torch.FloatTensor: # pylint: disable=unused-argument
             score_mod = None
             block_mask = None
             if attn_mask is not None:
@@ -71,8 +71,9 @@ def set_flex_attention():
                         return attn_mask[batch_idx, head_idx, q_idx, kv_idx]
                     block_mask = create_block_mask(mask_mod, batch_size, None, seq_len_q, seq_len_kv, device=query.device)
                 else:
-                    def score_mod(score, batch_idx, head_idx, q_idx, kv_idx):
+                    def score_mod_fn(score, batch_idx, head_idx, q_idx, kv_idx):
                         return score + attn_mask[batch_idx, head_idx, q_idx, kv_idx]
+                    score_mod = score_mod_fn
             elif is_causal:
                 block_mask = create_block_mask(flex_attention_causal_mask, query.shape[0], query.shape[1], query.shape[-2], key.shape[-2], device=query.device)
             return flex_attention(query, key, value, score_mod=score_mod, block_mask=block_mask, scale=scale, enable_gqa=enable_gqa)
diff --git a/modules/linfusion/__init__.py b/modules/linfusion/__init__.py
index 34495936b..01b894af9 100644
--- a/modules/linfusion/__init__.py
+++ b/modules/linfusion/__init__.py
@@ -1,4 +1,4 @@
-from modules import shared, sd_models, devices
+from modules import shared, sd_models, devices, attention
 from .linfusion import LinFusion
 from .attention import GeneralizedLinearAttention
 
@@ -41,6 +41,6 @@ def unapply(pipeline):
     if applied is None:
         return
     # shared.log.debug('LinFusion: unapply')
-    sd_models.set_diffusers_attention(pipeline)
+    attention.set_diffusers_attention(pipeline)
     devices.torch_gc()
     applied = None
diff --git a/package.json b/package.json
index e42d5a2fd..12a76a8c4 100644
--- a/package.json
+++ b/package.json
@@ -9,7 +9,7 @@
   "homepage": "https://github.com/vladmandic/sdnext",
   "license": "Apache-2.0",
   "engines": {
-    "node": ">=14.0.0"
+    "node": ">=22.0.0"
   },
   "repository": {
     "type": "git",
@@ -20,7 +20,7 @@
     "start": ". venv/bin/activate; python launch.py --debug",
     "localize": "node cli/localize.js",
     "packages": ". venv/bin/activate && pip install --upgrade transformers accelerate huggingface_hub safetensors tokenizers peft pytorch_lightning pylint ruff",
-    "eslint": "eslint . javascript/ extensions-builtin/sdnext-modernui/javascript/",
+    "eslint": "eslint . javascript/",
     "ruff": ". venv/bin/activate && ruff check",
     "pylint": ". venv/bin/activate && pylint *.py modules/ pipelines/ scripts/ extensions-builtin/ | grep -v '^*'",
     "format": ". venv/bin/activate && pre-commit run --all-files",
diff --git a/wiki b/wiki
index 9e9871c5a..c35bdbb94 160000
--- a/wiki
+++ b/wiki
@@ -1 +1 @@
-Subproject commit 9e9871c5acda37d2101a09d456c7fccc0f56a518
+Subproject commit c35bdbb9479172cf19f4cd1b2f9891d675d1769a
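
For reference, a minimal standalone sketch of the two flex-attention paths that the modules/attention.py hunk chooses between: a boolean attn_mask is turned into a block_mask via create_block_mask, while a float attn_mask is applied as an additive score_mod closure. This is illustrative only, not sdnext code; it assumes PyTorch 2.5+ where torch.nn.attention.flex_attention is available, and the shapes and names (B, H, L, D, add_bias) are invented for the example.

# standalone sketch, not part of the patch above
import torch
from torch.nn.attention.flex_attention import flex_attention, create_block_mask

def causal_mask(b, h, q_idx, kv_idx):
    # boolean mask_mod: keep only non-future key positions
    return q_idx >= kv_idx

B, H, L, D = 1, 2, 64, 16
q = torch.randn(B, H, L, D)
k = torch.randn(B, H, L, D)
v = torch.randn(B, H, L, D)

# path 1: boolean mask -> block_mask (sparse evaluation of allowed blocks)
block_mask = create_block_mask(causal_mask, B, None, L, L, device=q.device)
out_causal = flex_attention(q, k, v, block_mask=block_mask)

# path 2: additive float bias -> score_mod applied to each attention score
bias = torch.randn(B, H, L, L)
def add_bias(score, b, h, q_idx, kv_idx):
    return score + bias[b, h, q_idx, kv_idx]
out_biased = flex_attention(q, k, v, score_mod=add_bias)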