Restructure ONNX-related files & change olive-ai to an optional dependency.

pull/2784/head
Seunghoon Lee 2024-01-30 20:46:19 +09:00
parent 88882a38e4
commit 49b13b9526
21 changed files with 141 additions and 155 deletions

View File

@@ -4,7 +4,7 @@
     "config": {
       "model_path": "",
       "model_loader": "text_encoder_load",
-      "model_script": "modules/olive.py",
+      "model_script": "modules/olive_script.py",
       "io_config": {
         "input_names": ["input_ids"],
         "output_names": ["last_hidden_state", "pooler_output"],
@@ -27,7 +27,7 @@
       "type": "latency",
       "sub_types": [{ "name": "avg" }],
       "user_config": {
-        "user_script": "modules/olive.py",
+        "user_script": "modules/olive_script.py",
         "dataloader_func": "text_encoder_data_loader",
         "batch_size": 1
       }
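
The model_script/user_script entries in these Olive workflow configs all move from the deleted modules/olive.py to the renamed modules/olive_script.py, likely to keep the module name from clashing with the olive package import. As a hedged sketch of how a config like the one above is handed to Olive (this mirrors the run() call kept in the optimize path further down; the file path, model path, and execution provider here are illustrative):

import json

from olive.workflows import run  # pylint: disable=no-name-in-module

# load one of the workflow configs shown above and patch in runtime values
with open("configs/olive/sd/text_encoder.json", "r", encoding="utf-8") as config_file:
    workflow = json.load(config_file)
workflow["input_model"]["config"]["model_path"] = "/path/to/stable-diffusion"
workflow["engine"]["execution_providers"] = ["CPUExecutionProvider"]
run(workflow)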

View File

@@ -4,7 +4,7 @@
     "config": {
       "model_path": "",
       "model_loader": "unet_load",
-      "model_script": "modules/olive.py",
+      "model_script": "modules/olive_script.py",
       "io_config": {
         "input_names": [
           "sample",
@@ -44,7 +44,7 @@
       "type": "latency",
       "sub_types": [{ "name": "avg" }],
       "user_config": {
-        "user_script": "modules/olive.py",
+        "user_script": "modules/olive_script.py",
         "dataloader_func": "unet_data_loader",
         "batch_size": 2
       }

View File

@@ -4,7 +4,7 @@
     "config": {
       "model_path": "",
       "model_loader": "vae_decoder_load",
-      "model_script": "modules/olive.py",
+      "model_script": "modules/olive_script.py",
       "io_config": {
         "input_names": ["latent_sample", "return_dict"],
         "output_names": ["sample"],
@@ -34,7 +34,7 @@
       "type": "latency",
       "sub_types": [{ "name": "avg" }],
       "user_config": {
-        "user_script": "modules/olive.py",
+        "user_script": "modules/olive_script.py",
         "dataloader_func": "vae_decoder_data_loader",
         "batch_size": 1
       }

View File

@@ -4,7 +4,7 @@
     "config": {
       "model_path": "",
       "model_loader": "vae_encoder_load",
-      "model_script": "modules/olive.py",
+      "model_script": "modules/olive_script.py",
       "io_config": {
         "input_names": ["sample", "return_dict"],
         "output_names": ["latent_sample"],
@@ -34,7 +34,7 @@
       "type": "latency",
       "sub_types": [{ "name": "avg" }],
       "user_config": {
-        "user_script": "modules/olive.py",
+        "user_script": "modules/olive_script.py",
         "dataloader_func": "vae_encoder_data_loader",
         "batch_size": 1
       }

View File

@@ -4,7 +4,7 @@
     "config": {
       "model_path": "",
       "model_loader": "text_encoder_load",
-      "model_script": "modules/olive.py",
+      "model_script": "modules/olive_script.py",
       "io_config": {
         "input_names": ["input_ids", "output_hidden_states"],
         "output_names": [
@@ -62,7 +62,7 @@
       "type": "latency",
       "sub_types": [{ "name": "avg" }],
       "user_config": {
-        "user_script": "modules/olive.py",
+        "user_script": "modules/olive_script.py",
         "dataloader_func": "text_encoder_data_loader",
         "batch_size": 1
       }

View File

@@ -4,7 +4,7 @@
     "config": {
       "model_path": "",
       "model_loader": "text_encoder_2_load",
-      "model_script": "modules/olive.py",
+      "model_script": "modules/olive_script.py",
       "io_config": {
         "input_names": ["input_ids", "output_hidden_states"],
         "output_names": [
@@ -102,7 +102,7 @@
       "type": "latency",
       "sub_types": [{ "name": "avg" }],
       "user_config": {
-        "user_script": "modules/olive.py",
+        "user_script": "modules/olive_script.py",
         "dataloader_func": "text_encoder_2_data_loader",
         "batch_size": 1
       }

View File

@@ -4,7 +4,7 @@
     "config": {
       "model_path": "",
       "model_loader": "unet_load",
-      "model_script": "modules/olive.py",
+      "model_script": "modules/olive_script.py",
       "io_config": {
         "input_names": [
           "sample",
@@ -52,7 +52,7 @@
       "type": "latency",
       "sub_types": [{ "name": "avg" }],
       "user_config": {
-        "user_script": "modules/olive.py",
+        "user_script": "modules/olive_script.py",
         "dataloader_func": "unet_data_loader",
         "batch_size": 2
       }

View File

@@ -4,7 +4,7 @@
     "config": {
       "model_path": "",
       "model_loader": "vae_decoder_load",
-      "model_script": "modules/olive.py",
+      "model_script": "modules/olive_script.py",
       "io_config": {
         "input_names": ["latent_sample", "return_dict"],
         "output_names": ["sample"],
@@ -42,7 +42,7 @@
       "type": "latency",
       "sub_types": [{ "name": "avg" }],
       "user_config": {
-        "user_script": "modules/olive.py",
+        "user_script": "modules/olive_script.py",
         "dataloader_func": "vae_decoder_data_loader",
         "batch_size": 1
       }

View File

@@ -4,7 +4,7 @@
     "config": {
       "model_path": "",
       "model_loader": "vae_encoder_load",
-      "model_script": "modules/olive.py",
+      "model_script": "modules/olive_script.py",
       "io_config": {
         "input_names": ["sample", "return_dict"],
         "output_names": ["latent_sample"],
@@ -42,7 +42,7 @@
       "type": "latency",
       "sub_types": [{ "name": "avg" }],
       "user_config": {
-        "user_script": "modules/olive.py",
+        "user_script": "modules/olive_script.py",
         "dataloader_func": "vae_encoder_data_loader",
         "batch_size": 1
       }

View File

@@ -68,11 +68,11 @@ class TemporalModule(TorchCompatibleModule):
         self.sess_options = sess_options

     def to(self, *args, **kwargs):
-        from modules.onnx_utils import extract_device
+        from .utils import extract_device

         device = extract_device(args, kwargs)
         if device is not None and device.type != "cpu":
-            from modules.onnx_ep import TORCH_DEVICE_TO_EP
+            from .execution_providers import TORCH_DEVICE_TO_EP
             provider = TORCH_DEVICE_TO_EP[device.type] if device.type in TORCH_DEVICE_TO_EP else self.provider
             return OnnxRuntimeModel.load_model(self.path, provider, DynamicSessionOptions.from_sess_options(self.sess_options))
@@ -86,7 +86,7 @@ class OnnxRuntimeModel(TorchCompatibleModule, diffusers.OnnxRuntimeModel):
         return ()

     def to(self, *args, **kwargs):
-        from modules.onnx_utils import extract_device, move_inference_session
+        from modules.onnx_impl.utils import extract_device, move_inference_session

         device = extract_device(args, kwargs)
         if device is not None:
@@ -133,13 +133,14 @@ def preprocess_pipeline(p, refiner_enabled: bool):

 def initialize():
-    global initialized
+    global initialized # pylint: disable=global-statement

     if initialized:
         return

-    from modules import onnx_pipelines as pipelines, devices
-    from modules.onnx_ep import ExecutionProvider, TORCH_DEVICE_TO_EP
+    from modules import devices
+    from . import pipelines
+    from .execution_providers import ExecutionProvider, TORCH_DEVICE_TO_EP

     if devices.backend == "rocm":
         TORCH_DEVICE_TO_EP["cuda"] = ExecutionProvider.ROCm
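
The to() overrides above route torch device moves through an ONNX Runtime execution-provider lookup. A minimal sketch of that lookup, assuming only the TORCH_DEVICE_TO_EP table from modules/onnx_impl/execution_providers.py (the fallback argument is illustrative):

import torch

from modules.onnx_impl.execution_providers import TORCH_DEVICE_TO_EP

def pick_provider(device: torch.device, current_provider: str):
    # map a torch device type (e.g. "cuda") to an ORT execution provider,
    # keeping the session's current provider when the type is unmapped
    return TORCH_DEVICE_TO_EP[device.type] if device.type in TORCH_DEVICE_TO_EP else current_provider

Note the special case in initialize(): on a ROCm backend the "cuda" device type is remapped to ExecutionProvider.ROCm before any lookup happens.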

View File

@@ -0,0 +1,10 @@
+from installer import log, installed, install
+
+
+def install_olive():
+    if installed("olive-ai"):
+        log.debug("Olive: olive-ai is already installed. Skipping olive-ai installation.")
+        return
+
+    install("olive-ai", "olive-ai")
+    log.info("Olive: olive-ai is installed. Please restart webui session.")
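
Since olive-ai no longer ships in requirements, this new helper performs a deferred install. A hedged sketch of the trigger this commit wires up in modules/ui.py further down (the wrapper function is hypothetical; the real checks run inline in the settings handlers):

from modules.onnx_impl.olive_dep import install_olive

def on_option_set(key: str, value):  # hypothetical condensation of the opts.set flow
    if key == "cuda_compile_backend" and value == "olive-ai":
        install_olive()  # no-op if olive-ai is already installed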

View File

@@ -2,9 +2,11 @@ import os
 import json
 import shutil
 import inspect
+import tempfile
 from abc import ABCMeta
 from typing import Union, Optional, Callable, Type, Tuple, List, Any, Dict
 from packaging import version
+import onnx
 import torch
 import numpy as np
 import diffusers
@@ -18,32 +20,16 @@ from modules import shared
 from modules.paths import sd_configs_path, models_path
 from modules.sd_models import CheckpointInfo
 from modules.processing import StableDiffusionProcessing
-from modules.olive import config
-from modules.onnx import DynamicSessionOptions, TorchCompatibleModule
-from modules.onnx_utils import extract_device, move_inference_session, check_diffusers_cache, check_pipeline_sdxl, check_cache_onnx, load_init_dict, load_submodel, load_submodels, patch_kwargs, load_pipeline, get_base_constructor
-from modules.onnx_ep import ExecutionProvider, EP_TO_NAME, get_provider
+from modules.olive_script import config
+from . import DynamicSessionOptions, TorchCompatibleModule
+from .utils import extract_device, move_inference_session, check_diffusers_cache, check_pipeline_sdxl, check_cache_onnx, load_init_dict, load_submodel, load_submodels, patch_kwargs, load_pipeline, get_base_constructor, get_io_config
+from .execution_providers import ExecutionProvider, EP_TO_NAME, get_provider

 SUBMODELS_SD = ("text_encoder", "unet", "vae_encoder", "vae_decoder",)
 SUBMODELS_SDXL = ("text_encoder", "text_encoder_2", "unet", "vae_encoder", "vae_decoder",)
 SUBMODELS_SDXL_REFINER = ("text_encoder_2", "unet", "vae_encoder", "vae_decoder",)
-
-CONVERSION_PASS = {
-    "type": "OnnxConversion",
-    "config": {
-        "target_opset": 14,
-    },
-}
-
-CONVERSION_PASS_UNET = {
-    "type": "OnnxConversion",
-    "config": {
-        "target_opset": 14,
-        "save_as_external_data": True,
-        "all_tensors_to_one_file": True,
-        "external_data_name": "weights.pb",
-    },
-}

 class PipelineBase(TorchCompatibleModule, diffusers.DiffusionPipeline, metaclass=ABCMeta):
     model_type: str
@@ -51,7 +37,7 @@ class PipelineBase(TorchCompatibleModule, diffusers.DiffusionPipeline, metaclass
     sd_checkpoint_info: CheckpointInfo
     sd_model_checkpoint: str

-    def __init__(self):
+    def __init__(self): # pylint: disable=super-init-not-called
         self.model_type = self.__class__.__name__

     def to(self, *args, **kwargs):
@@ -66,7 +52,7 @@ class PipelineBase(TorchCompatibleModule, diffusers.DiffusionPipeline, metaclass
             module = getattr(self, name)

-            if isinstance(module, optimum.onnxruntime.modeling_diffusion._ORTDiffusionModelPart):
+            if isinstance(module, optimum.onnxruntime.modeling_diffusion._ORTDiffusionModelPart): # pylint: disable=protected-access
                 device = extract_device(args, kwargs)
                 if device is None:
                     return self
@@ -83,7 +69,7 @@ class PipelineBase(TorchCompatibleModule, diffusers.DiffusionPipeline, metaclass
         return self

     @classmethod
-    def from_pretrained(cls, pretrained_model_name_or_path, **_):
+    def from_pretrained(cls, pretrained_model_name_or_path, **_): # pylint: disable=arguments-differ
         return OnnxRawPipeline(
             cls,
             pretrained_model_name_or_path,
@@ -114,7 +100,7 @@ class OnnxRawPipeline(PipelineBase):
     scheduler: Any = None # for Img2Img

-    def __init__(self, constructor: Type[PipelineBase], path: os.PathLike):
+    def __init__(self, constructor: Type[PipelineBase], path: os.PathLike): # pylint: disable=super-init-not-called
         self._is_sdxl = check_pipeline_sdxl(constructor)
         self.from_diffusers_cache = check_diffusers_cache(path)
         self.path = path
@@ -154,21 +140,12 @@ class OnnxRawPipeline(PipelineBase):
         return pipeline

     def convert(self, submodels: List[str], in_dir: os.PathLike):
         if not shared.cmd_opts.debug:
             ort.set_default_logger_severity(3)

         out_dir = os.path.join(shared.opts.onnx_cached_models_path, self.original_filename)
         if (self.from_diffusers_cache and check_cache_onnx(self.path)):
             return self.path
         if os.path.isdir(out_dir): # if model is ONNX format or had already converted.
             return out_dir

-        from olive.workflows import run
-        try:
-            from olive.model import ONNXModel
-        except ImportError:
-            from olive.model import ONNXModelHandler as ONNXModel
-
-        shutil.rmtree("cache", ignore_errors=True)
-        shutil.rmtree("footprints", ignore_errors=True)
@@ -177,56 +154,34 @@ class OnnxRawPipeline(PipelineBase):
             in_dir, out_dir, ignore=shutil.ignore_patterns("weights.pb", "*.onnx", "*.safetensors", "*.ckpt")
         )

-        converted_model_paths = {}
+        from modules import olive_script as olv

         for submodel in submodels:
-            log.info(f"\nConverting {submodel}")
-
             destination = os.path.join(out_dir, submodel)
-
-            with open(os.path.join(sd_configs_path, "olive", 'sdxl' if self._is_sdxl else 'sd', f"{submodel}.json"), "r", encoding="utf-8") as config_file:
-                conversion_config = json.load(config_file)
-            conversion_config["input_model"]["config"]["model_path"] = os.path.abspath(in_dir)
-            conversion_config["passes"] = {
-                "_conversion": CONVERSION_PASS_UNET if submodel == "unet" else CONVERSION_PASS,
-            }
-            conversion_config["pass_flows"] = [["_conversion"]]
-            conversion_config["engine"]["execution_providers"] = [shared.opts.onnx_execution_provider]
-
             if not os.path.isdir(destination):
                 os.mkdir(destination)

-            run(conversion_config)
-
-            with open(os.path.join("footprints", f"{submodel}_{EP_TO_NAME[shared.opts.onnx_execution_provider]}_footprints.json"), "r", encoding="utf-8") as footprint_file:
-                footprints = json.load(footprint_file)
-            conversion_footprint = None
-            for _, footprint in footprints.items():
-                if footprint["from_pass"] == "OnnxConversion":
-                    conversion_footprint = footprint
-
-            assert conversion_footprint, "Failed to convert model"
-
-            converted_model_paths[submodel] = ONNXModel(
-                **conversion_footprint["model_config"]["config"]
-            ).model_path
-
-            log.info(f"Converted {submodel}")
-
-        for submodel in submodels:
-            src_path = converted_model_paths[submodel]
-            src_parent = os.path.dirname(src_path)
-            dst_parent = os.path.join(out_dir, submodel)
-            dst_path = os.path.join(dst_parent, "model.onnx")
-            if not os.path.isdir(dst_parent):
-                os.mkdir(dst_parent)
-            shutil.copyfile(src_path, dst_path)
-
-            data_src_path = os.path.join(src_parent, (os.path.basename(src_path) + ".data"))
-            if os.path.isfile(data_src_path):
-                data_dst_path = os.path.join(dst_parent, (os.path.basename(dst_path) + ".data"))
-                shutil.copyfile(data_src_path, data_dst_path)
-
-            weights_src_path = os.path.join(src_parent, "weights.pb")
-            if os.path.isfile(weights_src_path):
-                weights_dst_path = os.path.join(dst_parent, "weights.pb")
-                shutil.copyfile(weights_src_path, weights_dst_path)
-
-        del converted_model_paths
+            model = getattr(olv, f"{submodel}_load")(in_dir)
+            sample = getattr(olv, f"{submodel}_conversion_inputs")(None)
+            with tempfile.TemporaryDirectory(prefix="onnx_conversion") as temp_dir:
+                temp_path = os.path.join(temp_dir, "model.onnx")
+                torch.onnx.export(
+                    model,
+                    sample,
+                    temp_path,
+                    opset_version=14,
+                    **get_io_config(submodel, self._is_sdxl),
+                )
+                model = onnx.load(temp_path)
+                onnx.save_model(
+                    model,
+                    os.path.join(destination, "model.onnx"),
+                    save_as_external_data=submodel == "unet",
+                    all_tensors_to_one_file=True,
+                    location="weights.pb",
+                )
+            log.info(f"ONNX: Successfully exported converted model: submodel={submodel}")

         kwargs = {}
@@ -269,11 +224,11 @@ class OnnxRawPipeline(PipelineBase):
         if not shared.opts.olive_cache_optimized:
             out_dir = shared.opts.onnx_temp_dir

-        from olive.workflows import run
+        from olive.workflows import run # pylint: disable=no-name-in-module
         try:
-            from olive.model import ONNXModel
+            from olive.model import ONNXModel # olive-ai==0.4.0
         except ImportError:
-            from olive.model import ONNXModelHandler as ONNXModel
+            from olive.model import ONNXModelHandler as ONNXModel # olive-ai==0.5.0

         shutil.rmtree("cache", ignore_errors=True)
         shutil.rmtree("footprints", ignore_errors=True)
@@ -322,7 +277,7 @@ class OnnxRawPipeline(PipelineBase):
                 **processor_final_pass_footprint["model_config"]["config"]
             ).model_path

-            log.info(f"Processed {submodel}")
+            log.info(f"Olive: Successfully processed model: submodel={submodel}")

         for submodel in submodels:
             src_path = optimized_model_paths[submodel]
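
The convert() rewrite above is the heart of this commit: instead of driving an Olive OnnxConversion pass and harvesting footprint files, each submodel is exported directly with torch.onnx.export and re-saved with external data. A condensed, hedged sketch of that flow (helper names come from modules/olive_script.py and modules/onnx_impl/utils.py as shown above; the directory arguments are illustrative):

import os
import tempfile

import onnx
import torch

from modules import olive_script as olv
from modules.onnx_impl.utils import get_io_config

def export_submodel(submodel: str, in_dir: str, out_dir: str, is_sdxl: bool = False):
    model = getattr(olv, f"{submodel}_load")(in_dir)              # e.g. unet_load
    sample = getattr(olv, f"{submodel}_conversion_inputs")(None)  # dummy inputs for tracing
    with tempfile.TemporaryDirectory(prefix="onnx_conversion") as temp_dir:
        temp_path = os.path.join(temp_dir, "model.onnx")
        # input/output names and dynamic axes come from the JSON workflow configs
        torch.onnx.export(model, sample, temp_path, opset_version=14, **get_io_config(submodel, is_sdxl))
        # re-save with external data so the unet can exceed the 2GB protobuf limit
        onnx.save_model(
            onnx.load(temp_path),
            os.path.join(out_dir, submodel, "model.onnx"),
            save_as_external_data=submodel == "unet",
            all_tensors_to_one_file=True,
            location="weights.pb",
        )

With this split, olive itself is only imported inside the optimize path, so plain ONNX conversion works without olive-ai installed.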

View File

@@ -17,8 +17,8 @@ def create_ui():
     from modules.shared import log, opts, cmd_opts, refresh_checkpoints
     from modules.sd_models import checkpoint_tiles, get_closet_checkpoint_match
     from modules.paths import sd_configs_path
-    from modules.onnx_ep import ExecutionProvider, install_execution_provider
-    from modules.onnx_utils import check_diffusers_cache
+    from .execution_providers import ExecutionProvider, install_execution_provider
+    from .utils import check_diffusers_cache

     with gr.Blocks(analytics_enabled=False) as ui:
         with gr.Row():
@@ -145,7 +145,7 @@ def create_ui():
             sd_pass_config_components[submodel] = {}

-            with open(os.path.join(sd_config_path, submodel), "r") as file:
+            with open(os.path.join(sd_config_path, submodel), "r", encoding="utf-8") as file:
                 config = json.load(file)
             sd_configs[submodel] = config
@@ -170,7 +170,7 @@ def create_ui():
                     return listener

-                for config_key, v in getattr(olive_passes, config_dict["type"], olive_passes.Pass)._default_config(accelerator).items():
+                for config_key, v in getattr(olive_passes, config_dict["type"], olive_passes.Pass)._default_config(accelerator).items(): # pylint: disable=protected-access
                     component = None

                     if v.type_ == bool:
@@ -185,11 +185,11 @@ def create_ui():
                     sd_pass_config_components[submodel][pass_name][config_key] = component
                     component.change(fn=create_pass_config_change_listener(submodel, pass_name, config_key), inputs=component)

-            pass_type.change(fn=sd_create_change_listener(submodel, "passes", config_key, "type"), inputs=pass_type)
+            pass_type.change(fn=sd_create_change_listener(submodel, "passes", config_key, "type"), inputs=pass_type) # pylint: disable=undefined-loop-variable

     def sd_save():
         for k, v in sd_configs.items():
-            with open(os.path.join(sd_config_path, k), "w") as file:
+            with open(os.path.join(sd_config_path, k), "w", encoding="utf-8") as file:
                 json.dump(v, file)
         log.info("Olive: config for SD was saved.")
@@ -213,7 +213,7 @@ def create_ui():
             sdxl_pass_config_components[submodel] = {}

-            with open(os.path.join(sdxl_config_path, submodel), "r") as file:
+            with open(os.path.join(sdxl_config_path, submodel), "r", encoding="utf-8") as file:
                 config = json.load(file)
             sdxl_configs[submodel] = config
@@ -232,13 +232,13 @@ def create_ui():
                 pass_type = gr.Dropdown(label="Type", value=sdxl_configs[submodel]["passes"][pass_name]["type"], choices=(x.__name__ for x in tuple(olive_passes.REGISTRY.values())))

-                def create_pass_config_change_listener(submodel, pass_name, config_key):
+                def create_pass_config_change_listener(submodel, pass_name, config_key): # pylint: disable=function-redefined
                     def listener(value):
                         sdxl_configs[submodel]["passes"][pass_name]["config"][config_key] = value
                     return listener

-                for config_key, v in getattr(olive_passes, config_dict["type"], olive_passes.Pass)._default_config(accelerator).items():
+                for config_key, v in getattr(olive_passes, config_dict["type"], olive_passes.Pass)._default_config(accelerator).items(): # pylint: disable=protected-access
                     component = None

                     if v.type_ == bool:
@@ -257,7 +257,7 @@ def create_ui():
     def sdxl_save():
         for k, v in sdxl_configs.items():
-            with open(os.path.join(sdxl_config_path, k), "w") as file:
+            with open(os.path.join(sdxl_config_path, k), "w", encoding="utf-8") as file:
                 json.dump(v, file)
         log.info("Olive: config for SDXL was saved.")

View File

@@ -1,4 +1,5 @@
 import os
+import json
 import importlib
 from typing import Type, Tuple, Union, List, Dict, Any
 import torch
@@ -18,37 +19,18 @@ def extract_device(args: List, kwargs: Dict):

 def move_inference_session(session: ort.InferenceSession, device: torch.device):
-    from modules.onnx import DynamicSessionOptions, TemporalModule
-    from modules.onnx_ep import TORCH_DEVICE_TO_EP
+    from . import DynamicSessionOptions, TemporalModule
+    from .execution_providers import TORCH_DEVICE_TO_EP

-    previous_provider = session._providers
+    previous_provider = session._providers # pylint: disable=protected-access
     provider = TORCH_DEVICE_TO_EP[device.type] if device.type in TORCH_DEVICE_TO_EP else previous_provider
-    path = session._model_path
+    path = session._model_path # pylint: disable=protected-access

     if provider is not None:
         try:
-            return diffusers.OnnxRuntimeModel.load_model(path, provider, DynamicSessionOptions.from_sess_options(session._sess_options))
+            return diffusers.OnnxRuntimeModel.load_model(path, provider, DynamicSessionOptions.from_sess_options(session._sess_options)) # pylint: disable=protected-access
         except Exception:
-            return TemporalModule(previous_provider, path, session._sess_options)
-
-
-def load_init_dict(cls: Type[diffusers.DiffusionPipeline], path: os.PathLike):
-    merged: Dict[str, Any] = {}
-    extracted = cls.extract_init_dict(diffusers.DiffusionPipeline.load_config(path))
-    for item in extracted:
-        merged.update(item)
-    merged = merged.items()
-
-    R: Dict[str, Tuple[str]] = {}
-    for k, v in merged:
-        if isinstance(v, list):
-            if k not in cls.__init__.__annotations__:
-                continue
-            R[k] = v
-    return R
+            return TemporalModule(previous_provider, path, session._sess_options) # pylint: disable=protected-access


 def check_diffusers_cache(path: os.PathLike):
@@ -80,6 +62,25 @@ def check_cache_onnx(path: os.PathLike) -> bool:
     return True


+def load_init_dict(cls: Type[diffusers.DiffusionPipeline], path: os.PathLike):
+    merged: Dict[str, Any] = {}
+    extracted = cls.extract_init_dict(diffusers.DiffusionPipeline.load_config(path))
+    for item in extracted:
+        merged.update(item)
+    merged = merged.items()
+
+    R: Dict[str, Tuple[str]] = {}
+    for k, v in merged:
+        if isinstance(v, list):
+            if k not in cls.__init__.__annotations__:
+                continue
+            R[k] = v
+    return R
+
+
 def load_submodel(path: os.PathLike, is_sdxl: bool, submodel_name: str, item: List[Union[str, None]], **kwargs_ort):
     lib, atr = item
@@ -117,6 +118,13 @@ def load_submodels(path: os.PathLike, is_sdxl: bool, init_dict: Dict[str, Type],
     return loaded


+def load_pipeline(cls: Type[diffusers.DiffusionPipeline], path: os.PathLike, **kwargs_ort) -> diffusers.DiffusionPipeline:
+    if os.path.isdir(path):
+        return cls(**patch_kwargs(cls, load_submodels(path, check_pipeline_sdxl(cls), load_init_dict(cls, path), **kwargs_ort)))
+    else:
+        return cls.from_single_file(path)
+
+
 def patch_kwargs(cls: Type[diffusers.DiffusionPipeline], kwargs: Dict) -> Dict:
     if cls == diffusers.OnnxStableDiffusionPipeline or cls == diffusers.OnnxStableDiffusionImg2ImgPipeline or cls == diffusers.OnnxStableDiffusionInpaintPipeline:
         kwargs["safety_checker"] = None
@@ -128,13 +136,6 @@ def patch_kwargs(cls: Type[diffusers.DiffusionPipeline], kwargs: Dict) -> Dict:
     return kwargs


-def load_pipeline(cls: Type[diffusers.DiffusionPipeline], path: os.PathLike, **kwargs_ort) -> diffusers.DiffusionPipeline:
-    if os.path.isdir(path):
-        return cls(**patch_kwargs(cls, load_submodels(path, check_pipeline_sdxl(cls), load_init_dict(cls, path), **kwargs_ort)))
-    else:
-        return cls.from_single_file(path)
-
-
 def get_base_constructor(cls: Type[diffusers.DiffusionPipeline], is_refiner: bool):
     if cls == diffusers.OnnxStableDiffusionImg2ImgPipeline or cls == diffusers.OnnxStableDiffusionInpaintPipeline:
         return diffusers.OnnxStableDiffusionPipeline
@@ -143,3 +144,15 @@ def get_base_constructor(cls: Type[diffusers.DiffusionPipeline], is_refiner: boo
         return diffusers.OnnxStableDiffusionXLPipeline

     return cls
+
+
+def get_io_config(submodel: str, is_sdxl: bool):
+    from modules.paths import sd_configs_path
+
+    with open(os.path.join(sd_configs_path, "olive", 'sdxl' if is_sdxl else 'sd', f"{submodel}.json"), "r", encoding="utf-8") as config_file:
+        io_config: Dict[str, Any] = json.load(config_file)["input_model"]["config"]["io_config"]
+
+    for axe in io_config["dynamic_axes"]:
+        io_config["dynamic_axes"][axe] = { int(k): v for k, v in io_config["dynamic_axes"][axe].items() }
+
+    return io_config
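
get_io_config exists because JSON object keys are always strings, while torch.onnx.export expects the axis indices in dynamic_axes as ints. A small self-contained illustration of the cast (the axis names and values are made up):

import json

raw = json.loads('{"dynamic_axes": {"sample": {"0": "batch", "2": "height", "3": "width"}}}')
dynamic_axes = {
    name: {int(axis): label for axis, label in axes.items()}
    for name, axes in raw["dynamic_axes"].items()
}
assert dynamic_axes["sample"] == {0: "batch", 2: "height", 3: "width"}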

View File

@@ -7,7 +7,7 @@ import torch
 import torchvision.transforms.functional as TF
 import diffusers
 from modules import shared, devices, processing, sd_samplers, sd_models, images, errors, masking, prompt_parser_diffusers, sd_hijack_hypertile, processing_correction, processing_vae
-from modules.onnx import preprocess_pipeline as onnx_preprocess_pipeline
+from modules.onnx_impl import preprocess_pipeline as preprocess_onnx_pipeline

 debug = shared.log.trace if os.environ.get('SD_DIFFUSERS_DEBUG', None) is not None else lambda *args, **kwargs: None
@@ -399,6 +399,8 @@ def process_diffusers(p: processing.StableDiffusionProcessing):
             p.task_args['sag_scale'] = p.sag_scale
         else:
             shared.log.warning(f'SAG incompatible scheduler: current={sd_model.scheduler.__class__.__name__} supported={supported}')
+        if shared.opts.cuda_compile_backend == "olive-ai":
+            sd_model = preprocess_onnx_pipeline(p, is_refiner_enabled())
         return sd_model

     if len(getattr(p, 'init_images', [])) > 0:
@@ -538,7 +540,7 @@ def process_diffusers(p: processing.StableDiffusionProcessing):
     if (latent_scale_mode is not None or p.hr_force) and p.denoising_strength > 0:
         p.ops.append('hires')
         shared.sd_model = sd_models.set_diffuser_pipe(shared.sd_model, sd_models.DiffusersTaskType.IMAGE_2_IMAGE)
-        onnx_preprocess_pipeline(p, is_refiner_enabled())
+        preprocess_onnx_pipeline(p, is_refiner_enabled())
         recompile_model(hires=True)
         update_sampler(shared.sd_model, second_pass=True)
         hires_args = set_pipeline_args(

View File

@@ -16,7 +16,7 @@ from rich.console import Console
 from modules import errors, shared_items, shared_state, cmd_args, theme
 from modules.paths import models_path, script_path, data_path, sd_configs_path, sd_default_config, sd_model_file, default_sd_model_file, extensions_dir, extensions_builtin_dir # pylint: disable=W0611
 from modules.dml import memory_providers, default_memory_provider, directml_do_hijack
-from modules.onnx_ep import available_execution_providers, get_default_execution_provider
+from modules.onnx_impl.execution_providers import available_execution_providers, get_default_execution_provider
 import modules.interrogate
 import modules.memmon
 import modules.styles

View File

@@ -27,7 +27,7 @@ def list_crossattention():

 def get_pipelines():
     import diffusers
     from installer import log
-    from modules.onnx import initialize as initialize_onnx_pipelines
+    from modules.onnx_impl import initialize as initialize_onnx_pipelines
     initialize_onnx_pipelines()

View File

@@ -7,6 +7,7 @@ from modules.call_queue import wrap_gradio_call
 from modules import timer, gr_hijack, shared, theme, sd_models, script_callbacks, modelloader, ui_common, ui_loadsave, ui_symbols, ui_javascript, generation_parameters_copypaste, call_queue
 from modules.paths import script_path, data_path # pylint: disable=unused-import
 from modules.dml import directml_override_opts
+from modules.onnx_impl.olive_dep import install_olive
 import modules.scripts
 import modules.errors
@@ -234,6 +235,8 @@ def create_ui(startup_timer = None):
                 continue
             if opts.set(key, value):
                 changed.append(key)
+        if shared.opts.cuda_compile_backend == "olive-ai":
+            install_olive()
         if cmd_opts.use_directml:
             directml_override_opts()
         if cmd_opts.use_openvino:
@@ -260,6 +263,8 @@ def create_ui(startup_timer = None):
             return gr.update(visible=True), opts.dumpjson()
         if not opts.set(key, value):
             return gr.update(value=getattr(opts, key)), opts.dumpjson()
+        if key == "cuda_compile_backend" and value == "olive-ai":
+            install_olive()
         if cmd_opts.use_directml:
             directml_override_opts()
         opts.save(shared.config_filename)
@@ -371,7 +376,7 @@ def create_ui(startup_timer = None):
     if shared.opts.onnx_show_menu:
         with gr.Blocks(analytics_enabled=False) as onnx_interface:
             if shared.backend == shared.Backend.DIFFUSERS:
-                from modules import ui_onnx
+                from modules.onnx_impl import ui as ui_onnx
                 ui_onnx.create_ui()
                 timer.startup.record("ui-onnx")
                 interfaces += [(onnx_interface, "ONNX", "onnx")]

View File

@@ -25,7 +25,7 @@ lpips
 omegaconf
 open-clip-torch
 opencv-contrib-python-headless
-olive-ai
+onnx
 optimum
 piexif
 psutil