update to sd webui 1.4

main
a2569875 2023-06-28 17:02:27 +08:00
parent 213aea98cb
commit e8f461f0e9
3 changed files with 43 additions and 18 deletions

View File

@ -150,9 +150,15 @@ def add_step_counters():
should_print = True
step_counter += 1
if step_counter > num_steps:
reset_flag = False
if step_counter == num_steps + 1:
if not opt_hires_step_as_global:
step_counter = 0
reset_flag = True
elif step_counter > num_steps + num_hires_steps:
step_counter = 0
else:
reset_flag = True
if not reset_flag:
if opt_plot_lora_weight:
log_lora()
@ -525,6 +531,9 @@ def lora_MultiheadAttention_forward(self, input):
res = composable_lycoris.lycoris_forward(self, input, res)
return res
def noop():
    """Do nothing.

    Exists only as a sentinel attribute: other modules call
    ``hasattr(composable_lora, "noop")`` to verify this module
    loaded correctly after installation.
    """
    return None
def should_reload():
#pytorch 2.0 should reload
match = re.search(r"\d+(\.\d+)?",str(torch.__version__))
@ -533,13 +542,14 @@ def should_reload():
ver = float(match.group(0))
return ver >= 2.0
enabled = False
opt_composable_with_step = False
opt_uc_text_model_encoder = False
opt_uc_diffusion_model = False
opt_plot_lora_weight = False
opt_single_no_uc = False
verbose = True
enabled : bool = False
opt_composable_with_step : bool = False
opt_uc_text_model_encoder : bool = False
opt_uc_diffusion_model : bool = False
opt_plot_lora_weight : bool = False
opt_single_no_uc : bool = False
opt_hires_step_as_global : bool = False
verbose : bool = True
sd_processing = None
full_prompt: str = ""
@ -552,6 +562,7 @@ first_log_drawing : bool = False
is_single_block : bool = False
num_batches: int = 0
num_steps: int = 20
num_hires_steps: int = 20
prompt_loras: List[Dict[str, float]] = []
text_model_encoder_counter: int = -1
diffusion_model_counter: int = 0

View File

@ -277,7 +277,7 @@ def check_lora_weight(controllers : List[LoRA_Controller_Base], test_lora : str,
result_weight = 0.0
for controller in controllers:
calc_weight = controller.test(test_lora, step, all_step, custom_scope)
if calc_weight > result_weight:
if abs(calc_weight) > abs(result_weight):
result_weight = calc_weight
return result_weight

View File

@ -60,7 +60,7 @@ torch.nn.Linear.forward = composable_lora.lora_Linear_forward
torch.nn.Conv2d.forward = composable_lora.lora_Conv2d_forward
def check_install_state():
if not hasattr(composable_lora, "should_reload"):
if not hasattr(composable_lora, "noop"):
import warnings
warnings.warn( #NOTICE: You Must Restart the WebUI after Install composable_lora!
"module 'composable_lora' not found! Please reinstall composable_lora and restart the WebUI.")
@ -80,7 +80,7 @@ class ComposableLoraScript(scripts.Script):
def ui(self, is_img2img):
with gr.Group():
with gr.Accordion("Composable Lora", open=False):
if not hasattr(composable_lora, "should_reload"):
if not hasattr(composable_lora, "noop"):
gr.Markdown('<span style="color:red">Error! Composable Lora install failed! Please reinstall composable_lora and restart the WebUI.</span>')
enabled = gr.Checkbox(value=False, label="Enabled")
opt_composable_with_step = gr.Checkbox(value=False, label="Composable LoRA with step")
@ -88,21 +88,35 @@ class ComposableLoraScript(scripts.Script):
opt_uc_diffusion_model = gr.Checkbox(value=False, label="Use Lora in uc diffusion model")
opt_plot_lora_weight = gr.Checkbox(value=False, label="Plot the LoRA weight in all steps")
opt_single_no_uc = gr.Checkbox(value=False, label="Don't use LoRA in uc if there're no subprompts")
opt_hires_step_as_global = gr.Checkbox(value=False, label="Treat hires step as global step")
return [enabled, opt_composable_with_step, opt_uc_text_model_encoder, opt_uc_diffusion_model, opt_plot_lora_weight, opt_single_no_uc, opt_hires_step_as_global]
return [enabled, opt_composable_with_step, opt_uc_text_model_encoder, opt_uc_diffusion_model, opt_plot_lora_weight, opt_single_no_uc]
def process(self, p: StableDiffusionProcessing, enabled: bool, opt_composable_with_step: bool, opt_uc_text_model_encoder: bool, opt_uc_diffusion_model: bool, opt_plot_lora_weight: bool, opt_single_no_uc: bool):
def process(self, p: StableDiffusionProcessing,
enabled: bool,
opt_composable_with_step: bool,
opt_uc_text_model_encoder: bool, opt_uc_diffusion_model:
bool, opt_plot_lora_weight: bool, opt_single_no_uc:
bool, opt_hires_step_as_global: bool):
composable_lora.enabled = enabled
composable_lora.opt_uc_text_model_encoder = opt_uc_text_model_encoder
composable_lora.opt_uc_diffusion_model = opt_uc_diffusion_model
composable_lora.opt_composable_with_step = opt_composable_with_step
composable_lora.opt_plot_lora_weight = opt_plot_lora_weight
composable_lora.opt_single_no_uc = opt_single_no_uc
composable_lora.opt_hires_step_as_global = opt_hires_step_as_global
composable_lora.num_batches = p.batch_size
composable_lora.num_steps = p.steps
if hasattr(p, "hr_second_pass_steps"):
hr_second_pass_steps = p.hr_second_pass_steps
else:
hr_second_pass_steps = 0
if opt_hires_step_as_global:
composable_lora.num_steps = p.steps + hr_second_pass_steps
else:
composable_lora.num_steps = p.steps
composable_lora.num_hires_steps = hr_second_pass_steps
if not hasattr(composable_lora, "should_reload"):
if not hasattr(composable_lora, "noop"):
raise ModuleNotFoundError( #NOTICE: You Must Restart the WebUI after Install composable_lora!
"No module named 'composable_lora'! Please reinstall composable_lora and restart the WebUI.")
composable_lora_function_handler.on_enable()
@ -118,7 +132,7 @@ class ComposableLoraScript(scripts.Script):
composable_lora.reset_counters()
def postprocess(self, p, processed, *args):
if not hasattr(composable_lora, "should_reload"):
if not hasattr(composable_lora, "noop"):
raise ModuleNotFoundError( #NOTICE: You Must Restart the WebUI after Install composable_lora!
"No module named 'composable_lora'! Please reinstall composable_lora and restart the WebUI.")
composable_lora_function_handler.on_disable()