Code refactor

pull/730/head
bmaltais 2023-05-05 22:19:17 -04:00
parent 315a1cd3d6
commit b2ffe30a84
17 changed files with 351 additions and 164 deletions

View File

@ -39,6 +39,7 @@ from library.dreambooth_folder_creation_gui import (
)
from library.utilities import utilities_tab
from library.sampler_gui import sample_gradio_config, run_cmd_sample
# from easygui import msgbox
folder_symbol = '\U0001f4c2' # 📂
@ -340,39 +341,60 @@ def train_model(
wandb_api_key,
):
headless_bool = True if headless.get('label') == 'True' else False
if pretrained_model_name_or_path == '':
output_message(msg='Source model information is missing', headless=headless_bool)
output_message(
msg='Source model information is missing', headless=headless_bool
)
return
if train_data_dir == '':
output_message(msg='Image folder path is missing', headless=headless_bool)
output_message(
msg='Image folder path is missing', headless=headless_bool
)
return
if not os.path.exists(train_data_dir):
output_message(msg='Image folder does not exist', headless=headless_bool)
output_message(
msg='Image folder does not exist', headless=headless_bool
)
return
if reg_data_dir != '':
if not os.path.exists(reg_data_dir):
output_message(msg='Regularisation folder does not exist', headless=headless_bool)
output_message(
msg='Regularisation folder does not exist',
headless=headless_bool,
)
return
if output_dir == '':
output_message(msg='Output folder path is missing', headless=headless_bool)
output_message(
msg='Output folder path is missing', headless=headless_bool
)
return
if check_if_model_exist(output_name, output_dir, save_model_as, headless=headless_bool):
if check_if_model_exist(
output_name, output_dir, save_model_as, headless=headless_bool
):
return
if optimizer == 'Adafactor' and lr_warmup != '0':
output_message(msg="Warning: lr_scheduler is set to 'Adafactor', so 'LR warmup (% of steps)' will be considered 0.",
title='Warning', headless=headless_bool
output_message(
msg="Warning: lr_scheduler is set to 'Adafactor', so 'LR warmup (% of steps)' will be considered 0.",
title='Warning',
headless=headless_bool,
)
lr_warmup = '0'
if float(noise_offset) > 0 and (multires_noise_iterations > 0 or multires_noise_discount > 0):
output_message(msg='noise offset and multires_noise can\'t be set at the same time. Only use one or the other.', title='Error', headless=headless_bool)
if float(noise_offset) > 0 and (
multires_noise_iterations > 0 or multires_noise_discount > 0
):
output_message(
msg="noise offset and multires_noise can't be set at the same time. Only use one or the other.",
title='Error',
headless=headless_bool,
)
return
# Get a list of all subfolders in train_data_dir, excluding hidden folders
@ -602,7 +624,7 @@ def dreambooth_tab(
reg_data_dir=gr.Textbox(),
output_dir=gr.Textbox(),
logging_dir=gr.Textbox(),
headless=False
headless=False,
):
dummy_db_true = gr.Label(value=True, visible=False)
dummy_db_false = gr.Label(value=False, visible=False)
@ -753,7 +775,9 @@ def dreambooth_tab(
label='VAE',
placeholder='(Optiona) path to checkpoint of vae to replace for training',
)
vae_button = gr.Button('📂', elem_id='open_folder_small', visible=(not headless))
vae_button = gr.Button(
'📂', elem_id='open_folder_small', visible=(not headless)
)
vae_button.click(
get_any_file_path,
outputs=vae,
@ -950,7 +974,7 @@ def dreambooth_tab(
def UI(**kwargs):
css = ''
headless = kwargs.get('headless', False)
print(f'headless: {headless}')
@ -978,7 +1002,7 @@ def UI(**kwargs):
output_dir_input=output_dir_input,
logging_dir_input=logging_dir_input,
enable_copy_info_button=True,
headless=headless
headless=headless,
)
# Show the interface

View File

@ -30,6 +30,7 @@ from library.tensorboard_gui import (
)
from library.utilities import utilities_tab
from library.sampler_gui import sample_gradio_config, run_cmd_sample
# from easygui import msgbox
folder_symbol = '\U0001f4c2' # 📂
@ -351,18 +352,27 @@ def train_model(
wandb_api_key,
):
headless_bool = True if headless.get('label') == 'True' else False
if check_if_model_exist(output_name, output_dir, save_model_as, headless_bool):
if check_if_model_exist(
output_name, output_dir, save_model_as, headless_bool
):
return
if float(noise_offset) > 0 and (multires_noise_iterations > 0 or multires_noise_discount > 0):
output_message(msg='noise offset and multires_noise can\'t be set at the same time. Only use one or the other.', title='Error', headless=headless_bool)
if float(noise_offset) > 0 and (
multires_noise_iterations > 0 or multires_noise_discount > 0
):
output_message(
msg="noise offset and multires_noise can't be set at the same time. Only use one or the other.",
title='Error',
headless=headless_bool,
)
return
if optimizer == 'Adafactor' and lr_warmup != '0':
output_message(
msg="Warning: lr_scheduler is set to 'Adafactor', so 'LR warmup (% of steps)' will be considered 0.",
title='Warning', headless=headless_bool
title='Warning',
headless=headless_bool,
)
lr_warmup = '0'
@ -595,7 +605,9 @@ def finetune_tab(headless=False):
placeholder='folder where the training configuration files will be saved',
)
train_dir_folder = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
train_dir_folder.click(
get_folder_path,
@ -608,7 +620,9 @@ def finetune_tab(headless=False):
placeholder='folder where the training images are located',
)
image_folder_input_folder = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
image_folder_input_folder.click(
get_folder_path,
@ -621,7 +635,9 @@ def finetune_tab(headless=False):
placeholder='folder where the model will be saved',
)
output_dir_input_folder = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
output_dir_input_folder.click(
get_folder_path,
@ -634,7 +650,9 @@ def finetune_tab(headless=False):
placeholder='Optional: enable logging and output TensorBoard log to this folder',
)
logging_dir_input_folder = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
logging_dir_input_folder.click(
get_folder_path,
@ -904,7 +922,7 @@ def finetune_tab(headless=False):
def UI(**kwargs):
css = ''
headless = kwargs.get('headless', False)
print(f'headless: {headless}')

View File

@ -82,7 +82,7 @@ def setup_logging(clean=False):
def UI(**kwargs):
css = ''
headless = kwargs.get('headless', False)
print(f'headless: {headless}')
@ -116,7 +116,7 @@ def UI(**kwargs):
output_dir_input=output_dir_input,
logging_dir_input=logging_dir_input,
enable_copy_info_button=True,
headless=headless
headless=headless,
)
gradio_extract_dylora_tab(headless=headless)
gradio_extract_lora_tab(headless=headless)

View File

@ -79,7 +79,9 @@ def gradio_basic_caption_gui_tab(headless=False):
placeholder='Directory containing the images to caption',
interactive=True,
)
folder_button = gr.Button('📂', elem_id='open_folder_small', visible=(not headless))
folder_button = gr.Button(
'📂', elem_id='open_folder_small', visible=(not headless)
)
folder_button.click(
get_folder_path,
outputs=images_dir,

View File

@ -35,11 +35,15 @@ ALL_PRESET_MODELS = V2_BASE_MODELS + V_PARAMETERIZATION_MODELS + V1_MODELS
ENV_EXCLUSION = ['COLAB_GPU', 'RUNPOD_POD_ID']
def check_if_model_exist(output_name, output_dir, save_model_as, headless=False):
def check_if_model_exist(
output_name, output_dir, save_model_as, headless=False
):
if headless:
print('Headless mode, skipping verification if model already exist... if model already exist it will be overwritten...')
print(
'Headless mode, skipping verification if model already exist... if model already exist it will be overwritten...'
)
return False
if save_model_as in ['diffusers', 'diffusers_safetendors']:
ckpt_folder = os.path.join(output_dir, output_name)
if os.path.isdir(ckpt_folder):
@ -66,12 +70,14 @@ def check_if_model_exist(output_name, output_dir, save_model_as, headless=False)
return False
def output_message(msg='', title='', headless=False):
    """Deliver a message to the user.

    In headless mode the message is written to stdout; otherwise it is
    shown in a GUI dialog via ``msgbox`` (the *title* is only used by the
    dialog path).
    """
    if not headless:
        # Interactive session: surface the message in a popup dialog.
        msgbox(msg=msg, title=title)
    else:
        # No GUI available: fall back to plain console output.
        print(msg)
def update_my_data(my_data):
# Update the optimizer based on the use_8bit_adam flag
use_8bit_adam = my_data.get('use_8bit_adam', False)
@ -95,7 +101,7 @@ def update_my_data(my_data):
my_data[key] = int(value)
elif not value:
my_data[key] = -1
# Convert values to float if they are strings
for key in ['noise_offset', 'learning_rate', 'text_encoder_lr', 'unet_lr']:
value = my_data.get(key, -1)
@ -347,11 +353,11 @@ def add_pre_postfix(
caption_file_path = os.path.join(folder, caption_file_name)
if not os.path.exists(caption_file_path):
with open(caption_file_path, 'w', encoding="utf8") as f:
with open(caption_file_path, 'w', encoding='utf8') as f:
separator = ' ' if prefix and postfix else ''
f.write(f'{prefix}{separator}{postfix}')
else:
with open(caption_file_path, 'r+', encoding="utf8") as f:
with open(caption_file_path, 'r+', encoding='utf8') as f:
content = f.read()
content = content.rstrip()
f.seek(0, 0)
@ -541,8 +547,12 @@ def set_model_list(
def gradio_config(headless=False):
with gr.Accordion('Configuration file', open=False):
with gr.Row():
button_open_config = gr.Button('Open 📂', elem_id='open_folder', visible=(not headless))
button_save_config = gr.Button('Save 💾', elem_id='open_folder', visible=(not headless))
button_open_config = gr.Button(
'Open 📂', elem_id='open_folder', visible=(not headless)
)
button_save_config = gr.Button(
'Save 💾', elem_id='open_folder', visible=(not headless)
)
button_save_as_config = gr.Button(
'Save as... 💾', elem_id='open_folder', visible=(not headless)
)
@ -583,7 +593,7 @@ def gradio_source_model(
'diffusers_safetensors',
'safetensors',
],
headless=False
headless=False,
):
with gr.Tab('Source model'):
# Define the input elements
@ -594,7 +604,9 @@ def gradio_source_model(
value='runwayml/stable-diffusion-v1-5',
)
pretrained_model_name_or_path_file = gr.Button(
document_symbol, elem_id='open_folder_small', visible=(not headless)
document_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
pretrained_model_name_or_path_file.click(
get_any_file_path,
@ -603,7 +615,9 @@ def gradio_source_model(
show_progress=False,
)
pretrained_model_name_or_path_folder = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
pretrained_model_name_or_path_folder.click(
get_folder_path,
@ -758,7 +772,11 @@ def gradio_training(
value=lr_scheduler_value,
)
lr_warmup = gr.Slider(
label='LR warmup (% of steps)', value=lr_warmup_value, minimum=0, maximum=100, step=1,
label='LR warmup (% of steps)',
value=lr_warmup_value,
minimum=0,
maximum=100,
step=1,
)
optimizer = gr.Dropdown(
label='Optimizer',
@ -923,13 +941,28 @@ def gradio_advanced_training(headless=False):
)
with gr.Row():
noise_offset = gr.Slider(
label='Noise offset', value=0, minimum=0, maximum=1, step=0.01, info='recommended values are 0.05 - 0.15'
label='Noise offset',
value=0,
minimum=0,
maximum=1,
step=0.01,
info='recommended values are 0.05 - 0.15',
)
multires_noise_iterations = gr.Slider(
label='Multires noise iterations', value=0, minimum=0, maximum=64, step=1, info='enable multires noise (recommended values are 6-10)'
label='Multires noise iterations',
value=0,
minimum=0,
maximum=64,
step=1,
info='enable multires noise (recommended values are 6-10)',
)
multires_noise_discount = gr.Slider(
label='Multires noise discount', value=0, minimum=0, maximum=1, step=0.01, info='recommended values are 0.8. For LoRAs with small datasets, 0.1-0.3'
label='Multires noise discount',
value=0,
minimum=0,
maximum=1,
step=0.01,
info='recommended values are 0.8. For LoRAs with small datasets, 0.1-0.3',
)
with gr.Row():
caption_dropout_every_n_epochs = gr.Number(
@ -947,7 +980,9 @@ def gradio_advanced_training(headless=False):
label='Resume from saved training state',
placeholder='path to "last-state" state folder to resume from',
)
resume_button = gr.Button('📂', elem_id='open_folder_small', visible=(not headless))
resume_button = gr.Button(
'📂', elem_id='open_folder_small', visible=(not headless)
)
resume_button.click(
get_folder_path,
outputs=resume,
@ -1015,69 +1050,50 @@ def run_cmd_advanced_training(**kwargs):
f' --max_train_epochs="{kwargs.get("max_train_epochs", "")}"'
if kwargs.get('max_train_epochs')
else '',
f' --max_data_loader_n_workers="{kwargs.get("max_data_loader_n_workers", "")}"'
if kwargs.get('max_data_loader_n_workers')
else '',
f' --max_token_length={kwargs.get("max_token_length", "")}'
if int(kwargs.get('max_token_length', 75)) > 75
else '',
f' --clip_skip={kwargs.get("clip_skip", "")}'
if int(kwargs.get('clip_skip', 1)) > 1
else '',
f' --resume="{kwargs.get("resume", "")}"'
if kwargs.get('resume')
else '',
f' --keep_tokens="{kwargs.get("keep_tokens", "")}"'
if int(kwargs.get('keep_tokens', 0)) > 0
else '',
f' --caption_dropout_every_n_epochs="{int(kwargs.get("caption_dropout_every_n_epochs", 0))}"'
if int(kwargs.get('caption_dropout_every_n_epochs', 0)) > 0
else '',
f' --caption_dropout_rate="{float(kwargs.get("caption_dropout_rate", 0))}"'
if float(kwargs.get('caption_dropout_rate', 0)) > 0
else '',
f' --vae_batch_size="{kwargs.get("vae_batch_size", 0)}"'
if int(kwargs.get('vae_batch_size', 0)) > 0
else '',
f' --bucket_reso_steps={int(kwargs.get("bucket_reso_steps", 1))}'
if int(kwargs.get('bucket_reso_steps', 64)) >= 1
else '',
f' --save_every_n_steps="{int(kwargs.get("save_every_n_steps", 0))}"'
if int(kwargs.get('save_every_n_steps')) > 0
else '',
f' --save_last_n_steps="{int(kwargs.get("save_last_n_steps", 0))}"'
if int(kwargs.get('save_last_n_steps')) > 0
else '',
f' --save_last_n_steps_state="{int(kwargs.get("save_last_n_steps_state", 0))}"'
if int(kwargs.get('save_last_n_steps_state')) > 0
else '',
f' --min_snr_gamma={int(kwargs.get("min_snr_gamma", 0))}'
if int(kwargs.get('min_snr_gamma', 0)) >= 1
else '',
' --save_state' if kwargs.get('save_state') else '',
' --mem_eff_attn' if kwargs.get('mem_eff_attn') else '',
' --color_aug' if kwargs.get('color_aug') else '',
' --flip_aug' if kwargs.get('flip_aug') else '',
' --shuffle_caption' if kwargs.get('shuffle_caption') else '',
' --gradient_checkpointing'
if kwargs.get('gradient_checkpointing')
else '',
@ -1089,28 +1105,21 @@ def run_cmd_advanced_training(**kwargs):
else '',
' --bucket_no_upscale' if kwargs.get('bucket_no_upscale') else '',
' --random_crop' if kwargs.get('random_crop') else '',
f' --multires_noise_iterations="{int(kwargs.get("multires_noise_iterations", 0))}"'
if kwargs.get('multires_noise_iterations', 0) > 0
else '',
f' --multires_noise_discount="{float(kwargs.get("multires_noise_discount", 0.0))}"'
if kwargs.get('multires_noise_discount', 0) > 0
else '',
f' --noise_offset={float(kwargs.get("noise_offset", 0))}'
if kwargs.get('noise_offset') > 0
else '',
f' {kwargs.get("additional_parameters", "")}',
' --log_with wandb' if kwargs.get('use_wandb') else '',
f' --wandb_api_key="{kwargs.get("wandb_api_key", "")}"'
if kwargs.get('wandb_api_key')
else '',
]
run_cmd = ''.join(options)
return run_cmd

View File

@ -68,12 +68,12 @@ def convert_model(
if target_model_type == 'diffuser_safetensors':
run_cmd += ' --use_safetensors'
# Fix for stabilityAI diffusers format. When saving v2 models in Diffusers format in training scripts and conversion scripts,
# it was found that the U-Net configuration is different from those of Hugging Face's stabilityai models (this repository is
# it was found that the U-Net configuration is different from those of Hugging Face's stabilityai models (this repository is
# "use_linear_projection": false, stabilityai is true). Please note that the weight shapes are different, so please be careful
# when using the weight files directly.
if unet_use_linear_projection:
run_cmd += ' --unet_use_linear_projection'
@ -176,7 +176,9 @@ def gradio_convert_model_tab(headless=False):
interactive=True,
)
button_source_model_dir = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_source_model_dir.click(
get_folder_path,
@ -185,7 +187,9 @@ def gradio_convert_model_tab(headless=False):
)
button_source_model_file = gr.Button(
document_symbol, elem_id='open_folder_small', visible=(not headless)
document_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_source_model_file.click(
get_file_path,
@ -212,7 +216,9 @@ def gradio_convert_model_tab(headless=False):
interactive=True,
)
button_target_model_folder = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_target_model_folder.click(
get_folder_path,
@ -239,7 +245,11 @@ def gradio_convert_model_tab(headless=False):
choices=['unspecified', 'fp16', 'bf16', 'float'],
value='unspecified',
)
unet_use_linear_projection = gr.Checkbox(label="UNet linear projection", value=False, info="Enable for Hugging Face's stabilityai models")
unet_use_linear_projection = gr.Checkbox(
label='UNet linear projection',
value=False,
info="Enable for Hugging Face's stabilityai models",
)
convert_button = gr.Button('Convert model')

View File

@ -114,7 +114,7 @@ def gradio_dreambooth_folder_creation_tab(
reg_data_dir_input=gr.Textbox(),
output_dir_input=gr.Textbox(),
logging_dir_input=gr.Textbox(),
headless=False
headless=False,
):
with gr.Tab('Dreambooth/LoRA Folder preparation'):
gr.Markdown(

View File

@ -67,7 +67,9 @@ def gradio_extract_dylora_tab(headless=False):
interactive=True,
)
button_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_model_file.click(
get_file_path,
@ -82,7 +84,9 @@ def gradio_extract_dylora_tab(headless=False):
interactive=True,
)
button_save_to = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_save_to.click(
get_saveasfilename_path,

View File

@ -88,7 +88,9 @@ def gradio_extract_lora_tab(headless=False):
interactive=True,
)
button_model_tuned_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_model_tuned_file.click(
get_file_path,
@ -103,7 +105,9 @@ def gradio_extract_lora_tab(headless=False):
interactive=True,
)
button_model_org_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_model_org_file.click(
get_file_path,
@ -118,7 +122,9 @@ def gradio_extract_lora_tab(headless=False):
interactive=True,
)
button_save_to = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_save_to.click(
get_saveasfilename_path,

View File

@ -138,7 +138,9 @@ def gradio_extract_lycoris_locon_tab(headless=False):
interactive=True,
)
button_db_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_db_model_file.click(
get_file_path,
@ -153,7 +155,9 @@ def gradio_extract_lycoris_locon_tab(headless=False):
interactive=True,
)
button_base_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_base_model_file.click(
get_file_path,
@ -168,7 +172,9 @@ def gradio_extract_lycoris_locon_tab(headless=False):
interactive=True,
)
button_output_name = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_output_name.click(
get_saveasfilename_path,
@ -217,7 +223,7 @@ def gradio_extract_lycoris_locon_tab(headless=False):
value=0.65,
step=0.01,
interactive=True,
info='The higher the value, the smaller the file. Recommended starting value: 0.65'
info='The higher the value, the smaller the file. Recommended starting value: 0.65',
)
conv_threshold = gr.Slider(
minimum=0,
@ -226,7 +232,7 @@ def gradio_extract_lycoris_locon_tab(headless=False):
value=0.65,
step=0.01,
interactive=True,
info='The higher the value, the smaller the file. Recommended starting value: 0.65'
info='The higher the value, the smaller the file. Recommended starting value: 0.65',
)
with gr.Row(visible=False) as ratio:
linear_ratio = gr.Slider(
@ -236,7 +242,7 @@ def gradio_extract_lycoris_locon_tab(headless=False):
value=0.75,
step=0.01,
interactive=True,
info='The higher the value, the smaller the file. Recommended starting value: 0.75'
info='The higher the value, the smaller the file. Recommended starting value: 0.75',
)
conv_ratio = gr.Slider(
minimum=0,
@ -245,7 +251,7 @@ def gradio_extract_lycoris_locon_tab(headless=False):
value=0.75,
step=0.01,
interactive=True,
info='The higher the value, the smaller the file. Recommended starting value: 0.75'
info='The higher the value, the smaller the file. Recommended starting value: 0.75',
)
with gr.Row(visible=False) as quantile:
linear_quantile = gr.Slider(
@ -255,7 +261,7 @@ def gradio_extract_lycoris_locon_tab(headless=False):
value=0.75,
step=0.01,
interactive=True,
info='The higher the value, the larger the file. Recommended starting value: 0.75'
info='The higher the value, the larger the file. Recommended starting value: 0.75',
)
conv_quantile = gr.Slider(
minimum=0,
@ -264,7 +270,7 @@ def gradio_extract_lycoris_locon_tab(headless=False):
value=0.75,
step=0.01,
interactive=True,
info='The higher the value, the larger the file. Recommended starting value: 0.75'
info='The higher the value, the larger the file. Recommended starting value: 0.75',
)
with gr.Row():
use_sparse_bias = gr.Checkbox(

View File

@ -115,7 +115,9 @@ def gradio_merge_lora_tab(headless=False):
info='Provide a SD file path IF you want to merge it with LoRA files',
)
sd_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
sd_model_file.click(
get_file_path,
@ -131,7 +133,9 @@ def gradio_merge_lora_tab(headless=False):
interactive=True,
)
button_lora_a_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_lora_a_model_file.click(
get_file_path,
@ -146,7 +150,9 @@ def gradio_merge_lora_tab(headless=False):
interactive=True,
)
button_lora_b_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_lora_b_model_file.click(
get_file_path,
@ -181,7 +187,9 @@ def gradio_merge_lora_tab(headless=False):
interactive=True,
)
button_lora_c_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_lora_c_model_file.click(
get_file_path,
@ -196,7 +204,9 @@ def gradio_merge_lora_tab(headless=False):
interactive=True,
)
button_lora_d_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_lora_d_model_file.click(
get_file_path,
@ -231,7 +241,9 @@ def gradio_merge_lora_tab(headless=False):
interactive=True,
)
button_save_to = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_save_to.click(
get_saveasfilename_path,

View File

@ -70,7 +70,9 @@ def gradio_merge_lycoris_tab(headless=False):
info='Provide a SD file path that you want to merge with the LyCORIS file',
)
base_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
base_model_file.click(
get_file_path,
@ -86,7 +88,9 @@ def gradio_merge_lycoris_tab(headless=False):
interactive=True,
)
button_lycoris_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_lycoris_model_file.click(
get_file_path,
@ -112,7 +116,9 @@ def gradio_merge_lycoris_tab(headless=False):
interactive=True,
)
button_output_name = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_output_name.click(
get_saveasfilename_path,

View File

@ -94,7 +94,9 @@ def gradio_resize_lora_tab(headless=False):
interactive=True,
)
button_lora_a_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_lora_a_model_file.click(
get_file_path,
@ -133,7 +135,9 @@ def gradio_resize_lora_tab(headless=False):
interactive=True,
)
button_save_to = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_save_to.click(
get_saveasfilename_path,

View File

@ -85,7 +85,9 @@ def gradio_svd_merge_lora_tab(headless=False):
interactive=True,
)
button_lora_a_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_lora_a_model_file.click(
get_file_path,
@ -100,7 +102,9 @@ def gradio_svd_merge_lora_tab(headless=False):
interactive=True,
)
button_lora_b_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_lora_b_model_file.click(
get_file_path,
@ -141,7 +145,9 @@ def gradio_svd_merge_lora_tab(headless=False):
interactive=True,
)
button_save_to = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_save_to.click(
get_saveasfilename_path,

View File

@ -66,7 +66,9 @@ def gradio_verify_lora_tab(headless=False):
interactive=True,
)
button_lora_model_file = gr.Button(
folder_symbol, elem_id='open_folder_small', visible=(not headless)
folder_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
button_lora_model_file.click(
get_file_path,

View File

@ -4,6 +4,7 @@
# v3.1: Adding captionning of images to utilities
import gradio as gr
# import easygui
import json
import math
@ -45,6 +46,7 @@ from library.svd_merge_lora_gui import gradio_svd_merge_lora_tab
from library.verify_lora_gui import gradio_verify_lora_tab
from library.resize_lora_gui import gradio_resize_lora_tab
from library.sampler_gui import sample_gradio_config, run_cmd_sample
# from easygui import msgbox
folder_symbol = '\U0001f4c2' # 📂
@ -75,7 +77,8 @@ def save_configuration(
save_precision,
seed,
num_cpu_threads_per_process,
cache_latents,cache_latents_to_disk,
cache_latents,
cache_latents_to_disk,
caption_extension,
enable_bucket,
gradient_checkpointing,
@ -211,7 +214,8 @@ def open_configuration(
save_precision,
seed,
num_cpu_threads_per_process,
cache_latents,cache_latents_to_disk,
cache_latents,
cache_latents_to_disk,
caption_extension,
enable_bucket,
gradient_checkpointing,
@ -318,7 +322,6 @@ def open_configuration(
return tuple(values)
def train_model(
headless,
print_only,
@ -340,7 +343,8 @@ def train_model(
save_precision,
seed,
num_cpu_threads_per_process,
cache_latents,cache_latents_to_disk,
cache_latents,
cache_latents_to_disk,
caption_extension,
enable_bucket,
gradient_checkpointing,
@ -414,39 +418,62 @@ def train_model(
headless_bool = True if headless.get('label') == 'True' else False
if pretrained_model_name_or_path == '':
output_message(msg='Source model information is missing', headless=headless_bool)
output_message(
msg='Source model information is missing', headless=headless_bool
)
return
if train_data_dir == '':
output_message(msg='Image folder path is missing', headless=headless_bool)
output_message(
msg='Image folder path is missing', headless=headless_bool
)
return
if not os.path.exists(train_data_dir):
output_message(msg='Image folder does not exist', headless=headless_bool)
output_message(
msg='Image folder does not exist', headless=headless_bool
)
return
if reg_data_dir != '':
if not os.path.exists(reg_data_dir):
output_message(msg='Regularisation folder does not exist', headless=headless_bool)
output_message(
msg='Regularisation folder does not exist',
headless=headless_bool,
)
return
if output_dir == '':
output_message(msg='Output folder path is missing', headless=headless_bool)
output_message(
msg='Output folder path is missing', headless=headless_bool
)
return
if int(bucket_reso_steps) < 1:
output_message(msg='Bucket resolution steps need to be greater than 0', headless=headless_bool)
output_message(
msg='Bucket resolution steps need to be greater than 0',
headless=headless_bool,
)
return
if noise_offset == '':
noise_offset = 0
if float(noise_offset) > 1 or float(noise_offset) < 0:
output_message(msg='Noise offset need to be a value between 0 and 1', headless=headless_bool)
output_message(
msg='Noise offset need to be a value between 0 and 1',
headless=headless_bool,
)
return
if float(noise_offset) > 0 and (multires_noise_iterations > 0 or multires_noise_discount > 0):
output_message(msg='noise offset and multires_noise can\'t be set at the same time. Only use one or the other.', title='Error', headless=headless_bool)
if float(noise_offset) > 0 and (
multires_noise_iterations > 0 or multires_noise_discount > 0
):
output_message(
msg="noise offset and multires_noise can't be set at the same time. Only use one or the other.",
title='Error',
headless=headless_bool,
)
return
if not os.path.exists(output_dir):
@ -454,17 +481,21 @@ def train_model(
if stop_text_encoder_training_pct > 0:
output_message(
msg='Output "stop text encoder training" is not yet supported. Ignoring', headless=headless_bool
msg='Output "stop text encoder training" is not yet supported. Ignoring',
headless=headless_bool,
)
stop_text_encoder_training_pct = 0
if check_if_model_exist(output_name, output_dir, save_model_as, headless=headless_bool):
if check_if_model_exist(
output_name, output_dir, save_model_as, headless=headless_bool
):
return
if optimizer == 'Adafactor' and lr_warmup != '0':
output_message(
msg="Warning: lr_scheduler is set to 'Adafactor', so 'LR warmup (% of steps)' will be considered 0.",
title='Warning', headless=headless_bool
title='Warning',
headless=headless_bool,
)
lr_warmup = '0'
@ -677,7 +708,10 @@ def train_model(
run_cmd += f' --network_train_unet_only'
else:
if float(learning_rate) == 0:
output_message(msg='Please input learning rate values.', headless=headless_bool)
output_message(
msg='Please input learning rate values.',
headless=headless_bool,
)
return
run_cmd += f' --network_dim={network_dim}'
@ -800,12 +834,12 @@ def lora_tab(
reg_data_dir_input=gr.Textbox(),
output_dir_input=gr.Textbox(),
logging_dir_input=gr.Textbox(),
headless=False
headless=False,
):
dummy_db_true = gr.Label(value=True, visible=False)
dummy_db_false = gr.Label(value=False, visible=False)
dummy_headless = gr.Label(value=headless, visible=False)
gr.Markdown(
'Train a custom model using kohya train network LoRA python code...'
)
@ -827,7 +861,8 @@ def lora_tab(
save_model_as_choices=[
'ckpt',
'safetensors',
], headless=headless
],
headless=headless,
)
with gr.Tab('Folders'):
@ -836,7 +871,9 @@ def lora_tab(
label='Image folder',
placeholder='Folder where the training folders containing the images are located',
)
train_data_dir_folder = gr.Button('📂', elem_id='open_folder_small', visible=(not headless))
train_data_dir_folder = gr.Button(
'📂', elem_id='open_folder_small', visible=(not headless)
)
train_data_dir_folder.click(
get_folder_path,
outputs=train_data_dir,
@ -846,7 +883,9 @@ def lora_tab(
label='Regularisation folder',
placeholder='(Optional) Folder where where the regularization folders containing the images are located',
)
reg_data_dir_folder = gr.Button('📂', elem_id='open_folder_small', visible=(not headless))
reg_data_dir_folder = gr.Button(
'📂', elem_id='open_folder_small', visible=(not headless)
)
reg_data_dir_folder.click(
get_folder_path,
outputs=reg_data_dir,
@ -857,7 +896,9 @@ def lora_tab(
label='Output folder',
placeholder='Folder to output trained model',
)
output_dir_folder = gr.Button('📂', elem_id='open_folder_small', visible=(not headless))
output_dir_folder = gr.Button(
'📂', elem_id='open_folder_small', visible=(not headless)
)
output_dir_folder.click(
get_folder_path,
outputs=output_dir,
@ -867,7 +908,9 @@ def lora_tab(
label='Logging folder',
placeholder='Optional: enable logging and output TensorBoard log to this folder',
)
logging_dir_folder = gr.Button('📂', elem_id='open_folder_small', visible=(not headless))
logging_dir_folder = gr.Button(
'📂', elem_id='open_folder_small', visible=(not headless)
)
logging_dir_folder.click(
get_folder_path,
outputs=logging_dir,
@ -924,7 +967,9 @@ def lora_tab(
placeholder='{Optional) Path to existing LoRA network weights to resume training',
)
lora_network_weights_file = gr.Button(
document_symbol, elem_id='open_folder_small', visible=(not headless)
document_symbol,
elem_id='open_folder_small',
visible=(not headless),
)
lora_network_weights_file.click(
get_any_file_path,
@ -944,7 +989,8 @@ def lora_tab(
num_cpu_threads_per_process,
seed,
caption_extension,
cache_latents,cache_latents_to_disk,
cache_latents,
cache_latents_to_disk,
optimizer,
optimizer_args,
) = gradio_training(
@ -1240,7 +1286,8 @@ def lora_tab(
save_precision,
seed,
num_cpu_threads_per_process,
cache_latents,cache_latents_to_disk,
cache_latents,
cache_latents_to_disk,
caption_extension,
enable_bucket,
gradient_checkpointing,
@ -1361,7 +1408,7 @@ def lora_tab(
def UI(**kwargs):
css = ''
headless = kwargs.get('headless', False)
print(f'headless: {headless}')
@ -1389,7 +1436,7 @@ def UI(**kwargs):
output_dir_input=output_dir_input,
logging_dir_input=logging_dir_input,
enable_copy_info_button=True,
headless=headless
headless=headless,
)
# Show the interface

View File

@ -39,6 +39,7 @@ from library.dreambooth_folder_creation_gui import (
)
from library.utilities import utilities_tab
from library.sampler_gui import sample_gradio_config, run_cmd_sample
# from easygui import msgbox
folder_symbol = '\U0001f4c2' # 📂
@ -68,7 +69,8 @@ def save_configuration(
save_precision,
seed,
num_cpu_threads_per_process,
cache_latents,cache_latents_to_disk,
cache_latents,
cache_latents_to_disk,
caption_extension,
enable_bucket,
gradient_checkpointing,
@ -190,7 +192,8 @@ def open_configuration(
save_precision,
seed,
num_cpu_threads_per_process,
cache_latents,cache_latents_to_disk,
cache_latents,
cache_latents_to_disk,
caption_extension,
enable_bucket,
gradient_checkpointing,
@ -295,7 +298,8 @@ def train_model(
save_precision,
seed,
num_cpu_threads_per_process,
cache_latents,cache_latents_to_disk,
cache_latents,
cache_latents_to_disk,
caption_extension,
enable_bucket,
gradient_checkpointing,
@ -352,26 +356,37 @@ def train_model(
wandb_api_key,
):
headless_bool = True if headless.get('label') == 'True' else False
if pretrained_model_name_or_path == '':
output_message(msg='Source model information is missing', headless=headless_bool)
output_message(
msg='Source model information is missing', headless=headless_bool
)
return
if train_data_dir == '':
output_message(msg='Image folder path is missing', headless=headless_bool)
output_message(
msg='Image folder path is missing', headless=headless_bool
)
return
if not os.path.exists(train_data_dir):
output_message(msg='Image folder does not exist', headless=headless_bool)
output_message(
msg='Image folder does not exist', headless=headless_bool
)
return
if reg_data_dir != '':
if not os.path.exists(reg_data_dir):
output_message(msg='Regularisation folder does not exist', headless=headless_bool)
output_message(
msg='Regularisation folder does not exist',
headless=headless_bool,
)
return
if output_dir == '':
output_message(msg='Output folder path is missing', headless=headless_bool)
output_message(
msg='Output folder path is missing', headless=headless_bool
)
return
if token_string == '':
@ -385,17 +400,26 @@ def train_model(
if not os.path.exists(output_dir):
os.makedirs(output_dir)
if check_if_model_exist(output_name, output_dir, save_model_as, headless_bool):
if check_if_model_exist(
output_name, output_dir, save_model_as, headless_bool
):
return
if float(noise_offset) > 0 and (multires_noise_iterations > 0 or multires_noise_discount > 0):
output_message(msg='noise offset and multires_noise can\'t be set at the same time. Only use one or the other.', title='Error', headless=headless_bool)
if float(noise_offset) > 0 and (
multires_noise_iterations > 0 or multires_noise_discount > 0
):
output_message(
msg="noise offset and multires_noise can't be set at the same time. Only use one or the other.",
title='Error',
headless=headless_bool,
)
return
if optimizer == 'Adafactor' and lr_warmup != '0':
output_message(
msg="Warning: lr_scheduler is set to 'Adafactor', so 'LR warmup (% of steps)' will be considered 0.",
title='Warning', headless=headless_bool
title='Warning',
headless=headless_bool,
)
lr_warmup = '0'
@ -606,7 +630,7 @@ def ti_tab(
reg_data_dir=gr.Textbox(),
output_dir=gr.Textbox(),
logging_dir=gr.Textbox(),
headless=False
headless=False,
):
dummy_db_true = gr.Label(value=True, visible=False)
dummy_db_false = gr.Label(value=False, visible=False)
@ -630,7 +654,8 @@ def ti_tab(
save_model_as_choices=[
'ckpt',
'safetensors',
], headless=headless
],
headless=headless,
)
with gr.Tab('Folders'):
@ -717,7 +742,9 @@ def ti_tab(
label='Resume TI training',
placeholder='(Optional) Path to existing TI embeding file to keep training',
)
weights_file_input = gr.Button('📂', elem_id='open_folder_small', visible=(not headless))
weights_file_input = gr.Button(
'📂', elem_id='open_folder_small', visible=(not headless)
)
weights_file_input.click(
get_file_path,
outputs=weights,
@ -764,7 +791,8 @@ def ti_tab(
num_cpu_threads_per_process,
seed,
caption_extension,
cache_latents,cache_latents_to_disk,
cache_latents,
cache_latents_to_disk,
optimizer,
optimizer_args,
) = gradio_training(
@ -802,7 +830,9 @@ def ti_tab(
label='VAE',
placeholder='(Optiona) path to checkpoint of vae to replace for training',
)
vae_button = gr.Button('📂', elem_id='open_folder_small', visible=(not headless))
vae_button = gr.Button(
'📂', elem_id='open_folder_small', visible=(not headless)
)
vae_button.click(
get_any_file_path,
outputs=vae,
@ -902,7 +932,8 @@ def ti_tab(
save_precision,
seed,
num_cpu_threads_per_process,
cache_latents,cache_latents_to_disk,
cache_latents,
cache_latents_to_disk,
caption_extension,
enable_bucket,
gradient_checkpointing,
@ -1003,7 +1034,7 @@ def ti_tab(
def UI(**kwargs):
css = ''
headless = kwargs.get('headless', False)
print(f'headless: {headless}')
@ -1031,7 +1062,7 @@ def UI(**kwargs):
output_dir_input=output_dir_input,
logging_dir_input=logging_dir_input,
enable_copy_info_button=True,
headless=headless
headless=headless,
)
# Show the interface