mirror of https://github.com/bmaltais/kohya_ss
Update
parent
8d3b4732c9
commit
85f8ece7c9
|
|
@ -507,4 +507,5 @@ If you come across a `FileNotFoundError`, it is likely due to an installation is
|
|||
|
||||
* 2023/08/05 (v21.8.7)
|
||||
- Merge latest sd-scripts updates.
|
||||
- Updated layout? This is up for debate... but I think it makes things easier to find. Tabs instead of endless scrolling...
|
||||
- Updated layout? This is up for debate... but I think it makes things easier to find. Tabs instead of endless scrolling...
|
||||
- Fix issue with LoRA merge GUI
|
||||
|
|
@ -68,7 +68,7 @@ def merge_lora(
|
|||
if not check_model(model):
|
||||
return
|
||||
|
||||
if not sd_model:
|
||||
if not sdxl_model:
|
||||
run_cmd = f'{PYTHON} "{os.path.join("networks","merge_lora.py")}"'
|
||||
else:
|
||||
run_cmd = f'{PYTHON} "{os.path.join("networks","sdxl_merge_lora.py")}"'
|
||||
|
|
|
|||
|
|
@ -0,0 +1,95 @@
|
|||
{
|
||||
"LoRA_type": "Standard",
|
||||
"adaptive_noise_scale": 0,
|
||||
"additional_parameters": "",
|
||||
"block_alphas": "",
|
||||
"block_dims": "",
|
||||
"block_lr_zero_threshold": "",
|
||||
"bucket_no_upscale": true,
|
||||
"bucket_reso_steps": 32,
|
||||
"cache_latents": true,
|
||||
"cache_latents_to_disk": true,
|
||||
"caption_dropout_every_n_epochs": 0.0,
|
||||
"caption_dropout_rate": 0,
|
||||
"caption_extension": ".txt",
|
||||
"clip_skip": "1",
|
||||
"color_aug": false,
|
||||
"conv_alpha": 32,
|
||||
"conv_block_alphas": "",
|
||||
"conv_block_dims": "",
|
||||
"conv_dim": 32,
|
||||
"decompose_both": false,
|
||||
"dim_from_weights": false,
|
||||
"down_lr_weight": "",
|
||||
"enable_bucket": true,
|
||||
"epoch": 160,
|
||||
"factor": -1,
|
||||
"flip_aug": false,
|
||||
"full_bf16": false,
|
||||
"full_fp16": false,
|
||||
"gradient_accumulation_steps": 1,
|
||||
"gradient_checkpointing": true,
|
||||
"keep_tokens": "0",
|
||||
"learning_rate": 1.0,
|
||||
"lora_network_weights": "",
|
||||
"lr_scheduler": "constant",
|
||||
"lr_scheduler_num_cycles": "1",
|
||||
"lr_scheduler_power": "",
|
||||
"lr_warmup": 0,
|
||||
"max_bucket_reso": 2048,
|
||||
"max_data_loader_n_workers": "0",
|
||||
"max_resolution": "1024,1024",
|
||||
"max_timestep": 1000,
|
||||
"max_token_length": "75",
|
||||
"max_train_epochs": "",
|
||||
"mem_eff_attn": false,
|
||||
"mid_lr_weight": "",
|
||||
"min_bucket_reso": 256,
|
||||
"min_snr_gamma": 5,
|
||||
"min_timestep": 0,
|
||||
"mixed_precision": "bf16",
|
||||
"module_dropout": 0,
|
||||
"multires_noise_discount": 0,
|
||||
"multires_noise_iterations": 0,
|
||||
"network_alpha": 16,
|
||||
"network_dim": 32,
|
||||
"network_dropout": 0,
|
||||
"no_token_padding": false,
|
||||
"noise_offset": 0,
|
||||
"noise_offset_type": "Original",
|
||||
"num_cpu_threads_per_process": 2,
|
||||
"optimizer": "Prodigy",
|
||||
"optimizer_args": "weight_decay=0.01 decouple=True d0=0.000012 use_bias_correction=True",
|
||||
"persistent_data_loader_workers": false,
|
||||
"prior_loss_weight": 1.0,
|
||||
"random_crop": false,
|
||||
"rank_dropout": 0,
|
||||
"save_every_n_epochs": 10,
|
||||
"save_every_n_steps": 0,
|
||||
"save_last_n_steps": 0,
|
||||
"save_last_n_steps_state": 0,
|
||||
"save_precision": "bf16",
|
||||
"scale_v_pred_loss_like_noise_pred": false,
|
||||
"scale_weight_norms": 5,
|
||||
"sdxl": true,
|
||||
"sdxl_cache_text_encoder_outputs": false,
|
||||
"sdxl_no_half_vae": true,
|
||||
"seed": "",
|
||||
"shuffle_caption": false,
|
||||
"stop_text_encoder_training_pct": 0,
|
||||
"text_encoder_lr": 1.0,
|
||||
"train_batch_size": 8,
|
||||
"train_on_input": true,
|
||||
"training_comment": "trigger: the queen of heart 1a",
|
||||
"unet_lr": 1.0,
|
||||
"unit": 1,
|
||||
"up_lr_weight": "",
|
||||
"use_cp": false,
|
||||
"use_wandb": false,
|
||||
"v2": false,
|
||||
"v_parameterization": false,
|
||||
"vae_batch_size": 0,
|
||||
"wandb_api_key": "",
|
||||
"weighted_captions": false,
|
||||
"xformers": "xformers"
|
||||
}
|
||||
Loading…
Reference in New Issue