updated to v1.6, added vram release function under boot assistant
parent
908fa69b75
commit
564a7c39fb
|
|
@ -4,6 +4,8 @@
|
|||
|
||||
MiaoshouAI Assistant for [Automatic1111 WebUI](https://github.com/AUTOMATIC1111/stable-diffusion-webui)
|
||||
|
||||
1.6 Add VRAM garbage collection for image generation so that VRAM is freed up after every run; if you run into an out-of-memory error, just go to Boot Assistant and click "VRAM Release" to free up the memory.
|
||||
1.5 Rewrote how assets are loaded to largely reduce the size and installation time for the extension. (extension reinstall is needed for this version and up). Added download covers for all models.
|
||||
1.4 Add new feature for using GPT to generate prompts. Fixed sub folder support for model management<br/>
|
||||
1.3 Add support for LyCoris (just put them in the lora folder; the <a href="">LyCoris extension</a> is needed); cleanup work for git to reduce project size<br/>
|
||||
1.2 Add support for model search. Allow model to load directly from model management. Now you can update model source directly under update tab.</br>
|
||||
|
|
|
|||
|
|
@ -3,6 +3,8 @@
|
|||
|
||||
喵手助理 [Automatic1111 WebUI](https://github.com/AUTOMATIC1111/stable-diffusion-webui)
|
||||
|
||||
1.6 增加了显存自动清理功能。在启动助手中启用后可以在每次生图后自动清理显存。
|
||||
1.5 重构了assets的读取方式,大幅减少了下载时间和插件大小(使用1.5+版本需要重新安装); 增加了一键下载封面功能。
|
||||
1.4 添加了使用GPT来生成咒语的功能; 修复了模型管理中对子文件夹的支持。
|
||||
1.3 增加了模型下载和管理功能中对LyCoris的支持(你需要将LyCoris模型放置在Lora目录下, 需要安装<a href="https://github.com/KohakuBlueleaf/a1111-sd-webui-lycoris"> LyCoris插件 </a>)<br/>
|
||||
1.2 增加了模型搜索功能,可以直接在模型管理下load模型,增加了插件和模型源更新功能。</br>
|
||||
|
|
|
|||
|
|
@ -626,5 +626,46 @@
|
|||
"OF_Coconut_Kitty.safetensors": null,
|
||||
"aZovyaPhotoreal_v2.safetensors": null,
|
||||
"meinahentai_v4.safetensors": null,
|
||||
"3DMM_V11.safetensors": null
|
||||
"3DMM_V11.safetensors": null,
|
||||
"anythingqingmix25D_v10.safetensors": null,
|
||||
"anythingV3Inpainting_1-inpainting.ckpt": null,
|
||||
"AnythingV5Ink_v32Ink.safetensors": null,
|
||||
"couterfeitv30_v30.inpainting.safetensors": null,
|
||||
"disneyPixarCartoon_v10.safetensors": null,
|
||||
"dis_V1-000020.safetensors": null,
|
||||
"ghostmix_v20Bakedvae.safetensors": null,
|
||||
"guofengrealmix_v10.safetensors": null,
|
||||
"majicMIX fantasy_v1.0.safetensors": null,
|
||||
"majicmixRealistic_v4.safetensors": null,
|
||||
"majicmixRealistic_v6.safetensors": null,
|
||||
"miaoshouai-anything-v4-pruned.safetensors": null,
|
||||
"niantu_v1.safetensors": null,
|
||||
"oukaGufeng_s1.safetensors": null,
|
||||
"pixarStyleModel_v10.ckpt": null,
|
||||
"rpg_V4.inpainting.safetensors": null,
|
||||
"rpg_V4.safetensors": null,
|
||||
"rpg_v4_2.inpainting.safetensors": null,
|
||||
"sdxl_base_pruned_no-ema.safetensors": null,
|
||||
"sdxl_refiner_pruned_no-ema.safetensors": null,
|
||||
"model_contest_1\\1-DangerAngelSavour10.safetensors-ckpt.safetensors": null,
|
||||
"model_contest_1\\10-huacai1.0-ckpt.safetensors": null,
|
||||
"model_contest_1\\11-CmixG-ckpt.safetensors": null,
|
||||
"model_contest_1\\16-NewGufengV1.0-ckpt.safetensors": null,
|
||||
"model_contest_1\\29-xxmix9realistic v4.0-ckpt.safetensors": null,
|
||||
"model_contest_1\\32-Rely on Defenc-ckpt.safetensors": null,
|
||||
"model_contest_1\\33-super_gufeng-ckpt.safetensors": null,
|
||||
"model_contest_1\\35-Dream CNrealistic_MIXv11-ckpt.safetensors": null,
|
||||
"model_contest_1\\38-version V1-ckpt.safetensors": null,
|
||||
"model_contest_1\\40-Alpha Lyrae-ckpt.safetensors": null,
|
||||
"model_contest_1\\41-tkf-ckpt.safetensors": null,
|
||||
"model_contest_1\\5-Dream CNrealistic_MIXv21-ckpt.safetensors": null,
|
||||
"model_contest_1\\54-guofeng-checkpoint.safetensors": null,
|
||||
"model_contest_1\\68-Colorful_girl-checkpoint.ckpt": null,
|
||||
"model_contest_1\\71-GCM-Game Concept Map-checkpoint.safetensors": null,
|
||||
"model_contest_1\\76-SJ_real109-checkpoint.safetensors": null,
|
||||
"model_contest_1\\95-chunzi-checkpoiint.ckpt": null,
|
||||
"model_contest_1\\99-Dream CNrealistic_MIXv12-checkpoint.safetensors": null,
|
||||
"model_contest_1\\CMixG-1-0.33.safetensors": null,
|
||||
"leokan.safetensors": null,
|
||||
"Moses_ten_commandments-BibleGilgalHK-V1.safetensors": null
|
||||
}
|
||||
|
|
@ -11,6 +11,7 @@
|
|||
"Allow Local Network Access": "False",
|
||||
"drp_choose_version": "Official Release",
|
||||
"txt_args_more": "",
|
||||
"auto_vram": true,
|
||||
"disable_log_console_output": true,
|
||||
"model_source": "liandange.com",
|
||||
"my_model_source": "civitai.com",
|
||||
|
|
|
|||
|
|
@ -2,5 +2,6 @@ psutil
|
|||
rehash
|
||||
tqdm
|
||||
openai
|
||||
numba
|
||||
gpt_index==0.4.24
|
||||
langchain==0.0.132
|
||||
|
|
@ -124,11 +124,17 @@ class MiaoShouAssistant(object):
|
|||
with gr.Row():
|
||||
sys_info_refbtn = gr.Button(value="Refresh")
|
||||
|
||||
with gr.Row():
|
||||
md_vram_release = gr.Markdown(visible=False, interactive=False, value='Memory Released', show_label=False)
|
||||
with gr.Row():
|
||||
chk_auto_release = gr.Checkbox(value=self.prelude.boot_settings['auto_vram'], label='Enable Auto Memory Release')
|
||||
reload_button = gr.Button('Forc VRAM Release')
|
||||
|
||||
|
||||
self.drp_gpu.change(self.runtime.update_xformers, inputs=[self.drp_gpu, self.chk_group_args], outputs=[self.chk_group_args])
|
||||
sys_info_refbtn.click(self.prelude.get_sys_info, None, txt_sys_info)
|
||||
|
||||
chk_auto_release.change(self.runtime.change_auto_vram, inputs=[chk_auto_release])
|
||||
reload_button.click(self.runtime.force_mem_release, outputs=[md_vram_release])
|
||||
|
||||
def create_subtab_model_management(self) -> None:
|
||||
with gr.TabItem('Model Management', elem_id="model_management_tab") as tab_model_manager:
|
||||
|
|
@ -211,9 +217,12 @@ class MiaoShouAssistant(object):
|
|||
with gr.Row(variant='panel'):
|
||||
display_text = 'Select a model and type some text here, ChatGPT will generate prompt for you. Supports different text in different languages.'
|
||||
display_value = ''
|
||||
|
||||
if self.prelude.boot_settings['openai_api'] == '':
|
||||
print('a')
|
||||
display_text = 'Set your OpenAI api key in Setting & Update first: https://platform.openai.com/account/api-keys'
|
||||
display_value = display_text
|
||||
|
||||
self.txt_main_prompt = gr.Textbox(label='Let ChatGPT write your prompt', placeholder=display_text, value=display_value, interactive=True, visible=True, elem_id="txt_main_prompt")
|
||||
with gr.Row(variant='panel'):
|
||||
with gr.Row():
|
||||
|
|
|
|||
|
|
@ -2,6 +2,7 @@ import modules
|
|||
import modules.scripts as scripts
|
||||
|
||||
from scripts.assistant.miaoshou import MiaoShouAssistant
|
||||
from scripts.runtime.msai_runtime import MiaoshouRuntime
|
||||
|
||||
|
||||
class MiaoshouScript(scripts.Script):
|
||||
|
|
@ -17,6 +18,10 @@ class MiaoshouScript(scripts.Script):
|
|||
def ui(self, is_img2img):
    """Return no per-run UI components.

    The assistant registers its own top-level tab via script callbacks,
    so this script contributes nothing to the txt2img/img2img panels.
    """
    # An empty tuple tells WebUI there are no controls to wire up.
    return tuple()
|
||||
|
||||
def postprocess(self, p, processed):
    """Free VRAM after every image-generation run (v1.6 auto memory release).

    Called by WebUI once processing completes; `p` is the processing
    object and `processed` the result — both are unused here.
    """
    # NOTE(review): a fresh runtime is constructed on every run and
    # mem_release() is invoked unconditionally — presumably the
    # 'auto_vram' boot setting is honored elsewhere; confirm against
    # MiaoshouRuntime before relying on the checkbox to disable this.
    self.runtime = MiaoshouRuntime()
    self.runtime.mem_release()
    return None
|
||||
|
||||
assistant = MiaoShouAssistant()
|
||||
modules.script_callbacks.on_ui_tabs(assistant.on_event_ui_tabs_opened)
|
||||
|
|
|
|||
|
|
@ -18,10 +18,14 @@ import modules
|
|||
import random
|
||||
from gpt_index import SimpleDirectoryReader, GPTListIndex, GPTSimpleVectorIndex, LLMPredictor, PromptHelper
|
||||
import openai
|
||||
import gc
|
||||
import json
|
||||
#import tkinter as tk
|
||||
#from tkinter import filedialog, ttk
|
||||
from modules import shared, sd_hijack
|
||||
import modules.devices as devices
|
||||
import torch
|
||||
from numba import cuda
|
||||
from modules import shared, sd_hijack, sd_samplers, processing
|
||||
from modules.sd_models import CheckpointInfo
|
||||
from scripts.download.msai_downloader_manager import MiaoshouDownloaderManager
|
||||
from scripts.msai_logging.msai_logger import Logger
|
||||
|
|
@ -212,7 +216,7 @@ class MiaoshouRuntime(object):
|
|||
return model_cover_thumbnails
|
||||
|
||||
# TODO: add typing hint
|
||||
def update_boot_settings(self, version, drp_gpu, drp_theme, txt_listen_port, chk_group_args, additional_args):
|
||||
def update_boot_settings(self, version, drp_gpu, drp_theme, txt_listen_port, chk_group_args, additional_args, auto_vram):
|
||||
boot_settings = self.prelude.boot_settings
|
||||
boot_settings['drp_args_vram'] = drp_gpu
|
||||
boot_settings["drp_args_theme"] = drp_theme
|
||||
|
|
@ -222,6 +226,7 @@ class MiaoshouRuntime(object):
|
|||
boot_settings[chk] = self.prelude.checkboxes[chk]
|
||||
boot_settings['txt_args_more'] = additional_args
|
||||
boot_settings['drp_choose_version'] = version
|
||||
boot_settings['auto_vram'] = auto_vram
|
||||
|
||||
all_settings = self.prelude.all_settings
|
||||
all_settings['boot_settings'] = boot_settings
|
||||
|
|
@ -236,6 +241,40 @@ class MiaoshouRuntime(object):
|
|||
all_settings['boot_settings'] = boot_settings
|
||||
toolkit.write_json(self.prelude.setting_file, all_settings)
|
||||
|
||||
def change_auto_vram(self, auto_vram):
    """Persist the 'Enable Auto Memory Release' checkbox state.

    auto_vram: bool — stored under the 'auto_vram' boot setting so the
    choice survives restarts.
    """
    # NOTE(review): relies on update_boot_setting (singular) existing on
    # this class — not visible in this view; confirm it is not a typo of
    # update_boot_settings.
    self.update_boot_setting('auto_vram', auto_vram)
|
||||
|
||||
def mem_release(self):
    """Release cached GPU memory after a generation run.

    Runs Python garbage collection, asks the WebUI device helper and
    torch to drop cached CUDA allocations, then collects once more to
    reclaim anything those calls released. Best-effort: any failure is
    logged and swallowed so a release error never breaks a run.
    """
    try:
        gc.collect()
        devices.torch_gc()
        torch.cuda.empty_cache()
        gc.collect()

        print('Miaoshouai boot assistant: Memory Released!')
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; other failures are reported but not raised.
        print('Miaoshouai boot assistant: Memory Release Failed...!')
|
||||
|
||||
def force_mem_release(self):
    """Forcefully tear down the CUDA context and release all VRAM.

    Backs the "Force VRAM Release" button in Boot Assistant: restores
    any sampler / tensor-creation functions that hooking extensions
    saved (so stale hooks do not keep tensors alive), closes and
    re-selects CUDA device 0 via numba to destroy the context, then
    runs the regular mem_release() garbage-collection pass.

    Returns a gr.Markdown update making the status row visible with a
    success or failure message.
    """
    try:
        if hasattr(sd_samplers, "create_sampler_original_md"):
            sd_samplers.create_sampler = sd_samplers.create_sampler_original_md
            del sd_samplers.create_sampler_original_md
        if hasattr(processing, "create_random_tensors_original_md"):
            processing.create_random_tensors = processing.create_random_tensors_original_md
            del processing.create_random_tensors_original_md

        # Close then re-open the context on device 0 so subsequent runs
        # still have a usable device after everything is freed.
        cuda.select_device(0)
        cuda.close()
        cuda.select_device(0)
        self.mem_release()
        msg = 'Memory Released! (May not work if you already got CUDA out of memory error)'
    except Exception as e:
        msg = f'Memory Release Failed! ({str(e)})'

    # Fix: the original ended with this return statement written twice;
    # the second copy was unreachable dead code and has been removed.
    return gr.Markdown.update(visible=True, value=msg)
|
||||
def get_all_models(self, site: str) -> t.Any:
    """Return the model catalog for *site*, loaded from its JSON file.

    site: key into self.prelude.model_json — presumably a model-source
    hostname such as 'civitai.com' or 'liandange.com'; confirm against
    the prelude configuration.
    """
    return toolkit.read_json(self.prelude.model_json[site])
|
||||
|
||||
|
|
@ -1033,7 +1072,6 @@ class MiaoshouRuntime(object):
|
|||
return gr.TextArea.update(value=res_prompt)
|
||||
|
||||
def update_gptapi(self, apikey):
|
||||
|
||||
if apikey == '':
|
||||
res = 'Please enter a valid API Key'
|
||||
gpt_hint_text = 'Set your OpenAI api key in Setting & Update first: https://platform.openai.com/account/api-keys'
|
||||
|
|
|
|||
Loading…
Reference in New Issue