major: rename toml folder to config

pull/222/head
akiba 2023-08-31 10:35:24 +08:00
parent 5eb8ba4e82
commit 60bb4df617
No known key found for this signature in database
GPG Key ID: 9D600258808ACBCD
8 changed files with 8 additions and 7 deletions

1
.gitignore vendored
View File

@@ -15,6 +15,7 @@ train/*
logs/*
sd-models/*
toml/autosave/*
config/autosave/*
!sd-models/put stable diffusion model here.txt
!logs/.keep

2
gui.py
View File

@@ -32,7 +32,7 @@ if __name__ == "__main__":
setup_logging()
smart_pip_mirror()
prepare_frontend()
check_dirs(["toml/autosave", "logs"])
check_dirs(["config/autosave", "logs"])
if not args.skip_prepare_environment:
requirements_file = "requirements_win.txt" if sys.platform == "win32" else "requirements.txt"
validate_requirements(requirements_file)

View File

@@ -85,7 +85,7 @@ async def create_toml_file(request: Request, background_tasks: BackgroundTasks):
return {"status": "fail", "detail": "已有正在进行的训练"}
timestamp = datetime.now().strftime("%Y%m%d-%H%M%S")
toml_file = os.path.join(os.getcwd(), f"toml", "autosave", f"{timestamp}.toml")
toml_file = os.path.join(os.getcwd(), f"config", "autosave", f"{timestamp}.toml")
toml_data = await request.body()
j = json.loads(toml_data.decode("utf-8"))
@@ -111,7 +111,7 @@ async def create_toml_file(request: Request, background_tasks: BackgroundTasks):
sample_prompts = j.get("sample_prompts", None)
if sample_prompts is not None and not os.path.exists(sample_prompts) and is_promopt_like(sample_prompts):
sample_prompts_file = os.path.join(os.getcwd(), f"toml", "autosave", f"{timestamp}-promopt.txt")
sample_prompts_file = os.path.join(os.getcwd(), f"config", "autosave", f"{timestamp}-promopt.txt")
with open(sample_prompts_file, "w", encoding="utf-8") as f:
f.write(sample_prompts)
j["sample_prompts"] = sample_prompts_file

View File

@@ -1,8 +1,8 @@
# LoRA train script by @Akegarasu
$multi_gpu = 0 # multi gpu | 多显卡训练 该参数仅限在显卡数 >= 2 使用
$config_file = "./toml/default.toml" # config_file | 使用toml文件指定训练参数
$sample_prompts = "./toml/sample_prompts.txt" # sample_prompts | 采样prompts文件,留空则不启用采样功能
$config_file = "./config/default.toml" # config_file | 使用toml文件指定训练参数
$sample_prompts = "./config/sample_prompts.txt" # sample_prompts | 采样prompts文件,留空则不启用采样功能
$utf8 = 1 # utf8 | 使用utf-8编码读取toml以utf-8编码编写的、含中文的toml必须开启

View File

@@ -2,8 +2,8 @@
# LoRA train script by @Akegarasu
multi_gpu=0 # multi gpu | 多显卡训练 该参数仅限在显卡数 >= 2 使用
config_file="./toml/default.toml" # config_file | 使用toml文件指定训练参数
sample_prompts="./toml/sample_prompts.txt" # sample_prompts | 采样prompts文件,留空则不启用采样功能
config_file="./config/default.toml" # config_file | 使用toml文件指定训练参数
sample_prompts="./config/sample_prompts.txt" # sample_prompts | 采样prompts文件,留空则不启用采样功能
utf8=1 # utf8 | 使用utf-8编码读取toml以utf-8编码编写的、含中文的toml必须开启
# ============= DO NOT MODIFY CONTENTS BELOW | 请勿修改下方内容 =====================