gui bug fixes

pull/4/head
arena 2023-01-06 14:48:00 +08:00
parent 18de44e14a
commit 6b46d9d47a
3 changed files with 10 additions and 6 deletions

View File

@ -56,7 +56,7 @@ Interestingly, the EMA data is itself an independent and functional model, it ca
![](https://cdn.discordapp.com/attachments/973151736946622467/1060767681692827718/ema.png)
### CLIP
During merging a CLIP key called `embeddings.position_ids` is sometimes broken. This is an int64 tensor that has the values from 0 to 76, merging will convert these to float and introduce errors. For example in AnythingV3 the value `76` has become `75.9975`, which is cast back to int64 when loaded by the webui, resulting in `75`. The option `Fix broken CLIP position IDs` will fix this tensor, which changes the model output slightly (perhaps for the worst). Broken vs Fixed.
During merging a CLIP key called `embeddings.position_ids` is sometimes broken. This is an int64 tensor that has the values from 0 to 76, merging will convert these to float and introduce errors. For example in AnythingV3 the value `76` has become `75.9975`, which is cast back to int64 when loaded by the webui, resulting in `75`. The option `Fix broken CLIP position IDs` will fix this tensor, which changes the model output slightly (perhaps for the worse). Broken vs Fixed.
![](https://cdn.discordapp.com/attachments/973151736946622467/1060777823624765470/clip_fix.png)
### Merging

View File

@ -321,6 +321,8 @@ def get_lists():
source_list += ["NEW " + a]
def find_source(source):
if not source:
return None
if os.sep in source:
s = os.path.join(ROOT_PATH, source)
if os.path.exists(s):
@ -330,7 +332,7 @@ def find_source(source):
else:
paths = [MODEL_PATH, VAE_PATH, COMPONENT_PATH]
for p in paths:
s = glob.glob(os.path.join(p, "**", source))
s = glob.glob(os.path.join(p, "**", "*" + source), recursive=True)
if s:
return s[0]
return None

View File

@ -416,9 +416,6 @@ def fix_ema(model):
if kk in model:
model[k] = model[kk]
del model[kk]
else:
print(kk)
def compute_metric(model, arch=None):
def tensor_metric(t):
t = t.to(torch.float16).to(torch.float32)
@ -581,7 +578,12 @@ def log(model, file):
f.write(out)
if __name__ == '__main__':
r = "/run/media/pul/ssd/stable-diffusion-webui/models/Stable-diffusion/Anything-V3.0.ckpt"
import glob
r = "/run/media/pul/ssd/stable-diffusion-webui/models/Stable-diffusion/**/*fumo-800.ckpt"
print(glob.glob(r, recursive=True))
exit()
a, _ = load(r)