Trying to fix issues #8 and #11. Not sure if it works.

main
a2569875 2023-06-28 01:45:01 +08:00
parent dd29e057e0
commit db3552a356
2 changed files with 28 additions and 1 deletions

View File

@ -24,7 +24,11 @@ def on_enable():
composable_lycoris.backup_MultiheadAttention_forward_before_lyco = torch.nn.MultiheadAttention_forward_before_lyco
if hasattr(torch.nn, 'MultiheadAttention_load_state_dict_before_lyco'):
composable_lycoris.backup_MultiheadAttention_load_state_dict_before_lyco = torch.nn.MultiheadAttention_load_state_dict_before_lyco
if hasattr(composable_lora, 'lyco_notfound'):
if composable_lora.lyco_notfound:
torch.nn.Linear_forward_before_lyco = composable_lora.Linear_forward_before_clora
torch.nn.Conv2d_forward_before_lyco = composable_lora.Conv2d_forward_before_clora
torch.nn.MultiheadAttention_forward_before_lyco = composable_lora.MultiheadAttention_forward_before_clora
torch.nn.Linear.forward = composable_lora.lora_Linear_forward
torch.nn.Conv2d.forward = composable_lora.lora_Conv2d_forward
torch.nn.MultiheadAttention.forward = lycoris.lyco_MultiheadAttention_forward

View File

@ -15,6 +15,24 @@ def unload():
torch.nn.Conv2d.forward = torch.nn.Conv2d_forward_before_lora
torch.nn.MultiheadAttention.forward = torch.nn.MultiheadAttention_forward_before_lora
if not hasattr(composable_lora, 'Linear_forward_before_clora'):
if hasattr(torch.nn, 'Linear_forward_before_lyco'):
composable_lora.Linear_forward_before_clora = torch.nn.Linear_forward_before_lyco
else:
composable_lora.Linear_forward_before_clora = torch.nn.Linear.forward
if not hasattr(composable_lora, 'Conv2d_forward_before_clora'):
if hasattr(torch.nn, 'Conv2d_forward_before_lyco'):
composable_lora.Conv2d_forward_before_clora = torch.nn.Conv2d_forward_before_lyco
else:
composable_lora.Conv2d_forward_before_clora = torch.nn.Conv2d.forward
if not hasattr(composable_lora, 'MultiheadAttention_forward_before_clora'):
if hasattr(torch.nn, 'MultiheadAttention_forward_before_lyco'):
composable_lora.MultiheadAttention_forward_before_clora = torch.nn.MultiheadAttention_forward_before_lyco
else:
composable_lora.MultiheadAttention_forward_before_clora = torch.nn.MultiheadAttention.forward
if not hasattr(torch.nn, 'Linear_forward_before_lora'):
if hasattr(torch.nn, 'Linear_forward_before_lyco'):
torch.nn.Linear_forward_before_lora = torch.nn.Linear_forward_before_lyco
@ -33,6 +51,11 @@ if not hasattr(torch.nn, 'MultiheadAttention_forward_before_lora'):
else:
torch.nn.MultiheadAttention_forward_before_lora = torch.nn.MultiheadAttention.forward
if hasattr(torch.nn, 'Linear_forward_before_lyco'):
composable_lora.lyco_notfound = False
else:
composable_lora.lyco_notfound = True
torch.nn.Linear.forward = composable_lora.lora_Linear_forward
torch.nn.Conv2d.forward = composable_lora.lora_Conv2d_forward