提交 db3552a3 编写于 作者: A a2569875

Trying to fix issues #8 and #11; not sure if it works.

上级 dd29e057
......@@ -24,7 +24,11 @@ def on_enable():
# Back up the forward/load_state_dict hooks that LyCORIS installed on torch.nn,
# so they can be restored later when this extension is disabled.
composable_lycoris.backup_MultiheadAttention_forward_before_lyco = torch.nn.MultiheadAttention_forward_before_lyco
if hasattr(torch.nn, 'MultiheadAttention_load_state_dict_before_lyco'):
composable_lycoris.backup_MultiheadAttention_load_state_dict_before_lyco = torch.nn.MultiheadAttention_load_state_dict_before_lyco
# If the LyCORIS extension was never loaded (lyco_notfound set elsewhere),
# fabricate the *_before_lyco attributes from composable-lora's own saved
# pre-patch forwards, then install composable-lora's patched forwards.
if hasattr(composable_lora, 'lyco_notfound'):
if composable_lora.lyco_notfound:
torch.nn.Linear_forward_before_lyco = composable_lora.Linear_forward_before_clora
torch.nn.Conv2d_forward_before_lyco = composable_lora.Conv2d_forward_before_clora
torch.nn.MultiheadAttention_forward_before_lyco = composable_lora.MultiheadAttention_forward_before_clora
torch.nn.Linear.forward = composable_lora.lora_Linear_forward
torch.nn.Conv2d.forward = composable_lora.lora_Conv2d_forward
# NOTE(review): MultiheadAttention gets lycoris' wrapper while Linear/Conv2d
# get composable_lora's — presumably intentional, but confirm against the
# lycoris module's expectations.
torch.nn.MultiheadAttention.forward = lycoris.lyco_MultiheadAttention_forward
......
......@@ -15,6 +15,24 @@ def unload():
# Restore the original (pre-LoRA) forward implementations on unload.
torch.nn.Conv2d.forward = torch.nn.Conv2d_forward_before_lora
torch.nn.MultiheadAttention.forward = torch.nn.MultiheadAttention_forward_before_lora
# Snapshot the "before composable-lora" forwards exactly once per layer type.
# Prefer the LyCORIS-saved originals when LyCORIS patched first; otherwise
# take whatever forward is currently installed on torch.nn.
if not hasattr(composable_lora, 'Linear_forward_before_clora'):
if hasattr(torch.nn, 'Linear_forward_before_lyco'):
composable_lora.Linear_forward_before_clora = torch.nn.Linear_forward_before_lyco
else:
composable_lora.Linear_forward_before_clora = torch.nn.Linear.forward
if not hasattr(composable_lora, 'Conv2d_forward_before_clora'):
if hasattr(torch.nn, 'Conv2d_forward_before_lyco'):
composable_lora.Conv2d_forward_before_clora = torch.nn.Conv2d_forward_before_lyco
else:
composable_lora.Conv2d_forward_before_clora = torch.nn.Conv2d.forward
if not hasattr(composable_lora, 'MultiheadAttention_forward_before_clora'):
if hasattr(torch.nn, 'MultiheadAttention_forward_before_lyco'):
composable_lora.MultiheadAttention_forward_before_clora = torch.nn.MultiheadAttention_forward_before_lyco
else:
composable_lora.MultiheadAttention_forward_before_clora = torch.nn.MultiheadAttention.forward
# Same snapshot logic for the *_before_lora attributes used by the restore above.
if not hasattr(torch.nn, 'Linear_forward_before_lora'):
if hasattr(torch.nn, 'Linear_forward_before_lyco'):
torch.nn.Linear_forward_before_lora = torch.nn.Linear_forward_before_lyco
......@@ -33,6 +51,11 @@ if not hasattr(torch.nn, 'MultiheadAttention_forward_before_lora'):
else:
torch.nn.MultiheadAttention_forward_before_lora = torch.nn.MultiheadAttention.forward
# Record whether the LyCORIS extension has already patched torch.nn; the
# presence of Linear_forward_before_lyco is used as the detection signal.
# on_enable later reads this flag to decide which fallbacks to install.
if hasattr(torch.nn, 'Linear_forward_before_lyco'):
composable_lora.lyco_notfound = False
else:
composable_lora.lyco_notfound = True
# Install composable-lora's patched forwards (module import side effect).
torch.nn.Linear.forward = composable_lora.lora_Linear_forward
torch.nn.Conv2d.forward = composable_lora.lora_Conv2d_forward
......
Markdown is supported
0% .
You are about to add 0 people to the discussion. Proceed with caution.
先完成此消息的编辑!
想要评论请 注册