Diffstat (limited to 'extensions-builtin')
-rw-r--r--  extensions-builtin/Lora/network_oft.py   4
-rw-r--r--  extensions-builtin/Lora/networks.py      35
2 files changed, 2 insertions, 37 deletions
diff --git a/extensions-builtin/Lora/network_oft.py b/extensions-builtin/Lora/network_oft.py
index 2af1bc4c..0a87958e 100644
--- a/extensions-builtin/Lora/network_oft.py
+++ b/extensions-builtin/Lora/network_oft.py
@@ -37,7 +37,7 @@ class NetworkModuleOFT(network.NetworkModule):
    def apply_to(self):
        self.org_forward = self.org_module[0].forward
        self.org_module[0].forward = self.forward
-
+
    def get_weight(self, oft_blocks, multiplier=None):
        block_Q = oft_blocks - oft_blocks.transpose(1, 2)
        norm_Q = torch.norm(block_Q.flatten())
@@ -66,7 +66,7 @@ class NetworkModuleOFT(network.NetworkModule):
        output_shape = self.oft_blocks.shape
        return self.finalize_updown(updown, orig_weight, output_shape)
-
+
    def forward(self, x, y=None):
        x = self.org_forward(x)
        if self.multiplier() == 0.0:
diff --git a/extensions-builtin/Lora/networks.py b/extensions-builtin/Lora/networks.py
index e5e73450..78a97033 100644
--- a/extensions-builtin/Lora/networks.py
+++ b/extensions-builtin/Lora/networks.py
@@ -169,10 +169,6 @@ def load_network(name, network_on_disk):
            else:
                emb_dict[vec_name] = weight
            bundle_embeddings[emb_name] = emb_dict
-
-        #if key_network_without_network_parts == "oft_unet":
-        #    print(key_network_without_network_parts)
-        #    pass

        key = convert_diffusers_name_to_compvis(key_network_without_network_parts, is_sd2)
        sd_module = shared.sd_model.network_layer_mapping.get(key, None)
@@ -196,31 +192,8 @@ def load_network(name, network_on_disk):
                sd_module = shared.sd_model.network_layer_mapping.get(key, None)

        elif sd_module is None and "oft_unet" in key_network_without_network_parts:
-            # UNET_TARGET_REPLACE_MODULE_ALL_LINEAR = ["Transformer2DModel"]
-            # UNET_TARGET_REPLACE_MODULE_CONV2D_3X3 = ["ResnetBlock2D", "Downsample2D", "Upsample2D"]
-            UNET_TARGET_REPLACE_MODULE_ATTN_ONLY = ["CrossAttention"]
-            # TODO: Change matched modules based on whether all linear, conv, etc
-
            key = key_network_without_network_parts.replace("oft_unet", "diffusion_model")
            sd_module = shared.sd_model.network_layer_mapping.get(key, None)
-            #key_no_suffix = key.rsplit("_to_", 1)[0]
-            ## Match all modules of class CrossAttention
-            #replace_module_list = []
-            #for module_type in UNET_TARGET_REPLACE_MODULE_ATTN_ONLY:
-            #    replace_module_list += [module for k, module in shared.sd_model.network_layer_mapping.items() if module_type in module.__class__.__name__]
-
-            #matched_module = replace_module_list.get(key_no_suffix, None)
-            #if key.endswith('to_q'):
-            #    sd_module = matched_module.to_q or None
-            #if key.endswith('to_k'):
-            #    sd_module = matched_module.to_k or None
-            #if key.endswith('to_v'):
-            #    sd_module = matched_module.to_v or None
-            #if key.endswith('to_out_0'):
-            #    sd_module = matched_module.to_out[0] or None
-            #if key.endswith('to_out_1'):
-            #    sd_module = matched_module.to_out[1] or None
-

        if sd_module is None:
            keys_failed_to_match[key_network] = key
@@ -242,14 +215,6 @@ def load_network(name, network_on_disk):
            raise AssertionError(f"Could not find a module type (out of {', '.join([x.__class__.__name__ for x in module_types])}) that would accept those keys: {', '.join(weights.w)}")

        net.modules[key] = net_module
-
-        # replaces forward method of original Linear
-        # applied_to_count = 0
-        #for key, created_module in net.modules.items():
-        #    if isinstance(created_module, network_oft.NetworkModuleOFT):
-        #        net_module.apply_to()
-        #applied_to_count += 1
-        # print(f'Applied OFT modules: {applied_to_count}')
    embeddings = {}
    for emb_name, data in bundle_embeddings.items():