author     AUTOMATIC1111 <16777216c@gmail.com>  2023-07-18 18:20:22 +0300
committer  AUTOMATIC1111 <16777216c@gmail.com>  2023-07-18 18:20:22 +0300
commit     eb7c9b58fc2fbab205d4bc9f708800870dcda3fb (patch)
tree       337bc9e4e6793aa072c2e2a8c10e3a3f7daf6a95 /extensions-builtin/Lora/network.py
parent     f865d3e11647dfd6c7b2cdf90dde24680e58acd8 (diff)
parent     7f7db1700bda40ba3171a49b6a4ef38f868b7d0a (diff)
Merge branch 'dev' into release_candidate
Diffstat (limited to 'extensions-builtin/Lora/network.py')
-rw-r--r--  extensions-builtin/Lora/network.py | 154
1 file changed, 154 insertions(+), 0 deletions(-)
diff --git a/extensions-builtin/Lora/network.py b/extensions-builtin/Lora/network.py
new file mode 100644
index 00000000..8ecfa29a
--- /dev/null
+++ b/extensions-builtin/Lora/network.py
@@ -0,0 +1,154 @@
+import os
+from collections import namedtuple
+import enum
+
+from modules import sd_models, cache, errors, hashes, shared
+
+NetworkWeights = namedtuple('NetworkWeights', ['network_key', 'sd_key', 'w', 'sd_module'])
+
+metadata_tags_order = {"ss_sd_model_name": 1, "ss_resolution": 2, "ss_clip_skip": 3, "ss_num_train_images": 10, "ss_tag_frequency": 20}
+
+
+class SdVersion(enum.Enum):
+    Unknown = 1
+    SD1 = 2
+    SD2 = 3
+    SDXL = 4
+
+
+class NetworkOnDisk:
+    def __init__(self, name, filename):
+        self.name = name
+        self.filename = filename
+        self.metadata = {}
+        self.is_safetensors = os.path.splitext(filename)[1].lower() == ".safetensors"
+
+        def read_metadata():
+            metadata = sd_models.read_metadata_from_safetensors(filename)
+            metadata.pop('ssmd_cover_images', None)  # those are cover images, and they are too big to display in UI as text
+
+            return metadata
+
+        if self.is_safetensors:
+            try:
+                self.metadata = cache.cached_data_for_file('safetensors-metadata', "lora/" + self.name, filename, read_metadata)
+            except Exception as e:
+                errors.display(e, f"reading lora {filename}")
+
+        if self.metadata:
+            m = {}
+            for k, v in sorted(self.metadata.items(), key=lambda x: metadata_tags_order.get(x[0], 999)):
+                m[k] = v
+
+            self.metadata = m
+
+        self.alias = self.metadata.get('ss_output_name', self.name)
+
+        self.hash = None
+        self.shorthash = None
+        self.set_hash(
+            self.metadata.get('sshs_model_hash') or
+            hashes.sha256_from_cache(self.filename, "lora/" + self.name, use_addnet_hash=self.is_safetensors) or
+            ''
+        )
+
+        self.sd_version = self.detect_version()
+
+    def detect_version(self):
+        if str(self.metadata.get('ss_base_model_version', "")).startswith("sdxl_"):
+            return SdVersion.SDXL
+        elif str(self.metadata.get('ss_v2', "")) == "True":
+            return SdVersion.SD2
+        elif len(self.metadata):
+            return SdVersion.SD1
+
+        return SdVersion.Unknown
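
detect_version reads training metadata written by kohya-ss style trainers, checking the most specific markers first. As an illustration (the metadata dicts below are made-up examples, not values from a real file):

    # made-up metadata and the version each would resolve to
    examples = [
        ({"ss_base_model_version": "sdxl_base_v1-0"}, SdVersion.SDXL),  # "sdxl_" prefix checked first
        ({"ss_v2": "True"}, SdVersion.SD2),                             # explicit SD2 flag
        ({"ss_output_name": "my_lora"}, SdVersion.SD1),                 # any other non-empty metadata
        ({}, SdVersion.Unknown),                                        # no metadata at all
    ]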
+
+    def set_hash(self, v):
+        self.hash = v
+        self.shorthash = self.hash[0:12]
+
+        if self.shorthash:
+            import networks
+            networks.available_network_hash_lookup[self.shorthash] = self
+
+    def read_hash(self):
+        if not self.hash:
+            self.set_hash(hashes.sha256(self.filename, "lora/" + self.name, use_addnet_hash=self.is_safetensors) or '')
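
read_hash computes the sha256 lazily. When use_addnet_hash is set for safetensors files, the convention (shared with the kohya-ss additional-networks extension) is to hash only the tensor payload, skipping the header, so the hash stays stable across metadata edits. A minimal sketch of that scheme, not the repo's hashes module itself:

    import hashlib
    import struct

    def addnet_hash_sketch(filename):
        # .safetensors layout: 8-byte little-endian header length, JSON header, tensor data
        with open(filename, "rb") as f:
            header_size = struct.unpack("<Q", f.read(8))[0]
            f.seek(8 + header_size)  # skip the length prefix and the JSON header
            return hashlib.sha256(f.read()).hexdigest()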
+
+    def get_alias(self):
+        import networks
+        if shared.opts.lora_preferred_name == "Filename" or self.alias.lower() in networks.forbidden_network_aliases:
+            return self.name
+        else:
+            return self.alias
+
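
NetworkOnDisk thus gives callers cached metadata, an alias, a version guess, and on-demand hashes for a single network file. A usage sketch, assuming the webui environment and a hypothetical file path:

    net = NetworkOnDisk("my_lora", "models/Lora/my_lora.safetensors")  # hypothetical path
    net.is_safetensors      # True: decided by file extension
    net.alias               # 'ss_output_name' from metadata, or "my_lora" if absent
    net.sd_version          # SdVersion.SDXL / SD2 / SD1 / Unknown
    net.read_hash()         # computes sha256 if it wasn't already cached
    net.shorthash           # first 12 hex chars; set_hash also registers it in
                            # networks.available_network_hash_lookup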
+
+class Network: # LoraModule
+    def __init__(self, name, network_on_disk: NetworkOnDisk):
+        self.name = name
+        self.network_on_disk = network_on_disk
+        self.te_multiplier = 1.0
+        self.unet_multiplier = 1.0
+        self.dyn_dim = None
+        self.modules = {}
+        self.mtime = None
+
+        self.mentioned_name = None
+        """the text that was used to add the network to prompt - can be either name or an alias"""
+
+
+class ModuleType:
+    def create_module(self, net: Network, weights: NetworkWeights) -> Network | None:
+        return None
+
+
+class NetworkModule:
+    def __init__(self, net: Network, weights: NetworkWeights):
+        self.network = net
+        self.network_key = weights.network_key
+        self.sd_key = weights.sd_key
+        self.sd_module = weights.sd_module
+
+        if hasattr(self.sd_module, 'weight'):
+            self.shape = self.sd_module.weight.shape
+
+        self.dim = None
+        self.bias = weights.w.get("bias")
+        self.alpha = weights.w["alpha"].item() if "alpha" in weights.w else None
+        self.scale = weights.w["scale"].item() if "scale" in weights.w else None
+
+    def multiplier(self):
+        if 'transformer' in self.sd_key[:20]:
+            return self.network.te_multiplier
+        else:
+            return self.network.unet_multiplier
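
multiplier() splits one user-supplied strength into two: modules whose sd_key begins with 'transformer' (the text encoder) get te_multiplier, everything else (the UNet) gets unet_multiplier. A sketch with hypothetical keys:

    net = Network("example", network_on_disk=None)  # hypothetical; normally built from a NetworkOnDisk
    net.te_multiplier, net.unet_multiplier = 0.5, 0.8
    # a module with sd_key "transformer_text_model_encoder_layers_0_..." returns 0.5
    # a module with sd_key "diffusion_model_input_blocks_1_..." returns 0.8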
+
+    def calc_scale(self):
+        if self.scale is not None:
+            return self.scale
+        if self.dim is not None and self.alpha is not None:
+            return self.alpha / self.dim
+
+        return 1.0
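
calc_scale implements standard LoRA alpha scaling: an explicit per-network 'scale' wins, otherwise alpha divided by the rank, else 1.0. A worked example with hypothetical values:

    alpha, dim = 4.0, 8    # hypothetical rank-8 network trained with alpha=4
    scale = alpha / dim    # 0.5: the raw up/down delta is halved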
+
+    def finalize_updown(self, updown, orig_weight, output_shape):
+        if self.bias is not None:
+            updown = updown.reshape(self.bias.shape)
+            updown += self.bias.to(orig_weight.device, dtype=orig_weight.dtype)
+            updown = updown.reshape(output_shape)
+
+        if len(output_shape) == 4:
+            updown = updown.reshape(output_shape)
+
+        if orig_weight.size().numel() == updown.size().numel():
+            updown = updown.reshape(orig_weight.shape)
+
+        return updown * self.calc_scale() * self.multiplier()
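
The net effect is W' = W + delta * calc_scale() * multiplier(), where the reshapes above make the delta line up with bias-carrying and 4D convolution weights. A sketch of how a caller would bake a network into a layer, assuming hypothetical module and layer objects:

    # `module` is a NetworkModule subclass instance, `layer` the torch layer being patched
    updown = module.calc_updown(layer.weight)   # subclasses route the raw delta through finalize_updown
    layer.weight += updown                      # W' = W + delta * scale * multiplier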
+
+    def calc_updown(self, target):
+        raise NotImplementedError()
+
+    def forward(self, x, y):
+        raise NotImplementedError()
+
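
ModuleType and NetworkModule are the extension points: each supported format (LoRA, LoHa, etc.) subclasses both, and create_module returns a module only when it recognizes the weight keys. A minimal sketch of a plain up/down LoRA pair, under assumed key names; this is an illustration, not the repo's actual implementation in network_lora.py:

    class ModuleTypeSketch(ModuleType):
        def create_module(self, net, weights):
            # claim these weights only if they look like a plain up/down pair
            if "lora_up.weight" in weights.w and "lora_down.weight" in weights.w:
                return NetworkModuleSketch(net, weights)
            return None


    class NetworkModuleSketch(NetworkModule):
        def __init__(self, net, weights):
            super().__init__(net, weights)
            self.up = weights.w["lora_up.weight"]      # (out_features, rank)
            self.down = weights.w["lora_down.weight"]  # (rank, in_features)
            self.dim = self.down.shape[0]              # rank, feeds calc_scale

        def calc_updown(self, target):
            # delta = up @ down, moved to the target weight's device/dtype
            updown = self.up.to(target.device, dtype=target.dtype) @ self.down.to(target.device, dtype=target.dtype)
            return self.finalize_updown(updown, target, target.shape)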