author     Sj-Si <sjw.jetty@gmail.com>  2024-01-11 16:37:35 -0500
committer  Sj-Si <sjw.jetty@gmail.com>  2024-01-11 16:37:35 -0500
commit     036500223de0a3caaa86360a8ad3ed301e4367b0 (patch)
tree       f05f0d5fc503d9c35d57bad077a5dab1dfd6569e /extensions-builtin/Lora/network_oft.py
parent     0726a6e12e85a37d1e514f5603acf9f058c11783 (diff)
parent     cb5b335acddd126d4f6c990982816c06beb0d6ae (diff)
Merge changes from dev
Diffstat (limited to 'extensions-builtin/Lora/network_oft.py')
-rw-r--r--  extensions-builtin/Lora/network_oft.py | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/extensions-builtin/Lora/network_oft.py b/extensions-builtin/Lora/network_oft.py
index fa647020..342fcd0d 100644
--- a/extensions-builtin/Lora/network_oft.py
+++ b/extensions-builtin/Lora/network_oft.py
@@ -56,7 +56,7 @@ class NetworkModuleOFT(network.NetworkModule):
         self.block_size, self.num_blocks = factorization(self.out_dim, self.dim)
 
     def calc_updown(self, orig_weight):
-        oft_blocks = self.oft_blocks.to(orig_weight.device, dtype=orig_weight.dtype)
+        oft_blocks = self.oft_blocks.to(orig_weight.device)
         eye = torch.eye(self.block_size, device=self.oft_blocks.device)
 
         if self.is_kohya:
@@ -66,7 +66,7 @@ class NetworkModuleOFT(network.NetworkModule):
             block_Q = block_Q * ((new_norm_Q + 1e-8) / (norm_Q + 1e-8))
             oft_blocks = torch.matmul(eye + block_Q, (eye - block_Q).float().inverse())
 
-        R = oft_blocks.to(orig_weight.device, dtype=orig_weight.dtype)
+        R = oft_blocks.to(orig_weight.device)
 
         # This errors out for MultiheadAttention, might need to be handled up-stream
         merged_weight = rearrange(orig_weight, '(k n) ... -> k n ...', k=self.num_blocks, n=self.block_size)
@@ -77,6 +77,6 @@ class NetworkModuleOFT(network.NetworkModule):
         )
         merged_weight = rearrange(merged_weight, 'k m ... -> (k m) ...')
 
-        updown = merged_weight.to(orig_weight.device, dtype=orig_weight.dtype) - orig_weight
+        updown = merged_weight.to(orig_weight.device) - orig_weight.to(merged_weight.dtype)
         output_shape = orig_weight.shape
         return self.finalize_updown(updown, orig_weight, output_shape)
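
All three hunks make the same fix: stop force-casting the OFT tensors to the base weight's dtype (often float16), let the Cayley-transform inverse run in float32, and promote orig_weight at the final subtraction instead of truncating the merged result down. Below is a minimal sketch of the effect, not code from this repository; the shapes, the 0.01 scale, and the toy fp16 weight are assumptions for illustration only.

    # Minimal sketch (not part of the patch) of why these dtype changes matter.
    # Shapes and the 0.01 scale are assumed for illustration.
    import torch
    from einops import rearrange

    num_blocks, block_size = 4, 8
    out_dim = num_blocks * block_size

    # Base layer weight as it would be stored under half precision
    orig_weight = torch.randn(out_dim, 16, dtype=torch.float16)

    # Trainable OFT blocks, left in their stored dtype (typically float32)
    oft_blocks = 0.01 * torch.randn(num_blocks, block_size, block_size)

    # Kohya-style Cayley transform: for skew-symmetric Q,
    # (I + Q)(I - Q)^-1 is orthogonal -- provided the inverse is computed
    # in float32, where round-off error stays small.
    eye = torch.eye(block_size)
    block_Q = oft_blocks - oft_blocks.transpose(1, 2)  # skew-symmetric
    R = torch.matmul(eye + block_Q, (eye - block_Q).float().inverse())

    # Orthogonality holds to float32 round-off (roughly 1e-7)
    err = (R @ R.transpose(1, 2) - eye).abs().max().item()
    print(f"orthogonality error: {err:.2e}")

    # Apply the block-diagonal rotation as the patch does, promoting the
    # half-precision weight up to R's dtype instead of truncating R down
    merged = rearrange(orig_weight, '(k n) ... -> k n ...', k=num_blocks, n=block_size)
    merged = torch.einsum('k n m, k n ... -> k m ...', R, merged.to(R.dtype))
    merged = rearrange(merged, 'k m ... -> (k m) ...')

    # Subtract in the wider dtype: cast orig_weight up, as the last hunk does
    updown = merged - orig_weight.to(merged.dtype)

The .float() before .inverse() in the second hunk already points at the issue: matrix inversion is not implemented for half-precision tensors on most backends, and immediately casting R back to float16 (as the removed dtype= argument did) would discard the precision the float32 inverse just bought.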