aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorAUTOMATIC1111 <16777216c@gmail.com>2023-03-27 06:38:17 +0300
committerGitHub <noreply@github.com>2023-03-27 06:38:17 +0300
commita336c7fe233fa7dff062f5187c0f4d01ab26e80b (patch)
treeb8b8844a0a244ba6fb1ed03694fe4798e3922e44
parent4c1ad743e3baf1246db0711aa0107debf036a12b (diff)
parent6a147db1287fe660e1bfb2ebf5b3fadc14835c69 (diff)
Merge pull request #9017 from camenduru/dev
Convert type annotations to Python 3.9-compatible syntax (typing.Union)
-rw-r--r--extensions-builtin/Lora/lora.py5
1 file changed, 3 insertions, 2 deletions
diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index edd95f78..696be8ea 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -2,6 +2,7 @@ import glob
import os
import re
import torch
+from typing import Union
from modules import shared, devices, sd_models, errors
@@ -235,7 +236,7 @@ def lora_calc_updown(lora, module, target):
return updown
-def lora_apply_weights(self: torch.nn.Conv2d | torch.nn.Linear | torch.nn.MultiheadAttention):
+def lora_apply_weights(self: Union[torch.nn.Conv2d, torch.nn.Linear, torch.nn.MultiheadAttention]):
"""
Applies the currently selected set of Loras to the weights of torch layer self.
If weights already have this particular set of loras applied, does nothing.
@@ -295,7 +296,7 @@ def lora_apply_weights(self: torch.nn.Conv2d | torch.nn.Linear | torch.nn.Multih
setattr(self, "lora_current_names", wanted_names)
-def lora_reset_cached_weight(self: torch.nn.Conv2d | torch.nn.Linear):
+def lora_reset_cached_weight(self: Union[torch.nn.Conv2d, torch.nn.Linear]):
setattr(self, "lora_current_names", ())
setattr(self, "lora_weights_backup", None)