-rw-r--r--  CHANGELOG.md                                       | 13
-rw-r--r--  extensions-builtin/Lora/lora.py                    | 10
-rw-r--r--  extensions-builtin/Lora/scripts/lora_script.py     |  1
-rw-r--r--  extensions-builtin/Lora/ui_extra_networks_lora.py  |  8
-rw-r--r--  javascript/localization.js                         | 60
-rw-r--r--  modules/localization.py                            |  4
-rw-r--r--  modules/modelloader.py                             | 27
-rw-r--r--  modules/safe.py                                    |  2
-rw-r--r--  modules/shared.py                                  |  9
-rw-r--r--  modules/ui.py                                      | 12
-rw-r--r--  style.css                                          |  3
-rw-r--r--  webui.py                                           |  6
12 files changed, 94 insertions(+), 61 deletions(-)
diff --git a/CHANGELOG.md b/CHANGELOG.md
index d1727864..8cf444ca 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,16 @@
+## 1.2.1
+
+### Features:
+ * add an option to always refer to lora by filenames
+
+### Bug Fixes:
+ * never refer to a lora by its alias if multiple loras have the same alias or the alias is called "none"
+ * fix upscalers disappearing after the user reloads UI
+ * allow bf16 in safe unpickler (resolves problems with loading some loras)
+ * allow web UI to be run fully offline
+ * fix localizations not working
+ * fix error for loras: 'LatentDiffusion' object has no attribute 'lora_layer_mapping'
+
## 1.2.0
### Features:
diff --git a/extensions-builtin/Lora/lora.py b/extensions-builtin/Lora/lora.py
index ba1293df..b5d0c98f 100644
--- a/extensions-builtin/Lora/lora.py
+++ b/extensions-builtin/Lora/lora.py
@@ -133,6 +133,10 @@ def load_lora(name, filename):
sd = sd_models.read_state_dict(filename)
+ # this should not be needed but is here as an emergency fix for an unknown error people are experiencing in 1.2.0
+ if not hasattr(shared.sd_model, 'lora_layer_mapping'):
+ assign_lora_names_to_compvis_modules(shared.sd_model)
+
keys_failed_to_match = {}
is_sd2 = 'model_transformer_resblocks' in shared.sd_model.lora_layer_mapping
@@ -393,6 +397,8 @@ def lora_MultiheadAttention_load_state_dict(self, *args, **kwargs):
def list_available_loras():
available_loras.clear()
available_lora_aliases.clear()
+ forbidden_lora_aliases.clear()
+ forbidden_lora_aliases.update({"none": 1})
os.makedirs(shared.cmd_opts.lora_dir, exist_ok=True)
@@ -406,6 +412,9 @@ def list_available_loras():
available_loras[name] = entry
+ if entry.alias in available_lora_aliases:
+ forbidden_lora_aliases[entry.alias.lower()] = 1
+
available_lora_aliases[name] = entry
available_lora_aliases[entry.alias] = entry
@@ -445,6 +454,7 @@ def infotext_pasted(infotext, params):
available_loras = {}
available_lora_aliases = {}
+forbidden_lora_aliases = {}
loaded_loras = []
list_available_loras()
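
To make the alias rule above concrete in isolation: an alias becomes unusable if more than one lora resolves to it, or if it is literally "none" (which the prompt dropdown reserves), and the UI then falls back to the filename. A minimal sketch of that rule follows; the helper names scan_aliases and pick_prompt_name are illustrative only and do not exist in the extension.

    # hypothetical helpers mirroring forbidden_lora_aliases / lora_preferred_name
    def scan_aliases(alias_by_filename):
        forbidden = {"none"}                 # reserved by the "Add Lora to prompt" dropdown
        seen = set()
        for alias in alias_by_filename.values():
            key = alias.lower()
            if key in seen:                  # same alias claimed twice -> ambiguous
                forbidden.add(key)
            seen.add(key)
        return forbidden

    def pick_prompt_name(filename, alias, forbidden, prefer_filename=False):
        if prefer_filename or alias.lower() in forbidden:
            return filename                  # unambiguous fallback
        return alias
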
diff --git a/extensions-builtin/Lora/scripts/lora_script.py b/extensions-builtin/Lora/scripts/lora_script.py
index 7db971fd..060bda05 100644
--- a/extensions-builtin/Lora/scripts/lora_script.py
+++ b/extensions-builtin/Lora/scripts/lora_script.py
@@ -54,6 +54,7 @@ script_callbacks.on_infotext_pasted(lora.infotext_pasted)
shared.options_templates.update(shared.options_section(('extra_networks', "Extra Networks"), {
"sd_lora": shared.OptionInfo("None", "Add Lora to prompt", gr.Dropdown, lambda: {"choices": ["None"] + [x for x in lora.available_loras]}, refresh=lora.list_available_loras),
+ "lora_preferred_name": shared.OptionInfo("Alias from file", "When adding to prompt, refer to lora by", gr.Radio, {"choices": ["Alias from file", "Filename"]}),
}))
diff --git a/extensions-builtin/Lora/ui_extra_networks_lora.py b/extensions-builtin/Lora/ui_extra_networks_lora.py
index a0edbc1e..2050e3fa 100644
--- a/extensions-builtin/Lora/ui_extra_networks_lora.py
+++ b/extensions-builtin/Lora/ui_extra_networks_lora.py
@@ -15,13 +15,19 @@ class ExtraNetworksPageLora(ui_extra_networks.ExtraNetworksPage):
def list_items(self):
for name, lora_on_disk in lora.available_loras.items():
path, ext = os.path.splitext(lora_on_disk.filename)
+
+ if shared.opts.lora_preferred_name == "Filename" or lora_on_disk.alias.lower() in lora.forbidden_lora_aliases:
+ alias = name
+ else:
+ alias = lora_on_disk.alias
+
yield {
"name": name,
"filename": path,
"preview": self.find_preview(path),
"description": self.find_description(path),
"search_term": self.search_terms_from_path(lora_on_disk.filename),
- "prompt": json.dumps(f"<lora:{lora_on_disk.alias}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">"),
+ "prompt": json.dumps(f"<lora:{alias}:") + " + opts.extra_networks_default_multiplier + " + json.dumps(">"),
"local_preview": f"{path}.{shared.opts.samples_format}",
"metadata": json.dumps(lora_on_disk.metadata, indent=4) if lora_on_disk.metadata else None,
}
diff --git a/javascript/localization.js b/javascript/localization.js
index 0123b877..86e5ca67 100644
--- a/javascript/localization.js
+++ b/javascript/localization.js
@@ -109,18 +109,23 @@ function processNode(node){
}
function dumpTranslations(){
+ if(!hasLocalization()) {
+ // If we don't have any localization,
+ // we will not have traversed the app to find
+ // original_lines, so do that now.
+ processNode(gradioApp());
+ }
var dumped = {}
if (localization.rtl) {
- dumped.rtl = true
+ dumped.rtl = true;
}
- Object.keys(original_lines).forEach(function(text){
- if(dumped[text] !== undefined) return
-
- dumped[text] = localization[text] || text
- })
+ for (const text in original_lines) {
+ if(dumped[text] !== undefined) continue;
+ dumped[text] = localization[text] || text;
+ }
- return dumped
+ return dumped;
}
function download_localization() {
@@ -137,7 +142,11 @@ function download_localization() {
document.body.removeChild(element);
}
-if(hasLocalization()) {
+document.addEventListener("DOMContentLoaded", function () {
+ if (!hasLocalization()) {
+ return;
+ }
+
onUiUpdate(function (m) {
m.forEach(function (mutation) {
mutation.addedNodes.forEach(function (node) {
@@ -146,26 +155,23 @@ if(hasLocalization()) {
});
})
+ processNode(gradioApp())
- document.addEventListener("DOMContentLoaded", function () {
- processNode(gradioApp())
+ if (localization.rtl) { // if the language is from right to left,
+ (new MutationObserver((mutations, observer) => { // wait for the style to load
+ mutations.forEach(mutation => {
+ mutation.addedNodes.forEach(node => {
+ if (node.tagName === 'STYLE') {
+ observer.disconnect();
- if (localization.rtl) { // if the language is from right to left,
- (new MutationObserver((mutations, observer) => { // wait for the style to load
- mutations.forEach(mutation => {
- mutation.addedNodes.forEach(node => {
- if (node.tagName === 'STYLE') {
- observer.disconnect();
-
- for (const x of node.sheet.rules) { // find all rtl media rules
- if (Array.from(x.media || []).includes('rtl')) {
- x.media.appendMedium('all'); // enable them
- }
+ for (const x of node.sheet.rules) { // find all rtl media rules
+ if (Array.from(x.media || []).includes('rtl')) {
+ x.media.appendMedium('all'); // enable them
}
}
- })
- });
- })).observe(gradioApp(), { childList: true });
- }
- })
-}
+ }
+ })
+ });
+ })).observe(gradioApp(), { childList: true });
+ }
+})
diff --git a/modules/localization.py b/modules/localization.py
index f6a6f2fb..ee9c65e7 100644
--- a/modules/localization.py
+++ b/modules/localization.py
@@ -23,7 +23,7 @@ def list_localizations(dirname):
localizations[fn] = file.path
-def localization_js(current_localization_name):
+def localization_js(current_localization_name: str) -> str:
fn = localizations.get(current_localization_name, None)
data = {}
if fn is not None:
@@ -34,4 +34,4 @@ def localization_js(current_localization_name):
print(f"Error loading localization from {fn}:", file=sys.stderr)
print(traceback.format_exc(), file=sys.stderr)
- return f"var localization = {json.dumps(data)}\n"
+ return f"window.localization = {json.dumps(data)}"
diff --git a/modules/modelloader.py b/modules/modelloader.py
index cb85ac4f..a70aa0e3 100644
--- a/modules/modelloader.py
+++ b/modules/modelloader.py
@@ -117,20 +117,6 @@ def move_files(src_path: str, dest_path: str, ext_filter: str = None):
pass
-builtin_upscaler_classes = []
-forbidden_upscaler_classes = set()
-
-
-def list_builtin_upscalers():
- builtin_upscaler_classes.clear()
- builtin_upscaler_classes.extend(Upscaler.__subclasses__())
-
-def forbid_loaded_nonbuiltin_upscalers():
- for cls in Upscaler.__subclasses__():
- if cls not in builtin_upscaler_classes:
- forbidden_upscaler_classes.add(cls)
-
-
def load_upscalers():
# We can only do this 'magic' method to dynamically load upscalers if they are referenced,
# so we'll try to import any _model.py files before looking in __subclasses__
@@ -146,10 +132,17 @@ def load_upscalers():
datas = []
commandline_options = vars(shared.cmd_opts)
- for cls in Upscaler.__subclasses__():
- if cls in forbidden_upscaler_classes:
- continue
+ # some of the upscaler classes will not go away after reloading their modules, and we'll end
+ # up with two copies of those classes. The newest copy will always be the last in the list,
+ # so we go from end to beginning and ignore duplicates
+ used_classes = {}
+ for cls in reversed(Upscaler.__subclasses__()):
+ classname = str(cls)
+ if classname not in used_classes:
+ used_classes[classname] = cls
+
+ for cls in reversed(used_classes.values()):
name = cls.__name__
cmd_name = f"{name.lower().replace('upscaler', '')}_models_path"
scaler = cls(commandline_options.get(cmd_name, None))
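
The deduplication in load_upscalers() can be shown on its own: after a module reload, the old class objects are still reachable through Upscaler.__subclasses__() next to their reloaded copies, and because the reloaded copy is registered last, walking the list in reverse and keying on the class's string name keeps only the newest one. A hedged, self-contained illustration (Base, Plugin, and newest_subclasses are made-up names, not part of the codebase):

    # stale subclasses linger after importlib.reload(); keep only the newest copy of each
    class Base:
        pass

    class Plugin(Base):      # imagine this module was reloaded, leaving two Plugin classes behind
        pass

    def newest_subclasses(base):
        used = {}
        for cls in reversed(base.__subclasses__()):   # newest registrations come last
            used.setdefault(str(cls), cls)            # first hit (i.e. newest) wins
        return list(reversed(list(used.values())))    # back to oldest-to-newest order

    print(newest_subclasses(Base))                    # -> [<class '__main__.Plugin'>]
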
diff --git a/modules/safe.py b/modules/safe.py
index e6c2f2c0..e1a67f73 100644
--- a/modules/safe.py
+++ b/modules/safe.py
@@ -40,7 +40,7 @@ class RestrictedUnpickler(pickle.Unpickler):
return getattr(collections, name)
if module == 'torch._utils' and name in ['_rebuild_tensor_v2', '_rebuild_parameter', '_rebuild_device_tensor_from_numpy']:
return getattr(torch._utils, name)
- if module == 'torch' and name in ['FloatStorage', 'HalfStorage', 'IntStorage', 'LongStorage', 'DoubleStorage', 'ByteStorage', 'float32']:
+ if module == 'torch' and name in ['FloatStorage', 'HalfStorage', 'IntStorage', 'LongStorage', 'DoubleStorage', 'ByteStorage', 'float32', 'BFloat16Storage']:
return getattr(torch, name)
if module == 'torch.nn.modules.container' and name in ['ParameterDict']:
return getattr(torch.nn.modules.container, name)
diff --git a/modules/shared.py b/modules/shared.py
index 4631965b..b3508883 100644
--- a/modules/shared.py
+++ b/modules/shared.py
@@ -667,14 +667,19 @@ def reload_gradio_theme(theme_name=None):
if not theme_name:
theme_name = opts.gradio_theme
+ default_theme_args = dict(
+ font=["Source Sans Pro", 'ui-sans-serif', 'system-ui', 'sans-serif'],
+ font_mono=['IBM Plex Mono', 'ui-monospace', 'Consolas', 'monospace'],
+ )
+
if theme_name == "Default":
- gradio_theme = gr.themes.Default()
+ gradio_theme = gr.themes.Default(**default_theme_args)
else:
try:
gradio_theme = gr.themes.ThemeClass.from_hub(theme_name)
except Exception as e:
errors.display(e, "changing gradio theme")
- gradio_theme = gr.themes.Default()
+ gradio_theme = gr.themes.Default(**default_theme_args)
diff --git a/modules/ui.py b/modules/ui.py
index d02f6e82..f07bcc41 100644
--- a/modules/ui.py
+++ b/modules/ui.py
@@ -1863,12 +1863,11 @@ def webpath(fn):
def javascript_html():
- script_js = os.path.join(script_path, "script.js")
- head = f'<script type="text/javascript" src="{webpath(script_js)}"></script>\n'
+ # Ensure localization is in `window` before scripts
+ head = f'<script type="text/javascript">{localization.localization_js(shared.opts.localization)}</script>\n'
- inline = f"{localization.localization_js(shared.opts.localization)};"
- if cmd_opts.theme is not None:
- inline += f"set_theme('{cmd_opts.theme}');"
+ script_js = os.path.join(script_path, "script.js")
+ head += f'<script type="text/javascript" src="{webpath(script_js)}"></script>\n'
for script in modules.scripts.list_scripts("javascript", ".js"):
head += f'<script type="text/javascript" src="{webpath(script.path)}"></script>\n'
@@ -1876,7 +1875,8 @@ def javascript_html():
for script in modules.scripts.list_scripts("javascript", ".mjs"):
head += f'<script type="module" src="{webpath(script.path)}"></script>\n'
- head += f'<script type="text/javascript">{inline}</script>\n'
+ if cmd_opts.theme:
+ head += f'<script type="text/javascript">set_theme(\"{cmd_opts.theme}\");</script>\n'
return head
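
The reshuffle in javascript_html() above is really about ordering: the localization table is inlined before script.js and the per-extension scripts so that window.localization already exists when they run, and the optional set_theme() call only makes sense after script.js has defined it. A reduced sketch of that ordering, using a hypothetical build_head() in place of the real function:

    def build_head(localization_js, script_srcs, theme=None):
        # 1. data first: inline the localization table so window.localization exists
        head = f'<script type="text/javascript">{localization_js}</script>\n'
        # 2. then the scripts that read it (script.js, extension scripts, ...)
        for src in script_srcs:
            head += f'<script type="text/javascript" src="{src}"></script>\n'
        # 3. theme last, once set_theme() has been defined by script.js
        if theme:
            head += f'<script type="text/javascript">set_theme("{theme}");</script>\n'
        return head
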
diff --git a/style.css b/style.css
index b823c7dd..31b2ed5a 100644
--- a/style.css
+++ b/style.css
@@ -1,3 +1,6 @@
+/* temporary fix to load default gradio font in frontend instead of backend */
+
+@import url('https://fonts.googleapis.com/css2?family=Source+Sans+Pro:wght@400;600&display=swap');
/* general gradio fixes */
diff --git a/webui.py b/webui.py
index 727ebd31..e8f0a63d 100644
--- a/webui.py
+++ b/webui.py
@@ -181,14 +181,11 @@ def initialize():
gfpgan.setup_model(cmd_opts.gfpgan_models_path)
startup_timer.record("setup gfpgan")
- modelloader.list_builtin_upscalers()
- startup_timer.record("list builtin upscalers")
-
modules.scripts.load_scripts()
startup_timer.record("load scripts")
modelloader.load_upscalers()
- #startup_timer.record("load upscalers") #Is this necessary? I don't know.
+ startup_timer.record("load upscalers") #Is this necessary? I don't know.
modules.sd_vae.refresh_vae_list()
startup_timer.record("refresh VAE")
@@ -388,7 +385,6 @@ def webui():
localization.list_localizations(cmd_opts.localizations_dir)
- modelloader.forbid_loaded_nonbuiltin_upscalers()
modules.scripts.reload_scripts()
startup_timer.record("load scripts")