aboutsummaryrefslogtreecommitdiff
path: root/webui.py
diff options
context:
space:
mode:
authorAUTOMATIC <16777216c@gmail.com>2023-06-01 08:42:50 +0300
committerAUTOMATIC <16777216c@gmail.com>2023-06-01 08:42:50 +0300
commitb3390a984081950c626070889580e01727689921 (patch)
treef1fb3352f91d70034d54a99c220f1ca594d8b3dc /webui.py
parent0cc05fc492a9360d3b2f1b3f64c7d74f9041f74e (diff)
parent36888092afa82ee248bc947229f813b453629317 (diff)
Merge branch 'dev' into startup-profile
Diffstat (limited to 'webui.py')
-rw-r--r--webui.py44
1 file changed, 28 insertions, 16 deletions
diff --git a/webui.py b/webui.py
index 940966eb..828259b8 100644
--- a/webui.py
+++ b/webui.py
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
import os
import sys
import time
@@ -18,7 +20,7 @@ import logging
logging.getLogger("xformers").addFilter(lambda record: 'A matching Triton is not available' not in record.getMessage())
-from modules import paths, timer, import_hook, errors # noqa: F401
+from modules import paths, timer, import_hook, errors, devices # noqa: F401
startup_timer = timer.startup_timer
@@ -56,6 +58,7 @@ import modules.sd_hijack
import modules.sd_hijack_optimizations
import modules.sd_models
import modules.sd_vae
+import modules.sd_unet
import modules.txt2img
import modules.script_callbacks
import modules.textual_inversion.textual_inversion
@@ -132,7 +135,7 @@ there are reports of issues with training tab on the latest version.
Use --skip-version-check commandline argument to disable this check.
""".strip())
- expected_xformers_version = "0.0.17"
+ expected_xformers_version = "0.0.20"
if shared.xformers_available:
import xformers
@@ -289,9 +292,25 @@ def initialize_rest(*, reload_script_modules=False):
modules.sd_hijack.list_optimizers()
startup_timer.record("scripts list_optimizers")
- # load model in parallel to other startup stuff
- # (when reloading, this does nothing)
- Thread(target=lambda: shared.sd_model).start()
+ modules.sd_unet.list_unets()
+ startup_timer.record("scripts list_unets")
+
+ def load_model():
+ """
+ Accesses shared.sd_model property to load model.
+ After it's available, if it has been loaded before this access by some extension,
+    its optimization may be None because the list of optimizers has not been filled
+ by that time, so we apply optimization again.
+ """
+
+ shared.sd_model # noqa: B018
+
+ if modules.sd_hijack.current_optimizer is None:
+ modules.sd_hijack.apply_optimizations()
+
+ Thread(target=load_model).start()
+
+ Thread(target=devices.first_time_calculation).start()
shared.reload_hypernetworks()
startup_timer.record("reload hypernetworks")
@@ -368,17 +387,6 @@ def webui():
gradio_auth_creds = list(get_gradio_auth_creds()) or None
- # this restores the missing /docs endpoint
- if launch_api and not hasattr(FastAPI, 'original_setup'):
- # TODO: replace this with `launch(app_kwargs=...)` if https://github.com/gradio-app/gradio/pull/4282 gets merged
- def fastapi_setup(self):
- self.docs_url = "/docs"
- self.redoc_url = "/redoc"
- self.original_setup()
-
- FastAPI.original_setup = FastAPI.setup
- FastAPI.setup = fastapi_setup
-
app, local_url, share_url = shared.demo.launch(
share=cmd_opts.share,
server_name=server_name,
@@ -391,6 +399,10 @@ def webui():
inbrowser=cmd_opts.autolaunch,
prevent_thread_lock=True,
allowed_paths=cmd_opts.gradio_allowed_path,
+ app_kwargs={
+ "docs_url": "/docs",
+ "redoc_url": "/redoc",
+ },
)
if cmd_opts.add_stop_route:
app.add_route("/_stop", stop_route, methods=["POST"])