author    AUTOMATIC1111 <16777216c@gmail.com>  2023-07-16 09:25:32 +0300
committer AUTOMATIC1111 <16777216c@gmail.com>  2023-07-16 09:25:32 +0300
commit    47d9dd0240872dc70fd26bc1bf309f49fe17c104 (patch)
tree      846cadd8a62bfca332e714d5840fb98fd6492da2 /modules/cache.py
parent    a1d6ada69ac686a628e79b61b8f86d01592a7209 (diff)
speedup extra networks listing
Diffstat (limited to 'modules/cache.py')
-rw-r--r--  modules/cache.py  27
1 file changed, 14 insertions, 13 deletions
diff --git a/modules/cache.py b/modules/cache.py
index 4c2db604..07180602 100644
--- a/modules/cache.py
+++ b/modules/cache.py
@@ -1,12 +1,12 @@
 import json
 import os.path
-
-import filelock
+import threading
 
 from modules.paths import data_path, script_path
 
 cache_filename = os.path.join(data_path, "cache.json")
 cache_data = None
+cache_lock = threading.Lock()
 
 
 def dump_cache():
@@ -14,7 +14,7 @@ def dump_cache():
     Saves all cache data to a file.
     """
 
-    with filelock.FileLock(f"{cache_filename}.lock"):
+    with cache_lock:
         with open(cache_filename, "w", encoding="utf8") as file:
             json.dump(cache_data, file, indent=4)
 
@@ -33,17 +33,18 @@ def cache(subsection):
     global cache_data
 
     if cache_data is None:
-        with filelock.FileLock(f"{cache_filename}.lock"):
-            if not os.path.isfile(cache_filename):
-                cache_data = {}
-            else:
-                try:
-                    with open(cache_filename, "r", encoding="utf8") as file:
-                        cache_data = json.load(file)
-                except Exception:
-                    os.replace(cache_filename, os.path.join(script_path, "tmp", "cache.json"))
-                    print('[ERROR] issue occurred while trying to read cache.json, move current cache to tmp/cache.json and create new cache')
+        with cache_lock:
+            if cache_data is None:
+                if not os.path.isfile(cache_filename):
                     cache_data = {}
+                else:
+                    try:
+                        with open(cache_filename, "r", encoding="utf8") as file:
+                            cache_data = json.load(file)
+                    except Exception:
+                        os.replace(cache_filename, os.path.join(script_path, "tmp", "cache.json"))
+                        print('[ERROR] issue occurred while trying to read cache.json, move current cache to tmp/cache.json and create new cache')
+                        cache_data = {}
 
     s = cache_data.get(subsection, {})
     cache_data[subsection] = s
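
The last hunk replaces the cross-process filelock.FileLock with an in-process threading.Lock and adds a second "cache_data is None" test inside the lock, the double-checked locking idiom: once the cache is loaded, callers skip the lock entirely, and when several threads race into cache() before the first load, only one of them ends up parsing cache.json. Below is a minimal, self-contained sketch of that idiom; the names _data, _lock and load_from_disk are hypothetical stand-ins for the module's cache_data, cache_lock and the json.load of cache.json.

import threading

_data = None
_lock = threading.Lock()


def load_from_disk():
    # Hypothetical placeholder for the expensive one-time read
    # (the json.load of cache.json in the real module).
    return {}


def get_data():
    global _data

    if _data is None:          # fast path: no locking once initialized
        with _lock:
            if _data is None:  # re-check: another thread may have loaded the
                               # data while we were waiting for the lock
                _data = load_from_disk()

    return _data

Without the inner check, every thread that saw _data unset before acquiring the lock would redo the load once the lock became available. Note the trade-off in the commit itself: threading.Lock only serializes threads within a single process, whereas the removed filelock also guarded cache.json against concurrent access from separate processes, presumably at the cost of extra lock-file filesystem overhead.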