From 510e5fc8c60dd6278d0bc52effc23257c717dc1b Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Sat, 15 Jul 2023 09:20:43 +0300
Subject: cache git extension repo information

---
 modules/cache.py | 96 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 96 insertions(+)
 create mode 100644 modules/cache.py

(limited to 'modules/cache.py')

diff --git a/modules/cache.py b/modules/cache.py
new file mode 100644
index 00000000..4c2db604
--- /dev/null
+++ b/modules/cache.py
@@ -0,0 +1,96 @@
+import json
+import os.path
+
+import filelock
+
+from modules.paths import data_path, script_path
+
+cache_filename = os.path.join(data_path, "cache.json")
+cache_data = None
+
+
+def dump_cache():
+    """
+    Saves all cache data to a file.
+    """
+
+    with filelock.FileLock(f"{cache_filename}.lock"):
+        with open(cache_filename, "w", encoding="utf8") as file:
+            json.dump(cache_data, file, indent=4)
+
+
+def cache(subsection):
+    """
+    Retrieves or initializes a cache for a specific subsection.
+
+    Parameters:
+        subsection (str): The subsection identifier for the cache.
+
+    Returns:
+        dict: The cache data for the specified subsection.
+    """
+
+    global cache_data
+
+    if cache_data is None:
+        with filelock.FileLock(f"{cache_filename}.lock"):
+            if not os.path.isfile(cache_filename):
+                cache_data = {}
+            else:
+                try:
+                    with open(cache_filename, "r", encoding="utf8") as file:
+                        cache_data = json.load(file)
+                except Exception:
+                    os.replace(cache_filename, os.path.join(script_path, "tmp", "cache.json"))
+                    print('[ERROR] issue occurred while trying to read cache.json, move current cache to tmp/cache.json and create new cache')
+                    cache_data = {}
+
+    s = cache_data.get(subsection, {})
+    cache_data[subsection] = s
+
+    return s
+
+
+def cached_data_for_file(subsection, title, filename, func):
+    """
+    Retrieves or generates data for a specific file, using a caching mechanism.
+
+    Parameters:
+        subsection (str): The subsection of the cache to use.
+        title (str): The title of the data entry in the subsection of the cache.
+        filename (str): The path to the file to be checked for modifications.
+        func (callable): A function that generates the data if it is not available in the cache.
+
+    Returns:
+        dict or None: The cached or generated data, or None if data generation fails.
+
+    The `cached_data_for_file` function implements a caching mechanism for data stored in files.
+    It checks if the data associated with the given `title` is present in the cache and compares the
+    modification time of the file with the cached modification time. If the file has been modified,
+    the cache is considered invalid and the data is regenerated using the provided `func`.
+    Otherwise, the cached data is returned.
+
+    If the data generation fails, None is returned to indicate the failure. Otherwise, the generated
+    or cached data is returned as a dictionary.
+    """
+
+    existing_cache = cache(subsection)
+    ondisk_mtime = os.path.getmtime(filename)
+
+    entry = existing_cache.get(title)
+    if entry:
+        cached_mtime = existing_cache[title].get("mtime", 0)
+        if ondisk_mtime > cached_mtime:
+            entry = None
+
+    if not entry:
+        entry = func()
+        if entry is None:
+            return None
+
+        entry['mtime'] = ondisk_mtime
+        existing_cache[title] = entry
+
+        dump_cache()
+
+    return entry
--
cgit v1.2.3


From 47d9dd0240872dc70fd26bc1bf309f49fe17c104 Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Sun, 16 Jul 2023 09:25:32 +0300
Subject: speedup extra networks listing

---
 modules/cache.py | 27 ++++++++++++++-------------
 1 file changed, 14 insertions(+), 13 deletions(-)

(limited to 'modules/cache.py')

diff --git a/modules/cache.py b/modules/cache.py
index 4c2db604..07180602 100644
--- a/modules/cache.py
+++ b/modules/cache.py
@@ -1,12 +1,12 @@
 import json
 import os.path
-
-import filelock
+import threading
 
 from modules.paths import data_path, script_path
 
 cache_filename = os.path.join(data_path, "cache.json")
 cache_data = None
+cache_lock = threading.Lock()
 
 
 def dump_cache():
@@ -14,7 +14,7 @@ def dump_cache():
     Saves all cache data to a file.
     """
 
-    with filelock.FileLock(f"{cache_filename}.lock"):
+    with cache_lock:
         with open(cache_filename, "w", encoding="utf8") as file:
             json.dump(cache_data, file, indent=4)
 
@@ -33,17 +33,18 @@ def cache(subsection):
     global cache_data
 
     if cache_data is None:
-        with filelock.FileLock(f"{cache_filename}.lock"):
-            if not os.path.isfile(cache_filename):
-                cache_data = {}
-            else:
-                try:
-                    with open(cache_filename, "r", encoding="utf8") as file:
-                        cache_data = json.load(file)
-                except Exception:
-                    os.replace(cache_filename, os.path.join(script_path, "tmp", "cache.json"))
-                    print('[ERROR] issue occurred while trying to read cache.json, move current cache to tmp/cache.json and create new cache')
+        with cache_lock:
+            if cache_data is None:
+                if not os.path.isfile(cache_filename):
                     cache_data = {}
+                else:
+                    try:
+                        with open(cache_filename, "r", encoding="utf8") as file:
+                            cache_data = json.load(file)
+                    except Exception:
+                        os.replace(cache_filename, os.path.join(script_path, "tmp", "cache.json"))
+                        print('[ERROR] issue occurred while trying to read cache.json, move current cache to tmp/cache.json and create new cache')
+                        cache_data = {}
 
     s = cache_data.get(subsection, {})
     cache_data[subsection] = s
--
cgit v1.2.3


From ccd97886da1f659472cdca3de8731f59a70bbc28 Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Sun, 16 Jul 2023 09:49:22 +0300
Subject: fix bogus metadata for extra networks appearing out of cache
 fix description editing for checkpoint not immediately appearing on cards

---
 modules/cache.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

(limited to 'modules/cache.py')

diff --git a/modules/cache.py b/modules/cache.py
index 07180602..28d42a8c 100644
--- a/modules/cache.py
+++ b/modules/cache.py
@@ -80,18 +80,18 @@ def cached_data_for_file(subsection, title, filename, func):
 
     entry = existing_cache.get(title)
     if entry:
-        cached_mtime = existing_cache[title].get("mtime", 0)
+        cached_mtime = entry.get("mtime", 0)
         if ondisk_mtime > cached_mtime:
             entry = None
 
     if not entry:
-        entry = func()
-        if entry is None:
+        value = func()
+        if value is None:
             return None
 
-        entry['mtime'] = ondisk_mtime
+        entry = {'mtime': ondisk_mtime, 'value': value}
         existing_cache[title] = entry
 
         dump_cache()
 
-    return entry
+    return entry['value']
--
cgit v1.2.3


From 67ea4eabc3e78c4b496a9fcd21aca95fd5ef7027 Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Sun, 16 Jul 2023 13:46:33 +0300
Subject: fix cache loading wrong entries from old cache files

---
 modules/cache.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

(limited to 'modules/cache.py')

diff --git a/modules/cache.py b/modules/cache.py
index 28d42a8c..ddf44637 100644
--- a/modules/cache.py
+++ b/modules/cache.py
@@ -84,7 +84,7 @@ def cached_data_for_file(subsection, title, filename, func):
         if ondisk_mtime > cached_mtime:
             entry = None
 
-    if not entry:
+    if not entry or 'value' not in entry:
         value = func()
         if value is None:
             return None
--
cgit v1.2.3


From 9251ae3bc78e465058c286e86f3c26cb6f819a31 Mon Sep 17 00:00:00 2001
From: AUTOMATIC1111 <16777216c@gmail.com>
Date: Mon, 17 Jul 2023 09:29:36 +0300
Subject: delay writing cache to prevent writing the same thing over and over

---
 modules/cache.py | 29 ++++++++++++++++++++++++++---
 1 file changed, 26 insertions(+), 3 deletions(-)

(limited to 'modules/cache.py')

diff --git a/modules/cache.py b/modules/cache.py
index ddf44637..71fe6302 100644
--- a/modules/cache.py
+++ b/modules/cache.py
@@ -1,6 +1,7 @@
 import json
 import os.path
 import threading
+import time
 
 from modules.paths import data_path, script_path
 
@@ -8,15 +9,37 @@ cache_filename = os.path.join(data_path, "cache.json")
 cache_data = None
 cache_lock = threading.Lock()
 
+dump_cache_after = None
+dump_cache_thread = None
+
 
 def dump_cache():
     """
-    Saves all cache data to a file.
+    Marks cache for writing to disk. 5 seconds after no one else flags the cache for writing, it is written.
     """
 
+    global dump_cache_after
+    global dump_cache_thread
+
+    def thread_func():
+        global dump_cache_after
+        global dump_cache_thread
+
+        while dump_cache_after is not None and time.time() < dump_cache_after:
+            time.sleep(1)
+
+        with cache_lock:
+            with open(cache_filename, "w", encoding="utf8") as file:
+                json.dump(cache_data, file, indent=4)
+
+            dump_cache_after = None
+            dump_cache_thread = None
+
     with cache_lock:
-        with open(cache_filename, "w", encoding="utf8") as file:
-            json.dump(cache_data, file, indent=4)
+        dump_cache_after = time.time() + 5
+        if dump_cache_thread is None:
+            dump_cache_thread = threading.Thread(name='cache-writer', target=thread_func)
+            dump_cache_thread.start()
 
 
 def cache(subsection):
--
cgit v1.2.3
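Taken together, these commits leave modules/cache.py with an mtime-keyed lookup helper and a debounced cache.json writer. Below is a minimal usage sketch of cached_data_for_file() as it stands after the last patch; the caller, the read_metadata() helper, the "lora-metadata" subsection name, and the model path are hypothetical illustrations, not code from the repository.

import json
import os

from modules import cache  # assumes the webui's `modules` package is importable


def read_metadata(path):
    # Hypothetical "expensive" step: read a sidecar .json file next to the model.
    metadata_file = os.path.splitext(path)[0] + ".json"
    if not os.path.isfile(metadata_file):
        return {}
    with open(metadata_file, "r", encoding="utf8") as file:
        return json.load(file)


model_path = "models/Lora/example.safetensors"  # hypothetical file

# The first call runs read_metadata() and stores {"mtime": ..., "value": {...}}
# in cache.json under the "lora-metadata" subsection, keyed by model_path; later
# calls return the cached value until the file's mtime moves forward. With the
# last patch applied, the actual write to cache.json happens roughly 5 seconds
# later on the 'cache-writer' thread.
metadata = cache.cached_data_for_file(
    "lora-metadata",                    # subsection inside cache.json (hypothetical name)
    model_path,                         # title: key of this entry within the subsection
    model_path,                         # filename whose mtime invalidates the entry
    lambda: read_metadata(model_path),  # generator used on a cache miss
)
print(metadata)

Note the shape introduced by commit ccd97886: whatever func() returns is wrapped under a 'value' key, so callers get back exactly what func() produced while the mtime bookkeeping stays internal to the cache module.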