summaryrefslogtreecommitdiff
path: root/searx/cache.py
diff options
context:
space:
mode:
authorMarkus Heiser <markus.heiser@darmarit.de>2025-09-24 19:34:03 +0200
committerMarkus Heiser <markus.heiser@darmarIT.de>2025-09-28 07:32:41 +0200
commit18a58943ccaa324b39232fa503462eb39f407399 (patch)
tree04b355ad60c205b28ea1083ea97abdfabea857ff /searx/cache.py
parent4f4de3fc8743b3732834c5ffaa4a3e264d200e6c (diff)
[mod] ExpireCacheSQLite - implement .setmany() for bulk loading
Suggested-by: Ivan G <igabaldon@inetol.net> [1]
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>

[1] https://github.com/searxng/searxng/issues/5223#issuecomment-3328597147
Diffstat (limited to 'searx/cache.py')
-rw-r--r--searx/cache.py95
1 file changed, 79 insertions(+), 16 deletions(-)
diff --git a/searx/cache.py b/searx/cache.py
index 0de482363..ea58c9328 100644
--- a/searx/cache.py
+++ b/searx/cache.py
@@ -29,6 +29,8 @@ from searx import get_setting
log = logger.getChild("cache")
+CacheRowType: typing.TypeAlias = tuple[str, typing.Any, int | None]
+
class ExpireCacheCfg(msgspec.Struct): # pylint: disable=too-few-public-methods
"""Configuration of a :py:obj:`ExpireCache` cache."""
@@ -81,7 +83,7 @@ class ExpireCacheCfg(msgspec.Struct): # pylint: disable=too-few-public-methods
class ExpireCacheStats:
"""Dataclass which provides information on the status of the cache."""
- cached_items: dict[str, list[tuple[str, typing.Any, int]]]
+ cached_items: dict[str, list[CacheRowType]]
"""Values in the cache mapped by context name.
.. code: python
@@ -108,7 +110,9 @@ class ExpireCacheStats:
continue
for key, value, expire in kv_list:
- valid_until = datetime.datetime.fromtimestamp(expire).strftime("%Y-%m-%d %H:%M:%S")
+ valid_until = ""
+ if expire:
+ valid_until = datetime.datetime.fromtimestamp(expire).strftime("%Y-%m-%d %H:%M:%S")
c_kv += 1
lines.append(f"[{ctx_name:20s}] {valid_until} {key:12}" f" --> ({type(value).__name__}) {value} ")
@@ -339,38 +343,97 @@ class ExpireCacheSQLite(sqlitedb.SQLiteAppl, ExpireCache):
exists, it will be created (on demand) by :py:obj:`self.create_table
<ExpireCacheSQLite.create_table>`.
"""
- table = ctx
- self.maintenance()
+ c, err_msg_list = self._setmany([(key, value, expire)], ctx=ctx)
+ if c:
+ log.debug("%s -- %s: key '%s' updated or inserted (%s errors)", self.cfg.name, ctx, key, len(err_msg_list))
+ else:
+ for msg in err_msg_list:
+ log.error("%s -- %s: %s", self.cfg.name, ctx, msg)
+ return bool(c)
+
+ def setmany(
+ self,
+ opt_list: list[CacheRowType],
+ ctx: str | None = None,
+ ) -> int:
+        """Efficient bulk load of the cache from a list of options. The list
+ contains tuples with the arguments described in
+ :py:obj:`ExpireCacheSQLite.set`."""
+ _start = time.time()
+ c, err_msg_list = self._setmany(opt_list=opt_list, ctx=ctx)
+ _end = time.time()
+ for msg in err_msg_list:
+ log.error("%s -- %s: %s", self.cfg.name, ctx, msg)
+
+ log.debug(
+ "%s -- %s: %s/%s key/value pairs updated or inserted in %s sec (%s errors)",
+ self.cfg.name,
+ ctx,
+ c,
+ len(opt_list),
+ _end - _start,
+ len(err_msg_list),
+ )
+ return c
- value = self.serialize(value=value)
- if len(value) > self.cfg.MAX_VALUE_LEN:
- log.warning("ExpireCache.set(): %s.key='%s' - value too big to cache (len: %s) ", table, value, len(value))
- return False
+ def _setmany(
+ self,
+ opt_list: list[CacheRowType],
+ ctx: str | None = None,
+ ) -> tuple[int, list[str]]:
- if not expire:
- expire = self.cfg.MAXHOLD_TIME
- expire = int(time.time()) + expire
+ table = ctx
+ self.maintenance()
table_name = table
if not table_name:
table_name = self.normalize_name(self.cfg.name)
self.create_table(table_name)
- sql = (
+ sql_str = (
f"INSERT INTO {table_name} (key, value, expire) VALUES (?, ?, ?)"
f" ON CONFLICT DO "
f"UPDATE SET value=?, expire=?"
)
+ sql_rows: list[
+ tuple[
+ str, # key
+ typing.Any, # value
+ int | None, # expire
+ typing.Any, # value
+ int | None, # expire
+ ]
+ ] = []
+
+ err_msg_list: list[str] = []
+ for key, _val, expire in opt_list:
+
+ value: bytes = self.serialize(value=_val)
+ if len(value) > self.cfg.MAX_VALUE_LEN:
+ err_msg_list.append(f"{table}.key='{key}' - serialized value too big to cache (len: {len(value)}) ")
+ continue
+
+ if not expire:
+ expire = self.cfg.MAXHOLD_TIME
+ expire = int(time.time()) + expire
+
+ # positional arguments of the INSERT INTO statement
+ sql_args = (key, value, expire, value, expire)
+ sql_rows.append(sql_args)
+
+ if not sql_rows:
+ return 0, err_msg_list
+
if table:
with self.DB:
- self.DB.execute(sql, (key, value, expire, value, expire))
+ self.DB.executemany(sql_str, sql_rows)
else:
with self.connect() as conn:
- conn.execute(sql, (key, value, expire, value, expire))
+ conn.executemany(sql_str, sql_rows)
conn.close()
- return True
+ return len(sql_rows), err_msg_list
def get(self, key: str, default: typing.Any = None, ctx: str | None = None) -> typing.Any:
"""Get value of ``key`` from table given by argument ``ctx``. If
@@ -410,7 +473,7 @@ class ExpireCacheSQLite(sqlitedb.SQLiteAppl, ExpireCache):
yield row[0], self.deserialize(row[1])
def state(self) -> ExpireCacheStats:
- cached_items: dict[str, list[tuple[str, typing.Any, int]]] = {}
+ cached_items: dict[str, list[CacheRowType]] = {}
for table in self.table_names:
cached_items[table] = []
for row in self.DB.execute(f"SELECT key, value, expire FROM {table}"):