diff options
| author | Markus Heiser <markus.heiser@darmarit.de> | 2025-05-24 17:53:57 +0200 |
|---|---|---|
| committer | Bnyro <bnyro@tutanota.com> | 2025-06-23 22:12:18 +0200 |
| commit | 2dd4f7b9721b201dc51cb2fb06d32cb1cb833458 (patch) | |
| tree | fe74795a1a6fa06bf5761083a1e9c57428be1b3c /searx/plugins | |
| parent | 58c10f758b09affda1a15c105e7ce86f3a3bdd3a (diff) | |
[mod] data: implement a simple tracker URL (SQL) database
On demand, the tracker data is loaded directly into the cache, so that the
maintenance of this data via PRs is no longer necessary.
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
Diffstat (limited to 'searx/plugins')
| -rw-r--r-- | searx/plugins/tracker_url_remover.py | 50 |
1 file changed, 8 insertions(+), 42 deletions(-)
diff --git a/searx/plugins/tracker_url_remover.py b/searx/plugins/tracker_url_remover.py index efc593775..b7e8e25f3 100644 --- a/searx/plugins/tracker_url_remover.py +++ b/searx/plugins/tracker_url_remover.py @@ -2,17 +2,15 @@ # pylint: disable=missing-module-docstring, unused-argument from __future__ import annotations -import typing -import re -from urllib.parse import urlparse, urlunparse, parse_qsl, urlencode +import logging +import typing from flask_babel import gettext from searx.data import TRACKER_PATTERNS from . import Plugin, PluginInfo -from ._core import log if typing.TYPE_CHECKING: from searx.search import SearchWithPlugins @@ -21,13 +19,16 @@ if typing.TYPE_CHECKING: from searx.plugins import PluginCfg +log = logging.getLogger("searx.plugins.tracker_url_remover") + + class SXNGPlugin(Plugin): """Remove trackers arguments from the returned URL.""" id = "tracker_url_remover" - log = log.getChild(id) def __init__(self, plg_cfg: "PluginCfg") -> None: + super().__init__(plg_cfg) self.info = PluginInfo( id=self.id, @@ -47,42 +48,7 @@ class SXNGPlugin(Plugin): If URL should be modified, the returned string is the new URL to use.""" if not url_src: - cls.log.debug("missing a URL in field %s", field_name) + log.debug("missing a URL in field %s", field_name) return True - new_url = url_src - parsed_new_url = urlparse(url=new_url) - - for rule in TRACKER_PATTERNS: - - if not re.match(rule["urlPattern"], new_url): - # no match / ignore pattern - continue - - in_exceptions = False - for exception in rule["exceptions"]: - if re.match(exception, new_url): - in_exceptions = True - break - if in_exceptions: - # pattern is in the list of exceptions / ignore pattern - # hint: we can't break the outer pattern loop since we have - # overlapping urlPattern like ".*" - continue - - # remove tracker arguments from the url-query part - query_args: list[tuple[str, str]] = list(parse_qsl(parsed_new_url.query)) - - for name, val in query_args.copy(): - for reg in 
rule["trackerParams"]: - if re.match(reg, name): - cls.log.debug("%s remove tracker arg: %s='%s'", parsed_new_url.netloc, name, val) - query_args.remove((name, val)) - - parsed_new_url = parsed_new_url._replace(query=urlencode(query_args)) - new_url = urlunparse(parsed_new_url) - - if new_url != url_src: - return new_url - - return True + return TRACKER_PATTERNS.clean_url(url=url_src) |