summaryrefslogtreecommitdiff
path: root/searx/plugins
diff options
context:
space:
mode:
authorMarkus Heiser <markus.heiser@darmarIT.de>2023-06-03 06:00:15 +0200
committerGitHub <noreply@github.com>2023-06-03 06:00:15 +0200
commit80aaef6c95b572df1fa3a8c30b7fdc1538d7a306 (patch)
treea023de5925099e69b6b5de850cab1e0ddc330f39 /searx/plugins
parent1a1ab34d9dba7368b146bc7402e8f54bafea98bb (diff)
parent80af38d37b21dc6e5edbf27bd22310db42a6f923 (diff)
Merge pull request #2357 / limiter -> botdetection
The monolithic implementation of the limiter was divided into methods and implemented in the Python package searx.botdetection. Detailed documentation on the methods has been added. The methods are divided into two groups: 1. Probe HTTP headers - Method http_accept - Method http_accept_encoding - Method http_accept_language - Method http_connection - Method http_user_agent 2. Rate limit: - Method ip_limit - Method link_token (new) The (reduced) implementation of the limiter is now in the module searx.botdetection.limiter. The first group was transferred unchanged to this module. The ip_limit contains the sliding windows implemented by the limiter so far. This merge also fixes some long outstanding issues: - limiter does not evaluate the Accept-Language header correctly [1] - limiter needs an IPv6 prefix to block networks instead of IPs [2] Without additional configuration the limiter works as before (apart from the bugfixes). For the commissioning of additional methods (link_token), a configuration must be made in an additional configuration file. Without this configuration, the limiter runs as before (zero configuration). The ip_limit method implements the sliding windows of the vanilla limiter; additionally the link_token method can be used in this method. The link_token method can be used to investigate whether a request is suspicious. To activate the link_token method in the ip_limit method add the following to your /etc/searxng/limiter.toml:: [botdetection.ip_limit] link_token = true [1] https://github.com/searxng/searxng/issues/2455 [2] https://github.com/searxng/searxng/issues/2477
Diffstat (limited to 'searx/plugins')
-rw-r--r--searx/plugins/limiter.py103
-rw-r--r--searx/plugins/self_info.py31
2 files changed, 15 insertions, 119 deletions
diff --git a/searx/plugins/limiter.py b/searx/plugins/limiter.py
index 46c82f588..a8beb5e88 100644
--- a/searx/plugins/limiter.py
+++ b/searx/plugins/limiter.py
@@ -1,119 +1,32 @@
# SPDX-License-Identifier: AGPL-3.0-or-later
# lint: pylint
# pyright: basic
-"""Some bot protection / rate limitation
+"""see :ref:`limiter src`"""
-To monitor rate limits and protect privacy the IP addresses are getting stored
-with a hash so the limiter plugin knows who to block. A redis database is
-needed to store the hash values.
-
-Enable the plugin in ``settings.yml``:
-
-- ``server.limiter: true``
-- ``redis.url: ...`` check the value, see :ref:`settings redis`
-"""
-
-import re
-from flask import request
+import flask
from searx import redisdb
from searx.plugins import logger
-from searx.redislib import incr_sliding_window
+from searx.botdetection import limiter
name = "Request limiter"
description = "Limit the number of request"
default_on = False
preference_section = 'service'
-logger = logger.getChild('limiter')
-
-block_user_agent = re.compile(
- r'('
- + r'unknown'
- + r'|[Cc][Uu][Rr][Ll]|[wW]get|Scrapy|splash|JavaFX|FeedFetcher|python-requests|Go-http-client|Java|Jakarta|okhttp'
- + r'|HttpClient|Jersey|Python|libwww-perl|Ruby|SynHttpClient|UniversalFeedParser|Googlebot|GoogleImageProxy'
- + r'|bingbot|Baiduspider|yacybot|YandexMobileBot|YandexBot|Yahoo! Slurp|MJ12bot|AhrefsBot|archive.org_bot|msnbot'
- + r'|MJ12bot|SeznamBot|linkdexbot|Netvibes|SMTBot|zgrab|James BOT|Sogou|Abonti|Pixray|Spinn3r|SemrushBot|Exabot'
- + r'|ZmEu|BLEXBot|bitlybot'
- # unmaintained Farside instances
- + r'|'
- + re.escape(r'Mozilla/5.0 (compatible; Farside/0.1.0; +https://farside.link)')
- + '|.*PetalBot.*'
- + r')'
-)
-
-
-def is_accepted_request() -> bool:
- # pylint: disable=too-many-return-statements
- redis_client = redisdb.client()
- user_agent = request.headers.get('User-Agent', 'unknown')
- x_forwarded_for = request.headers.get('X-Forwarded-For', '')
-
- if request.path == '/healthz':
- return True
-
- if block_user_agent.match(user_agent):
- logger.debug("BLOCK %s: %s --> detected User-Agent: %s" % (x_forwarded_for, request.path, user_agent))
- return False
-
- if request.path == '/search':
- c_burst = incr_sliding_window(redis_client, 'IP limit, burst' + x_forwarded_for, 20)
- c_10min = incr_sliding_window(redis_client, 'IP limit, 10 minutes' + x_forwarded_for, 600)
- if c_burst > 15 or c_10min > 150:
- logger.debug("BLOCK %s: to many request", x_forwarded_for)
- return False
-
- if len(request.headers.get('Accept-Language', '').strip()) == '':
- logger.debug("BLOCK %s: missing Accept-Language", x_forwarded_for)
- return False
-
- if request.headers.get('Connection') == 'close':
- logger.debug("BLOCK %s: got Connection=close", x_forwarded_for)
- return False
-
- accept_encoding_list = [l.strip() for l in request.headers.get('Accept-Encoding', '').split(',')]
- if 'gzip' not in accept_encoding_list and 'deflate' not in accept_encoding_list:
- logger.debug("BLOCK %s: suspicious Accept-Encoding", x_forwarded_for)
- return False
-
- if 'text/html' not in request.accept_mimetypes:
- logger.debug("BLOCK %s: Accept-Encoding misses text/html", x_forwarded_for)
- return False
-
- if request.args.get('format', 'html') != 'html':
- c = incr_sliding_window(redis_client, 'API limit' + x_forwarded_for, 3600)
- if c > 4:
- logger.debug("BLOCK %s: API limit exceeded", x_forwarded_for)
- return False
-
- logger.debug(
- "OK %s: '%s'" % (x_forwarded_for, request.path)
- + " || form: %s" % request.form
- + " || Accept: %s" % request.headers.get('Accept', '')
- + " || Accept-Language: %s" % request.headers.get('Accept-Language', '')
- + " || Accept-Encoding: %s" % request.headers.get('Accept-Encoding', '')
- + " || Content-Type: %s" % request.headers.get('Content-Type', '')
- + " || Content-Length: %s" % request.headers.get('Content-Length', '')
- + " || Connection: %s" % request.headers.get('Connection', '')
- + " || User-Agent: %s" % user_agent
- )
-
- return True
+logger = logger.getChild('limiter')
def pre_request():
- if not is_accepted_request():
- return 'Too Many Requests', 429
- return None
+ """See :ref:`flask.Flask.before_request`"""
+ return limiter.filter_request(flask.request)
-def init(app, settings):
+def init(app: flask.Flask, settings) -> bool:
if not settings['server']['limiter']:
return False
-
if not redisdb.client():
- logger.error("The limiter requires Redis") # pylint: disable=undefined-variable
+ logger.error("The limiter requires Redis")
return False
-
app.before_request(pre_request)
return True
diff --git a/searx/plugins/self_info.py b/searx/plugins/self_info.py
index fbe4518b5..8079ee0d4 100644
--- a/searx/plugins/self_info.py
+++ b/searx/plugins/self_info.py
@@ -1,21 +1,11 @@
-'''
-searx is free software: you can redistribute it and/or modify
-it under the terms of the GNU Affero General Public License as published by
-the Free Software Foundation, either version 3 of the License, or
-(at your option) any later version.
+# SPDX-License-Identifier: AGPL-3.0-or-later
+# lint: pylint
+# pylint: disable=missing-module-docstring,invalid-name
-searx is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU Affero General Public License for more details.
-
-You should have received a copy of the GNU Affero General Public License
-along with searx. If not, see < http://www.gnu.org/licenses/ >.
-
-(C) 2015 by Adam Tauber, <asciimoo@gmail.com>
-'''
-from flask_babel import gettext
import re
+from flask_babel import gettext
+
+from searx.botdetection._helpers import get_real_ip
name = gettext('Self Information')
description = gettext('Displays your IP if the query is "ip" and your user agent if the query contains "user agent".')
@@ -28,18 +18,11 @@ query_examples = ''
p = re.compile('.*user[ -]agent.*', re.IGNORECASE)
-# attach callback to the post search hook
-# request: flask request object
-# ctx: the whole local context of the pre search hook
def post_search(request, search):
if search.search_query.pageno > 1:
return True
if search.search_query.query == 'ip':
- x_forwarded_for = request.headers.getlist("X-Forwarded-For")
- if x_forwarded_for:
- ip = x_forwarded_for[0]
- else:
- ip = request.remote_addr
+ ip = get_real_ip(request)
search.result_container.answers['ip'] = {'answer': ip}
elif p.match(search.search_query.query):
ua = request.user_agent