summary refs log tree commit diff
path: root/searx/engines
diff options
context:
space:
mode:
author    Markus Heiser <markus.heiser@darmarIT.de>  2025-07-26 06:22:46 +0200
committer GitHub <noreply@github.com>                2025-07-26 06:22:46 +0200
commit    649a8dd577b7db5549a34af6f667daf1b61ffb6b (patch)
tree      bae5c18a77fa71ccc7e5df4aec9082201d5d6fd8 /searx/engines
parent    02cbdf468b316d9c9609d35ec2a9d0916c6def4c (diff)
[fix] cleanup: rename `searx` leftovers to `SearXNG` (#5049)
Signed-off-by: Markus Heiser <markus.heiser@darmarit.de>
Diffstat (limited to 'searx/engines')
-rwxr-xr-x  searx/engines/base.py           4
-rw-r--r--  searx/engines/elasticsearch.py  2
-rw-r--r--  searx/engines/photon.py         6
-rw-r--r--  searx/engines/stract.py         4
-rw-r--r--  searx/engines/torznab.py        2
-rw-r--r--  searx/engines/wikidata.py       4
6 files changed, 11 insertions, 11 deletions
diff --git a/searx/engines/base.py b/searx/engines/base.py
index 4f99d6717..328a1fc07 100755
--- a/searx/engines/base.py
+++ b/searx/engines/base.py
@@ -7,7 +7,7 @@ import re
from urllib.parse import urlencode
from lxml import etree
-from searx.utils import searx_useragent
+from searx.utils import searxng_useragent
# about
about = {
@@ -69,7 +69,7 @@ def request(query, params):
params['url'] = base_url.format(**string_args)
- params['headers']['User-Agent'] = searx_useragent()
+ params['headers']['User-Agent'] = searxng_useragent()
return params
diff --git a/searx/engines/elasticsearch.py b/searx/engines/elasticsearch.py
index c4992b02f..c613ae367 100644
--- a/searx/engines/elasticsearch.py
+++ b/searx/engines/elasticsearch.py
@@ -101,7 +101,7 @@ def request(query, params):
def _match_query(query):
"""
The standard for full text queries.
- searx format: "key:value" e.g. city:berlin
+ SearXNG format: "key:value" e.g. city:berlin
REF: https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl-match-query.html
"""
diff --git a/searx/engines/photon.py b/searx/engines/photon.py
index 43c41bf46..07fcfdf31 100644
--- a/searx/engines/photon.py
+++ b/searx/engines/photon.py
@@ -5,7 +5,7 @@
from json import loads
from urllib.parse import urlencode
-from searx.utils import searx_useragent
+from searx.utils import searxng_useragent
# about
about = {
@@ -40,8 +40,8 @@ def request(query, params):
if language in supported_languages:
params['url'] = params['url'] + "&lang=" + language
- # using searx User-Agent
- params['headers']['User-Agent'] = searx_useragent()
+ # using SearXNG User-Agent
+ params['headers']['User-Agent'] = searxng_useragent()
return params
diff --git a/searx/engines/stract.py b/searx/engines/stract.py
index ffd475732..feeae05b1 100644
--- a/searx/engines/stract.py
+++ b/searx/engines/stract.py
@@ -6,7 +6,7 @@ ends.
"""
from json import dumps
-from searx.utils import searx_useragent
+from searx.utils import searxng_useragent
from searx.enginelib.traits import EngineTraits
about = {
@@ -31,7 +31,7 @@ def request(query, params):
params['headers'] = {
'Accept': 'application/json',
'Content-Type': 'application/json',
- 'User-Agent': searx_useragent(),
+ 'User-Agent': searxng_useragent(),
}
region = traits.get_region(params["searxng_locale"], default=traits.all_locale)
params['data'] = dumps(
diff --git a/searx/engines/torznab.py b/searx/engines/torznab.py
index cfe7e2b4f..333a21812 100644
--- a/searx/engines/torznab.py
+++ b/searx/engines/torznab.py
@@ -149,7 +149,7 @@ def build_result(item: etree.Element) -> Dict[str, Any]:
leechers = get_torznab_attribute(item, 'leechers')
peers = get_torznab_attribute(item, 'peers')
- # map attributes to searx result
+ # map attributes to SearXNG result
result: Dict[str, Any] = {
'template': 'torrent.html',
'title': get_attribute(item, 'title'),
diff --git a/searx/engines/wikidata.py b/searx/engines/wikidata.py
index 5b5764d20..167364d4e 100644
--- a/searx/engines/wikidata.py
+++ b/searx/engines/wikidata.py
@@ -15,7 +15,7 @@ from babel.dates import format_datetime, format_date, format_time, get_datetime_
from searx.data import WIKIDATA_UNITS
from searx.network import post, get
-from searx.utils import searx_useragent, get_string_replaces_function
+from searx.utils import searxng_useragent, get_string_replaces_function
from searx.external_urls import get_external_url, get_earth_coordinates_url, area_to_osm_zoom
from searx.engines.wikipedia import (
fetch_wikimedia_traits,
@@ -142,7 +142,7 @@ replace_http_by_https = get_string_replaces_function({'http:': 'https:'})
def get_headers():
# user agent: https://www.mediawiki.org/wiki/Wikidata_Query_Service/User_Manual#Query_limits
- return {'Accept': 'application/sparql-results+json', 'User-Agent': searx_useragent()}
+ return {'Accept': 'application/sparql-results+json', 'User-Agent': searxng_useragent()}
def get_label_for_entity(entity_id, language):