summaryrefslogtreecommitdiff
path: root/searx/engines
diff options
context:
space:
mode:
authorBubu <43925055+p3psi-boo@users.noreply.github.com>2024-12-18 11:21:40 +0000
committerMarkus Heiser <markus.heiser@darmarIT.de>2025-03-07 06:59:28 +0100
commitb8671c7a4a34bda3d63c2a391e9b895801f1f1ed (patch)
treea80e97dbd346da5c28773fe2aafb6f2dd823caf0 /searx/engines
parent066aabc112f7869f03966553aa048e9508f89545 (diff)
[feat] engines: add baidu (general)
Diffstat (limited to 'searx/engines')
-rw-r--r--searx/engines/baidu.py75
1 files changed, 75 insertions, 0 deletions
diff --git a/searx/engines/baidu.py b/searx/engines/baidu.py
new file mode 100644
index 000000000..6aa0716fd
--- /dev/null
+++ b/searx/engines/baidu.py
@@ -0,0 +1,75 @@
+# SPDX-License-Identifier: AGPL-3.0-or-later
+"""Baidu_
+
+.. _Baidu: https://www.baidu.com
+"""
+
+# There exists https://github.com/ohblue/baidu-serp-api/
+# but we don't use it here (maybe we can learn from it).
+
+from urllib.parse import urlencode
+from datetime import datetime
+
+from searx.exceptions import SearxEngineAPIException
+
# Engine metadata shown on searx's engine/about pages.
about = {
    "website": "https://www.baidu.com",
    "wikidata_id": "Q14772",  # Wikidata item for Baidu
    "official_api_documentation": None,  # no public API; results are scraped JSON
    "use_official_api": False,
    "require_api_key": False,
    "results": "JSON",
}
+
# Engine configuration: paged general-purpose search against Baidu's SERP.
paging = True
categories = ["general"]
base_url = "https://www.baidu.com/s"
results_per_page = 10


def request(query, params):
    """Build the Baidu search URL for *query* and the requested page.

    Sets ``params['url']`` and returns *params* (searx request hook
    convention).
    """
    # Baidu paginates by result offset (``pn``), not by page number.
    offset = (params["pageno"] - 1) * results_per_page
    args = urlencode(
        {
            "wd": query.strip(),
            "rn": results_per_page,
            "pn": offset,
            "tn": "json",  # request the JSON variant of the SERP
        }
    )
    params["url"] = f"{base_url}?{args}"
    return params
+
+
def response(resp):
    """Parse Baidu's JSON SERP payload into a list of searx result dicts.

    Each result carries ``title``, ``url``, ``content`` (Baidu's ``abs``
    abstract, empty string when missing) and ``publishedDate`` (a naive
    :class:`datetime.datetime` or ``None``).

    Raises ``SearxEngineAPIException`` when the payload is not valid JSON
    or lacks the expected ``feed.entry`` structure.
    """
    try:
        data = resp.json()
    except Exception as e:
        raise SearxEngineAPIException(f"Invalid response: {e}") from e

    if "feed" not in data or "entry" not in data["feed"]:
        raise SearxEngineAPIException("Invalid response")

    results = []
    for entry in data["feed"]["entry"]:
        # Entries without a title or URL are placeholders/ads -- skip them.
        if not entry.get("title") or not entry.get("url"):
            continue

        published_date = None
        if entry.get("time"):
            try:
                published_date = datetime.fromtimestamp(entry["time"])
            # fromtimestamp can also raise OverflowError/OSError for
            # timestamps outside the platform's time_t range; one malformed
            # entry must not abort parsing of the whole page.
            except (ValueError, TypeError, OverflowError, OSError):
                published_date = None

        results.append(
            {
                "title": entry["title"],
                "url": entry["url"],
                "content": entry.get("abs", ""),
                "publishedDate": published_date,
            }
        )

    return results