diff options
Diffstat (limited to 'searx/engines/recoll.py')
| -rw-r--r-- | searx/engines/recoll.py | 172 |
1 file changed, 91 insertions, 81 deletions
"""Search in documents of a local file hierarchy indexed by Recoll and exposed
through recoll-webui.

Configuration
=============

You must configure the following settings:

- :py:obj:`base_url`
- :py:obj:`mount_prefix`
- :py:obj:`dl_prefix`
- :py:obj:`search_dir`

Example scenario:

#. Recoll indexes a local filesystem mounted in ``/export/documents/reference``,
#. the Recoll search interface can be reached at https://recoll.example.org/ and
   the indexed files can be downloaded below https://download.example.org/.

.. code:: yaml

   base_url: https://recoll.example.org
   mount_prefix: /export/documents
   dl_prefix: https://download.example.org
   search_dir: ""

Implementations
===============

"""

import typing as t

from datetime import date, timedelta
from urllib.parse import urlencode, quote

from searx.result_types import EngineResults

if t.TYPE_CHECKING:
    from searx.extended_types import SXNG_Response
    from searx.search.processors import OnlineParams


about = {
    "website": None,
    "wikidata_id": "Q15735774",
    "official_api_documentation": "https://www.lesbonscomptes.com/recoll/",
    "use_official_api": True,
    "require_api_key": False,
    "results": "JSON",
}

paging = True
time_range_support = True

base_url: str = ""
"""Location where recoll-webui can be reached."""

mount_prefix: str = ""
"""Location where the file hierarchy is mounted on your *local* filesystem."""

dl_prefix: str = ""
"""Location where the file hierarchy as indexed by recoll can be reached."""

search_dir: str = ""
"""Part of the indexed file hierarchy to be searched, if empty the full domain
is searched."""

# Maps a time-range name from the UI to a number of days in the past.
_s2i: dict[str | None, int] = {"day": 1, "week": 7, "month": 30, "year": 365}

# Hoisted out of the response loop: the template is constant, there is no
# reason to rebuild the string for every audio/video result.
_EMBEDDED_TEMPLATE = '<{ttype} controls height="166px" src="{url}" type="{mtype}"></{ttype}>'


def setup(engine_settings: dict[str, t.Any]) -> bool:
    """Initialization of the Recoll engine: checks that the mandatory values
    ``base_url``, ``mount_prefix`` and ``dl_prefix`` are configured and strips
    a trailing slash from ``base_url`` (``request`` adds the ``/json`` path
    itself).

    Returns ``False`` (engine stays inactive) when the configuration is
    incomplete.
    """
    missing = [name for name in ("base_url", "mount_prefix", "dl_prefix") if not engine_settings.get(name)]
    if missing:
        # NOTE(review): ``logger`` is not defined or imported in this module;
        # presumably the engine loader injects it -- confirm.
        logger.error("missing recoll configuration: %s", missing)
        return False

    engine_settings["base_url"] = engine_settings["base_url"].rstrip("/")
    return True


def search_after(time_range: str | None) -> str:
    """Return the ISO date *time_range* days before today, or ``""`` when
    *time_range* is unknown / not set (no date filtering)."""
    offset = _s2i.get(time_range, 0)
    if not offset:
        return ""
    return (date.today() - timedelta(days=offset)).isoformat()


def request(query: str, params: "OnlineParams") -> None:
    """Build the recoll-webui JSON search URL for *query* (with paging, time
    range and ``search_dir`` restriction)."""
    args = {
        "query": query,
        "page": params["pageno"],
        "after": search_after(params["time_range"]),
        "dir": search_dir,
        "highlight": 0,
    }
    params["url"] = f"{base_url}/json?{urlencode(args)}"


def response(resp: "SXNG_Response") -> EngineResults:
    """Parse the recoll-webui JSON response into file results.

    URLs pointing into the locally mounted hierarchy (``file://`` +
    ``mount_prefix``) are rewritten to the public download location
    (``dl_prefix``).  For known audio/video/image mime types a preview
    (``embedded`` / ``thumbnail``) is attached.
    """
    res = EngineResults()
    json_data = resp.json()

    if not json_data:
        return res

    for result in json_data.get("results", []):

        url = result.get("url", "").replace("file://" + mount_prefix, dl_prefix)

        # BUG fix: the mime type is reported in the "mtype" field (the old
        # implementation read result['mtype'] and the embed template below did
        # too).  Reading "mime" always yielded "" -- previews never triggered
        # -- and the unguarded result["mtype"] could raise a KeyError.
        full_mtype = result.get("mtype", "")
        mtype = subtype = full_mtype
        if mtype:
            mtype, subtype = (full_mtype.split("/", 1) + [""])[:2]

        # facilitate preview support for known mime types
        thumbnail = embedded = ""
        if mtype in ["audio", "video"]:
            embedded = _EMBEDDED_TEMPLATE.format(
                ttype=mtype, url=quote(url.encode("utf8"), "/:"), mtype=full_mtype
            )
        if mtype in ["image"] and subtype in ["bmp", "gif", "jpeg", "png"]:
            thumbnail = url

        res.add(
            res.types.File(
                title=result.get("label", ""),
                url=url,
                content=result.get("snippet", ""),
                # NOTE(review): the previous implementation converted size with
                # int(result['size']); File is handed the raw value here --
                # confirm the result type accepts a string size.
                size=result.get("size", ""),
                filename=result.get("filename", ""),
                abstract=result.get("abstract", ""),
                author=result.get("author", ""),
                mtype=mtype,
                subtype=subtype,
                time=result.get("time", ""),
                embedded=embedded,
                thumbnail=thumbnail,
            )
        )
    return res