diff options
| author | Alexandre Flament <alex@al-f.net> | 2021-12-27 19:11:01 +0100 |
|---|---|---|
| committer | GitHub <noreply@github.com> | 2021-12-27 19:11:01 +0100 |
| commit | c6922ae7c5e53f695d5f5f8704b10b4e2815cda4 (patch) | |
| tree | 9c0456ad1a1d9d375311ccd8c9bd3eafd0779114 /searxng_extra/update/update_engine_descriptions.py | |
| parent | 54bce130f9074c3d63009237b014c727a1443cc5 (diff) | |
| parent | d84226bf63757b1d4245ab26e9c081daf42389aa (diff) | |
Merge pull request #619 from dalf/apply-black
Apply black
Diffstat (limited to 'searxng_extra/update/update_engine_descriptions.py')
| -rwxr-xr-x | searxng_extra/update/update_engine_descriptions.py | 31 |
1 file changed, 11 insertions(+), 20 deletions(-)
diff --git a/searxng_extra/update/update_engine_descriptions.py b/searxng_extra/update/update_engine_descriptions.py
index 59a9a72a0..51cfc7cc2 100755
--- a/searxng_extra/update/update_engine_descriptions.py
+++ b/searxng_extra/update/update_engine_descriptions.py
@@ -55,7 +55,10 @@ NOT_A_DESCRIPTION = [
 ]
 
 SKIP_ENGINE_SOURCE = [
-    ('gitlab', 'wikidata')  # descriptions are about wikipedia disambiguation pages
+    # fmt: off
+    ('gitlab', 'wikidata')
+    # descriptions are about wikipedia disambiguation pages
+    # fmt: on
 ]
 
 LANGUAGES = LOCALE_NAMES.keys()
@@ -92,10 +95,7 @@ def update_description(engine_name, lang, description, source, replace=True):
 
 
 def get_wikipedia_summary(lang, pageid):
-    params = {
-        'language': lang.replace('_','-'),
-        'headers': {}
-    }
+    params = {'language': lang.replace('_', '-'), 'headers': {}}
     searx.engines.engines['wikipedia'].request(pageid, params)
     try:
         response = searx.network.get(params['url'], headers=params['headers'], timeout=10)
@@ -160,10 +160,7 @@ def initialize():
     global IDS, WIKIPEDIA_LANGUAGES, LANGUAGES_SPARQL
     searx.search.initialize()
     wikipedia_engine = searx.engines.engines['wikipedia']
-    WIKIPEDIA_LANGUAGES = {
-        language: wikipedia_engine.url_lang(language.replace('_', '-'))
-        for language in LANGUAGES
-    }
+    WIKIPEDIA_LANGUAGES = {language: wikipedia_engine.url_lang(language.replace('_', '-')) for language in LANGUAGES}
     WIKIPEDIA_LANGUAGES['nb_NO'] = 'no'
     LANGUAGES_SPARQL = ', '.join(f"'{l}'" for l in set(WIKIPEDIA_LANGUAGES.values()))
     for engine_name, engine in searx.engines.engines.items():
@@ -178,9 +175,7 @@ def initialize():
 def fetch_wikidata_descriptions():
     searx.network.set_timeout_for_thread(60)
     result = wikidata.send_wikidata_query(
-        SPARQL_DESCRIPTION
-        .replace('%IDS%', IDS)
-        .replace('%LANGUAGES_SPARQL%', LANGUAGES_SPARQL)
+        SPARQL_DESCRIPTION.replace('%IDS%', IDS).replace('%LANGUAGES_SPARQL%', LANGUAGES_SPARQL)
     )
     if result is not None:
         for binding in result['results']['bindings']:
@@ -195,9 +190,7 @@ def fetch_wikidata_descriptions():
 
 def fetch_wikipedia_descriptions():
     result = wikidata.send_wikidata_query(
-        SPARQL_WIKIPEDIA_ARTICLE
-        .replace('%IDS%', IDS)
-        .replace('%LANGUAGES_SPARQL%', LANGUAGES_SPARQL)
+        SPARQL_WIKIPEDIA_ARTICLE.replace('%IDS%', IDS).replace('%LANGUAGES_SPARQL%', LANGUAGES_SPARQL)
     )
     if result is not None:
         for binding in result['results']['bindings']:
@@ -224,9 +217,9 @@ def fetch_website_description(engine_name, website):
         # the front page can't be fetched: skip this engine
         return
 
-    wikipedia_languages_r = { V: K for K, V in WIKIPEDIA_LANGUAGES.items() }
+    wikipedia_languages_r = {V: K for K, V in WIKIPEDIA_LANGUAGES.items()}
     languages = ['en', 'es', 'pt', 'ru', 'tr', 'fr']
-    languages = languages + [ l for l in LANGUAGES if l not in languages]
+    languages = languages + [l for l in LANGUAGES if l not in languages]
 
     previous_matched_lang = None
     previous_count = 0
@@ -279,9 +272,7 @@ def get_output():
     * description (if source = "wikipedia")
     * [f"engine:lang", "ref"] (reference to another existing description)
     """
-    output = {
-        locale: {} for locale in LOCALE_NAMES
-    }
+    output = {locale: {} for locale in LOCALE_NAMES}
 
     seen_descriptions = {}