author    Alexandre Flament <alex@al-f.net>    2021-02-19 12:52:26 +0100
committer Alexandre Flament <alex@al-f.net>    2021-02-23 16:42:28 +0100
commit    46ca32c3ccbc5d740cffa2aa8ddd06e66f30e772 (patch)
tree      8c0feae9cc2c0ecafa2135648cedf5aab7852ab5 /utils
parent    1be6ab2a91786ac4c83cdb08193a8dfae0c1d84f (diff)
[mod] update currencies.json and fetch_currencies.py
Use a SPARQL request on Wikidata to get the list of currencies. currencies.json contains the translations for all supported searx languages. Supersedes #993.
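For reference, the generated currencies.json (built by fetch_db() in the new script below) holds two top-level maps: 'names', mapping a normalized currency name to an ISO 4217 code (or to a list of codes when several currencies share the name), and 'iso4217', mapping a code to its label per language. A minimal, hypothetical excerpt; the concrete entries depend on the Wikidata data at fetch time:

    {
        "names": {
            "euro": "EUR",
            "dollar": "USD",
            "franc": ["CHF", "XOF"]
        },
        "iso4217": {
            "EUR": {"de": "Euro", "en": "Euro", "fr": "euro"},
            "USD": {"de": "US-Dollar", "en": "United States dollar", "fr": "dollar américain"}
        }
    }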
Diffstat (limited to 'utils')
-rw-r--r--  utils/fetch_currencies.py  242
1 file changed, 115 insertions(+), 127 deletions(-)
diff --git a/utils/fetch_currencies.py b/utils/fetch_currencies.py
index 437c375db..8811049a5 100644
--- a/utils/fetch_currencies.py
+++ b/utils/fetch_currencies.py
@@ -1,163 +1,151 @@
-# -*- coding: utf-8 -*-
+#!/usr/bin/env python
-
-import json
import re
import unicodedata
-import string
-from urllib.parse import urlencode
-from requests import get
-
-languages = {'de', 'en', 'es', 'fr', 'hu', 'it', 'nl', 'jp'}
-
-url_template = 'https://www.wikidata.org/w/api.php?action=wbgetentities&format=json&{query}&props=labels%7Cdatatype%7Cclaims%7Caliases&languages=' + '|'.join(languages)
-url_wmflabs_template = 'http://wdq.wmflabs.org/api?q='
-url_wikidata_search_template = 'http://www.wikidata.org/w/api.php?action=query&list=search&format=json&srnamespace=0&srprop=sectiontitle&{query}'
-
-wmflabs_queries = [
- 'CLAIM[31:8142]', # all currencies
-]
+import json
-db = {
- 'iso4217': {
- },
- 'names': {
- }
+# set path
+from sys import path
+from os.path import realpath, dirname, join
+path.append(realpath(dirname(realpath(__file__)) + '/../'))
+
+from searx import searx_dir, settings
+from searx.engines.wikidata import send_wikidata_query
+
+
+# ORDER BY (with all the query fields) is important to keep a deterministic result order
+# so that multiple invocations of this script don't change currencies.json
+SARQL_REQUEST = """
+SELECT DISTINCT ?iso4217 ?unit ?unicode ?label ?alias WHERE {
+ ?item wdt:P498 ?iso4217; rdfs:label ?label.
+ OPTIONAL { ?item skos:altLabel ?alias FILTER (LANG (?alias) = LANG(?label)). }
+ OPTIONAL { ?item wdt:P5061 ?unit. }
+ OPTIONAL { ?item wdt:P489 ?symbol.
+ ?symbol wdt:P487 ?unicode. }
+ MINUS { ?item wdt:P582 ?end_data . } # Ignore currencies with an end date
+ MINUS { ?item wdt:P31/wdt:P279* wd:Q15893266 . } # Ignore "former entity" (obsolete currency)
+ FILTER(LANG(?label) IN (%LANGUAGES_SPARQL%)).
}
+ORDER BY ?iso4217 ?unit ?unicode ?label ?alias
+"""
+
+# ORDER BY (with all the query fields) is important to keep a deterministic result order
+# so that multiple invocations of this script don't change currencies.json
+SPARQL_WIKIPEDIA_NAMES_REQUEST = """
+SELECT DISTINCT ?iso4217 ?article_name WHERE {
+ ?item wdt:P498 ?iso4217 .
+ ?article schema:about ?item ;
+ schema:name ?article_name ;
+ schema:isPartOf [ wikibase:wikiGroup "wikipedia" ]
+ MINUS { ?item wdt:P582 ?end_data . } # Ignore currencies with an end date
+ MINUS { ?item wdt:P31/wdt:P279* wd:Q15893266 . } # Ignore "former entity" (obsolete currency)
+ FILTER(LANG(?article_name) IN (%LANGUAGES_SPARQL%)).
+}
+ORDER BY ?iso4217 ?article_name
+"""
-def remove_accents(data):
- return unicodedata.normalize('NFKD', data).lower()
-
-
-def normalize_name(name):
- return re.sub(' +', ' ', remove_accents(name.lower()).replace('-', ' '))
-
-
-def add_currency_name(name, iso4217):
- global db
-
- db_names = db['names']
-
- if not isinstance(iso4217, str):
- print("problem", name, iso4217)
- return
-
- name = normalize_name(name)
-
- if name == '':
- print("name empty", iso4217)
- return
-
- iso4217_set = db_names.get(name, None)
- if iso4217_set is not None and iso4217 not in iso4217_set:
- db_names[name].append(iso4217)
- else:
- db_names[name] = [iso4217]
+LANGUAGES = settings['locales'].keys()
+LANGUAGES_SPARQL = ', '.join(set(map(lambda l: repr(l.split('_')[0]), LANGUAGES)))
-def add_currency_label(label, iso4217, language):
- global db
+def remove_accents(name):
+ return unicodedata.normalize('NFKD', name).lower()
- db['iso4217'][iso4217] = db['iso4217'].get(iso4217, {})
- db['iso4217'][iso4217][language] = label
+def remove_extra(name):
+ for c in ('(', ':'):
+ if c in name:
+ name = name.split(c)[0].strip()
+ return name
-def get_property_value(data, name):
- prop = data.get('claims', {}).get(name, {})
- if len(prop) == 0:
- return None
- value = prop[0].get('mainsnak', {}).get('datavalue', {}).get('value', '')
- if value == '':
- return None
+def _normalize_name(name):
+ name = re.sub(' +', ' ', remove_accents(name.lower()).replace('-', ' '))
+ name = remove_extra(name)
+ return name
- return value
+def add_currency_name(db, name, iso4217, normalize_name=True):
+ db_names = db['names']
-def parse_currency(data):
- iso4217 = get_property_value(data, 'P498')
+ if normalize_name:
+ name = _normalize_name(name)
- if iso4217 is not None:
- unit = get_property_value(data, 'P558')
- if unit is not None:
- add_currency_name(unit, iso4217)
+ iso4217_set = db_names.setdefault(name, [])
+ if iso4217 not in iso4217_set:
+ iso4217_set.insert(0, iso4217)
- labels = data.get('labels', {})
- for language in languages:
- name = labels.get(language, {}).get('value', None)
- if name is not None:
- add_currency_name(name, iso4217)
- add_currency_label(name, iso4217, language)
- aliases = data.get('aliases', {})
- for language in aliases:
- for i in range(0, len(aliases[language])):
- alias = aliases[language][i].get('value', None)
- add_currency_name(alias, iso4217)
+def add_currency_label(db, label, iso4217, language):
+ labels = db['iso4217'].setdefault(iso4217, {})
+ labels[language] = label
-def fetch_data(wikidata_ids):
- url = url_template.format(query=urlencode({'ids': '|'.join(wikidata_ids)}))
- htmlresponse = get(url)
- jsonresponse = json.loads(htmlresponse.content)
- entities = jsonresponse.get('entities', {})
+def wikidata_request_result_iterator(request):
+ result = send_wikidata_query(request.replace('%LANGUAGES_SPARQL%', LANGUAGES_SPARQL))
+ if result is not None:
+ for r in result['results']['bindings']:
+ yield r
- for pname in entities:
- pvalue = entities.get(pname)
- parse_currency(pvalue)
+def fetch_db():
+ db = {
+ 'names': {},
+ 'iso4217': {},
+ }
-def add_q(i):
- return "Q" + str(i)
+ for r in wikidata_request_result_iterator(SPARQL_WIKIPEDIA_NAMES_REQUEST):
+ iso4217 = r['iso4217']['value']
+ article_name = r['article_name']['value']
+ article_lang = r['article_name']['xml:lang']
+ add_currency_name(db, article_name, iso4217)
+ add_currency_label(db, article_name, iso4217, article_lang)
+ for r in wikidata_request_result_iterator(SARQL_REQUEST):
+ iso4217 = r['iso4217']['value']
+ if 'label' in r:
+ label = r['label']['value']
+ label_lang = r['label']['xml:lang']
+ add_currency_name(db, label, iso4217)
+ add_currency_label(db, label, iso4217, label_lang)
-def fetch_data_batch(wikidata_ids):
- while len(wikidata_ids) > 0:
- if len(wikidata_ids) > 50:
- fetch_data(wikidata_ids[0:49])
- wikidata_ids = wikidata_ids[50:]
- else:
- fetch_data(wikidata_ids)
- wikidata_ids = []
+ if 'alias' in r:
+ add_currency_name(db, r['alias']['value'], iso4217)
+ if 'unicode' in r:
+ add_currency_name(db, r['unicode']['value'], iso4217, normalize_name=False)
-def wdq_query(query):
- url = url_wmflabs_template + query
- htmlresponse = get(url)
- jsonresponse = json.loads(htmlresponse.content)
- qlist = list(map(add_q, jsonresponse.get('items', {})))
- error = jsonresponse.get('status', {}).get('error', None)
- if error is not None and error != 'OK':
- print("error for query '" + query + "' :" + error)
+ if 'unit' in r:
+ add_currency_name(db, r['unit']['value'], iso4217, normalize_name=False)
- fetch_data_batch(qlist)
+ # reduce memory usage:
+ # replace lists with one item by the item.
+ # see searx.search.processors.online_currency.name_to_iso4217
+ for name in db['names']:
+ if len(db['names'][name]) == 1:
+ db['names'][name] = db['names'][name][0]
+ return db
-def wd_query(query, offset=0):
- qlist = []
- url = url_wikidata_search_template.format(query=urlencode({'srsearch': query, 'srlimit': 50, 'sroffset': offset}))
- htmlresponse = get(url)
- jsonresponse = json.loads(htmlresponse.content)
- for r in jsonresponse.get('query', {}).get('search', {}):
- qlist.append(r.get('title', ''))
- fetch_data_batch(qlist)
+def get_filename():
+ return join(join(searx_dir, "data"), "currencies.json")
-# fetch #
-for q in wmflabs_queries:
- wdq_query(q)
+def main():
+ #
+ db = fetch_db()
+ # static
+ add_currency_name(db, "euro", 'EUR')
+ add_currency_name(db, "euros", 'EUR')
+ add_currency_name(db, "dollar", 'USD')
+ add_currency_name(db, "dollars", 'USD')
+ add_currency_name(db, "peso", 'MXN')
+ add_currency_name(db, "pesos", 'MXN')
-# static
-add_currency_name("euro", 'EUR')
-add_currency_name("euros", 'EUR')
-add_currency_name("dollar", 'USD')
-add_currency_name("dollars", 'USD')
-add_currency_name("peso", 'MXN')
-add_currency_name("pesos", 'MXN')
+ with open(get_filename(), 'w', encoding='utf8') as f:
+ json.dump(db, f, ensure_ascii=False, indent=4)
-# write
-f = open("currencies.json", "wb")
-json.dump(db, f, indent=4, encoding="utf-8")
-f.close()
+if __name__ == '__main__':
+ main()
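The new script is meant to be run from the repository root (e.g. python utils/fetch_currencies.py) and overwrites searx/data/currencies.json. The comment in fetch_db() points at searx.search.processors.online_currency.name_to_iso4217 as a consumer of the 'names' map; a minimal sketch of such a lookup, assuming the generated file is in place (hypothetical code, not part of this commit):

    import json

    # Load the file produced by utils/fetch_currencies.py.
    with open('searx/data/currencies.json', encoding='utf8') as f:
        CURRENCIES = json.load(f)

    def name_to_iso4217(name):
        # Names in the file are normalized (lowercase, accents stripped) and map
        # either to a single ISO 4217 code or to a list of codes when ambiguous.
        ret = CURRENCIES['names'].get(name, name)
        if isinstance(ret, list):
            ret = ret[0]  # several currencies share this name: pick the first entry
        return ret

    print(name_to_iso4217('euro'))  # -> 'EUR'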