#!/usr/bin/env python
# lint: pylint
# SPDX-License-Identifier: AGPL-3.0-or-later
"""This script generates languages.py from intersecting each engine's supported
languages.

Output files: :origin:`searx/data/engines_languages.json` and
:origin:`searx/languages.py` (:origin:`CI Update data ...
<.github/workflows/data-update.yml>`).

"""

# pylint: disable=invalid-name

from unicodedata import lookup
import json
from pathlib import Path
from pprint import pformat

from babel import Locale, UnknownLocaleError
from babel.languages import get_global
from babel.core import parse_locale

from searx import settings, searx_dir
from searx.engines import load_engines, engines
from searx.network import set_timeout_for_thread

# Output files.
engines_languages_file = Path(searx_dir) / 'data' / 'engines_languages.json'
languages_file = Path(searx_dir) / 'languages.py'


# Fetches the supported languages for each engine and writes a JSON file with them.
def fetch_supported_languages():
    set_timeout_for_thread(10.0)

    engines_languages = {}
    names = list(engines)
    names.sort()

    for engine_name in names:
        if hasattr(engines[engine_name], 'fetch_supported_languages'):
            engines_languages[engine_name] = engines[engine_name].fetch_supported_languages()
            print("fetched %s languages from engine %s" % (len(engines_languages[engine_name]), engine_name))
            if type(engines_languages[engine_name]) == list:  # pylint: disable=unidiomatic-typecheck
                engines_languages[engine_name] = sorted(engines_languages[engine_name])

    print("fetched languages from %s engines" % len(engines_languages))

    # write json file
    with open(engines_languages_file, 'w', encoding='utf-8') as f:
        json.dump(engines_languages, f, indent=2, sort_keys=True)

    return engines_languages
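# A sketch of the resulting engines_languages.json: each engine name maps to
# whatever its fetch_supported_languages() returned -- typically a (sorted) list
# of language codes, or a dict keyed by language code (wikipedia, for instance,
# also carries 'name' and 'english_name' per entry, see join_language_lists()).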


# Get babel Locale object from lang_code if possible.
def get_locale(lang_code):
    try:
        locale = Locale.parse(lang_code, sep='-')
        return locale
    except (UnknownLocaleError, ValueError):
        return None
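# Rough behaviour (depends on the Babel / CLDR data installed):
#   get_locale('en-US') -> Locale('en', territory='US')
#   get_locale('xx')    -> None (unknown locale)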


lang2emoji = {
    'ha': '\U0001F1F3\U0001F1EA',  # Hausa / Niger
    'bs': '\U0001F1E7\U0001F1E6',  # Bosnian / Bosnia & Herzegovina
    'jp': '\U0001F1EF\U0001F1F5',  # Japanese / Japan
    'ua': '\U0001F1FA\U0001F1E6',  # Ukrainian / Ukraine
    'he': '\U0001F1EE\U0001F1F1',  # Hebrew / Israel
}


def get_unicode_flag(lang_code):
    """Determine a unicode flag (emoji) that fits to the ``lang_code``"""

    emoji = lang2emoji.get(lang_code.lower())
    if emoji:
        return emoji

    if len(lang_code) == 2:
        return '\U0001F310'

    language = territory = script = variant = ''
    try:
        language, territory, script, variant = parse_locale(lang_code, '-')
    except ValueError as exc:
        print(exc)

    # https://en.wikipedia.org/wiki/ISO_3166-1_alpha-2
    if not territory:
        # https://www.unicode.org/emoji/charts/emoji-list.html#country-flag
        emoji = lang2emoji.get(language)
        if not emoji:
            print(
                "%s --> language: %s / territory: %s / script: %s / variant: %s"
                % (lang_code, language, territory, script, variant)
            )
        return emoji

    emoji = lang2emoji.get(territory.lower())
    if emoji:
        return emoji

    try:
        c1 = lookup('REGIONAL INDICATOR SYMBOL LETTER ' + territory[0])
        c2 = lookup('REGIONAL INDICATOR SYMBOL LETTER ' + territory[1])
        # print("%s --> territory: %s --> %s%s" % (lang_code, territory, c1, c2))
    except KeyError as exc:
        print("%s --> territory: %s --> %s" % (lang_code, territory, exc))
        return None

    return c1 + c2
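# Rough examples (assuming Babel can parse the given code):
#   get_unicode_flag('fr-FR') -> '\U0001F1EB\U0001F1F7' (regional indicators F + R)
#   get_unicode_flag('fr')    -> '\U0001F310' (globe, no territory to pick a flag from)
#   get_unicode_flag('he')    -> the hard-coded entry from lang2emoji above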


def get_territory_name(lang_code):
    country_name = None
    locale = get_locale(lang_code)
    try:
        if locale is not None:
            country_name = locale.get_territory_name()
    except FileNotFoundError as exc:
        print("ERROR: %s --> %s" % (locale, exc))
    return country_name
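# The territory name is localized to the language itself, e.g. (sketch):
#   get_territory_name('en-GB') -> 'United Kingdom'
#   get_territory_name('de-CH') -> 'Schweiz'
#   get_territory_name('en')    -> None (no territory in the code)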


# Join all language lists.
def join_language_lists(engines_languages):
    language_list = {}
    for engine_name in engines_languages:
        for lang_code in engines_languages[engine_name]:

            # apply custom fixes if necessary
            if lang_code in getattr(engines[engine_name], 'language_aliases', {}).values():
                lang_code = next(
                    lc for lc, alias in engines[engine_name].language_aliases.items() if lang_code == alias
                )

            locale = get_locale(lang_code)

            # ensure that lang_code uses standard language and country codes
            if locale and locale.territory:
                lang_code = "{lang}-{country}".format(lang=locale.language, country=locale.territory)
            short_code = lang_code.split('-')[0]

            # add language without country if not in list
            if short_code not in language_list:
                if locale:
                    # get language's data from babel's Locale object
                    language_name = locale.get_language_name().title()
                    english_name = locale.english_name.split(' (')[0]
                elif short_code in engines_languages['wikipedia']:
                    # get language's data from wikipedia if not known by babel
                    language_name = engines_languages['wikipedia'][short_code]['name']
                    english_name = engines_languages['wikipedia'][short_code]['english_name']
                else:
                    language_name = None
                    english_name = None

                # add language to list
                language_list[short_code] = {
                    'name': language_name,
                    'english_name': english_name,
                    'counter': set(),
                    'countries': {},
                }

            # add language with country if not in list
            if lang_code != short_code and lang_code not in language_list[short_code]['countries']:
                country_name = ''
                if locale:
                    # get country name from babel's Locale object
                    try:
                        country_name = locale.get_territory_name()
                    except FileNotFoundError as exc:
                        print("ERROR: %s --> %s" % (locale, exc))
                        locale = None

                language_list[short_code]['countries'][lang_code] = {
                    'country_name': country_name,
                    'counter': set(),
                }

            # count engine for both language_country combination and language alone
            language_list[short_code]['counter'].add(engine_name)
            if lang_code != short_code:
                language_list[short_code]['countries'][lang_code]['counter'].add(engine_name)

    return language_list
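# A sketch of one entry in the returned mapping (keyed by the bare language code):
#   'en': {
#       'name': 'English',
#       'english_name': 'English',
#       'counter': {<engine names supporting 'en'>},
#       'countries': {
#           'en-GB': {'country_name': 'United Kingdom', 'counter': {<engine names>}},
#           ...
#       },
#   }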


# Filter language list so it only includes the most supported languages and countries
def filter_language_list(all_languages):
    min_engines_per_lang = 12
    min_engines_per_country = 7
    # pylint: disable=consider-using-dict-items, consider-iterating-dictionary
    main_engines = [
        engine_name
        for engine_name in engines.keys()
        if 'general' in engines[engine_name].categories
        and engines[engine_name].supported_languages
        and not engines[engine_name].disabled
    ]

    # filter list to include only languages supported by most engines or all default general engines
    filtered_languages = {
        code: lang
        for code, lang in all_languages.items()
        if (
            len(lang['counter']) >= min_engines_per_lang
            or all(main_engine in lang['counter'] for main_engine in main_engines)
        )
    }

    def _copy_lang_data(lang, country_name=None):
        new_dict = {}
        new_dict['name'] = all_languages[lang]['name']
        new_dict['english_name'] = all_languages[lang]['english_name']
        if country_name:
            new_dict['country_name'] = country_name
        return new_dict

    # for each language get country codes supported by most engines or at least one country code
    filtered_languages_with_countries = {}
    for lang, lang_data in filtered_languages.items():
        countries = lang_data['countries']
        filtered_countries = {}

        # get language's country codes with enough supported engines
        for lang_country, country_data in countries.items():
            if len(country_data['counter']) >= min_engines_per_country:
                filtered_countries[lang_country] = _copy_lang_data(lang, country_data['country_name'])

        # add language without countries too if there's more than one country to choose from
        if len(filtered_countries) > 1:
            filtered_countries[lang] = _copy_lang_data(lang, None)
        elif len(filtered_countries) == 1:
            lang_country = next(iter(filtered_countries))

        # if no country has enough engines try to get most likely country code from babel
        if not filtered_countries:
            lang_country = None
            subtags = get_global('likely_subtags').get(lang)
            if subtags:
                country_code = subtags.split('_')[-1]
                if len(country_code) == 2:
                    lang_country = "{lang}-{country}".format(lang=lang, country=country_code)

            if lang_country:
                filtered_countries[lang_country] = _copy_lang_data(lang, None)
            else:
                filtered_countries[lang] = _copy_lang_data(lang, None)

        filtered_languages_with_countries.update(filtered_countries)

    return filtered_languages_with_countries
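# A sketch of the filtered result -- a flat mapping of selectable codes:
#   'en':    {'name': 'English', 'english_name': 'English'}
#   'en-GB': {'name': 'English', 'english_name': 'English', 'country_name': 'United Kingdom'}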


class UnicodeEscape(str):
    """Escape unicode string in :py:obj:`pprint.pformat`"""

    def __repr__(self):
        return "'" + "".join([chr(c) for c in self.encode('unicode-escape')]) + "'"


# Write languages.py.
def write_languages_file(languages):
    file_headers = (
        "# -*- coding: utf-8 -*-",
        "# list of language codes",
        "# this file is generated automatically by utils/fetch_languages.py",
        "language_codes = (\n",
    )

    language_codes = []

    for code in sorted(languages):
        name = languages[code]['name']
        if name is None:
            print("ERROR: languages['%s'] --> %s" % (code, languages[code]))
            continue

        flag = get_unicode_flag(code) or ''
        item = (
            code,
            name.split(' (')[0],
            get_territory_name(code) or '',
            languages[code].get('english_name') or '',
            UnicodeEscape(flag),
        )

        language_codes.append(item)

    language_codes = tuple(language_codes)

    with open(languages_file, 'w', encoding='utf-8') as new_file:
        file_content = "{file_headers} {language_codes},\n)\n".format(
            # fmt: off
            file_headers = '\n'.join(file_headers),
            language_codes = pformat(language_codes, indent=4)[1:-1]
            # fmt: on
        )
        new_file.write(file_content)
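# Each entry written to languages.py is a 5-tuple, roughly:
#   ('en-GB', 'English', 'United Kingdom', 'English', '\U0001f1ec\U0001f1e7')
# i.e. (code, native name, territory name, English name, flag emoji escape).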


if __name__ == "__main__":
    load_engines(settings['engines'])
    _engines_languages = fetch_supported_languages()
    _all_languages = join_language_lists(_engines_languages)
    _filtered_languages = filter_language_list(_all_languages)
    write_languages_file(_filtered_languages)