# Provenance: forked from searxng; merge of PR #748 ("[mod] Allow users to
# search in most engine supported languages"), commit 9743bde25e.
# The first hunk below is a Jinja2 template fragment; the second is the
# Python 2 script utils/update_search_languages.py.
{% if preferences %}
|
||||||
|
<select class="form-control" name='language'>
|
||||||
|
{% else %}
|
||||||
|
<select class="time_range" id='language' name='language'>
|
||||||
|
{% endif %}
|
||||||
|
<option value="all" {% if current_language == 'all' %}selected="selected"{% endif %}>{{ _('Default language') }}</option>
|
||||||
|
{% for lang_id,lang_name,country_name,english_name in language_codes | sort(attribute=1) %}
|
||||||
|
<option value="{{ lang_id }}" {% if lang_id == current_language %}selected="selected"{% endif %}>
|
||||||
|
{{ lang_name }} {% if country_name %}({{ country_name }}) {% endif %}- {{ lang_id }}
|
||||||
|
</option>
|
||||||
|
{% endfor %}
|
||||||
|
</select>
|
# -*- coding: utf-8 -*-

# This script generates languages.py by intersecting each engine's
# supported languages.
#
# The country names are obtained from http://api.geonames.org, which
# requires registering as a user.
#
# Output files (engines_languages.json and languages.py) are written in
# the current directory to avoid overwriting anything in case something
# goes wrong.
||||||
|
from requests import get
|
||||||
|
from urllib import urlencode
|
||||||
|
from lxml.html import fromstring
|
||||||
|
from json import loads, dumps
|
||||||
|
import io
|
||||||
|
from sys import path
|
||||||
|
path.append('../searx') # noqa
|
||||||
|
from searx.engines import engines
|
||||||
|
|
||||||
|
# Geonames API for country names.
|
||||||
|
geonames_user = '' # ADD USER NAME HERE
|
||||||
|
country_names_url = 'http://api.geonames.org/countryInfoJSON?{parameters}'
|
||||||
|
|
||||||
|
# Output files.
|
||||||
|
engines_languages_file = 'engines_languages.json'
|
||||||
|
languages_file = 'languages.py'
|
||||||
|
|
||||||
|
engines_languages = {}
|
||||||
|
languages = {}
|
||||||
|
|
||||||
|
|
||||||
|
# To filter out invalid codes and dialects.
|
||||||
|
def valid_code(lang_code):
|
||||||
|
# filter invalid codes
|
||||||
|
# sl-SL is technically not invalid, but still a mistake
|
||||||
|
invalid_codes = ['sl-SL', 'wt-WT', 'jw']
|
||||||
|
invalid_countries = ['UK', 'XA', 'XL']
|
||||||
|
if lang_code[:2] == 'xx'\
|
||||||
|
or lang_code in invalid_codes\
|
||||||
|
or lang_code[-2:] in invalid_countries\
|
||||||
|
or is_dialect(lang_code):
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
# Language codes with any additional tags other than language and country.
|
||||||
|
def is_dialect(lang_code):
|
||||||
|
lang_code = lang_code.split('-')
|
||||||
|
if len(lang_code) > 2 or len(lang_code[0]) > 3:
|
||||||
|
return True
|
||||||
|
if len(lang_code) == 2 and len(lang_code[1]) > 2:
|
||||||
|
return True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
# Get country name in specified language.
|
||||||
|
def get_country_name(locale):
|
||||||
|
if geonames_user is '':
|
||||||
|
return ''
|
||||||
|
|
||||||
|
locale = locale.split('-')
|
||||||
|
if len(locale) != 2:
|
||||||
|
return ''
|
||||||
|
|
||||||
|
url = country_names_url.format(parameters=urlencode({'lang': locale[0],
|
||||||
|
'country': locale[1],
|
||||||
|
'username': geonames_user}))
|
||||||
|
response = get(url)
|
||||||
|
json = loads(response.text)
|
||||||
|
content = json.get('geonames', None)
|
||||||
|
if content is None or len(content) != 1:
|
||||||
|
print "No country name found for " + locale[0] + "-" + locale[1]
|
||||||
|
return ''
|
||||||
|
|
||||||
|
return content[0].get('countryName', '')
|
||||||
|
|
||||||
|
|
||||||
|
# Fetchs supported languages for each engine and writes json file with those.
|
||||||
|
def fetch_supported_languages():
|
||||||
|
for engine_name in engines:
|
||||||
|
if hasattr(engines[engine_name], 'fetch_supported_languages'):
|
||||||
|
try:
|
||||||
|
engines_languages[engine_name] = engines[engine_name].fetch_supported_languages()
|
||||||
|
except Exception as e:
|
||||||
|
print e
|
||||||
|
|
||||||
|
# write json file
|
||||||
|
with io.open(engines_languages_file, "w", encoding="utf-8") as f:
|
||||||
|
f.write(unicode(dumps(engines_languages, ensure_ascii=False, encoding="utf-8")))
|
||||||
|
|
||||||
|
|
||||||
|
# Join all language lists.
|
||||||
|
# Iterate all languages supported by each engine.
|
||||||
|
def join_language_lists():
|
||||||
|
# include wikipedia first for more accurate language names
|
||||||
|
languages.update({code: lang for code, lang
|
||||||
|
in engines_languages['wikipedia'].iteritems()
|
||||||
|
if valid_code(code)})
|
||||||
|
|
||||||
|
for engine_name in engines_languages:
|
||||||
|
for locale in engines_languages[engine_name]:
|
||||||
|
if not valid_code(locale):
|
||||||
|
continue
|
||||||
|
|
||||||
|
# if language is not on list or if it has no name yet
|
||||||
|
if locale not in languages or not languages[locale].get('name'):
|
||||||
|
if isinstance(engines_languages[engine_name], dict):
|
||||||
|
languages[locale] = engines_languages[engine_name][locale]
|
||||||
|
else:
|
||||||
|
languages[locale] = {}
|
||||||
|
|
||||||
|
# get locales that have no name or country yet
|
||||||
|
for locale in languages.keys():
|
||||||
|
# try to get language names
|
||||||
|
if not languages[locale].get('name'):
|
||||||
|
name = languages.get(locale.split('-')[0], {}).get('name', None)
|
||||||
|
if name:
|
||||||
|
languages[locale]['name'] = name
|
||||||
|
else:
|
||||||
|
# filter out locales with no name
|
||||||
|
del languages[locale]
|
||||||
|
continue
|
||||||
|
|
||||||
|
# try to get language name in english
|
||||||
|
if not languages[locale].get('english_name'):
|
||||||
|
languages[locale]['english_name'] = languages.get(locale.split('-')[0], {}).get('english_name', '')
|
||||||
|
|
||||||
|
# try to get country name
|
||||||
|
if locale.find('-') > 0 and not languages[locale].get('country'):
|
||||||
|
languages[locale]['country'] = get_country_name(locale) or ''
|
||||||
|
|
||||||
|
|
||||||
|
# Remove countryless language if language is featured in only one country.
|
||||||
|
def filter_single_country_languages():
|
||||||
|
prev_lang = None
|
||||||
|
for code in sorted(languages):
|
||||||
|
lang = code.split('-')[0]
|
||||||
|
if lang == prev_lang:
|
||||||
|
countries += 1
|
||||||
|
else:
|
||||||
|
if prev_lang is not None and countries == 1:
|
||||||
|
del languages[prev_lang]
|
||||||
|
countries = 0
|
||||||
|
prev_lang = lang
|
||||||
|
|
||||||
|
|
||||||
|
# Write languages.py.
|
||||||
|
def write_languages_file():
|
||||||
|
new_file = open(languages_file, 'w')
|
||||||
|
file_content = '# -*- coding: utf-8 -*-\n'\
|
||||||
|
+ '# list of language codes\n'\
|
||||||
|
+ '# this file is generated automatically by utils/update_search_languages.py\n'\
|
||||||
|
+ '\nlanguage_codes = ('
|
||||||
|
for code in sorted(languages):
|
||||||
|
file_content += '\n (u"' + code + '"'\
|
||||||
|
+ ', u"' + languages[code]['name'].split(' (')[0] + '"'\
|
||||||
|
+ ', u"' + languages[code].get('country', '') + '"'\
|
||||||
|
+ ', u"' + languages[code].get('english_name', '').split(' (')[0] + '"),'
|
||||||
|
# remove last comma
|
||||||
|
file_content = file_content[:-1]
|
||||||
|
file_content += '\n)\n'
|
||||||
|
new_file.write(file_content.encode('utf8'))
|
||||||
|
new_file.close()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
fetch_supported_languages()
|
||||||
|
join_language_lists()
|
||||||
|
filter_single_country_languages()
|
||||||
|
write_languages_file()
|