From fc20c561bf540d9b187a89a71730b6e3b0bee2da Mon Sep 17 00:00:00 2001
From: Alexandre Flament
Date: Mon, 6 Sep 2021 08:47:11 +0200
Subject: [PATCH 1/4] [mod] oa_doi_rewrite plugin: get_doi_resolver: remove args parameter

preferences.get_value('doi_resolver') already contains the value from
request.args.get('doi_resolver')
---
 searx/plugins/oa_doi_rewrite.py | 15 ++++++---------
 searx/webapp.py                 |  4 +---
 2 files changed, 7 insertions(+), 12 deletions(-)

diff --git a/searx/plugins/oa_doi_rewrite.py b/searx/plugins/oa_doi_rewrite.py
index 02a712942..2dcc01e05 100644
--- a/searx/plugins/oa_doi_rewrite.py
+++ b/searx/plugins/oa_doi_rewrite.py
@@ -11,8 +11,6 @@ description = gettext('Avoid paywalls by redirecting to open-access versions of
 default_on = False
 preference_section = 'general'
 
-doi_resolvers = settings['doi_resolvers']
-
 
 def extract_doi(url):
     match = regex.search(url.path)
@@ -25,13 +23,12 @@ def extract_doi(url):
     return None
 
 
-def get_doi_resolver(args, preference_doi_resolver):
+def get_doi_resolver(preferences):
     doi_resolvers = settings['doi_resolvers']
-    doi_resolver = args.get('doi_resolver', preference_doi_resolver)[0]
-    if doi_resolver not in doi_resolvers:
-        doi_resolver = settings['default_doi_resolver']
-    doi_resolver_url = doi_resolvers[doi_resolver]
-    return doi_resolver_url
+    selected_resolver = preferences.get_value('doi_resolver')[0]
+    if selected_resolver not in doi_resolvers:
+        selected_resolver = settings['default_doi_resolver']
+    return doi_resolvers[selected_resolver]
 
 
 def on_result(request, search, result):
@@ -43,6 +40,6 @@ def on_result(request, search, result):
         for suffix in ('/', '.pdf', '.xml', '/full', '/meta', '/abstract'):
             if doi.endswith(suffix):
                 doi = doi[:-len(suffix)]
-        result['url'] = get_doi_resolver(request.args, request.preferences.get_value('doi_resolver')) + doi
+        result['url'] = get_doi_resolver(request.preferences) + doi
         result['parsed_url'] = urlparse(result['url'])
     return True
diff --git a/searx/webapp.py b/searx/webapp.py
index 6fcf7c464..cffde08a3 100755
--- a/searx/webapp.py
+++ b/searx/webapp.py
@@ -1040,9 +1040,7 @@ def preferences():
                   themes = themes,
                   plugins = plugins,
                   doi_resolvers = settings['doi_resolvers'],
-                  current_doi_resolver = get_doi_resolver(
-                      request.args, request.preferences.get_value('doi_resolver')
-                  ),
+                  current_doi_resolver = get_doi_resolver(request.preferences),
                   allowed_plugins = allowed_plugins,
                   theme = get_current_theme_name(),
                   preferences_url_params = request.preferences.get_as_url_params(),

From 660c1801705e56a81cf02574f48cc194fec95b1f Mon Sep 17 00:00:00 2001
From: Alexandre Flament
Date: Mon, 6 Sep 2021 08:49:13 +0200
Subject: [PATCH 2/4] [mod] plugin: call on_result after each engine from the ResultContainer

Currently, searx.search.Search calls on_result only after the engine results
have been merged (ResultContainer.order_results).

on_result plugins can rewrite the results: once the URL(s) are modified, the
results could be merged again, but this never happens because
ResultContainer.order_results has already been called.

This commit calls on_result for each result of each engine instead. In
addition, an on_result function can now return False to remove the result.

Note: the on_result functions now run in the engine threads instead of the
Flask thread.
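For illustration, an on_result plugin written against the new per-result hook
could look like the following sketch (the plugin name and the filtered host are
made up and are not part of this patch set):

    name = 'Example filter'
    description = 'Remove results that point to a blocked host'
    default_on = False

    def on_result(request, search, result):
        # runs once per result, in the engine's thread
        parsed_url = result.get('parsed_url')
        if parsed_url is None:
            return True                  # keep answers, suggestions, corrections, ...
        if parsed_url.hostname == 'blocked.example.org':
            return False                 # returning False removes the result
        # a plugin may also rewrite result['url']; it must then update
        # result['parsed_url'] accordingly
        return True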
---
 searx/results.py         | 90 +++++++++++++++++++++++++---------------
 searx/search/__init__.py | 20 ++++++---
 2 files changed, 70 insertions(+), 40 deletions(-)

diff --git a/searx/results.py b/searx/results.py
index d0cb4df3f..ae8cf2498 100644
--- a/searx/results.py
+++ b/searx/results.py
@@ -145,7 +145,7 @@ class ResultContainer:
     """docstring for ResultContainer"""
 
     __slots__ = '_merged_results', 'infoboxes', 'suggestions', 'answers', 'corrections', '_number_of_results',\
-                '_ordered', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data'
+                '_closed', 'paging', 'unresponsive_engines', 'timings', 'redirect_url', 'engine_data', 'on_result'
 
     def __init__(self):
         super().__init__()
@@ -156,43 +156,48 @@ class ResultContainer:
         self.corrections = set()
         self._number_of_results = []
         self.engine_data = defaultdict(dict)
-        self._ordered = False
+        self._closed = False
         self.paging = False
         self.unresponsive_engines = set()
         self.timings = []
         self.redirect_url = None
+        self.on_result = lambda _: True
 
     def extend(self, engine_name, results):
+        if self._closed:
+            return
+
         standard_result_count = 0
         error_msgs = set()
         for result in list(results):
             result['engine'] = engine_name
-            if 'suggestion' in result:
+            if 'suggestion' in result and self.on_result(result):
                 self.suggestions.add(result['suggestion'])
-            elif 'answer' in result:
+            elif 'answer' in result and self.on_result(result):
                 self.answers[result['answer']] = result
-            elif 'correction' in result:
+            elif 'correction' in result and self.on_result(result):
                 self.corrections.add(result['correction'])
-            elif 'infobox' in result:
+            elif 'infobox' in result and self.on_result(result):
                 self._merge_infobox(result)
-            elif 'number_of_results' in result:
+            elif 'number_of_results' in result and self.on_result(result):
                 self._number_of_results.append(result['number_of_results'])
-            elif 'engine_data' in result:
+            elif 'engine_data' in result and self.on_result(result):
                 self.engine_data[engine_name][result['key']] = result['engine_data']
-            else:
+            elif 'url' in result:
                 # standard result (url, title, content)
-                if 'url' in result and not isinstance(result['url'], str):
-                    logger.debug('result: invalid URL: %s', str(result))
-                    error_msgs.add('invalid URL')
-                elif 'title' in result and not isinstance(result['title'], str):
-                    logger.debug('result: invalid title: %s', str(result))
-                    error_msgs.add('invalid title')
-                elif 'content' in result and not isinstance(result['content'], str):
-                    logger.debug('result: invalid content: %s', str(result))
-                    error_msgs.add('invalid content')
-                else:
-                    self._merge_result(result, standard_result_count + 1)
-                    standard_result_count += 1
+                if not self._is_valid_url_result(result, error_msgs):
+                    continue
+                # normalize the result
+                self._normalize_url_result(result)
+                # call on_result, i.e. searx.search.SearchWithPlugins._on_result,
+                # which calls the plugins
+                if not self.on_result(result):
+                    continue
+                self.__merge_url_result(result, standard_result_count + 1)
+                standard_result_count += 1
+            elif self.on_result(result):
+                self.__merge_result_no_url(result, standard_result_count + 1)
+                standard_result_count += 1
 
         if len(error_msgs) > 0:
             for msg in error_msgs:
@@ -219,14 +224,29 @@ class ResultContainer:
         if add_infobox:
             self.infoboxes.append(infobox)
 
-    def _merge_result(self, result, position):
+    def _is_valid_url_result(self, result, error_msgs):
         if 'url' in result:
-            self.__merge_url_result(result, position)
-            return
+            if not isinstance(result['url'], str):
+                logger.debug('result: invalid URL: %s', str(result))
+                error_msgs.add('invalid URL')
+                return False
 
-        self.__merge_result_no_url(result, position)
+        if 'title' in result and not isinstance(result['title'], str):
+            logger.debug('result: invalid title: %s', str(result))
+            error_msgs.add('invalid title')
+            return False
 
-    def __merge_url_result(self, result, position):
+        if 'content' in result:
+            if not isinstance(result['content'], str):
+                logger.debug('result: invalid content: %s', str(result))
+                error_msgs.add('invalid content')
+                return False
+
+        return True
+
+    def _normalize_url_result(self, result):
+        """Parse the URL and normalize the content of the result
+        """
         result['parsed_url'] = urlparse(result['url'])
 
         # if the result has no scheme, use http as default
@@ -234,12 +254,13 @@
             result['parsed_url'] = result['parsed_url']._replace(scheme="http")
             result['url'] = result['parsed_url'].geturl()
 
-        result['engines'] = set([result['engine']])
-
         # strip multiple spaces and cariage returns from content
-        if result.get('content'):
-            result['content'] = WHITESPACE_REGEX.sub(' ', result['content'])
+        result['content'] = WHITESPACE_REGEX.sub(' ', result['content'])
+        return True
+
+    def __merge_url_result(self, result, position):
+        result['engines'] = set([result['engine']])
 
         duplicated = self.__find_duplicated_http_result(result)
         if duplicated:
             self.__merge_duplicated_http_result(duplicated, result, position)
@@ -295,7 +316,9 @@
         with RLock():
             self._merged_results.append(result)
 
-    def order_results(self):
+    def close(self):
+        self._closed = True
+
         for result in self._merged_results:
             score = result_score(result)
             result['score'] = score
@@ -349,12 +372,11 @@
                 categoryPositions[category] = {'index': len(gresults), 'count': 8}
 
         # update _merged_results
-        self._ordered = True
         self._merged_results = gresults
 
     def get_ordered_results(self):
-        if not self._ordered:
-            self.order_results()
+        if not self._closed:
+            self.close()
         return self._merged_results
 
     def results_length(self):
diff --git a/searx/search/__init__.py b/searx/search/__init__.py
index d8d3e1e1c..6c750a3f9 100644
--- a/searx/search/__init__.py
+++ b/searx/search/__init__.py
@@ -1,6 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-or-later
 # lint: pylint
-# pylint: disable=missing-module-docstring
+# pylint: disable=missing-module-docstring, too-few-public-methods
 
 import typing
 import threading
@@ -179,7 +179,18 @@ class SearchWithPlugins(Search):
     def __init__(self, search_query, ordered_plugin_list, request):
         super().__init__(search_query)
         self.ordered_plugin_list = ordered_plugin_list
-        self.request = request
+        self.result_container.on_result = self._on_result
+        # pylint: disable=line-too-long
+        # get the "real" request to use it outside the Flask context.
+        # see
+        # * https://github.com/pallets/flask/blob/d01d26e5210e3ee4cbbdef12f05c886e08e92852/src/flask/globals.py#L55
+        # * https://github.com/pallets/werkzeug/blob/3c5d3c9bd0d9ce64590f0af8997a38f3823b368d/src/werkzeug/local.py#L548-L559
+        # * https://werkzeug.palletsprojects.com/en/2.0.x/local/#werkzeug.local.LocalProxy._get_current_object
+        # pylint: enable=line-too-long
+        self.request = request._get_current_object()
+
+    def _on_result(self, result):
+        return plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)
 
     def search(self):
         if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
@@ -187,9 +198,6 @@
 
         plugins.call(self.ordered_plugin_list, 'post_search', self.request, self)
 
-        results = self.result_container.get_ordered_results()
-
-        for result in results:
-            plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)
+        self.result_container.close()
 
         return self.result_container

From b941763e206a572018b70a9218ecd01c133c942d Mon Sep 17 00:00:00 2001
From: Alexandre Flament
Date: Tue, 7 Sep 2021 19:39:00 +0200
Subject: [PATCH 3/4] [mod] ahmia_filter: use on_result instead of post_search

see commit 6c9ae7911e9639bc46cd53af215734b4bdb61ba9
---
 searx/plugins/ahmia_filter.py | 8 +-------
 1 file changed, 1 insertion(+), 7 deletions(-)

diff --git a/searx/plugins/ahmia_filter.py b/searx/plugins/ahmia_filter.py
index 83b05e4d2..70f216ee1 100644
--- a/searx/plugins/ahmia_filter.py
+++ b/searx/plugins/ahmia_filter.py
@@ -20,14 +20,8 @@ def get_ahmia_blacklist():
     return ahmia_blacklist
 
 
-def not_blacklisted(result):
+def on_result(request, search, result):
     if not result.get('is_onion') or not result.get('parsed_url'):
         return True
     result_hash = md5(result['parsed_url'].hostname.encode()).hexdigest()
     return result_hash not in get_ahmia_blacklist()
-
-
-def post_search(request, search):
-    filtered_results = list(filter(not_blacklisted, search.result_container._merged_results))
-    search.result_container._merged_results = filtered_results
-    return True

From 0b27c8698f7b5bbca5083cf234fabbc7d7308349 Mon Sep 17 00:00:00 2001
From: Alexandre Flament
Date: Thu, 9 Sep 2021 11:23:57 +0200
Subject: [PATCH 4/4] [doc] update docs/dev/plugins.rst

---
 docs/dev/plugins.rst      | 58 ++++++++++++++++++++++++++++++---------
 docs/src/searx.search.rst | 38 +++++++++++++++++++++++++
 searx/search/__init__.py  | 10 +++----
 searx/search/models.py    |  1 +
 4 files changed, 89 insertions(+), 18 deletions(-)
 create mode 100644 docs/src/searx.search.rst

diff --git a/docs/dev/plugins.rst b/docs/dev/plugins.rst
index 16262ea6d..44401e34f 100644
--- a/docs/dev/plugins.rst
+++ b/docs/dev/plugins.rst
@@ -26,8 +26,8 @@ Example plugin
    # attach callback to the post search hook
    #  request: flask request object
    #  ctx: the whole local context of the post search hook
-   def post_search(request, ctx):
-       ctx['search'].suggestions.add('example')
+   def post_search(request, search):
+       search.result_container.suggestions.add('example')
        return True
 
 External plugins
 
 Plugin entry points
 
 Entry points (hooks) define when a plugin runs. Right now only three hooks are
 implemented. So feel free to implement a hook if it fits the behaviour of your
-plugin.
+plugin. A plugin doesn't need to implement all the hooks.
 
-Pre search hook
----------------
 
-Runs BEFORE the search request. Function to implement: ``pre_search``
 
-Post search hook
-----------------
+.. py:function:: pre_search(request, search) -> bool
 
-Runs AFTER the search request. Function to implement: ``post_search``
+   Runs BEFORE the search request.
-Runs AFTER the search request. Function to implement: ``post_search`` + `search.result_container` can be changed. -Result hook ------------ + Return a boolean: -Runs when a new result is added to the result list. Function to implement: -``on_result`` + * True to continue the search + * False to stop the search + + :param flask.request request: + :param searx.search.SearchWithPlugins search: + :return: False to stop the search + :rtype: bool + + +.. py:function:: post_search(request, search) -> None + + Runs AFTER the search request. + + :param flask.request request: Flask request. + :param searx.search.SearchWithPlugins search: Context. + + +.. py:function:: on_result(request, search, result) -> bool + + Runs for each result of each engine. + + `result` can be changed. + + If `result["url"]` is defined, then `result["parsed_url"] = urlparse(result['url'])` + + .. warning:: + `result["url"]` can be changed, but `result["parsed_url"]` must be updated too. + + Return a boolean: + + * True to keep the result + * False to remove the result + + :param flask.request request: + :param searx.search.SearchWithPlugins search: + :param typing.Dict result: Result, see - :ref:`engine results` + :return: True to keep the result + :rtype: bool diff --git a/docs/src/searx.search.rst b/docs/src/searx.search.rst new file mode 100644 index 000000000..ad76d4183 --- /dev/null +++ b/docs/src/searx.search.rst @@ -0,0 +1,38 @@ +.. _searx.search: + +====== +Search +====== + +.. autoclass:: searx.search.EngineRef + :members: + +.. autoclass:: searx.search.SearchQuery + :members: + +.. autoclass:: searx.search.Search + + .. attribute:: search_query + :type: searx.search.SearchQuery + + .. attribute:: result_container + :type: searx.results.ResultContainer + + .. automethod:: search() -> searx.results.ResultContainer + +.. autoclass:: searx.search.SearchWithPlugins + :members: + + .. attribute:: search_query + :type: searx.search.SearchQuery + + .. attribute:: result_container + :type: searx.results.ResultContainer + + .. attribute:: ordered_plugin_list + :type: typing.List + + .. attribute:: request + :type: flask.request + + .. 
+   .. automethod:: search() -> searx.results.ResultContainer
diff --git a/searx/search/__init__.py b/searx/search/__init__.py
index 6c750a3f9..69d7ffb25 100644
--- a/searx/search/__init__.py
+++ b/searx/search/__init__.py
@@ -39,7 +39,7 @@ class Search:
 
     __slots__ = "search_query", "result_container", "start_time", "actual_timeout"
 
-    def __init__(self, search_query):
+    def __init__(self, search_query: SearchQuery):
         # init vars
         super().__init__()
         self.search_query = search_query
@@ -163,7 +163,7 @@ class Search:
         return True
 
     # do search-request
-    def search(self):
+    def search(self) -> ResultContainer:
         self.start_time = default_timer()
         if not self.search_external_bang():
             if not self.search_answerers():
@@ -172,11 +172,11 @@ class SearchWithPlugins(Search):
-    """Similar to the Search class but call the plugins."""
+    """Inherit from the Search class, add calls to the plugins."""
 
     __slots__ = 'ordered_plugin_list', 'request'
 
-    def __init__(self, search_query, ordered_plugin_list, request):
+    def __init__(self, search_query: SearchQuery, ordered_plugin_list, request: "flask.Request"):
         super().__init__(search_query)
         self.ordered_plugin_list = ordered_plugin_list
         self.result_container.on_result = self._on_result
@@ -192,7 +192,7 @@ class SearchWithPlugins(Search):
     def _on_result(self, result):
         return plugins.call(self.ordered_plugin_list, 'on_result', self.request, self, result)
 
-    def search(self):
+    def search(self) -> ResultContainer:
         if plugins.call(self.ordered_plugin_list, 'pre_search', self.request, self):
             super().search()
 
diff --git a/searx/search/models.py b/searx/search/models.py
index 7233fac42..e48cb3611 100644
--- a/searx/search/models.py
+++ b/searx/search/models.py
@@ -4,6 +4,7 @@ import typing
 
 
 class EngineRef:
+    """Reference by names to an engine and category"""
 
     __slots__ = 'name', 'category'
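Taken together, the hook API documented in docs/dev/plugins.rst can be exercised
by a minimal plugin module along the following lines (a sketch only; the plugin
metadata and the blocked host name are illustrative, not part of this patch set):

    name = 'Example plugin'
    description = 'Drop results that point to a blocked host'
    default_on = False
    preference_section = 'general'


    def pre_search(request, search):
        # return False to cancel the search before any engine request is sent
        return True


    def on_result(request, search, result):
        # called once per result of each engine; return False to drop the result
        parsed_url = result.get('parsed_url')
        if parsed_url and parsed_url.hostname == 'blocked.example.org':
            return False
        return True


    def post_search(request, search):
        # called once, after all engine results have been collected
        search.result_container.suggestions.add('example')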