From efe6dead5566d4800587491e5252474a33ddff60 Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Mon, 2 Feb 2015 17:55:39 +0100 Subject: [PATCH 01/14] Duckduckgo unit test --- searx/engines/duckduckgo.py | 10 +-- searx/tests/engines/test_duckduckgo.py | 90 ++++++++++++++++++++++++++ searx/tests/test_engines.py | 1 + 3 files changed, 96 insertions(+), 5 deletions(-) create mode 100644 searx/tests/engines/test_duckduckgo.py diff --git a/searx/engines/duckduckgo.py b/searx/engines/duckduckgo.py index 583e33f73..e35a6334c 100644 --- a/searx/engines/duckduckgo.py +++ b/searx/engines/duckduckgo.py @@ -15,7 +15,7 @@ from urllib import urlencode from lxml.html import fromstring -from searx.utils import html_to_text +from searx.engines.xpath import extract_text # engine dependent config categories = ['general'] @@ -28,8 +28,8 @@ url = 'https://duckduckgo.com/html?{query}&s={offset}' # specific xpath variables result_xpath = '//div[@class="results_links results_links_deep web-result"]' # noqa url_xpath = './/a[@class="large"]/@href' -title_xpath = './/a[@class="large"]//text()' -content_xpath = './/div[@class="snippet"]//text()' +title_xpath = './/a[@class="large"]' +content_xpath = './/div[@class="snippet"]' # do search-request @@ -64,8 +64,8 @@ def response(resp): if not res_url: continue - title = html_to_text(''.join(r.xpath(title_xpath))) - content = html_to_text(''.join(r.xpath(content_xpath))) + title = extract_text(r.xpath(title_xpath)) + content = extract_text(r.xpath(content_xpath)) # append result results.append({'title': title, diff --git a/searx/tests/engines/test_duckduckgo.py b/searx/tests/engines/test_duckduckgo.py new file mode 100644 index 000000000..8ff0fb7f5 --- /dev/null +++ b/searx/tests/engines/test_duckduckgo.py @@ -0,0 +1,90 @@ +from collections import defaultdict +import mock +from searx.engines import duckduckgo +from searx.testing import SearxTestCase + + +class TestBingEngine(SearxTestCase): + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 1 + dicto['language'] = 'fr_FR' + params = duckduckgo.request(query, dicto) + self.assertIn('url', params) + self.assertIn(query, params['url']) + self.assertIn('duckduckgo.com', params['url']) + self.assertIn('fr-fr', params['url']) + + dicto['language'] = 'all' + params = duckduckgo.request(query, dicto) + self.assertIn('en-us', params['url']) + + def test_response(self): + self.assertRaises(AttributeError, duckduckgo.response, None) + self.assertRaises(AttributeError, duckduckgo.response, []) + self.assertRaises(AttributeError, duckduckgo.response, '') + self.assertRaises(AttributeError, duckduckgo.response, '[]') + + response = mock.Mock(text='') + self.assertEqual(duckduckgo.response(response), []) + + html = """ + + """ + response = mock.Mock(text=html) + results = duckduckgo.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['title'], 'This is the title') + self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/') + self.assertEqual(results[0]['content'], 'This should be the content.') + + html = """ + + + """ + response = mock.Mock(text=html) + results = duckduckgo.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 0) diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index ff8185b1e..13fa753aa 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -6,6 +6,7 @@ from searx.tests.engines.test_dailymotion import * 
# noqa from searx.tests.engines.test_deezer import * # noqa from searx.tests.engines.test_deviantart import * # noqa from searx.tests.engines.test_digg import * # noqa +from searx.tests.engines.test_duckduckgo import * # noqa from searx.tests.engines.test_dummy import * # noqa from searx.tests.engines.test_flickr import * # noqa from searx.tests.engines.test_flickr_noapi import * # noqa From 1ea5bc63a51d98527da7be65807575d965df61f9 Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Mon, 2 Feb 2015 18:39:50 +0100 Subject: [PATCH 02/14] Currency converter's unit test + DDG correction Does anyone know how to trigger the except in the currency converter while still being matched by the regex ? --- searx/tests/engines/test_currency_convert.py | 44 ++++++++++++++++++++ searx/tests/engines/test_duckduckgo.py | 2 +- searx/tests/test_engines.py | 1 + 3 files changed, 46 insertions(+), 1 deletion(-) create mode 100644 searx/tests/engines/test_currency_convert.py diff --git a/searx/tests/engines/test_currency_convert.py b/searx/tests/engines/test_currency_convert.py new file mode 100644 index 000000000..271ed03a2 --- /dev/null +++ b/searx/tests/engines/test_currency_convert.py @@ -0,0 +1,44 @@ +from collections import defaultdict +from datetime import datetime +import mock +from searx.engines import currency_convert +from searx.testing import SearxTestCase + + +class TestCurrencyConvertEngine(SearxTestCase): + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 1 + params = currency_convert.request(query, dicto) + self.assertNotIn('url', params) + + query = '1.1.1 EUR in USD' + params = currency_convert.request(query, dicto) + self.assertNotIn('url', params) + + query = '10 eur in usd' + params = currency_convert.request(query, dicto) + self.assertIn('url', params) + self.assertIn('finance.yahoo.com', params['url']) + self.assertIn('EUR', params['url']) + self.assertIn('USD', params['url']) + + def test_response(self): + dicto = defaultdict(dict) + dicto['ammount'] = 10 + dicto['from'] = "EUR" + dicto['to'] = "USD" + response = mock.Mock(text='a,b,c,d', search_params=dicto) + self.assertEqual(currency_convert.response(response), []) + + csv = "2,0.5,1" + response = mock.Mock(text=csv, search_params=dicto) + results = currency_convert.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['answer'], '10 EUR = 5.0 USD (1 EUR = 0.5 USD)') + now_date = datetime.now().strftime('%Y%m%d') + self.assertEqual(results[0]['url'], 'http://finance.yahoo.com/currency/converter-results/' + + now_date + '/10-eur-to-usd.html') diff --git a/searx/tests/engines/test_duckduckgo.py b/searx/tests/engines/test_duckduckgo.py index 8ff0fb7f5..6f085cbc2 100644 --- a/searx/tests/engines/test_duckduckgo.py +++ b/searx/tests/engines/test_duckduckgo.py @@ -4,7 +4,7 @@ from searx.engines import duckduckgo from searx.testing import SearxTestCase -class TestBingEngine(SearxTestCase): +class TestDuckduckgoEngine(SearxTestCase): def test_request(self): query = 'test_query' diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index 13fa753aa..d0c0e69a4 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -2,6 +2,7 @@ from searx.tests.engines.test_bing import * # noqa from searx.tests.engines.test_bing_images import * # noqa from searx.tests.engines.test_bing_news import * # noqa from searx.tests.engines.test_btdigg import * # noqa +from searx.tests.engines.test_currency_convert 
import * # noqa from searx.tests.engines.test_dailymotion import * # noqa from searx.tests.engines.test_deezer import * # noqa from searx.tests.engines.test_deviantart import * # noqa From a96208be965bfc082524f3e22d9339364e2a9976 Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Tue, 3 Feb 2015 19:56:26 +0100 Subject: [PATCH 03/14] Mediawiki's unit test --- searx/engines/currency_convert.py | 9 +- searx/tests/engines/test_mediawiki.py | 130 ++++++++++++++++++++++++++ searx/tests/test_engines.py | 1 + 3 files changed, 134 insertions(+), 6 deletions(-) create mode 100644 searx/tests/engines/test_mediawiki.py diff --git a/searx/engines/currency_convert.py b/searx/engines/currency_convert.py index d8841c1d1..4618c82b1 100644 --- a/searx/engines/currency_convert.py +++ b/searx/engines/currency_convert.py @@ -13,12 +13,9 @@ def request(query, params): if not m: # wrong query return params - try: - ammount, from_currency, to_currency = m.groups() - ammount = float(ammount) - except: - # wrong params - return params + + ammount, from_currency, to_currency = m.groups() + ammount = float(ammount) q = (from_currency + to_currency).upper() diff --git a/searx/tests/engines/test_mediawiki.py b/searx/tests/engines/test_mediawiki.py new file mode 100644 index 000000000..63f7da6b2 --- /dev/null +++ b/searx/tests/engines/test_mediawiki.py @@ -0,0 +1,130 @@ +# -*- coding: utf-8 -*- +from collections import defaultdict +import mock +from searx.engines import mediawiki +from searx.testing import SearxTestCase + + +class TestMediawikiEngine(SearxTestCase): + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 1 + dicto['language'] = 'fr_FR' + params = mediawiki.request(query, dicto) + self.assertIn('url', params) + self.assertIn(query, params['url']) + self.assertIn('wikipedia.org', params['url']) + self.assertIn('fr', params['url']) + + dicto['language'] = 'all' + params = mediawiki.request(query, dicto) + self.assertIn('en', params['url']) + + mediawiki.base_url = "http://test.url/" + mediawiki.search_url = mediawiki.base_url +\ + 'w/api.php?action=query'\ + '&list=search'\ + '&{query}'\ + '&srprop=timestamp'\ + '&format=json'\ + '&sroffset={offset}'\ + '&srlimit={limit}' # noqa + params = mediawiki.request(query, dicto) + self.assertIn('test.url', params['url']) + + def test_response(self): + dicto = defaultdict(dict) + dicto['language'] = 'fr' + mediawiki.base_url = "https://{language}.wikipedia.org/" + + self.assertRaises(AttributeError, mediawiki.response, None) + self.assertRaises(AttributeError, mediawiki.response, []) + self.assertRaises(AttributeError, mediawiki.response, '') + self.assertRaises(AttributeError, mediawiki.response, '[]') + + response = mock.Mock(text='{}', search_params=dicto) + self.assertEqual(mediawiki.response(response), []) + + response = mock.Mock(text='{"data": []}', search_params=dicto) + self.assertEqual(mediawiki.response(response), []) + + json = """ + { + "query-continue": { + "search": { + "sroffset": 1 + } + }, + "query": { + "searchinfo": { + "totalhits": 29721 + }, + "search": [ + { + "ns": 0, + "title": "This is the title étude", + "timestamp": "2014-12-19T17:42:52Z" + } + ] + } + } + """ + response = mock.Mock(text=json, search_params=dicto) + results = mediawiki.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['title'], u'This is the title étude') + self.assertIn('fr.wikipedia.org', results[0]['url']) + self.assertIn('This_is_the_title', 
results[0]['url']) + self.assertIn('%C3%A9tude', results[0]['url']) + self.assertEqual(results[0]['content'], '') + + json = """ + { + "query-continue": { + "search": { + "sroffset": 1 + } + }, + "query": { + "searchinfo": { + "totalhits": 29721 + }, + "search": [ + ] + } + } + """ + response = mock.Mock(text=json, search_params=dicto) + results = mediawiki.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 0) + + json = """ + { + "query-continue": { + "search": { + "sroffset": 1 + } + }, + "query": { + } + } + """ + response = mock.Mock(text=json, search_params=dicto) + results = mediawiki.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 0) + + json = """ + {"toto":[ + {"id":200,"name":"Artist Name", + "link":"http:\/\/www.mediawiki.com\/artist\/1217","type":"artist"} + ]} + """ + response = mock.Mock(text=json, search_params=dicto) + results = mediawiki.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 0) diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index d0c0e69a4..067616f0e 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -16,6 +16,7 @@ from searx.tests.engines.test_www1x import * # noqa from searx.tests.engines.test_google_images import * # noqa from searx.tests.engines.test_google_news import * # noqa from searx.tests.engines.test_kickass import * # noqa +from searx.tests.engines.test_mediawiki import * # noqa from searx.tests.engines.test_mixcloud import * # noqa from searx.tests.engines.test_piratebay import * # noqa from searx.tests.engines.test_searchcode_code import * # noqa From d6e511fc2f090a848fe5656382266fea816f7b01 Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Wed, 4 Feb 2015 19:39:31 +0100 Subject: [PATCH 04/14] Twitter's unit test There is a commented line of a test that I didn't succed to make it work. It's an issue of unicode, utf-8, ascii, latin1... I think I tried everything, but if you have an idea... I'm still a newbie in python... --- searx/engines/twitter.py | 13 +- searx/tests/engines/test_twitter.py | 502 ++++++++++++++++++++++++++++ searx/tests/test_engines.py | 1 + 3 files changed, 511 insertions(+), 5 deletions(-) create mode 100644 searx/tests/engines/test_twitter.py diff --git a/searx/engines/twitter.py b/searx/engines/twitter.py index bd9a8c2fc..0e35e6188 100644 --- a/searx/engines/twitter.py +++ b/searx/engines/twitter.py @@ -13,8 +13,8 @@ from urlparse import urljoin from urllib import urlencode from lxml import html -from cgi import escape from datetime import datetime +from searx.engines.xpath import extract_text # engine dependent config categories = ['social media'] @@ -22,12 +22,12 @@ language_support = True # search-url base_url = 'https://twitter.com/' -search_url = base_url+'search?' +search_url = base_url + 'search?' 
# specific xpath variables results_xpath = '//li[@data-item-type="tweet"]' link_xpath = './/small[@class="time"]//a' -title_xpath = './/span[@class="username js-action-profile-name"]//text()' +title_xpath = './/span[@class="username js-action-profile-name"]' content_xpath = './/p[@class="js-tweet-text tweet-text"]' timestamp_xpath = './/span[contains(@class,"_timestamp")]' @@ -39,6 +39,8 @@ def request(query, params): # set language if specified if params['language'] != 'all': params['cookies']['lang'] = params['language'].split('_')[0] + else: + params['cookies']['lang'] = 'en' return params @@ -53,8 +55,9 @@ def response(resp): for tweet in dom.xpath(results_xpath): link = tweet.xpath(link_xpath)[0] url = urljoin(base_url, link.attrib.get('href')) - title = ''.join(tweet.xpath(title_xpath)) - content = escape(html.tostring(tweet.xpath(content_xpath)[0], method='text', encoding='UTF-8').decode("utf-8")) + title = extract_text(tweet.xpath(title_xpath)) + content = extract_text(tweet.xpath(content_xpath)[0]) + pubdate = tweet.xpath(timestamp_xpath) if len(pubdate) > 0: timestamp = float(pubdate[0].attrib.get('data-time')) diff --git a/searx/tests/engines/test_twitter.py b/searx/tests/engines/test_twitter.py new file mode 100644 index 000000000..b444b48ee --- /dev/null +++ b/searx/tests/engines/test_twitter.py @@ -0,0 +1,502 @@ +# -*- coding: utf-8 -*- +from collections import defaultdict +import mock +from searx.engines import twitter +from searx.testing import SearxTestCase + + +class TestTwitterEngine(SearxTestCase): + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 0 + dicto['language'] = 'fr_FR' + params = twitter.request(query, dicto) + self.assertIn('url', params) + self.assertIn(query, params['url']) + self.assertIn('twitter.com', params['url']) + self.assertIn('cookies', params) + self.assertIn('lang', params['cookies']) + self.assertIn('fr', params['cookies']['lang']) + + dicto['language'] = 'all' + params = twitter.request(query, dicto) + self.assertIn('cookies', params) + self.assertIn('lang', params['cookies']) + self.assertIn('en', params['cookies']['lang']) + + def test_response(self): + self.assertRaises(AttributeError, twitter.response, None) + self.assertRaises(AttributeError, twitter.response, []) + self.assertRaises(AttributeError, twitter.response, '') + self.assertRaises(AttributeError, twitter.response, '[]') + + response = mock.Mock(text='') + self.assertEqual(twitter.response(response), []) + + html = """ +
+ This is the content étude à€
  • + """ + response = mock.Mock(text=html) + results = twitter.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['title'], '@TitleName') + self.assertEqual(results[0]['url'], 'https://twitter.com/this.is.the.url') + self.assertIn(u'This is the content', results[0]['content']) + # self.assertIn(u'This is the content étude à€', results[0]['content']) + + html = """ +
+ This is the content étude à€
  • + """ + response = mock.Mock(text=html) + results = twitter.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['title'], '@TitleName') + self.assertEqual(results[0]['url'], 'https://twitter.com/this.is.the.url') + self.assertIn(u'This is the content', results[0]['content']) + + html = """ +
+ this.meta.com
+ This should be the content.
  • + """ + response = mock.Mock(text=html) + results = twitter.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 0) diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index 067616f0e..ccef28908 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -23,6 +23,7 @@ from searx.tests.engines.test_searchcode_code import * # noqa from searx.tests.engines.test_searchcode_doc import * # noqa from searx.tests.engines.test_soundcloud import * # noqa from searx.tests.engines.test_stackoverflow import * # noqa +from searx.tests.engines.test_twitter import * # noqa from searx.tests.engines.test_vimeo import * # noqa from searx.tests.engines.test_www500px import * # noqa from searx.tests.engines.test_youtube import * # noqa From ff2ad57a8781c72886848abf013ec84778807c97 Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Wed, 4 Feb 2015 20:07:26 +0100 Subject: [PATCH 05/14] Yahoo News' unit test --- searx/tests/engines/test_yahoo_news.py | 143 +++++++++++++++++++++++++ searx/tests/test_engines.py | 1 + 2 files changed, 144 insertions(+) create mode 100644 searx/tests/engines/test_yahoo_news.py diff --git a/searx/tests/engines/test_yahoo_news.py b/searx/tests/engines/test_yahoo_news.py new file mode 100644 index 000000000..797dc11b7 --- /dev/null +++ b/searx/tests/engines/test_yahoo_news.py @@ -0,0 +1,143 @@ +# -*- coding: utf-8 -*- +from collections import defaultdict +from datetime import datetime +import mock +from searx.engines import yahoo_news +from searx.testing import SearxTestCase + + +class TestYahooNewsEngine(SearxTestCase): + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 1 + dicto['language'] = 'fr_FR' + params = yahoo_news.request(query, dicto) + self.assertIn('url', params) + self.assertIn(query, params['url']) + self.assertIn('news.search.yahoo.com', params['url']) + self.assertIn('fr', params['url']) + self.assertIn('cookies', params) + self.assertIn('sB', params['cookies']) + self.assertIn('fr', params['cookies']['sB']) + + dicto['language'] = 'all' + params = yahoo_news.request(query, dicto) + self.assertIn('cookies', params) + self.assertIn('sB', params['cookies']) + self.assertIn('en', params['cookies']['sB']) + self.assertIn('en', params['url']) + + def test_response(self): + self.assertRaises(AttributeError, yahoo_news.response, None) + self.assertRaises(AttributeError, yahoo_news.response, []) + self.assertRaises(AttributeError, yahoo_news.response, '') + self.assertRaises(AttributeError, yahoo_news.response, '[]') + + response = mock.Mock(text='') + self.assertEqual(yahoo_news.response(response), []) + + html = """ +
+ Business via Yahoo! Finance   Feb 03 09:45am
+ This is the content
    + """ + response = mock.Mock(text=html) + results = yahoo_news.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['title'], 'This is the title...') + self.assertEqual(results[0]['url'], 'http://this.is.the.url/') + self.assertEqual(results[0]['content'], 'This is the content') + + html = """ +
+ Business via Yahoo!   2 hours, 22 minutes ago
+ This is the content
+ Business via Yahoo!   22 minutes ago
+ This is the content
+ Business via Yahoo!   Feb 03 09:45am 1900
+ This is the content
    +
    + """ + response = mock.Mock(text=html) + results = yahoo_news.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 3) + self.assertEqual(results[0]['title'], 'This is the title...') + self.assertEqual(results[0]['url'], 'http://this.is.the.url/') + self.assertEqual(results[0]['content'], 'This is the content') + self.assertEqual(results[2]['publishedDate'].year, datetime.now().year) + + html = """ +
+ this.meta.com
+ This should be the content.
  • + """ + response = mock.Mock(text=html) + results = yahoo_news.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 0) diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index ccef28908..65f182e8f 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -27,3 +27,4 @@ from searx.tests.engines.test_twitter import * # noqa from searx.tests.engines.test_vimeo import * # noqa from searx.tests.engines.test_www500px import * # noqa from searx.tests.engines.test_youtube import * # noqa +from searx.tests.engines.test_yahoo_news import * # noqa From 9f13af8d3c96741e69f50b23abca9bd5a5e19998 Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Wed, 4 Feb 2015 20:41:40 +0100 Subject: [PATCH 06/14] Yahoo's unit test --- searx/engines/yahoo.py | 4 +- searx/tests/engines/test_yahoo.py | 154 ++++++++++++++++++++++++++++++ searx/tests/test_engines.py | 1 + 3 files changed, 157 insertions(+), 2 deletions(-) create mode 100644 searx/tests/engines/test_yahoo.py diff --git a/searx/engines/yahoo.py b/searx/engines/yahoo.py index c6c5b0d0d..161f7513b 100644 --- a/searx/engines/yahoo.py +++ b/searx/engines/yahoo.py @@ -35,7 +35,7 @@ suggestion_xpath = '//div[@id="satat"]//a' def parse_url(url_string): endings = ['/RS', '/RK'] endpositions = [] - start = url_string.find('http', url_string.find('/RU=')+1) + start = url_string.find('http', url_string.find('/RU=') + 1) for ending in endings: endpos = url_string.rfind(ending) @@ -91,7 +91,7 @@ def response(resp): 'content': content}) # if no suggestion found, return results - if not suggestion_xpath: + if not dom.xpath(suggestion_xpath): return results # parse suggestion diff --git a/searx/tests/engines/test_yahoo.py b/searx/tests/engines/test_yahoo.py new file mode 100644 index 000000000..e5c78701d --- /dev/null +++ b/searx/tests/engines/test_yahoo.py @@ -0,0 +1,154 @@ +# -*- coding: utf-8 -*- +from collections import defaultdict +import mock +from searx.engines import yahoo +from searx.testing import SearxTestCase + + +class TestYahooEngine(SearxTestCase): + + def test_parse_url(self): + test_url = 'http://r.search.yahoo.com/_ylt=A0LEb9JUSKcAEGRXNyoA;_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb' +\ + '2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10/RU=https%3a%2f%2fthis.is.the.url%2f/RK=0/RS=' +\ + 'dtcJsfP4mEeBOjnVfUQ-' + url = yahoo.parse_url(test_url) + self.assertEqual('https://this.is.the.url/', url) + + test_url = 'http://r.search.yahoo.com/_ylt=A0LElb9JUSKcAEGRXNyoA;_ylu=X3oDMTEzZm1qazYwBHNlYwNzcgRwb3MDMQRjb' +\ + '2xvA2Jm2dGlkA1NNRTcwM18x/RV=2/RE=1423106085/RO=10/RU=https%3a%2f%2fthis.is.the.url%2f/RS=' +\ + 'dtcJsfP4mEeBOjnVfUQ-' + url = yahoo.parse_url(test_url) + self.assertEqual('https://this.is.the.url/', url) + + test_url = 'https://this.is.the.url/' + url = yahoo.parse_url(test_url) + self.assertEqual('https://this.is.the.url/', url) + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 1 + dicto['language'] = 'fr_FR' + params = yahoo.request(query, dicto) + self.assertIn('url', params) + self.assertIn(query, params['url']) + self.assertIn('search.yahoo.com', params['url']) + self.assertIn('fr', params['url']) + self.assertIn('cookies', params) + self.assertIn('sB', params['cookies']) + self.assertIn('fr', params['cookies']['sB']) + + dicto['language'] = 'all' + params = yahoo.request(query, dicto) + self.assertIn('cookies', params) + self.assertIn('sB', params['cookies']) + self.assertIn('en', params['cookies']['sB']) + 
self.assertIn('en', params['url']) + + def test_response(self): + self.assertRaises(AttributeError, yahoo.response, None) + self.assertRaises(AttributeError, yahoo.response, []) + self.assertRaises(AttributeError, yahoo.response, '') + self.assertRaises(AttributeError, yahoo.response, '[]') + + response = mock.Mock(text='') + self.assertEqual(yahoo.response(response), []) + + html = """ +
+ www.test.com
+ This is the content
+ Also Try
+ This is the suggestion
    + """ + response = mock.Mock(text=html) + results = yahoo.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 2) + self.assertEqual(results[0]['title'], 'This is the title') + self.assertEqual(results[0]['url'], 'https://this.is.the.url/') + self.assertEqual(results[0]['content'], 'This is the content') + self.assertEqual(results[1]['suggestion'], 'This is the suggestion') + + html = """ +
+ www.test.com
+ This is the content
+ www.test.com
+ This is the content
+ www.test.com
+ This is the content
    + """ + response = mock.Mock(text=html) + results = yahoo.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['title'], 'This is the title') + self.assertEqual(results[0]['url'], 'https://this.is.the.url/') + self.assertEqual(results[0]['content'], 'This is the content') + + html = """ +
  • + """ + response = mock.Mock(text=html) + results = yahoo.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 0) diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index 65f182e8f..259ebcf35 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -27,4 +27,5 @@ from searx.tests.engines.test_twitter import * # noqa from searx.tests.engines.test_vimeo import * # noqa from searx.tests.engines.test_www500px import * # noqa from searx.tests.engines.test_youtube import * # noqa +from searx.tests.engines.test_yahoo import * # noqa from searx.tests.engines.test_yahoo_news import * # noqa From 3a4d6045c1da950d13d1d14192247389c5932631 Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Fri, 6 Feb 2015 16:39:59 +0100 Subject: [PATCH 07/14] Subtitleseeker's unit test --- searx/engines/subtitleseeker.py | 15 +- searx/tests/engines/test_subtitleseeker.py | 169 +++++++++++++++++++++ searx/tests/test_engines.py | 1 + 3 files changed, 178 insertions(+), 7 deletions(-) create mode 100644 searx/tests/engines/test_subtitleseeker.py diff --git a/searx/engines/subtitleseeker.py b/searx/engines/subtitleseeker.py index 9aaf1947b..acefe30ea 100644 --- a/searx/engines/subtitleseeker.py +++ b/searx/engines/subtitleseeker.py @@ -12,6 +12,7 @@ from cgi import escape from urllib import quote_plus from lxml import html from searx.languages import language_codes +from searx.engines.xpath import extract_text # engine dependent config categories = ['videos'] @@ -20,7 +21,7 @@ language = "" # search-url url = 'http://www.subtitleseeker.com/' -search_url = url+'search/TITLES/{query}&p={pageno}' +search_url = url + 'search/TITLES/{query}&p={pageno}' # specific xpath variables results_xpath = '//div[@class="boxRows"]' @@ -44,7 +45,7 @@ def response(resp): if resp.search_params['language'] != 'all': search_lang = [lc[1] for lc in language_codes - if lc[0][:2] == resp.search_params['language']][0] + if lc[0][:2] == resp.search_params['language'].split('_')[0]][0] # parse results for result in dom.xpath(results_xpath): @@ -56,17 +57,17 @@ def response(resp): elif search_lang: href = href + search_lang + '/' - title = escape(link.xpath(".//text()")[0]) + title = escape(extract_text(link)) - content = result.xpath('.//div[contains(@class,"red")]//text()')[0] + content = extract_text(result.xpath('.//div[contains(@class,"red")]')) content = content + " - " - text = result.xpath('.//div[contains(@class,"grey-web")]')[0] - content = content + html.tostring(text, method='text') + text = extract_text(result.xpath('.//div[contains(@class,"grey-web")]')[0]) + content = content + text if result.xpath(".//span") != []: content = content +\ " - (" +\ - result.xpath(".//span//text()")[0].strip() +\ + extract_text(result.xpath(".//span")) +\ ")" # append result diff --git a/searx/tests/engines/test_subtitleseeker.py b/searx/tests/engines/test_subtitleseeker.py new file mode 100644 index 000000000..a641601b2 --- /dev/null +++ b/searx/tests/engines/test_subtitleseeker.py @@ -0,0 +1,169 @@ +from collections import defaultdict +import mock +from searx.engines import subtitleseeker +from searx.testing import SearxTestCase + + +class TestSubtitleseekerEngine(SearxTestCase): + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 1 + params = subtitleseeker.request(query, dicto) + self.assertTrue('url' in params) + self.assertTrue(query in params['url']) + self.assertTrue('subtitleseeker.com' in params['url']) + + def 
test_response(self): + dicto = defaultdict(dict) + dicto['language'] = 'fr_FR' + response = mock.Mock(search_params=dicto) + + self.assertRaises(AttributeError, subtitleseeker.response, None) + self.assertRaises(AttributeError, subtitleseeker.response, []) + self.assertRaises(AttributeError, subtitleseeker.response, '') + self.assertRaises(AttributeError, subtitleseeker.response, '[]') + + response = mock.Mock(text='', search_params=dicto) + self.assertEqual(subtitleseeker.response(response), []) + + html = """ +
+ This is the Title
+ "Alternative Title"
+ 1998
+ 1039 Subs
+ 1 hours ago
    + """ + response = mock.Mock(text=html, search_params=dicto) + results = subtitleseeker.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['title'], 'This is the Title') + self.assertEqual(results[0]['url'], 'http://this.is.the.url/French/') + self.assertIn('1998', results[0]['content']) + self.assertIn('1039 Subs', results[0]['content']) + self.assertIn('Alternative Title', results[0]['content']) + + html = """ +
+ 1998
+ 1039 Subs
+ 1 hours ago
    + """ + dicto['language'] = 'all' + response = mock.Mock(text=html, search_params=dicto) + results = subtitleseeker.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['title'], 'This is the Title') + self.assertEqual(results[0]['url'], 'http://this.is.the.url/') + self.assertIn('1998', results[0]['content']) + self.assertIn('1039 Subs', results[0]['content']) + + html = """ +
+ 1998
+ 1039 Subs
+ 1 hours ago
    + """ + subtitleseeker.language = 'English' + response = mock.Mock(text=html, search_params=dicto) + results = subtitleseeker.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['title'], 'This is the Title') + self.assertEqual(results[0]['url'], 'http://this.is.the.url/English/') + self.assertIn('1998', results[0]['content']) + self.assertIn('1039 Subs', results[0]['content']) + + html = """ + +
+ 1998
+ 1039 Subs
+ 1 hours ago
    + """ + response = mock.Mock(text=html, search_params=dicto) + results = subtitleseeker.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 0) diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index 259ebcf35..7fa1e2b8b 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -23,6 +23,7 @@ from searx.tests.engines.test_searchcode_code import * # noqa from searx.tests.engines.test_searchcode_doc import * # noqa from searx.tests.engines.test_soundcloud import * # noqa from searx.tests.engines.test_stackoverflow import * # noqa +from searx.tests.engines.test_subtitleseeker import * # noqa from searx.tests.engines.test_twitter import * # noqa from searx.tests.engines.test_vimeo import * # noqa from searx.tests.engines.test_www500px import * # noqa From f1c10f4fe45f34c12994b9bbc4aca133202fd7ca Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Fri, 6 Feb 2015 17:31:10 +0100 Subject: [PATCH 08/14] Startpage's unit test --- searx/engines/startpage.py | 13 +-- searx/tests/engines/test_startpage.py | 140 ++++++++++++++++++++++++++ searx/tests/test_engines.py | 1 + 3 files changed, 146 insertions(+), 8 deletions(-) create mode 100644 searx/tests/engines/test_startpage.py diff --git a/searx/engines/startpage.py b/searx/engines/startpage.py index d60ecd978..9d5b4befe 100644 --- a/searx/engines/startpage.py +++ b/searx/engines/startpage.py @@ -13,6 +13,7 @@ from lxml import html from cgi import escape import re +from searx.engines.xpath import extract_text # engine dependent config categories = ['general'] @@ -45,8 +46,7 @@ def request(query, params): # set language if specified if params['language'] != 'all': - params['data']['with_language'] = ('lang_' + - params['language'].split('_')[0]) + params['data']['with_language'] = ('lang_' + params['language'].split('_')[0]) return params @@ -64,18 +64,15 @@ def response(resp): continue link = links[0] url = link.attrib.get('href') - try: - title = escape(link.text_content()) - except UnicodeDecodeError: - continue # block google-ad url's if re.match("^http(s|)://www.google.[a-z]+/aclk.*$", url): continue + title = escape(extract_text(link)) + if result.xpath('./p[@class="desc"]'): - content = escape(result.xpath('./p[@class="desc"]')[0] - .text_content()) + content = escape(extract_text(result.xpath('./p[@class="desc"]'))) else: content = '' diff --git a/searx/tests/engines/test_startpage.py b/searx/tests/engines/test_startpage.py new file mode 100644 index 000000000..07f13ee27 --- /dev/null +++ b/searx/tests/engines/test_startpage.py @@ -0,0 +1,140 @@ +# -*- coding: utf-8 -*- +from collections import defaultdict +import mock +from searx.engines import startpage +from searx.testing import SearxTestCase + + +class TestStartpageEngine(SearxTestCase): + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 1 + dicto['language'] = 'fr_FR' + params = startpage.request(query, dicto) + self.assertIn('url', params) + self.assertIn('startpage.com', params['url']) + self.assertIn('data', params) + self.assertIn('query', params['data']) + self.assertIn(query, params['data']['query']) + self.assertIn('with_language', params['data']) + self.assertIn('lang_fr', params['data']['with_language']) + + dicto['language'] = 'all' + params = startpage.request(query, dicto) + self.assertNotIn('with_language', params['data']) + + def test_response(self): + self.assertRaises(AttributeError, startpage.response, None) + self.assertRaises(AttributeError, 
startpage.response, []) + self.assertRaises(AttributeError, startpage.response, '') + self.assertRaises(AttributeError, startpage.response, '[]') + + response = mock.Mock(content='') + self.assertEqual(startpage.response(response), []) + + html = """ +
+ This should be the title
+ This should be the content.
+ www.speedtest.net/fr/ - Navigation avec Ixquick Proxy - Mis en surbrillance
    +
    + """ + response = mock.Mock(content=html) + results = startpage.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['title'], 'This should be the title') + self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/') + self.assertEqual(results[0]['content'], 'This should be the content.') + + html = """ +
+ This should be the title
+ This should be the content.
+ www.speedtest.net/fr/ - Navigation avec Ixquick Proxy - Mis en surbrillance
+ This should be the content.
+ www.speedtest.net/fr/
    +
    + + """ + response = mock.Mock(content=html) + results = startpage.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['content'], '') diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index 7fa1e2b8b..0a3559665 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -23,6 +23,7 @@ from searx.tests.engines.test_searchcode_code import * # noqa from searx.tests.engines.test_searchcode_doc import * # noqa from searx.tests.engines.test_soundcloud import * # noqa from searx.tests.engines.test_stackoverflow import * # noqa +from searx.tests.engines.test_startpage import * # noqa from searx.tests.engines.test_subtitleseeker import * # noqa from searx.tests.engines.test_twitter import * # noqa from searx.tests.engines.test_vimeo import * # noqa From d0a1df881aa29be713f7446ffff746043b3d0302 Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Sat, 7 Feb 2015 01:15:04 +0100 Subject: [PATCH 09/14] Openstreetmap's unit test --- searx/engines/openstreetmap.py | 9 +- searx/tests/engines/test_openstreetmap.py | 199 ++++++++++++++++++++++ searx/tests/test_engines.py | 1 + 3 files changed, 205 insertions(+), 4 deletions(-) create mode 100644 searx/tests/engines/test_openstreetmap.py diff --git a/searx/engines/openstreetmap.py b/searx/engines/openstreetmap.py index 68446ef5f..60c3c13ca 100644 --- a/searx/engines/openstreetmap.py +++ b/searx/engines/openstreetmap.py @@ -38,6 +38,9 @@ def response(resp): # parse results for r in json: + if 'display_name' not in r: + continue + title = r['display_name'] osm_type = r.get('osm_type', r.get('type')) url = result_base_url.format(osm_type=osm_type, @@ -49,10 +52,8 @@ def response(resp): geojson = r.get('geojson') # if no geojson is found and osm_type is a node, add geojson Point - if not geojson and\ - osm_type == 'node': - geojson = {u'type': u'Point', - u'coordinates': [r['lon'], r['lat']]} + if not geojson and osm_type == 'node': + geojson = {u'type': u'Point', u'coordinates': [r['lon'], r['lat']]} address_raw = r.get('address') address = {} diff --git a/searx/tests/engines/test_openstreetmap.py b/searx/tests/engines/test_openstreetmap.py new file mode 100644 index 000000000..7b7783f04 --- /dev/null +++ b/searx/tests/engines/test_openstreetmap.py @@ -0,0 +1,199 @@ +# -*- coding: utf-8 -*- +from collections import defaultdict +import mock +from searx.engines import openstreetmap +from searx.testing import SearxTestCase + + +class TestOpenstreetmapEngine(SearxTestCase): + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 1 + params = openstreetmap.request(query, dicto) + self.assertIn('url', params) + self.assertIn(query, params['url']) + self.assertIn('openstreetmap.org', params['url']) + + def test_response(self): + self.assertRaises(AttributeError, openstreetmap.response, None) + self.assertRaises(AttributeError, openstreetmap.response, []) + self.assertRaises(AttributeError, openstreetmap.response, '') + self.assertRaises(AttributeError, openstreetmap.response, '[]') + + response = mock.Mock(text='{}') + self.assertEqual(openstreetmap.response(response), []) + + response = mock.Mock(text='{"data": []}') + self.assertEqual(openstreetmap.response(response), []) + + json = """ + [ + { + "place_id": "127732055", + "licence": "Data © OpenStreetMap contributors, ODbL 1.0. 
http://www.openstreetmap.org/copyright", + "osm_type": "relation", + "osm_id": "7444", + "boundingbox": [ + "48.8155755", + "48.902156", + "2.224122", + "2.4697602" + ], + "lat": "48.8565056", + "lon": "2.3521334", + "display_name": "This is the title", + "class": "place", + "type": "city", + "importance": 0.96893459932191, + "icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png", + "address": { + "city": "Paris", + "county": "Paris", + "state": "Île-de-France", + "country": "France", + "country_code": "fr" + }, + "geojson": { + "type": "Polygon", + "coordinates": [ + [ + [ + 2.224122, + 48.854199 + ] + ] + ] + } + } + ] + """ + response = mock.Mock(text=json) + results = openstreetmap.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['title'], 'This is the title') + self.assertEqual(results[0]['url'], 'https://openstreetmap.org/relation/7444') + self.assertIn('coordinates', results[0]['geojson']) + self.assertEqual(results[0]['geojson']['coordinates'][0][0][0], 2.224122) + self.assertEqual(results[0]['geojson']['coordinates'][0][0][1], 48.854199) + self.assertEqual(results[0]['address'], None) + self.assertIn('48.8155755', results[0]['boundingbox']) + self.assertIn('48.902156', results[0]['boundingbox']) + self.assertIn('2.224122', results[0]['boundingbox']) + self.assertIn('2.4697602', results[0]['boundingbox']) + + json = """ + [ + { + "place_id": "127732055", + "licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright", + "osm_type": "relation", + "osm_id": "7444", + "boundingbox": [ + "48.8155755", + "48.902156", + "2.224122", + "2.4697602" + ], + "lat": "48.8565056", + "lon": "2.3521334", + "display_name": "This is the title", + "class": "tourism", + "type": "city", + "importance": 0.96893459932191, + "icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png", + "address": { + "city": "Paris", + "county": "Paris", + "state": "Île-de-France", + "country": "France", + "country_code": "fr", + "address29": "Address" + }, + "geojson": { + "type": "Polygon", + "coordinates": [ + [ + [ + 2.224122, + 48.854199 + ] + ] + ] + } + }, + { + "place_id": "127732055", + "licence": "Data © OpenStreetMap contributors, ODbL 1.0. http://www.openstreetmap.org/copyright", + "osm_type": "relation", + "osm_id": "7444", + "boundingbox": [ + "48.8155755", + "48.902156", + "2.224122", + "2.4697602" + ], + "lat": "48.8565056", + "lon": "2.3521334", + "display_name": "This is the title", + "class": "tourism", + "type": "city", + "importance": 0.96893459932191, + "icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png", + "address": { + "city": "Paris", + "county": "Paris", + "state": "Île-de-France", + "country": "France", + "postcode": 75000, + "country_code": "fr" + }, + "geojson": { + "type": "Polygon", + "coordinates": [ + [ + [ + 2.224122, + 48.854199 + ] + ] + ] + } + }, + { + "place_id": "127732055", + "licence": "Data © OpenStreetMap contributors, ODbL 1.0. 
http://www.openstreetmap.org/copyright", + "osm_type": "node", + "osm_id": "7444", + "boundingbox": [ + "48.8155755", + "48.902156", + "2.224122", + "2.4697602" + ], + "lat": "48.8565056", + "lon": "2.3521334", + "display_name": "This is the title", + "class": "tourism", + "type": "city", + "importance": 0.96893459932191, + "icon": "https://nominatim.openstreetmap.org/images/mapicons/poi_place_city.p.20.png", + "address": { + "city": "Paris", + "county": "Paris", + "state": "Île-de-France", + "country": "France", + "country_code": "fr", + "address29": "Address" + } + } + ] + """ + response = mock.Mock(text=json) + results = openstreetmap.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 3) + self.assertIn('48.8565056', results[2]['geojson']['coordinates']) + self.assertIn('2.3521334', results[2]['geojson']['coordinates']) diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index 0a3559665..cd2f3fdfc 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -18,6 +18,7 @@ from searx.tests.engines.test_google_news import * # noqa from searx.tests.engines.test_kickass import * # noqa from searx.tests.engines.test_mediawiki import * # noqa from searx.tests.engines.test_mixcloud import * # noqa +from searx.tests.engines.test_openstreetmap import * # noqa from searx.tests.engines.test_piratebay import * # noqa from searx.tests.engines.test_searchcode_code import * # noqa from searx.tests.engines.test_searchcode_doc import * # noqa From fb04f76698f8fd7347aa54a42e99ecb7ea5f7df0 Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Sat, 7 Feb 2015 15:35:12 +0100 Subject: [PATCH 10/14] Faroo's unit test --- searx/engines/faroo.py | 8 +-- searx/tests/engines/test_faroo.py | 116 ++++++++++++++++++++++++++++++ searx/tests/test_engines.py | 1 + 3 files changed, 120 insertions(+), 5 deletions(-) create mode 100644 searx/tests/engines/test_faroo.py diff --git a/searx/engines/faroo.py b/searx/engines/faroo.py index 5360ea156..4a5e60a60 100644 --- a/searx/engines/faroo.py +++ b/searx/engines/faroo.py @@ -37,7 +37,7 @@ search_category = {'general': 'web', # do search-request def request(query, params): - offset = (params['pageno']-1) * number_of_results + 1 + offset = (params['pageno'] - 1) * number_of_results + 1 categorie = search_category.get(params['category'], 'web') if params['language'] == 'all': @@ -45,11 +45,11 @@ def request(query, params): else: language = params['language'].split('_')[0] - # skip, if language is not supported + # if language is not supported, put it in english if language != 'en' and\ language != 'de' and\ language != 'zh': - return params + language = 'en' params['url'] = search_url.format(offset=offset, number_of_results=number_of_results, @@ -69,12 +69,10 @@ def response(resp): # HTTP-Code 401: api-key is not valide if resp.status_code == 401: raise Exception("API key is not valide") - return [] # HTTP-Code 429: rate limit exceeded if resp.status_code == 429: raise Exception("rate limit has been exceeded!") - return [] results = [] diff --git a/searx/tests/engines/test_faroo.py b/searx/tests/engines/test_faroo.py new file mode 100644 index 000000000..acebdda86 --- /dev/null +++ b/searx/tests/engines/test_faroo.py @@ -0,0 +1,116 @@ +# -*- coding: utf-8 -*- +from collections import defaultdict +import mock +from searx.engines import faroo +from searx.testing import SearxTestCase + + +class TestFarooEngine(SearxTestCase): + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + 
dicto['pageno'] = 1 + dicto['language'] = 'fr_FR' + dicto['category'] = 'general' + params = faroo.request(query, dicto) + self.assertIn('url', params) + self.assertIn(query, params['url']) + self.assertIn('faroo.com', params['url']) + self.assertIn('en', params['url']) + self.assertIn('web', params['url']) + + dicto['language'] = 'all' + params = faroo.request(query, dicto) + self.assertIn('en', params['url']) + + dicto['language'] = 'de_DE' + params = faroo.request(query, dicto) + self.assertIn('de', params['url']) + + def test_response(self): + self.assertRaises(AttributeError, faroo.response, None) + self.assertRaises(AttributeError, faroo.response, []) + self.assertRaises(AttributeError, faroo.response, '') + self.assertRaises(AttributeError, faroo.response, '[]') + + response = mock.Mock(text='{}') + self.assertEqual(faroo.response(response), []) + + response = mock.Mock(text='{"data": []}') + self.assertEqual(faroo.response(response), []) + + response = mock.Mock(text='{"data": []}', status_code=401) + self.assertRaises(Exception, faroo.response, response) + + response = mock.Mock(text='{"data": []}', status_code=429) + self.assertRaises(Exception, faroo.response, response) + + json = """ + { + "results": [ + { + "title": "This is the title", + "kwic": "This is the content", + "content": "", + "url": "http://this.is.the.url/", + "iurl": "", + "domain": "css3test.com", + "author": "Jim Dalrymple", + "news": true, + "votes": "10", + "date": 1360622563000, + "related": [] + }, + { + "title": "This is the title2", + "kwic": "This is the content2", + "content": "", + "url": "http://this.is.the.url2/", + "iurl": "", + "domain": "css3test.com", + "author": "Jim Dalrymple", + "news": false, + "votes": "10", + "related": [] + }, + { + "title": "This is the title3", + "kwic": "This is the content3", + "content": "", + "url": "http://this.is.the.url3/", + "iurl": "http://upload.wikimedia.org/optimized.jpg", + "domain": "css3test.com", + "author": "Jim Dalrymple", + "news": false, + "votes": "10", + "related": [] + } + ], + "query": "test", + "suggestions": [], + "count": 100, + "start": 1, + "length": 10, + "time": "15" + } + """ + response = mock.Mock(text=json) + results = faroo.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 4) + self.assertEqual(results[0]['title'], 'This is the title') + self.assertEqual(results[0]['url'], 'http://this.is.the.url/') + self.assertEqual(results[0]['content'], 'This is the content') + self.assertEqual(results[1]['title'], 'This is the title2') + self.assertEqual(results[1]['url'], 'http://this.is.the.url2/') + self.assertEqual(results[1]['content'], 'This is the content2') + self.assertEqual(results[3]['img_src'], 'http://upload.wikimedia.org/optimized.jpg') + + json = """ + {} + """ + response = mock.Mock(text=json) + results = faroo.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 0) diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index cd2f3fdfc..e3e0938cf 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -9,6 +9,7 @@ from searx.tests.engines.test_deviantart import * # noqa from searx.tests.engines.test_digg import * # noqa from searx.tests.engines.test_duckduckgo import * # noqa from searx.tests.engines.test_dummy import * # noqa +from searx.tests.engines.test_faroo import * # noqa from searx.tests.engines.test_flickr import * # noqa from searx.tests.engines.test_flickr_noapi import * # noqa from 
searx.tests.engines.test_github import * # noqa From 0a537d3b89964c227724d3cd95adebd2bbafb720 Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Mon, 9 Feb 2015 16:55:01 +0100 Subject: [PATCH 11/14] Yacy's unit test --- searx/engines/yacy.py | 19 ++++--- searx/tests/engines/test_yacy.py | 96 ++++++++++++++++++++++++++++++++ searx/tests/test_engines.py | 3 +- 3 files changed, 109 insertions(+), 9 deletions(-) create mode 100644 searx/tests/engines/test_yacy.py diff --git a/searx/engines/yacy.py b/searx/engines/yacy.py index 17e2a7aab..3d26c9cc4 100644 --- a/searx/engines/yacy.py +++ b/searx/engines/yacy.py @@ -25,10 +25,10 @@ number_of_results = 5 # search-url base_url = 'http://localhost:8090' search_url = '/yacysearch.json?{query}'\ - '&startRecord={offset}'\ - '&maximumRecords={limit}'\ - '&contentdom={search_type}'\ - '&resource=global' # noqa + '&startRecord={offset}'\ + '&maximumRecords={limit}'\ + '&contentdom={search_type}'\ + '&resource=global' # yacy specific type-definitions search_types = {'general': 'text', @@ -41,7 +41,7 @@ search_types = {'general': 'text', # do search-request def request(query, params): offset = (params['pageno'] - 1) * number_of_results - search_type = search_types.get(params['category'], '0') + search_type = search_types.get(params.get('category'), '0') params['url'] = base_url +\ search_url.format(query=urlencode({'query': query}), @@ -66,9 +66,12 @@ def response(resp): if not raw_search_results: return [] - search_results = raw_search_results.get('channels', {})[0].get('items', []) + search_results = raw_search_results.get('channels', []) - for result in search_results: + if len(search_results) == 0: + return [] + + for result in search_results[0].get('items', []): # parse image results if result.get('image'): # append result @@ -88,7 +91,7 @@ def response(resp): 'content': result['description'], 'publishedDate': publishedDate}) - #TODO parse video, audio and file results + # TODO parse video, audio and file results # return results return results diff --git a/searx/tests/engines/test_yacy.py b/searx/tests/engines/test_yacy.py new file mode 100644 index 000000000..f49532cf4 --- /dev/null +++ b/searx/tests/engines/test_yacy.py @@ -0,0 +1,96 @@ +from collections import defaultdict +import mock +from searx.engines import yacy +from searx.testing import SearxTestCase + + +class TestYacyEngine(SearxTestCase): + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 1 + dicto['language'] = 'fr_FR' + params = yacy.request(query, dicto) + self.assertIn('url', params) + self.assertIn(query, params['url']) + self.assertIn('localhost', params['url']) + self.assertIn('fr', params['url']) + + dicto['language'] = 'all' + params = yacy.request(query, dicto) + self.assertIn('url', params) + self.assertNotIn('lr=lang_', params['url']) + + def test_response(self): + self.assertRaises(AttributeError, yacy.response, None) + self.assertRaises(AttributeError, yacy.response, []) + self.assertRaises(AttributeError, yacy.response, '') + self.assertRaises(AttributeError, yacy.response, '[]') + + response = mock.Mock(text='{}') + self.assertEqual(yacy.response(response), []) + + response = mock.Mock(text='{"data": []}') + self.assertEqual(yacy.response(response), []) + + json = """ + { + "channels": [ + { + "title": "YaCy P2P-Search for test", + "description": "Search for test", + "link": "http://search.yacy.de:7001/yacysearch.html?query=test&resource=global&contentdom=0", + "image": { + "url": 
"http://search.yacy.de:7001/env/grafics/yacy.png", + "title": "Search for test", + "link": "http://search.yacy.de:7001/yacysearch.html?query=test&resource=global&contentdom=0" + }, + "totalResults": "249", + "startIndex": "0", + "itemsPerPage": "5", + "searchTerms": "test", + "items": [ + { + "title": "This is the title", + "link": "http://this.is.the.url", + "code": "", + "description": "This should be the content", + "pubDate": "Sat, 08 Jun 2013 02:00:00 +0200", + "size": "44213", + "sizename": "43 kbyte", + "guid": "lzh_1T_5FP-A", + "faviconCode": "XTS4uQ_5FP-A", + "host": "www.gamestar.de", + "path": "/spiele/city-of-heroes-freedom/47019.html", + "file": "47019.html", + "urlhash": "lzh_1T_5FP-A", + "ranking": "0.20106804" + }, + { + "title": "This is the title2", + "icon": "/ViewImage.png?maxwidth=96&maxheight=96&code=7EbAbW6BpPOA", + "image": "http://image.url/image.png", + "cache": "/ViewImage.png?quadratic=&url=http://golem.ivwbox.de/cgi-bin/ivw/CP/G_INET?d=14071378", + "url": "http://this.is.the.url", + "urlhash": "7EbAbW6BpPOA", + "host": "www.golem.de", + "width": "-1", + "height": "-1" + } + ] + } + ] + } + """ + response = mock.Mock(text=json) + results = yacy.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 2) + self.assertEqual(results[0]['title'], 'This is the title') + self.assertEqual(results[0]['url'], 'http://this.is.the.url') + self.assertEqual(results[0]['content'], 'This should be the content') + self.assertEqual(results[1]['img_src'], 'http://image.url/image.png') + self.assertEqual(results[1]['content'], '') + self.assertEqual(results[1]['url'], 'http://this.is.the.url') + self.assertEqual(results[1]['title'], 'This is the title2') diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index e3e0938cf..4a27f5adb 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -30,6 +30,7 @@ from searx.tests.engines.test_subtitleseeker import * # noqa from searx.tests.engines.test_twitter import * # noqa from searx.tests.engines.test_vimeo import * # noqa from searx.tests.engines.test_www500px import * # noqa -from searx.tests.engines.test_youtube import * # noqa +from searx.tests.engines.test_yacy import * # noqa from searx.tests.engines.test_yahoo import * # noqa +from searx.tests.engines.test_youtube import * # noqa from searx.tests.engines.test_yahoo_news import * # noqa From 8c2a5f04926473a2a89667b7603e21cc31b80f61 Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Mon, 9 Feb 2015 18:28:08 +0100 Subject: [PATCH 12/14] DDG Definitions' unit tests --- searx/engines/duckduckgo_definitions.py | 5 +- .../engines/test_duckduckgo_definitions.py | 250 ++++++++++++++++++ searx/tests/test_engines.py | 1 + 3 files changed, 254 insertions(+), 2 deletions(-) create mode 100644 searx/tests/engines/test_duckduckgo_definitions.py diff --git a/searx/engines/duckduckgo_definitions.py b/searx/engines/duckduckgo_definitions.py index b66d6c0f2..793e97d22 100644 --- a/searx/engines/duckduckgo_definitions.py +++ b/searx/engines/duckduckgo_definitions.py @@ -25,9 +25,10 @@ def request(query, params): def response(resp): - search_res = json.loads(resp.text) results = [] + search_res = json.loads(resp.text) + content = '' heading = search_res.get('Heading', '') attributes = [] @@ -68,7 +69,7 @@ def response(resp): results.append({'title': heading, 'url': firstURL}) # related topics - for ddg_result in search_res.get('RelatedTopics', None): + for ddg_result in search_res.get('RelatedTopics', []): if 'FirstURL' in 
ddg_result: suggestion = result_to_text(ddg_result.get('FirstURL', None), ddg_result.get('Text', None), diff --git a/searx/tests/engines/test_duckduckgo_definitions.py b/searx/tests/engines/test_duckduckgo_definitions.py new file mode 100644 index 000000000..71c84235c --- /dev/null +++ b/searx/tests/engines/test_duckduckgo_definitions.py @@ -0,0 +1,250 @@ +from collections import defaultdict +import mock +from searx.engines import duckduckgo_definitions +from searx.testing import SearxTestCase + + +class TestDDGDefinitionsEngine(SearxTestCase): + + def test_result_to_text(self): + url = '' + text = 'Text' + html_result = 'Html' + result = duckduckgo_definitions.result_to_text(url, text, html_result) + self.assertEqual(result, text) + + html_result = 'Text in link' + result = duckduckgo_definitions.result_to_text(url, text, html_result) + self.assertEqual(result, 'Text in link') + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 1 + params = duckduckgo_definitions.request(query, dicto) + self.assertIn('url', params) + self.assertIn(query, params['url']) + self.assertIn('duckduckgo.com', params['url']) + + def test_response(self): + self.assertRaises(AttributeError, duckduckgo_definitions.response, None) + self.assertRaises(AttributeError, duckduckgo_definitions.response, []) + self.assertRaises(AttributeError, duckduckgo_definitions.response, '') + self.assertRaises(AttributeError, duckduckgo_definitions.response, '[]') + + response = mock.Mock(text='{}') + self.assertEqual(duckduckgo_definitions.response(response), []) + + response = mock.Mock(text='{"data": []}') + self.assertEqual(duckduckgo_definitions.response(response), []) + + json = """ + { + "DefinitionSource": "definition source", + "Heading": "heading", + "ImageWidth": 0, + "RelatedTopics": [ + { + "Result": "Top-level domains", + "Icon": { + "URL": "", + "Height": "", + "Width": "" + }, + "FirstURL": "https://first.url", + "Text": "text" + }, + { + "Topics": [ + { + "Result": "result topic", + "Icon": { + "URL": "", + "Height": "", + "Width": "" + }, + "FirstURL": "https://duckduckgo.com/?q=2%2F2", + "Text": "result topic text" + } + ], + "Name": "name" + } + ], + "Entity": "Entity", + "Type": "A", + "Redirect": "", + "DefinitionURL": "http://definition.url", + "AbstractURL": "https://abstract.url", + "Definition": "this is the definition", + "AbstractSource": "abstract source", + "Infobox": { + "content": [ + { + "data_type": "string", + "value": "1999", + "label": "Introduced", + "wiki_order": 0 + } + ], + "meta": [ + { + "data_type": "string", + "value": ".test", + "label": "article_title" + } + ] + }, + "Image": "image.png", + "ImageIsLogo": 0, + "Abstract": "abstract", + "AbstractText": "abstract text", + "AnswerType": "", + "ImageHeight": 0, + "Results": [{ + "Result" : "result title", + "Icon" : { + "URL" : "result url", + "Height" : 16, + "Width" : 16 + }, + "FirstURL" : "result first url", + "Text" : "result text" + } + ], + "Answer": "answer" + } + """ + response = mock.Mock(text=json) + results = duckduckgo_definitions.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 4) + self.assertEqual(results[0]['answer'], 'answer') + self.assertEqual(results[1]['title'], 'heading') + self.assertEqual(results[1]['url'], 'result first url') + self.assertEqual(results[2]['suggestion'], 'text') + self.assertEqual(results[3]['infobox'], 'heading') + self.assertEqual(results[3]['id'], 'http://definition.url') + 
self.assertEqual(results[3]['entity'], 'Entity') + self.assertIn('abstract', results[3]['content']) + self.assertIn('this is the definition', results[3]['content']) + self.assertEqual(results[3]['img_src'], 'image.png') + self.assertIn('Introduced', results[3]['attributes'][0]['label']) + self.assertIn('1999', results[3]['attributes'][0]['value']) + self.assertIn({'url': 'https://abstract.url', 'title': 'abstract source'}, results[3]['urls']) + self.assertIn({'url': 'http://definition.url', 'title': 'definition source'}, results[3]['urls']) + self.assertIn({'name': 'name', 'suggestions': ['result topic text']}, results[3]['relatedTopics']) + + json = """ + { + "DefinitionSource": "definition source", + "Heading": "heading", + "ImageWidth": 0, + "RelatedTopics": [], + "Entity": "Entity", + "Type": "A", + "Redirect": "", + "DefinitionURL": "", + "AbstractURL": "https://abstract.url", + "Definition": "", + "AbstractSource": "abstract source", + "Image": "", + "ImageIsLogo": 0, + "Abstract": "", + "AbstractText": "abstract text", + "AnswerType": "", + "ImageHeight": 0, + "Results": [], + "Answer": "" + } + """ + response = mock.Mock(text=json) + results = duckduckgo_definitions.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['url'], 'https://abstract.url') + self.assertEqual(results[0]['title'], 'heading') + self.assertEqual(results[0]['content'], '') + + json = """ + { + "DefinitionSource": "definition source", + "Heading": "heading", + "ImageWidth": 0, + "RelatedTopics": [ + { + "Result": "Top-level domains", + "Icon": { + "URL": "", + "Height": "", + "Width": "" + }, + "FirstURL": "https://first.url", + "Text": "heading" + }, + { + "Name": "name" + }, + { + "Topics": [ + { + "Result": "result topic", + "Icon": { + "URL": "", + "Height": "", + "Width": "" + }, + "FirstURL": "https://duckduckgo.com/?q=2%2F2", + "Text": "heading" + } + ], + "Name": "name" + } + ], + "Entity": "Entity", + "Type": "A", + "Redirect": "", + "DefinitionURL": "http://definition.url", + "AbstractURL": "https://abstract.url", + "Definition": "this is the definition", + "AbstractSource": "abstract source", + "Infobox": { + "meta": [ + { + "data_type": "string", + "value": ".test", + "label": "article_title" + } + ] + }, + "Image": "image.png", + "ImageIsLogo": 0, + "Abstract": "abstract", + "AbstractText": "abstract text", + "AnswerType": "", + "ImageHeight": 0, + "Results": [{ + "Result" : "result title", + "Icon" : { + "URL" : "result url", + "Height" : 16, + "Width" : 16 + }, + "Text" : "result text" + } + ], + "Answer": "" + } + """ + response = mock.Mock(text=json) + results = duckduckgo_definitions.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['infobox'], 'heading') + self.assertEqual(results[0]['id'], 'http://definition.url') + self.assertEqual(results[0]['entity'], 'Entity') + self.assertIn('abstract', results[0]['content']) + self.assertIn('this is the definition', results[0]['content']) + self.assertEqual(results[0]['img_src'], 'image.png') + self.assertIn({'url': 'https://abstract.url', 'title': 'abstract source'}, results[0]['urls']) + self.assertIn({'url': 'http://definition.url', 'title': 'definition source'}, results[0]['urls']) + self.assertIn({'name': 'name', 'suggestions': []}, results[0]['relatedTopics']) diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index 4a27f5adb..0570a5296 100644 --- a/searx/tests/test_engines.py +++ 
b/searx/tests/test_engines.py @@ -8,6 +8,7 @@ from searx.tests.engines.test_deezer import * # noqa from searx.tests.engines.test_deviantart import * # noqa from searx.tests.engines.test_digg import * # noqa from searx.tests.engines.test_duckduckgo import * # noqa +from searx.tests.engines.test_duckduckgo_definitions import * # noqa from searx.tests.engines.test_dummy import * # noqa from searx.tests.engines.test_faroo import * # noqa from searx.tests.engines.test_flickr import * # noqa From f703a77fc9d44e1a2718a4c26a507973f3bf976d Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Tue, 10 Feb 2015 18:44:49 +0100 Subject: [PATCH 13/14] Photon's unit test As in Flickr, a continue is not detected by coverage as covered but it is. --- searx/engines/photon.py | 2 +- searx/tests/engines/test_photon.py | 166 +++++++++++++++++++++++++++++ searx/tests/test_engines.py | 1 + 3 files changed, 168 insertions(+), 1 deletion(-) create mode 100644 searx/tests/engines/test_photon.py diff --git a/searx/engines/photon.py b/searx/engines/photon.py index 16340d24a..a9c558c4b 100644 --- a/searx/engines/photon.py +++ b/searx/engines/photon.py @@ -61,7 +61,7 @@ def response(resp): continue # get title - title = properties['name'] + title = properties.get('name') # get osm-type if properties.get('osm_type') == 'N': diff --git a/searx/tests/engines/test_photon.py b/searx/tests/engines/test_photon.py new file mode 100644 index 000000000..734497884 --- /dev/null +++ b/searx/tests/engines/test_photon.py @@ -0,0 +1,166 @@ +# -*- coding: utf-8 -*- +from collections import defaultdict +import mock +from searx.engines import photon +from searx.testing import SearxTestCase + + +class TestPhotonEngine(SearxTestCase): + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 1 + dicto['language'] = 'all' + params = photon.request(query, dicto) + self.assertIn('url', params) + self.assertIn(query, params['url']) + self.assertIn('photon.komoot.de', params['url']) + + dicto['language'] = 'all' + params = photon.request(query, dicto) + self.assertNotIn('lang', params['url']) + + dicto['language'] = 'al' + params = photon.request(query, dicto) + self.assertNotIn('lang', params['url']) + + dicto['language'] = 'fr' + params = photon.request(query, dicto) + self.assertIn('fr', params['url']) + + def test_response(self): + self.assertRaises(AttributeError, photon.response, None) + self.assertRaises(AttributeError, photon.response, []) + self.assertRaises(AttributeError, photon.response, '') + self.assertRaises(AttributeError, photon.response, '[]') + + response = mock.Mock(text='{}') + self.assertEqual(photon.response(response), []) + + response = mock.Mock(text='{"data": []}') + self.assertEqual(photon.response(response), []) + + json = """ + { + "features": [ + { + "properties": { + "osm_key": "waterway", + "extent": [ + -1.4508446, + 51.1614997, + -1.4408036, + 51.1525635 + ], + "name": "This is the title", + "state": "England", + "osm_id": 114823817, + "osm_type": "W", + "osm_value": "river", + "city": "Test Valley", + "country": "United Kingdom" + }, + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": [ + -1.4458571, + 51.1576661 + ] + } + }, + { + "properties": { + "osm_key": "place", + "street": "Rue", + "state": "Ile-de-France", + "osm_id": 129211377, + "osm_type": "R", + "housenumber": "10", + "postcode": "75011", + "osm_value": "house", + "city": "Paris", + "country": "France" + }, + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": [ + 
2.3725025, + 48.8654481 + ] + } + }, + { + "properties": { + "osm_key": "amenity", + "street": "Allée", + "name": "Bibliothèque", + "state": "Ile-de-France", + "osm_id": 1028573132, + "osm_type": "N", + "postcode": "75001", + "osm_value": "library", + "city": "Paris", + "country": "France" + }, + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": [ + 2.3445634, + 48.862494 + ] + } + }, + { + "properties": { + "osm_key": "amenity", + "osm_id": 1028573132, + "osm_type": "Y", + "postcode": "75001", + "osm_value": "library", + "city": "Paris", + "country": "France" + }, + "type": "Feature", + "geometry": { + "type": "Point", + "coordinates": [ + 2.3445634, + 48.862494 + ] + } + }, + { + } + ], + "type": "FeatureCollection" + } + """ + response = mock.Mock(text=json) + results = photon.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 3) + self.assertEqual(results[0]['title'], 'This is the title') + self.assertEqual(results[0]['content'], '') + self.assertEqual(results[0]['longitude'], -1.4458571) + self.assertEqual(results[0]['latitude'], 51.1576661) + self.assertIn(-1.4508446, results[0]['boundingbox']) + self.assertIn(51.1614997, results[0]['boundingbox']) + self.assertIn(-1.4408036, results[0]['boundingbox']) + self.assertIn(51.1525635, results[0]['boundingbox']) + self.assertIn('type', results[0]['geojson']) + self.assertEqual(results[0]['geojson']['type'], 'Point') + self.assertEqual(results[0]['address'], None) + self.assertEqual(results[0]['osm']['type'], 'way') + self.assertEqual(results[0]['osm']['id'], 114823817) + self.assertEqual(results[0]['url'], 'https://openstreetmap.org/way/114823817') + self.assertEqual(results[1]['osm']['type'], 'relation') + self.assertEqual(results[2]['address']['name'], u'Bibliothèque') + self.assertEqual(results[2]['address']['house_number'], None) + self.assertEqual(results[2]['address']['locality'], 'Paris') + self.assertEqual(results[2]['address']['postcode'], '75001') + self.assertEqual(results[2]['address']['country'], 'France') + self.assertEqual(results[2]['osm']['type'], 'node') diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index 0570a5296..966b5f1be 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -21,6 +21,7 @@ from searx.tests.engines.test_kickass import * # noqa from searx.tests.engines.test_mediawiki import * # noqa from searx.tests.engines.test_mixcloud import * # noqa from searx.tests.engines.test_openstreetmap import * # noqa +from searx.tests.engines.test_photon import * # noqa from searx.tests.engines.test_piratebay import * # noqa from searx.tests.engines.test_searchcode_code import * # noqa from searx.tests.engines.test_searchcode_doc import * # noqa From f96154b7c454a3b02bf688f248b4471c2020c28f Mon Sep 17 00:00:00 2001 From: Cqoicebordel Date: Wed, 11 Feb 2015 17:16:52 +0100 Subject: [PATCH 14/14] Google's unit test --- searx/tests/engines/test_google.py | 162 +++++++++++++++++++++++++++++ searx/tests/test_engines.py | 3 +- 2 files changed, 164 insertions(+), 1 deletion(-) create mode 100644 searx/tests/engines/test_google.py diff --git a/searx/tests/engines/test_google.py b/searx/tests/engines/test_google.py new file mode 100644 index 000000000..2c3d8e5f6 --- /dev/null +++ b/searx/tests/engines/test_google.py @@ -0,0 +1,162 @@ +# -*- coding: utf-8 -*- +from collections import defaultdict +import mock +import lxml +from searx.engines import google +from searx.testing import SearxTestCase + + +class 
TestGoogleEngine(SearxTestCase): + + def test_request(self): + query = 'test_query' + dicto = defaultdict(dict) + dicto['pageno'] = 1 + dicto['language'] = 'fr_FR' + params = google.request(query, dicto) + self.assertIn('url', params) + self.assertIn(query, params['url']) + self.assertIn('google.com', params['url']) + self.assertIn('PREF', params['cookies']) + self.assertIn('fr', params['headers']['Accept-Language']) + + dicto['language'] = 'all' + params = google.request(query, dicto) + self.assertIn('en', params['headers']['Accept-Language']) + + def test_response(self): + self.assertRaises(AttributeError, google.response, None) + self.assertRaises(AttributeError, google.response, []) + self.assertRaises(AttributeError, google.response, '') + self.assertRaises(AttributeError, google.response, '[]') + + response = mock.Mock(text='') + self.assertEqual(google.response(response), []) + + html = """ +
        <!-- fixture omitted: one complete web result (title "This is the title", link http://this.should.be.the.link/, displayed URL test.psychologies.com/, snippet "This should be the content."), several incomplete results the parser must skip, and one suggestion ("suggestion title") -->
    + """ + response = mock.Mock(text=html) + results = google.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 2) + self.assertEqual(results[0]['title'], 'This is the title') + self.assertEqual(results[0]['url'], 'http://this.should.be.the.link/') + self.assertEqual(results[0]['content'], 'This should be the content.') + self.assertEqual(results[1]['suggestion'], 'suggestion title') + + html = """ +
        <!-- fixture omitted: two incomplete result items, expected to produce no results -->
    + """ + response = mock.Mock(text=html) + results = google.response(response) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 0) + + def test_parse_images(self): + html = """ +
        <!-- fixture omitted: one image result linking to http://this.is.the.url/ with image source https://this.is.the.image/image.jpg -->
    + """ + dom = lxml.html.fromstring(html) + results = google.parse_images(dom) + self.assertEqual(type(results), list) + self.assertEqual(len(results), 1) + self.assertEqual(results[0]['url'], 'http://this.is.the.url/') + self.assertEqual(results[0]['title'], '') + self.assertEqual(results[0]['content'], '') + self.assertEqual(results[0]['img_src'], 'https://this.is.the.image/image.jpg') diff --git a/searx/tests/test_engines.py b/searx/tests/test_engines.py index 966b5f1be..81296c304 100644 --- a/searx/tests/test_engines.py +++ b/searx/tests/test_engines.py @@ -14,7 +14,7 @@ from searx.tests.engines.test_faroo import * # noqa from searx.tests.engines.test_flickr import * # noqa from searx.tests.engines.test_flickr_noapi import * # noqa from searx.tests.engines.test_github import * # noqa -from searx.tests.engines.test_www1x import * # noqa +from searx.tests.engines.test_google import * # noqa from searx.tests.engines.test_google_images import * # noqa from searx.tests.engines.test_google_news import * # noqa from searx.tests.engines.test_kickass import * # noqa @@ -31,6 +31,7 @@ from searx.tests.engines.test_startpage import * # noqa from searx.tests.engines.test_subtitleseeker import * # noqa from searx.tests.engines.test_twitter import * # noqa from searx.tests.engines.test_vimeo import * # noqa +from searx.tests.engines.test_www1x import * # noqa from searx.tests.engines.test_www500px import * # noqa from searx.tests.engines.test_yacy import * # noqa from searx.tests.engines.test_yahoo import * # noqa
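
A side note on the yacy.py hunk in PATCH 11/14 above: the old code indexed the value returned by .get(), so an empty or missing "channels" entry raised before .get('items') was ever reached, which is why the patch checks the list length first. A minimal standalone illustration of the difference (plain Python with assumed data shapes, not part of any patch):

    raw = {"channels": []}

    # old form: raw.get('channels', {})[0] raises IndexError on an empty list
    # (and KeyError on a missing key, because {}[0] is then a dict lookup)
    try:
        items = raw.get('channels', {})[0].get('items', [])
    except (IndexError, KeyError):
        items = None  # this is where the engine used to crash

    # new form: bail out early when there is nothing to index
    channels = raw.get('channels', [])
    items = [] if len(channels) == 0 else channels[0].get('items', [])

    assert items == []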