first commit
This commit is contained in:
@@ -0,0 +1,55 @@
|
||||
import os
|
||||
|
||||
import aiounittest
|
||||
|
||||
# Scrub any configuration that could leak into the test run: legacy SEARX_*
# variables and a possibly preset settings path.
for _stale_key in (
    'SEARX_DEBUG',
    'SEARX_DEBUG_LOG_LEVEL',
    'SEARX_DISABLE_ETC_SETTINGS',
    'SEARX_SETTINGS_PATH',
    'SEARXNG_SETTINGS_PATH',
):
    os.environ.pop(_stale_key, None)

# Force a deterministic configuration for the test suite.
os.environ['SEARXNG_DEBUG'] = '1'
os.environ['SEARXNG_DEBUG_LOG_LEVEL'] = 'WARNING'
os.environ['SEARXNG_DISABLE_ETC_SETTINGS'] = '1'
||||
class SearxTestLayer:
    """Base layer for non-robot tests.

    Every hook is intentionally a no-op; the layer only provides the
    set-up/tear-down protocol expected by the test runner.
    """

    __name__ = 'SearxTestLayer'

    @classmethod
    def setUp(cls):
        """Layer-level set-up hook (no-op)."""

    @classmethod
    def tearDown(cls):
        """Layer-level tear-down hook (no-op)."""

    @classmethod
    def testSetUp(cls):
        """Per-test set-up hook (no-op)."""

    @classmethod
    def testTearDown(cls):
        """Per-test tear-down hook (no-op)."""
|
||||
|
||||
|
||||
class SearxTestCase(aiounittest.AsyncTestCase):
    """Base test case for non-robot tests."""

    layer = SearxTestLayer

    def setattr4test(self, obj, attr, value):
        """
        setattr(obj, attr, value)
        but reset to the previous value in the cleanup.
        """
        saved_value = getattr(obj, attr)
        # restore the attribute when the test is torn down
        self.addCleanup(lambda: setattr(obj, attr, saved_value))
        setattr(obj, attr, value)
||||
@@ -0,0 +1,76 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# lint: pylint
|
||||
"""Shared testing code."""
|
||||
|
||||
# pylint: disable=missing-function-docstring
|
||||
|
||||
import sys
|
||||
import os
|
||||
import subprocess
|
||||
import traceback
|
||||
import pathlib
|
||||
|
||||
from splinter import Browser
|
||||
|
||||
import tests as searx_tests
|
||||
from tests.robot import test_webapp
|
||||
|
||||
|
||||
class SearxRobotLayer:
    """Searx Robot Test Layer: starts a real webapp server subprocess."""

    def setUp(self):
        """Launch the webapp as a subprocess serving the robot settings."""
        # create new process group, become its leader
        os.setpgrp()

        base_path = pathlib.Path(searx_tests.__file__).resolve().parent

        # program paths
        webapp_script = str(base_path.parent / 'searx' / 'webapp.py')
        interpreter = 'python'

        # The Flask app is started by Flask.run(...), don't enable Flask's debug
        # mode, the debugger from Flask will cause wired process model, where
        # the server never dies. Further read:
        #
        # - debug mode: https://flask.palletsprojects.com/quickstart/#debug-mode
        # - Flask.run(..): https://flask.palletsprojects.com/api/#flask.Flask.run
        os.environ['SEARXNG_DEBUG'] = '0'

        # set robot settings path
        os.environ['SEARXNG_SETTINGS_PATH'] = str(base_path / 'robot' / 'settings_robot.yml')

        # run the server (stderr merged into stdout)
        self.server = subprocess.Popen(  # pylint: disable=consider-using-with
            [interpreter, webapp_script], stdout=subprocess.PIPE, stderr=subprocess.STDOUT
        )
        if hasattr(self.server.stdout, 'read1'):
            # surface the first chunk of server output for debugging
            print(self.server.stdout.read1(1024).decode())

    def tearDown(self):
        """Kill the server process and drop the settings override."""
        os.kill(self.server.pid, 9)
        # remove previously set environment variable
        del os.environ['SEARXNG_SETTINGS_PATH']
|
||||
def run_robot_tests(tests):
    """Run each robot test callable in its own fresh Firefox session."""
    print('Running {0} tests'.format(len(tests)))
    profile_prefs = {'intl.accept_languages': 'en'}
    for test_func in tests:
        with Browser('firefox', headless=True, profile_preferences=profile_prefs) as browser:
            test_func(browser)
|
||||
|
||||
|
||||
def main():
    """Start the robot layer, run every ``test_*`` function from
    :py:mod:`tests.robot.test_webapp`, and always tear the layer down.

    Exits with status 1 when any test raises.
    """
    test_layer = SearxRobotLayer()
    try:
        test_layer.setUp()
        run_robot_tests([getattr(test_webapp, x) for x in dir(test_webapp) if x.startswith('test_')])
    except Exception:  # pylint: disable=broad-except
        # typo fixed in the message: "occured" -> "occurred"
        print('Error occurred: {0}'.format(traceback.format_exc()))
        sys.exit(1)
    finally:
        test_layer.tearDown()


if __name__ == '__main__':
    main()
|
||||
@@ -0,0 +1,59 @@
|
||||
# SearXNG settings used by the robot (browser) tests.
general:
  debug: false
  instance_name: "searx_test"

brand:
  git_url: https://github.com/searxng/searxng
  git_branch: master
  issue_url: https://github.com/searxng/searxng/issues
  new_issue_url: https://github.com/searxng/searxng/issues/new
  docs_url: https://docs.searxng.org
  public_instances: https://searx.space
  wiki_url: https://github.com/searxng/searxng/wiki

search:
  language: "all"

server:
  port: 11111
  bind_address: 127.0.0.1
  secret_key: "changedultrasecretkey"
  base_url: false
  http_protocol_version: "1.0"

ui:
  static_path: ""
  templates_path: ""
  default_theme: simple

preferences:
  lock: []

outgoing:
  request_timeout: 1.0 # seconds
  useragent_suffix: ""

categories_as_tabs:
  general:
  dummy:

engines:
  - name: general dummy
    engine: dummy
    categories: general
    shortcut: gd

  - name: dummy dummy
    engine: dummy
    categories: dummy
    shortcut: dd

doi_resolvers:
  oadoi.org: 'https://oadoi.org/'
  doi.org: 'https://doi.org/'
  doai.io: 'https://dissem.in/'
  sci-hub.se: 'https://sci-hub.se/'
  sci-hub.st: 'https://sci-hub.st/'
  sci-hub.ru: 'https://sci-hub.ru/'

default_doi_resolver: 'oadoi.org'
|
||||
@@ -0,0 +1,78 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# lint: pylint
|
||||
# pylint: disable=missing-module-docstring,missing-function-docstring
|
||||
|
||||
from time import sleep
|
||||
|
||||
url = "http://localhost:11111/"
|
||||
|
||||
|
||||
def test_index(browser):
|
||||
# Visit URL
|
||||
browser.visit(url)
|
||||
assert browser.is_text_present('searxng')
|
||||
|
||||
|
||||
def test_404(browser):
    """An unknown path renders the 'Page not found' error page."""
    browser.visit(url + 'missing_link')
    assert browser.is_text_present('Page not found')
|
||||
|
||||
|
||||
def test_about(browser):
    """Following the instance-name link leads to the about page."""
    browser.visit(url)
    browser.links.find_by_text('searxng').click()
    assert browser.is_text_present('Why use it?')
|
||||
|
||||
|
||||
def test_preferences(browser):
    """The preferences page is reachable and shows the expected sections."""
    browser.visit(url)
    browser.links.find_by_href('/preferences').click()

    assert browser.is_text_present('Preferences')
    assert browser.is_text_present('COOKIES')
    assert browser.is_element_present_by_xpath('//label[@for="checkbox_dummy"]')
|
||||
|
||||
|
||||
def test_preferences_engine_select(browser):
    """Enabling an engine on the preferences page persists after saving."""
    engines_tab = '//label[@for="tab-engines"]'
    engine_input = '//input[@id="engine_general_dummy__general"]'
    engine_label = '//label[@for="engine_general_dummy__general"]'

    browser.visit(url)
    browser.links.find_by_href('/preferences').click()

    assert browser.is_element_present_by_xpath(engines_tab)
    browser.find_by_xpath(engines_tab).first.click()

    # the dummy engine starts out disabled; enable it and save
    assert not browser.find_by_xpath(engine_input).first.checked
    browser.find_by_xpath(engine_label).first.check()
    browser.find_by_xpath('//input[@type="submit"]').first.click()

    # waiting for the redirect - without this the test is flaky..
    sleep(1)

    browser.visit(url)
    browser.links.find_by_href('/preferences').click()
    browser.find_by_xpath(engines_tab).first.click()

    assert browser.find_by_xpath(engine_input).first.checked
|
||||
|
||||
|
||||
def test_preferences_locale(browser):
    """Switching the UI locale to French translates the preferences page."""
    browser.visit(url)
    browser.links.find_by_href('/preferences').click()

    browser.find_by_xpath('//label[@for="tab-ui"]').first.click()
    browser.select('locale', 'fr')
    browser.find_by_xpath('//input[@type="submit"]').first.click()

    # waiting for the redirect - without this the test is flaky..
    sleep(1)

    browser.visit(url)
    browser.links.find_by_href('/preferences').click()
    # BUG FIX: the return value was previously discarded, so the final check
    # never actually asserted anything.
    assert browser.is_text_present('Préférences')
|
||||
|
||||
|
||||
def test_search(browser):
    """Submitting a query renders the (empty) results page."""
    browser.visit(url)
    browser.fill('q', 'test search query')
    browser.find_by_xpath('//button[@type="submit"]').first.click()
    assert browser.is_text_present("didn't find any results")
|
||||
@@ -0,0 +1,5 @@
|
||||
import os
|
||||
from os.path import dirname, sep, abspath
|
||||
|
||||
# In unit tests the user settings from unit/settings/test_settings.yml are used.
os.environ['SEARXNG_SETTINGS_PATH'] = abspath(
    dirname(__file__) + sep + 'settings' + sep + 'test_settings.yml'
)
||||
@@ -0,0 +1,237 @@
|
||||
'''
|
||||
searx is free software: you can redistribute it and/or modify
|
||||
it under the terms of the GNU Affero General Public License as published by
|
||||
the Free Software Foundation, either version 3 of the License, or
|
||||
(at your option) any later version.
|
||||
|
||||
searx is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
GNU Affero General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Affero General Public License
|
||||
along with searx. If not, see < http://www.gnu.org/licenses/ >.
|
||||
|
||||
'''
|
||||
|
||||
|
||||
from searx.engines import command as command_engine
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
class TestCommandEngine(SearxTestCase):
    """Tests for the ``command`` engine: delimiter/regex result parsing and
    the query-type restrictions (``path`` and ``enum``)."""

    def test_basic_seq_command_engine(self):
        """Each output line of ``seq`` becomes one key-value result."""
        ls_engine = command_engine
        ls_engine.command = ['seq', '{{QUERY}}']
        ls_engine.delimiter = {'chars': ' ', 'keys': ['number']}
        expected_results = [
            {'number': '1', 'template': 'key-value.html'},
            {'number': '2', 'template': 'key-value.html'},
            {'number': '3', 'template': 'key-value.html'},
            {'number': '4', 'template': 'key-value.html'},
            {'number': '5', 'template': 'key-value.html'},
        ]
        results = ls_engine.search('5'.encode('utf-8'), {'pageno': 1})
        self.assertEqual(results, expected_results)

    def test_delimiter_parsing_command_engine(self):
        """Lines are split on ':' into the delimiter keys; results are paged."""
        searx_logs = '''DEBUG:searx.webapp:static directory is /home/n/p/searx/searx/static
DEBUG:searx.webapp:templates directory is /home/n/p/searx/searx/templates
DEBUG:searx.engines:soundcloud engine: Starting background initialization
DEBUG:searx.engines:wolframalpha engine: Starting background initialization
DEBUG:searx.engines:locate engine: Starting background initialization
DEBUG:searx.engines:regex search in files engine: Starting background initialization
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): www.wolframalpha.com
DEBUG:urllib3.connectionpool:Starting new HTTPS connection (1): soundcloud.com
DEBUG:searx.engines:find engine: Starting background initialization
DEBUG:searx.engines:pattern search in files engine: Starting background initialization
DEBUG:searx.webapp:starting webserver on 127.0.0.1:8888
WARNING:werkzeug: * Debugger is active!
INFO:werkzeug: * Debugger PIN: 299-578-362'''
        echo_engine = command_engine
        echo_engine.command = ['echo', searx_logs]
        echo_engine.delimiter = {'chars': ':', 'keys': ['level', 'component', 'message']}

        expected_results_by_page = [
            [
                {
                    'component': 'searx.webapp',
                    'message': 'static directory is /home/n/p/searx/searx/static',
                    'template': 'key-value.html',
                    'level': 'DEBUG',
                },
                {
                    'component': 'searx.webapp',
                    'message': 'templates directory is /home/n/p/searx/searx/templates',
                    'template': 'key-value.html',
                    'level': 'DEBUG',
                },
                {
                    'component': 'searx.engines',
                    'message': 'soundcloud engine: Starting background initialization',
                    'template': 'key-value.html',
                    'level': 'DEBUG',
                },
                {
                    'component': 'searx.engines',
                    'message': 'wolframalpha engine: Starting background initialization',
                    'template': 'key-value.html',
                    'level': 'DEBUG',
                },
                {
                    'component': 'searx.engines',
                    'message': 'locate engine: Starting background initialization',
                    'template': 'key-value.html',
                    'level': 'DEBUG',
                },
                {
                    'component': 'searx.engines',
                    'message': 'regex search in files engine: Starting background initialization',
                    'template': 'key-value.html',
                    'level': 'DEBUG',
                },
                {
                    'component': 'urllib3.connectionpool',
                    'message': 'Starting new HTTPS connection (1): www.wolframalpha.com',
                    'template': 'key-value.html',
                    'level': 'DEBUG',
                },
                {
                    'component': 'urllib3.connectionpool',
                    'message': 'Starting new HTTPS connection (1): soundcloud.com',
                    'template': 'key-value.html',
                    'level': 'DEBUG',
                },
                {
                    'component': 'searx.engines',
                    'message': 'find engine: Starting background initialization',
                    'template': 'key-value.html',
                    'level': 'DEBUG',
                },
                {
                    'component': 'searx.engines',
                    'message': 'pattern search in files engine: Starting background initialization',
                    'template': 'key-value.html',
                    'level': 'DEBUG',
                },
            ],
            [
                {
                    'component': 'searx.webapp',
                    'message': 'starting webserver on 127.0.0.1:8888',
                    'template': 'key-value.html',
                    'level': 'DEBUG',
                },
                {
                    'component': 'werkzeug',
                    'message': ' * Debugger is active!',
                    'template': 'key-value.html',
                    'level': 'WARNING',
                },
                {
                    'component': 'werkzeug',
                    'message': ' * Debugger PIN: 299-578-362',
                    'template': 'key-value.html',
                    'level': 'INFO',
                },
            ],
        ]

        for i in [0, 1]:
            results = echo_engine.search(''.encode('utf-8'), {'pageno': i + 1})
            self.assertEqual(results, expected_results_by_page[i])

    def test_regex_parsing_command_engine(self):
        """Records split on ``result_separator`` are parsed via named regexes."""
        txt = '''commit 35f9a8c81d162a361b826bbcd4a1081a4fbe76a7
Author: Noémi Ványi <sitbackandwait@gmail.com>
Date: Tue Oct 15 11:31:33 2019 +0200

first interesting message

commit 6c3c206316153ccc422755512bceaa9ab0b14faa
Author: Noémi Ványi <sitbackandwait@gmail.com>
Date: Mon Oct 14 17:10:08 2019 +0200

second interesting message

commit d8594d2689b4d5e0d2f80250223886c3a1805ef5
Author: Noémi Ványi <sitbackandwait@gmail.com>
Date: Mon Oct 14 14:45:05 2019 +0200

third interesting message

commit '''
        git_log_engine = command_engine
        git_log_engine.command = ['echo', txt]
        git_log_engine.result_separator = '\n\ncommit '
        git_log_engine.delimiter = {}
        # FIX: use raw strings for the regex patterns; '\w' etc. in normal
        # strings are invalid escape sequences (DeprecationWarning, future
        # SyntaxError).  The regex engine treats r'\n' as a newline anyway.
        git_log_engine.parse_regex = {
            'commit': r'\w{40}',
            'author': r'[\w* ]* <\w*@?\w*\.?\w*>',
            'date': r'Date: .*',
            'message': r'\n\n.*$',
        }
        expected_results = [
            {
                'commit': '35f9a8c81d162a361b826bbcd4a1081a4fbe76a7',
                'author': ' Noémi Ványi <sitbackandwait@gmail.com>',
                'date': 'Date: Tue Oct 15 11:31:33 2019 +0200',
                'message': '\n\nfirst interesting message',
                'template': 'key-value.html',
            },
            {
                'commit': '6c3c206316153ccc422755512bceaa9ab0b14faa',
                'author': ' Noémi Ványi <sitbackandwait@gmail.com>',
                'date': 'Date: Mon Oct 14 17:10:08 2019 +0200',
                'message': '\n\nsecond interesting message',
                'template': 'key-value.html',
            },
            {
                'commit': 'd8594d2689b4d5e0d2f80250223886c3a1805ef5',
                'author': ' Noémi Ványi <sitbackandwait@gmail.com>',
                'date': 'Date: Mon Oct 14 14:45:05 2019 +0200',
                'message': '\n\nthird interesting message',
                'template': 'key-value.html',
            },
        ]

        results = git_log_engine.search(''.encode('utf-8'), {'pageno': 1})
        self.assertEqual(results, expected_results)

    def test_working_dir_path_query(self):
        """``path`` queries work for the working dir and reject escapes."""
        ls_engine = command_engine
        ls_engine.command = ['ls', '{{QUERY}}']
        ls_engine.result_separator = '\n'
        ls_engine.delimiter = {'chars': ' ', 'keys': ['file']}
        ls_engine.query_type = 'path'

        results = ls_engine.search('.'.encode(), {'pageno': 1})
        self.assertTrue(len(results) != 0)

        forbidden_paths = [
            '..',
            '../..',
            './..',
            '~',
            '/var',
        ]
        for forbidden_path in forbidden_paths:
            # BUG FIX: previously '..' was passed instead of the loop
            # variable, so most forbidden paths were never exercised.
            self.assertRaises(ValueError, ls_engine.search, forbidden_path.encode(), {'pageno': 1})

    def test_enum_queries(self):
        """``enum`` queries accept only the whitelisted values."""
        echo_engine = command_engine
        echo_engine.command = ['echo', '{{QUERY}}']
        echo_engine.query_type = 'enum'
        echo_engine.query_enum = ['i-am-allowed-to-say-this', 'and-that']

        for allowed in echo_engine.query_enum:
            results = echo_engine.search(allowed.encode(), {'pageno': 1})
            self.assertTrue(len(results) != 0)

        forbidden_queries = [
            'forbidden',
            'banned',
            'prohibited',
        ]
        for forbidden in forbidden_queries:
            self.assertRaises(ValueError, echo_engine.search, forbidden.encode(), {'pageno': 1})
||||
@@ -0,0 +1,120 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
from collections import defaultdict
|
||||
import mock
|
||||
from searx.engines import xpath
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
class TestXpathEngine(SearxTestCase):
    """Tests for the generic ``xpath`` engine: request URL building and
    response parsing, with and without ``results_xpath``.

    FIX: ``assertEquals`` is a long-deprecated alias removed in Python 3.12;
    replaced with ``assertEqual`` throughout.
    """

    def test_request(self):
        """The query (and page number when paging) is substituted in the URL."""
        xpath.search_url = 'https://url.com/{query}'
        xpath.categories = []
        xpath.paging = False
        query = 'test_query'
        dicto = defaultdict(dict)
        params = xpath.request(query, dicto)
        self.assertIn('url', params)
        self.assertEqual('https://url.com/test_query', params['url'])

        xpath.search_url = 'https://url.com/q={query}&p={pageno}'
        xpath.paging = True
        query = 'test_query'
        dicto = defaultdict(dict)
        dicto['pageno'] = 1
        params = xpath.request(query, dicto)
        self.assertIn('url', params)
        self.assertEqual('https://url.com/q=test_query&p=1', params['url'])

    def test_response(self):
        """Results are extracted per-field, optionally with cached URLs,
        onion flagging and a row-level ``results_xpath``."""
        # without results_xpath
        xpath.url_xpath = '//div[@class="search_result"]//a[@class="result"]/@href'
        xpath.title_xpath = '//div[@class="search_result"]//a[@class="result"]'
        xpath.content_xpath = '//div[@class="search_result"]//p[@class="content"]'

        self.assertRaises(AttributeError, xpath.response, None)
        self.assertRaises(AttributeError, xpath.response, [])
        self.assertRaises(AttributeError, xpath.response, '')
        self.assertRaises(AttributeError, xpath.response, '[]')

        response = mock.Mock(text='<html></html>')
        self.assertEqual(xpath.response(response), [])

        html = u"""
        <div>
            <div class="search_result">
                <a class="result" href="https://result1.com">Result 1</a>
                <p class="content">Content 1</p>
                <a class="cached" href="https://cachedresult1.com">Cache</a>
            </div>
            <div class="search_result">
                <a class="result" href="https://result2.com">Result 2</a>
                <p class="content">Content 2</p>
                <a class="cached" href="https://cachedresult2.com">Cache</a>
            </div>
        </div>
        """
        response = mock.Mock(text=html)
        results = xpath.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['title'], 'Result 1')
        self.assertEqual(results[0]['url'], 'https://result1.com/')
        self.assertEqual(results[0]['content'], 'Content 1')
        self.assertEqual(results[1]['title'], 'Result 2')
        self.assertEqual(results[1]['url'], 'https://result2.com/')
        self.assertEqual(results[1]['content'], 'Content 2')

        # with cached urls, without results_xpath
        xpath.cached_xpath = '//div[@class="search_result"]//a[@class="cached"]/@href'
        results = xpath.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['cached_url'], 'https://cachedresult1.com')
        self.assertEqual(results[1]['cached_url'], 'https://cachedresult2.com')
        self.assertFalse(results[0].get('is_onion', False))

        # results are onion urls (no results_xpath)
        xpath.categories = ['onions']
        results = xpath.response(response)
        self.assertTrue(results[0]['is_onion'])

        # with results_xpath
        xpath.results_xpath = '//div[@class="search_result"]'
        xpath.url_xpath = './/a[@class="result"]/@href'
        xpath.title_xpath = './/a[@class="result"]'
        xpath.content_xpath = './/p[@class="content"]'
        xpath.cached_xpath = None
        xpath.categories = []

        self.assertRaises(AttributeError, xpath.response, None)
        self.assertRaises(AttributeError, xpath.response, [])
        self.assertRaises(AttributeError, xpath.response, '')
        self.assertRaises(AttributeError, xpath.response, '[]')

        response = mock.Mock(text='<html></html>')
        self.assertEqual(xpath.response(response), [])

        response = mock.Mock(text=html)
        results = xpath.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['title'], 'Result 1')
        self.assertEqual(results[0]['url'], 'https://result1.com/')
        self.assertEqual(results[0]['content'], 'Content 1')
        self.assertEqual(results[1]['title'], 'Result 2')
        self.assertEqual(results[1]['url'], 'https://result2.com/')
        self.assertEqual(results[1]['content'], 'Content 2')

        # with cached urls, with results_xpath
        xpath.cached_xpath = './/a[@class="cached"]/@href'
        results = xpath.response(response)
        self.assertEqual(type(results), list)
        self.assertEqual(len(results), 2)
        self.assertEqual(results[0]['cached_url'], 'https://cachedresult1.com')
        self.assertEqual(results[1]['cached_url'], 'https://cachedresult2.com')
        self.assertFalse(results[0].get('is_onion', False))

        # results are onion urls (with results_xpath)
        xpath.categories = ['onions']
        results = xpath.response(response)
        self.assertTrue(results[0]['is_onion'])
|
||||
@@ -0,0 +1,242 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
from mock import patch
|
||||
|
||||
import httpx
|
||||
|
||||
from searx.network.network import Network, NETWORKS, initialize
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
class TestNetwork(SearxTestCase):
    """Tests for searx.network.network.Network construction helpers."""

    def setUp(self):
        # (re-)initialize the global network registry before each test
        initialize()

    def test_simple(self):
        """A default Network cycles a None local address and empty proxies."""
        network = Network()

        self.assertEqual(next(network._local_addresses_cycle), None)
        self.assertEqual(next(network._proxies_cycle), ())

    def test_ipaddress_cycle(self):
        """Local source addresses are cycled; CIDR ranges are expanded."""
        network = NETWORKS['ipv6']
        self.assertEqual(next(network._local_addresses_cycle), '::')
        self.assertEqual(next(network._local_addresses_cycle), '::')

        network = NETWORKS['ipv4']
        self.assertEqual(next(network._local_addresses_cycle), '0.0.0.0')
        self.assertEqual(next(network._local_addresses_cycle), '0.0.0.0')

        # explicit address list is cycled round-robin
        network = Network(local_addresses=['192.168.0.1', '192.168.0.2'])
        self.assertEqual(next(network._local_addresses_cycle), '192.168.0.1')
        self.assertEqual(next(network._local_addresses_cycle), '192.168.0.2')
        self.assertEqual(next(network._local_addresses_cycle), '192.168.0.1')

        # a /30 network yields its host addresses, also cycled
        network = Network(local_addresses=['192.168.0.0/30'])
        self.assertEqual(next(network._local_addresses_cycle), '192.168.0.1')
        self.assertEqual(next(network._local_addresses_cycle), '192.168.0.2')
        self.assertEqual(next(network._local_addresses_cycle), '192.168.0.1')
        self.assertEqual(next(network._local_addresses_cycle), '192.168.0.2')

        network = Network(local_addresses=['fe80::/10'])
        self.assertEqual(next(network._local_addresses_cycle), 'fe80::1')
        self.assertEqual(next(network._local_addresses_cycle), 'fe80::2')
        self.assertEqual(next(network._local_addresses_cycle), 'fe80::3')

        with self.assertRaises(ValueError):
            Network(local_addresses=['not_an_ip_address'])

    def test_proxy_cycles(self):
        """Proxy mappings are normalized to (pattern, url) tuples and cycled."""
        network = Network(proxies='http://localhost:1337')
        self.assertEqual(next(network._proxies_cycle), (('all://', 'http://localhost:1337'),))

        network = Network(proxies={'https': 'http://localhost:1337', 'http': 'http://localhost:1338'})
        self.assertEqual(
            next(network._proxies_cycle), (('https://', 'http://localhost:1337'), ('http://', 'http://localhost:1338'))
        )
        self.assertEqual(
            next(network._proxies_cycle), (('https://', 'http://localhost:1337'), ('http://', 'http://localhost:1338'))
        )

        # a list of proxies for one scheme is cycled between calls
        network = Network(
            proxies={'https': ['http://localhost:1337', 'http://localhost:1339'], 'http': 'http://localhost:1338'}
        )
        self.assertEqual(
            next(network._proxies_cycle), (('https://', 'http://localhost:1337'), ('http://', 'http://localhost:1338'))
        )
        self.assertEqual(
            next(network._proxies_cycle), (('https://', 'http://localhost:1339'), ('http://', 'http://localhost:1338'))
        )

        with self.assertRaises(ValueError):
            Network(proxies=1)

    def test_get_kwargs_clients(self):
        """extract_kwargs_clients splits client-level kwargs out of the
        request kwargs (mutating the dict in place).

        NOTE(review): 'allow_redirects' apparently gets translated to the
        httpx-style 'follow_redirects' request kwarg -- confirm against
        Network.extract_kwargs_clients.
        """
        kwargs = {
            'verify': True,
            'max_redirects': 5,
            'timeout': 2,
            'allow_redirects': True,
        }
        kwargs_client = Network.extract_kwargs_clients(kwargs)

        self.assertEqual(len(kwargs_client), 2)
        self.assertEqual(len(kwargs), 2)

        self.assertEqual(kwargs['timeout'], 2)
        self.assertEqual(kwargs['follow_redirects'], True)

        self.assertTrue(kwargs_client['verify'])
        self.assertEqual(kwargs_client['max_redirects'], 5)

    async def test_get_client(self):
        """Clients are shared for identical kwargs and distinct otherwise."""
        network = Network(verify=True)
        client1 = await network.get_client()
        client2 = await network.get_client(verify=True)
        client3 = await network.get_client(max_redirects=10)
        client4 = await network.get_client(verify=True)
        client5 = await network.get_client(verify=False)
        client6 = await network.get_client(max_redirects=10)

        self.assertEqual(client1, client2)
        self.assertEqual(client1, client4)
        self.assertNotEqual(client1, client3)
        self.assertNotEqual(client1, client5)
        self.assertEqual(client3, client6)

        await network.aclose()

    async def test_aclose(self):
        """Closing a network with an open client does not raise."""
        network = Network(verify=True)
        await network.get_client()
        await network.aclose()

    async def test_request(self):
        """A patched httpx request is routed through Network.request."""
        a_text = 'Lorem Ipsum'
        response = httpx.Response(status_code=200, text=a_text)
        with patch.object(httpx.AsyncClient, 'request', return_value=response):
            network = Network(enable_http=True)
            response = await network.request('GET', 'https://example.com/')
            self.assertEqual(response.text, a_text)
            await network.aclose()
|
||||
|
||||
class TestNetworkRequestRetries(SearxTestCase):
    """Retry behaviour of Network.request on HTTP errors and exceptions."""

    TEXT = 'Lorem Ipsum'

    @classmethod
    def get_response_404_then_200(cls):
        """Return an async stub: first call yields a 403, then always 200.

        NOTE(review): despite the name, the first response is a 403 (not a
        404); the tests below retry on 403, only the method name is off.
        """
        first = True

        async def get_response(*args, **kwargs):
            nonlocal first
            if first:
                first = False
                return httpx.Response(status_code=403, text=TestNetworkRequestRetries.TEXT)
            return httpx.Response(status_code=200, text=TestNetworkRequestRetries.TEXT)

        return get_response

    async def test_retries_ok(self):
        """With one retry allowed, the initial 403 is retried into a 200."""
        with patch.object(httpx.AsyncClient, 'request', new=TestNetworkRequestRetries.get_response_404_then_200()):
            network = Network(enable_http=True, retries=1, retry_on_http_error=403)
            response = await network.request('GET', 'https://example.com/', raise_for_httperror=False)
            self.assertEqual(response.text, TestNetworkRequestRetries.TEXT)
            await network.aclose()

    async def test_retries_fail_int(self):
        """retries=0 with retry_on_http_error as an int returns the 403."""
        with patch.object(httpx.AsyncClient, 'request', new=TestNetworkRequestRetries.get_response_404_then_200()):
            network = Network(enable_http=True, retries=0, retry_on_http_error=403)
            response = await network.request('GET', 'https://example.com/', raise_for_httperror=False)
            self.assertEqual(response.status_code, 403)
            await network.aclose()

    async def test_retries_fail_list(self):
        """retries=0 with retry_on_http_error as a list returns the 403."""
        with patch.object(httpx.AsyncClient, 'request', new=TestNetworkRequestRetries.get_response_404_then_200()):
            network = Network(enable_http=True, retries=0, retry_on_http_error=[403, 429])
            response = await network.request('GET', 'https://example.com/', raise_for_httperror=False)
            self.assertEqual(response.status_code, 403)
            await network.aclose()

    async def test_retries_fail_bool(self):
        """retries=0 with retry_on_http_error=True returns the 403."""
        with patch.object(httpx.AsyncClient, 'request', new=TestNetworkRequestRetries.get_response_404_then_200()):
            network = Network(enable_http=True, retries=0, retry_on_http_error=True)
            response = await network.request('GET', 'https://example.com/', raise_for_httperror=False)
            self.assertEqual(response.status_code, 403)
            await network.aclose()

    async def test_retries_exception_then_200(self):
        """Transport exceptions are retried until the retry budget is spent."""
        request_count = 0

        async def get_response(*args, **kwargs):
            nonlocal request_count
            request_count += 1
            # fail the first two attempts, succeed on the third
            if request_count < 3:
                raise httpx.RequestError('fake exception', request=None)
            return httpx.Response(status_code=200, text=TestNetworkRequestRetries.TEXT)

        with patch.object(httpx.AsyncClient, 'request', new=get_response):
            network = Network(enable_http=True, retries=2)
            response = await network.request('GET', 'https://example.com/', raise_for_httperror=False)
            self.assertEqual(response.status_code, 200)
            self.assertEqual(response.text, TestNetworkRequestRetries.TEXT)
            await network.aclose()

    async def test_retries_exception(self):
        """With no retries left, the transport exception propagates."""
        async def get_response(*args, **kwargs):
            raise httpx.RequestError('fake exception', request=None)

        with patch.object(httpx.AsyncClient, 'request', new=get_response):
            network = Network(enable_http=True, retries=0)
            with self.assertRaises(httpx.RequestError):
                await network.request('GET', 'https://example.com/', raise_for_httperror=False)
            await network.aclose()
|
||||
|
||||
|
||||
class TestNetworkStreamRetries(SearxTestCase):
    """Retry behaviour of Network.stream on exceptions and HTTP errors."""

    TEXT = 'Lorem Ipsum'

    @classmethod
    def get_response_exception_then_200(cls):
        """Return a sync stub: first call raises, then always returns 200."""
        first = True

        def stream(*args, **kwargs):
            nonlocal first
            if first:
                first = False
                raise httpx.RequestError('fake exception', request=None)
            return httpx.Response(status_code=200, text=TestNetworkStreamRetries.TEXT)

        return stream

    async def test_retries_ok(self):
        """With one retry allowed, the initial exception is retried away."""
        with patch.object(httpx.AsyncClient, 'stream', new=TestNetworkStreamRetries.get_response_exception_then_200()):
            network = Network(enable_http=True, retries=1, retry_on_http_error=403)
            response = await network.stream('GET', 'https://example.com/')
            self.assertEqual(response.text, TestNetworkStreamRetries.TEXT)
            await network.aclose()

    async def test_retries_fail(self):
        """With retries=0, the first transport exception propagates."""
        with patch.object(httpx.AsyncClient, 'stream', new=TestNetworkStreamRetries.get_response_exception_then_200()):
            network = Network(enable_http=True, retries=0, retry_on_http_error=403)
            with self.assertRaises(httpx.RequestError):
                await network.stream('GET', 'https://example.com/')
            await network.aclose()

    async def test_retries_exception(self):
        """With retries=0, a retriable 403 is returned as-is (no retry)."""
        first = True

        def stream(*args, **kwargs):
            nonlocal first
            if first:
                first = False
                # NOTE(review): TEXT is taken from TestNetworkRequestRetries
                # here, not TestNetworkStreamRetries; both hold 'Lorem Ipsum',
                # so behaviour is unchanged -- likely a copy/paste slip.
                return httpx.Response(status_code=403, text=TestNetworkRequestRetries.TEXT)
            return httpx.Response(status_code=200, text=TestNetworkRequestRetries.TEXT)

        with patch.object(httpx.AsyncClient, 'stream', new=stream):
            network = Network(enable_http=True, retries=0, retry_on_http_error=403)
            response = await network.stream('GET', 'https://example.com/', raise_for_httperror=False)
            self.assertEqual(response.status_code, 403)
            await network.aclose()
|
||||
@@ -0,0 +1,2 @@
|
||||
Test:
|
||||
"**********"
|
||||
@@ -0,0 +1,10 @@
|
||||
# This SearXNG setup is used in unit tests
|
||||
|
||||
use_default_settings: true
|
||||
search:
|
||||
formats: [html, csv, json, rss]
|
||||
|
||||
engines:
|
||||
- name: general dummy
|
||||
engine: demo_offline
|
||||
timeout: 3
|
||||
@@ -0,0 +1,111 @@
|
||||
general:
|
||||
debug: false
|
||||
instance_name: "searx"
|
||||
|
||||
search:
|
||||
safe_search: 0
|
||||
autocomplete: ""
|
||||
default_lang: ""
|
||||
ban_time_on_fail: 5
|
||||
max_ban_time_on_fail: 120
|
||||
|
||||
server:
|
||||
port: 9000
|
||||
bind_address: "0.0.0.0"
|
||||
secret_key: "user_settings_secret"
|
||||
base_url: false
|
||||
image_proxy: false
|
||||
http_protocol_version: "1.0"
|
||||
method: "POST"
|
||||
default_http_headers:
|
||||
X-Content-Type-Options: nosniff
|
||||
X-XSS-Protection: 1; mode=block
|
||||
X-Download-Options: noopen
|
||||
X-Robots-Tag: noindex, nofollow
|
||||
Referrer-Policy: no-referrer
|
||||
|
||||
ui:
|
||||
static_path: ""
|
||||
templates_path: ""
|
||||
default_theme: simple
|
||||
default_locale: ""
|
||||
theme_args:
|
||||
simple_style: auto
|
||||
|
||||
engines:
|
||||
- name: wikidata
|
||||
engine: wikidata
|
||||
shortcut: wd
|
||||
timeout: 3.0
|
||||
weight: 2
|
||||
|
||||
- name: wikibooks
|
||||
engine: mediawiki
|
||||
shortcut: wb
|
||||
categories: general
|
||||
base_url: "https://{language}.wikibooks.org/"
|
||||
number_of_results: 5
|
||||
search_type: text
|
||||
|
||||
- name: wikinews
|
||||
engine: mediawiki
|
||||
shortcut: wn
|
||||
categories: news
|
||||
base_url: "https://{language}.wikinews.org/"
|
||||
number_of_results: 5
|
||||
search_type: text
|
||||
|
||||
- name: wikiquote
|
||||
engine: mediawiki
|
||||
shortcut: wq
|
||||
categories: general
|
||||
base_url: "https://{language}.wikiquote.org/"
|
||||
number_of_results: 5
|
||||
search_type: text
|
||||
|
||||
locales:
|
||||
en: English
|
||||
ar: العَرَبِيَّة (Arabic)
|
||||
bg: Български (Bulgarian)
|
||||
bo: བོད་སྐད་ (Tibetian)
|
||||
ca: Català (Catalan)
|
||||
cs: Čeština (Czech)
|
||||
cy: Cymraeg (Welsh)
|
||||
da: Dansk (Danish)
|
||||
de: Deutsch (German)
|
||||
el_GR: Ελληνικά (Greek_Greece)
|
||||
eo: Esperanto (Esperanto)
|
||||
es: Español (Spanish)
|
||||
et: Eesti (Estonian)
|
||||
eu: Euskara (Basque)
|
||||
fa_IR: (fārsī) فارسى (Persian)
|
||||
fi: Suomi (Finnish)
|
||||
fil: Wikang Filipino (Filipino)
|
||||
fr: Français (French)
|
||||
gl: Galego (Galician)
|
||||
he: עברית (Hebrew)
|
||||
hr: Hrvatski (Croatian)
|
||||
hu: Magyar (Hungarian)
|
||||
ia: Interlingua (Interlingua)
|
||||
it: Italiano (Italian)
|
||||
ja: 日本語 (Japanese)
|
||||
lt: Lietuvių (Lithuanian)
|
||||
nl: Nederlands (Dutch)
|
||||
nl_BE: Vlaams (Dutch_Belgium)
|
||||
oc: Lenga D'òc (Occitan)
|
||||
pl: Polski (Polish)
|
||||
pt: Português (Portuguese)
|
||||
pt_BR: Português (Portuguese_Brazil)
|
||||
ro: Română (Romanian)
|
||||
ru: Русский (Russian)
|
||||
sk: Slovenčina (Slovak)
|
||||
sl: Slovenski (Slovene)
|
||||
sr: српски (Serbian)
|
||||
sv: Svenska (Swedish)
|
||||
te: తెలుగు (telugu)
|
||||
ta: தமிழ் (Tamil)
|
||||
tr: Türkçe (Turkish)
|
||||
uk: українська мова (Ukrainian)
|
||||
vi: tiếng việt (Vietnamese)
|
||||
zh: 中文 (Chinese)
|
||||
zh_TW: 國語 (Taiwanese Mandarin)
|
||||
@@ -0,0 +1,14 @@
|
||||
use_default_settings:
|
||||
engines:
|
||||
keep_only:
|
||||
- wikibooks
|
||||
- wikinews
|
||||
server:
|
||||
secret_key: "user_secret_key"
|
||||
bind_address: "0.0.0.0"
|
||||
default_http_headers:
|
||||
Custom-Header: Custom-Value
|
||||
engines:
|
||||
- name: wikipedia
|
||||
- name: newengine
|
||||
engine: dummy
|
||||
@@ -0,0 +1,10 @@
|
||||
use_default_settings:
|
||||
engines:
|
||||
remove:
|
||||
- wikibooks
|
||||
- wikinews
|
||||
server:
|
||||
secret_key: "user_secret_key"
|
||||
bind_address: "0.0.0.0"
|
||||
default_http_headers:
|
||||
Custom-Header: Custom-Value
|
||||
@@ -0,0 +1,15 @@
|
||||
use_default_settings:
|
||||
engines:
|
||||
remove:
|
||||
- wikibooks
|
||||
- wikinews
|
||||
server:
|
||||
secret_key: "user_secret_key"
|
||||
bind_address: "0.0.0.0"
|
||||
default_http_headers:
|
||||
Custom-Header: Custom-Value
|
||||
engines:
|
||||
- name: wikipedia
|
||||
tokens: ['secret_token']
|
||||
- name: newengine
|
||||
engine: dummy
|
||||
@@ -0,0 +1,9 @@
|
||||
use_default_settings: true
|
||||
server:
|
||||
secret_key: "user_secret_key"
|
||||
bind_address: "0.0.0.0"
|
||||
default_http_headers:
|
||||
Custom-Header: Custom-Value
|
||||
result_proxy:
|
||||
url: https://localhost/morty
|
||||
key: "$ecretKey"
|
||||
@@ -0,0 +1,15 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from mock import Mock
|
||||
|
||||
from searx.answerers import answerers
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
class AnswererTest(SearxTestCase):
    """Every registered answerer must handle non-ASCII query text."""

    def test_unicode_input(self):
        mock_query = Mock()
        unicode_payload = 'árvíztűrő tükörfúrógép'
        for answerer in answerers:
            # trigger the answerer with its first keyword plus unicode text
            mock_query.query = f'{answerer.keywords[0]} {unicode_payload}'
            self.assertTrue(isinstance(answerer.answer(mock_query), list))
||||
@@ -0,0 +1,77 @@
|
||||
from searx import settings, engines
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
class TestEnginesInit(SearxTestCase):
    """Behaviour of :py:obj:`searx.engines.load_engines` under various settings."""

    @classmethod
    def tearDownClass(cls):
        # restore the outgoing settings mutated by the tests below
        settings['outgoing']['using_tor_proxy'] = False
        settings['outgoing']['extra_proxy_timeout'] = 0

    def test_initialize_engines_default(self):
        """Two plain engine definitions are both registered."""
        engines.load_engines(
            [
                {'engine': 'dummy', 'name': 'engine1', 'shortcut': 'e1'},
                {'engine': 'dummy', 'name': 'engine2', 'shortcut': 'e2'},
            ]
        )
        self.assertEqual(len(engines.engines), 2)
        self.assertIn('engine1', engines.engines)
        self.assertIn('engine2', engines.engines)

    def test_initialize_engines_exclude_onions(self):
        """Without a Tor proxy the 'onions' category is dropped entirely."""
        settings['outgoing']['using_tor_proxy'] = False
        engines.load_engines(
            [
                {'engine': 'dummy', 'name': 'engine1', 'shortcut': 'e1', 'categories': 'general'},
                {'engine': 'dummy', 'name': 'engine2', 'shortcut': 'e2', 'categories': 'onions'},
            ]
        )
        self.assertEqual(len(engines.engines), 1)
        self.assertIn('engine1', engines.engines)
        self.assertNotIn('onions', engines.categories)

    def test_initialize_engines_include_onions(self):
        """With a Tor proxy, onion engines are kept, the onion URL is applied
        and ``extra_proxy_timeout`` is added on top of the engine timeout."""
        settings['outgoing']['using_tor_proxy'] = True
        settings['outgoing']['extra_proxy_timeout'] = 100.0
        engines.load_engines(
            [
                {
                    'engine': 'dummy',
                    'name': 'engine1',
                    'shortcut': 'e1',
                    'categories': 'general',
                    'timeout': 20.0,
                    'onion_url': 'http://engine1.onion',
                },
                {'engine': 'dummy', 'name': 'engine2', 'shortcut': 'e2', 'categories': 'onions'},
            ]
        )
        self.assertEqual(len(engines.engines), 2)
        self.assertIn('engine1', engines.engines)
        self.assertIn('engine2', engines.engines)
        self.assertIn('onions', engines.categories)
        self.assertIn('http://engine1.onion', engines.engines['engine1'].search_url)
        # 20.0 (engine) + 100.0 (extra proxy timeout)
        self.assertEqual(engines.engines['engine1'].timeout, 120.0)

    def test_missing_name_field(self):
        """An engine definition without a 'name' is rejected with an error log."""
        settings['outgoing']['using_tor_proxy'] = False
        with self.assertLogs('searx.engines', level='ERROR') as cm:
            engines.load_engines([{'engine': 'dummy', 'shortcut': 'e1', 'categories': 'general'}])
        self.assertEqual(len(engines.engines), 0)
        self.assertEqual(cm.output, ['ERROR:searx.engines:An engine does not have a "name" field'])

    def test_missing_engine_field(self):
        """An engine definition without an 'engine' is rejected with an error log."""
        settings['outgoing']['using_tor_proxy'] = False
        with self.assertLogs('searx.engines', level='ERROR') as cm:
            engines.load_engines([{'name': 'engine2', 'shortcut': 'e2', 'categories': 'onions'}])
        self.assertEqual(len(engines.engines), 0)
        self.assertEqual(
            cm.output, ['ERROR:searx.engines:The "engine" field is missing for the engine named "engine2"']
        )
||||
@@ -0,0 +1,41 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
from tests import SearxTestCase
|
||||
import searx.exceptions
|
||||
from searx import get_setting
|
||||
|
||||
|
||||
class TestExceptions(SearxTestCase):
    """Suspend-time handling of the searx engine exception hierarchy."""

    # exception classes whose suspended_time is configurable
    SUSPENDABLE = (
        searx.exceptions.SearxEngineAccessDeniedException,
        searx.exceptions.SearxEngineCaptchaException,
        searx.exceptions.SearxEngineTooManyRequestsException,
    )

    def test_default_suspend_time(self):
        """Without arguments, suspended_time comes from the settings."""
        for exc_class in self.SUSPENDABLE:
            with self.assertRaises(exc_class) as caught:
                raise exc_class()
            self.assertEqual(
                caught.exception.suspended_time,
                get_setting(exc_class.SUSPEND_TIME_SETTING),
            )

    def test_custom_suspend_time(self):
        """An explicit suspended_time argument overrides the settings."""
        for exc_class, suspend_for in zip(self.SUSPENDABLE, (1337, 1409, 1543)):
            with self.assertRaises(exc_class) as caught:
                raise exc_class(suspended_time=suspend_for)
            self.assertEqual(caught.exception.suspended_time, suspend_for)
||||
@@ -0,0 +1,120 @@
|
||||
from searx.external_bang import (
|
||||
get_node,
|
||||
resolve_bang_definition,
|
||||
get_bang_url,
|
||||
get_bang_definition_and_autocomplete,
|
||||
LEAF_KEY,
|
||||
)
|
||||
from searx.search import SearchQuery, EngineRef
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
# Hand-crafted external-bang trie used by the tests below.
#
# Each leaf string encodes "<url><chr(2)><chr(1)><rank>" — the URL part and a
# trailing rank, split on the chr(2)+chr(1) separator (see the assertions in
# TestResolveBangDefinition).  LEAF_KEY marks a node that is a complete bang
# on its own while also having longer continuations ('sea' vs 'search').
# The 'error' entry is deliberately malformed to exercise error handling.
TEST_DB = {
    'trie': {
        'exam': {
            'ple': '//example.com/' + chr(2) + chr(1) + '0',
            LEAF_KEY: '//wikipedia.org/wiki/' + chr(2) + chr(1) + '0',
        },
        'sea': {
            LEAF_KEY: 'sea' + chr(2) + chr(1) + '0',
            'rch': {
                LEAF_KEY: 'search' + chr(2) + chr(1) + '0',
                'ing': 'searching' + chr(2) + chr(1) + '0',
            },
            's': {
                'on': 'season' + chr(2) + chr(1) + '0',
                'capes': 'seascape' + chr(2) + chr(1) + '0',
            },
        },
        'error': ['error in external_bangs.json'],
    }
}
||||
|
||||
|
||||
class TestGetNode(SearxTestCase):
    """Tests for :py:obj:`searx.external_bang.get_node`.

    get_node walks the trie and returns ``(node, before, after)`` — the
    deepest node reached, the consumed prefix and the unconsumed rest.
    """

    DB = {
        'trie': {
            'exam': {
                'ple': 'test',
                LEAF_KEY: 'not used',
            }
        }
    }

    def test_found(self):
        # the full query is consumed and a leaf value is reached
        found, consumed, rest = get_node(TestGetNode.DB, 'example')
        self.assertEqual(found, 'test')
        self.assertEqual(consumed, 'example')
        self.assertEqual(rest, '')

    def test_get_partial(self):
        # the walk stops on an inner node; the trailing 'p' is left over
        found, consumed, rest = get_node(TestGetNode.DB, 'examp')
        self.assertEqual(found, TestGetNode.DB['trie']['exam'])
        self.assertEqual(consumed, 'exam')
        self.assertEqual(rest, 'p')

    def test_not_found(self):
        # the query runs past a leaf; the extra 's' stays unconsumed
        found, consumed, rest = get_node(TestGetNode.DB, 'examples')
        self.assertEqual(found, 'test')
        self.assertEqual(consumed, 'example')
        self.assertEqual(rest, 's')
||||
|
||||
|
||||
class TestResolveBangDefinition(SearxTestCase):
    """Tests for :py:obj:`searx.external_bang.resolve_bang_definition`."""

    def test_https(self):
        # a protocol-relative URL defaults to https; the rank follows the
        # chr(2)+chr(1) separator
        bang_definition = '//example.com/' + chr(2) + chr(1) + '42'
        resolved_url, rank = resolve_bang_definition(bang_definition, 'query')
        self.assertEqual(resolved_url, 'https://example.com/query')
        self.assertEqual(rank, 42)

    def test_http(self):
        # an explicit http:// scheme is preserved
        bang_definition = 'http://example.com/' + chr(2) + chr(1) + '0'
        resolved_url, rank = resolve_bang_definition(bang_definition, 'text')
        self.assertEqual(resolved_url, 'http://example.com/text')
        self.assertEqual(rank, 0)
||||
|
||||
|
||||
class TestGetBangDefinitionAndAutocomplete(SearxTestCase):
    """Tests for :py:obj:`searx.external_bang.get_bang_definition_and_autocomplete`."""

    def test_found(self):
        # 'exam' is itself a bang (LEAF_KEY) and has one longer completion
        bang_def, completions = get_bang_definition_and_autocomplete('exam', external_bangs_db=TEST_DB)
        self.assertEqual(bang_def, TEST_DB['trie']['exam'][LEAF_KEY])
        self.assertEqual(completions, ['example'])

    def test_found_optimized(self):
        # 'example' resolves to a plain leaf: nothing left to complete
        bang_def, completions = get_bang_definition_and_autocomplete('example', external_bangs_db=TEST_DB)
        self.assertEqual(bang_def, TEST_DB['trie']['exam']['ple'])
        self.assertEqual(completions, [])

    def test_partial(self):
        # 'examp' is no bang on its own but can still complete to 'example'
        bang_def, completions = get_bang_definition_and_autocomplete('examp', external_bangs_db=TEST_DB)
        self.assertEqual(bang_def, None)
        self.assertEqual(completions, ['example'])

    def test_partial2(self):
        # 'sea' is a bang and has several deeper completions
        bang_def, completions = get_bang_definition_and_autocomplete('sea', external_bangs_db=TEST_DB)
        self.assertEqual(bang_def, TEST_DB['trie']['sea'][LEAF_KEY])
        self.assertEqual(completions, ['search', 'searching', 'seascapes', 'season'])

    def test_error(self):
        # malformed trie entry: neither a definition nor completions
        bang_def, completions = get_bang_definition_and_autocomplete('error', external_bangs_db=TEST_DB)
        self.assertEqual(bang_def, None)
        self.assertEqual(completions, [])

    def test_actual_data(self):
        # sanity check against the bundled external_bangs.json
        bang_def, completions = get_bang_definition_and_autocomplete('duckduckgo')
        self.assertTrue(bang_def.startswith('//duckduckgo.com/?q='))
        self.assertEqual(completions, [])
||||
|
||||
|
||||
class TestExternalBangJson(SearxTestCase):
    """Tests for :py:obj:`searx.external_bang.get_bang_url`."""

    def test_no_external_bang_query(self):
        # a query without an external bang yields no redirect URL
        search_query = SearchQuery('test', engineref_list=[EngineRef('wikipedia', 'general')])
        self.assertEqual(get_bang_url(search_query), None)

    def test_get_bang_url(self):
        # an 'example' bang resolves against the test database
        search_query = SearchQuery('test', engineref_list=[], external_bang='example')
        self.assertEqual(get_bang_url(search_query, external_bangs_db=TEST_DB), 'https://example.com/test')

    def test_actual_data(self):
        # sanity check against the bundled external_bangs.json ('!g' → Google)
        search_query = SearchQuery('test', engineref_list=[], external_bang='g')
        self.assertEqual(get_bang_url(search_query), 'https://www.google.com/search?q=test')
||||
@@ -0,0 +1,111 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
# lint: pylint
|
||||
"""Test some code from module :py:obj:`searx.locales`"""
|
||||
|
||||
from searx import locales
|
||||
from searx.sxng_locales import sxng_locales
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
class TestLocales(SearxTestCase):
    """Implemented tests:

    - :py:obj:`searx.locales.match_locale`
    """

    def test_match_locale(self):

        # all locale tags known to SearXNG
        locale_tag_list = [x[0] for x in sxng_locales]

        # Test SearXNG search languages

        self.assertEqual(locales.match_locale('de', locale_tag_list), 'de')
        self.assertEqual(locales.match_locale('fr', locale_tag_list), 'fr')
        self.assertEqual(locales.match_locale('zh', locale_tag_list), 'zh')

        # Test SearXNG search regions

        self.assertEqual(locales.match_locale('ca-es', locale_tag_list), 'ca-ES')
        self.assertEqual(locales.match_locale('de-at', locale_tag_list), 'de-AT')
        self.assertEqual(locales.match_locale('de-de', locale_tag_list), 'de-DE')
        # 'UK' is not a valid ISO 3166 code; must normalize to GB
        self.assertEqual(locales.match_locale('en-UK', locale_tag_list), 'en-GB')
        self.assertEqual(locales.match_locale('fr-be', locale_tag_list), 'fr-BE')
        # NOTE(review): duplicate of the previous assertion
        self.assertEqual(locales.match_locale('fr-be', locale_tag_list), 'fr-BE')
        self.assertEqual(locales.match_locale('fr-ca', locale_tag_list), 'fr-CA')
        self.assertEqual(locales.match_locale('fr-ch', locale_tag_list), 'fr-CH')
        self.assertEqual(locales.match_locale('zh-cn', locale_tag_list), 'zh-CN')
        self.assertEqual(locales.match_locale('zh-tw', locale_tag_list), 'zh-TW')
        self.assertEqual(locales.match_locale('zh-hk', locale_tag_list), 'zh-HK')

        # Test language script code

        self.assertEqual(locales.match_locale('zh-hans', locale_tag_list), 'zh-CN')
        self.assertEqual(locales.match_locale('zh-hans-cn', locale_tag_list), 'zh-CN')
        self.assertEqual(locales.match_locale('zh-hant', locale_tag_list), 'zh-TW')
        self.assertEqual(locales.match_locale('zh-hant-tw', locale_tag_list), 'zh-TW')

        # Test individual locale lists

        self.assertEqual(locales.match_locale('es', [], fallback='fallback'), 'fallback')

        self.assertEqual(locales.match_locale('de', ['de-CH', 'de-DE']), 'de-DE')
        # NOTE(review): duplicate of the previous assertion
        self.assertEqual(locales.match_locale('de', ['de-CH', 'de-DE']), 'de-DE')
        self.assertEqual(locales.match_locale('es', ['ES']), 'ES')
        self.assertEqual(locales.match_locale('es', ['es-AR', 'es-ES', 'es-MX']), 'es-ES')
        self.assertEqual(locales.match_locale('es-AR', ['es-AR', 'es-ES', 'es-MX']), 'es-AR')
        self.assertEqual(locales.match_locale('es-CO', ['es-AR', 'es-ES']), 'es-ES')
        self.assertEqual(locales.match_locale('es-CO', ['es-AR']), 'es-AR')

        # Tests from the commit message of 9ae409a05a

        # Assumption:
        # A. When a user selects a language the results should be optimized according to
        # the selected language.
        #
        # B. When user selects a language and a territory the results should be
        # optimized with first priority on territory and second on language.

        # Assume we have an engine that supports the following locales:
        locale_tag_list = ['zh-CN', 'zh-HK', 'nl-BE', 'fr-CA']

        # Examples (Assumption A.)
        # ------------------------

        # A user selects region 'zh-TW' which should end in zh_HK.
        # hint: CN is 'Hans' and HK ('Hant') fits better to TW ('Hant')
        self.assertEqual(locales.match_locale('zh-TW', locale_tag_list), 'zh-HK')

        # A user selects only the language 'zh' which should end in CN
        self.assertEqual(locales.match_locale('zh', locale_tag_list), 'zh-CN')

        # A user selects only the language 'fr' which should end in fr_CA
        self.assertEqual(locales.match_locale('fr', locale_tag_list), 'fr-CA')

        # The difference in priority on the territory is best shown with a
        # engine that supports the following locales:
        locale_tag_list = ['fr-FR', 'fr-CA', 'en-GB', 'nl-BE']

        # A user selects only a language
        self.assertEqual(locales.match_locale('en', locale_tag_list), 'en-GB')

        # hint: the engine supports fr_FR and fr_CA since no territory is given,
        # fr_FR takes priority ..
        self.assertEqual(locales.match_locale('fr', locale_tag_list), 'fr-FR')

        # Examples (Assumption B.)
        # ------------------------

        # A user selects region 'fr-BE' which should end in nl-BE
        self.assertEqual(locales.match_locale('fr-BE', locale_tag_list), 'nl-BE')

        # If the user selects a language and there are two locales like the
        # following:

        locale_tag_list = ['fr-BE', 'fr-CH']

        # The get_engine_locale selects the locale by looking at the "population
        # percent" and this percentage has an higher amount in BE (68.%)
        # compared to CH (21%)

        self.assertEqual(locales.match_locale('fr', locale_tag_list), 'fr-BE')
||||
@@ -0,0 +1,156 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from searx import plugins
|
||||
from mock import Mock
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
def get_search_mock(query, **kwargs):
    """Build a mock search object with the given query text.

    Extra keyword arguments (e.g. ``pageno``) become attributes of the mocked
    ``search_query``; ``result_container.answers`` starts as an empty dict.
    """
    search_query = Mock(query=query, **kwargs)
    result_container = Mock(answers=dict())
    return Mock(search_query=search_query, result_container=result_container)
||||
|
||||
|
||||
class PluginMock:
    """Minimal stand-in for a plugin as registered in a PluginStore."""

    # disabled by default; the store only calls explicitly listed plugins
    default_on = False
    name = 'Default plugin'
    description = 'Default plugin description'
||||
|
||||
|
||||
class PluginStoreTest(SearxTestCase):
    """Behaviour of :py:obj:`searx.plugins.PluginStore`."""

    def test_PluginStore_init(self):
        # a fresh store starts with an empty plugin list
        plugin_store = plugins.PluginStore()
        self.assertTrue(isinstance(plugin_store.plugins, list) and len(plugin_store.plugins) == 0)

    def test_PluginStore_register(self):
        plugin_store = plugins.PluginStore()
        plugin_store.register(PluginMock())
        self.assertTrue(len(plugin_store.plugins) == 1)

    def test_PluginStore_call(self):
        plugin_store = plugins.PluginStore()
        mock_plugin = PluginMock()
        plugin_store.register(mock_plugin)
        setattr(mock_plugin, 'asdf', Mock())
        request = Mock()

        # the hook runs only for plugins in the list passed to call()
        plugin_store.call([], 'asdf', request, Mock())
        self.assertFalse(mock_plugin.asdf.called)  # pylint: disable=E1101

        plugin_store.call([mock_plugin], 'asdf', request, Mock())
        self.assertTrue(mock_plugin.asdf.called)  # pylint: disable=E1101
||||
|
||||
|
||||
class SelfIPTest(SearxTestCase):
    """The self_info plugin answers 'ip' and 'user-agent' queries, but only
    on the first result page."""

    def test_PluginStore_init(self):
        plugin = plugins.load_and_initialize_plugin('searx.plugins.self_info', False, (None, {}))
        store = plugins.PluginStore()
        store.register(plugin)

        self.assertTrue(len(store.plugins) == 1)

        # IP test
        request = Mock()
        request.remote_addr = '127.0.0.1'
        request.headers = {'X-Forwarded-For': '1.2.3.4, 127.0.0.1', 'X-Real-IP': '127.0.0.1'}
        search = get_search_mock(
            query='ip',
            pageno=1,
        )
        store.call(store.plugins, 'post_search', request, search)
        # the answer contains the client address
        self.assertTrue('127.0.0.1' in search.result_container.answers["ip"]["answer"])

        # on page 2 no 'ip' answer may be produced
        search = get_search_mock(query='ip', pageno=2)
        store.call(store.plugins, 'post_search', request, search)
        self.assertFalse('ip' in search.result_container.answers)

        # User agent test
        request = Mock(user_agent='Mock')

        search = get_search_mock(query='user-agent', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])

        search = get_search_mock(query='user-agent', pageno=2)
        store.call(store.plugins, 'post_search', request, search)
        self.assertFalse('user-agent' in search.result_container.answers)

        # repeated to check the plugin is not stateful across calls
        search = get_search_mock(query='user-agent', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])

        search = get_search_mock(query='user-agent', pageno=2)
        store.call(store.plugins, 'post_search', request, search)
        self.assertFalse('user-agent' in search.result_container.answers)

        # the keyword is also matched inside a natural-language query
        search = get_search_mock(query='What is my User-Agent?', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue('Mock' in search.result_container.answers["user-agent"]["answer"])

        search = get_search_mock(query='What is my User-Agent?', pageno=2)
        store.call(store.plugins, 'post_search', request, search)
        self.assertFalse('user-agent' in search.result_container.answers)
||||
|
||||
|
||||
class HashPluginTest(SearxTestCase):
    """The hash plugin answers '<algo> <text>' queries with the hash digest of
    the text — md5, sha1, sha224, sha256, sha384 and sha512.  Expected digests
    below are the well-known hashes of the string 'test'."""

    def test_PluginStore_init(self):
        store = plugins.PluginStore()
        plugin = plugins.load_and_initialize_plugin('searx.plugins.hash_plugin', False, (None, {}))
        store.register(plugin)

        self.assertTrue(len(store.plugins) == 1)

        request = Mock(remote_addr='127.0.0.1')

        # MD5
        search = get_search_mock(query='md5 test', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue(
            'md5 hash digest: 098f6bcd4621d373cade4e832627b4f6' in search.result_container.answers['hash']['answer']
        )

        # a bytes query on page 2 must not produce a hash answer
        search = get_search_mock(query=b'md5 test', pageno=2)
        store.call(store.plugins, 'post_search', request, search)
        self.assertFalse('hash' in search.result_container.answers)

        # SHA1
        search = get_search_mock(query='sha1 test', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue(
            'sha1 hash digest: a94a8fe5ccb19ba61c4c0873d391e9879'
            '82fbbd3' in search.result_container.answers['hash']['answer']
        )

        # SHA224
        search = get_search_mock(query='sha224 test', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue(
            'sha224 hash digest: 90a3ed9e32b2aaf4c61c410eb9254261'
            '19e1a9dc53d4286ade99a809' in search.result_container.answers['hash']['answer']
        )

        # SHA256
        search = get_search_mock(query='sha256 test', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue(
            'sha256 hash digest: 9f86d081884c7d659a2feaa0c55ad015a'
            '3bf4f1b2b0b822cd15d6c15b0f00a08' in search.result_container.answers['hash']['answer']
        )

        # SHA384
        search = get_search_mock(query='sha384 test', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue(
            'sha384 hash digest: 768412320f7b0aa5812fce428dc4706b3c'
            'ae50e02a64caa16a782249bfe8efc4b7ef1ccb126255d196047dfedf1'
            '7a0a9' in search.result_container.answers['hash']['answer']
        )

        # SHA512
        search = get_search_mock(query='sha512 test', pageno=1)
        store.call(store.plugins, 'post_search', request, search)
        self.assertTrue(
            'sha512 hash digest: ee26b0dd4af7e749aa1a8ee3c10ae9923f6'
            '18980772e473f8819a5d4940e0db27ac185f8a0e1d5f84f88bc887fd67b143732c304cc5'
            'fa9ad8e6f57f50028a8ff' in search.result_container.answers['hash']['answer']
        )
||||
@@ -0,0 +1,156 @@
|
||||
from searx.locales import locales_initialize
|
||||
from searx.preferences import (
|
||||
EnumStringSetting,
|
||||
MapSetting,
|
||||
SearchLanguageSetting,
|
||||
MultipleChoiceSetting,
|
||||
PluginsSetting,
|
||||
ValidationException,
|
||||
)
|
||||
from tests import SearxTestCase
|
||||
|
||||
locales_initialize()
|
||||
|
||||
|
||||
class PluginStub:
|
||||
def __init__(self, plugin_id, default_on):
|
||||
self.id = plugin_id
|
||||
self.default_on = default_on
|
||||
|
||||
|
||||
class TestSettings(SearxTestCase):
|
||||
# map settings
|
||||
|
||||
def test_map_setting_invalid_default_value(self):
|
||||
with self.assertRaises(ValidationException):
|
||||
MapSetting(3, map={'dog': 1, 'bat': 2})
|
||||
|
||||
def test_map_setting_invalid_choice(self):
|
||||
setting = MapSetting(2, map={'dog': 1, 'bat': 2})
|
||||
with self.assertRaises(ValidationException):
|
||||
setting.parse('cat')
|
||||
|
||||
def test_map_setting_valid_default(self):
|
||||
setting = MapSetting(3, map={'dog': 1, 'bat': 2, 'cat': 3})
|
||||
self.assertEqual(setting.get_value(), 3)
|
||||
|
||||
def test_map_setting_valid_choice(self):
|
||||
setting = MapSetting(3, map={'dog': 1, 'bat': 2, 'cat': 3})
|
||||
self.assertEqual(setting.get_value(), 3)
|
||||
setting.parse('bat')
|
||||
self.assertEqual(setting.get_value(), 2)
|
||||
|
||||
# enum settings
|
||||
|
||||
def test_enum_setting_invalid_default_value(self):
|
||||
with self.assertRaises(ValidationException):
|
||||
EnumStringSetting(3, choices=[0, 1, 2])
|
||||
|
||||
def test_enum_setting_invalid_choice(self):
|
||||
setting = EnumStringSetting(0, choices=[0, 1, 2])
|
||||
with self.assertRaises(ValidationException):
|
||||
setting.parse(3)
|
||||
|
||||
def test_enum_setting_valid_default(self):
|
||||
setting = EnumStringSetting(3, choices=[1, 2, 3])
|
||||
self.assertEqual(setting.get_value(), 3)
|
||||
|
||||
def test_enum_setting_valid_choice(self):
|
||||
setting = EnumStringSetting(3, choices=[1, 2, 3])
|
||||
self.assertEqual(setting.get_value(), 3)
|
||||
setting.parse(2)
|
||||
self.assertEqual(setting.get_value(), 2)
|
||||
|
||||
# multiple choice settings
|
||||
|
||||
def test_multiple_setting_invalid_default_value(self):
|
||||
with self.assertRaises(ValidationException):
|
||||
MultipleChoiceSetting(['3', '4'], choices=['0', '1', '2'])
|
||||
|
||||
def test_multiple_setting_invalid_choice(self):
|
||||
setting = MultipleChoiceSetting(['1', '2'], choices=['0', '1', '2'])
|
||||
with self.assertRaises(ValidationException):
|
||||
setting.parse('4, 3')
|
||||
|
||||
def test_multiple_setting_valid_default(self):
|
||||
setting = MultipleChoiceSetting(['3'], choices=['1', '2', '3'])
|
||||
self.assertEqual(setting.get_value(), ['3'])
|
||||
|
||||
def test_multiple_setting_valid_choice(self):
|
||||
setting = MultipleChoiceSetting(['3'], choices=['1', '2', '3'])
|
||||
self.assertEqual(setting.get_value(), ['3'])
|
||||
setting.parse('2')
|
||||
self.assertEqual(setting.get_value(), ['2'])
|
||||
|
||||
# search language settings
|
||||
def test_lang_setting_valid_choice(self):
|
||||
setting = SearchLanguageSetting('all', choices=['all', 'de', 'en'])
|
||||
setting.parse('de')
|
||||
self.assertEqual(setting.get_value(), 'de')
|
||||
|
||||
def test_lang_setting_invalid_choice(self):
|
||||
setting = SearchLanguageSetting('all', choices=['all', 'de', 'en'])
|
||||
setting.parse('xx')
|
||||
self.assertEqual(setting.get_value(), 'all')
|
||||
|
||||
def test_lang_setting_old_cookie_choice(self):
|
||||
setting = SearchLanguageSetting('all', choices=['all', 'es', 'es-ES'])
|
||||
setting.parse('es_XA')
|
||||
self.assertEqual(setting.get_value(), 'es')
|
||||
|
||||
def test_lang_setting_old_cookie_format(self):
|
||||
setting = SearchLanguageSetting('all', choices=['all', 'es', 'es-ES'])
|
||||
setting.parse('es_ES')
|
||||
self.assertEqual(setting.get_value(), 'es-ES')
|
||||
|
||||
# plugins settings
|
||||
def test_plugins_setting_all_default_enabled(self):
|
||||
plugin1 = PluginStub('plugin1', True)
|
||||
plugin2 = PluginStub('plugin2', True)
|
||||
setting = PluginsSetting(['3'], plugins=[plugin1, plugin2])
|
||||
self.assertEqual(set(setting.get_enabled()), set(['plugin1', 'plugin2']))
|
||||
|
||||
def test_plugins_setting_few_default_enabled(self):
|
||||
plugin1 = PluginStub('plugin1', True)
|
||||
plugin2 = PluginStub('plugin2', False)
|
||||
plugin3 = PluginStub('plugin3', True)
|
||||
setting = PluginsSetting('name', plugins=[plugin1, plugin2, plugin3])
|
||||
self.assertEqual(set(setting.get_enabled()), set(['plugin1', 'plugin3']))
|
||||
|
||||
|
||||
class TestPreferences(SearxTestCase):
    """Decoding of a URL-encoded (compressed) preferences blob."""

    def test_encode(self):
        # local import: avoids pulling searx.preferences at module load time
        from searx.preferences import Preferences

        pref = Preferences(['simple'], ['general'], {}, [])
        # zlib-compressed, urlsafe-base64 payload — presumably produced by an
        # earlier Preferences instance; the trailing '&q=' mimics a share URL.
        # TODO confirm against Preferences.parse_encoded_data's expected format.
        url_params = (
            'eJx1Vk1z4zYM_TXxRZNMd7eddg8-pe21nWnvGoiEJEQkofDDtvzrC1qSRdnbQxQTBA'
            'Hw8eGRCiJ27AnDsUOHHszBgOsSdHjU-Pr7HwfDCkweHCBFVmxHgxGPB7LiU4-eL9Px'
            'TzABDxZjz_r491___HsI0GJA8Ko__nSIPVo8BspLDx5DMjHU7GqH5zpCsyzXTLVMsj'
            'mhPzLI8I19d5iX1SFOUkUu4QD6BE6hrpcE8_LPhH6qydWRonjORnItOYqyXHk2Zs1E'
            'ARojAdB15GTrMA6VJe_Z13VLBsPL1_ccmk5YUajrBRqxNhSbpAaMdU1Rxkqp13iq6x'
            'Np5LxMI15RwtgUSOWx7iqNtyqI3S4Wej6TrmsWfHx2lcD5r-PSa7NWN8glxPxf5r5c'
            'ikGrPedw6wZaj1gFbuMZPFaaPKrIAtFceOvJDQSqCNBRJ7BAiGX6TtCEZt0ta2zQd8'
            'uwY-4MVqOBqYJxDFvucsbyiXLVd4i6kbUuMeqh8ZA_S1yyutlgIQfFYnLykziFH9vW'
            'kB8Uet5iDKQGCEWBhiSln6q80UDlBDch4psPSy1wNZMnVYR2o13m3ASwreQRnceRi2'
            'AjSNqOwsqWmbAZxSp_7kcBFnJBeHez4CKpKqieDQgsQREK5fNcBB_H3HrFIUUeJo4s'
            'Wx7Abekn6HnHpTM10348UMM8hEejdKbY8ncxfCaO-OgVOHn1ZJX2DRSf8px4eqj6y7'
            'dvv162anXS6LYjC3h1YEt_yx-IQ2lxcMo82gw-NVOHdj28EdHH1GDBFYuaQFIMQsrz'
            'GZtiyicrqlAYznyhgd2bHFeYHLvJYlHfy_svL7995bOjofp4ef_55fv36zRANbIJA2'
            'FX0C_v34oE3Es9oHtQIOFFZcilS5WdV_J5YUHRoeAvdCrZ0IDTCuy4sTOvHvMe96rl'
            'usfxs5rcrLuTv1lmOApYmqip6_bEz4eORSyR2xA8tmWxKnkvP3fM0Hgi4bpstFisWR'
            'TWV31adSdvSkPc7SkKbtOOTxgny05ALE6pNdL5vhQ5dFQKhYxjbpJZ0ChuSWcN22nh'
            'rGpPwC32HXSL7Qm8xf6Dzu6XfLfk19dFoZ4li1sRD9fJVVnWYOmiDCe97Uw0RGi4am'
            'o-JJA7IMMYUO7fIvM6N6ZG4ILlotrPhyjXSbSQqQZj7i2d-2pzGntRIHefJS8viwaK'
            '-iW6NN9uyTSuTP88CwtKrG-GPaSz6Qn92fwEtGxVk4QMrAhMdev7m6yMBLMOF86iZN'
            'JIe_xEadXAQuzW8HltyDCkJrmYVqVOI_oU7ijL64W03LLC81jcA8kFuQpDX1R90-b9'
            '_iZOD2J1t9xfE0BGSJ5PqHA7kUUudYuG7HFjz12C2Mz3zNhD8eQgFa_sdiy3InNWHg'
            'pV9OCCkWPUZRivRfA2g3DytC3fnlajSaJs4Zihvrwto7eeQxRVR3noCSDzhbZzYKjn'
            'd-DZy7PtaVp2WgvPBpzCXUL_J1OGex48RVmOXzBU8_N3kqekkefRDzxNK2_Klp9mBJ'
            'wsUnXyRqq1mScHuYalUY7_AZTCR4s=&q='
        )
        pref.parse_encoded_data(url_params)
        # after decoding, the 'categories' key/value setting reflects the
        # state captured in the encoded blob
        self.assertEqual(
            vars(pref.key_value_settings['categories']),
            {'value': ['general'], 'locked': False, 'choices': ['general', 'none']},
        )
@@ -0,0 +1,285 @@
|
||||
from searx import settings
|
||||
from searx.engines import load_engines
|
||||
from searx.query import RawTextQuery
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
# Minimal engine configuration used to exercise bang / shortcut parsing
# without loading the full default engine list.
TEST_ENGINES = [
    {
        'name': 'dummy engine',
        'engine': 'dummy',
        'categories': 'general',
        'shortcut': 'du',
        'timeout': 3.0,
        'tokens': [],
    },
]
class TestQuery(SearxTestCase):
    """Basic RawTextQuery behaviour without any query modifiers."""

    def test_simple_query(self):
        text = 'the query'
        q = RawTextQuery(text, [])

        self.assertEqual(q.getFullQuery(), text)
        self.assertEqual(len(q.query_parts), 0)
        self.assertEqual(len(q.user_query_parts), 2)
        self.assertEqual(len(q.languages), 0)
        self.assertFalse(q.specific)

    def test_multiple_spaces_query(self):
        # surrounding whitespace is stripped from the reassembled query
        q = RawTextQuery('\tthe query', [])

        self.assertEqual(q.getFullQuery(), 'the query')
        self.assertEqual(len(q.query_parts), 0)
        self.assertEqual(len(q.user_query_parts), 2)
        self.assertEqual(len(q.languages), 0)
        self.assertFalse(q.specific)

    def test_str_method(self):
        q = RawTextQuery('<7 the query', [])
        self.assertEqual(str(q), '<7 the query')

    def test_repr_method(self):
        text = '<8 the query'
        q = RawTextQuery(text, [])
        self.assertTrue(repr(q).startswith(f"<RawTextQuery query='{text}' "))

    def test_change_query(self):
        q = RawTextQuery('<8 the query', [])
        modified = q.changeQuery('another text')

        # changeQuery mutates in place and returns the same object
        self.assertEqual(q, modified)
        self.assertEqual(q.getFullQuery(), '<8 another text')
class TestLanguageParser(SearxTestCase):
    """Parsing of the ':<language>' query modifier."""

    def test_language_code(self):
        lang = 'es-ES'
        full_query = f':{lang} the query'
        q = RawTextQuery(full_query, [])

        self.assertEqual(q.getFullQuery(), full_query)
        self.assertEqual(len(q.query_parts), 1)
        self.assertEqual(len(q.languages), 1)
        self.assertIn(lang, q.languages)
        self.assertFalse(q.specific)

    def test_language_name(self):
        full_query = ':english the query'
        q = RawTextQuery(full_query, [])

        # a language name is resolved to its code
        self.assertEqual(q.getFullQuery(), full_query)
        self.assertEqual(len(q.query_parts), 1)
        self.assertIn('en', q.languages)
        self.assertFalse(q.specific)

    def test_unlisted_language_code(self):
        full_query = ':all the query'
        q = RawTextQuery(full_query, [])

        self.assertEqual(q.getFullQuery(), full_query)
        self.assertEqual(len(q.query_parts), 1)
        self.assertIn('all', q.languages)
        self.assertFalse(q.specific)

    def test_auto_language_code(self):
        full_query = ':auto una consulta'
        q = RawTextQuery(full_query, [])

        self.assertEqual(q.getFullQuery(), full_query)
        self.assertEqual(len(q.query_parts), 1)
        self.assertIn('auto', q.languages)
        self.assertFalse(q.specific)

    def test_invalid_language_code(self):
        # an unknown language is not treated as a modifier at all
        full_query = ':not_a_language the query'
        q = RawTextQuery(full_query, [])

        self.assertEqual(q.getFullQuery(), full_query)
        self.assertEqual(len(q.query_parts), 0)
        self.assertEqual(len(q.languages), 0)
        self.assertFalse(q.specific)

    def test_empty_colon_in_query(self):
        # a lone ':' inside the query is plain text
        q = RawTextQuery('the : query', [])

        self.assertEqual(q.getFullQuery(), 'the : query')
        self.assertEqual(len(q.query_parts), 0)
        self.assertEqual(len(q.languages), 0)
        self.assertFalse(q.specific)

    def test_autocomplete_empty(self):
        q = RawTextQuery('the query :', [])
        self.assertEqual(q.autocomplete_list, [":en", ":en_us", ":english", ":united_kingdom"])

    def test_autocomplete(self):
        # each prefix must suggest exactly the expected completions
        cases = [
            (':englis', [":english"]),
            (':deutschla', [":deutschland"]),
            (':new_zea', [":new_zealand"]),
            (':hu-H', [":hu-hu"]),
            (':zh-', [':zh-cn', ':zh-hk', ':zh-tw']),
        ]
        for prefix, expected in cases:
            q = RawTextQuery(prefix, [])
            self.assertEqual(q.autocomplete_list, expected)
class TestTimeoutParser(SearxTestCase):
    """Parsing of the '<' timeout query modifier."""

    def check_timeout(self, text, expected_limit):
        """Assert *text* parses as one timeout bang yielding *expected_limit*."""
        q = RawTextQuery(text, [])
        self.assertEqual(q.getFullQuery(), text)
        self.assertEqual(len(q.query_parts), 1)
        self.assertEqual(q.timeout_limit, expected_limit)
        self.assertFalse(q.specific)

    def test_timeout_below100(self):
        # values below 100 are seconds
        self.check_timeout('<3 the query', 3)

    def test_timeout_above100(self):
        # values of 100 and above are milliseconds
        self.check_timeout('<350 the query', 0.35)

    def test_timeout_above1000(self):
        self.check_timeout('<3500 the query', 3.5)

    def test_timeout_invalid(self):
        # not a number: '<xxx' is no bang, it stays part of the query
        q = RawTextQuery('<xxx the query', [])

        self.assertEqual(q.getFullQuery(), '<xxx the query')
        self.assertEqual(len(q.query_parts), 0)
        self.assertEqual(q.getQuery(), '<xxx the query')
        self.assertEqual(q.timeout_limit, None)
        self.assertFalse(q.specific)

    def test_timeout_autocomplete(self):
        # a trailing bare '<' keeps the query intact and offers completions
        q = RawTextQuery('the query <', [])

        self.assertEqual(q.getFullQuery(), 'the query <')
        self.assertEqual(len(q.query_parts), 0)
        self.assertEqual(q.getQuery(), 'the query <')
        self.assertEqual(q.timeout_limit, None)
        self.assertFalse(q.specific)
        self.assertEqual(q.autocomplete_list, ['<3', '<850'])
class TestExternalBangParser(SearxTestCase):
    """Parsing of the '!!<bang>' external bang query modifier."""

    def test_external_bang(self):
        q = RawTextQuery('!!ddg the query', [])

        self.assertEqual(q.getFullQuery(), '!!ddg the query')
        self.assertEqual(len(q.query_parts), 1)
        self.assertFalse(q.specific)

    def test_external_bang_not_found(self):
        # an unknown external bang is kept in the query but not resolved
        q = RawTextQuery('!!notfoundbang the query', [])

        self.assertEqual(q.getFullQuery(), '!!notfoundbang the query')
        self.assertEqual(q.external_bang, None)
        self.assertFalse(q.specific)

    def test_external_bang_autocomplete(self):
        q = RawTextQuery('the query !!dd', [])

        # the recognized bang is moved to the front of the query
        self.assertEqual(q.getFullQuery(), '!!dd the query')
        self.assertEqual(len(q.query_parts), 1)
        self.assertFalse(q.specific)
        self.assertGreater(len(q.autocomplete_list), 0)

        first = q.autocomplete_list[0]
        self.assertEqual(q.get_autocomplete_full_query(first), first + ' the query')

    def test_external_bang_autocomplete_empty(self):
        q = RawTextQuery('the query !!', [])

        self.assertEqual(q.getFullQuery(), 'the query !!')
        self.assertEqual(len(q.query_parts), 0)
        self.assertFalse(q.specific)
        self.assertGreater(len(q.autocomplete_list), 2)

        first = q.autocomplete_list[0]
        self.assertEqual(q.get_autocomplete_full_query(first), 'the query ' + first)
class TestBang(SearxTestCase):
    """Parsing of the '!<engine|shortcut|category>' bang query modifier."""

    # bangs which must set RawTextQuery.specific (engine name, shortcut, category)
    SPECIFIC_BANGS = ['!dummy_engine', '!du', '!general']
    THE_QUERY = 'the query'

    def test_bang(self):
        load_engines(TEST_ENGINES)

        for bang in TestBang.SPECIFIC_BANGS:
            with self.subTest(msg="Check bang", bang=bang):
                query_text = TestBang.THE_QUERY + ' ' + bang
                query = RawTextQuery(query_text, [])

                # the bang is moved in front of the user query terms
                self.assertEqual(query.getFullQuery(), bang + ' ' + TestBang.THE_QUERY)
                self.assertEqual(query.query_parts, [bang])
                self.assertEqual(query.user_query_parts, TestBang.THE_QUERY.split(' '))

    def test_specific(self):
        # load the engines explicitly: previously this test silently relied on
        # another test method having populated the engine registry first, which
        # made it depend on test execution order
        load_engines(TEST_ENGINES)

        for bang in TestBang.SPECIFIC_BANGS:
            with self.subTest(msg="Check bang is specific", bang=bang):
                query_text = TestBang.THE_QUERY + ' ' + bang
                query = RawTextQuery(query_text, [])
                self.assertTrue(query.specific)

    def test_bang_not_found(self):
        load_engines(TEST_ENGINES)
        # an unknown bang stays part of the query text
        query = RawTextQuery('the query !bang_not_found', [])
        self.assertEqual(query.getFullQuery(), 'the query !bang_not_found')

    def test_bang_autocomplete(self):
        load_engines(TEST_ENGINES)
        query = RawTextQuery('the query !dum', [])
        self.assertEqual(query.autocomplete_list, ['!dummy_engine'])

        # autocompletion only applies to the last token of the query
        query = RawTextQuery('!dum the query', [])
        self.assertEqual(query.autocomplete_list, [])
        self.assertEqual(query.getQuery(), '!dum the query')

    def test_bang_autocomplete_empty(self):
        # uses the full default engine list so several suggestions exist
        load_engines(settings['engines'])
        query = RawTextQuery('the query !', [])
        self.assertEqual(query.autocomplete_list, ['!images', '!wikipedia', '!osm'])

        # engines disabled for this request are not suggested
        query = RawTextQuery('the query !', ['osm'])
        self.assertEqual(query.autocomplete_list, ['!images', '!wikipedia'])
@@ -0,0 +1,41 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from searx.results import ResultContainer
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
def fake_result(url='https://aa.bb/cc?dd=ee#ff', title='aaa', content='bbb', engine='wikipedia', **kwargs):
    """Build a minimal result dict; extra keyword arguments extend/override it."""
    result = {
        'url': url,
        'title': title,
        'content': content,
        'engine': engine,
    }
    result.update(kwargs)
    return result
# TODO
|
||||
class ResultContainerTestCase(SearxTestCase):
    """Behaviour of ResultContainer with a handful of plain results."""

    def test_empty(self):
        container = ResultContainer()
        self.assertEqual(container.get_ordered_results(), [])

    def test_one_result(self):
        container = ResultContainer()
        container.extend('wikipedia', [fake_result()])
        self.assertEqual(container.results_length(), 1)

    def test_one_suggestion(self):
        # a suggestion is collected separately and not counted as a result
        container = ResultContainer()
        container.extend('wikipedia', [fake_result(suggestion=True)])
        self.assertEqual(len(container.suggestions), 1)
        self.assertEqual(container.results_length(), 0)

    def test_result_merge(self):
        # the same URL delivered by two engines is merged into one result
        container = ResultContainer()
        container.extend('wikipedia', [fake_result()])
        container.extend('wikidata', [fake_result(), fake_result(url='https://example.com/')])
        self.assertEqual(container.results_length(), 2)
@@ -0,0 +1,136 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from copy import copy
|
||||
|
||||
import searx.search
|
||||
from searx.search import SearchQuery, EngineRef
|
||||
from searx import settings
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
# search parameters shared by every query built in this module
SAFESEARCH = 0
PAGENO = 1
PUBLIC_ENGINE_NAME = 'general dummy'
# minimal engine configuration: a single public dummy engine in 'general'
TEST_ENGINES = [
    {
        'name': PUBLIC_ENGINE_NAME,
        'engine': 'dummy',
        'categories': 'general',
        'shortcut': 'gd',
        'timeout': 3.0,
        'tokens': [],
    },
]
class SearchQueryTestCase(SearxTestCase):
    """Value semantics of SearchQuery: repr, equality and copying."""

    def test_repr(self):
        sq = SearchQuery('test', [EngineRef('bing', 'general')], 'all', 0, 1, '1', 5.0, 'g')
        expected = "SearchQuery('test', [EngineRef('bing', 'general')], 'all', 0, 1, '1', 5.0, 'g')"
        self.assertEqual(repr(sq), expected)

    def test_eq(self):
        sq_bing = SearchQuery('test', [EngineRef('bing', 'general')], 'all', 0, 1, None, None, None)
        sq_google = SearchQuery('test', [EngineRef('google', 'general')], 'all', 0, 1, None, None, None)

        self.assertEqual(sq_bing, sq_bing)
        # a different engine list makes the queries unequal
        self.assertNotEqual(sq_bing, sq_google)

    def test_copy(self):
        original = SearchQuery('test', [EngineRef('bing', 'general')], 'all', 0, 1, None, None, None)
        duplicate = copy(original)
        self.assertEqual(original, duplicate)
||||
class SearchTestCase(SearxTestCase):
    """Integration-level tests of searx.search.Search: timeout computation
    and external bang redirects."""

    def setUp(self):
        # note: the original comment was '# pylint disable=...' (missing the
        # colon), which pylint silently ignores — fixed to a valid directive
        from searx import webapp  # pylint: disable=import-outside-toplevel

        self.app = webapp.app

    @classmethod
    def setUpClass(cls):
        searx.search.initialize(TEST_ENGINES)

    def _search_with_timeout(self, max_request_timeout, query_timeout):
        """Run one search with the given timeout settings.

        Sets ``outgoing.max_request_timeout``, issues a query against the
        public dummy engine and returns ``search.actual_timeout``.
        """
        settings['outgoing']['max_request_timeout'] = max_request_timeout
        search_query = SearchQuery(
            'test', [EngineRef(PUBLIC_ENGINE_NAME, 'general')], 'en-US', SAFESEARCH, PAGENO, None, query_timeout
        )
        search = searx.search.Search(search_query)
        with self.app.test_request_context('/search'):
            search.search()
        return search.actual_timeout

    def test_timeout_simple(self):
        # no limits requested: the engine's own timeout (3.0s) applies
        self.assertEqual(self._search_with_timeout(None, None), 3.0)

    def test_timeout_query_above_default_nomax(self):
        # without a max, a query timeout above the engine timeout is capped to it
        self.assertEqual(self._search_with_timeout(None, 5.0), 3.0)

    def test_timeout_query_below_default_nomax(self):
        # a query timeout below the engine timeout wins
        self.assertEqual(self._search_with_timeout(None, 1.0), 1.0)

    def test_timeout_query_below_max(self):
        # with max_request_timeout set, the query may exceed the engine timeout
        self.assertEqual(self._search_with_timeout(10.0, 5.0), 5.0)

    def test_timeout_query_above_max(self):
        # the query timeout is clamped to max_request_timeout
        self.assertEqual(self._search_with_timeout(10.0, 15.0), 10.0)

    def test_external_bang(self):
        search_query = SearchQuery(
            'yes yes',
            [EngineRef(PUBLIC_ENGINE_NAME, 'general')],
            'en-US',
            SAFESEARCH,
            PAGENO,
            None,
            None,
            external_bang="yt",
        )
        search = searx.search.Search(search_query)
        results = search.search()
        # the youtube external bang redirects the user
        self.assertTrue(results.redirect_url is not None)

        search_query = SearchQuery(
            'youtube never gonna give you up',
            [EngineRef(PUBLIC_ENGINE_NAME, 'general')],
            'en-US',
            SAFESEARCH,
            PAGENO,
            None,
            None,
        )

        search = searx.search.Search(search_query)
        with self.app.test_request_context('/search'):
            results = search.search()
        # merely mentioning youtube in the terms must not trigger a redirect
        self.assertTrue(results.redirect_url is None)
@@ -0,0 +1,122 @@
|
||||
# SPDX-License-Identifier: AGPL-3.0-or-later
|
||||
|
||||
from os.path import dirname, join, abspath
|
||||
from unittest.mock import patch
|
||||
|
||||
from searx.exceptions import SearxSettingsException
|
||||
from searx import settings_loader
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
# absolute path of this test module's directory; settings fixtures live beneath it
test_dir = abspath(dirname(__file__))
class TestLoad(SearxTestCase):
    """Error handling of settings_loader.load_yaml and helpers."""

    def test_load_zero(self):
        # unreadable / non-YAML input
        with self.assertRaises(SearxSettingsException):
            settings_loader.load_yaml('/dev/zero')

        # fix: the fixture paths had a leading '/' — os.path.join discards all
        # previous components when a later one is absolute, so the tests pointed
        # at a nonexistent absolute path and raised for "file not found" instead
        # of the intended syntax-error / empty-file cases
        with self.assertRaises(SearxSettingsException):
            settings_loader.load_yaml(join(test_dir, 'settings/syntaxerror_settings.yml'))

        with self.assertRaises(SearxSettingsException):
            settings_loader.load_yaml(join(test_dir, 'settings/empty_settings.yml'))

    def test_existing_filename_or_none(self):
        # a path that is no regular readable settings file yields None
        self.assertIsNone(settings_loader.existing_filename_or_none('/dev/zero'))

        # an existing file is returned unchanged, even if its content is invalid
        bad_settings_path = join(test_dir, 'settings/syntaxerror_settings.yml')
        self.assertEqual(settings_loader.existing_filename_or_none(bad_settings_path), bad_settings_path)
||||
class TestDefaultSettings(SearxTestCase):
    """Shape and key values of the shipped default settings."""

    def test_load(self):
        settings, msg = settings_loader.load_settings(load_user_settings=False)
        self.assertTrue(msg.startswith('load the default settings from'))
        self.assertFalse(settings['general']['debug'])
        self.assertIsInstance(settings['general']['instance_name'], str)
        self.assertEqual(settings['server']['secret_key'], "ultrasecretkey")
        self.assertIsInstance(settings['server']['port'], int)
        self.assertIsInstance(settings['server']['bind_address'], str)
        self.assertIsInstance(settings['engines'], list)
        self.assertIsInstance(settings['doi_resolvers'], dict)
        self.assertIsInstance(settings['default_doi_resolver'], str)
||||
class TestUserSettings(SearxTestCase):
    """Merging of user settings files into the default settings."""

    @staticmethod
    def _load_from(settings_path):
        """Load settings with SEARXNG_SETTINGS_PATH pointing at *settings_path*."""
        with patch.dict(settings_loader.environ, {'SEARXNG_SETTINGS_PATH': settings_path}):
            return settings_loader.load_settings()

    def test_is_use_default_settings(self):
        is_use = settings_loader.is_use_default_settings
        self.assertFalse(is_use({}))
        self.assertTrue(is_use({'use_default_settings': True}))
        self.assertTrue(is_use({'use_default_settings': {}}))
        # anything that is neither bool nor dict is rejected
        with self.assertRaises(ValueError):
            is_use({'use_default_settings': 1})
        with self.assertRaises(ValueError):
            is_use({'use_default_settings': 0})

    def test_user_settings_not_found(self):
        # an empty user settings file falls back to the defaults
        settings, msg = self._load_from('/dev/null')
        self.assertTrue(msg.startswith('load the default settings from'))
        self.assertEqual(settings['server']['secret_key'], "ultrasecretkey")

    def test_user_settings(self):
        settings, msg = self._load_from(join(test_dir, 'settings/user_settings_simple.yml'))
        self.assertTrue(msg.startswith('merge the default settings'))
        self.assertEqual(settings['server']['secret_key'], "user_secret_key")
        self.assertEqual(settings['server']['default_http_headers']['Custom-Header'], "Custom-Value")

    def test_user_settings_remove(self):
        settings, msg = self._load_from(join(test_dir, 'settings/user_settings_remove.yml'))
        self.assertTrue(msg.startswith('merge the default settings'))
        self.assertEqual(settings['server']['secret_key'], "user_secret_key")
        self.assertEqual(settings['server']['default_http_headers']['Custom-Header'], "Custom-Value")

        engine_names = [engine['name'] for engine in settings['engines']]
        self.assertNotIn('wikinews', engine_names)
        self.assertNotIn('wikibooks', engine_names)
        self.assertIn('wikipedia', engine_names)

    def test_user_settings_remove2(self):
        settings, msg = self._load_from(join(test_dir, 'settings/user_settings_remove2.yml'))
        self.assertTrue(msg.startswith('merge the default settings'))
        self.assertEqual(settings['server']['secret_key'], "user_secret_key")
        self.assertEqual(settings['server']['default_http_headers']['Custom-Header'], "Custom-Value")

        engine_names = [engine['name'] for engine in settings['engines']]
        self.assertNotIn('wikinews', engine_names)
        self.assertNotIn('wikibooks', engine_names)
        self.assertIn('wikipedia', engine_names)

        wikipedia = next(engine for engine in settings['engines'] if engine.get('name') == 'wikipedia')
        self.assertEqual(wikipedia['engine'], 'wikipedia')
        self.assertEqual(wikipedia['tokens'], ['secret_token'])
        newengine = next(engine for engine in settings['engines'] if engine.get('name') == 'newengine')
        self.assertEqual(newengine['engine'], 'dummy')

    def test_user_settings_keep_only(self):
        settings, msg = self._load_from(join(test_dir, 'settings/user_settings_keep_only.yml'))
        self.assertTrue(msg.startswith('merge the default settings'))
        engine_names = [engine['name'] for engine in settings['engines']]
        self.assertEqual(engine_names, ['wikibooks', 'wikinews', 'wikipedia', 'newengine'])
        # wikipedia has been removed, then added again with the "engine" section
        # of user_settings_keep_only.yml, so its definition holds only the name
        self.assertEqual(len(settings['engines'][2]), 1)

    def test_custom_settings(self):
        # a file without use_default_settings replaces the defaults entirely
        settings, msg = self._load_from(join(test_dir, 'settings/user_settings.yml'))
        self.assertTrue(msg.startswith('load the user settings from'))
        self.assertEqual(settings['server']['port'], 9000)
        self.assertEqual(settings['server']['secret_key'], "user_settings_secret")
        engine_names = [engine['name'] for engine in settings['engines']]
        self.assertEqual(engine_names, ['wikidata', 'wikibooks', 'wikinews', 'wikiquote'])
@@ -0,0 +1,223 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import lxml.etree
|
||||
from lxml import html
|
||||
|
||||
from searx.exceptions import SearxXPathSyntaxException, SearxEngineXPathException
|
||||
from searx import utils
|
||||
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
class TestUtils(SearxTestCase):
    """Unit tests for assorted helpers in searx.utils."""

    def test_gen_useragent(self):
        ua = utils.gen_useragent()
        self.assertIsInstance(ua, str)
        self.assertIsNotNone(ua)
        self.assertTrue(ua.startswith('Mozilla'))

    def test_searx_useragent(self):
        ua = utils.searx_useragent()
        self.assertIsInstance(ua, str)
        self.assertIsNotNone(ua)
        self.assertTrue(ua.startswith('searx'))

    def test_html_to_text(self):
        html_str = """
        <a href="/testlink" class="link_access_account">
            <style>
                .toto {
                    color: red;
                }
            </style>
            <span class="toto">
                <span>
                    <img src="test.jpg" />
                </span>
            </span>
            <span class="titi">
                Test text
            </span>
            <script>value='dummy';</script>
        </a>
        """
        extracted = utils.html_to_text(html_str)
        self.assertIsInstance(extracted, str)
        self.assertIsNotNone(extracted)
        # style and script contents are dropped, only the visible text remains
        self.assertEqual(extracted, "Test text")

    def test_extract_text(self):
        html_str = """
        <a href="/testlink" class="link_access_account">
            <span class="toto">
                <span>
                    <img src="test.jpg" />
                </span>
            </span>
            <span class="titi">
                Test text
            </span>
        </a>
        """
        dom = html.fromstring(html_str)
        self.assertEqual(utils.extract_text(dom), 'Test text')
        self.assertEqual(utils.extract_text(dom.xpath('//span')), 'Test text')
        self.assertEqual(utils.extract_text(dom.xpath('//span/text()')), 'Test text')
        # non-node XPath results are converted to their string representation
        self.assertEqual(utils.extract_text(dom.xpath('count(//span)')), '3.0')
        self.assertEqual(utils.extract_text(dom.xpath('boolean(//span)')), 'True')
        self.assertEqual(utils.extract_text(dom.xpath('//img/@src')), 'test.jpg')
        self.assertEqual(utils.extract_text(dom.xpath('//unexistingtag')), '')
        self.assertEqual(utils.extract_text(None, allow_none=True), None)
        with self.assertRaises(ValueError):
            utils.extract_text(None)
        with self.assertRaises(ValueError):
            utils.extract_text({})

    def test_extract_url(self):
        def extract(html_str, search_url):
            return utils.extract_url(html.fromstring(html_str), search_url)

        self.assertEqual(extract('<span id="42">https://example.com</span>', 'http://example.com/'), 'https://example.com/')
        self.assertEqual(extract('https://example.com', 'http://example.com/'), 'https://example.com/')
        # scheme-relative URLs inherit the scheme of the search URL
        self.assertEqual(extract('//example.com', 'http://example.com/'), 'http://example.com/')
        self.assertEqual(extract('//example.com', 'https://example.com/'), 'https://example.com/')
        self.assertEqual(extract('/path?a=1', 'https://example.com'), 'https://example.com/path?a=1')
        with self.assertRaises(lxml.etree.ParserError):
            extract('', 'https://example.com')
        with self.assertRaises(Exception):
            utils.extract_url([], 'https://example.com')

    def test_html_to_text_invalid(self):
        # local renamed from 'html' to avoid shadowing the lxml.html import
        invalid_html = '<p><b>Lorem ipsum</i>dolor sit amet</p>'
        self.assertEqual(utils.html_to_text(invalid_html), "Lorem ipsum")

    def test_ecma_unscape(self):
        self.assertEqual(utils.ecma_unescape('text%20with%20space'), 'text with space')
        self.assertEqual(utils.ecma_unescape('text using %xx: %F3'), 'text using %xx: ó')
        self.assertEqual(utils.ecma_unescape('text using %u: %u5409, %u4E16%u754c'), 'text using %u: 吉, 世界')
||||
class TestHTMLTextExtractor(SearxTestCase):
    """Direct tests of the internal _HTMLTextExtractor parser."""

    def setUp(self):
        self.html_text_extractor = utils._HTMLTextExtractor()

    def test__init__(self):
        self.assertEqual(self.html_text_extractor.result, [])

    def test_handle_charref(self):
        # hexadecimal character references, lower- and upper-case prefix
        for ref in ('xF', 'XF'):
            self.html_text_extractor.handle_charref(ref)
            self.assertIn('\x0f', self.html_text_extractor.result)

        # decimal character reference
        self.html_text_extractor.handle_charref('97')
        self.assertIn('a', self.html_text_extractor.result)

    def test_handle_entityref(self):
        entity = 'test'
        self.html_text_extractor.handle_entityref(entity)
        self.assertIn(entity, self.html_text_extractor.result)

    def test_invalid_html(self):
        # a mismatched closing tag raises instead of being silently ignored
        with self.assertRaises(utils._HTMLTextExtractorException):
            self.html_text_extractor.feed('<p><b>Lorem ipsum</i>dolor sit amet</p>')
||||
class TestXPathUtils(SearxTestCase):
    """Tests for the XPath helpers and language detection in searx.utils."""

    # minimal HTML document shared by the XPath tests below
    TEST_DOC = """<ul>
        <li>Text in <b>bold</b> and <i>italic</i> </li>
        <li>Another <b>text</b> <img src="data:image/gif;base64,R0lGODlhAQABAIAAAAUEBAAAACwAAAAAAQABAAACAkQBADs="></li>
        </ul>"""

    def test_get_xpath_cache(self):
        """get_xpath() must cache compiled expressions: same input, same object."""
        xp1 = utils.get_xpath('//a')
        xp2 = utils.get_xpath('//div')
        xp3 = utils.get_xpath('//a')

        self.assertEqual(id(xp1), id(xp3))
        self.assertNotEqual(id(xp1), id(xp2))

    def test_get_xpath_type(self):
        """get_xpath() accepts str and lxml XPath objects; everything else raises TypeError."""
        utils.get_xpath(lxml.etree.XPath('//a'))

        with self.assertRaises(TypeError):
            utils.get_xpath([])

    def test_get_xpath_invalid(self):
        """A syntactically invalid expression raises SearxXPathSyntaxException."""
        invalid_xpath = '//a[0].text'
        with self.assertRaises(SearxXPathSyntaxException) as context:
            utils.get_xpath(invalid_xpath)

        self.assertEqual(context.exception.message, 'Invalid expression')
        self.assertEqual(context.exception.xpath_str, invalid_xpath)

    def test_eval_xpath_unregistered_function(self):
        """Calling an unknown XPath function raises SearxEngineXPathException."""
        doc = html.fromstring(TestXPathUtils.TEST_DOC)

        invalid_function_xpath = 'int(//a)'
        with self.assertRaises(SearxEngineXPathException) as context:
            utils.eval_xpath(doc, invalid_function_xpath)

        self.assertEqual(context.exception.message, 'Unregistered function')
        self.assertEqual(context.exception.xpath_str, invalid_function_xpath)

    def test_eval_xpath(self):
        """eval_xpath() returns lists for node sets and scalars for XPath functions."""
        doc = html.fromstring(TestXPathUtils.TEST_DOC)

        self.assertEqual(utils.eval_xpath(doc, '//p'), [])
        self.assertEqual(utils.eval_xpath(doc, '//i/text()'), ['italic'])
        self.assertEqual(utils.eval_xpath(doc, 'count(//i)'), 1.0)

    def test_eval_xpath_list(self):
        """eval_xpath_list() enforces list results and a minimum length."""
        doc = html.fromstring(TestXPathUtils.TEST_DOC)

        # check a not empty list
        self.assertEqual(utils.eval_xpath_list(doc, '//i/text()'), ['italic'])

        # check min_len parameter
        with self.assertRaises(SearxEngineXPathException) as context:
            utils.eval_xpath_list(doc, '//p', min_len=1)
        self.assertEqual(context.exception.message, 'len(xpath_str) < 1')
        self.assertEqual(context.exception.xpath_str, '//p')

    def test_eval_xpath_getindex(self):
        """eval_xpath_getindex() returns one element, a default, or raises."""
        doc = html.fromstring(TestXPathUtils.TEST_DOC)

        # check index 0
        self.assertEqual(utils.eval_xpath_getindex(doc, '//i/text()', 0), 'italic')

        # default is 'something'
        self.assertEqual(utils.eval_xpath_getindex(doc, '//i/text()', 1, default='something'), 'something')

        # default is None
        self.assertEqual(utils.eval_xpath_getindex(doc, '//i/text()', 1, default=None), None)

        # index not found
        with self.assertRaises(SearxEngineXPathException) as context:
            utils.eval_xpath_getindex(doc, '//i/text()', 1)
        self.assertEqual(context.exception.message, 'index 1 not found')

        # not a list
        with self.assertRaises(SearxEngineXPathException) as context:
            utils.eval_xpath_getindex(doc, 'count(//i)', 1)
        self.assertEqual(context.exception.message, 'the result is not a list')

    def test_detect_language(self):
        """detect_language() returns a language code for clear text, else None.

        Fix: the single-letter local ``l`` is an ambiguous name (PEP 8 /
        flake8 E741); renamed to ``detected``.
        """
        # make sure new lines are not an issue
        # fasttext.predict('') does not accept new lines.
        detected = utils.detect_language('The quick brown fox jumps over\nthe lazy dog')
        self.assertEqual(detected, 'en')

        detected = utils.detect_language('いろはにほへと ちりぬるを わかよたれそ つねならむ うゐのおくやま けふこえて あさきゆめみし ゑひもせす')
        self.assertEqual(detected, 'ja')

        detected = utils.detect_language('Pijamalı hasta yağız şoföre çabucak güvendi.')
        self.assertEqual(detected, 'tr')

        # empty input cannot be classified
        detected = utils.detect_language('')
        self.assertIsNone(detected)

        # mix languages --> None
        detected = utils.detect_language('The いろはにほへと Pijamalı')
        self.assertIsNone(detected)

        # non-str input is rejected
        with self.assertRaises(ValueError):
            utils.detect_language(None)
|
||||
@@ -0,0 +1,53 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from searx.preferences import Preferences
|
||||
from searx.engines import engines
|
||||
|
||||
import searx.search
|
||||
from searx.search import EngineRef
|
||||
from searx.webadapter import validate_engineref_list
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
# name of the single, token-protected offline engine used by these tests
PRIVATE_ENGINE_NAME = 'general private offline'
# engine list passed to searx.search.initialize(): one private engine that
# is only accessible when the preferences carry the token 'my-token'
TEST_ENGINES = [
    {
        'name': PRIVATE_ENGINE_NAME,
        'engine': 'dummy-offline',
        'categories': 'general',
        'shortcut': 'do',
        'timeout': 3.0,
        'engine_type': 'offline',
        'tokens': ['my-token'],
    },
]
# engine reference list naming the private engine in the 'general' category
SEARCHQUERY = [EngineRef(PRIVATE_ENGINE_NAME, 'general')]
|
||||
|
||||
|
||||
class ValidateQueryCase(SearxTestCase):
    """validate_engineref_list() must gate private engines on their access token."""

    @classmethod
    def setUpClass(cls):
        searx.search.initialize(TEST_ENGINES)

    def _validate(self, token=None):
        """Validate SEARCHQUERY with fresh preferences, optionally carrying *token*."""
        prefs = Preferences(['simple'], ['general'], engines, [])
        if token is not None:
            prefs.parse_dict({'tokens': token})
        return validate_engineref_list(SEARCHQUERY, prefs)

    def test_query_private_engine_without_token(self):
        valid, unknown, invalid_token = self._validate()
        self.assertEqual(len(valid), 0)
        self.assertEqual(len(unknown), 0)
        self.assertEqual(len(invalid_token), 1)

    def test_query_private_engine_with_incorrect_token(self):
        valid, unknown, invalid_token = self._validate(token='bad-token')
        self.assertEqual(len(valid), 0)
        self.assertEqual(len(unknown), 0)
        self.assertEqual(len(invalid_token), 1)

    def test_query_private_engine_with_correct_token(self):
        valid, unknown, invalid_token = self._validate(token='my-token')
        self.assertEqual(len(valid), 1)
        self.assertEqual(len(unknown), 0)
        self.assertEqual(len(invalid_token), 0)
|
||||
@@ -0,0 +1,257 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import json
|
||||
from urllib.parse import ParseResult
|
||||
from mock import Mock
|
||||
from searx.results import Timing
|
||||
|
||||
import searx.search.processors
|
||||
from searx.search import Search
|
||||
from searx.preferences import Preferences
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
class ViewsTestCase(SearxTestCase):
    """End-to-end tests of the Flask views via the test client.

    ``setUp`` monkey-patches the search machinery (via ``setattr4test``, which
    restores the originals on cleanup) so no engine is ever contacted.
    """

    def setUp(self):
        # skip init function (no external HTTP request)
        def dummy(*args, **kwargs):
            pass

        self.setattr4test(searx.search.processors, 'initialize_processor', dummy)

        # imported here so the patch above is in place before webapp loads
        from searx import webapp  # pylint: disable=import-outside-toplevel

        webapp.app.config['TESTING'] = True  # to get better error messages
        self.app = webapp.app.test_client()

        # remove sha for the static file
        # so the tests don't have to care about the changing URLs
        for k in webapp.static_files:
            webapp.static_files[k] = None

        # set some defaults
        test_results = [
            {
                'content': 'first test content',
                'title': 'First Test',
                'url': 'http://first.test.xyz',
                'engines': ['youtube', 'startpage'],
                'engine': 'startpage',
                'parsed_url': ParseResult(
                    scheme='http', netloc='first.test.xyz', path='/', params='', query='', fragment=''
                ),
                'template': 'default.html',
            },
            {
                'content': 'second test content',
                'title': 'Second Test',
                'url': 'http://second.test.xyz',
                'engines': ['youtube', 'startpage'],
                'engine': 'youtube',
                'parsed_url': ParseResult(
                    scheme='http', netloc='second.test.xyz', path='/', params='', query='', fragment=''
                ),
                'template': 'default.html',
            },
        ]

        timings = [
            Timing(engine='startpage', total=0.8, load=0.7),
            Timing(engine='youtube', total=0.9, load=0.6),
        ]

        # replacement for Search.search() returning the canned results above
        def search_mock(search_self, *args):
            search_self.result_container = Mock(
                get_ordered_results=lambda: test_results,
                answers=dict(),
                corrections=set(),
                suggestions=set(),
                infoboxes=[],
                unresponsive_engines=set(),
                results=test_results,
                number_of_results=3,
                results_length=lambda: len(test_results),
                get_timings=lambda: timings,
                redirect_url=None,
                engine_data={},
            )

        self.setattr4test(Search, 'search', search_mock)

        original_preferences_get_value = Preferences.get_value

        # pin the theme to 'simple' so rendered markup is predictable;
        # every other preference falls through to the real implementation
        def preferences_get_value(preferences_self, user_setting_name: str):
            if user_setting_name == 'theme':
                return 'simple'
            return original_preferences_get_value(preferences_self, user_setting_name)

        self.setattr4test(Preferences, 'get_value', preferences_get_value)

        self.maxDiff = None  # to see full diffs

    def test_index_empty(self):
        """GET-less POST to '/' renders the start page."""
        result = self.app.post('/')
        self.assertEqual(result.status_code, 200)
        self.assertIn(
            b'<div class="title"><h1>SearXNG</h1></div>',
            result.data,
        )

    def test_index_html_post(self):
        """POSTing a query to '/' permanently redirects (308) to '/search'."""
        result = self.app.post('/', data={'q': 'test'})
        self.assertEqual(result.status_code, 308)
        self.assertEqual(result.location, '/search')

    def test_index_html_get(self):
        # NOTE(review): despite the name, this uses POST with a query string;
        # confirm whether self.app.get was intended here.
        result = self.app.post('/?q=test')
        self.assertEqual(result.status_code, 308)
        self.assertEqual(result.location, '/search?q=test')

    def test_search_empty_html(self):
        """An empty query still renders the HTML start page (200)."""
        result = self.app.post('/search', data={'q': ''})
        self.assertEqual(result.status_code, 200)
        self.assertIn(b'<div class="title"><h1>SearXNG</h1></div>', result.data)

    def test_search_empty_json(self):
        """An empty query in a machine-readable format is a client error (400)."""
        result = self.app.post('/search', data={'q': '', 'format': 'json'})
        self.assertEqual(result.status_code, 400)

    def test_search_empty_csv(self):
        result = self.app.post('/search', data={'q': '', 'format': 'csv'})
        self.assertEqual(result.status_code, 400)

    def test_search_empty_rss(self):
        result = self.app.post('/search', data={'q': '', 'format': 'rss'})
        self.assertEqual(result.status_code, 400)

    def test_search_html(self):
        """Result URLs and highlighted content appear in the rendered HTML."""
        result = self.app.post('/search', data={'q': 'test'})

        self.assertIn(
            b'<span class="url_o1"><span class="url_i1">http://second.test.xyz</span></span>',
            result.data,
        )
        self.assertIn(
            b'<p class="content">\n    second <span class="highlight">test</span> ',
            result.data,
        )

    def test_index_json(self):
        result = self.app.post('/', data={'q': 'test', 'format': 'json'})
        self.assertEqual(result.status_code, 308)

    def test_search_json(self):
        """JSON output carries the query and the two mocked results."""
        result = self.app.post('/search', data={'q': 'test', 'format': 'json'})
        result_dict = json.loads(result.data.decode())

        self.assertEqual('test', result_dict['query'])
        self.assertEqual(len(result_dict['results']), 2)
        self.assertEqual(result_dict['results'][0]['content'], 'first test content')
        self.assertEqual(result_dict['results'][0]['url'], 'http://first.test.xyz')

    def test_index_csv(self):
        result = self.app.post('/', data={'q': 'test', 'format': 'csv'})
        self.assertEqual(result.status_code, 308)

    def test_search_csv(self):
        """CSV output is a header row plus one row per mocked result."""
        result = self.app.post('/search', data={'q': 'test', 'format': 'csv'})

        self.assertEqual(
            b'title,url,content,host,engine,score,type\r\n'
            b'First Test,http://first.test.xyz,first test content,first.test.xyz,startpage,,result\r\n'  # noqa
            b'Second Test,http://second.test.xyz,second test content,second.test.xyz,youtube,,result\r\n',  # noqa
            result.data,
        )

    def test_index_rss(self):
        result = self.app.post('/', data={'q': 'test', 'format': 'rss'})
        self.assertEqual(result.status_code, 308)

    def test_search_rss(self):
        """RSS output carries description, totals and per-result items."""
        result = self.app.post('/search', data={'q': 'test', 'format': 'rss'})

        self.assertIn(b'<description>Search results for "test" - searx</description>', result.data)

        self.assertIn(b'<opensearch:totalResults>3</opensearch:totalResults>', result.data)

        self.assertIn(b'<title>First Test</title>', result.data)

        self.assertIn(b'<link>http://first.test.xyz</link>', result.data)

        self.assertIn(b'<description>first test content</description>', result.data)

    def test_redirect_about(self):
        result = self.app.get('/about')
        self.assertEqual(result.status_code, 302)

    def test_info_page(self):
        result = self.app.get('/info/en/search-syntax')
        self.assertEqual(result.status_code, 200)
        self.assertIn(b'<h1>Search syntax</h1>', result.data)

    def test_health(self):
        result = self.app.get('/healthz')
        self.assertEqual(result.status_code, 200)
        self.assertIn(b'OK', result.data)

    def test_preferences(self):
        result = self.app.get('/preferences')
        self.assertEqual(result.status_code, 200)
        self.assertIn(b'<form id="search_form" method="post" action="/preferences"', result.data)
        self.assertIn(b'<div id="categories_container">', result.data)
        self.assertIn(b'<legend id="pref_ui_locale">Interface language</legend>', result.data)

    def test_browser_locale(self):
        """The Accept-Language header selects both UI and search language."""
        result = self.app.get('/preferences', headers={'Accept-Language': 'zh-tw;q=0.8'})
        self.assertEqual(result.status_code, 200)
        self.assertIn(
            b'<option value="zh-Hant-TW" selected="selected">',
            result.data,
            'Interface locale ignored browser preference.',
        )
        self.assertIn(
            b'<option value="zh-Hant-TW" selected="selected">',
            result.data,
            'Search language ignored browser preference.',
        )

    def test_brower_empty_locale(self):
        # NOTE(review): method name has a typo ('brower'); harmless, kept as-is.
        result = self.app.get('/preferences', headers={'Accept-Language': ''})
        self.assertEqual(result.status_code, 200)
        self.assertIn(
            b'<option value="en" selected="selected">', result.data, 'Interface locale ignored browser preference.'
        )

    def test_locale_occitan(self):
        """Occitan has no system locale; ensure the ?locale= override still works."""
        result = self.app.get('/preferences?locale=oc')
        self.assertEqual(result.status_code, 200)
        self.assertIn(
            b'<option value="oc" selected="selected">', result.data, 'Interface locale ignored browser preference.'
        )

    def test_stats(self):
        result = self.app.get('/stats')
        self.assertEqual(result.status_code, 200)
        self.assertIn(b'<h1>Engine stats</h1>', result.data)

    def test_robots_txt(self):
        result = self.app.get('/robots.txt')
        self.assertEqual(result.status_code, 200)
        self.assertIn(b'Allow: /', result.data)

    def test_opensearch_xml(self):
        result = self.app.get('/opensearch.xml')
        self.assertEqual(result.status_code, 200)
        self.assertIn(
            b'<Description>SearXNG is a metasearch engine that respects your privacy.</Description>', result.data
        )

    def test_favicon(self):
        result = self.app.get('/favicon.ico')
        self.assertEqual(result.status_code, 200)

    def test_config(self):
        result = self.app.get('/config')
        self.assertEqual(result.status_code, 200)
        json_result = result.get_json()
        self.assertTrue(json_result)
|
||||
@@ -0,0 +1,90 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
import mock
|
||||
from searx import webutils
|
||||
from tests import SearxTestCase
|
||||
|
||||
|
||||
class TestWebUtils(SearxTestCase):
    """Tests for URL prettifying and query highlighting in searx.webutils."""

    def test_prettify_url(self):
        # (input URL, expected display form) -- long URLs are elided
        data = (
            ('https://searx.me/', 'https://searx.me/'),
            ('https://searx.me/ű', 'https://searx.me/ű'),
            ('https://searx.me/' + (100 * 'a'), 'https://searx.me/[...]aaaaaaaaaaaaaaaaa'),
            ('https://searx.me/' + (100 * 'ű'), 'https://searx.me/[...]űűűűűűűűűűűűűűűűű'),
        )

        for url, shortened in data:
            self.assertEqual(webutils.prettify_url(url, max_length=32), shortened)

    def test_highlight_content(self):
        # falsy content always yields None, regardless of the query
        for falsy in (0, None, '', False):
            self.assertIsNone(webutils.highlight_content(falsy, None))

        # HTML-looking content is passed through untouched
        contents = ['<html></html>not<']
        for content in contents:
            self.assertEqual(webutils.highlight_content(content, None), content)

        # term absent from content -> unchanged; term present -> wrapped in a span
        self.assertEqual(webutils.highlight_content('a', 'test'), 'a')
        self.assertEqual(webutils.highlight_content('a', 'a test'), '<span class="highlight">a</span>')

        # (query, content, expected highlighted output)
        data = (
            ('" test "', 'a test string', 'a <span class="highlight">test</span> string'),
            ('"a"', 'this is a test string', 'this is <span class="highlight">a</span> test string'),
            (
                'a test',
                'this is a test string that matches entire query',
                'this is <span class="highlight">a</span> <span class="highlight">test</span> string that matches entire query',
            ),
            (
                'this a test',
                'this is a string to test.',
                (
                    '<span class="highlight">this</span> is <span class="highlight">a</span> string to <span class="highlight">test</span>.'
                ),
            ),
            (
                'match this "exact phrase"',
                'this string contains the exact phrase we want to match',
                ''.join(
                    [
                        '<span class="highlight">this</span> string contains the <span class="highlight">exact</span> ',
                        '<span class="highlight">phrase</span> we want to <span class="highlight">match</span>',
                    ]
                ),
            ),
        )
        for query, content, expected in data:
            self.assertEqual(webutils.highlight_content(content, query), expected)
|
||||
|
||||
|
||||
class TestUnicodeWriter(SearxTestCase):
    """Smoke tests for webutils.CSVWriter."""

    def setUp(self):
        # the underlying stream is irrelevant here, so a MagicMock suffices
        self.unicode_writer = webutils.CSVWriter(mock.MagicMock())

    def test_write_row(self):
        # writerow() follows the csv-module convention of returning None
        self.assertEqual(self.unicode_writer.writerow([1, 2, 3]), None)

    def test_write_rows(self):
        # writerows() must delegate to writerow() once per row
        self.unicode_writer.writerow = mock.MagicMock()
        rows = [1, 2, 3]
        self.unicode_writer.writerows(rows)
        self.assertEqual(self.unicode_writer.writerow.call_count, len(rows))
|
||||
|
||||
|
||||
class TestNewHmac(SearxTestCase):
    """webutils.new_hmac() requires a str secret and digests bytes data."""

    def test_bytes(self):
        data = b'http://example.com'

        # non-str secrets (bytes, int) are rejected with AttributeError
        for bad_secret in (b'secret', 1):
            with self.assertRaises(AttributeError):
                webutils.new_hmac(bad_secret, data)

        digest = webutils.new_hmac('secret', data)
        self.assertEqual(digest, '23e2baa2404012a5cc8e4a18b4aabf0dde4cb9b56f679ddc0fd6d7c24339d819')
|
||||
Reference in New Issue
Block a user