Commit a73f7468 authored by Gaël Duval

Merge branch 'nicofonk/tests' into 'master'

Add basic functional test

See merge request e/cloud/my-spot!24
parents ff600724 ce092d6e
@@ -3,6 +3,7 @@ image: $CI_REGISTRY_IMAGE/env:latest
 stages:
   - build
   - test
+  - report
 
 front-end:
   stage: build
@@ -21,10 +22,40 @@ coding-rules:
   script:
     - ./manage.sh pep8_check
 
-unittest:
+unit-test:
   stage: test
   before_script:
     - ./manage.sh update_dev_packages
   script:
     - ./manage.sh unit_tests
+  artifacts:
+    paths:
+      - coverage
+    expire_in: 1 hour
+
+functional-test:
+  stage: test
+  image: docker:stable
+  services:
+    - docker:dind
+  variables:
+    DOCKER_HOST: tcp://docker:2375/
+    DOCKER_DRIVER: overlay2
+  before_script:
+    - docker run -id --rm -v $(pwd):/ws -e DOCKER_HOST=tcp://$(cat /etc/hosts | grep docker | cut -f1):2375/ -w /ws --name spotenv $CI_REGISTRY_IMAGE/env:latest sh
+    - docker exec -i spotenv ./manage.sh update_dev_packages
+  script:
+    - docker exec -i spotenv ./manage.sh functional_tests
+  artifacts:
+    paths:
+      - coverage
+    expire_in: 1 hour
+
+coverage:
+  stage: report
+  script:
+    - ./manage.sh coverage
+  dependencies:
+    - unit-test
+    - functional-test
   coverage: '/TOTAL.*\s+(\d+%)$/'

@@ -6,8 +6,6 @@ EXPOSE 8888
 WORKDIR /usr/local/searx
 CMD ["python", "searx/webapp.py"]
 
-RUN adduser -D -h /usr/local/searx -s /bin/sh searx searx
-
 COPY requirements.txt ./requirements.txt
 
 RUN apk -U add \
@@ -22,6 +20,7 @@ RUN apk -U add \
     ca-certificates \
     && pip install --upgrade pip \
     && pip install --no-cache -r requirements.txt \
+    && pip install --no-cache coverage \
     && apk del \
     build-base \
     libffi-dev \
@@ -32,11 +31,7 @@ RUN apk -U add \
     ca-certificates \
     && rm -f /var/cache/apk/*
 
-COPY . .
-RUN chown -R searx:searx *
-USER searx
+COPY searx /usr/local/searx/searx
 
 RUN sed -i "s/127.0.0.1/0.0.0.0/g" searx/settings.yml

@@ -6,6 +6,7 @@ RUN dnf install -y\
     wget\
     python2-pip\
     npm\
+    docker \
     && dnf groupinstall -y "Development Tools" \
     && pip3 install ipdb ipython \
     && pip3 install -r /requirements.txt \

@@ -20,6 +20,7 @@ with one command.
 - Run the docker-compose **up** command to start the project ``docker-compose up --build``
 - Getting the ip of the spot service and go to http://<spot-ip>:8888
+- Or you can use the command line ``curl -X POST -F 'category=general' -F 'language=en-US' -F 'q=lequipe' -F 'time_range=' -F 'output=json' http://<spot-ip>:8888/``
 
 .. note:: Here the command to get the IP of the spot service
    ``docker inspect -f '{{range .NetworkSettings.Networks}}{{.IPAddress}}{{end}}' my-spot_spot_1``

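Since this merge request also adds a JSON output mode to the web app (see the ``output`` handling in the webapp.py hunk further down), the curl example above can be reproduced from Python. The snippet below is only a sketch under that assumption: ``<spot-ip>`` is the placeholder from the README, and the ``results`` key is the one built by ``index()``:

    import requests

    # Placeholder host from the README; replace <spot-ip> with the address
    # reported by the docker inspect command above.
    SPOT_URL = "http://<spot-ip>:8888/"

    resp = requests.post(
        SPOT_URL,
        data={
            "category": "general",
            "language": "en-US",
            "q": "lequipe",
            "time_range": "",
            "output": "json",  # selects the JSON code path added in webapp.py
        },
    )
    resp.raise_for_status()
    print(resp.json().get("results", []))
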
version: '3.6'

services:
  spot:
    entrypoint:
      - coverage
      - run
      - --source=searx
    command:
      - searx/webapp.py
    volumes:
      - coverage:/coverage
    environment:
      COVERAGE_FILE: /coverage/func

volumes:
  coverage:
    name: spot-coverage

@@ -8,6 +8,7 @@ set -e
 # subshell
 PYTHONPATH="$BASE_DIR"
 SEARX_DIR="$BASE_DIR/searx"
+COV_DIR="$BASE_DIR/coverage"
 ACTION="$1"
@@ -35,25 +36,39 @@ pep8_check() {
     # ignored rules:
     # E402 module level import not at top of file
     # W503 line break before binary operator
-    pep8 --exclude=searx/static --max-line-length=120 --ignore "E402,W503" "$SEARX_DIR" "$BASE_DIR/tests"
+    # E722 do not use bare 'except'
+    pycodestyle --exclude=searx/static --max-line-length=120 --ignore "E402,W503,E722" "$SEARX_DIR" "$BASE_DIR/tests"
 }
 
 unit_tests() {
     echo '[!] Running unit tests'
-    PYTHONPATH="$BASE_DIR" pytest --cov=searx --disable-pytest-warnings "$BASE_DIR/tests/unit"
+    mkdir -p "$COV_DIR"
+    chmod a+w "$COV_DIR"
+    PYTHONPATH="$BASE_DIR" COVERAGE_FILE="$COV_DIR"/unit pytest --cov=searx "$BASE_DIR/tests/unit"
 }
-py_test_coverage() {
-    echo '[!] Running python test coverage'
-    PYTHONPATH="`pwd`" python3 -m nose2 -C --log-capture --with-coverage --coverage "$SEARX_DIR" -s "$BASE_DIR/tests/unit" \
-        && coverage report \
-        && coverage html
-}
+functional_tests() {
+    echo '[!] Running functional tests'
+    mkdir -p "$COV_DIR"
+    chmod a+w "$COV_DIR"
+    PYTHONPATH="$BASE_DIR" COMPOSE_FILE=docker-compose.yml:docker-compose-coverage.yml \
+        pytest "$BASE_DIR/tests/functional"
+    docker run -itd --rm --name tmp-vol -v spot-coverage:/coverage alpine
+    docker cp tmp-vol:/coverage/func $COV_DIR
+    docker stop tmp-vol
+}
+
+coverage() {
+    sed -i 's!/usr/local/searx!'$BASE_DIR'!g' "$COV_DIR"/func
+    coverage3 combine coverage/func coverage/unit
+    coverage3 report
+}
 
 tests() {
     set -e
     pep8_check
     unit_tests
+    functional_tests
     set +e
 }
@@ -128,7 +143,6 @@ Commands
 ------------------
     update_packages - Check & update production dependency changes
     update_dev_packages - Check & update development and production dependency changes
-    install_geckodriver - Download & install geckodriver if not already installed (required for robot_tests)
     npm_packages - Download & install npm dependencies (source manage.sh to update the PATH)
 
 Build
@@ -140,10 +154,9 @@ Commands
 Tests
 -----
     unit_tests - Run unit tests
+    functional_tests - Run functional tests
     pep8_check - Pep8 validation
-    robot_tests - Run selenium tests
-    tests - Run all python tests (pep8, unit, robot_tests)
-    py_test_coverage - Unit test coverage
+    tests - Run all python tests (pep8, unit, functional)
 "
 }

[pytest]
addopts = -s --dockerc-build --dockerc-attach-network --disable-pytest-warnings
 babel==2.3.4
 mock==2.0.0
-pep8==1.7.0
+pycodestyle==2.4.0
 mockredispy==2.9.3
 pytest==4.1.0
 pytest-cov==2.6.1
+pytest-dockerc==1.0.5

@@ -23,7 +23,7 @@ from io import open
 from ssl import OPENSSL_VERSION_INFO, OPENSSL_VERSION
 try:
     from yaml import load
-except:
+except ImportError:
     from sys import exit, stderr
     stderr.write('[E] install pyyaml\n')
     exit(2)
@@ -38,6 +38,7 @@ def check_settings_yml(file_name):
     else:
         return None
 
+# find location of settings.yml
 if 'SEARX_SETTINGS_PATH' in environ:
     # if possible set path to settings using the

@@ -116,7 +116,7 @@ def _fetch_supported_languages(resp):
     regions = dom.xpath(regions_xpath)
     for region in regions:
-        code = re.search('setmkt=[^\&]+', region).group()[7:]
+        code = re.search('setmkt=[^&]+', region).group()[7:]
         if code == 'nb-NO':
             code = 'no-NO'

@@ -72,6 +72,7 @@ def parse_album(hit):
         result.update({'content': 'Released: {}'.format(year)})
     return result
 
+
 parse = {'lyric': parse_lyric, 'song': parse_lyric, 'artist': parse_artist, 'album': parse_album}

@@ -89,9 +89,8 @@ url_map = 'https://www.openstreetmap.org/'\
 # search-url
 search_path = '/search'
-search_url = ('https://{hostname}' +
-              search_path +
-              '?{query}&start={offset}&gws_rd=cr&gbv=1&lr={lang}&hl={lang_short}&ei=x')
+search_url = ('https://{hostname}' + search_path + '?{query}'
+              '&start={offset}&gws_rd=cr&gbv=1&lr={lang}&hl={lang_short}&ei=x')
 
 time_range_search = "&tbs=qdr:{range}"
 time_range_dict = {'day': 'd',

@@ -76,11 +76,11 @@ def response(resp):
         # get thumbnails
         script = str(dom.xpath('//script[contains(., "_setImagesSrc")]')[0].text)
         id = result.xpath('.//div[@class="s"]//img/@id')[0]
-        thumbnails_data = re.findall('s=\'(.*?)(?:\\\\[a-z,1-9,\\\\]+\'|\')\;var ii=\[(?:|[\'vidthumb\d+\',]+)\'' + id,
+        thumbnails_data = re.findall(r's=\'(.*?)(?:\\\\[a-z,1-9,\\\\]+\'|\')\;var ii=\[(?:|[\'vidthumb\d+\',]+)\'' + id,
                                      script)
         tmp = []
         if len(thumbnails_data) != 0:
-            tmp = re.findall('(data:image/jpeg;base64,[a-z,A-Z,0-9,/,\+]+)', thumbnails_data[0])
+            tmp = re.findall(r'(data:image/jpeg;base64,[a-z,A-Z,0-9,/,\+]+)', thumbnails_data[0])
         thumbnail = ''
         if len(tmp) != 0:
             thumbnail = tmp[-1]

@@ -28,10 +28,10 @@ search_string = '?{query}&page={page}'
 supported_languages_url = base_url
 
 # regex
-regex_json = re.compile('initialData: {"Request":(.|\n)*},\s*environment')
-regex_json_remove_start = re.compile('^initialData:\s*')
-regex_json_remove_end = re.compile(',\s*environment$')
-regex_img_url_remove_start = re.compile('^https?://i\.swisscows\.ch/\?link=')
+regex_json = re.compile(r'initialData: {"Request":(.|\n)*},\s*environment')
+regex_json_remove_start = re.compile(r'^initialData:\s*')
+regex_json_remove_end = re.compile(r',\s*environment$')
+regex_img_url_remove_start = re.compile(r'^https?://i\.swisscows\.ch/\?link=')
 
 # do search-request

@@ -49,7 +49,7 @@ class RawTextQuery(object):
         self.query_parts = []
 
         # split query, including whitespaces
-        raw_query_parts = re.split(r'(\s+)' if isinstance(self.query, str) else b'(\s+)', self.query)
+        raw_query_parts = re.split(r'(\s+)' if isinstance(self.query, str) else r'(\s+)'.encode(), self.query)
 
         parse_next = True

@@ -171,7 +171,7 @@ class ResultContainer(object):
                 continue
             try:
                 result['url'] = result['url']
-            except:
+            except KeyError:
                 pass
             if 'title' in result and not isinstance(result['title'], str):
                 continue

@@ -6,6 +6,7 @@ import re
 from babel.core import get_global
 from babel.dates import format_date
+from babel import UnknownLocaleError
 from codecs import getincrementalencoder
 from imp import load_source
 from numbers import Number
@@ -205,7 +206,7 @@ def format_date_by_locale(date, locale_string):
     # to avoid crashing if locale is not supported by babel
     try:
         formatted_date = format_date(date, locale=locale_string)
-    except:
+    except UnknownLocaleError:
         formatted_date = format_date(date, "YYYY-MM-dd")
     return formatted_date
@@ -255,7 +256,7 @@ def get_torrent_size(filesize, filesize_multiplier):
             filesize = int(filesize * 1000 * 1000)
         elif filesize_multiplier == 'KiB':
             filesize = int(filesize * 1000)
-    except:
+    except ValueError:
         filesize = None
     return filesize

@@ -40,8 +40,9 @@ logger = logger.getChild('webapp')
 try:
     from pygments import highlight
     from pygments.lexers import get_lexer_by_name
+    from pygments.util import ClassNotFound
     from pygments.formatters import HtmlFormatter
-except:
+except ImportError:
     logger.critical("cannot import dependency: pygments")
     from sys import exit
@@ -178,7 +179,7 @@ def code_highlighter(codelines, language=None):
     try:
         # find lexer by programing language
         lexer = get_lexer_by_name(language, stripall=True)
-    except:
+    except ClassNotFound:
         # if lexer is not found, using default one
         logger.debug('highlighter cannot find lexer for {0}'.format(language))
         lexer = get_lexer_by_name('text', stripall=True)
@@ -452,21 +453,26 @@ def config_results(results, query):
             result['publishedDate'] = format_date(publishedDate)
 
 
-def index_error():
-    request.errors.append(gettext('search error'))
-    return render(
-        'index.html',
-    )
+def index_error(exn, output):
+    user_error = gettext("search error")
+    if output == "json":
+        return jsonify({"error": f"{user_error}: {exn}"})
+
+    request.errors.append(user_error)
+    return render('index.html')
 
 
 @app.route('/search', methods=['GET', 'POST'])
 @app.route('/', methods=['GET', 'POST'])
 def index():
+    # check the response format
+    output = request.form.get("output", "html")
+
     # check if there is query
     if request.form.get('q') is None:
-        return render(
-            'index.html',
-        )
+        if output == 'json':
+            return jsonify({}), 204
+        return render('index.html')
 
     selected_category = request.form.get('category') or 'general'
     first_page = request.form.get('pageno')
@@ -489,9 +495,9 @@ def index():
         # is it an invalid input parameter or something else ?
         if issubclass(e.__class__, SearxParameterException):
-            return index_error(), 400
+            return index_error(e, output), 400
         else:
-            return index_error(), 500
+            return index_error(e, output), 500
 
     if is_general_first_page:
         result_copy = copy.copy(search_data.results)
@@ -512,8 +518,7 @@ def index():
     config_results(images, search_data.query)
     config_results(videos, search_data.query)
 
-    return render(
-        'results.html',
+    response = dict(
         results=search_data.results,
         q=search_data.query,
         selected_category=selected_category,
@@ -521,12 +526,12 @@ def index():
         time_range=search_data.time_range,
         number_of_results=format_decimal(search_data.results_number),
         advanced_search=request.form.get('advanced_search', None),
-        suggestions=search_data.suggestions,
-        answers=search_data.answers,
-        corrections=search_data.corrections,
+        suggestions=list(search_data.suggestions),
+        answers=list(search_data.answers),
+        corrections=list(search_data.corrections),
         infoboxes=search_data.infoboxes,
         paging=search_data.paging,
-        unresponsive_engines=search_data.unresponsive_engines,
+        unresponsive_engines=list(search_data.unresponsive_engines),
         current_language=match_language(search_data.language,
                                         LANGUAGE_CODES,
                                         fallback=settings['search']['language']),
@@ -536,6 +541,9 @@ def index():
         theme=get_current_theme_name(),
         favicons=global_favicons[themes.index(get_current_theme_name())]
     )
+    if output == 'json':
+        return jsonify(response)
+    return render('results.html', **response)
 
 
 @app.route('/about', methods=['GET'])

import pytest
import redis
import requests
from pytest_dockerc import Wait, Context


class SpotContext(Context):
    @property
    def url(self):
        addr = self.container_addr("spot")
        port = self.container_port("spot")
        return f"http://{addr}:{port}"

    def wait_for_running_state(self):
        Wait(ignored_exns=(requests.ConnectionError,))(lambda: requests.get(self.url))


@pytest.fixture(scope="session")
def ctx(dockerc, dockerc_logs):
    context = SpotContext(dockerc)
    context.wait_for_running_state()
    yield context


@pytest.fixture
def redisdb(ctx):
    """ purge the db
    """