diff --git a/searx/results.py b/searx/results.py
index cb204a682fe23d6c688c85fadc545eb454efa7c5..ab1c83e198ea89eb278ee236553cb4ff5b8e47c9 100644
--- a/searx/results.py
+++ b/searx/results.py
@@ -203,14 +203,14 @@ class ResultContainer(object):
             result['parsed_url'] = result['parsed_url']._replace(scheme="http")
             result['url'] = result['parsed_url'].geturl()
 
-        result['engines'] = set([result['engine']])
+        result['engines'] = [result['engine']]
 
         # strip multiple spaces and cariage returns from content
         if result.get('content'):
             result['content'] = WHITESPACE_REGEX.sub(' ', result['content'])
 
         # check for duplicates
-        duplicated = False
+        duplicated = None
         for merged_result in self._merged_results:
             if compare_urls(result['parsed_url'], merged_result['parsed_url'])\
                and result.get('template') == merged_result.get('template'):
@@ -233,7 +233,8 @@ class ResultContainer(object):
             duplicated['positions'].append(position)
 
             # add engine to list of result-engines
-            duplicated['engines'].add(result['engine'])
+            if result['engine'] not in duplicated['engines']:
+                duplicated['engines'].append(result['engine'])
 
             # using https if possible
             if duplicated['parsed_url'].scheme != 'https' and result['parsed_url'].scheme == 'https':
diff --git a/searx/search_database.py b/searx/search_database.py
index f2267ef84d4b9da2c7fda69871eed9f3cee44202..fea98d5f9e2b8cceb8ba2fb8d786b9dbc9cfb9f7 100644
--- a/searx/search_database.py
+++ b/searx/search_database.py
@@ -118,9 +118,6 @@ def get_search_data(q, r):
     results_number = 0
     results = r.get_ordered_results()
     for result in results:
-        result['engines'] = list(result['engines'])
-        if not type(result['engines']) is list:
-            print(result['engines'])
         if 'publishedDate' in result:
             try:
                 result['pubdate'] = result['publishedDate'].strftime('%Y-%m-%d %H:%M:%S')
diff --git a/searx/webapp.py b/searx/webapp.py
index aad67777efc5ee9e70ce75ea80f7568d9393c2ae..6285f2117cbbcded285ead1a9bfb2c1b1dc1f9d6 100644
--- a/searx/webapp.py
+++ b/searx/webapp.py
@@ -46,7 +46,7 @@ except:
     from sys import exit
     exit(1)
 
-from cgi import escape
+from html import escape
 from datetime import datetime, timedelta
 from werkzeug.contrib.fixers import ProxyFix
 from flask import (
@@ -486,8 +486,6 @@ def index():
     # search
     search_data = None
    try:
-        print(request.form)
-
         if is_general_first_page:
             request.form['categories'] = ['general', 'videos', 'images']
         else:
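
Note (not part of the patch): the results.py change stores result['engines'] as a plain list instead of a set, so merged results remain JSON-serializable when search_database.py caches them, which is why the list(result['engines']) conversion and its debug print are dropped there. The webapp.py change swaps cgi.escape for html.escape, its Python 3 replacement, and removes a leftover print(request.form) debug line. A minimal sketch of the serialization point, assuming only the standard json module:

    import json

    engines = {"duckduckgo"}
    try:
        json.dumps({"engines": engines})   # a set is not JSON-serializable
    except TypeError as err:
        print("set fails:", err)

    # list-based equivalent of the patched merge: append an engine only if missing
    engines = ["duckduckgo"]
    for engine in ("bing", "duckduckgo"):
        if engine not in engines:
            engines.append(engine)
    print(json.dumps({"engines": engines}))  # {"engines": ["duckduckgo", "bing"]}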