Donate to e Foundation | Murena handsets with /e/OS | Own a part of Murena! Learn more

Commit 7721632c authored by Nicolas Gelot's avatar Nicolas Gelot
Browse files

Add cache interface

In order to use another cache tool, or to use spot without a cache
system.
parent 38118ff8
Loading
Loading
Loading
Loading
+1 −0
Original line number Diff line number Diff line
@@ -89,6 +89,7 @@ if 'BASE_URL' in environ:
if 'IMAGE_PROXY' in environ:
    settings['server']['image_proxy'] = environ['IMAGE_PROXY']
if 'SEARX_REDIS_HOST' in environ:
    settings['redis']['enable'] = True
    settings['redis']['host'] = environ['SEARX_REDIS_HOST']
if 'HTTP_PROXY_URL' in environ:
    settings['proxies']['http'] = environ['HTTP_PROXY_URL']
+19 −21
Original line number Diff line number Diff line
@@ -185,27 +185,25 @@ def default_request_params():
    }


def search(request):
class Search:
    """Search information manager"""

    def __init__(self, cachecls=search_database.CacheInterface):
        self.cache = cachecls()

    def __call__(self, request):
        """ Entry point to perform search request on engines
        """
    search = Search()
    search_query = search.get_search_query_from_webapp(request.preferences, request.form)
    searchData = search_database.read(search_query)
    if searchData is None:
        result_container = search.search(search_query)
        searchData = search.create_search_data(search_query, result_container)
        threading.Thread(
            target=search_database.save,
            args=(searchData,),
            name='save_search_' + str(searchData)
        ).start()

    search.search_with_plugins(request, searchData)
    return searchData
        search_query = self.get_search_query_from_webapp(request.preferences, request.form)
        searchData = self.cache.read(search_query)

        if searchData is None:
            result_container = self.search(search_query)
            searchData = self.create_search_data(search_query, result_container)
            self.cache.save(searchData)

class Search(object):
    """Search information container"""
        self.search_with_plugins(request, searchData)
        return searchData

    def search(self, search_query):
        """ do search-request
+91 −67
Original line number Diff line number Diff line
import threading
import redis
import pickle

@@ -5,7 +6,28 @@ from searx import settings
from searx.query import SearchQuery


def make_key(q):
class CacheInterface:
    """Null-object cache interface for SearchData objects.

    Concrete backends (e.g. a Redis-based cache) subclass this and
    override the hooks below.  The base implementation is a no-op
    cache: every lookup misses and nothing is ever stored, which lets
    the application run with caching disabled.
    """

    def read(self, q):
        """Look up cached data for query *q*; always a miss (None) here."""
        return None

    def save(self, d):
        """Store search data *d*; no-op in the base implementation."""
        return None

    def update(self, d):
        """Refresh previously stored data *d*; no-op in the base implementation."""
        return None

    def get_twenty_queries(self, x):
        """Return up to twenty stored queries starting at offset *x*; none here."""
        return []


class RedisCache(CacheInterface):
    def __init__(self):
        # Shared connection pool; the host comes from the `redis` section
        # of the application settings (see settings.yml: redis.host).
        self.pool = redis.ConnectionPool(host=settings['redis']['host'])
        # Event flag named "running"; its consumer is not visible in this
        # chunk — presumably used to signal a background loop to stop.
        # TODO(review): confirm against the rest of the module.
        self.running = threading.Event()

    def make_key(self, q):
        if q.time_range is None:
            q.time_range = ""

@@ -19,32 +41,35 @@ def make_key(q):
            q.time_range,
        )

    def _get_connection(self):
        """Return a Redis client backed by the shared connection pool."""
        client = redis.Redis(connection_pool=self.pool)
        return client

def _get_connection():
    return redis.Redis(host=settings['redis']['host'])


def read(q):
    conn = _get_connection()
    key = make_key(q)
    def read(self, q):
        """Fetch cached search data for query *q*.

        Returns the unpickled SearchData on a hit, or None when the key
        is absent (cache miss).
        """
        raw = self._get_connection().get(self.make_key(q))
        if raw:
            return pickle.loads(raw)
        return None


def save(d):
    conn = _get_connection()
    key = make_key(d)
    def _save(self, d):
        # Persist search data *d* under its cache key and register that key
        # in the history index so stored queries can be enumerated later
        # (see get_twenty_queries, which ZRANGEs "SEARCH_HISTORY_KEYS").
        conn = self._get_connection()
        key = self.make_key(d)
        # Monotonic counter: each saved search gets a strictly increasing
        # score, so the sorted set preserves insertion order.
        history = conn.incr("SEARCH_HISTORY_INDEX")
        conn.zadd("SEARCH_HISTORY_KEYS", {key: history})
        # protocol=4 pins the pickle format so readers on any Python 3.4+
        # interpreter can load the payload.
        conn.set(key, pickle.dumps(d, protocol=4))

    def save(self, d):
        """Persist search data *d* on a background thread (non-blocking).

        The actual Redis writes happen in _save; the caller is not made
        to wait on network I/O.
        """
        worker = threading.Thread(
            target=self._save,
            name='save_search_' + str(d),
            args=(d,),
        )
        worker.start()

def get_twenty_queries(x):
    def get_twenty_queries(self, x):
        result = []

    conn = _get_connection()
        conn = self._get_connection()
        keys = conn.zrange('SEARCH_HISTORY_KEYS', int(x), int(x) + 20)
        if not keys:
            return result
@@ -69,11 +94,10 @@ def get_twenty_queries(x):

        return result


def update(d):
    conn = _get_connection()
    key = make_key(d)
    current = read(d)
    def update(self, d):
        conn = self._get_connection()
        key = self.make_key(d)
        current = self.read(d)
        current.results = d.results
        current.paging = d.paging
        current.results_number = d.results_number
+1 −0
Original line number Diff line number Diff line
@@ -17,6 +17,7 @@ server:
    http_protocol_version : "1.0"  # 1.0 and 1.1 are supported

redis:
    enable: False
    host : "127.0.0.1"
    user : "searx"
    password : "password" # change this!
+8 −7
Original line number Diff line number Diff line
@@ -33,7 +33,7 @@ import copy

import requests

from searx import logger, search_database
from searx import logger

logger = logger.getChild('webapp')

@@ -68,7 +68,8 @@ from searx.utils import (
)
from searx.version import VERSION_STRING
from searx.languages import language_codes as languages
from searx.search import Search, search
from searx.search import Search
from searx.search_database import RedisCache
from searx.query import RawTextQuery
from searx.autocomplete import searx_bang, backends as autocomplete_backends
from searx.plugins import plugins
@@ -77,7 +78,6 @@ from searx.preferences import Preferences, ValidationException, LANGUAGE_CODES
from searx.answerers import answerers
from searx.url_utils import urlencode, urlparse, urljoin
from searx.utils import new_hmac
from searx.search_database import get_twenty_queries
import threading

# check if the pyopenssl package is installed.
@@ -131,6 +131,8 @@ if not searx_debug \

babel = Babel(app)

search = Search(RedisCache) if settings["redis"]["enable"] else Search()

rtl_locales = ['ar', 'arc', 'bcc', 'bqi', 'ckb', 'dv', 'fa', 'glk', 'he',
               'ku', 'mzn', 'pnb', 'ps', 'sd', 'ug', 'ur', 'yi']

@@ -842,18 +844,17 @@ def wait_updating(start_time):


def update_results():
    search = Search()
    start_time = time.time()
    x = 0
    while not running.is_set():
        queries = get_twenty_queries(x)
        queries = search.cache.get_twenty_queries(x)
        for query in queries:
            result_container = search.search(query)
            searchData = search.create_search_data(query, result_container)
            search_database.update(searchData)
            search.cache.update(searchData)
            if running.is_set():
                return
        x += 20
        x += len(queries)
        if len(queries) < 20:
            x = 0
            wait_updating(start_time)