odoo_config icon indicating copy to clipboard operation
odoo_config copied to clipboard

odoo8 cache redis 测试

Open gilbert-yuan opened this issue 8 years ago • 7 comments

# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2013 OpenERP (<http://www.openerp.com>).
#    Copyright (C) 2013 Gilbert ([email protected]).
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

# decorator makes wrappers that have the same API as their wrapped function;
# this is important for the openerp.api.guess() that relies on signatures
from collections import defaultdict
from decorator import decorator
from inspect import getargspec
import logging, redis
_logger = logging.getLogger(__name__)
from lxml import etree

class ormcache_counter(object):
    """ Hit/miss/error statistics for one cached orm method. """
    __slots__ = ['hit', 'miss', 'err']

    def __init__(self):
        # every counter starts from zero
        self.hit = self.miss = self.err = 0

    @property
    def ratio(self):
        # percentage of lookups served from the cache; `or 1` avoids a
        # ZeroDivisionError before the first lookup
        total = self.hit + self.miss
        return 100.0 * self.hit / (total or 1)

# statistic counters dictionary, maps (dbname, modelname, method) to counter
STAT = defaultdict(ormcache_counter)
# dedicated Redis database (db 4) holding one hash per model name
redis_instance = redis.Redis(host='localhost', port=6379, db=4)
# start from an empty cache; flushdb() clears only db 4 -- flushall()
# would wipe every database on the Redis server
redis_instance.flushdb()
class ormcache(object):
    """ Cache decorator for orm methods, backed by one Redis hash per model
    (field = ``str`` of the cache key, value = ``str`` of the result, read
    back with ``eval``).
    """

    def __init__(self, skiparg=2, size=8192, multi=None, timeout=None):
        # size/multi/timeout are accepted for API compatibility only;
        # eviction is left to Redis
        self.skiparg = skiparg

    def __call__(self, method):
        self.method = method
        # decorator() keeps the wrapped function's signature, which
        # openerp.api.guess() relies on
        lookup = decorator(self.lookup, method)
        lookup.clear_cache = self.clear
        return lookup

    def lru(self, model):
        """ Return ``(key_prefix, counter)`` for `model`, or
        ``(False, False)`` when `model` is not an orm model. """
        if not hasattr(model, '_name'):
            return False, False
        counter = STAT[(model.pool.db_name, model._name, self.method.func_name)]
        return (model._name, self.method.func_name), counter

    def lookup(self, method, *args, **kwargs):
        key0, counter = self.lru(args[0])
        if not key0:
            # not a model: no usable cache key; `counter` is False here so
            # it must not be incremented -- call through directly
            return self.method(*args, **kwargs)
        key = key0 + args[self.skiparg:]
        cache_val = redis_instance.hget(args[0]._name, str(key))
        if cache_val:
            try:
                # NOTE(review): eval() of cached data is unsafe if the Redis
                # instance is shared/untrusted; pickle/json would be safer
                cache_val = eval(cache_val)
            except Exception:
                # not a Python literal: return the raw payload, keeping XML
                # as a byte string
                if "<?xml" in cache_val:
                    cache_val = cache_val.encode()
                else:
                    cache_val = u'%s' % cache_val.encode()
            counter.hit += 1
            return cache_val
        counter.miss += 1
        value = self.method(*args, **kwargs)
        # store under str(key) so the hget() above finds it again
        redis_instance.hset(args[0]._name, str(key), value)
        return value

    def clear(self, model, *args):
        """ Remove this method's entries from the cache; extra *args are
        deprecated and ignored. """
        key0, _ = self.lru(model)
        if not key0:
            # not a model: nothing was ever cached for it
            return
        if args:
            _logger.warn("ormcache.clear arguments are deprecated and ignored "
                         "(while clearing caches on (%s).%s)",
                         model._name, self.method.__name__)
        # stored fields are str(key0 + call_args), so str(key0) itself never
        # matches; drop every field starting with the key0 prefix instead
        prefix = str(key0)[:-1]
        stale = [f for f in redis_instance.hkeys(model._name)
                 if f.startswith(prefix)]
        if stale:
            redis_instance.hdel(model._name, *stale)
        model.pool._any_cache_cleared = True


class ormcache_context(ormcache):
    """ ormcache variant whose cache key also includes a whitelist of
    context keys. """

    def __init__(self, skiparg=2, size=8192, accepted_keys=()):
        super(ormcache_context, self).__init__(skiparg, size)
        # only these context keys participate in the cache key
        self.accepted_keys = accepted_keys

    def __call__(self, method):
        # remember which positional argument is the context
        args = getargspec(method)[0]
        self.context_pos = args.index('context')
        return super(ormcache_context, self).__call__(method)

    def lookup(self, method, *args, **kwargs):
        key0, counter = self.lru(args[0])
        if not key0:
            # not a model: `counter` is False and cannot be incremented;
            # call through directly
            return self.method(*args, **kwargs)
        # Note. The decorator() wrapper (used in __call__ above) will resolve
        # arguments, and pass them positionally to lookup(). This is why context
        # is normally found in args, not kwargs.
        if self.context_pos < len(args):
            context = args[self.context_pos] or {}
        else:
            context = kwargs.get('context') or {}
        ckey = [(k, context[k]) for k in self.accepted_keys if k in context]

        # Beware: do not take the raw context from args -- only the
        # whitelisted (key, value) pairs go into the key
        key = key0 + args[self.skiparg:self.context_pos] + tuple(ckey)
        cache_val = redis_instance.hget(args[0]._name, str(key))
        if cache_val:
            try:
                # NOTE(review): eval() of cached data is unsafe on a shared
                # Redis instance
                cache_val = eval(cache_val)
            except Exception:
                if "<?xml" in cache_val:
                    cache_val = cache_val.encode()
                else:
                    cache_val = u'%s' % cache_val.encode()
            counter.hit += 1
            return cache_val
        counter.miss += 1
        value = self.method(*args, **kwargs)
        # store under str(key) so the hget() above finds it again
        redis_instance.hset(args[0]._name, str(key), value)
        return value


class ormcache_multi(ormcache):
    """ ormcache variant for methods taking a list of ids: each id is
    cached under its own key, and the method is called once for the
    missing ids only. """

    def __init__(self, skiparg=2, size=8192, multi=3):
        assert skiparg <= multi
        super(ormcache_multi, self).__init__(skiparg, size)
        # index of the ids argument among the positional arguments
        self.multi = multi

    def lookup(self, method, *args, **kwargs):
        key0, counter = self.lru(args[0])
        if not key0:
            # not a model: `counter` is False and cannot be incremented;
            # call through directly
            return self.method(*args, **kwargs)
        # key of everything except the ids argument
        base_key = key0 + args[self.skiparg:self.multi] + args[self.multi+1:]
        ids = args[self.multi]
        result = {}
        missed = []

        # first take what is available in the cache
        for i in ids:
            key = base_key + (i,)
            cache_val = redis_instance.hget(args[0]._name, str(key))
            if cache_val:
                try:
                    # NOTE(review): eval() of cached data is unsafe on a
                    # shared Redis instance
                    cache_val = eval(cache_val)
                except Exception:
                    if "<?xml" in cache_val:
                        cache_val = cache_val.encode()
                    else:
                        cache_val = u'%s' % cache_val.encode()
                result[i] = cache_val
                counter.hit += 1
            else:
                counter.miss += 1
                missed.append(i)

        if missed:
            # call the method once for all the ids that were not in the cache
            args = list(args)
            args[self.multi] = missed
            result.update(method(*args, **kwargs))

            # store the new results back, under str(key) to match the
            # hget() lookups above
            for i in missed:
                key = base_key + (i,)
                redis_instance.hset(args[0]._name, str(key), result[i])
        return result


class dummy_cache(object):
    """ No-op stand-in for the cache decorators: caches nothing at all. """

    def __init__(self, *l, **kw):
        # accept and ignore any decorator configuration
        pass

    def __call__(self, fn):
        # hand the function back unwrapped, with a no-op clear_cache hook
        # so callers of clear_cache() keep working
        fn.clear_cache = self.clear
        return fn

    def clear(self, *l, **kw):
        # nothing is cached, so there is nothing to clear
        pass


def log_ormcache_stats(sig=None, frame=None):
    """ Log statistics of ormcache usage by database, model, and method.

    The unused ``sig``/``frame`` parameters suggest this is meant to be
    installed as a signal handler.
    """
    from openerp.modules.registry import RegistryManager
    import threading

    me = threading.currentThread()
    # remember the current thread's dbname so it can be restored at the end
    me_dbname = me.dbname
    entries = defaultdict(int)
    # count in-memory registry cache entries per (dbname, model, method).
    # NOTE(review): with the Redis backend above, reg.cache may never be
    # populated, so these counts could stay empty -- verify at runtime.
    for dbname, reg in RegistryManager.registries.iteritems():
        for key in reg.cache.iterkeys():
            entries[(dbname,) + key[:2]] += 1
    for key, count in sorted(entries.items()):
        dbname, model_name, method = key
        me.dbname = dbname
        stat = STAT[key]
        # NOTE(review): STAT in this file is keyed by the method *name*
        # (a string), in which case ``method.__name__`` below would raise
        # AttributeError -- confirm what reg.cache keys actually contain.
        _logger.info("%6d entries, %6d hit, %6d miss, %6d err, %4.1f%% ratio, for %s.%s",
                     count, stat.hit, stat.miss, stat.err, stat.ratio, model_name, method.__name__)

    me.dbname = me_dbname

# For backward compatibility: older modules import `cache` directly
cache = ormcache

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

gilbert-yuan avatar Mar 27 '18 06:03 gilbert-yuan

只能覆盖 cache.py。经过一段时间测试,没有报错了,速度和存在内存中相比差别不大。

gilbert-yuan avatar Mar 28 '18 09:03 gilbert-yuan

# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2013 OpenERP (<http://www.openerp.com>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

# decorator makes wrappers that have the same API as their wrapped function;
# this is important for the openerp.api.guess() that relies on signatures
from collections import defaultdict
from decorator import decorator
from inspect import getargspec
import logging, redis, pickle
_logger = logging.getLogger(__name__)
from lxml import etree

class ormcache_counter(object):
    """ Hit/miss/error statistics for one cached orm method. """
    __slots__ = ['hit', 'miss', 'err']

    def __init__(self):
        # every counter starts from zero
        self.hit = self.miss = self.err = 0

    @property
    def ratio(self):
        # percentage of lookups served from the cache; `or 1` avoids a
        # ZeroDivisionError before the first lookup
        total = self.hit + self.miss
        return 100.0 * self.hit / (total or 1)

# statistic counters dictionary, maps (dbname, modelname, method) to counter
STAT = defaultdict(ormcache_counter)
# dedicated Redis database (db 4) holding one hash per model name
redis_instance = redis.Redis(host='localhost', port=6379, db=4)
# start from an empty cache; flushdb() clears only db 4 -- flushall()
# would wipe every database on the Redis server
redis_instance.flushdb()
class ormcache(object):
    """ Cache decorator for orm methods, backed by one Redis hash per model
    (field = ``str`` of the cache key, value = pickled result).
    """

    def __init__(self, skiparg=2, size=8192, multi=None, timeout=None):
        # size/multi/timeout are accepted for API compatibility only;
        # eviction is left to Redis
        self.skiparg = skiparg

    def __call__(self, method):
        self.method = method
        # decorator() keeps the wrapped function's signature, which
        # openerp.api.guess() relies on
        lookup = decorator(self.lookup, method)
        lookup.clear_cache = self.clear
        return lookup

    def lru(self, model):
        """ Return ``(key_prefix, counter)`` for `model`, or
        ``(False, False)`` when `model` is not an orm model. """
        if not hasattr(model, '_name'):
            return False, False
        counter = STAT[(model.pool.db_name, model._name, self.method.func_name)]
        return (model._name, self.method.func_name), counter

    def lookup(self, method, *args, **kwargs):
        key0, counter = self.lru(args[0])
        if not key0:
            # not a model: no usable cache key; `counter` is False here so
            # it must not be incremented -- call through directly
            return self.method(*args, **kwargs)
        key = key0 + args[self.skiparg:]
        cache_val = redis_instance.hget(args[0]._name, str(key))
        if cache_val:
            counter.hit += 1
            # NOTE(review): unpickling data from a shared/untrusted Redis
            # instance can execute arbitrary code
            return pickle.loads(cache_val)
        counter.miss += 1
        value = self.method(*args, **kwargs)
        # store under str(key) so the hget() above finds it again
        redis_instance.hset(args[0]._name, str(key), pickle.dumps(value))
        return value

    def clear(self, model, *args):
        """ Remove this method's entries from the cache; extra *args are
        deprecated and ignored. """
        key0, _ = self.lru(model)
        if not key0:
            # not a model: nothing was ever cached for it
            return
        if args:
            _logger.warn("ormcache.clear arguments are deprecated and ignored "
                         "(while clearing caches on (%s).%s)",
                         model._name, self.method.__name__)
        # stored fields are str(key0 + call_args), so str(key0) itself never
        # matches; drop every field starting with the key0 prefix instead
        prefix = str(key0)[:-1]
        stale = [f for f in redis_instance.hkeys(model._name)
                 if f.startswith(prefix)]
        if stale:
            redis_instance.hdel(model._name, *stale)
        model.pool._any_cache_cleared = True


class ormcache_context(ormcache):
    """ ormcache variant whose cache key also includes a whitelist of
    context keys. """

    def __init__(self, skiparg=2, size=8192, accepted_keys=()):
        super(ormcache_context, self).__init__(skiparg, size)
        # only these context keys participate in the cache key
        self.accepted_keys = accepted_keys

    def __call__(self, method):
        # remember which positional argument is the context
        args = getargspec(method)[0]
        self.context_pos = args.index('context')
        return super(ormcache_context, self).__call__(method)

    def lookup(self, method, *args, **kwargs):
        key0, counter = self.lru(args[0])
        if not key0:
            # not a model: `counter` is False and cannot be incremented;
            # call through directly
            return self.method(*args, **kwargs)
        # Note. The decorator() wrapper (used in __call__ above) will resolve
        # arguments, and pass them positionally to lookup(). This is why context
        # is normally found in args, not kwargs.
        if self.context_pos < len(args):
            context = args[self.context_pos] or {}
        else:
            context = kwargs.get('context') or {}
        ckey = [(k, context[k]) for k in self.accepted_keys if k in context]

        # Beware: do not take the raw context from args -- only the
        # whitelisted (key, value) pairs go into the key
        key = key0 + args[self.skiparg:self.context_pos] + tuple(ckey)
        cache_val = redis_instance.hget(args[0]._name, str(key))
        if cache_val:
            counter.hit += 1
            # NOTE(review): unpickling shared Redis content is unsafe if the
            # instance is not trusted
            return pickle.loads(cache_val)
        counter.miss += 1
        value = self.method(*args, **kwargs)
        # store under str(key) so the hget() above finds it again
        redis_instance.hset(args[0]._name, str(key), pickle.dumps(value))
        return value


class ormcache_multi(ormcache):
    """ ormcache variant for methods taking a list of ids: each id is
    cached under its own key, and the method is called once for the
    missing ids only. """

    def __init__(self, skiparg=2, size=8192, multi=3):
        assert skiparg <= multi
        super(ormcache_multi, self).__init__(skiparg, size)
        # index of the ids argument among the positional arguments
        self.multi = multi

    def lookup(self, method, *args, **kwargs):
        key0, counter = self.lru(args[0])
        if not key0:
            # not a model: `counter` is False and cannot be incremented;
            # call through directly
            return self.method(*args, **kwargs)
        # key of everything except the ids argument
        base_key = key0 + args[self.skiparg:self.multi] + args[self.multi+1:]
        ids = args[self.multi]
        result = {}
        missed = []

        # first take what is available in the cache
        for i in ids:
            key = base_key + (i,)
            cache_val = redis_instance.hget(args[0]._name, str(key))
            if cache_val:
                # NOTE(review): unpickling shared Redis content is unsafe
                # if the instance is not trusted
                result[i] = pickle.loads(cache_val)
                counter.hit += 1
            else:
                counter.miss += 1
                missed.append(i)

        if missed:
            # call the method once for all the ids that were not in the cache
            args = list(args)
            args[self.multi] = missed
            result.update(method(*args, **kwargs))

            # store the new results back, under str(key) to match the
            # hget() lookups above
            for i in missed:
                key = base_key + (i,)
                redis_instance.hset(args[0]._name, str(key),
                                    pickle.dumps(result[i]))
        return result


class dummy_cache(object):
    """ No-op stand-in for the cache decorators: caches nothing at all. """

    def __init__(self, *l, **kw):
        # accept and ignore any decorator configuration
        pass

    def __call__(self, fn):
        # hand the function back unwrapped, with a no-op clear_cache hook
        # so callers of clear_cache() keep working
        fn.clear_cache = self.clear
        return fn

    def clear(self, *l, **kw):
        # nothing is cached, so there is nothing to clear
        pass


def log_ormcache_stats(sig=None, frame=None):
    """ Log statistics of ormcache usage by database, model, and method.

    The unused ``sig``/``frame`` parameters suggest this is meant to be
    installed as a signal handler.
    """
    from openerp.modules.registry import RegistryManager
    import threading

    me = threading.currentThread()
    # remember the current thread's dbname so it can be restored at the end
    me_dbname = me.dbname
    entries = defaultdict(int)
    # count in-memory registry cache entries per (dbname, model, method).
    # NOTE(review): with the Redis backend above, reg.cache may never be
    # populated, so these counts could stay empty -- verify at runtime.
    for dbname, reg in RegistryManager.registries.iteritems():
        for key in reg.cache.iterkeys():
            entries[(dbname,) + key[:2]] += 1
    for key, count in sorted(entries.items()):
        dbname, model_name, method = key
        me.dbname = dbname
        stat = STAT[key]
        # NOTE(review): STAT in this file is keyed by the method *name*
        # (a string), in which case ``method.__name__`` below would raise
        # AttributeError -- confirm what reg.cache keys actually contain.
        _logger.info("%6d entries, %6d hit, %6d miss, %6d err, %4.1f%% ratio, for %s.%s",
                     count, stat.hit, stat.miss, stat.err, stat.ratio, model_name, method.__name__)

    me.dbname = me_dbname

# For backward compatibility: older modules import `cache` directly
cache = ormcache

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:

gilbert-yuan avatar Apr 11 '18 01:04 gilbert-yuan

添加了数据序列化(pickle)处理,避免数据存入 Redis 中、再取出来的时候出现不一致。

gilbert-yuan avatar Apr 11 '18 01:04 gilbert-yuan

def clear () 清除缓存部分优化 redis_instance.hdel(model._name, [redis_key for redis_key in redis_instance.hkeys(model._name) if str(key0) in redis_key])

gilbert-yuan avatar Apr 24 '18 02:04 gilbert-yuan

7c9bd6de-6cc5-43f7-9dce-5cfdaa5ec536

gilbert-yuan avatar Jul 06 '18 04:07 gilbert-yuan

请问这个不去修改 LRU 文件的话,缓存增多了之后,内存会不会爆炸啊

gimebreak avatar Jul 16 '20 07:07 gimebreak

默认就有最大缓存个数的限制。不会因为这个原因爆掉

gilbert-yuan avatar Jul 16 '20 08:07 gilbert-yuan