
Optionally specify reader host and url

Open bartjolinktopicus opened this issue 2 years ago • 2 comments

Right now, the dogpile.cache reader_client is always the same as the writer_client, with no way to specify otherwise. When working with multiple nodes in an ElastiCache cluster, it is recommended to use the primary node as the writer_client and the secondary replica node as the reader_client.

Source: https://docs.aws.amazon.com/AmazonElastiCache/latest/red-ug/GettingStarted.ConnectToCacheNode.html
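
For illustration, a rough sketch of what this could look like from the configuration side. The reader_client argument does not exist in dogpile.cache today, and the endpoint names below are placeholders; this is only meant to show the intent of the proposal.

from dogpile.cache import make_region

region = make_region().configure(
    "dogpile.cache.redis",
    arguments={
        # writes would go to the ElastiCache primary endpoint (placeholder name)
        "host": "my-cluster.xxxxxx.use1.cache.amazonaws.com",
        "port": 6379,
        # reads could be served from a replica endpoint via the proposed option
        "reader_client": {
            "host": "my-cluster-ro.xxxxxx.use1.cache.amazonaws.com",
            "port": 6379,
        },
    },
)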

bartjolinktopicus · Jun 24 '22 09:06

In test_redis_backend you'd also need to add a new assertion helper that can test for two separate calls to the redis backend.

Here's a patch for reader_client, close to how I think it should be done.

diff --git a/dogpile/cache/backends/redis.py b/dogpile/cache/backends/redis.py
index 1a6ba09..7f608ac 100644
--- a/dogpile/cache/backends/redis.py
+++ b/dogpile/cache/backends/redis.py
@@ -38,8 +38,11 @@ class RedisBackend(BytesBackend):
                 'port': 6379,
                 'db': 0,
                 'redis_expiration_time': 60*60*2,   # 2 hours
-                'distributed_lock': True,
-                'thread_local_lock': False
+                'thread_local_lock': False,
+
+                'reader_client': {
+                    'connection_pool': some_redis_pool
+                    }
                 }
         )
 
@@ -112,6 +115,7 @@ class RedisBackend(BytesBackend):
         self.lock_sleep = arguments.pop("lock_sleep", 0.1)
         self.thread_local_lock = arguments.pop("thread_local_lock", True)
         self.connection_kwargs = arguments.pop("connection_kwargs", {})
+        self.reader_client_args = arguments.pop("reader_client", {})
 
         if self.distributed_lock and self.thread_local_lock:
             warnings.warn(
@@ -129,33 +133,57 @@ class RedisBackend(BytesBackend):
         import redis  # noqa
 
     def _create_client(self):
-        if self.connection_pool is not None:
+        self.writer_client = self._create_client_from_args(
+            connection_pool=self.connection_pool,
+            connection_kwargs=self.connection_kwargs,
+            socket_timeout=self.socket_timeout,
+            url=self.url,
+            host=self.host,
+            password=self.password,
+            port=self.port,
+            db=self.db,
+        )
+        if self.reader_client_args:
+            self.reader_client = self._create_client_from_args(
+                **self.reader_client_args
+            )
+        else:
+            self.reader_client = self.writer_client
+
+    def _create_client_from_args(
+        self,
+        connection_pool=None,
+        connection_kwargs=None,
+        socket_timeout=None,
+        url=None,
+        host=None,
+        password=None,
+        port=None,
+        db=None,
+    ):
+        if connection_pool is not None:
             # the connection pool already has all other connection
             # options present within, so here we disregard socket_timeout
             # and others.
-            self.writer_client = redis.StrictRedis(
-                connection_pool=self.connection_pool
-            )
-            self.reader_client = self.writer_client
+            return redis.StrictRedis(connection_pool=connection_pool)
         else:
             args = {}
-            args.update(self.connection_kwargs)
-            if self.socket_timeout:
-                args["socket_timeout"] = self.socket_timeout
-
-            if self.url is not None:
-                args.update(url=self.url)
-                self.writer_client = redis.StrictRedis.from_url(**args)
-                self.reader_client = self.writer_client
+            if connection_kwargs:
+                args.update(connection_kwargs)
+            if socket_timeout:
+                args["socket_timeout"] = socket_timeout
+
+            if url is not None:
+                args.update(url=url)
+                return redis.StrictRedis.from_url(**args)
             else:
                 args.update(
-                    host=self.host,
-                    password=self.password,
-                    port=self.port,
-                    db=self.db,
+                    host=host,
+                    password=password,
+                    port=port,
+                    db=db,
                 )
-                self.writer_client = redis.StrictRedis(**args)
-                self.reader_client = self.writer_client
+                return redis.StrictRedis(**args)
 
     def get_mutex(self, key):
         if self.distributed_lock:

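For illustration only, here is a rough sketch of the kind of assertion helper mentioned above, assuming the patch is applied; the helper name, hostnames, and argument values are hypothetical and not part of the existing test suite.

from unittest import mock

from dogpile.cache.backends.redis import RedisBackend


def assert_reader_and_writer_clients(writer_args, reader_args):
    # Patch redis.StrictRedis so we can observe how many clients the
    # backend constructs and with which connection arguments.
    with mock.patch("redis.StrictRedis") as strict_redis:
        RedisBackend({**writer_args, "reader_client": reader_args})

        # with the patch applied, the backend should construct two
        # separate clients: first the writer, then the reader
        assert strict_redis.call_count == 2
        writer_call, reader_call = strict_redis.call_args_list
        assert writer_call.kwargs["host"] == writer_args["host"]
        assert reader_call.kwargs["host"] == reader_args["host"]


# example use with placeholder hostnames
assert_reader_and_writer_clients(
    {"host": "primary.example.com", "port": 6379},
    {"host": "replica.example.com", "port": 6379},
)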
zzzeek · Jun 24 '22 12:06

Thanks for the response. Leaving for vacation right now, will be looking further into it in a few weeks.

bartjolinktopicus · Jun 24 '22 14:06