resty-redis-cluster
resty-redis-cluster copied to clipboard
Performance problem of set
--- Per-request access handler: refreshes the session's last-seen timestamp
--- in the Redis cluster keyed by the session cookie's id.
-- @param self handler instance; expects self.config.redis.{nodes,password}
-- @return ok, err — the result of the cluster SET (or false, err on a
--         missing session cookie)
function _M.access(self)
local host = ngx.var.host
local session_cookie = self:session_cookie(host)
local session_id, err = session_cookie:get()
-- Bug fix: `err` was ignored and a nil session_id was passed straight to
-- redis:set(), which fails the request (the likely source of the non-2xx
-- responses seen under load). Bail out early instead.
if not session_id then
    return false, err or "no session cookie"
end
-- NOTE(review): the result of this call was never used in the original;
-- the call is kept in case it has side effects — confirm and drop if not.
local sso_session_store = self:get_sso_session_store()
-- Performance: building this config table on every request creates
-- avoidable allocation/GC pressure and makes redis_client:new() redo its
-- config validation each time. Build it once per handler instance and
-- reuse it (resty-redis-cluster's slot cache is keyed by `name`, so the
-- per-request new() itself only fetches slots on a cache miss).
local config = self._cluster_config
if not config then
    config = {
        name = "esec_cluster",
        enableSlaveRead = true,
        serv_list = self.config.redis.nodes,
        auth = self.config.redis.password,
        keepalive_timeout = 60000,
        keepalive_cons = 1000,
        connect_timeout = 1000,
        read_timeout = 1000,
        send_timeout = 1000,
        max_redirection = 3,
        max_connection_attempts = 1
    }
    self._cluster_config = config
end
local redis = redis_client:new(config)
return redis:set(session_id, ngx.now())
end
root@ubuntu:/opt/wrk# wrk -t96 -c240 -d30s --script test.lua https://w3.esec.test.com Running 30s test @ https://w3.esec.test.com 96 threads and 240 connections Thread Stats Avg Stdev Max +/- Stdev Latency 116.09ms 123.13ms 1.07s 85.94% Req/Sec 21.75 16.95 170.00 81.88% 56053 requests in 30.10s, 44.53MB read Non-2xx or 3xx responses: 6626 Requests/sec: 1862.36 Transfer/sec: 1.48MB
**When rediscluster is used for the set operation, the QPS is only about 2,000, whereas with plain resty.redis the QPS reaches about 30,000. Does anyone know why? I'm using a 48-core server.**
--- Per-request access handler (plain resty.redis variant): stores the
--- session's last-seen timestamp against its cookie id on a single node.
-- @param self handler instance
-- @return true on success, or false, err on any failure
function _M.access(self)
local host = ngx.var.host
local session_cookie = self:session_cookie(host)
local session_id, err = session_cookie:get()
-- Bug fix: `err` was ignored and a nil session_id would make red:set()
-- fail with a bad-argument error; reject the request early instead.
if not session_id then
    return false, err or "no session cookie"
end
local red = redis:new()
-- connect / send / read timeouts, in milliseconds
red:set_timeouts(1000, 1000, 1000)
-- NOTE(review): endpoint is hard-coded; move it into self.config.redis
-- for parity with the cluster variant.
local ok, err = red:connect("172.18.31.24", 7005)
if not ok then
    ngx.say("failed to connect: ", err)
    return false, err
end
ok, err = red:set(session_id, ngx.now())
if not ok then
    ngx.say("failed to set session_id: ", err)
    return false, err
end
-- Return the connection to the cosocket pool (10 s idle, 100 per worker)
-- instead of closing it; reassign rather than re-declaring `ok, err`
-- (the original shadowed them with a second `local`).
ok, err = red:set_keepalive(10000, 100)
if not ok then
    ngx.say("failed to set keepalive: ", err)
    return false, err
end
return true
end
root@ubuntu:/opt/wrk# wrk -t96 -c240 -d30s --script test.lua https://w3.esec.test.com Running 30s test @ https://w3.esec.test.com 96 threads and 240 connections Thread Stats Avg Stdev Max +/- Stdev Latency 7.41ms 23.76ms 1.01s 99.79% Req/Sec 305.92 17.04 0.97k 90.91% 879734 requests in 30.08s, 197.96MB read Requests/sec: 29243.78 Transfer/sec: 6.58MB