From edf5b2b4cc7b6c8be8b418367fe9890180c008e7 Mon Sep 17 00:00:00 2001
From: Hamish
Date: Wed, 30 Aug 2017 13:59:38 +0100
Subject: [PATCH 01/90] Tests for bad redis-connector params

---
 t/01-unit/ledge.t | 76 +++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 76 insertions(+)

diff --git a/t/01-unit/ledge.t b/t/01-unit/ledge.t
index 17fd426e..392c3b59 100644
--- a/t/01-unit/ledge.t
+++ b/t/01-unit/ledge.t
@@ -222,3 +222,79 @@ GET /ledge_9
 dog
 --- no_error_log
 [error]
+
+=== TEST 10: Bad redis-connector params are caught
+--- http_config
+lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;;";
+
+init_by_lua_block {
+    require("ledge").configure({
+        redis_connector_params = {
+            bad_time = true
+        },
+    })
+    require("ledge").set_handler_defaults({
+        storage_driver_config = {
+            redis_connector_params = {
+                bad_time2 = true
+            },
+        }
+    })
+}
+--- config
+location /ledge_10 {
+    content_by_lua_block {
+        local ok, err = require("ledge").create_redis_connection()
+        assert(ok == nil and err ~= nil,
+            "create_redis_connection() should return negatively with error")
+
+        local ok, err = require("ledge").create_storage_connection()
+        ngx.log(ngx.DEBUG, ok, " ", err)
+        assert(ok == nil and err ~= nil,
+            "create_storage_connection() should return negatively with error")
+
+        local ok, err = require("ledge").create_qless_connection()
+        assert(ok == nil and err ~= nil,
+            "create_qless_connection() should return negatively with error")
+
+        local ok, err = require("ledge").create_redis_slave_connection()
+        assert(ok == nil and err ~= nil,
+            "create_redis_slave_connection() should return negatively with error")
+
+        -- Test broken redis-connector params are caught when closing redis somehow
+        local ok, err = require("ledge").close_redis_connection({dummy = true})
+        assert(ok == nil and err ~= nil,
+            "close_redis_connection() should return negatively with error")
+
+        -- Test trying to close a non-existent redis instance
+        local ok, err = require("ledge").close_redis_connection({})
+        assert(ok == nil and err ~= nil,
+            "close_redis_connection() should return negatively with error")
+
+        ngx.say("OK")
+    }
+}
+--- request
+GET /ledge_10
+--- error_code: 200
+--- response_body
+OK
+
+=== TEST 11: Closing an empty redis instance
+--- ONLY
+--- http_config eval: $::HttpConfig
+--- config
+location /ledge_11 {
+    content_by_lua_block {
+        local ok, err = require("ledge").close_redis_connection({})
+        assert(ok == nil,
+            "close_redis_connection() should return negatively")
+
+        ngx.say("OK")
+    }
+}
+--- request
+GET /ledge_11
+--- error_code: 200
+--- response_body
+OK

From 4a2a0029370e772339b2bc9ad4c6591f936a5e9d Mon Sep 17 00:00:00 2001
From: Hamish
Date: Wed, 30 Aug 2017 14:00:40 +0100
Subject: [PATCH 02/90] Tests for bad redis-connector params

---
 t/01-unit/ledge.t | 1 -
 1 file changed, 1 deletion(-)

diff --git a/t/01-unit/ledge.t b/t/01-unit/ledge.t
index 392c3b59..d1a181fa 100644
--- a/t/01-unit/ledge.t
+++ b/t/01-unit/ledge.t
@@ -281,7 +281,6 @@ GET /ledge_10
 OK
 
 === TEST 11: Closing an empty redis instance
---- ONLY
 --- http_config eval: $::HttpConfig
 --- config
 location /ledge_11 {

From d3db64c60a295ea43e2bc8da8e99e87d538d8ea4 Mon Sep 17 00:00:00 2001
From: Hamish
Date: Wed, 30 Aug 2017 13:59:57 +0100
Subject: [PATCH 03/90] Return errors from storage driver new/connect

---
 lib/ledge.lua | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/lib/ledge.lua b/lib/ledge.lua
index 78578ba2..57966664 100644
--- a/lib/ledge.lua
+++ b/lib/ledge.lua
@@ 
-220,11 +220,13 @@ local function create_storage_connection(driver_module, storage_driver_config) local ok, module = pcall(require, driver_module) if not ok then return nil, module end - local ok, driver = pcall(module.new) + local ok, driver, err = pcall(module.new) if not ok then return nil, driver end + if not driver then return nil, err end - local ok, conn = pcall(driver.connect, driver, storage_driver_config) + local ok, conn, err = pcall(driver.connect, driver, storage_driver_config) if not ok then return nil, conn end + if not conn then return nil, err end return conn, nil end From 091c756073cca8046cdab25e03bf5ac7208fa4d5 Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 30 Aug 2017 14:29:44 +0100 Subject: [PATCH 04/90] Catch failure to create response object in read_from_cache --- lib/ledge/handler.lua | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index 0b44e223..f82d2278 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -301,9 +301,10 @@ end local function read_from_cache(self) - local res = response.new(self.redis, cache_key_chain(self)) - local ok, err = res:read() + local res, err = response.new(self.redis, cache_key_chain(self)) + if not res then return nil, err end + local ok, err = res:read() if err then -- Error, abort request ngx_log(ngx_ERR, "could not read response: ", err) From 965009135002bf69e4992714b38233d2e6790752 Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 30 Aug 2017 14:30:32 +0100 Subject: [PATCH 05/90] Test Authorization is passed through by default --- t/02-integration/stale-while-revalidate.t | 3 +++ 1 file changed, 3 insertions(+) diff --git a/t/02-integration/stale-while-revalidate.t b/t/02-integration/stale-while-revalidate.t index ccd8cc07..e2721d65 100644 --- a/t/02-integration/stale-while-revalidate.t +++ b/t/02-integration/stale-while-revalidate.t @@ -428,6 +428,7 @@ location /stale_5 { local hdr = ngx.req.get_headers() ngx.say("X-Test: ",hdr["X-Test"]) ngx.say("Cookie: ",hdr["Cookie"]) + ngx.say("Authorization: ",hdr["Authorization"]) } } --- request @@ -435,6 +436,7 @@ GET /stale_5_prx --- more_headers X-Test: foobar Cookie: baz=qux +Authorization: test --- response_body TEST 5 --- wait: 1 @@ -457,6 +459,7 @@ GET /stale_5_prx TEST 5b X-Test: foobar Cookie: baz=qux +Authorization: test --- no_error_log [error] From 6993abf15ebde46524eb823510836ab914289672 Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 30 Aug 2017 16:27:57 +0100 Subject: [PATCH 06/90] Adding more unit tests, fixing coverage reporting in some --- t/01-unit/events.t | 8 ++- t/01-unit/handler.t | 151 +++++++++++++++++++++++++++++++++++++++ t/01-unit/ledge.t | 29 ++++++-- t/01-unit/storage.t | 29 ++++++++ t/01-unit/worker.t | 50 +++++++++---- t/02-integration/cache.t | 26 +++++++ 6 files changed, 272 insertions(+), 21 deletions(-) diff --git a/t/01-unit/events.t b/t/01-unit/events.t index 908491db..99a711f3 100644 --- a/t/01-unit/events.t +++ b/t/01-unit/events.t @@ -105,11 +105,14 @@ function 3 === TEST 3: Default binds ---- http_config +--- http_config eval +qq { lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;../lua-resty-http/lib/?.lua;../lua-ffi-zlib/lib/?.lua;;"; init_by_lua_block { - require("luacov.runner").init() + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end require("ledge").bind("after_cache_read", function(arg) ngx.say("default 1: ", arg) @@ -119,6 +122,7 @@ init_by_lua_block { ngx.say("default 2: ", arg) end) 
} +} --- config location /t { rewrite ^(.*)_prx$ $1 break; diff --git a/t/01-unit/handler.t b/t/01-unit/handler.t index 3541f43d..ffd9c354 100644 --- a/t/01-unit/handler.t +++ b/t/01-unit/handler.t @@ -192,3 +192,154 @@ location /t { GET /t --- no_error_log [error] + +=== TEST 6: read from cache +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local handler = require("ledge").create_handler() + + local res, err = handler:read_from_cache() + assert(res == nil and err ~= nil, + "read_from_cache should error with no redis connections") + + handler.redis = require("ledge").create_redis_connection() + handler.storage = require("ledge").create_storage_connection( + handler.config.storage_driver, + handler.config.storage_driver_config + ) + local res, err = handler:read_from_cache() + assert(res and not err, "read_from_cache should return positively") + } + +} +--- request +GET /t +--- no_error_log +[error] + + +=== TEST 7: Call run with bad redis details +--- http_config eval +qq{ +lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;../lua-resty-http/lib/?.lua;../lua-ffi-zlib/lib/?.lua;;"; + +init_by_lua_block { + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end + + require("ledge").configure({ + redis_connector_params = { + url = "redis://127.0.0.1:0/", + }, + qless_db = 123, + }) + + require("ledge").set_handler_defaults({ + upstream_host = "127.0.0.1", + upstream_port = 1234, + }) + + require("ledge.state_machine").set_debug(true) +} +} +--- config +location /t { + content_by_lua_block { + local ok, err = require("ledge").create_handler():run() + assert(ok == nil and err ~= nil, + "run should return negatively with an error") + ngx.say("OK") + } +} +--- request +GET /t +--- response_body +OK +--- no_error_log +[error] + +=== TEST 8: save to cache +--- http_config eval: $::HttpConfig +--- config +location /t_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + + local res, err = handler:save_to_cache() + assert(res == nil and err ~= nil, + "read_from_cache should error with no response") + + local res, err = handler:fetch_from_origin() + assert(res == nil and err ~= nil, + "fetch_from_origin should error with no redis") + + handler.redis = require("ledge").create_redis_connection() + handler.storage = require("ledge").create_storage_connection( + handler.config.storage_driver, + handler.config.storage_driver_config + ) + local res, err = handler:fetch_from_origin() + assert(res and not err, "fetch_from_origin should return positively") + + local res, err = handler:save_to_cache(res) + ngx.log(ngx.DEBUG, res, " ", err) + assert(res and not err, "save_to_cache should return positively") + + ngx.say("OK") + } + +} +location /t { + echo "origin"; +} +--- request +GET /t_prx +--- no_error_log +[error] +--- response_body +OK + +=== TEST 8: save to cache, no body +--- http_config eval: $::HttpConfig +--- config +location /t_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + + local res, err = handler:save_to_cache() + assert(res == nil and err ~= nil, + "read_from_cache should error with no response") + + handler.redis = require("ledge").create_redis_connection() + handler.storage = require("ledge").create_storage_connection( + handler.config.storage_driver, + handler.config.storage_driver_config + ) + + local res, err = handler:fetch_from_origin() + assert(res and 
not err, "fetch_from_origin should return positively") + + res.has_body = false + + local res, err = handler:save_to_cache(res) + ngx.log(ngx.DEBUG, res, " ", err) + assert(res and not err, "save_to_cache should return positively") + + ngx.say("OK") + } + +} +location /t { + echo "origin"; +} +--- request +GET /t_prx +--- no_error_log +[error] +--- response_body +OK diff --git a/t/01-unit/ledge.t b/t/01-unit/ledge.t index d1a181fa..be9c88ba 100644 --- a/t/01-unit/ledge.t +++ b/t/01-unit/ledge.t @@ -66,13 +66,18 @@ GET /ledge_2 === TEST 3: Non existent params cannot be set ---- http_config +--- http_config eval +qq { lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;;"; init_by_lua_block { + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end local ok, err = pcall(require("ledge").configure, { foo = "bar" }) assert(string.find(err, "field foo does not exist"), "error 'field foo does not exist' should be thrown") } +} --- config location /ledge_3 { echo "OK"; @@ -124,16 +129,21 @@ dog === TEST 6: Create bad redis connection ---- http_config +--- http_config eval +qq{ lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;;"; init_by_lua_block { + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end require("ledge").configure({ redis_connector_params = { port = 0, -- bad port }, }) } +} --- config location /ledge_6 { content_by_lua_block { @@ -169,10 +179,14 @@ false === TEST 8: Create bad storage connection ---- http_config +--- http_config eval +qq{ lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;;"; init_by_lua_block { + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end require("ledge").set_handler_defaults({ storage_driver_config = { redis_connector_params = { @@ -181,6 +195,7 @@ init_by_lua_block { } }) } +} --- config location /ledge_8 { content_by_lua_block { @@ -224,10 +239,14 @@ dog [error] === TEST 10: Bad redis-connector params are caught ---- http_config +--- http_config eval +qq{ lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;;"; init_by_lua_block { + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end require("ledge").configure({ redis_connector_params = { bad_time = true @@ -241,6 +260,7 @@ init_by_lua_block { } }) } +} --- config location /ledge_10 { content_by_lua_block { @@ -249,7 +269,6 @@ location /ledge_10 { "create_redis_connection() should return negatively with error") local ok, err = require("ledge").create_storage_connection() - ngx.log(ngx.DEBUG, ok, " ", err) assert(ok == nil and err ~= nil, "create_storage_connection() should return negatively with error") diff --git a/t/01-unit/storage.t b/t/01-unit/storage.t index 1ee45165..e0eaa2a3 100644 --- a/t/01-unit/storage.t +++ b/t/01-unit/storage.t @@ -872,3 +872,32 @@ location /storage { ] --- no_error_log [error] + +=== TEST 13: Handler run with bad config should return an error +--- http_config eval: $::HttpConfig +--- config +location /storage { + content_by_lua_block { + local config = get_backend(ngx.req.get_uri_args()["backend"]) + local ok, err = require("ledge").create_handler({ + storage_driver = config.module, + storage_driver_config = config.bad_params + }):run() + assert(ok == nil and err ~= nil, + "run should return negatively with an error") + + ngx.print(ngx.req.get_uri_args()["backend"], " OK") + } +} +--- request eval +[ + "GET 
/storage?backend=redis", + "GET /storage?backend=redis_notransact", +] +--- response_body eval +[ + "redis OK", + "redis_notransact OK", +] +--- no_error_log +[error] diff --git a/t/01-unit/worker.t b/t/01-unit/worker.t index fd916f28..2678996a 100644 --- a/t/01-unit/worker.t +++ b/t/01-unit/worker.t @@ -8,6 +8,18 @@ my $pwd = cwd(); $ENV{TEST_USE_RESTY_CORE} ||= 'nil'; $ENV{TEST_COVERAGE} ||= 0; +our $HttpConfig = qq{ + lua_package_path "./lib/?.lua;;"; + init_by_lua_block { + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end + } + init_worker_by_lua_block { + assert(require("ledge.worker").new()) + } +}; + no_long_string(); no_diff(); run_tests(); @@ -15,11 +27,7 @@ run_tests(); __DATA__ === TEST 1: Load module without errors. ---- http_config -lua_package_path "./lib/?.lua;;"; -init_worker_by_lua_block { - assert(require("ledge.worker").new()) -} +--- http_config eval: $::HttpConfig --- config location /worker_1 { echo "OK"; @@ -31,11 +39,7 @@ GET /worker_1 === TEST 2: Create worker with default config ---- http_config -lua_package_path "./lib/?.lua;;"; -init_worker_by_lua_block { - assert(require("ledge.worker").new()) -} +--- http_config eval: $::HttpConfig --- config location /worker_2 { echo "OK"; @@ -47,13 +51,20 @@ GET /worker_2 === TEST 4: Create worker with bad config key ---- http_config +--- http_config eval +qq { lua_package_path "./lib/?.lua;;"; +init_by_lua_block { + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end +} init_worker_by_lua_block { require("ledge.worker").new({ foo = "one", }) } +} --- config location /worker_4 { echo "OK"; @@ -65,11 +76,18 @@ field foo does not exist === TEST 5: Run workers without errors ---- http_config +--- http_config eval +qq { lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;;"; +init_by_lua_block { + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end +} init_worker_by_lua_block { require("ledge.worker").new():run() } +} --- config location /worker_5 { echo "OK"; @@ -81,11 +99,14 @@ GET /worker_5 === TEST 6: Push a job and confirm it runs ---- http_config +--- http_config eval +qq { lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;;"; init_by_lua_block { foo = 1 - + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end package.loaded["ledge.job.test"] = { perform = function(job) foo = foo + 1 @@ -96,6 +117,7 @@ init_by_lua_block { init_worker_by_lua_block { require("ledge.worker").new():run() } +} --- config location /worker_6 { content_by_lua_block { diff --git a/t/02-integration/cache.t b/t/02-integration/cache.t index 4e990f20..e391ba20 100644 --- a/t/02-integration/cache.t +++ b/t/02-integration/cache.t @@ -73,6 +73,32 @@ TEST 1 --- no_error_log [error] +=== TEST 1b: Subzero request; X-Cache: MISS is prepended +--- http_config eval: $::HttpConfig +--- config +location /cache_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge").create_handler():run() + } +} + +location /cache { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.header["X-Cache"] = "HIT from example.com" + ngx.say("TEST 1") + } +} +--- request +GET /cache_prx?append +--- response_headers_like +X-Cache: MISS from .+, HIT from example.com +--- response_body +TEST 1 +--- no_error_log +[error] + === TEST 2: Hot request; X-Cache: HIT --- http_config eval: $::HttpConfig From 55a83708500f77012189d1155fe5988a4fa9f066 Mon Sep 17 00:00:00 
2001 From: Hamish Date: Wed, 30 Aug 2017 16:28:50 +0100 Subject: [PATCH 07/90] Remove unused actions --- lib/ledge/state_machine/actions.lua | 23 ----------------------- 1 file changed, 23 deletions(-) diff --git a/lib/ledge/state_machine/actions.lua b/lib/ledge/state_machine/actions.lua index c087f125..f0b15ab2 100644 --- a/lib/ledge/state_machine/actions.lua +++ b/lib/ledge/state_machine/actions.lua @@ -168,29 +168,14 @@ return { ngx_req_set_header("If-None-Match", client_validators["If-None-Match"]) end, - add_validators_from_cache = function(handler) - local cached_res = handler.response - - ngx_req_set_header("If-Modified-Since", cached_res.header["Last-Modified"]) - ngx_req_set_header("If-None-Match", cached_res.header["Etag"]) - end, - add_stale_warning = function(handler) return handler:add_warning("110") end, - add_transformation_warning = function(handler) - return handler:add_warning("214") - end, - add_disconnected_warning = function(handler) return handler:add_warning("112") end, - serve = function(handler) - return handler:serve() - end, - set_json_response = function(handler) local res = response.new(handler.redis, handler:cache_key_chain()) res.header["Content-Type"] = "application/json" @@ -219,10 +204,6 @@ return { return handler:delete_from_cache() end, - release_collapse_lock = function(handler) - handler.redis:del(handler:cache_key_chain().fetching_lock) - end, - disable_output_buffers = function(handler) handler.output_buffers_enabled = false end, @@ -247,10 +228,6 @@ return { ngx.status = ngx.HTTP_GATEWAY_TIMEOUT end, - set_http_connection_timed_out = function(handler) - ngx.status = 524 - end, - set_http_internal_server_error = function(handler) ngx.status = ngx.HTTP_INTERNAL_SERVER_ERROR end, From 11604cac044107ab75e6582a6965fcf57034d0ea Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 30 Aug 2017 16:29:05 +0100 Subject: [PATCH 08/90] Minor error handling improvements --- lib/ledge.lua | 2 +- lib/ledge/handler.lua | 5 ++++- lib/ledge/jobs/collect_entity.lua | 4 ++-- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/lib/ledge.lua b/lib/ledge.lua index 57966664..5068e67b 100644 --- a/lib/ledge.lua +++ b/lib/ledge.lua @@ -181,7 +181,7 @@ local function close_redis_connection(redis) if not next(redis) then -- Possible for this to be called before we've created a redis conn -- Ensure we actually have a resty-redis instance to close - return nil + return nil, "No redis connection to close" end local rc, err = redis_connector.new(config.redis_connector_params) diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index f82d2278..7d1bf8f0 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -368,7 +368,8 @@ local hop_by_hop_headers = { -- Fetches a resource from the origin server. local function fetch_from_origin(self) - local res = response.new(self.redis, cache_key_chain(self)) + local res, err = response.new(self.redis, cache_key_chain(self)) + if not res then return nil, err end local method = ngx['HTTP_' .. 
ngx_req_get_method()] if not method then @@ -655,6 +656,7 @@ _M.fetch_in_background = fetch_in_background local function save_to_cache(self, res) + if not res then return nil, "no response to save" end emit(self, "before_save", res) -- Length is only set if there was a Content-Length header @@ -812,6 +814,7 @@ local function save_to_cache(self, res) put_background_job(unpack(gc_job_spec)) end end + return true end _M.save_to_cache = save_to_cache diff --git a/lib/ledge/jobs/collect_entity.lua b/lib/ledge/jobs/collect_entity.lua index 379da95c..86bd968c 100644 --- a/lib/ledge/jobs/collect_entity.lua +++ b/lib/ledge/jobs/collect_entity.lua @@ -11,13 +11,13 @@ local _M = { -- Cleans up expired items and keeps track of memory usage. function _M.perform(job) - local storage = create_storage_connection( + local storage, err = create_storage_connection( job.data.storage_driver, job.data.storage_driver_config ) if not storage then - return nil, "job-error", "could not connect to storage driver" + return nil, "job-error", "could not connect to storage driver: "..tostring(err) end local ok, err = storage:delete(job.data.entity_id) From 230d6d3fb31ecd3010531f924efb815b7a8ad9d8 Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 30 Aug 2017 16:36:24 +0100 Subject: [PATCH 09/90] Remove unused locals --- lib/ledge/esi.lua | 19 ++----------------- lib/ledge/esi/processor_1_0.lua | 4 ---- 2 files changed, 2 insertions(+), 21 deletions(-) diff --git a/lib/ledge/esi.lua b/lib/ledge/esi.lua index 043bd213..0b80a8b7 100644 --- a/lib/ledge/esi.lua +++ b/lib/ledge/esi.lua @@ -1,35 +1,20 @@ -local http = require "resty.http" -local cookie = require "resty.cookie" local h_util = require "ledge.header_util" -local tag_parser = require "ledge.esi.tag_parser" local util = require "ledge.util" -local tostring, type, tonumber, next, unpack, pcall, setfenv = - tostring, type, tonumber, next, unpack, pcall, setfenv +local tostring, type, tonumber, next = + tostring, type, tonumber, next local str_sub = string.sub local str_find = string.find -local str_len = string.len -local str_split = util.string.split local tbl_concat = table.concat local tbl_insert = table.insert -local co_yield = coroutine.yield -local co_wrap = util.coroutine.wrap - -local ngx_re_gsub = ngx.re.gsub -local ngx_re_sub = ngx.re.sub local ngx_re_match = ngx.re.match -local ngx_re_gmatch = ngx.re.gmatch -local ngx_re_find = ngx.re.find local ngx_req_get_headers = ngx.req.get_headers -local ngx_req_get_method = ngx.req.get_method local ngx_req_get_uri_args = ngx.req.get_uri_args -local ngx_crc32_long = ngx.crc32_long local ngx_encode_args = ngx.encode_args local ngx_req_set_uri_args = ngx.req.set_uri_args -local ngx_flush = ngx.flush local ngx_var = ngx.var local ngx_log = ngx.log local ngx_ERR = ngx.ERR diff --git a/lib/ledge/esi/processor_1_0.lua b/lib/ledge/esi/processor_1_0.lua index 69da062f..b1501021 100644 --- a/lib/ledge/esi/processor_1_0.lua +++ b/lib/ledge/esi/processor_1_0.lua @@ -8,7 +8,6 @@ local tostring, type, tonumber, next, unpack, pcall, setfenv = local str_sub = string.sub local str_find = string.find -local str_len = string.len local tbl_concat = table.concat local tbl_insert = table.insert @@ -19,12 +18,9 @@ local co_wrap = util.coroutine.wrap local ngx_re_gsub = ngx.re.gsub local ngx_re_sub = ngx.re.sub local ngx_re_match = ngx.re.match -local ngx_re_gmatch = ngx.re.gmatch local ngx_re_find = ngx.re.find local ngx_req_get_headers = ngx.req.get_headers -local ngx_req_get_method = ngx.req.get_method local ngx_req_get_uri_args = 
ngx.req.get_uri_args -local ngx_crc32_long = ngx.crc32_long local ngx_flush = ngx.flush local ngx_var = ngx.var local ngx_log = ngx.log From 6955eaf9a1ad5774d676b9cd5fcb3f4f017d2db4 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 31 Aug 2017 14:30:05 +0100 Subject: [PATCH 10/90] Unit tests for tag_parser --- t/01-unit/tag_parser.t | 385 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 385 insertions(+) create mode 100644 t/01-unit/tag_parser.t diff --git a/t/01-unit/tag_parser.t b/t/01-unit/tag_parser.t new file mode 100644 index 00000000..41043e94 --- /dev/null +++ b/t/01-unit/tag_parser.t @@ -0,0 +1,385 @@ +use Test::Nginx::Socket 'no_plan'; +use Cwd qw(cwd); +my $pwd = cwd(); + +$ENV{TEST_NGINX_PORT} |= 1984; +$ENV{TEST_COVERAGE} ||= 0; + +our $HttpConfig = qq{ +lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;../lua-resty-http/lib/?.lua;../lua-ffi-zlib/lib/?.lua;;"; + +init_by_lua_block { + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end + function print_next(tag, before, after) + if not tag.closing then + tag.closing = {} + end + ngx.say(tag.closing.from) + ngx.say(tag.closing.to) + ngx.say(tag.closing.tag) + ngx.say(tag.whole) + ngx.say(tag.contents) + ngx.say(before) + ngx.say(after) + end + function strip_whitespace(content) + return ngx.re.gsub(content, [[\\s*\\n\\s*]], "") + end + function check_regex(regex, content, msg) + local to, from = ngx.re.find(content, regex, "soj") + assert(from ~= nil and to ~= nil, (msg or "regex should match")) + end +} + +}; # HttpConfig + +no_long_string(); +#no_diff(); +run_tests(); + + +__DATA__ +=== TEST 1: Load module +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local tag_parser = assert(require("ledge.esi.tag_parser"), + "module should load without errors") + + local parser = tag_parser.new("Content") + assert(parser, "tag_parser.new should return positively") + + ngx.say("OK") + } +} + +--- request +GET /t +--- error_code: 200 +--- no_error_log +[error] +--- response_body +OK + +=== TEST 2: Find next tag +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local tag_parser = assert(require("ledge.esi.tag_parser"), + "module should load without errors") + + local parser = tag_parser.new("content-beforeinsidecontent-after") + assert(parser, "tag_parser.new should return positively") + + local tag, before, after = parser:next("foo") + assert(tag, "next should find a tag") + print_next(tag, before, after) + } +} + +--- request +GET /t +--- error_code: 200 +--- no_error_log +[error] +--- response_body +26 +31 + +inside +inside +content-before +content-after + +=== TEST 3: Default next tag finds esi +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local tag_parser = assert(require("ledge.esi.tag_parser"), + "module should load without errors") + + local parser = tag_parser.new("content-beforeinsidecontent-afterlast") + assert(parser, "tag_parser.new should return positively") + + local tag, before, after = parser:next() + assert(tag, "next should find a tag") + print_next(tag, before, after) + + ngx.say("##########") + + local tag, before, after = parser:next() + assert(tag, "next should find a tag") + print_next(tag, before, after) + } +} + +--- request +GET /t +--- error_code: 200 +--- no_error_log +[error] +--- response_body +30 +39 + +inside +inside +content-before +content-afterlast +########## +68 +70 +--> + +comment +content-after 
+last + +=== TEST 4: Find tag with attributes +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local tag_parser = assert(require("ledge.esi.tag_parser"), + "module should load without errors") + + local parser = tag_parser.new("content-beforeinsidecontent-after") + assert(parser, "tag_parser.new should return positively") + + local tag, before, after = parser:next("foo") + assert(tag, "next should find a tag") + print_next(tag, before, after) + } +} + +--- request +GET /t +--- error_code: 200 +--- no_error_log +[error] +--- response_body +54 +59 + +inside +attr='value' attr2='value2'>inside +content-before +content-after + +=== TEST 4: Find nested tags +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local tag_parser = assert(require("ledge.esi.tag_parser"), + "module should load without errors") + + local content = strip_whitespace([[ +content-before + + inside-foo + + inside-bar + + after-bar + + inside-foo-2 + + +content-after +]]) + + local parser = tag_parser.new(content) + assert(parser, "tag_parser.new should return positively") + + local tag, before, after = parser:next("foo") + assert(tag, "next should find a tag") + print_next(tag, before, after) + + ngx.say("#######") + + local parser = tag_parser.new(content) + assert(parser, "tag_parser.new should return positively") + + local tag, before, after = parser:next("bar") + assert(tag, "next should find a tag") + print_next(tag, before, after) + } +} + +--- request +GET /t +--- error_code: 200 +--- no_error_log +[error] +--- response_body +83 +88 + +inside-fooinside-barafter-barinside-foo-2 +inside-fooinside-barafter-barinside-foo-2 +content-before +content-after +####### +45 +50 + +inside-bar +inside-bar +content-beforeinside-foo +after-barinside-foo-2content-after + +=== TEST 5: Pattern functions return valid regex +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local tag_parser = assert(require("ledge.esi.tag_parser"), + "module should load without errors") + + local ok, err = ngx.re.find("", tag_parser.open_pattern("tag")) + assert(err == nil, "open_pattern should return a valid regex") + + local ok, err = ngx.re.find("", tag_parser.close_pattern("tag")) + assert(err == nil, "open_pattern should return a valid regex") + + local ok, err = ngx.re.find("", tag_parser.either_pattern("tag")) + assert(err == nil, "open_pattern should return a valid regex") + + ngx.say("OK") + } +} + +--- request +GET /t +--- error_code: 200 +--- no_error_log +[error] +--- response_body +OK + +=== TEST 5: open pattern matches +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local tag_parser = assert(require("ledge.esi.tag_parser"), + "module should load without errors") + + local regex = tag_parser.open_pattern("tag") + ngx.log(ngx.DEBUG, regex) + + local checks = { + "start end", "simple tag", + "start end", "simple closed tag", + "start asdfsd end", "simple closed tag with content", + "start end", "simple tag whitespace", + "start end", "self-closing tag", + "start end", "self-closing tag whitespace", + "start end", "simple tag with attribute", + 'start end', "simple tag with attribute (single-quote)", + 'start end', "simple tag with attribute (numeric)", + 'start end', "simple tag with attribute (special chars)", + } + + for i=1,#checks,2 do + check_regex(regex, checks[i], "open_pattern should match "..checks[i+1]) + end + + ngx.say("OK") + } +} + +--- request +GET /t +--- 
error_code: 200 +--- no_error_log +[error] +--- response_body +OK + +=== TEST 6: close pattern matches +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local tag_parser = assert(require("ledge.esi.tag_parser"), + "module should load without errors") + + local regex = tag_parser.close_pattern("tag") + ngx.log(ngx.DEBUG, regex) + + local checks = { + "start end", "simple tag", + "start end", "simple closed tag", + "start asdfsd end", "simple closed tag with content", + "start end", "simple tag with whitespace", + } + + for i=1,#checks,2 do + check_regex(regex, checks[i], "close_pattern should match "..checks[i+1]) + end + + ngx.say("OK") + } +} + +--- request +GET /t +--- error_code: 200 +--- no_error_log +[error] +--- response_body +OK + +=== TEST 7: either pattern matches +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local tag_parser = assert(require("ledge.esi.tag_parser"), + "module should load without errors") + + local regex = tag_parser.either_pattern("tag") + ngx.log(ngx.DEBUG, regex) + + local checks = { + "start end", "simple tag", + "start end", "simple closed tag", + "start asdfsd end", "simple closed tag with content", + "start end", "simple tag whitespace", + "start end", "self-closing tag", + "start end", "self-closing tag whitespace", + "start end", "simple tag with attribute", + 'start end', "simple tag with attribute (single-quote)", + 'start end', "simple tag with attribute (numeric)", + 'start end', "simple tag with attribute (special chars)", + + "start end", "simple tag", + "start end", "simple closed tag", + "start asdfsd end", "simple closed tag with content", + "start end", "simple tag with whitespace", + } + + for i=1,#checks,2 do + check_regex(regex, checks[i], "either_pattern should match "..checks[i+1]) + end + + ngx.say("OK") + } +} + +--- request +GET /t +--- error_code: 200 +--- no_error_log +[error] +--- response_body +OK From 57eb004f6472f553e91062f16764628662d58fa3 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 31 Aug 2017 16:38:20 +0100 Subject: [PATCH 11/90] Unit tests for esi_eval_var --- t/01-unit/processor_1_0.t | 429 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 429 insertions(+) create mode 100644 t/01-unit/processor_1_0.t diff --git a/t/01-unit/processor_1_0.t b/t/01-unit/processor_1_0.t new file mode 100644 index 00000000..6eb65be9 --- /dev/null +++ b/t/01-unit/processor_1_0.t @@ -0,0 +1,429 @@ +use Test::Nginx::Socket 'no_plan'; +use Cwd qw(cwd); +my $pwd = cwd(); + +$ENV{TEST_LEDGE_REDIS_DATABASE} |= 2; +$ENV{TEST_LEDGE_REDIS_QLESS_DATABASE} |= 3; +$ENV{TEST_NGINX_PORT} |= 1984; +$ENV{TEST_COVERAGE} ||= 0; + +our $HttpConfig = qq{ +lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;../lua-resty-http/lib/?.lua;../lua-ffi-zlib/lib/?.lua;;"; + +init_by_lua_block { + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end + + require("ledge").configure({ + redis_connector_params = { + url = "redis://127.0.0.1:6379/$ENV{TEST_LEDGE_REDIS_DATABASE}", + }, + qless_db = $ENV{TEST_LEDGE_REDIS_QLESS_DATABASE}, + }) + + TEST_NGINX_PORT = $ENV{TEST_NGINX_PORT} + require("ledge").set_handler_defaults({ + upstream_host = "127.0.0.1", + upstream_port = TEST_NGINX_PORT, + }) + +} + +}; # HttpConfig + +no_long_string(); +#no_diff(); +run_tests(); + + +__DATA__ +=== TEST 1: Load module +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local processor = 
assert(require("ledge.esi.processor_1_0"), + "module should load without errors") + + local processor = processor.new(require("ledge").create_handler()) + assert(processor, "processor_1_0.new should return positively") + + ngx.say("OK") + } +} + +--- request +GET /t +--- error_code: 200 +--- no_error_log +[error] +--- response_body +OK + +=== TEST 2: esi_eval_var - QUERY STRING +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local processor = require("ledge.esi.processor_1_0") + local tests = { + --{"var_name", "key", "default", "default_quoted" }, + {"QUERY_STRING", nil, "default", "default_quoted" }, + {"QUERY_STRING", nil, nil, "default_quoted" }, + {"QUERY_STRING", "test_param", "default", "default_quoted" }, + {"QUERY_STRING", "test_param", nil, "default_quoted" }, + } + for _,test in ipairs(tests) do + ngx.say(processor.esi_eval_var(test)) + end + } +} + +--- request eval +[ + "GET /t", + "GET /t?test_param=test", + "GET /t?other_param=test" +] +--- no_error_log +[error] +--- response_body eval +[ +"default +default_quoted +default +default_quoted +", + +"test_param=test +test_param=test +test +test +", + +"other_param=test +other_param=test +default +default_quoted +", +] + + +=== TEST 3: esi_eval_var - HTTP header +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local processor = require("ledge.esi.processor_1_0") + local tests = { + --{"var_name", "key", "default", "default_quoted" }, + {"HTTP_X_TEST", nil, "default", "default_quoted" }, + {"HTTP_X_TEST", nil, nil, "default_quoted" }, + } + for _,test in ipairs(tests) do + ngx.say(processor.esi_eval_var(test)) + end + } +} + +--- request eval +[ + "GET /t", + "GET /t", +] +--- more_headers eval +[ +"X-Dummy: foo", +"X-TEST: test_val" +] +--- no_error_log +[error] +--- response_body eval +[ +"default +default_quoted +", + +"test_val +test_val +", +] + +=== TEST 4: esi_eval_var - Duplicate HTTP header +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local processor = require("ledge.esi.processor_1_0") + local tests = { + --{"var_name", "key", "default", "default_quoted" }, + {"HTTP_X_TEST", nil, "default", "default_quoted" }, + {"HTTP_X_TEST", nil, nil, "default_quoted" }, + } + for _,test in ipairs(tests) do + ngx.say(processor.esi_eval_var(test)) + end + } +} + +--- request eval +[ + "GET /t", + "GET /t", +] +--- more_headers eval +[ +"X-Dummy: foo", + +"X-TEST: test_val +X-TEST: test_val2" +] +--- no_error_log +[error] +--- response_body eval +[ +"default +default_quoted +", + +"test_val, test_val2 +test_val, test_val2 +", +] + +=== TEST 5: esi_eval_var - Cookie +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local processor = require("ledge.esi.processor_1_0") + local tests = { + --{"var_name", "key", "default", "default_quoted" }, + {"HTTP_COOKIE", nil, "default", "default_quoted" }, + {"HTTP_COOKIE", "test_cookie", "default", "default_quoted" }, + {"HTTP_COOKIE", "test_cookie", nil, "default_quoted" }, + } + for _,test in ipairs(tests) do + ngx.say(processor.esi_eval_var(test)) + end + } +} + +--- request eval +[ + "GET /t", + "GET /t", + "GET /t", +] +--- more_headers eval +[ +"", +"Cookie: none=here", +"Cookie: test_cookie=my_cookie" +] +--- no_error_log +[error] +--- response_body eval +[ +"default +default +default_quoted +", + +"none=here +default +default_quoted +", + +"test_cookie=my_cookie +my_cookie +my_cookie +", +] + +=== TEST 6: esi_eval_var - 
Accept-Lang +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local processor = require("ledge.esi.processor_1_0") + local tests = { + --{"var_name", "key", "default", "default_quoted" }, + {"HTTP_ACCEPT_LANGUAGE", nil, "default", "default_quoted" }, + {"HTTP_ACCEPT_LANGUAGE", "en", "default", "default_quoted" }, + {"HTTP_ACCEPT_LANGUAGE", "de", nil, "default_quoted" }, + } + for _,test in ipairs(tests) do + ngx.say(processor.esi_eval_var(test)) + end + } +} + +--- request eval +[ + "GET /t", + "GET /t", + "GET /t", + "GET /t", +] +--- more_headers eval +[ +"", + +"Accept-Language: en-gb", + +"Accept-Language: en-us, blah", + +"Accept-Language: en-gb +Accept-Language: test" +] +--- no_error_log +[error] +--- response_body eval +[ +"default +default +default_quoted +", + +"en-gb +true +false +", + +"en-us, blah +true +false +", + +"en-gb, test +true +false +", +] + +=== TEST 7: esi_eval_var - ESI_ARGS +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + -- Fake ESI args + require("ledge.esi").filter_esi_args( + require("ledge").create_handler() + ) + ngx.log(ngx.DEBUG, require("cjson").encode(ngx.ctx.__ledge_esi_args)) + + local processor = require("ledge.esi.processor_1_0") + local tests = { + --{"var_name", "key", "default", "default_quoted" }, + {"ESI_ARGS", nil, "default", "default_quoted" }, + {"ESI_ARGS", "var1", "default", "default_quoted" }, + {"ESI_ARGS", "var2", nil, "default_quoted" }, + } + for _,test in ipairs(tests) do + ngx.say(processor.esi_eval_var(test)) + end + } +} + +--- request eval +[ + "GET /t", + "GET /t?esi_var1=test1&esi_var2=test2&foo=bar", + "GET /t?esi_var2=test2&foo=bar", + "GET /t?esi_var1=test1&esi_other_var=foo&foo=bar", + "GET /t?esi_var1=test1&esi_var1=test2&foo=bar", +] +--- no_error_log +[error] +--- response_body eval +[ +"default +default +default_quoted +", + +"esi_var2=test2&esi_var1=test1 +test1 +test2 +", + +"esi_var2=test2 +default +test2 +", + +"esi_other_var=foo&esi_var1=test1 +test1 +default_quoted +", + +"esi_var1=test1&esi_var1=test2 +test1,test2 +default_quoted +", +] + +=== TEST 8: esi_eval_var - custom vars +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + ngx.ctx.__ledge_esi_custom_variables = ngx.req.get_uri_args() or {} + + if ngx.ctx.__ledge_esi_custom_variables["empty"] then + ngx.ctx.__ledge_esi_custom_variables = {} + else + ngx.ctx.__ledge_esi_custom_variables["deep"] = {["table"] = "value!"} + end + + local processor = require("ledge.esi.processor_1_0") + local tests = { + --{"var_name", "key", "default", "default_quoted" }, + {"var1", nil, "default", "default_quoted" }, + {"var2", nil, nil, "default_quoted" }, + {"var1", "subvar", nil, "default_quoted" }, + {"deep", "table", "default", "default_quoted" }, + } + for _,test in ipairs(tests) do + ngx.say(processor.esi_eval_var(test)) + end + } +} + +--- request eval +[ + "GET /t", + "GET /t?var1=test1&var2=test2", + "GET /t?var2=test2", + "GET /t?empty=true", +] +--- no_error_log +[error] +--- response_body eval +[ +"default +default_quoted +default_quoted +value! +", + +"test1 +test2 +default_quoted +value! +", + +"default +test2 +default_quoted +value! 
+", + +"default +default_quoted +default_quoted +default +", +] From 668980464edb2b56600dd22cc677a042b30dd3c7 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 31 Aug 2017 16:38:56 +0100 Subject: [PATCH 12/90] Fix for esi_arg[key] is a table --- lib/ledge/esi/processor_1_0.lua | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/lib/ledge/esi/processor_1_0.lua b/lib/ledge/esi/processor_1_0.lua index b1501021..6fce7012 100644 --- a/lib/ledge/esi/processor_1_0.lua +++ b/lib/ledge/esi/processor_1_0.lua @@ -134,7 +134,11 @@ local function esi_eval_var(var) -- __tostring metamethod turns these back into encoded URI args return tostring(esi_args) else - return tostring(esi_args[key] or default) + local value = esi_args[key] or default + if type(value) == "table" then + return tbl_concat(value, ",") + end + return tostring(value) end else local custom_variables = ngx.ctx.__ledge_esi_custom_variables @@ -159,6 +163,7 @@ local function esi_eval_var(var) return default end end +_M.esi_eval_var = esi_eval_var -- Used in esi_replace_vars. Declared locally to avoid runtime closure From 6c760c16847cffe83d3ab1d8b044e5316e90c0e7 Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 4 Sep 2017 13:14:46 +0100 Subject: [PATCH 13/90] Add test for broken entities --- t/02-integration/memory_pressure.t | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/t/02-integration/memory_pressure.t b/t/02-integration/memory_pressure.t index 3b554aaa..7ec191b9 100644 --- a/t/02-integration/memory_pressure.t +++ b/t/02-integration/memory_pressure.t @@ -110,10 +110,12 @@ location "/mem_pressure_1" { } --- request eval ["GET /mem_pressure_1_prx?key=main", -"GET /mem_pressure_1_prx?key=headers"] +"GET /mem_pressure_1_prx?key=headers", +"GET /mem_pressure_1_prx?key=entities"] --- response_body eval ["MISSED: main", -"MISSED: headers"] +"MISSED: headers", +"MISSED: entities"] --- no_error_log [error] From 3c1fc7ffdfff7e2f74c42f26005bc581285a62fc Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 4 Sep 2017 13:30:15 +0100 Subject: [PATCH 14/90] Add test for entity deleted during serve --- t/02-integration/memory_pressure.t | 38 ++++++++++++++++++++++++++++++ 1 file changed, 38 insertions(+) diff --git a/t/02-integration/memory_pressure.t b/t/02-integration/memory_pressure.t index 7ec191b9..686a6320 100644 --- a/t/02-integration/memory_pressure.t +++ b/t/02-integration/memory_pressure.t @@ -169,3 +169,41 @@ GET /mem_pressure_2_prx --- response_body: ORIGIN --- no_error_log [error] + +=== TEST 3: Prime and break active entity during read +--- http_config eval: $::HttpConfig +--- config +location "/mem_pressure_3_prx" { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + if not ngx.req.get_uri_args()["prime"] then + handler:bind("before_serve", function(res) + ngx.log(ngx.DEBUG, "Deleting: ", res.entity_id) + handler.storage:delete(res.entity_id) + end) + else + -- Dummy log for prime request + ngx.log(ngx.DEBUG, "entity removed during read") + end + ngx.req.set_uri_args({}) + handler:run() + } +} +location "/mem_pressure_3" { + default_type text/html; + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.print("ORIGIN") + } +} +--- request eval +["GET /mem_pressure_3_prx?prime=true", "GET /mem_pressure_3_prx"] +--- response_body eval +["ORIGIN", ""] +--- response_headers_like eval +["X-Cache: MISS from .*", "X-Cache: HIT from .*"] +--- no_error_log +[error] +--- error_log +entity removed during read From 
58c572ed875e250a95f7e50d3666410e19731f21 Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 4 Sep 2017 13:38:28 +0100 Subject: [PATCH 15/90] Set chunk to nil when ngx.null returned from redis --- lib/ledge/storage/redis.lua | 1 + 1 file changed, 1 insertion(+) diff --git a/lib/ledge/storage/redis.lua b/lib/ledge/storage/redis.lua index 13b73e3d..119d0058 100644 --- a/lib/ledge/storage/redis.lua +++ b/lib/ledge/storage/redis.lua @@ -247,6 +247,7 @@ function _M.get_reader(self, res) "entity removed during read, ", entity_keys.body ) + chunk = nil end return chunk, nil, has_esi == "true" From 4bf4f7d26fe399e478eef7e0a4ba1c4ad5c9a098 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 5 Sep 2017 04:11:09 +0100 Subject: [PATCH 16/90] Test for partial cache entry with stale headers --- t/02-integration/memory_pressure.t | 72 ++++++++++++++++++++++++++++++ 1 file changed, 72 insertions(+) diff --git a/t/02-integration/memory_pressure.t b/t/02-integration/memory_pressure.t index 686a6320..606d6049 100644 --- a/t/02-integration/memory_pressure.t +++ b/t/02-integration/memory_pressure.t @@ -207,3 +207,75 @@ location "/mem_pressure_3" { [error] --- error_log entity removed during read + +=== TEST 4: Prime some cache - stale headers +--- http_config eval: $::HttpConfig +--- config +location "/mem_pressure_4_prx" { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge").create_handler():run() + } +} +location "/mem_pressure_4" { + default_type text/html; + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600, stale-if-error=2592000, stale-while-revalidate=129600" + ngx.header["Surrogate-Control"] = [[content="ESI/1.0"]] + ngx.print("Key: ", ngx.req.get_uri_args()["key"]) + } +} +--- request eval +["GET /mem_pressure_4_prx?key=main", +"GET /mem_pressure_4_prx?key=headers", +"GET /mem_pressure_4_prx?key=entities"] +--- response_body eval +["Key: main", +"Key: headers", +"Key: entities"] +--- no_error_log +[error] + + +=== TEST 4b: Break each key, in a different way for each, then try to serve +--- http_config eval: $::HttpConfig +--- config +location "/mem_pressure_4_prx" { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local redis = require("ledge").create_redis_connection() + local handler = require("ledge").create_handler() + local key_chain = handler:cache_key_chain() + + local evict = ngx.req.get_uri_args()["key"] + local key = key_chain[evict] + ngx.log(ngx.DEBUG, "will evict: ", key) + local res, err = redis:del(key) + if not res then + ngx.log(ngx.ERR, "could not evict: ", err) + end + redis:set(evict, "true") + ngx.log(ngx.DEBUG, tostring(res)) + + redis:close() + + handler:run() + } +} + +location "/mem_pressure_4" { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=0" + ngx.print("MISSED: ", ngx.req.get_uri_args()["key"]) + } +} +--- request eval +["GET /mem_pressure_4_prx?key=main", +"GET /mem_pressure_4_prx?key=headers", +"GET /mem_pressure_4_prx?key=entities"] +--- response_body eval +["MISSED: main", +"MISSED: headers", +"MISSED: entities"] +--- no_error_log +[error] From 0dd90119e26424b0c141fec8e732efa1b6ab3820 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 5 Sep 2017 04:11:40 +0100 Subject: [PATCH 17/90] hgetall returns empty list when hash is missing --- lib/ledge/response.lua | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/ledge/response.lua b/lib/ledge/response.lua index a42d8bf5..c92fbe6d 100644 --- a/lib/ledge/response.lua +++ b/lib/ledge/response.lua @@ -201,7 +201,7 @@ function _M.read(self) -- No 
cache entry for this key local cache_parts_len = #cache_parts - if not cache_parts_len then + if not cache_parts_len or cache_parts_len == 0 then ngx_log(ngx_INFO, "live entity has no data") return nil end From 6801995911d67d81e97e587d0ee6140335ba46ac Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 5 Sep 2017 11:46:10 +0100 Subject: [PATCH 18/90] Unit tests for esi_replace_vars --- lib/ledge/esi/processor_1_0.lua | 2 + t/01-unit/processor_1_0.t | 81 +++++++++++++++++++++++++++++++++ 2 files changed, 83 insertions(+) diff --git a/lib/ledge/esi/processor_1_0.lua b/lib/ledge/esi/processor_1_0.lua index 6fce7012..6e01dfaf 100644 --- a/lib/ledge/esi/processor_1_0.lua +++ b/lib/ledge/esi/processor_1_0.lua @@ -317,6 +317,7 @@ local function _esi_condition_lexer(condition) return true, tbl_concat(tokens or {}, " ") end +_M._esi_condition_lexer = _esi_condition_lexer local function _esi_evaluate_condition(condition) @@ -381,6 +382,7 @@ local function esi_replace_vars(chunk) return chunk end +_M.esi_replace_vars = esi_replace_vars function _M.esi_fetch_include(self, include_tag, buffer_size) diff --git a/t/01-unit/processor_1_0.t b/t/01-unit/processor_1_0.t index 6eb65be9..390f6983 100644 --- a/t/01-unit/processor_1_0.t +++ b/t/01-unit/processor_1_0.t @@ -427,3 +427,84 @@ default_quoted default ", ] + +=== TEST 9: esi_replace_vars +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local processor = require("ledge.esi.processor_1_0") + local tests = { + -- When tags + { + ["chunk"] = [[]], + ["res"] = [[]], + ["msg"] = "vars in when tag" + }, + { + ["chunk"] = [[]], + ["res"] = [[]], + ["msg"] = "vars in when tag - whitespace" + }, + { + ["chunk"] = [[]], + ["res"] = [[]], + ["msg"] = "vars in when tag - quoting" + }, + + -- vars tags + { + ["chunk"] = [[$(QUERY_STRING)]], + ["res"] = [[test_param=test]], + ["msg"] = "vars tag" + }, + { + ["chunk"] = [[ $(QUERY_STRING{test_param}) ]], + ["res"] = [[ test ]], + ["msg"] = "vars tag - whitespace" + }, + { + ["chunk"] = [[
+
+            $(QUERY_STRING)
+
+            ]],
+                ["res"] = [[
+
+            test_param=test
+
+            ]],
+                ["msg"] = "vars tag - html tags"
+            },
+            {
+                ["chunk"] = [[]],
+                ["res"] = [[]],
+                ["msg"] = "empty vars tags removed"
+            },
+            {
+                ["chunk"] = [[
+
+            foo
+
+            ]],
+                ["res"] = [[
+
+            foo
+
]], + ["msg"] = "empty vars tags removed - content preserved" + }, + + -- other esi tags + { + ["chunk"] = [[$(QUERY_STRING)]], + ["res"] = [[test_param=test]], + ["msg"] = "foo tag" + }, + + } + for _, t in pairs(tests) do + local output = processor.esi_replace_vars(t["chunk"]) + ngx.log(ngx.DEBUG, "'", output, "'") + assert(output == t["res"], "esi_replace_vars mismatch: "..t["msg"] ) + end + ngx.say("OK") + } +} + +--- request +GET /t?test_param=test +--- no_error_log +[error] +--- response_body +OK + From cfe210938ba0760aed0615c63ac116d1fb3a9511 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 5 Sep 2017 13:35:27 +0100 Subject: [PATCH 19/90] Unit tests for ESI process_escaping --- lib/ledge/esi/processor_1_0.lua | 1 + t/01-unit/processor_1_0.t | 41 +++++++++++++++++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/lib/ledge/esi/processor_1_0.lua b/lib/ledge/esi/processor_1_0.lua index 6e01dfaf..23fe758a 100644 --- a/lib/ledge/esi/processor_1_0.lua +++ b/lib/ledge/esi/processor_1_0.lua @@ -556,6 +556,7 @@ local function process_escaping(chunk, res, recursion) return chunk end end +_M.process_escaping = process_escaping -- Assumed chunk contains a complete conditional instruction set. Handles diff --git a/t/01-unit/processor_1_0.t b/t/01-unit/processor_1_0.t index 390f6983..34a54d2d 100644 --- a/t/01-unit/processor_1_0.t +++ b/t/01-unit/processor_1_0.t @@ -508,3 +508,44 @@ GET /t?test_param=test --- response_body OK + +=== TEST 12: process_escaping +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local processor = require("ledge.esi.processor_1_0") + local tests = { + { + ["chunk"] = [[Lorem ipsum dolor sit amet, consectetur adipiscing elit.]], + ["res"] = [[Lorem ipsum dolor sit amet, consectetur adipiscing elit.]], + ["msg"] = "nothing to escape" + }, + { + ["chunk"] = [[Loremconsectetur adipiscing elit.]], + ["res"] = [[Lorem ipsum dolor sit amet, consectetur adipiscing elit.]], + ["msg"] = "no esi inside" + }, + { + ["chunk"] = [[Loremconsectetur adipiscing elit.]], + ["res"] = [[Lorem $(QUERY_STRING)ipsum dolor sit amet, consectetur adipiscing elit.]], + ["msg"] = "esi:vars inside" + }, + + } + for _, t in pairs(tests) do + local output = processor.process_escaping(t["chunk"]) + ngx.log(ngx.DEBUG, "'", output, "'") + assert(output == t["res"], "process_escaping mismatch: "..t["msg"] ) + end + ngx.say("OK") + } +} + +--- request +GET /t?test_param=test +--- no_error_log +[error] +--- response_body +OK + From 0d4d24ba7eefa5fb22248c5eb519a95026c17b4d Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 5 Sep 2017 15:09:56 +0100 Subject: [PATCH 20/90] Tests for ensuring main key is completely replaced on cache update --- t/01-unit/response.t | 58 +++++++++++++++++++++++++++++-- t/02-integration/cache.t | 73 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 128 insertions(+), 3 deletions(-) diff --git a/t/01-unit/response.t b/t/01-unit/response.t index 8544c413..d9b21a37 100644 --- a/t/01-unit/response.t +++ b/t/01-unit/response.t @@ -284,9 +284,9 @@ location /t { ) local ok, err = res2:read() - assert(ok and not err) + assert(ok and not err, "res2 should save without err") - assert(res2.uri == "http://example.com") + assert(res2.uri == "http://example.com", "res2 uri") res2.header["X-Save-Me"] = "ok" res2:save(60) @@ -297,7 +297,7 @@ location /t { ) res3:read() - assert(res3.header["X-Save-Me"] == "ok") + assert(res3.header["X-Save-Me"] == "ok", "res3 headers") local ok, err = res3:set_and_save("size", 99) assert(ok and not 
err, "set_and_save should return positively") @@ -372,3 +372,55 @@ GET /t --- error_code: 200 --- no_error_log [error] + +=== TEST 8: save should replace the has_esi flag +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local handler = require("ledge").create_handler() + local redis = require("ledge").create_redis_connection() + handler.redis = redis + + local res, err = require("ledge.response").new( + handler.redis, + handler:cache_key_chain() + ) + + res.uri = "http://example.com" + res.status = 200 + + local ok, err = res:save(60) + assert(ok and not err, "res should save without err") + + res:set_and_save("has_esi", "dummy") + + local res2, err = require("ledge.response").new( + handler.redis, + handler:cache_key_chain() + ) + + local ok, err = res2:read() + assert(ok and not err, "res2 should save without err") + + assert(res2.uri == "http://example.com", "res2 uri") + assert(res2.has_esi == "dummy", "res2 has_esi") + + res2.header["X-Save-Me"] = "ok" + res2:save(60) + + local res3, err = require("ledge.response").new( + handler.redis, + handler:cache_key_chain() + ) + res3:read() + + assert(res3.header["X-Save-Me"] == "ok", "res3 headers") + assert(res3.has_esi == false, "res3 has_esi: "..tostring(res3.has_esi)) + + } +} +--- request +GET /t +--- no_error_log +[error] diff --git a/t/02-integration/cache.t b/t/02-integration/cache.t index e391ba20..d3b8b02f 100644 --- a/t/02-integration/cache.t +++ b/t/02-integration/cache.t @@ -857,3 +857,76 @@ X-Cache: MISS from .* TEST 16d --- no_error_log [error] + + +=== TEST 17: Main key is completely overriden +--- http_config eval: $::HttpConfig +--- config +location /cache_17_modify { + rewrite ^(.*)_modify$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + local handler = require("ledge").create_handler() + local key = handler:cache_key_chain().main + local redis = require("ledge").create_redis_connection() + + -- Add new field to main key + redis:hset(key, "bogus_field", "foobar") + + -- Print result from redis + local main, err = redis:hgetall(key) + main = redis:array_to_hash(main) + ngx.print(key, " bogus_field: ", main["bogus_field"]) + + } +} +location /cache_17_check { + rewrite ^(.*)_check$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + local handler = require("ledge").create_handler() + local key = handler:cache_key_chain().main + local redis = require("ledge").create_redis_connection() + + -- Print result from redis + local main, err = redis:hgetall(key) + main = redis:array_to_hash(main) + ngx.print(key, " bogus_field: ", main["bogus_field"]) + } +} +location /cache_17_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge").create_handler():run() + } +} +location /cache_17 { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=60" + ngx.print("TEST 17") + } +} +--- request eval +[ +"GET /cache_17_prx", +"GET /cache_17_modify", +"GET /cache_17_prx", +"GET /cache_17_check", +] +--- more_headers eval +[ +"", +"", +"Cache-Control: no-cache", +"", +] +--- response_body eval +[ +"TEST 17", +"ledge:cache:http:localhost:/cache_17:::main bogus_field: foobar", +"TEST 17", +"ledge:cache:http:localhost:/cache_17:::main bogus_field: nil", +] + +--- no_error_log +[error] From b718983e736b6456cc04ab10f63bc3ba6913405b Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 5 Sep 2017 15:10:23 +0100 Subject: [PATCH 21/90] Delete the main key before setting new values --- lib/ledge/response.lua | 
3 +++ 1 file changed, 3 insertions(+) diff --git a/lib/ledge/response.lua b/lib/ledge/response.lua index c92fbe6d..4decf9f8 100644 --- a/lib/ledge/response.lua +++ b/lib/ledge/response.lua @@ -362,6 +362,9 @@ function _M.save(self, keep_cache_for) self.header["Date"] = ngx_http_time(ngx_time()) end + local ok, err = redis:del(key_chain.main) + if not ok then ngx_log(ngx_ERR, err) end + local ok, err = redis:hmset(key_chain.main, "entity", self.entity_id, "status", self.status, From 1897e6fef91f00b91ad94bf28f7290b52ec6999c Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 5 Sep 2017 15:23:48 +0100 Subject: [PATCH 22/90] Test for incorrect has_esi flag on main key --- t/02-integration/cache.t | 2 -- t/02-integration/esi.t | 51 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 51 insertions(+), 2 deletions(-) diff --git a/t/02-integration/cache.t b/t/02-integration/cache.t index d3b8b02f..058f2b28 100644 --- a/t/02-integration/cache.t +++ b/t/02-integration/cache.t @@ -865,7 +865,6 @@ TEST 16d location /cache_17_modify { rewrite ^(.*)_modify$ $1 break; content_by_lua_block { - local handler = require("ledge").create_handler() local handler = require("ledge").create_handler() local key = handler:cache_key_chain().main local redis = require("ledge").create_redis_connection() @@ -883,7 +882,6 @@ location /cache_17_modify { location /cache_17_check { rewrite ^(.*)_check$ $1 break; content_by_lua_block { - local handler = require("ledge").create_handler() local handler = require("ledge").create_handler() local key = handler:cache_key_chain().main local redis = require("ledge").create_redis_connection() diff --git a/t/02-integration/esi.t b/t/02-integration/esi.t index 2e94d839..f27004b0 100644 --- a/t/02-integration/esi.t +++ b/t/02-integration/esi.t @@ -2667,3 +2667,54 @@ OK X-Cache: MISS from .* --- no_error_log [error] + +=== TEST 36: No error if res.has_esi incorrectly set_debug +--- http_config eval: $::HttpConfig +--- config +location /esi_36_break { + rewrite ^(.*)_break$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + local key = handler:cache_key_chain().main + local redis = require("ledge").create_redis_connection() + + -- Incorrectly set has_esi flag on main key + redis:hset(key, "has_esi", "ESI/1.0") + ngx.print("OK") + } +} +location /esi_36_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + -- No surrogate control here + require("ledge").create_handler():run() + } +} +location /esi_36 { + default_type text/html; + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=60" + ngx.print("Hello") + } +} +--- request eval +[ + "GET /esi_36_prx", + "GET /esi_36_break", + "GET /esi_36_prx", +] +--- response_body eval +[ + "Hello", + "OK", + "Hello", +] +--- response_headers_like eval +[ + "X-Cache: MISS from .*", + "", + "X-Cache: HIT from .*", +] +--- no_error_log +[error] From 1554065d31d6718f346d6b5d8d7a3f87646211b1 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 5 Sep 2017 15:24:07 +0100 Subject: [PATCH 23/90] Handle failure to select esi processor in state machine --- lib/ledge/state_machine/states.lua | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/lib/ledge/state_machine/states.lua b/lib/ledge/state_machine/states.lua index a11ac843..3a3a79a3 100644 --- a/lib/ledge/state_machine/states.lua +++ b/lib/ledge/state_machine/states.lua @@ -168,7 +168,14 @@ return { -- yet, so we must do that now -- TODO: Perhaps the state machine can load the 
processor to avoid this weird check if res.has_esi then - handler.esi_processor = esi.choose_esi_processor(handler) + local p, err = esi.choose_esi_processor(handler) + if not p then + -- This shouldn't happen + -- if res.has_esi is set then a processor should be selectedable + return sm:e "esi_process_not_required" + else + handler.esi_processor = p + end else -- We know there's nothing to do return sm:e "esi_process_not_required" From 4c6c016525e1e5e05649d67bbf317e552e47165e Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 6 Sep 2017 11:15:52 +0100 Subject: [PATCH 24/90] Remove incorrect log message --- lib/ledge/response.lua | 1 - 1 file changed, 1 deletion(-) diff --git a/lib/ledge/response.lua b/lib/ledge/response.lua index 4decf9f8..2d7299d7 100644 --- a/lib/ledge/response.lua +++ b/lib/ledge/response.lua @@ -202,7 +202,6 @@ function _M.read(self) -- No cache entry for this key local cache_parts_len = #cache_parts if not cache_parts_len or cache_parts_len == 0 then - ngx_log(ngx_INFO, "live entity has no data") return nil end From d8d0380bf2e013592825b820cd7a767c4cb4ec90 Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 6 Sep 2017 11:21:02 +0100 Subject: [PATCH 25/90] Unit tests for collect_entity job --- t/01-unit/jobs.t | 114 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 114 insertions(+) create mode 100644 t/01-unit/jobs.t diff --git a/t/01-unit/jobs.t b/t/01-unit/jobs.t new file mode 100644 index 00000000..76146a0c --- /dev/null +++ b/t/01-unit/jobs.t @@ -0,0 +1,114 @@ +use Test::Nginx::Socket 'no_plan'; +use Cwd qw(cwd); + +my $pwd = cwd(); + +$ENV{TEST_NGINX_PORT} |= 1984; +$ENV{TEST_LEDGE_REDIS_DATABASE} |= 2; +$ENV{TEST_LEDGE_REDIS_QLESS_DATABASE} |= 3; +$ENV{TEST_COVERAGE} ||= 0; + +our $HttpConfig = qq{ +lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;;"; + +lua_shared_dict ledge_test 1m; + +init_by_lua_block { + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end + + qless_db = $ENV{TEST_LEDGE_REDIS_QLESS_DATABASE} + require("ledge").configure({ + redis_connector_params = { + url = "redis://127.0.0.1:6379/$ENV{TEST_LEDGE_REDIS_DATABASE}", + }, + qless_db = qless_db, + }) + + require("ledge").set_handler_defaults({ + upstream_host = "127.0.0.1", + upstream_port = $ENV{TEST_NGINX_PORT}, + storage_driver_config = { + redis_connector_params = { + db = $ENV{TEST_LEDGE_REDIS_DATABASE}, + }, + } + }) +} + +}; # HttpConfig + +no_long_string(); +no_diff(); +run_tests(); + +__DATA__ +=== TEST 1: Collect entity +Prime cache then collect the entity +--- http_config eval: $::HttpConfig +--- config +location /t { + rewrite ^ /cache break; + content_by_lua_block { + local collect_entity = require("ledge.jobs.collect_entity") + local handler = require("ledge").create_handler() + + local entity_id = ngx.shared.ledge_test:get("entity_id") + ngx.log(ngx.DEBUG, "Collecting: ", entity_id) + + local job = { + data = { + entity_id = entity_id, + storage_driver = handler.config.storage_driver, + storage_driver_config = handler.config.storage_driver_config, + } + } + local ok, err, msg = collect_entity.perform(job) + assert(err == nil, "collect_entity should not return an error") + + local storage = require("ledge").create_storage_connection( + handler.config.storage_driver, + handler.config.storage_driver_config + ) + local ok, err = storage:exists(entity_id) + assert(ok == false, "Entity should not exist") + + -- Failure cases + job.data.storage_driver = "bad" + local ok, err, msg = 
collect_entity.perform(job) + ngx.log(ngx.DEBUG, msg) + assert(err == "job-error" and msg ~= nil, "collect_entity should return job-error") + + job.data.storage_driver = handler.config.storage_driver + job.data.storage_driver_config = { bad_config = "here" } + local ok, err, msg = collect_entity.perform(job) + ngx.log(ngx.DEBUG, msg) + assert(err == "job-error" and msg ~= nil, "collect_entity should return job-error") + } +} +location /cache_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + handler:bind("before_serve", function(res) + ngx.log(ngx.DEBUG, "primed entity: ", res.entity_id) + ngx.shared.ledge_test:set("entity_id", res.entity_id) + end) + handler:run() + } +} + +location /cache { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.say("TEST 1") + } +} +--- request eval +[ +"GET /cache_prx", +"GET /t" +] +--- no_error_log +[error] From a454a81da6ac06c2ab9b21e621efdc7bfd9c3a2a Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 6 Sep 2017 13:01:16 +0100 Subject: [PATCH 26/90] Unit tests for revalidate job --- t/01-unit/jobs.t | 178 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 178 insertions(+) diff --git a/t/01-unit/jobs.t b/t/01-unit/jobs.t index 76146a0c..75974ab4 100644 --- a/t/01-unit/jobs.t +++ b/t/01-unit/jobs.t @@ -112,3 +112,181 @@ location /cache { ] --- no_error_log [error] + + +=== TEST 2: Revalidate +Prime, Purge, revalidate +--- http_config eval: $::HttpConfig +--- config +location /t { + rewrite ^ /cache2 break; + content_by_lua_block { + local revalidate = require("ledge.jobs.revalidate") + local redis = require("ledge").create_redis_connection() + + local handler = require("ledge").create_handler() + + + local job = { + redis = redis, + data = { + key_chain = handler:cache_key_chain() + } + } + + + local ok, err, msg = revalidate.perform(job) + assert(err == nil, "revalidate should not return an error") + + assert(ngx.shared.ledge_test:get("test2") == "Revalidate Request received", + "Revalidate request was not received!" + ) + + + } +} +location /cache2_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + handler:run() + } +} + +location /cache2 { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=10" + ngx.print("TEST 2") + if string.find(ngx.req.get_headers().user_agent, "revalidate", 1, true) then + ngx.shared.ledge_test:set("test2", "Revalidate Request received") + end + } +} +--- request eval +[ +"GET /cache2_prx", +"PURGE /cache2_prx", +"GET /t" +] +--- no_error_log +[error] + +=== TEST 3: Revalidate - inline params +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local revalidate = require("ledge.jobs.revalidate") + + local job = { + data = { + reval_params = { + server_addr = ngx.var.server_addr, + server_port = ngx.var.server_port, + scheme = ngx.var.scheme, + uri = "/cache3", + connect_timeout = 1000, + send_timeout = 1000, + read_timeout = 1000, + keepalive_timeout = 60, + keepalive_poolsize = 10, + }, + reval_headers = { + ["X-Test"] = "test_header" + } + } + } + + local ok, err, msg = revalidate.perform(job) + assert(err == nil, "revalidate should not return an error") + + assert(ngx.shared.ledge_test:get("test3") == "test_header", + "Revalidate request was not received!" 
+ ) + + local job = { + data = { + reval_params = { + server_addr = ngx.var.server_addr, + server_port = ngx.var.server_port, + scheme = ngx.var.scheme, + uri = "/cache_slow", + connect_timeout = 1000, + send_timeout = 100, + read_timeout = 100, + keepalive_timeout = 60, + keepalive_poolsize = 10, + }, + reval_headers = { + ["X-Test"] = "test_header" + } + } + } + + local ok, err, msg = revalidate.perform(job) + assert(err == "job-error" and msg ~= nil, "revalidate should return an error") + + local job = { + data = { + reval_params = { + server_addr = ngx.var.server_addr, + server_port = ngx.var.server_port+1, + scheme = ngx.var.scheme, + uri = "/cache3", + connect_timeout = 1000, + send_timeout = 1000, + read_timeout = 1000, + keepalive_timeout = 60, + keepalive_poolsize = 10, + }, + reval_headers = { + ["X-Test"] = "test_header" + } + } + } + + local ok, err, msg = revalidate.perform(job) + ngx.log(ngx.DEBUG, msg) + assert(err == "job-error" and msg ~= nil, "revalidate should return an error") + + local job = { + redis = { + hgetall = function(...) return ngx.null end + }, + data = { + key_chain = {} + } + } + + local ok, err, msg = revalidate.perform(job) + ngx.log(ngx.DEBUG, msg) + assert(err == "job-error" and msg ~= nil, "revalidate should return an error") + + local job = { + redis = { + hgetall = function(...) return nil, "dummy error" end + }, + data = { + key_chain = {} + } + } + + local ok, err, msg = revalidate.perform(job) + ngx.log(ngx.DEBUG, msg) + assert(err == "job-error" and msg ~= nil, "revalidate should return an error") + } +} +location /cache3 { + content_by_lua_block { + ngx.shared.ledge_test:set("test3", ngx.req.get_headers()["X-Test"]) + } +} +location /cache_slow { + content_by_lua_block{ + ngx.sleep(1) + ngx.print("OK") + } +} +--- request +GET /t +--- error_code: 200 From 4881bbdd98515b95be6ebb2587583a04eb8b794a Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 6 Sep 2017 13:14:57 +0100 Subject: [PATCH 27/90] Use correct timeout vars in revalidate job resty-redis provides an array_to_hash utility already --- lib/ledge/jobs/revalidate.lua | 30 +++++++++--------------------- 1 file changed, 9 insertions(+), 21 deletions(-) diff --git a/lib/ledge/jobs/revalidate.lua b/lib/ledge/jobs/revalidate.lua index d2197fe7..dbaa6fb4 100644 --- a/lib/ledge/jobs/revalidate.lua +++ b/lib/ledge/jobs/revalidate.lua @@ -10,26 +10,16 @@ local _M = { -- Utility to return all items in a Redis hash as a Lua table. local function hgetall(redis, key) local res, err = redis:hgetall(key) - if not res then + if not res or res == ngx_null then return nil, "could not retrieve " .. tostring(key) .. " data:" .. tostring(err) end - local hash = {} - - local len = #res - for i = 1, len, 2 do - hash[res[i]] = res[i + 1] - end - - return hash + return redis:array_to_hash(res) end function _M.perform(job) - local redis = job.redis - local key_chain = job.data.key_chain - -- Normal background revalidation operates on stored metadata. -- A background fetch due to partial content from upstream however, uses the -- current request metadata for reval_headers / reval_params and passes it @@ -40,19 +30,17 @@ function _M.perform(job) -- If we don't have the metadata in job data, this is a background -- revalidation using stored metadata. 
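-- A minimal, illustration-only sketch of what redis:array_to_hash() does with
-- the flat { field1, value1, field2, value2, ... } reply that HGETALL produces
-- in lua-resty-redis; the hand-rolled loop removed above did exactly this:
local function array_to_hash_sketch(res)
    local hash = {}
    for i = 1, #res, 2 do
        hash[res[i]] = res[i + 1]
    end
    return hash
end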
if not reval_params and not reval_headers then - local err - reval_params, err = hgetall(redis, key_chain.reval_params) - if not reval_params or - reval_params == ngx_null or - not reval_params.server_addr then + local key_chain, redis, err = job.data.key_chain, job.redis, nil + reval_params, err = hgetall(redis, key_chain.reval_params) + if not reval_params or not next(reval_params) then return nil, "job-error", "Revalidation parameters are missing, presumed evicted. " .. tostring(err) end reval_headers, err = hgetall(redis, key_chain.reval_req_headers) - if not reval_headers or reval_headers == ngx_null then + if not reval_headers or not next(reval_headers) then return nil, "job-error", "Revalidation headers are missing, presumed evicted." end @@ -61,9 +49,9 @@ function _M.perform(job) -- Make outbound http request to revalidate local httpc = http.new() httpc:set_timeouts( - reval_params.upstream_connect_timeout, - reval_params.upstream_send_timeout, - reval_params.upstream_read_timeout + reval_params.connect_timeout, + reval_params.send_timeout, + reval_params.read_timeout ) local port = tonumber(reval_params.server_port) From f3d171df42eedea56e3bb883da5d0fa0b745079c Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 6 Sep 2017 15:37:41 +0100 Subject: [PATCH 28/90] Unit tests for purge job --- t/01-unit/jobs.t | 74 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) diff --git a/t/01-unit/jobs.t b/t/01-unit/jobs.t index 75974ab4..1f14c9fd 100644 --- a/t/01-unit/jobs.t +++ b/t/01-unit/jobs.t @@ -290,3 +290,77 @@ location /cache_slow { --- request GET /t --- error_code: 200 + +=== TEST 4: purge +--- http_config eval: $::HttpConfig +--- config +location /t { + rewrite ^ /cache break; + content_by_lua_block { + local purge_job = require("ledge.jobs.purge") + local handler = require("ledge").create_handler() + local heartbeat_flag = false + + local job = { + redis = require("ledge").create_redis_connection(), + data = { + key_chain = { main = "*::main" }, + keyspace_scan_count = 2, + purge_mode = "invalidate", + storage_driver = handler.config.storage_driver, + storage_driver_config = handler.config.storage_driver_config, + }, + ttl = function() return 5 end, + heartbeat = function() + heartbeat_flag = true + return heartbeat_flag + end, + } + + -- Failure cases + job.data.storage_driver = "bad" + local ok, err, msg = purge_job.perform(job) + ngx.log(ngx.DEBUG, msg) + assert(err == "redis-error" and msg ~= nil, "purge should return redis-error") + + job.data.storage_driver = handler.config.storage_driver + job.data.storage_driver_config = { bad_config = "here" } + local ok, err, msg = purge_job.perform(job) + ngx.log(ngx.DEBUG, msg) + assert(err == "redis-error" and msg ~= nil, "purge should return redis-error") + + -- Passing case + job.data.storage_driver_config = handler.config.storage_driver_config + + local ok, err, msg = purge_job.perform(job) + assert(err == nil, "purge should not return an error") + assert(heartbeat_flag == true, "Purge should heartbeat") + + + } +} +location /cache4_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler():run() + } +} + +location /cache4 { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.say("TEST 4") + } +} +--- request eval +[ +"GET /cache4_prx","GET /cache4_prx?a=1","GET /cache4_prx?a=2","GET /cache4_prx?a=3","GET /cache4_prx?a=4","GET /cache4_prx?a=5", +"GET /t", +"GET /cache4_prx?a=3" +] +--- response_headers_like eval +["X-Cache: 
MISS from .*", "X-Cache: MISS from .*","X-Cache: MISS from .*","X-Cache: MISS from .*","X-Cache: MISS from .*","X-Cache: MISS from .*", +"", +"X-Cache: MISS from .*"] +--- no_error_log +[error] From 421f16bfba31e15714db27f7e96f209f66a6003f Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 6 Sep 2017 15:51:47 +0100 Subject: [PATCH 29/90] Only use 1 handler instance for wildcard purging --- lib/ledge/jobs/purge.lua | 62 ++++++++++++++++++++++------------------ 1 file changed, 34 insertions(+), 28 deletions(-) diff --git a/lib/ledge/jobs/purge.lua b/lib/ledge/jobs/purge.lua index 7ba6b0a1..d9d13c6e 100644 --- a/lib/ledge/jobs/purge.lua +++ b/lib/ledge/jobs/purge.lua @@ -1,13 +1,10 @@ -local redis_connector = require "resty.redis.connector" -local response = require "ledge.response" local ipairs, tonumber = ipairs, tonumber local str_len = string.len local str_sub = string.sub local ngx_log = ngx.log +local ngx_DEBUG = ngx.DEBUG local ngx_ERR = ngx.ERR local ngx_null = ngx.null -local ngx_md5 = ngx.md5 -local tbl_getn = table.getn local purge = require("ledge.purge").purge local create_redis_slave_connection = require("ledge").create_redis_slave_connection @@ -18,6 +15,9 @@ local _M = { } +local magic_len = -(str_len("::main") + 1) + + -- Scans the keyspace for keys which match, and expires them. We do this against -- the slave Redis instance if available. function _M.perform(job) @@ -32,9 +32,23 @@ function _M.perform(job) job.redis_slave = slave end + -- Setup handler + local handler = require("ledge").create_handler() + handler.redis = job.redis + + local storage, err = require("ledge").create_storage_connection( + job.data.storage_driver, + job.data.storage_driver_config + ) + if not storage then + return nil, "redis-error", err + end + + handler.storage = storage + -- This runs recursively using the SCAN cursor, until the entire keyspace -- has been scanned. - local res, err = _M.expire_pattern(0, job) + local res, err = _M.expire_pattern(0, job, handler) if slave then close_redis_connection(slave) @@ -47,10 +61,10 @@ end -- Scans the keyspace based on a pattern (asterisk), and runs a purge for each cache entry -function _M.expire_pattern(cursor, job) +function _M.expire_pattern(cursor, job, handler) if job:ttl() < 10 then if not job:heartbeat() then - return false, "Failed to heartbeat job" + return nil, "Failed to heartbeat job" end end @@ -64,26 +78,18 @@ function _M.expire_pattern(cursor, job) if not res or res == ngx_null then return nil, "SCAN error: " .. 
tostring(err) else - if tbl_getn(res[2]) > 0 then - local handler = require("ledge").create_handler() - handler.redis = require("ledge").create_redis_connection() - handler.storage = require("ledge").create_storage_connection( - job.data.storage_driver, - job.data.storage_driver_config - ) - - for _,key in ipairs(res[2]) do - -- Strip the "main" suffix to find the cache key - local cache_key = str_sub(key, 1, -(str_len("::main") + 1)) - handler._cache_key = cache_key - - local ok, err = purge(handler, job.data.purge_mode) - if ok == nil and err then ngx_log(ngx_ERR, tostring(err)) end - - -- reset these so that handler can be reused - handler._cache_key_chain = {} - handler._cache_key = "" - end + for _,key in ipairs(res[2]) do + -- Strip the "main" suffix to find the cache key + handler._cache_key = str_sub(key, 1, magic_len) + + ngx_log(ngx_DEBUG, "Purging key: ", handler._cache_key) + + local ok, err = purge(handler, job.data.purge_mode) + if ok == nil and err then ngx_log(ngx_ERR, tostring(err)) end + + -- reset these so that handler can be reused + handler._cache_key_chain = {} + handler._cache_key = "" end local cursor = tonumber(res[1]) @@ -92,7 +98,7 @@ function _M.expire_pattern(cursor, job) end -- If we have a valid cursor, recurse to move on. - return _M.expire_pattern(cursor, job) + return _M.expire_pattern(cursor, job, handler) end end From 3a020d8f71304c7ff45ab63acbce6203870467bf Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 6 Sep 2017 16:33:42 +0100 Subject: [PATCH 30/90] Unit tests for purge.expire_keys --- t/01-unit/purge.t | 88 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 88 insertions(+) diff --git a/t/01-unit/purge.t b/t/01-unit/purge.t index 5c44b9a1..ab26abd6 100644 --- a/t/01-unit/purge.t +++ b/t/01-unit/purge.t @@ -86,3 +86,91 @@ location /t { GET /t --- no_error_log [error] + +=== TEST 2: expire keys +--- http_config eval: $::HttpConfig +--- config +location /t { + rewrite ^ /cache break; + content_by_lua_block { + local handler = require("ledge").create_handler() + local redis = require("ledge").create_redis_connection() + handler.redis = redis + + local storage = require("ledge").create_storage_connection( + handler.config.storage_driver, + handler.config.storage_driver_config + ) + handler.storage = storage + + local key_chain = handler:cache_key_chain() + local entity_id = handler:entity_id(key_chain) + + local ttl, err = redis:ttl(key_chain.main) + + local expire_keys = require("ledge.purge").expire_keys + + local ok, err = expire_keys(redis, storage, key_chain, entity_id) + if err then ngx.log(ngx.DEBUG, err) end + assert(ok, "expire_keys should return positively") + + local expires, err = redis:hget(key_chain.main, "expires") + ngx.log(ngx.DEBUG,"expires: ", expires, " <= ", ngx.now()) + assert(tonumber(expires) <= ngx.now(), "Key not expired") + + local new_ttl = redis:ttl(key_chain.main) + ngx.log(ngx.DEBUG, "ttl: ", tonumber(ttl), " > ", tonumber(new_ttl)) + assert(tonumber(ttl) > tonumber(new_ttl), "TTL not reduced") + + -- non-existent key + local ok, err = expire_keys(redis, storage, {main = "bogus_key"}, entity_id) + if err then ngx.log(ngx.DEBUG, err) end + assert(ok == false and err == nil, "return false with no error on missing key") + + -- Stub out a partial main key + redis:hset("bogus_key", "key", "value") + + local ok, err = expire_keys(redis, storage, {main = "bogus_key"}, entity_id) + if err then ngx.log(ngx.DEBUG, err) end + assert(ok == nil and err ~= nil, "return nil with error on broken key") + + -- String expires 
value + redis:hset("bogus_key", "expires", "now!") + + local ok, err = expire_keys(redis, storage, {main = "bogus_key"}, entity_id) + if err then ngx.log(ngx.DEBUG, err) end + assert(ok == nil and err ~= nil, "return nil with error on string expires") + + -- No TTL + redis:hset("bogus_key", "expires", ngx.now()+3600) + + local ok, err = expire_keys(redis, storage, {main = "bogus_key"}, entity_id) + if err then ngx.log(ngx.DEBUG, err) end + assert(ok == nil and err ~= nil, "return nil with error when no ttl") + + } +} +location /cache_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + handler:bind("before_serve", function(res) + ngx.log(ngx.DEBUG, "primed entity: ", res.entity_id) + end) + handler:run() + } +} + +location /cache { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.say("TEST 2") + } +} +--- request eval +[ +"GET /cache_prx", +"GET /t" +] +--- no_error_log +[error] From c6a385dc08dee27d4db1b4bffeaf96f7868fc421 Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 6 Sep 2017 16:42:47 +0100 Subject: [PATCH 31/90] Remove extra redis call in expire_keys, log redis errors --- lib/ledge/purge.lua | 79 +++++++++++++++++++++++++-------------------- 1 file changed, 44 insertions(+), 35 deletions(-) diff --git a/lib/ledge/purge.lua b/lib/ledge/purge.lua index 93b8e47f..a0a9d147 100644 --- a/lib/ledge/purge.lua +++ b/lib/ledge/purge.lua @@ -1,6 +1,8 @@ local pcall, tonumber, tostring, pairs = pcall, tonumber, tostring, pairs +local ngx_log = ngx.log +local ngx_ERR = ngx.ERR local ngx_null = ngx.null local ngx_time = ngx.time local ngx_md5 = ngx.md5 @@ -34,51 +36,58 @@ _M.create_purge_response = create_purge_response -- Expires the keys in key_chain and reduces the ttl in storage --- TODO review this for error cases etc local function expire_keys(redis, storage, key_chain, entity_id) - local exists, err = redis:exists(key_chain.main) - if exists == 1 then - local time = ngx_time() - local expires, err = redis:hget(key_chain.main, "expires") - if not expires or expires == ngx_null then - return nil, "could not determine existing expiry: " .. (err or "") - end + local ttl, err = redis:ttl(key_chain.main) + if not ttl or ttl == ngx_null or ttl == -1 then + return nil, "count not determine existing ttl: " .. (err or "") + end - -- If expires is in the past then this key is stale. Nothing to do here. - if tonumber(expires) <= time then - return false, nil - end + if ttl == -2 then + -- Key doesn't exist, do nothing + return false, nil + end - local ttl, err = redis:ttl(key_chain.main) - if not ttl or ttl == ngx_null then - return nil, "count not determine exsiting ttl: " .. (err or "") - end + local expires, err = redis:hget(key_chain.main, "expires") + expires = tonumber(expires) + + if not expires or expires == ngx_null then + return nil, "could not determine existing expiry: " .. (err or "") + end + + local time = ngx_time() - local ttl_reduction = expires - time - if ttl_reduction < 0 then ttl_reduction = 0 end + -- If expires is in the past then this key is stale. Nothing to do here. + if expires <= time then + return false, nil + end - redis:multi() + local ttl_reduction = expires - time + if ttl_reduction < 0 then ttl_reduction = 0 end + local new_ttl = ttl - ttl_reduction - -- Set the expires field of the main key to the new time, to control - -- its validity. 
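-- Rough sketch of the transaction shape the new expire_keys() builds, using
-- the surrounding locals (redis, key_chain, time, new_ttl) and assuming a
-- connected lua-resty-redis client: commands issued after multi() only queue
-- (each returns the status string "QUEUED"); the real replies come back from
-- exec(), or nil plus an error if the transaction could not run.
local _, err = redis:multi()
redis:hset(key_chain.main, "expires", tostring(time - 1))
for _, key in pairs(key_chain) do
    redis:expire(key, new_ttl)
end
local replies, exec_err = redis:exec()  -- array of replies on success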
- redis:hset(key_chain.main, "expires", tostring(time - 1)) + local _, e = redis:multi() + if e then ngx_log(ngx_ERR, e) end - -- Set new TTLs for all keys in the key chain - key_chain.fetching_lock = nil -- this looks after itself - for _,key in pairs(key_chain) do - redis:expire(key, ttl - ttl_reduction) - end + -- Set the expires field of the main key to the new time, to control + -- its validity. + _, e = redis:hset(key_chain.main, "expires", tostring(time - 1)) + if e then ngx_log(ngx_ERR, e) end - storage:set_ttl(entity_id, ttl - ttl_reduction) + -- Set new TTLs for all keys in the key chain + key_chain.fetching_lock = nil -- this looks after itself + for _,key in pairs(key_chain) do + local _, e = redis:expire(key, new_ttl) + if e then ngx_log(ngx_ERR, e) end + end - local ok, err = redis:exec() - if err then - return nil, err - else - return true, nil - end + _, e = storage:set_ttl(entity_id, new_ttl) + if e then ngx_log(ngx_ERR, e) end + + local ok, err = redis:exec() + if err then + return nil, err else - return false, nil + return true, nil end end _M.expire_keys = expire_keys From 340d6e9185a6b8678b0cddca1fe936a4d6aec2e2 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 7 Sep 2017 10:11:02 +0100 Subject: [PATCH 32/90] Correctly check key_chain in entity_id() --- lib/ledge/handler.lua | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index 7d1bf8f0..53443cf6 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -288,10 +288,9 @@ _M.cache_key_chain = cache_key_chain function _M.entity_id(self, key_chain) - if not key_chain and key_chain.main then return nil end - local redis = self.redis + if not key_chain or not key_chain.main then return nil end - local entity_id, err = redis:hget(key_chain.main, "entity") + local entity_id, err = self.redis:hget(key_chain.main, "entity") if not entity_id or entity_id == ngx_null then return nil, err end From 15b1f418d519aef8fbf979f18e86b3dab4c25109 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 7 Sep 2017 13:41:17 +0100 Subject: [PATCH 33/90] Add luacheck config --- .luacheckrc | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 .luacheckrc diff --git a/.luacheckrc b/.luacheckrc new file mode 100644 index 00000000..77ab5dfd --- /dev/null +++ b/.luacheckrc @@ -0,0 +1,2 @@ +std = "ngx_lua" +redefined = false From ee81dd99ad7193e3a7db5e282afe11ebc42c3878 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 7 Sep 2017 13:44:38 +0100 Subject: [PATCH 34/90] Add Make target for running luacheck --- Makefile | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 7546ae45..f29242cd 100644 --- a/Makefile +++ b/Makefile @@ -65,7 +65,7 @@ INSTALL ?= install .PHONY: all install test test_all start_redis_instances stop_redis_instances \ start_redis_instance stop_redis_instance cleanup_redis_instance flush_db \ - check_ports test_ledge test_sentinel coverage delete_sentinel_config + check_ports test_ledge test_sentinel coverage delete_sentinel_config check all: ; @@ -156,3 +156,6 @@ coverage: flush_db @$(TEST_LEDGE_REDIS_VARS) TEST_COVERAGE=1 $(PROVE) $(TEST_FILE) @luacov @tail -21 luacov.report.out + +check: + luacheck lib From e7b32495811305e9e5439ce690eba278399fe8d0 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 7 Sep 2017 13:51:09 +0100 Subject: [PATCH 35/90] Remove unused variables and fix luacheck warnings --- lib/ledge.lua | 8 ++---- lib/ledge/esi.lua | 6 ++--- lib/ledge/esi/processor_1_0.lua | 26 +++++++++++------- 
lib/ledge/esi/tag_parser.lua | 1 + lib/ledge/handler.lua | 13 +++------ lib/ledge/header_util.lua | 2 -- lib/ledge/jobs/purge.lua | 2 +- lib/ledge/jobs/revalidate.lua | 7 ++--- lib/ledge/purge.lua | 6 +++-- lib/ledge/range.lua | 10 ++++--- lib/ledge/response.lua | 13 ++++----- lib/ledge/state_machine/actions.lua | 19 +++++--------- lib/ledge/state_machine/events.lua | 2 +- lib/ledge/state_machine/pre_transitions.lua | 2 +- lib/ledge/state_machine/states.lua | 29 +++++++++------------ lib/ledge/storage/redis.lua | 9 +++---- lib/ledge/util.lua | 10 +++---- lib/ledge/worker.lua | 4 +-- 18 files changed, 74 insertions(+), 95 deletions(-) diff --git a/lib/ledge.lua b/lib/ledge.lua index 5068e67b..e4f90c9e 100644 --- a/lib/ledge.lua +++ b/lib/ledge.lua @@ -1,9 +1,6 @@ -local setmetatable, require, error = - setmetatable, require, error +local setmetatable, require = + setmetatable, require - -local ngx_log = ngx.log -local ngx_ERR = ngx.ERR local ngx_get_phase = ngx.get_phase local ngx_null = ngx.null @@ -13,7 +10,6 @@ local util = require("ledge.util") local tbl_copy = util.table.copy local tbl_copy_merge_defaults = util.table.copy_merge_defaults local fixed_field_metatable = util.mt.fixed_field_metatable -local get_fixed_field_metatable_proxy = util.mt.get_fixed_field_metatable_proxy local redis_connector = require("resty.redis.connector") diff --git a/lib/ledge/esi.lua b/lib/ledge/esi.lua index 0b80a8b7..194ef5b3 100644 --- a/lib/ledge/esi.lua +++ b/lib/ledge/esi.lua @@ -1,8 +1,6 @@ local h_util = require "ledge.header_util" -local util = require "ledge.util" -local tostring, type, tonumber, next = - tostring, type, tonumber, next +local type, tonumber = type, tonumber local str_sub = string.sub local str_find = string.find @@ -18,7 +16,6 @@ local ngx_req_set_uri_args = ngx.req.set_uri_args local ngx_var = ngx.var local ngx_log = ngx.log local ngx_ERR = ngx.ERR -local ngx_INFO = ngx.INFO local _M = { @@ -167,6 +164,7 @@ function _M.filter_esi_args(handler) "^" .. esi_args_prefix .. "(\\S+)", "oj" ) + if err then ngx_log(ngx_ERR, err) end if m and m[1] then has_esi_args = true diff --git a/lib/ledge/esi/processor_1_0.lua b/lib/ledge/esi/processor_1_0.lua index 23fe758a..a0c1eff1 100644 --- a/lib/ledge/esi/processor_1_0.lua +++ b/lib/ledge/esi/processor_1_0.lua @@ -7,7 +7,8 @@ local tostring, type, tonumber, next, unpack, pcall, setfenv = tostring, type, tonumber, next, unpack, pcall, setfenv local str_sub = string.sub -local str_find = string.find +-- TODO: Find places we can use str_find over ngx_re_find +--local str_find = string.find local tbl_concat = table.concat local tbl_insert = table.insert @@ -44,8 +45,6 @@ function _M.new(handler) end -local default_recursion_limit = 10 - -- $1: variable name (e.g. 
QUERY_STRING) -- $2: substructure key -- $3: default value @@ -239,6 +238,7 @@ local function _esi_condition_lexer(condition) repeat local token, err = ngx_re_match(condition, p, "", ctx) + if err then ngx_log(ngx_ERR, err) end if token then local number, string, operator = token[1], token[2], token[3] local token_type @@ -405,6 +405,7 @@ function _M.esi_fetch_include(self, include_tag, buffer_size) [[src="([^"]+)"]], "oj" ) + if err then ngx_log(ngx_ERR, err) end if src then local httpc = http.new() @@ -509,6 +510,8 @@ function _M.esi_fetch_include(self, include_tag, buffer_size) local ch, err = reader(buffer_size) if ch then co_yield(ch) + elseif err then + ngx_log(ngx_ERR, err) end until not ch end @@ -589,14 +592,12 @@ local function evaluate_conditionals(chunk, res, recursion) local inner_parser = tag_parser.new(choose.contents) - local when_found = false local when_matched = false local otherwise repeat local tag = inner_parser:next("esi:when|esi:otherwise") if tag and tag.closing then if tag.tagname == "esi:when" and when_matched == false then - when_found = true local function process_when(m_when) -- We only show the first matching branch, others @@ -621,11 +622,12 @@ local function evaluate_conditionals(chunk, res, recursion) return "" end - local when_res = ngx_re_sub( + local ok, err = ngx_re_sub( tag.whole, esi_when_pattern, process_when ) + if not ok and err then ngx_log(ngx_ERR, err) end -- Break after the first winning expression elseif tag.tagname == "esi:otherwise" then @@ -673,7 +675,7 @@ function _M.get_scan_filter(self, res) repeat local chunk, err = reader(buffer_size) - local has_esi = false + if err then ngx_log(ngx_ERR, err) end if chunk then -- If we have a tag hint (partial opening ESI tag) from the @@ -737,10 +739,11 @@ function _M.get_scan_filter(self, res) else -- No complete tag found, but look for something -- resembling the beginning of an incomplete ESI tag - local start_from, start_to, err = ngx_re_find( + local start_from, _, err = ngx_re_find( chunk, "<(?:!--)?esi", "soj" ) + if err then ngx_log(ngx_ERR, err) end if start_from then -- Incomplete opening tag, so buffer and try again prev_chunk = chunk @@ -754,6 +757,7 @@ function _M.get_scan_filter(self, res) str_sub(chunk, -6, -1), "(?: chunk = process_escaping(chunk) @@ -834,6 +838,7 @@ function _M.get_process_filter(self, res) "oj", re_ctx ) + if err then ngx_log(ngx_ERR, err) end if from then -- Yield up to the start of the include tag @@ -873,6 +878,7 @@ function _M.get_process_filter(self, res) -- so that we can handle accidental recursion. repeat local chunk, err = inner_reader(buffer_size) + if err then ngx_log(ngx_ERR, err) end if chunk then -- If we see an abort instruction, we set a flag to stop -- further esi:includes. 
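Many of the err-logging additions in this patch follow one convention: ngx.re.find,
ngx.re.match and ngx.re.sub all return a trailing error value (for example a regex
compile failure), which the old code silently discarded. A minimal sketch of the
pattern, reusing the scan filter's own tag-opening regex and assuming `chunk` holds
a response body chunk:

    local from, to, err = ngx.re.find(chunk, "<(?:!--)?esi", "soj")
    if err then ngx.log(ngx.ERR, err) end
    if from then
        -- a (possibly partial) opening ESI tag starts at `from`
    end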
diff --git a/lib/ledge/esi/tag_parser.lua b/lib/ledge/esi/tag_parser.lua index f9f02a37..66f26106 100644 --- a/lib/ledge/esi/tag_parser.lua +++ b/lib/ledge/esi/tag_parser.lua @@ -91,6 +91,7 @@ function _M.find_whole_tag(self, tag) -- Find the first opening tag local opening_f, opening_t, err = ngx_re_find(markup, self.open_pattern(tag), "soj") if not opening_f then + if err then ngx_log(ngx_ERR, err) end -- Nothing here return nil end diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index 53443cf6..5f04c821 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -22,19 +22,11 @@ local ngx_time = ngx.time local ngx_http_time = ngx.http_time local ngx_parse_http_time = ngx.parse_http_time -local ngx_re_find = ngx.re.find - local str_lower = string.lower local str_len = string.len local tbl_insert = table.insert local tbl_concat = table.concat -local co_yield = coroutine.yield -local co_wrap = require("ledge.util").coroutine.wrap - -local cjson_encode = require("cjson").encode -local cjson_decode = require("cjson").decode - local esi_capabilities = require("ledge.esi").esi_capabilities local append_server_port = require("ledge.util").append_server_port @@ -768,7 +760,7 @@ local function save_to_cache(self, res) else -- Transaction likely failed due to watch on main key -- Tell storage to clean up too - ok, e = storage:delete(res.entity_id) + ok, e = storage:delete(res.entity_id) -- luacheck: ignore ok if e then ngx_log(ngx_ERR, "failed to cleanup storage: ", e) end @@ -848,7 +840,7 @@ local function delete_from_cache(self) -- Delete everything in the keychain local keys = {} - for k, v in pairs(key_chain) do + for _, v in pairs(key_chain) do tbl_insert(keys, v) end return redis:del(unpack(keys)) @@ -865,6 +857,7 @@ local function serve_body(self, res, buffer_size) repeat local chunk, err = reader(buffer_size) + if err then ngx_log(ngx_ERR, err) end if chunk and self.output_buffers_enabled then local ok, err = ngx_print(chunk) if not ok then ngx_log(ngx_INFO, err) end diff --git a/lib/ledge/header_util.lua b/lib/ledge/header_util.lua index 9ed58193..e1b574d1 100644 --- a/lib/ledge/header_util.lua +++ b/lib/ledge/header_util.lua @@ -3,8 +3,6 @@ local type, tonumber, setmetatable = local ngx_re_match = ngx.re.match local ngx_re_find = ngx.re.find -local str_find = string.find -local str_gsub = string.gsub local tbl_concat = table.concat diff --git a/lib/ledge/jobs/purge.lua b/lib/ledge/jobs/purge.lua index d9d13c6e..80f27db8 100644 --- a/lib/ledge/jobs/purge.lua +++ b/lib/ledge/jobs/purge.lua @@ -25,7 +25,7 @@ function _M.perform(job) return nil, "job-error", "no redis connection provided" end - local slave, err = create_redis_slave_connection() + local slave, _ = create_redis_slave_connection() if not slave then job.redis_slave = job.redis else diff --git a/lib/ledge/jobs/revalidate.lua b/lib/ledge/jobs/revalidate.lua index dbaa6fb4..d497b2e6 100644 --- a/lib/ledge/jobs/revalidate.lua +++ b/lib/ledge/jobs/revalidate.lua @@ -30,7 +30,7 @@ function _M.perform(job) -- If we don't have the metadata in job data, this is a background -- revalidation using stored metadata. 
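-- Note on the assignment tidied just below: in a Lua multiple assignment, any
-- name on the left without a matching expression on the right is initialised
-- to nil, so dropping the explicit trailing nil does not change behaviour.
local a, b, c = 1, 2
assert(c == nil)  -- c is nil without being written explicitly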
if not reval_params and not reval_headers then - local key_chain, redis, err = job.data.key_chain, job.redis, nil + local key_chain, redis, err = job.data.key_chain, job.redis reval_params, err = hgetall(redis, key_chain.reval_params) if not reval_params or not next(reval_params) then @@ -42,7 +42,8 @@ function _M.perform(job) reval_headers, err = hgetall(redis, key_chain.reval_req_headers) if not reval_headers or not next(reval_headers) then return nil, "job-error", - "Revalidation headers are missing, presumed evicted." + "Revalidation headers are missing, presumed evicted." .. + tostring(err) end end @@ -96,7 +97,7 @@ function _M.perform(job) local reader = res.body_reader -- Read and discard the body repeat - local chunk, err = reader() + local chunk, _ = reader() until not chunk httpc:set_keepalive( diff --git a/lib/ledge/purge.lua b/lib/ledge/purge.lua index a0a9d147..09bd7b14 100644 --- a/lib/ledge/purge.lua +++ b/lib/ledge/purge.lua @@ -83,7 +83,7 @@ local function expire_keys(redis, storage, key_chain, entity_id) _, e = storage:set_ttl(entity_id, new_ttl) if e then ngx_log(ngx_ERR, e) end - local ok, err = redis:exec() + local ok, err = redis:exec() -- luacheck: ignore ok if err then return nil, err else @@ -104,7 +104,9 @@ local function purge(handler, purge_mode) local redis = handler.redis local storage = handler.storage local key_chain = handler:cache_key_chain() + local entity_id, err = redis:hget(key_chain.main, "entity") + if err then ngx_log(ngx_ERR, err) end -- We 404 if we have nothing if not entity_id or entity_id == ngx_null @@ -133,7 +135,6 @@ local function purge(handler, purge_mode) local entity_id = handler:entity_id(key_chain) local ok, err = expire_keys(redis, storage, key_chain, entity_id) - local result if not ok and err then return nil, err @@ -167,6 +168,7 @@ local function purge_in_background(handler, purge_mode) priority = 5, } ) + if err then ngx_log(ngx_ERR, err) end -- Create a JSON payload for the response local res = create_purge_response(purge_mode, "scheduled", job) diff --git a/lib/ledge/range.lua b/lib/ledge/range.lua index 12d94ae2..dc35ff0c 100644 --- a/lib/ledge/range.lua +++ b/lib/ledge/range.lua @@ -12,6 +12,8 @@ local tbl_remove = table.remove local tbl_concat = table.concat local ngx_re_match = ngx.re.match +local ngx_log = ngx.log +local ngx_ERR = ngx.ERR local get_header_token = require("ledge.header_util").get_header_token @@ -79,6 +81,7 @@ local function parse_content_range(content_range) [[bytes\s+(\d+|\*)-(\d+|\*)/(\d+)]], "oj" ) + if err then ngx_log(ngx_ERR, err) end if not m then return nil @@ -104,7 +107,7 @@ function _M.handle_range_request(self, res) local ranges = {} - for i,range in ipairs(range_request) do + for _,range in ipairs(range_request) do local range_satisfiable = true if not range.to and not range.from then @@ -239,13 +242,14 @@ function _M.get_range_request_filter(self, reader) while true do local chunk, err = reader(buffer_size) + if err then ngx_log(ngx_ERR, err) end if not chunk then break end local chunklen = #chunk local nextplayhead = playhead + chunklen - for i, range in ipairs(ranges) do - if range.from >= nextplayhead or range.to < playhead then + for _, range in ipairs(ranges) do + if range.from >= nextplayhead or range.to < playhead then -- luacheck: ignore 542 -- Skip over non matching ranges (this is -- algorithmically simpler) else diff --git a/lib/ledge/response.lua b/lib/ledge/response.lua index 2d7299d7..60591200 100644 --- a/lib/ledge/response.lua +++ b/lib/ledge/response.lua @@ -1,16 +1,14 @@ 
-local h_util = require "ledge.header_util" local http_headers = require "resty.http_headers" local util = require "ledge.util" -local pairs, ipairs, setmetatable, tonumber, unpack = - pairs, ipairs, setmetatable, tonumber, unpack +local pairs, setmetatable, tonumber, unpack = + pairs, setmetatable, tonumber, unpack local tbl_getn = table.getn local tbl_insert = table.insert local tbl_concat = table.concat local str_lower = string.lower -local str_gsub = string.gsub local str_find = string.find local str_sub = string.sub local str_rep = string.rep @@ -23,11 +21,9 @@ local ngx_ERR = ngx.ERR local ngx_INFO = ngx.INFO local ngx_DEBUG = ngx.DEBUG local ngx_re_gmatch = ngx.re.gmatch -local ngx_re_match = ngx.re.match local ngx_parse_http_time = ngx.parse_http_time local ngx_http_time = ngx.http_time local ngx_time = ngx.time -local ngx_req_get_headers = ngx.req.get_headers local ngx_re_find = ngx.re.find local header_has_directive = require("ledge.header_util").header_has_directive @@ -205,7 +201,6 @@ function _M.read(self) return nil end - local ttl = nil local time_in_cache = 0 local time_since_generated = 0 @@ -262,7 +257,7 @@ function _M.read(self) local header = headers[i] if str_find(header, ":") then -- We have multiple headers with the same field name - local index, key = unpack(str_split(header, ":")) + local _, key = unpack(str_split(header, ":")) if not self.header[key] then self.header[key] = {} end @@ -317,6 +312,8 @@ local function prepare_cacheable_headers(headers) local from, to, err = ngx_re_find(cc, pattern, "jo", re_ctx, 1) if from then uncacheable_headers[str_sub(cc, from, to)] = true + elseif err then + ngx_log(ngx_ERR, err) end until not from end diff --git a/lib/ledge/state_machine/actions.lua b/lib/ledge/state_machine/actions.lua index f0b15ab2..57b036bd 100644 --- a/lib/ledge/state_machine/actions.lua +++ b/lib/ledge/state_machine/actions.lua @@ -4,8 +4,6 @@ local esi = require("ledge.esi") local response = require("ledge.response") local ngx_var = ngx.var -local ngx_log = ngx.log -local ngx_INFO = ngx.INFO local ngx_HTTP_NOT_MODIFIED = ngx.HTTP_NOT_MODIFIED @@ -13,10 +11,7 @@ local ngx_req_set_header = ngx.req.set_header local get_gzip_decoder = require("ledge.gzip").get_gzip_decoder -local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable - - -local _M = { +local _M = { -- luacheck: no unused _VERSION = "2.0.0", } @@ -208,27 +203,27 @@ return { handler.output_buffers_enabled = false end, - set_http_ok = function(handler) + set_http_ok = function() ngx.status = ngx.HTTP_OK end, - set_http_not_found = function(handler) + set_http_not_found = function() ngx.status = ngx.HTTP_NOT_FOUND end, - set_http_not_modified = function(handler) + set_http_not_modified = function() ngx.status = ngx_HTTP_NOT_MODIFIED end, - set_http_service_unavailable = function(handler) + set_http_service_unavailable = function() ngx.status = ngx.HTTP_SERVICE_UNAVAILABLE end, - set_http_gateway_timeout = function(handler) + set_http_gateway_timeout = function() ngx.status = ngx.HTTP_GATEWAY_TIMEOUT end, - set_http_internal_server_error = function(handler) + set_http_internal_server_error = function() ngx.status = ngx.HTTP_INTERNAL_SERVER_ERROR end, diff --git a/lib/ledge/state_machine/events.lua b/lib/ledge/state_machine/events.lua index f623ddcd..e4708c31 100644 --- a/lib/ledge/state_machine/events.lua +++ b/lib/ledge/state_machine/events.lua @@ -1,4 +1,4 @@ -local _M = { +local _M = { -- luacheck: no unused _VERSION = "2.0.0", } diff --git 
a/lib/ledge/state_machine/pre_transitions.lua b/lib/ledge/state_machine/pre_transitions.lua index d1eb093c..c548dd60 100644 --- a/lib/ledge/state_machine/pre_transitions.lua +++ b/lib/ledge/state_machine/pre_transitions.lua @@ -1,4 +1,4 @@ -local _M = { +local _M = { -- luacheck: no unused _VERSION = "2.0.0", } diff --git a/lib/ledge/state_machine/states.lua b/lib/ledge/state_machine/states.lua index 3a3a79a3..cb293a46 100644 --- a/lib/ledge/state_machine/states.lua +++ b/lib/ledge/state_machine/states.lua @@ -4,17 +4,14 @@ local range = require("ledge.range") local ngx_log = ngx.log local ngx_ERR = ngx.ERR -local ngx_PARTIAL_CONTENT = ngx.PARTIAL_CONTENT local ngx_null = ngx.null + local ngx_PARTIAL_CONTENT = 206 -local ngx_RANGE_NOT_SATISFIABLE = 416 -local ngx_HTTP_NOT_MODIFIED = 304 local ngx_req_get_method = ngx.req.get_method local ngx_req_get_headers = ngx.req.get_headers local ngx_re_find = ngx.re.find -local ngx_re_match = ngx.re.match local header_has_directive = require("ledge.header_util").header_has_directive @@ -37,12 +34,10 @@ local create_purge_response = require("ledge.purge").create_purge_response local acquire_lock = require("ledge.collapse").acquire_lock -local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable - local parse_content_range = require("ledge.range").parse_content_range -local _M = { +local _M = { -- luacheck: no unused _VERSION = "2.0.0", } @@ -52,7 +47,7 @@ local _M = { -- calling state_machine:e(ev) with the event that has occurred. Place any -- further logic in actions triggered by the transition table. return { - checking_method = function(sm, handler) + checking_method = function(sm) local method = ngx_req_get_method() if method == "PURGE" then return sm:e "purge_requested" @@ -82,11 +77,11 @@ return { end end, - accept_cache = function(sm, handler) + accept_cache = function(sm) return sm:e "cache_accepted" end, - checking_request = function(sm, handler) + checking_request = function(sm) if req_accepts_cache() then return sm:e "cache_accepted" else @@ -168,7 +163,7 @@ return { -- yet, so we must do that now -- TODO: Perhaps the state machine can load the processor to avoid this weird check if res.has_esi then - local p, err = esi.choose_esi_processor(handler) + local p = esi.choose_esi_processor(handler) if not p then -- This shouldn't happen -- if res.has_esi is set then a processor should be selectedable @@ -303,7 +298,7 @@ return { return sm:e "published" end, - fetching_as_surrogate = function(sm, handler) + fetching_as_surrogate = function(sm) return sm:e "can_fetch" end, @@ -312,7 +307,7 @@ return { -- Extend the timeout to the size of the window redis:set_timeout(handler.config.collapsed_forwarding_window) - local res, err = redis:read_reply() -- block until we hear something or timeout + local res, _ = redis:read_reply() -- block until we hear something or timeout if not res then return sm:e "http_gateway_timeout" else @@ -411,7 +406,7 @@ return { end end, - considering_local_revalidation = function(sm, handler) + considering_local_revalidation = function(sm) if can_revalidate_locally() then return sm:e "can_revalidate_locally" else @@ -453,7 +448,7 @@ return { end end, - preparing_response = function(sm, handler) + preparing_response = function(sm) return sm:e "response_ready" end, @@ -467,11 +462,11 @@ return { return sm:e "served" end, - exiting = function(sm, handler) + exiting = function() ngx.exit(ngx.status) end, - cancelling_abort_request = function(sm, handler) + cancelling_abort_request = function() return true 
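-- Dropping the unused (sm, handler) parameters above is behaviour-preserving:
-- Lua discards any extra arguments a caller supplies, so call sites that still
-- pass them are unaffected. For illustration:
local noop = function() return true end
assert(noop("sm", "handler") == true)  -- extra arguments are simply ignored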
end, } diff --git a/lib/ledge/storage/redis.lua b/lib/ledge/storage/redis.lua index 119d0058..6062fbc7 100644 --- a/lib/ledge/storage/redis.lua +++ b/lib/ledge/storage/redis.lua @@ -1,17 +1,14 @@ -local redis = require "resty.redis" local redis_connector = require "resty.redis.connector" -local tostring, ipairs, pairs, type, tonumber, next, unpack, setmetatable = - tostring, ipairs, pairs, type, tonumber, next, unpack, setmetatable +local tostring, pairs, next, unpack, setmetatable = + tostring, pairs, next, unpack, setmetatable local ngx_null = ngx.null local ngx_log = ngx.log local ngx_ERR = ngx.ERR -local ngx_NOTICE = ngx.NOTICE local ngx_WARN = ngx.WARN local tbl_insert = table.insert -local tbl_copy = require("ledge.util").table.copy local tbl_copy_merge_defaults = require("ledge.util").table.copy_merge_defaults local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable local get_fixed_field_metatable_proxy = @@ -159,7 +156,7 @@ function _M.delete(self, entity_id) local key_chain = entity_keys(entity_id) if key_chain then local keys = {} - for k, v in pairs(key_chain) do + for _, v in pairs(key_chain) do tbl_insert(keys, v) end local res, err = self.redis:del(unpack(keys)) diff --git a/lib/ledge/util.lua b/lib/ledge/util.lua index 2e855c32..7affdd3a 100644 --- a/lib/ledge/util.lua +++ b/lib/ledge/util.lua @@ -6,7 +6,6 @@ local type, next, setmetatable, getmetatable, error, tostring, select = local str_find = string.find local str_sub = string.sub -local tbl_insert = table.insert local co_create = coroutine.create local co_status = coroutine.status local co_resume = coroutine.resume @@ -56,7 +55,7 @@ _M.string.randomhex = randomhex local function str_split(str, delim) - local pos, endpos, prev, i = 0, 0, 0, 0 + local pos, endpos, prev, i = 0, 0, 0, 0 -- luacheck: ignore pos endpos local out = {} repeat pos, endpos = str_find(str, delim, prev, true) @@ -80,11 +79,11 @@ _M.string.split = str_split -- A metatable which prevents undefined fields from being created / accessed local fixed_field_metatable = { __index = - function(t, k) + function(t, k) -- luacheck: no unused error("field " .. tostring(k) .. " does not exist", 3) end, __newindex = - function(t, k, v) + function(t, k, v) -- luacheck: no unused error("attempt to create new field " .. tostring(k), 3) end, } @@ -104,7 +103,7 @@ _M.mt.fixed_field_metatable = fixed_field_metatable local function get_fixed_field_metatable_proxy(proxy) return { __index = - function(t, k) + function(t, k) -- luacheck: no unused return proxy[k] or error("field " .. tostring(k) .. 
" does not exist", 2) end, @@ -180,7 +179,6 @@ local function tbl_copy_merge_defaults(t1, defaults) if t1 == nil then t1 = {} end if defaults == nil then defaults = {} end if type(t1) == "table" and type(defaults) == "table" then - local mt = getmetatable(defaults) local copy = {} for t1_key, t1_value in next, t1, nil do copy[tbl_copy(t1_key)] = tbl_copy_merge_defaults( diff --git a/lib/ledge/worker.lua b/lib/ledge/worker.lua index 60e6daf8..84f24682 100644 --- a/lib/ledge/worker.lua +++ b/lib/ledge/worker.lua @@ -1,6 +1,4 @@ -local setmetatable, pairs, type, tostring, error = - setmetatable, pairs, type, tostring, error - +local setmetatable = setmetatable local co_yield = coroutine.yield local ngx_get_phase = ngx.get_phase From 3c0d5b944222007034702e958828ef9cd0af366e Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 30 Aug 2017 12:29:53 +0100 Subject: [PATCH 36/90] On collapsed forwarding subscriber error, fail and revert to fetching --- lib/ledge/state_machine/states.lua | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ledge/state_machine/states.lua b/lib/ledge/state_machine/states.lua index cb293a46..f1cc97f5 100644 --- a/lib/ledge/state_machine/states.lua +++ b/lib/ledge/state_machine/states.lua @@ -308,8 +308,8 @@ return { -- Extend the timeout to the size of the window redis:set_timeout(handler.config.collapsed_forwarding_window) local res, _ = redis:read_reply() -- block until we hear something or timeout - if not res then - return sm:e "http_gateway_timeout" + if not res or res == ngx_null then + return sm:e "collapsed_forwarding_failed" else -- TODO this config is now in the singleton redis:set_timeout(60) --handler.config.redis_read_timeout) From 22a4b6ed8001c6aa096713db2af20c8cd161e2df Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 4 Sep 2017 16:04:45 +0100 Subject: [PATCH 37/90] Test for collapsed forwarding exceeded window --- t/02-integration/collapsed_forwarding.t | 56 +++++++++++++++++++++++++ 1 file changed, 56 insertions(+) diff --git a/t/02-integration/collapsed_forwarding.t b/t/02-integration/collapsed_forwarding.t index 4bbbb7ba..a89b30af 100644 --- a/t/02-integration/collapsed_forwarding.t +++ b/t/02-integration/collapsed_forwarding.t @@ -370,3 +370,59 @@ If-None-Match: test7b GET /concurrent_collapsed --- error_code: 200 --- response_body + +=== TEST 8a: Prime cache (collapsed forwardind requires having seen a previously cacheable response) +--- http_config eval: $::HttpConfig +--- config +location /collapsed8_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge").create_handler():run() + } +} +location /collapsed8 { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.say("OK") + } +} +--- request eval +["GET /collapsed8_prx", "PURGE /collapsed8_prx"] +--- no_error_log +[error] + + +=== TEST 8b: Collapse window timed out +--- http_config eval: $::HttpConfig +--- config +location /concurrent_collapsed { + rewrite_by_lua_block { + ngx.shared.test:set("test_8", 0) + } + + echo_location_async "/collapsed8_prx"; + echo_sleep 0.05; + echo_location_async "/collapsed8_prx"; +} +location /collapsed8_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge").create_handler({ + enable_collapsed_forwarding = true, + collapsed_forwarding_window = 500, -- (ms) + }):run() + } +} +location /collapsed8 { + content_by_lua_block { + ngx.sleep(0.8) + ngx.header["Cache-Control"] = "max-age=3600" + ngx.say("OK " .. 
ngx.shared.test:incr("test_8", 1)) + } +} +--- request +GET /concurrent_collapsed +--- error_code: 200 +--- response_body +OK 1 +OK 2 From 0cab3354046ca371f4c319e59db74ee43f2a4d01 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 7 Sep 2017 14:11:10 +0100 Subject: [PATCH 38/90] Avoid logging empty error messages --- lib/ledge/esi/tag_parser.lua | 2 +- lib/ledge/response.lua | 2 +- lib/ledge/state_machine/states.lua | 6 +++--- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/lib/ledge/esi/tag_parser.lua b/lib/ledge/esi/tag_parser.lua index 66f26106..685c424d 100644 --- a/lib/ledge/esi/tag_parser.lua +++ b/lib/ledge/esi/tag_parser.lua @@ -103,7 +103,7 @@ function _M.find_whole_tag(self, tag) self.open_pattern(tag), "soj" ) if not opening_m then - ngx_log(ngx_ERR, err) + if err then ngx_log(ngx_ERR, err) end return nil end diff --git a/lib/ledge/response.lua b/lib/ledge/response.lua index 60591200..6533f72d 100644 --- a/lib/ledge/response.lua +++ b/lib/ledge/response.lua @@ -405,7 +405,7 @@ function _M.set_and_save(self, field, value) local redis = self.redis local ok, err = redis:hset(self.key_chain.main, field, tostring(value)) if not ok then - ngx_log(ngx_ERR, err) + if err then ngx_log(ngx_ERR, err) end return nil, err end diff --git a/lib/ledge/state_machine/states.lua b/lib/ledge/state_machine/states.lua index f1cc97f5..b41d24cb 100644 --- a/lib/ledge/state_machine/states.lua +++ b/lib/ledge/state_machine/states.lua @@ -239,7 +239,7 @@ return { local res, err = acquire_lock(redis, lock_key, timeout) if res == nil then -- Lua script failed - ngx_log(ngx_ERR, err) + if err then ngx_log(ngx_ERR, err) end return sm:e "collapsed_forwarding_failed" elseif res then -- We have the lock return sm:e "obtained_collapsed_forwarding_lock" @@ -254,7 +254,7 @@ return { local ok, err = redis_subscriber:subscribe(key_chain.root) if not ok or ok == ngx_null then -- Failed to enter subscribe mode - ngx_log(ngx_ERR, err) + if err then ngx_log(ngx_ERR, err) end return sm:e "collapsed_forwarding_failed" end @@ -268,7 +268,7 @@ return { return sm:e "collapsed_forwarding_channel_closed" else -- Error checking lock still exists - ngx_log(ngx_ERR, err) + if err then ngx_log(ngx_ERR, err) end return sm:e "collapsed_forwarding_failed" end end From 9ea4aa6fdba2db156ae497ee39c39f65de173d9c Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 7 Sep 2017 14:43:03 +0100 Subject: [PATCH 39/90] Fix luacov config --- .luacov | 31 +++++-------------------------- Makefile | 2 +- 2 files changed, 6 insertions(+), 27 deletions(-) diff --git a/.luacov b/.luacov index 665936fa..25231a56 100644 --- a/.luacov +++ b/.luacov @@ -1,29 +1,8 @@ modules = { - ["ledge.esi"] = "lib/ledge/esi.lua", - ["ledge.esi.processor_1_0"] = "lib/ledge/esi/processor_1_0.lua", - ["ledge.esi.tag_parser"] = "lib/ledge/esi/tag_parser.lua", - - ["ledge.jobs.collect_entity"] = "lib/ledge/jobs/collect_entity.lua", - ["ledge.jobs.purge"] = "lib/ledge/jobs/purge.lua", - ["ledge.jobs.revalidate"] = "lib/ledge/jobs/revalidate.lua", - - ["ledge.state_machine"] = "lib/ledge/state_machine.lua", - ["ledge.state_machine.actions"] = "lib/ledge/state_machine/actions.lua", - ["ledge.state_machine.events"] = "lib/ledge/state_machine/events.lua", - ["ledge.state_machine.pre_transitions"] = "lib/ledge/state_machine/pre_transitions.lua", - ["ledge.state_machines.states"] = "lib/ledge/state_machine/states.lua", - - ["ledge.storage.redis"] = "lib/ledge/storage/redis.lua", - - ["ledge.handler"] = "lib/ledge/handler.lua", - ["ledge.header_util"] = 
"lib/ledge/header_util.lua", - ["ledge.purge"] = "lib/ledge/purge.lua", - ["ledge.range"] = "lib/ledge/range.lua", - ["ledge.request"] = "lib/ledge/request.lua", - ["ledge.response"] = "lib/ledge/response.lua", - ["ledge.stale"] = "lib/ledge/stale.lua", - ["ledge.util"] = "lib/ledge/util.lua", - ["ledge.validation"] = "lib/ledge/validation.lua", - ["ledge.worker"] = "lib/ledge/worker.lua", ["ledge"] = "lib/ledge.lua", + ["ledge.esi.*"] = "lib/", + ["ledge.jobs.*"] = "lib/", + ["ledge.state_machine.*"] = "lib/", + ["ledge.storage.*"] = "lib/", + ["ledge.*"] = "lib/" } diff --git a/Makefile b/Makefile index f29242cd..08521405 100644 --- a/Makefile +++ b/Makefile @@ -155,7 +155,7 @@ coverage: flush_db @rm -f luacov.stats.out @$(TEST_LEDGE_REDIS_VARS) TEST_COVERAGE=1 $(PROVE) $(TEST_FILE) @luacov - @tail -21 luacov.report.out + @tail -30 luacov.report.out check: luacheck lib From 78d2136986c8911bc7053c1cd4d78d37bdc8e396 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 7 Sep 2017 14:54:04 +0100 Subject: [PATCH 40/90] Tweak unit tests for purge.expire_keys --- t/01-unit/purge.t | 17 ++++++++--------- 1 file changed, 8 insertions(+), 9 deletions(-) diff --git a/t/01-unit/purge.t b/t/01-unit/purge.t index ab26abd6..ac1d223d 100644 --- a/t/01-unit/purge.t +++ b/t/01-unit/purge.t @@ -127,27 +127,26 @@ location /t { if err then ngx.log(ngx.DEBUG, err) end assert(ok == false and err == nil, "return false with no error on missing key") - -- Stub out a partial main key + -- Stub out a partial main key, no ttl redis:hset("bogus_key", "key", "value") local ok, err = expire_keys(redis, storage, {main = "bogus_key"}, entity_id) if err then ngx.log(ngx.DEBUG, err) end - assert(ok == nil and err ~= nil, "return nil with error on broken key") + assert(ok == nil and err ~= nil, "return nil with no ttl") - -- String expires value - redis:hset("bogus_key", "expires", "now!") + -- Set a TTL + redis:expire("bogus_key", 9000) local ok, err = expire_keys(redis, storage, {main = "bogus_key"}, entity_id) if err then ngx.log(ngx.DEBUG, err) end - assert(ok == nil and err ~= nil, "return nil with error on string expires") + assert(ok == nil and err ~= nil, "return nil with error on broken key") - -- No TTL - redis:hset("bogus_key", "expires", ngx.now()+3600) + -- String expires value + redis:hset("bogus_key", "expires", "now!") local ok, err = expire_keys(redis, storage, {main = "bogus_key"}, entity_id) if err then ngx.log(ngx.DEBUG, err) end - assert(ok == nil and err ~= nil, "return nil with error when no ttl") - + assert(ok == nil and err ~= nil, "return nil with error on string expires") } } location /cache_prx { From 56961d91f01c98d1fad409f9f961bc39c49bc196 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 7 Sep 2017 15:12:23 +0100 Subject: [PATCH 41/90] Unit tests for purge --- t/01-unit/purge.t | 85 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 85 insertions(+) diff --git a/t/01-unit/purge.t b/t/01-unit/purge.t index ac1d223d..de896567 100644 --- a/t/01-unit/purge.t +++ b/t/01-unit/purge.t @@ -173,3 +173,88 @@ location /cache { ] --- no_error_log [error] + +=== TEST 3: purge +--- http_config eval: $::HttpConfig +--- config +location /t { + rewrite ^ /cache3 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + local redis = require("ledge").create_redis_connection() + handler.redis = redis + + local storage = require("ledge").create_storage_connection( + handler.config.storage_driver, + handler.config.storage_driver_config + ) + handler.storage = storage + + local 
key_chain = handler:cache_key_chain() + + local purge = require("ledge.purge").purge + + -- invalidate - error + handler.cache_key_chain = function() return {main = "bogus_key"} end + local ok, err = purge(handler, "invalidate") + if err then ngx.log(ngx.DEBUG, err) end + assert(ok == false and err == "nothing to purge", "purge should return false - bad key") + handler.cache_key_chain = require("ledge.handler").cache_key_chain + + -- invalidate + local ok, err = purge(handler, "invalidate") + if err then ngx.log(ngx.DEBUG, err) end + assert(ok == true and err == "purged", "purge should return true - purged") + + -- revalidate + local reval_job = false + handler.revalidate_in_background = function() + reval_job = true + return "job" + end + + local ok, err, job = purge(handler, "revalidate") + if err then ngx.log(ngx.DEBUG, err) end + assert(ok == false and err == "already expired", "purge should return false - already expired") + assert(reval_job == true, "revalidate should schedule job") + assert(job == "job", "revalidate should return the job "..tostring(job)) + + -- delete, error + handler.delete_from_cache = function() return nil, "delete error" end + local ok, err = purge(handler, "delete") + if err then ngx.log(ngx.DEBUG, err) end + assert(ok == nil and err == "delete error", "purge should return nil, error") + handler.delete_from_cache = require("ledge.handler").delete_from_cache + + -- delete + local ok, err = purge(handler, "delete") + if err then ngx.log(ngx.DEBUG, err) end + assert(ok == true and err == "deleted", "purge should return true - deleted") + + -- delete, missing + local ok, err = purge(handler, "delete") + if err then ngx.log(ngx.DEBUG, err) end + assert(ok == false and err == "nothing to purge", "purge should return false - nothing to purge") + } +} +location /cache3_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + handler:run() + } +} + +location /cache { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.say("TEST 3") + } +} +--- request eval +[ +"GET /cache3_prx", +"GET /t" +] +--- no_error_log +[error] From 6ac610fa19fbee4d6c8abbe1ec6a4e0792de88f1 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 7 Sep 2017 15:23:31 +0100 Subject: [PATCH 42/90] Always return job from purge() --- lib/ledge/purge.lua | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ledge/purge.lua b/lib/ledge/purge.lua index 09bd7b14..fa6a69db 100644 --- a/lib/ledge/purge.lua +++ b/lib/ledge/purge.lua @@ -136,10 +136,10 @@ local function purge(handler, purge_mode) local ok, err = expire_keys(redis, storage, key_chain, entity_id) if not ok and err then - return nil, err + return nil, err, job elseif not ok then - return false, "already expired", nil + return false, "already expired", job elseif ok then return true, "purged", job From 501f21748edd50152196f071d501dd172032da4c Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 7 Sep 2017 16:12:12 +0100 Subject: [PATCH 43/90] More unit tests for purge and revalidate --- t/01-unit/jobs.t | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/t/01-unit/jobs.t b/t/01-unit/jobs.t index 1f14c9fd..c22f4c7e 100644 --- a/t/01-unit/jobs.t +++ b/t/01-unit/jobs.t @@ -143,6 +143,13 @@ location /t { ) + redis:del(job.data.key_chain.reval_req_headers) + local ok, err, msg = revalidate.perform(job) + assert(err == "job-error" and msg ~= nil, "revalidate should return an error") + + redis:del(job.data.key_chain.reval_params) + local ok, 
err, msg = revalidate.perform(job) + assert(err == "job-error" and msg ~= nil, "revalidate should return an error") } } location /cache2_prx { @@ -336,6 +343,19 @@ location /t { assert(err == nil, "purge should not return an error") assert(heartbeat_flag == true, "Purge should heartbeat") + -- Heartbeat failure + job.heartbeat = function() return false end + local ok, err, msg = purge_job.perform(job) + ngx.log(ngx.DEBUG, msg) + assert(err == "redis-error" and msg == "Failed to heartbeat job", "purge should return heartbeat error") + job.heartbeat = function() return true end + + -- Missing redis driver + job.redis = nil + local ok, err, msg = purge_job.perform(job) + ngx.log(ngx.DEBUG, msg) + assert(err == "job-error" and msg ~= nil, "purge should return job-error") + } } From 8881f8316ce531cfe38afb0d9bb5475df68f6ff1 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 7 Sep 2017 16:51:27 +0100 Subject: [PATCH 44/90] Unit test, esi query string param with multiple values --- t/01-unit/processor_1_0.t | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/t/01-unit/processor_1_0.t b/t/01-unit/processor_1_0.t index 34a54d2d..1ddfc51e 100644 --- a/t/01-unit/processor_1_0.t +++ b/t/01-unit/processor_1_0.t @@ -84,7 +84,8 @@ location /t { [ "GET /t", "GET /t?test_param=test", - "GET /t?other_param=test" + "GET /t?other_param=test", + "GET /t?test_param=test&test_param=test2", ] --- no_error_log [error] @@ -107,6 +108,12 @@ other_param=test default default_quoted ", + +"test_param=test&test_param=test2 +test_param=test&test_param=test2 +test, test2 +test, test2 +", ] From 0afa744a1b6afcbf29de173bc924cd720c0a50d5 Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 11 Sep 2017 12:09:09 +0100 Subject: [PATCH 45/90] Initial unit test for esi fetch include --- lib/ledge/esi.lua | 1 + t/01-unit/processor_1_0.t | 41 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/lib/ledge/esi.lua b/lib/ledge/esi.lua index 194ef5b3..00bd568b 100644 --- a/lib/ledge/esi.lua +++ b/lib/ledge/esi.lua @@ -158,6 +158,7 @@ function _M.filter_esi_args(handler) local non_esi_args = {} for k,v in pairs(args) do + -- TODO: optimise -- If we have the prefix, extract the suffix local m, err = ngx_re_match( k, diff --git a/t/01-unit/processor_1_0.t b/t/01-unit/processor_1_0.t index 1ddfc51e..2402a065 100644 --- a/t/01-unit/processor_1_0.t +++ b/t/01-unit/processor_1_0.t @@ -556,3 +556,44 @@ GET /t?test_param=test --- response_body OK +=== TEST 13: fetch include +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + -- Override the normal coroutine.yield function + local output + coroutine.yield = function(chunk) output = chunk end + + local processor = require("ledge.esi.processor_1_0") + local handler = require("ledge").create_handler() + local self = { + handler = handler + } + local buffer_size = 64*1024 + local tests = { + { + ["tag"] = [[]], + ["res"] = [[fragment]], + ["msg"] = "nothing to escape" + }, + + } + for _, t in pairs(tests) do + local ret = processor.esi_fetch_include(self, t["tag"], buffer_size) + ngx.log(ngx.DEBUG, "'", output, "'") + assert(output == t["res"], "esi_fetch_include mismatch: "..t["msg"] ) + end + ngx.say("OK") + } +} +location /f { + content_by_lua_block { ngx.print("fragment") } +} +--- request +GET /t +--- no_error_log +[error] +--- response_body +OK + From 937c580f250a013fcd25b14b651090f6f8ce5163 Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 11 Sep 2017 16:47:50 +0100 Subject: [PATCH 46/90] Move cache key 
functions to a separate module --- lib/ledge/cache_key.lua | 105 ++++++++++++++++++++++++++++++++++++++++ lib/ledge/handler.lua | 90 +++------------------------------- 2 files changed, 113 insertions(+), 82 deletions(-) create mode 100644 lib/ledge/cache_key.lua diff --git a/lib/ledge/cache_key.lua b/lib/ledge/cache_key.lua new file mode 100644 index 00000000..b1cdabbe --- /dev/null +++ b/lib/ledge/cache_key.lua @@ -0,0 +1,105 @@ +local ipairs, next, type, pcall, setmetatable = + ipairs, next, type, pcall, setmetatable + +local ngx_log = ngx.log +local ngx_ERR = ngx.ERR +local ngx_var = ngx.var + +local tbl_insert = table.insert +local tbl_concat = table.concat + +local req_args_sorted = require("ledge.request").args_sorted +local req_default_args = require("ledge.request").default_args + +local get_fixed_field_metatable_proxy = + require("ledge.util").mt.get_fixed_field_metatable_proxy + + +local _M = { + _VERSION = "2.0.0", +} + + +-- Generates the cache key. The default spec is: +-- ledge:cache_obj:http:example.com:/about:p=3&q=searchterms +local function generate_cache_key(key_spec, max_args) + -- If key_spec is empty, provide a default + if not next(key_spec) then + key_spec = { + "scheme", + "host", + "uri", + "args", + } + end + + local key = { + "ledge", + "cache", + } + + for _, field in ipairs(key_spec) do + if field == "scheme" then + tbl_insert(key, ngx_var.scheme) + elseif field == "host" then + tbl_insert(key, ngx_var.host) + elseif field == "port" then + tbl_insert(key, ngx_var.server_port) + elseif field == "uri" then + tbl_insert(key, ngx_var.uri) + elseif field == "args" then + tbl_insert( + key, + req_args_sorted(max_args) or req_default_args() + ) + + elseif type(field) == "function" then + local ok, res = pcall(field) + if not ok then + ngx_log(ngx_ERR, + "error in function supplied to cache_key_spec: ", res + ) + elseif type(res) ~= "string" then + ngx_log(ngx_ERR, + "functions supplied to cache_key_spec must " .. + "return a string" + ) + else + tbl_insert(key, res) + end + end + end + + return tbl_concat(key, ":") +end +_M.generate_cache_key = generate_cache_key + + +-- Returns the key chain for all cache keys, except the body entity +local function key_chain(cache_key) + return setmetatable({ + -- hash: cache key metadata + main = cache_key .. "::main", + + -- sorted set: current entities score with sizes + entities = cache_key .. "::entities", + + -- hash: response headers + headers = cache_key .. "::headers", + + -- hash: request headers for revalidation + reval_params = cache_key .. "::reval_params", + + -- hash: request params for revalidation + reval_req_headers = cache_key .. "::reval_req_headers", + + }, get_fixed_field_metatable_proxy({ + -- Hide "root" and "fetching_lock" from iterators. + root = cache_key, + fetching_lock = cache_key .. 
"::fetching", + })) +end +_M.key_chain = key_chain + + +return _M diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index 5f04c821..ff1d3c95 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -31,10 +31,11 @@ local esi_capabilities = require("ledge.esi").esi_capabilities local append_server_port = require("ledge.util").append_server_port +local generate_cache_key = require("ledge.cache_key").generate_cache_key +local key_chain = require("ledge.cache_key").key_chain + local req_relative_uri = require("ledge.request").relative_uri local req_full_uri = require("ledge.request").full_uri -local req_args_sorted = require("ledge.request").args_sorted -local req_default_args = require("ledge.request").default_args local put_background_job = require("ledge.background").put_background_job local gc_wait = require("ledge.background").gc_wait @@ -183,96 +184,21 @@ end _M.emit = emit --- Generates or returns the cache key. The default spec is: --- ledge:cache_obj:http:example.com:/about:p=3&q=searchterms local function cache_key(self) - if self._cache_key ~= "" then return self._cache_key end - - local key_spec = self.config.cache_key_spec - - -- If key_spec is empty, provide a default - if not next(key_spec) then - key_spec = { - "scheme", - "host", - "uri", - "args", - } - end - - local key = { - "ledge", - "cache", - } - - for _, field in ipairs(key_spec) do - if field == "scheme" then - tbl_insert(key, ngx_var.scheme) - elseif field == "host" then - tbl_insert(key, ngx_var.host) - elseif field == "port" then - tbl_insert(key, ngx_var.server_port) - elseif field == "uri" then - tbl_insert(key, ngx_var.uri) - elseif field == "args" then - tbl_insert( - key, - req_args_sorted(self.config.max_uri_args) or req_default_args() + if self._cache_key == "" then + self._cache_key = generate_cache_key( + self.config.cache_key_spec, + self.config.max_uri_args ) - - elseif type(field) == "function" then - local ok, res = pcall(field) - if not ok then - ngx_log(ngx_ERR, - "error in function supplied to cache_key_spec: ", res - ) - elseif type(res) ~= "string" then - ngx_log(ngx_ERR, - "functions supplied to cache_key_spec must " .. - "return a string" - ) - else - tbl_insert(key, res) - end - end end - - self._cache_key = tbl_concat(key, ":") return self._cache_key end _M.cache_key = cache_key --- Returns the key chain for all cache keys, except the body entity -local function key_chain(cache_key) - return setmetatable({ - -- hash: cache key metadata - main = cache_key .. "::main", - - -- sorted set: current entities score with sizes - entities = cache_key .. "::entities", - - -- hash: response headers - headers = cache_key .. "::headers", - - -- hash: request headers for revalidation - reval_params = cache_key .. "::reval_params", - - -- hash: request params for revalidation - reval_req_headers = cache_key .. "::reval_req_headers", - - }, get_fixed_field_metatable_proxy({ - -- Hide "root" and "fetching_lock" from iterators. - root = cache_key, - fetching_lock = cache_key .. 
"::fetching", - })) -end - - local function cache_key_chain(self) if not next(self._cache_key_chain) then - local cache_key = cache_key(self) - self._cache_key_chain = key_chain(cache_key) + self._cache_key_chain = key_chain(cache_key(self)) end return self._cache_key_chain end From 088798b3055f24faeaec03ba243862772d7ced66 Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 11 Sep 2017 16:19:31 +0100 Subject: [PATCH 47/90] Initial JSON purge API --- lib/ledge/purge.lua | 150 +++++++++++++++++++++++++++++ lib/ledge/state_machine/events.lua | 21 +++- lib/ledge/state_machine/states.lua | 24 ++++- t/02-integration/purge.t | 59 ++++++++++++ 4 files changed, 251 insertions(+), 3 deletions(-) diff --git a/lib/ledge/purge.lua b/lib/ledge/purge.lua index fa6a69db..c1850d6f 100644 --- a/lib/ledge/purge.lua +++ b/lib/ledge/purge.lua @@ -6,9 +6,13 @@ local ngx_ERR = ngx.ERR local ngx_null = ngx.null local ngx_time = ngx.time local ngx_md5 = ngx.md5 +local ngx_HTTP_BAD_REQUEST = ngx.HTTP_BAD_REQUEST + +local http = require("resty.http") local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable local cjson_encode = require("cjson").encode +local cjson_decode = require("cjson").decode local put_background_job = require("ledge.background").put_background_job @@ -179,4 +183,150 @@ end _M.purge_in_background = purge_in_background +local function parse_json_req() + ngx.req.read_body() + local body, err = ngx.req.get_body_data() + if not body then + return nil, "Could not read request body: " .. tostring(err) + end + + local ok, req = pcall(cjson_decode, body) + if not ok then + return nil, "Could not parse request body: " .. tostring(req) + end + + return req +end + + +local function validate_api_request(req) + local uris = req["uris"] + if not uris then + return false, "No URIs provided" + end + + if type(uris) ~= "table" then + return false, "Field 'uris' must be an array" + end + + if #uris == 0 then + return false, "No URIs provided" + end + + local mode = req["purge_mode"] + if mode and not ( + mode == "invalidate" + or mode == "revalidate" + or mode == "delete" + ) then + return false, "Invalid purge_mode" + end + + return true +end + + +local function key_chain_from_uri(handler, uri) + local parsed, err = http:parse_uri(uri, false) + if not parsed then + return nil, "URI Parse Error: "..err + end + + local args = parsed[5] + if args then + args = ngx.decode_args(args, handler.config.max_uri_args or 100) + else + args = {} + end + + --local scheme, host, port, path, query = unpack(parsed_uri) + local vars = { + ["scheme"] = parsed[1], + ["host"] = parsed[2], + ["port"] = parsed[3], + ["uri"] = parsed[4], + ["args"] = args, + } + + -- TODO: Fix this hack to force cache_key regeneration + handler._cache_key_chain = {} + handler._cache_key = "" + + -- Generate new cache_key + handler:cache_key(vars) + return handler:cache_key_chain() +end + + +-- Run the JSON PURGE API. 
+-- Accepts various inputs from a JSON request body and processes purges +-- Return true on success or false on error +local function purge_api(handler) + local response = handler.response + + local request, err = parse_json_req() + if not request then + response.status = ngx_HTTP_BAD_REQUEST + response:set_body(cjson_encode({["error"] = err})) + return false + end + + local ok, err = validate_api_request(request) + if not ok then + response.status = ngx_HTTP_BAD_REQUEST + response:set_body(cjson_encode({["error"] = err})) + return false + end + + local redis, storage = handler.redis, handler.storage + local purge_mode = request["purge_mode"] or "invalidate" -- Default to invalidating + local api_results = {} + + local uris = request["uris"] + for _, uri in ipairs(uris) do + ngx.log(ngx.DEBUG, "Purging: ", uri) + local res = {} + local key_chain, err = key_chain_from_uri(handler, uri) + if not key_chain then + res["error"] = err + + else + -- TODO: revalidate and delete + local entity_id, err = handler:entity_id(key_chain) + if not entity_id then + res["error"] = err + + else + local ok, err = expire_keys(redis, storage, key_chain, entity_id) + if not ok and err then + res["error"] = err + + elseif not ok then + res["result"] = "already expired" + + elseif ok then + res["result"] = "purged" + --res["job"] = job + else + res["wat"] = "dafuq" + + end + end + end + api_results[uri] = res + end + + local api_response, err = create_purge_response(purge_mode, api_results) + if not api_response then + handler.set:body(cjson_encode({["error"] = "JSON Response Error: "..tostring(err)})) + return false + end + + ngx.log(ngx.DEBUG, "API Response: \n", api_response) + handler.response:set_body(api_response) + return true +end +_M.purge_api = purge_api + + return setmetatable(_M, fixed_field_metatable) diff --git a/lib/ledge/state_machine/events.lua b/lib/ledge/state_machine/events.lua index e4708c31..848cc565 100644 --- a/lib/ledge/state_machine/events.lua +++ b/lib/ledge/state_machine/events.lua @@ -28,7 +28,18 @@ return { begin = "purging", but_first = "set_json_response" }, - { begin = "considering_wildcard_purge" }, + { + when = "considering_purge_api", + begin = "considering_wildcard_purge" + }, + { begin = "considering_purge_api" }, + }, + + purge_api_requested = { + { + begin = "purging_via_api", + but_first = "set_json_response" + }, }, wildcard_purge_requested = { @@ -44,6 +55,14 @@ return { { begin = "serving", but_first = "set_http_ok" }, }, + purge_api_completed = { + { begin = "serving", but_first = "set_http_ok" }, + }, + + purge_api_failed = { + { begin = "serving", but_first = "set_http_status_from_response" }, + }, + -- URI to purge was not found. Exit 404 Not Found. 
nothing_to_purge = { { begin = "serving", but_first = "set_http_not_found" }, diff --git a/lib/ledge/state_machine/states.lua b/lib/ledge/state_machine/states.lua index b41d24cb..24646f5b 100644 --- a/lib/ledge/state_machine/states.lua +++ b/lib/ledge/state_machine/states.lua @@ -11,7 +11,8 @@ local ngx_PARTIAL_CONTENT = 206 local ngx_req_get_method = ngx.req.get_method local ngx_req_get_headers = ngx.req.get_headers -local ngx_re_find = ngx.re.find +local str_find = string.find +local str_lower = string.lower local header_has_directive = require("ledge.header_util").header_has_directive @@ -29,6 +30,7 @@ local req_accepts_cache = require("ledge.request").accepts_cache local purge_mode = require("ledge.request").purge_mode local purge = require("ledge.purge").purge +local purge_api = require("ledge.purge").purge_api local purge_in_background = require("ledge.purge").purge_in_background local create_purge_response = require("ledge.purge").create_purge_response @@ -59,9 +61,18 @@ return { end end, + considering_purge_api = function(sm) + local ct = ngx_req_get_headers()["Content-Type"] + if ct and str_lower(ct) == "application/json" then + return sm:e "purge_api_requested" + else + return sm:e "purge_requested" + end + end, + considering_wildcard_purge = function(sm, handler) local key_chain = handler:cache_key_chain() - if ngx_re_find(key_chain.root, "\\*", "soj") then + if str_find(key_chain.root, "*", 1, true) then return sm:e "wildcard_purge_requested" else return sm:e "purge_requested" @@ -364,6 +375,15 @@ return { end end, + purging_via_api = function(sm, handler) + local ok = purge_api(handler) + if ok then + return sm:e "purge_api_completed" + else + return sm:e "purge_api_failed" + end + end, + purging = function(sm, handler) local mode = purge_mode() local ok, message, job = purge(handler, mode) diff --git a/t/02-integration/purge.t b/t/02-integration/purge.t index 57432816..06ffb444 100644 --- a/t/02-integration/purge.t +++ b/t/02-integration/purge.t @@ -869,3 +869,62 @@ qless_job.options.priority: 5 qless_job.options.tags.1: purge result: scheduled --- error_code: 200 + + +=== TEST 14: Purge API runs +--- http_config eval: $::HttpConfig +--- config +location /purge_api { + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler():run() + } + body_filter_by_lua_block { + ngx.arg[1] = format_json(ngx.arg[1]) + ngx.arg[2] = true + } +} +location /purge_cached_14_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(false) + require("ledge").create_handler({ + keep_cache_for = 3600, + }):run() + } +} +location /purge_cached_14 { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.print("TEST 14: ", ngx.req.get_uri_args()["a"]) + } +} +--- request eval +[ +"GET /purge_cached_14_prx?a=1", "GET /purge_cached_14_prx?a=2", +'PURGE /purge_api +{"uris": ["http://localhost/purge_cached_14?a=1","http://localhost/purge_cached_14?a=2"]}', +"GET /purge_cached_14_prx?a=1", "GET /purge_cached_14_prx?a=2" +] +--- more_headers eval +[ +"","", +"Content-Type: Application/JSON", +"","" +] +--- response_body eval +[ +"TEST 14: 1", "TEST 14: 2", +"purge_mode: invalidate +result.http://localhost/purge_cached_14?a=1.result: purged +result.http://localhost/purge_cached_14?a=2.result: purged +", +"TEST 14: 1", "TEST 14: 2" +] +--- response_headers_like eval +["X-Cache: MISS from .+", "X-Cache: MISS from .+", +"Content-Type: application/json", +"X-Cache: MISS from .+", "X-Cache: MISS 
from .+"] +--- no_error_log +[error] + From 2e0fd77239905ba3cfd4485e9f233c70a7c1b453 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 12 Sep 2017 13:52:10 +0100 Subject: [PATCH 48/90] Rework purging to use standalone cache key generation The following methods now require an explicit key chain argument handler.revalidate_in_background handler.delete_from_cache purge.purge Normal requests simply pass in handler:cache_key_chain() Headless API requests generate a key chain for each URI provided Wildcard purge jobs now generate a key chain from the SCAN'd main key and pass that to purge(). Without having to hack the handler key chain caching for each key --- lib/ledge/handler.lua | 10 +-- lib/ledge/jobs/purge.lua | 12 ++-- lib/ledge/purge.lua | 98 +++++++++++++++++------------ lib/ledge/state_machine/actions.lua | 4 +- lib/ledge/state_machine/states.lua | 2 +- 5 files changed, 71 insertions(+), 55 deletions(-) diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index ff1d3c95..fd3a54dd 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -476,9 +476,8 @@ local function revalidation_data(self) end -local function revalidate_in_background(self, update_revalidation_data) +local function revalidate_in_background(self, key_chain, update_revalidation_data) local redis = self.redis - local key_chain = cache_key_chain(self) -- Revalidation data is updated if this is a proper request, but not if -- it's a purge request. @@ -736,12 +735,13 @@ end _M.save_to_cache = save_to_cache -local function delete_from_cache(self) +local function delete_from_cache(self, key_chain, entity_id) local redis = self.redis - local key_chain = cache_key_chain(self) + + -- Get entity_id if not already provided + entity_id = entity_id or self:entity_id(key_chain) -- Schedule entity collection - local entity_id = self:entity_id(key_chain) if entity_id then local config = self.config local size = redis:hget(key_chain.main, "size") diff --git a/lib/ledge/jobs/purge.lua b/lib/ledge/jobs/purge.lua index 80f27db8..433f719d 100644 --- a/lib/ledge/jobs/purge.lua +++ b/lib/ledge/jobs/purge.lua @@ -10,6 +10,8 @@ local purge = require("ledge.purge").purge local create_redis_slave_connection = require("ledge").create_redis_slave_connection local close_redis_connection = require("ledge").close_redis_connection +local key_chain = require("ledge.cache_key").key_chain + local _M = { _VERSION = "2.0.0", } @@ -80,16 +82,12 @@ function _M.expire_pattern(cursor, job, handler) else for _,key in ipairs(res[2]) do -- Strip the "main" suffix to find the cache key - handler._cache_key = str_sub(key, 1, magic_len) + local cache_key = str_sub(key, 1, magic_len) - ngx_log(ngx_DEBUG, "Purging key: ", handler._cache_key) + ngx_log(ngx_DEBUG, "Purging key: ", cache_key) - local ok, err = purge(handler, job.data.purge_mode) + local ok, err = purge(handler, job.data.purge_mode, key_chain(cache_key)) if ok == nil and err then ngx_log(ngx_ERR, tostring(err)) end - - -- reset these so that handler can be reused - handler._cache_key_chain = {} - handler._cache_key = "" end local cursor = tonumber(res[1]) diff --git a/lib/ledge/purge.lua b/lib/ledge/purge.lua index c1850d6f..5741cae7 100644 --- a/lib/ledge/purge.lua +++ b/lib/ledge/purge.lua @@ -1,5 +1,7 @@ local pcall, tonumber, tostring, pairs = pcall, tonumber, tostring, pairs +local str_byte = string.byte +local str_find = string.find local ngx_log = ngx.log local ngx_ERR = ngx.ERR @@ -10,11 +12,15 @@ local ngx_HTTP_BAD_REQUEST = ngx.HTTP_BAD_REQUEST local http = require("resty.http") 
-local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable local cjson_encode = require("cjson").encode local cjson_decode = require("cjson").decode + +local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable local put_background_job = require("ledge.background").put_background_job +local generate_cache_key = require("ledge.cache_key").generate_cache_key +local key_chain = require("ledge.cache_key").key_chain + local _M = { _VERSION = "2.0.0", @@ -101,15 +107,15 @@ _M.expire_keys = expire_keys -- If there's nothing to do we return false which results in a 404. -- @param table handler instance -- @param string "invalidate" | "delete" | "revalidate +-- @param table key_chain to purge -- @return boolean success -- @return string message -- @return table qless job (for revalidate only) -local function purge(handler, purge_mode) +local function purge(handler, purge_mode, key_chain) local redis = handler.redis local storage = handler.storage - local key_chain = handler:cache_key_chain() - local entity_id, err = redis:hget(key_chain.main, "entity") + local entity_id, err = handler:entity_id(key_chain) if err then ngx_log(ngx_ERR, err) end -- We 404 if we have nothing @@ -121,7 +127,7 @@ local function purge(handler, purge_mode) -- Delete mode overrides everything else, since you can't revalidate if purge_mode == "delete" then - local res, err = handler:delete_from_cache() + local res, err = handler:delete_from_cache(key_chain, entity_id) if not res then return nil, err, nil else @@ -132,11 +138,10 @@ local function purge(handler, purge_mode) -- If we're revalidating, fire off the background job local job if purge_mode == "revalidate" then - job = handler:revalidate_in_background(false) + job = handler:revalidate_in_background(key_chain, false) end -- Invalidate the keys - local entity_id = handler:entity_id(key_chain) local ok, err = expire_keys(redis, storage, key_chain, entity_id) if not ok and err then @@ -153,10 +158,8 @@ end _M.purge = purge -local function purge_in_background(handler, purge_mode) - local key_chain = handler:cache_key_chain() - - local job, err = put_background_job( +local function schedule_purge_job(handler, purge_mode, key_chain) + return put_background_job( "ledge_purge", "ledge.jobs.purge", { @@ -172,6 +175,11 @@ local function purge_in_background(handler, purge_mode) priority = 5, } ) +end + + +local function purge_in_background(handler, purge_mode) + local job, err = schedule_purge_job(handler, purge_mode, handler:cache_key_chain()) if err then ngx_log(ngx_ERR, err) end -- Create a JSON payload for the response @@ -233,10 +241,20 @@ local function key_chain_from_uri(handler, uri) end local args = parsed[5] - if args then - args = ngx.decode_args(args, handler.config.max_uri_args or 100) + local uri = parsed[4] + + if args and args ~= "" then + -- Query string is in the URI + -- Check if we're purging /some/uri?* + if args ~= "*" then + args = ngx.decode_args(args, handler.config.max_uri_args or 100) + end + elseif str_byte(uri, -1) == 42 then + -- Purging /some/uri/* with no query string specified. 
+ -- Default args to * + args = "*" else - args = {} + args = nil end --local scheme, host, port, path, query = unpack(parsed_uri) @@ -244,17 +262,18 @@ local function key_chain_from_uri(handler, uri) ["scheme"] = parsed[1], ["host"] = parsed[2], ["port"] = parsed[3], - ["uri"] = parsed[4], + ["uri"] = uri, ["args"] = args, } - -- TODO: Fix this hack to force cache_key regeneration - handler._cache_key_chain = {} - handler._cache_key = "" - -- Generate new cache_key - handler:cache_key(vars) - return handler:cache_key_chain() + local cache_key = generate_cache_key( + handler.config.cache_key_spec, + handler.config.max_uri_args, + vars + ) + ngx.log(ngx.DEBUG, "CACHE KEY: ", cache_key) + return key_chain(cache_key) end @@ -278,41 +297,41 @@ local function purge_api(handler) return false end - local redis, storage = handler.redis, handler.storage local purge_mode = request["purge_mode"] or "invalidate" -- Default to invalidating local api_results = {} local uris = request["uris"] for _, uri in ipairs(uris) do - ngx.log(ngx.DEBUG, "Purging: ", uri) local res = {} local key_chain, err = key_chain_from_uri(handler, uri) + if not key_chain then res["error"] = err else - -- TODO: revalidate and delete - local entity_id, err = handler:entity_id(key_chain) - if not entity_id then - res["error"] = err + if str_find(uri, "*", 1, true) ~= nil then + -- Schedule wildcard purge job + local job, err = schedule_purge_job(handler, purge_mode, key_chain) + if err then + res["error"] = "error" + else + res["result"] = "scheduled" + res["qless_job"] = job + end else - local ok, err = expire_keys(redis, storage, key_chain, entity_id) - if not ok and err then - res["error"] = err - - elseif not ok then - res["result"] = "already expired" - - elseif ok then - res["result"] = "purged" - --res["job"] = job + -- Purge the URI now + local ok, purge_result, job = purge(handler, purge_mode, key_chain) + res["qless_job"] = job + if ok == nil and purge_result then + res["error"] = purge_result else - res["wat"] = "dafuq" - + res["result"] = purge_result end + end end + api_results[uri] = res end @@ -322,7 +341,6 @@ local function purge_api(handler) return false end - ngx.log(ngx.DEBUG, "API Response: \n", api_response) handler.response:set_body(api_response) return true end diff --git a/lib/ledge/state_machine/actions.lua b/lib/ledge/state_machine/actions.lua index 57b036bd..4e5dab31 100644 --- a/lib/ledge/state_machine/actions.lua +++ b/lib/ledge/state_machine/actions.lua @@ -180,7 +180,7 @@ return { -- Updates the realidation_params key with data from the current request, -- and schedules a background revalidation job revalidate_in_background = function(handler) - return handler:revalidate_in_background(true) + return handler:revalidate_in_background(handler:cache_key_chain(), true) end, -- Triggered on upstream partial content, assumes no stored @@ -196,7 +196,7 @@ return { end, delete_from_cache = function(handler) - return handler:delete_from_cache() + return handler:delete_from_cache(handler:cache_key_chain()) end, disable_output_buffers = function(handler) diff --git a/lib/ledge/state_machine/states.lua b/lib/ledge/state_machine/states.lua index 24646f5b..d61a7641 100644 --- a/lib/ledge/state_machine/states.lua +++ b/lib/ledge/state_machine/states.lua @@ -386,7 +386,7 @@ return { purging = function(sm, handler) local mode = purge_mode() - local ok, message, job = purge(handler, mode) + local ok, message, job = purge(handler, mode, handler:cache_key_chain()) local json = create_purge_response(mode, message, job) 
handler.response:set_body(json) From 326d4d84279c9975a41039739635ce262f4eec8a Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 12 Sep 2017 13:56:09 +0100 Subject: [PATCH 49/90] Fix purge unit tests for method change --- t/01-unit/purge.t | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/t/01-unit/purge.t b/t/01-unit/purge.t index de896567..016fddd3 100644 --- a/t/01-unit/purge.t +++ b/t/01-unit/purge.t @@ -195,14 +195,12 @@ location /t { local purge = require("ledge.purge").purge -- invalidate - error - handler.cache_key_chain = function() return {main = "bogus_key"} end - local ok, err = purge(handler, "invalidate") + local ok, err = purge(handler, "invalidate", {main = "bogus_key"}) if err then ngx.log(ngx.DEBUG, err) end assert(ok == false and err == "nothing to purge", "purge should return false - bad key") - handler.cache_key_chain = require("ledge.handler").cache_key_chain -- invalidate - local ok, err = purge(handler, "invalidate") + local ok, err = purge(handler, "invalidate", key_chain) if err then ngx.log(ngx.DEBUG, err) end assert(ok == true and err == "purged", "purge should return true - purged") @@ -213,7 +211,7 @@ location /t { return "job" end - local ok, err, job = purge(handler, "revalidate") + local ok, err, job = purge(handler, "revalidate", key_chain) if err then ngx.log(ngx.DEBUG, err) end assert(ok == false and err == "already expired", "purge should return false - already expired") assert(reval_job == true, "revalidate should schedule job") @@ -221,18 +219,18 @@ location /t { -- delete, error handler.delete_from_cache = function() return nil, "delete error" end - local ok, err = purge(handler, "delete") + local ok, err = purge(handler, "delete", key_chain) if err then ngx.log(ngx.DEBUG, err) end assert(ok == nil and err == "delete error", "purge should return nil, error") handler.delete_from_cache = require("ledge.handler").delete_from_cache -- delete - local ok, err = purge(handler, "delete") + local ok, err = purge(handler, "delete", key_chain) if err then ngx.log(ngx.DEBUG, err) end assert(ok == true and err == "deleted", "purge should return true - deleted") -- delete, missing - local ok, err = purge(handler, "delete") + local ok, err = purge(handler, "delete", key_chain) if err then ngx.log(ngx.DEBUG, err) end assert(ok == false and err == "nothing to purge", "purge should return false - nothing to purge") } From 06aeafba1b1878bd060503886c11f028fe9a7012 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 12 Sep 2017 13:56:24 +0100 Subject: [PATCH 50/90] Tests for wildcard purges with JSON API --- t/02-integration/purge.t | 196 +++++++++++++++++++++++++++++++++++++-- 1 file changed, 190 insertions(+), 6 deletions(-) diff --git a/t/02-integration/purge.t b/t/02-integration/purge.t index 06ffb444..90a22ddc 100644 --- a/t/02-integration/purge.t +++ b/t/02-integration/purge.t @@ -902,29 +902,213 @@ location /purge_cached_14 { --- request eval [ "GET /purge_cached_14_prx?a=1", "GET /purge_cached_14_prx?a=2", + 'PURGE /purge_api -{"uris": ["http://localhost/purge_cached_14?a=1","http://localhost/purge_cached_14?a=2"]}', -"GET /purge_cached_14_prx?a=1", "GET /purge_cached_14_prx?a=2" +{"uris": ["http://localhost/purge_cached_14?a=1", "http://localhost/purge_cached_14?a=2"]}', + +"GET /purge_cached_14_prx?a=1", "GET /purge_cached_14_prx?a=2", ] --- more_headers eval [ "","", "Content-Type: Application/JSON", -"","" +"","", ] --- response_body eval [ "TEST 14: 1", "TEST 14: 2", + "purge_mode: invalidate 
result.http://localhost/purge_cached_14?a=1.result: purged result.http://localhost/purge_cached_14?a=2.result: purged ", -"TEST 14: 1", "TEST 14: 2" + +"TEST 14: 1", "TEST 14: 2", ] --- response_headers_like eval -["X-Cache: MISS from .+", "X-Cache: MISS from .+", +[ +"X-Cache: MISS from .+", "X-Cache: MISS from .+", "Content-Type: application/json", -"X-Cache: MISS from .+", "X-Cache: MISS from .+"] +"X-Cache: MISS from .+", "X-Cache: MISS from .+", +] --- no_error_log [error] + +=== TEST 15: Purge API wildcard query string +--- http_config eval: $::HttpConfig +--- config +location /purge_api { + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler():run() + } + body_filter_by_lua_block { + ngx.arg[1] = format_json(ngx.arg[1]) + ngx.arg[2] = true + } +} +location /purge_cached_15_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(false) + require("ledge").create_handler({ + keep_cache_for = 3600, + }):run() + } +} +location /purge_cached_15 { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.print("TEST 15: ", ngx.req.get_uri_args()["a"]) + } +} +--- request eval +[ +"GET /purge_cached_15_prx?a=1", "GET /purge_cached_15_prx?a=2", + +'PURGE /purge_api +{"uris": ["http://localhost/purge_cached_15?a*"]}', +] +--- more_headers eval +[ +"","", +"Content-Type: Application/JSON", +] +--- response_body_like eval +[ +"TEST 15: 1", "TEST 15: 2", + +"purge_mode: invalidate +result.http://localhost/purge_cached_15\\?a\\*.qless_job.jid: [a-f0-9]{32} +result.http://localhost/purge_cached_15\\?a\\*.qless_job.klass: ledge.jobs.purge +result.http://localhost/purge_cached_15\\?a\\*.qless_job.options.jid: [a-f0-9]{32} +result.http://localhost/purge_cached_15\\?a\\*.qless_job.options.priority: 5 +result.http://localhost/purge_cached_15\\?a\\*.qless_job.options.tags.1: purge +result.http://localhost/purge_cached_15\\?a\\*.result: scheduled +", +] +--- response_headers_like eval +[ +"X-Cache: MISS from .+", "X-Cache: MISS from .+", +"Content-Type: application/json", +] +--- wait: 2 +--- no_error_log +[error] + +=== TEST 15b: Purge API wildcard query string +--- http_config eval: $::HttpConfig +--- config +location /purge_cached_15_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(false) + require("ledge").create_handler({ + keep_cache_for = 3600, + }):run() + } +} +location /purge_cached_15 { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.print("TEST 15b: ", ngx.req.get_uri_args()["a"]) + } +} +--- request eval +["GET /purge_cached_15_prx?a=1", "GET /purge_cached_15_prx?a=2"] +--- response_body_like eval +["TEST 15b: 1", "TEST 15b: 2"] +--- response_headers_like eval +["X-Cache: MISS from .+", "X-Cache: MISS from .+"] +--- no_error_log +[error] + +=== TEST 16: Purge API wildcards +--- http_config eval: $::HttpConfig +--- config +location /purge_api { + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler():run() + } + body_filter_by_lua_block { + ngx.arg[1] = format_json(ngx.arg[1]) + ngx.arg[2] = true + } +} +location /purge_cached_16_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(false) + require("ledge").create_handler({ + keep_cache_for = 3600, + }):run() + } +} +location /purge_cached_16 { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.print("TEST 
16: ", ngx.req.get_uri_args()["a"]) + } +} +--- request eval +[ +"GET /purge_cached_16_prx?a=1", "GET /purge_cached_16_prx?a=2", + +'PURGE /purge_api +{"uris": ["http://localhost/purge*"]}', +] +--- more_headers eval +[ +"","", +"Content-Type: Application/JSON", +] +--- response_body_like eval +[ +"TEST 16: 1", "TEST 16: 2", + +"purge_mode: invalidate +result.http://localhost/purge\\*.qless_job.jid: [a-f0-9]{32} +result.http://localhost/purge\\*.qless_job.klass: ledge.jobs.purge +result.http://localhost/purge\\*.qless_job.options.jid: [a-f0-9]{32} +result.http://localhost/purge\\*.qless_job.options.priority: 5 +result.http://localhost/purge\\*.qless_job.options.tags.1: purge +result.http://localhost/purge\\*.result: scheduled +", +] +--- response_headers_like eval +[ +"X-Cache: MISS from .+", "X-Cache: MISS from .+", +"Content-Type: application/json", +] +--- wait: 2 +--- no_error_log +[error] + +=== TEST 16b: Purge API wildcard check +--- http_config eval: $::HttpConfig +--- config +location /purge_cached_16_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(false) + require("ledge").create_handler({ + keep_cache_for = 3600, + }):run() + } +} +location /purge_cached_16 { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.print("TEST 16b: ", ngx.req.get_uri_args()["a"]) + } +} +--- request eval +["GET /purge_cached_16_prx?a=1", "GET /purge_cached_16_prx?a=2"] +--- response_body_like eval +["TEST 16b: 1", "TEST 16b: 2"] +--- response_headers_like eval +["X-Cache: MISS from .+", "X-Cache: MISS from .+"] +--- no_error_log +[error] From 3edf21b827cdfa2d9db4ba7c3cf79866e3f8d194 Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 13 Sep 2017 12:06:12 +0100 Subject: [PATCH 51/90] Support passing custom headers into PURGE API requests --- lib/ledge/purge.lua | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/lib/ledge/purge.lua b/lib/ledge/purge.lua index 5741cae7..554be618 100644 --- a/lib/ledge/purge.lua +++ b/lib/ledge/purge.lua @@ -234,7 +234,7 @@ local function validate_api_request(req) end -local function key_chain_from_uri(handler, uri) +local function key_chain_from_uri(handler, uri, headers) local parsed, err = http:parse_uri(uri, false) if not parsed then return nil, "URI Parse Error: "..err @@ -264,6 +264,7 @@ local function key_chain_from_uri(handler, uri) ["port"] = parsed[3], ["uri"] = uri, ["args"] = args, + ["headers"] = headers, } -- Generate new cache_key @@ -303,7 +304,7 @@ local function purge_api(handler) local uris = request["uris"] for _, uri in ipairs(uris) do local res = {} - local key_chain, err = key_chain_from_uri(handler, uri) + local key_chain, err = key_chain_from_uri(handler, uri, request["headers"]) if not key_chain then res["error"] = err From afd9df940f1bc9b78d1e91d1a27ee9a474433e62 Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 13 Sep 2017 16:02:04 +0100 Subject: [PATCH 52/90] Purge API creates loopback purge requests --- lib/ledge/purge.lua | 118 +++++++++++++++++++------------------------- 1 file changed, 50 insertions(+), 68 deletions(-) diff --git a/lib/ledge/purge.lua b/lib/ledge/purge.lua index 554be618..1e25b0a0 100644 --- a/lib/ledge/purge.lua +++ b/lib/ledge/purge.lua @@ -1,8 +1,7 @@ local pcall, tonumber, tostring, pairs = pcall, tonumber, tostring, pairs -local str_byte = string.byte -local str_find = string.find +local ngx_var = ngx.var local ngx_log = ngx.log local ngx_ERR = ngx.ERR local ngx_null = ngx.null @@ -18,10 +17,6 @@ local cjson_decode = 
require("cjson").decode local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable local put_background_job = require("ledge.background").put_background_job -local generate_cache_key = require("ledge.cache_key").generate_cache_key -local key_chain = require("ledge.cache_key").key_chain - - local _M = { _VERSION = "2.0.0", } @@ -234,47 +229,57 @@ local function validate_api_request(req) end -local function key_chain_from_uri(handler, uri, headers) - local parsed, err = http:parse_uri(uri, false) - if not parsed then - return nil, "URI Parse Error: "..err +local function send_purge_request(uri, purge_mode, headers) + local uri_parts, err = http:parse_uri(uri) + if not uri_parts then + return nil, err end - local args = parsed[5] - local uri = parsed[4] + local scheme, host, port, path = unpack(uri_parts) + + -- TODO: timeouts + local httpc = http.new() + local ok, err = httpc:connect(ngx_var.server_addr, port) + if not ok then + return nil, "HTTP Connect ("..ngx_var.server_addr..":"..port.."): "..err + end - if args and args ~= "" then - -- Query string is in the URI - -- Check if we're purging /some/uri?* - if args ~= "*" then - args = ngx.decode_args(args, handler.config.max_uri_args or 100) + if scheme == "https" then + local ok, err = httpc:ssl_handshake(nil, host, false) + if not ok then + return nil, "SSL Handshake: "..err end - elseif str_byte(uri, -1) == 42 then - -- Purging /some/uri/* with no query string specified. - -- Default args to * - args = "*" - else - args = nil end - --local scheme, host, port, path, query = unpack(parsed_uri) - local vars = { - ["scheme"] = parsed[1], - ["host"] = parsed[2], - ["port"] = parsed[3], - ["uri"] = uri, - ["args"] = args, - ["headers"] = headers, - } + headers = headers or {} + headers["Host"] = host + headers["X-Purge"] = purge_mode - -- Generate new cache_key - local cache_key = generate_cache_key( - handler.config.cache_key_spec, - handler.config.max_uri_args, - vars - ) - ngx.log(ngx.DEBUG, "CACHE KEY: ", cache_key) - return key_chain(cache_key) + local res, err = httpc:request({ + method = "PURGE", + path = path, + headers = headers + }) + + if not res then + return nil, "HTTP Request: "..err + end + + local body, err = res:read_body() + if not body then + return nil, "HTTP Response: "..err + end + + local ok, err = httpc:set_keepalive() + if not ok then ngx_log(ngx_ERR, err) end + + if res.headers["Content-Type"] == "application/json" then + body = cjson_decode(body) + else + return nil, { status = res.status, body = body, headers = res.headers} + end + + return body end @@ -303,34 +308,11 @@ local function purge_api(handler) local uris = request["uris"] for _, uri in ipairs(uris) do - local res = {} - local key_chain, err = key_chain_from_uri(handler, uri, request["headers"]) - - if not key_chain then - res["error"] = err - - else - if str_find(uri, "*", 1, true) ~= nil then - -- Schedule wildcard purge job - local job, err = schedule_purge_job(handler, purge_mode, key_chain) - if err then - res["error"] = "error" - else - res["result"] = "scheduled" - res["qless_job"] = job - end - - else - -- Purge the URI now - local ok, purge_result, job = purge(handler, purge_mode, key_chain) - res["qless_job"] = job - if ok == nil and purge_result then - res["error"] = purge_result - else - res["result"] = purge_result - end - - end + local res, err = send_purge_request(uri, purge_mode, request["headers"]) + if not res then + res = {["error"] = err} + elseif type(res) == "table" then + res["purge_mode"] = nil end 
api_results[uri] = res From bcf88409f5264b5e54e59faaffac8d415f597b2e Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 13 Sep 2017 16:02:16 +0100 Subject: [PATCH 53/90] Update tests for purge api --- t/02-integration/purge.t | 113 +++++++++++++++++++++++++++++---------- 1 file changed, 85 insertions(+), 28 deletions(-) diff --git a/t/02-integration/purge.t b/t/02-integration/purge.t index 90a22ddc..838c70aa 100644 --- a/t/02-integration/purge.t +++ b/t/02-integration/purge.t @@ -887,7 +887,7 @@ location /purge_api { location /purge_cached_14_prx { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { - require("ledge.state_machine").set_debug(false) + require("ledge.state_machine").set_debug(false) require("ledge").create_handler({ keep_cache_for = 3600, }):run() @@ -903,8 +903,8 @@ location /purge_cached_14 { [ "GET /purge_cached_14_prx?a=1", "GET /purge_cached_14_prx?a=2", -'PURGE /purge_api -{"uris": ["http://localhost/purge_cached_14?a=1", "http://localhost/purge_cached_14?a=2"]}', +qq(PURGE /purge_api +{"uris": ["http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_14_prx?a=1", "http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_14_prx?a=2"]}), "GET /purge_cached_14_prx?a=1", "GET /purge_cached_14_prx?a=2", ] @@ -918,10 +918,10 @@ location /purge_cached_14 { [ "TEST 14: 1", "TEST 14: 2", -"purge_mode: invalidate -result.http://localhost/purge_cached_14?a=1.result: purged -result.http://localhost/purge_cached_14?a=2.result: purged -", +qq(purge_mode: invalidate +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_14_prx?a=1.result: purged +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_14_prx?a=2.result: purged +), "TEST 14: 1", "TEST 14: 2", ] @@ -967,8 +967,8 @@ location /purge_cached_15 { [ "GET /purge_cached_15_prx?a=1", "GET /purge_cached_15_prx?a=2", -'PURGE /purge_api -{"uris": ["http://localhost/purge_cached_15?a*"]}', +qq(PURGE /purge_api +{"uris": ["http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx?a*"]}), ] --- more_headers eval [ @@ -979,14 +979,14 @@ location /purge_cached_15 { [ "TEST 15: 1", "TEST 15: 2", -"purge_mode: invalidate -result.http://localhost/purge_cached_15\\?a\\*.qless_job.jid: [a-f0-9]{32} -result.http://localhost/purge_cached_15\\?a\\*.qless_job.klass: ledge.jobs.purge -result.http://localhost/purge_cached_15\\?a\\*.qless_job.options.jid: [a-f0-9]{32} -result.http://localhost/purge_cached_15\\?a\\*.qless_job.options.priority: 5 -result.http://localhost/purge_cached_15\\?a\\*.qless_job.options.tags.1: purge -result.http://localhost/purge_cached_15\\?a\\*.result: scheduled -", +qq(purge_mode: invalidate +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_job.jid: [a-f0-9]{32} +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_job.klass: ledge.jobs.purge +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_job.options.jid: [a-f0-9]{32} +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_job.options.priority: 5 +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_job.options.tags.1: purge +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.result: scheduled +), ] --- response_headers_like eval [ @@ -1038,7 +1038,7 @@ location /purge_api { } } location /purge_cached_16_prx { - rewrite ^(.*)_prx$ $1 break; + rewrite ^(.*)_prx(.*)? 
$1$2 break; content_by_lua_block { require("ledge.state_machine").set_debug(false) require("ledge").create_handler({ @@ -1056,8 +1056,8 @@ location /purge_cached_16 { [ "GET /purge_cached_16_prx?a=1", "GET /purge_cached_16_prx?a=2", -'PURGE /purge_api -{"uris": ["http://localhost/purge*"]}', +qq(PURGE /purge_api +{"uris": ["http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx*"]}), ] --- more_headers eval [ @@ -1068,14 +1068,14 @@ location /purge_cached_16 { [ "TEST 16: 1", "TEST 16: 2", -"purge_mode: invalidate -result.http://localhost/purge\\*.qless_job.jid: [a-f0-9]{32} -result.http://localhost/purge\\*.qless_job.klass: ledge.jobs.purge -result.http://localhost/purge\\*.qless_job.options.jid: [a-f0-9]{32} -result.http://localhost/purge\\*.qless_job.options.priority: 5 -result.http://localhost/purge\\*.qless_job.options.tags.1: purge -result.http://localhost/purge\\*.result: scheduled -", +qq(purge_mode: invalidate +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_job.jid: [a-f0-9]{32} +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_job.klass: ledge.jobs.purge +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_job.options.jid: [a-f0-9]{32} +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_job.options.priority: 5 +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_job.options.tags.1: purge +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.result: scheduled +), ] --- response_headers_like eval [ @@ -1112,3 +1112,60 @@ location /purge_cached_16 { ["X-Cache: MISS from .+", "X-Cache: MISS from .+"] --- no_error_log [error] + +=== TEST 17: Purge API - bad request +--- http_config eval: $::HttpConfig +--- config +location /purge_api { + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler():run() + } + body_filter_by_lua_block { + ngx.arg[1] = format_json(ngx.arg[1]) + ngx.arg[2] = true + } +} + +--- request eval +[ +'PURGE /purge_api +{"uris": ["foobar"]}', + +'PURGE /purge_api +this is not valid json', + +'PURGE /purge_api +{"foo": ["bar"]}', + +'PURGE /purge_api +{"uris": []}', + +'PURGE /purge_api +{"uris": "not an array"}', + +'PURGE /purge_api +{"uris": ["http://www.example.com/"], "purge_mode": "foobar"}' +] +--- more_headers +Content-Type: Application/JSON +--- error_code eval +[200,400,400,400,400,400] +--- response_body eval +[ +"purge_mode: invalidate +result.foobar.error: bad uri: foobar +", +"error: Could not parse request body: Expected value but found invalid token at character 1 +", +"error: No URIs provided +", +"error: No URIs provided +", +"error: Field 'uris' must be an array +", +"error: Invalid purge_mode +", +] +--- no_error_log +[error] From 0396ec4f78e7b160cf689595271408dcd3090e62 Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 14 Sep 2017 11:05:12 +0100 Subject: [PATCH 54/90] Unit test for purge api --- t/01-unit/purge.t | 95 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) diff --git a/t/01-unit/purge.t b/t/01-unit/purge.t index 016fddd3..635e5eae 100644 --- a/t/01-unit/purge.t +++ b/t/01-unit/purge.t @@ -256,3 +256,98 @@ location /cache { ] --- no_error_log [error] + +=== TEST 4: purge api +--- http_config eval: $::HttpConfig +--- config +location /t { + rewrite ^ /cache4 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + local redis = require("ledge").create_redis_connection() + handler.redis = 
redis + + local storage = require("ledge").create_storage_connection( + handler.config.storage_driver, + handler.config.storage_driver_config + ) + handler.storage = storage + + -- Stub out response object + local response = { + status = 0, + body, + set_body = function(self, body) + self.body = body + end + } + handler.response = response + + local json_body = nil + + ngx.req.get_body_data = function() + return json_body + end + + local purge_api = require("ledge.purge").purge_api + + -- Nil body + local ok, err = purge_api(handler) + if response.body then ngx.log(ngx.DEBUG, response.body) end + assert(ok == false and response.body ~= nil, "nil body should return false") + response.body = nil + + -- Invalid json + json_body = [[ foobar ]] + local ok, err = purge_api(handler) + if response.body then ngx.log(ngx.DEBUG, response.body) end + assert(ok == false and response.body ~= nil, "nil body should return false") + response.body = nil + + -- Valid json, bad request + json_body = [[{"foo": "bar"}]] + local ok, err = purge_api(handler) + if response.body then ngx.log(ngx.DEBUG, response.body) end + assert(ok == false and response.body ~= nil, "nil body should return false") + response.body = nil + + -- Valid API request + json_body = require("cjson").encode({ + uris = { + "http://"..ngx.var.host..":"..ngx.var.server_port.."/cache4_prx" + }, + purge_mode = "delete" + }) + local ok, err = purge_api(handler) + if response.body then ngx.log(ngx.DEBUG, response.body) end + assert(ok == true and response.body ~= nil, "nil body should return false") + response.body = nil + + local res, err = redis:exists(handler:cache_key_chain().main) + if err then ngx_log(ngx.ERR, err) end + assert(res == 0, "Key should have been removed") + + } +} +location /cache4_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(false) + local handler = require("ledge").create_handler() + handler:run() + } +} + +location /cache { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=4600" + ngx.say("TEST 4") + } +} +--- request eval +[ +"GET /cache4_prx", +"GET /t" +] +--- no_error_log +[error] From 2fcded5541dcfda46d2b25bae1e09a5b8939708c Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 14 Sep 2017 11:53:07 +0100 Subject: [PATCH 55/90] Tests for passing through additional headers in purge API --- t/01-unit/purge.t | 32 ++++++++++++++++++++--- t/02-integration/purge.t | 56 ++++++++++++++++++++++++++++++++++++++++ 2 files changed, 84 insertions(+), 4 deletions(-) diff --git a/t/01-unit/purge.t b/t/01-unit/purge.t index 635e5eae..f9263bba 100644 --- a/t/01-unit/purge.t +++ b/t/01-unit/purge.t @@ -301,14 +301,14 @@ location /t { json_body = [[ foobar ]] local ok, err = purge_api(handler) if response.body then ngx.log(ngx.DEBUG, response.body) end - assert(ok == false and response.body ~= nil, "nil body should return false") + assert(ok == false and response.body ~= nil, "invalid json should return false") response.body = nil -- Valid json, bad request json_body = [[{"foo": "bar"}]] local ok, err = purge_api(handler) if response.body then ngx.log(ngx.DEBUG, response.body) end - assert(ok == false and response.body ~= nil, "nil body should return false") + assert(ok == false and response.body ~= nil, "bad request should return false") response.body = nil -- Valid API request @@ -316,17 +316,35 @@ location /t { uris = { "http://"..ngx.var.host..":"..ngx.var.server_port.."/cache4_prx" }, - purge_mode = "delete" + purge_mode = "delete", + headers = { + ["X-Test"] = 
"Test Header" + } }) local ok, err = purge_api(handler) if response.body then ngx.log(ngx.DEBUG, response.body) end - assert(ok == true and response.body ~= nil, "nil body should return false") + assert(ok == true and response.body ~= nil, "valid request should return true") response.body = nil local res, err = redis:exists(handler:cache_key_chain().main) if err then ngx_log(ngx.ERR, err) end assert(res == 0, "Key should have been removed") + -- Custom headers should be added to request + json_body = require("cjson").encode({ + uris = { + "http://"..ngx.var.host..":"..ngx.var.server_port.."/hdr_test" + }, + purge_mode = "delete", + headers = { + ["X-Test"] = "Test Header" + } + }) + local ok, err = purge_api(handler) + if response.body then ngx.log(ngx.DEBUG, response.body) end + local match = response.body:find("X-Test: Test Header") + assert(ok == true and match ~= nil, "custom header s should pass through") + response.body = nil } } location /cache4_prx { @@ -338,6 +356,12 @@ location /cache4_prx { } } +location /hdr_test { + content_by_lua_block { + ngx.print(ngx.DEBUG, "X-Test: ", ngx.req.get_headers()["X-Test"]) + } +} + location /cache { content_by_lua_block { ngx.header["Cache-Control"] = "max-age=4600" diff --git a/t/02-integration/purge.t b/t/02-integration/purge.t index 838c70aa..cc880718 100644 --- a/t/02-integration/purge.t +++ b/t/02-integration/purge.t @@ -1169,3 +1169,59 @@ result.foobar.error: bad uri: foobar ] --- no_error_log [error] + + +=== TEST 17: Purge API passes through purge_mode +--- http_config eval: $::HttpConfig +--- config +location /purge_api { + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler():run() + } + body_filter_by_lua_block { + ngx.arg[1] = format_json(ngx.arg[1]) + ngx.arg[2] = true + } +} +location /purge_cached_17_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(false) + require("ledge").create_handler({ + keep_cache_for = 3600, + }):run() + } +} +location /purge_cached_17 { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.print("TEST 17: ", ngx.req.get_uri_args()["a"]) + } +} +--- request eval +[ +"GET /purge_cached_17_prx?a=1", + +qq(PURGE /purge_api +{"purge_mode": "revalidate", "uris": ["http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx?a=1"]}), +] +--- more_headers eval +[ +"", "Content-Type: Application/JSON", +] +--- response_body_like eval +[ +"TEST 17: 1", + +qq(purge_mode: revalidate +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_job.jid: [a-f0-9]{32} +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_job.klass: ledge.jobs.revalidate +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_job.options.jid: [a-f0-9]{32} +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_job.options.priority: 4 +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_job.options.tags.1: revalidate +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.result: purged +), + +] +--- wait: 1 From 58d37df26f0d09c2a1ae0841b464122634ee74ef Mon Sep 17 00:00:00 2001 From: Hamish Date: Thu, 14 Sep 2017 12:14:24 +0100 Subject: [PATCH 56/90] Remove superfluous schedule_purge_job func --- lib/ledge/purge.lua | 10 +++------- 1 file changed, 3 insertions(+), 7 deletions(-) diff --git a/lib/ledge/purge.lua b/lib/ledge/purge.lua index 1e25b0a0..ffd8bf6b 100644 --- 
a/lib/ledge/purge.lua
+++ b/lib/ledge/purge.lua
@@ -153,8 +153,9 @@ end
 _M.purge = purge
 
 
-local function schedule_purge_job(handler, purge_mode, key_chain)
-    return put_background_job(
+local function purge_in_background(handler, purge_mode)
+    local key_chain = handler:cache_key_chain()
+    local job, err = put_background_job(
         "ledge_purge",
         "ledge.jobs.purge",
         {
@@ -170,11 +171,6 @@ local function schedule_purge_job(handler, purge_mode, key_chain)
             priority = 5,
         }
     )
-end
-
-
-local function purge_in_background(handler, purge_mode)
-    local job, err = schedule_purge_job(handler, purge_mode, handler:cache_key_chain())
     if err then ngx_log(ngx_ERR, err) end
 
     -- Create a JSON payload for the response

From ad6d68daac85dec3b5f83c244bdd06cafa72253e Mon Sep 17 00:00:00 2001
From: Hamish
Date: Thu, 14 Sep 2017 12:39:05 +0100
Subject: [PATCH 57/90] Doc: Purge API

---
 README.md | 32 ++++++++++++++++++++++++++++++++
 1 file changed, 32 insertions(+)

diff --git a/README.md b/README.md
index 18f4b5d2..21b50b68 100644
--- a/README.md
+++ b/README.md
@@ -315,6 +315,38 @@ limit_except GET POST PUT DELETE {
 
 [Back to TOC](#table-of-contents)
 
+
+### JSON API
+
+A JSON-based API is also available for purging multiple cache items at once.
+This requires a `PURGE` request with a `Content-Type` header set to `application/json` and a valid JSON request body.
+
+Valid parameters:
+ * `uris` - Array of URIs to purge, can contain wildcard URIs
+ * `purge_mode` - Same values as the `X-Purge` header in a normal purge request
+ * `headers` - Hash of additional headers to include in the purge request
+
+Returns a results hash keyed by URI, or a JSON error response.
+
+`$> curl -X PURGE -H "Content-Type: Application/JSON" http://cache.example.com/ -d '{"uris": ["http://www.example.com/1", "http://www.example.com/2"]}' | jq .`
+
+```json
+{
+    "purge_mode": "invalidate",
+    "result": {
+        "http://www.example.com/1": {
+            "result": "purged"
+        },
+        "http://www.example.com/2":{
+            "result": "nothing to purge"
+        }
+    }
+}
+```
+
+[Back to TOC](#table-of-contents)
+
+
 ### Wildcard purging
 
 Wildcard (\*) patterns are also supported in `PURGE` URIs, which will always return a status of `200` and a JSON body detailing a background job. Wildcard purges involve scanning the entire keyspace, and so can take a little while. See [keyspace\_scan\_count](#keyspace_scan_count) for tuning help.

From 3fd9e1e0e8fa6b6c9842f9d3e94354399deec3a0 Mon Sep 17 00:00:00 2001
From: Hamish
Date: Thu, 14 Sep 2017 13:31:11 +0100
Subject: [PATCH 58/90] Optimise wildcard detection in default_args

---
 lib/ledge/request.lua | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/lib/ledge/request.lua b/lib/ledge/request.lua
index 41072ac0..4aeb846b 100644
--- a/lib/ledge/request.lua
+++ b/lib/ledge/request.lua
@@ -4,7 +4,8 @@ local ngx_req_get_headers = ngx.req.get_headers
 local ngx_re_gsub = ngx.re.gsub
 local ngx_req_get_uri_args = ngx.req.get_uri_args
 local ngx_req_get_method = ngx.req.get_method
-local ngx_re_find = ngx.re.find
+
+local str_byte = string.byte
 
 local ngx_var = ngx.var
 
@@ -103,10 +104,10 @@ _M.args_sorted = args_sorted
 -- If you override the "args" field in a cache key spec with your own function,
 -- you'll want to use this to ensure wildcard purges operate correctly.
local function default_args() - if ngx_req_get_method() == "PURGE" then - if ngx_re_find(ngx_var.request_uri, "\\*$", "soj") then - return "*" - end + if ngx_req_get_method() == "PURGE" and + str_byte(ngx_var.request_uri, -1) == 42 + then + return "*" end return "" end From dd55ce1c32dc89d3d2a1c9ae3abfb83d2d736c7f Mon Sep 17 00:00:00 2001 From: James Loh Date: Thu, 21 Sep 2017 13:25:43 +1000 Subject: [PATCH 59/90] Move socket locations to new var for portability --- t/02-integration/ssl.t | 21 +++++++++++---------- t/02-integration/upstream.t | 5 +++-- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/t/02-integration/ssl.t b/t/02-integration/ssl.t index 910bb873..b861184f 100644 --- a/t/02-integration/ssl.t +++ b/t/02-integration/ssl.t @@ -8,6 +8,7 @@ $ENV{TEST_LEDGE_REDIS_DATABASE} |= 2; $ENV{TEST_LEDGE_REDIS_QLESS_DATABASE} |= 3; $ENV{TEST_COVERAGE} ||= 0; $ENV{TEST_NGINX_HTML_DIR} ||= html_dir(); +$ENV{TEST_NGINX_SOCKET_DIR} ||= $ENV{TEST_NGINX_HTML_DIR}; sub read_file { my $infile = shift; @@ -49,7 +50,7 @@ init_by_lua_block { local httpc_ssl = require("resty.http").new() local ok, err = - httpc_ssl:connect("unix:$ENV{TEST_NGINX_HTML_DIR}/nginx-ssl.sock") + httpc_ssl:connect("unix:$ENV{TEST_NGINX_SOCKET_DIR}/nginx-ssl.sock") if not ok then ngx.say("Unable to connect to sock, ", err) @@ -86,7 +87,7 @@ init_by_lua_block { }) require("ledge").set_handler_defaults({ - upstream_host = "unix:$ENV{TEST_NGINX_HTML_DIR}/nginx-ssl.sock", + upstream_host = "unix:$ENV{TEST_NGINX_SOCKET_DIR}/nginx-ssl.sock", upstream_use_ssl = true, upstream_ssl_server_name = "example.com", upstream_ssl_verify = true, @@ -113,7 +114,7 @@ __DATA__ === TEST 1: SSL works --- http_config eval: $::HttpConfig --- config -listen unix:$TEST_NGINX_HTML_DIR/nginx-ssl.sock ssl; +listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl; location /upstream_prx { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { @@ -144,7 +145,7 @@ OK https === TEST 2: Bad SSL name errors --- http_config eval: $::HttpConfig --- config -listen unix:$TEST_NGINX_HTML_DIR/nginx-ssl.sock ssl; +listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl; location /upstream_prx { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { @@ -176,7 +177,7 @@ ssl handshake failed === TEST 3: SSL verification can be disabled --- http_config eval: $::HttpConfig --- config -listen unix:$TEST_NGINX_HTML_DIR/nginx-ssl.sock ssl; +listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl; location /upstream_prx { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { @@ -210,7 +211,7 @@ OK https === TEST 4: Empty SSL name treated as nil --- http_config eval: $::HttpConfig --- config -listen unix:$TEST_NGINX_HTML_DIR/nginx-ssl.sock ssl; +listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl; location /upstream_prx { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { @@ -243,7 +244,7 @@ OK https === TEST 9a: Prime another key --- http_config eval: $::HttpConfig --- config -listen unix:$TEST_NGINX_HTML_DIR/nginx-ssl.sock ssl; +listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl; location /purge_ssl_entry { rewrite ^(.*)_entry$ $1_prx break; content_by_lua_block { @@ -285,7 +286,7 @@ TEST 9: primed === TEST 9b: Purge with X-Purge: revalidate --- http_config eval: $::HttpConfig --- config -listen unix:$TEST_NGINX_HTML_DIR/nginx-ssl.sock ssl; +listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl; location /purge_ssl_entry { rewrite ^(.*)_entry$ $1_prx break; content_by_lua_block { @@ -326,7 +327,7 @@ PURGE /purge_ssl_entry === TEST 9c: Confirm cache was revalidated --- 
http_config eval: $::HttpConfig --- config -listen unix:$TEST_NGINX_HTML_DIR/nginx-ssl.sock ssl; +listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl; location /purge_ssl_entry { rewrite ^(.*)_entry$ $1_prx break; content_by_lua_block { @@ -358,7 +359,7 @@ TEST 9 Revalidated: primed --- log_level: debug --- http_config eval: $::HttpConfig --- config -listen unix:$TEST_NGINX_HTML_DIR/nginx-ssl.sock ssl; +listen unix:$TEST_NGINX_SOCKET_DIR/nginx-ssl.sock ssl; location /esi_ssl_entry { rewrite ^(.*)_entry$ $1_prx break; content_by_lua_block { diff --git a/t/02-integration/upstream.t b/t/02-integration/upstream.t index 3bd8ebaa..35ca24e3 100644 --- a/t/02-integration/upstream.t +++ b/t/02-integration/upstream.t @@ -8,6 +8,7 @@ $ENV{TEST_LEDGE_REDIS_DATABASE} |= 2; $ENV{TEST_LEDGE_REDIS_QLESS_DATABASE} |= 3; $ENV{TEST_COVERAGE} ||= 0; $ENV{TEST_NGINX_HTML_DIR} ||= html_dir(); +$ENV{TEST_NGINX_SOCKET_DIR} ||= $ENV{TEST_NGINX_HTML_DIR}; our $HttpConfig = qq{ lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;../lua-resty-http/lib/?.lua;../lua-ffi-zlib/lib/?.lua;;"; @@ -116,12 +117,12 @@ upstream connection failed: === TEST 4: No port with unix socket works --- http_config eval: $::HttpConfig --- config -listen unix:$TEST_NGINX_HTML_DIR/nginx.sock; +listen unix:$TEST_NGINX_SOCKET_DIR/nginx.sock; location /upstream_prx { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { require("ledge").create_handler({ - upstream_host = "unix:$TEST_NGINX_HTML_DIR/nginx.sock", + upstream_host = "unix:$TEST_NGINX_SOCKET_DIR/nginx.sock", upstream_port = "", }):run() } From 0eb3d1cfd287ac0063b86e3cef3a9c7f212ed557 Mon Sep 17 00:00:00 2001 From: Hamish Date: Wed, 20 Sep 2017 14:57:06 +0100 Subject: [PATCH 60/90] Initial vary support --- lib/ledge.lua | 1 + lib/ledge/cache_key.lua | 221 ++++++++++- lib/ledge/handler.lua | 120 +++++- lib/ledge/jobs/purge.lua | 17 +- lib/ledge/purge.lua | 68 +++- lib/ledge/response.lua | 44 ++- lib/ledge/state_machine/actions.lua | 6 +- lib/ledge/state_machine/events.lua | 21 + lib/ledge/state_machine/states.lua | 59 ++- t/01-unit/cache_key.t | 454 ++++++++++++++++++++-- t/01-unit/handler.t | 16 +- t/01-unit/jobs.t | 11 +- t/01-unit/purge.t | 19 +- t/01-unit/response.t | 82 ++-- t/02-integration/cache.t | 24 +- t/02-integration/collapsed_forwarding.t | 110 ++++++ t/02-integration/esi.t | 4 +- t/02-integration/gc.t | 17 +- t/02-integration/memory_pressure.t | 2 + t/02-integration/purge.t | 174 ++++++--- t/02-integration/stale-while-revalidate.t | 1 + t/02-integration/vary.t | 339 ++++++++++++++++ 22 files changed, 1555 insertions(+), 255 deletions(-) create mode 100644 t/02-integration/vary.t diff --git a/lib/ledge.lua b/lib/ledge.lua index e4f90c9e..17907df6 100644 --- a/lib/ledge.lua +++ b/lib/ledge.lua @@ -101,6 +101,7 @@ local event_defaults = { before_upstream_connect = {}, before_upstream_request = {}, after_upstream_request = {}, + before_vary = {}, before_save = {}, before_save_revalidation_data = {}, before_serve = {}, diff --git a/lib/ledge/cache_key.lua b/lib/ledge/cache_key.lua index b1cdabbe..5ba3eb8a 100644 --- a/lib/ledge/cache_key.lua +++ b/lib/ledge/cache_key.lua @@ -4,6 +4,7 @@ local ipairs, next, type, pcall, setmetatable = local ngx_log = ngx.log local ngx_ERR = ngx.ERR local ngx_var = ngx.var +local ngx_null = ngx.null local tbl_insert = table.insert local tbl_concat = table.concat @@ -20,11 +21,11 @@ local _M = { } --- Generates the cache key. The default spec is: +-- Generates the root key. 
The default spec is: -- ledge:cache_obj:http:example.com:/about:p=3&q=searchterms -local function generate_cache_key(key_spec, max_args) +local function generate_root_key(key_spec, max_args) -- If key_spec is empty, provide a default - if not next(key_spec) then + if not key_spec or not next(key_spec) then key_spec = { "scheme", "host", @@ -72,34 +73,212 @@ local function generate_cache_key(key_spec, max_args) return tbl_concat(key, ":") end -_M.generate_cache_key = generate_cache_key +_M.generate_root_key = generate_root_key + + +-- Read the list of vary headers from redis +local function read_vary_spec(redis, root_key) + if not redis or not next(redis) then + return nil, "Redis required" + end + + if not root_key then + return nil, "Root key required" + end + + local res, err = redis:smembers(root_key.."::vary") + if err then + return nil, err + end + + return res +end +_M.read_vary_spec = read_vary_spec + + +local function vary_spec_compare(spec_a, spec_b) + if (not spec_a or not next(spec_a)) then + if (not spec_b or not next(spec_b)) then + -- both nil or empty + return false + else + -- spec_b is set but spec_a is empty + return true + end + + elseif (spec_b and next(spec_b)) then + -- TODO: looping here faster? + if tbl_concat(spec_b, ",") == tbl_concat(spec_a, ",") then + -- Current vary spec and new vary spec match + return false + end + end + + -- spec_a is a thing but spec_b is not + return true +end +_M.vary_spec_compare = vary_spec_compare + + +local function generate_vary_key(vary_spec, callback, headers) + local vary_key = {} + + if vary_spec and next(vary_spec) then + headers = headers or ngx.req.get_headers() + + for _, h in ipairs(vary_spec) do + local v = headers[h] + if type(v) == "table" then + v = tbl_concat(v, ",") + end + -- ngx.null represents a key which was in the spec + -- but has no matching request header + vary_key[h] = v or ngx_null + end + end + + -- Callback allows user to modify the key + if type(callback) == "function" then + callback(vary_key) + end + + if not next(vary_key) then + return "" + end + + -- Convert hash table to array + local t = {} + local i = 1 + for k,v in pairs(vary_key) do + if v ~= ngx_null then + t[i] = k + t[i+1] = v + i = i+2 + end + end + + return tbl_concat(t, ":") +end +_M.generate_vary_key = generate_vary_key -- Returns the key chain for all cache keys, except the body entity -local function key_chain(cache_key) - return setmetatable({ - -- hash: cache key metadata - main = cache_key .. "::main", +local function key_chain(root_key, vary_key, vary_spec) + if not root_key then + return nil, "Missing root key" + end + if not vary_key then + return nil, "Missing vary key" + end + if not vary_spec then + return nil, "Missing vary_spec" + end + + + local full_key = root_key .. "#" .. vary_key + + -- Apply metatable + local key_chain = setmetatable({ + -- set: headers upon which to vary + vary = root_key .. "::vary", - -- sorted set: current entities score with sizes - entities = cache_key .. "::entities", + -- set: representations for this root key + repset = root_key .. "::repset", - -- hash: response headers - headers = cache_key .. "::headers", + -- hash: cache key metadata + main = full_key .. "::main", - -- hash: request headers for revalidation - reval_params = cache_key .. "::reval_params", + -- sorted set: current entities score with sizes + entities = full_key .. "::entities", - -- hash: request params for revalidation - reval_req_headers = cache_key .. 
"::reval_req_headers", + -- hash: response headers + headers = full_key .. "::headers", - }, get_fixed_field_metatable_proxy({ - -- Hide "root" and "fetching_lock" from iterators. - root = cache_key, - fetching_lock = cache_key .. "::fetching", - })) + -- hash: request headers for revalidation + reval_params = full_key .. "::reval_params", + + -- hash: request params for revalidation + reval_req_headers = full_key .. "::reval_req_headers", + }, get_fixed_field_metatable_proxy({ + -- Hide "root", "full", the "vary_spec" and "fetching_lock" from iterators. + root = root_key, + full = full_key, + vary_spec = vary_spec, + fetching_lock = full_key .. "::fetching", + }) + ) + + return key_chain end _M.key_chain = key_chain +local function clean_repset(redis, repset) + -- Ensure representation set only includes keys which actually exist + -- This only runs on the slow path at save time so should be ok? + -- Prevents this set from growing perpetually if there are unique variations + -- TODO use scan here incase the set is pathologically huge? + -- Has to be able to run in a transaction so maybe a housekeeping qless job? + local clean = [[ + local repset = KEYS[1] + local reps = redis.call("SMEMBERS", repset) + for _, rep in ipairs(reps) do + if redis.call("EXISTS", rep.."::main") == 0 then + redis.call("SREM", repset, rep) + end + end + ]] + + local res, err = redis:eval(clean, 1, repset) + if not res or res == ngx_null then + return nil, err + end + + return true +end + + +local function save_key_chain(redis, key_chain, ttl) + if not redis then + return nil, "Redis required" + end + + if type(key_chain) ~= "table" or not next(key_chain) then + return nil, "Key chain required" + end + + if not tonumber(ttl) then + return nil, "TTL must be a number" + end + + -- Delete the current set of vary headers + local _, e = redis:del(key_chain.vary) + if e then ngx_log(ngx_ERR, e) end + + local vary_spec = key_chain.vary_spec + + if next(vary_spec) then + local _, e = redis:sadd(key_chain.vary, unpack(vary_spec)) + if e then ngx_log(ngx_ERR, e) end + + local _, e = redis:expire(key_chain.vary, ttl) + if e then ngx_log(ngx_ERR, e) end + end + + -- Add this representation to the set + local _, e = redis:sadd(key_chain.repset, key_chain.full) + if e then ngx_log(ngx_ERR, e) end + + local _, e = redis:expire(key_chain.repset, ttl) + if e then ngx_log(ngx_ERR, e) end + + + local _, e = clean_repset(redis, key_chain.repset) + if e then ngx_log(ngx_ERR, e) end + + return true +end +_M.save_key_chain = save_key_chain + + return _M diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index fd3a54dd..9ba07b86 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -31,8 +31,7 @@ local esi_capabilities = require("ledge.esi").esi_capabilities local append_server_port = require("ledge.util").append_server_port -local generate_cache_key = require("ledge.cache_key").generate_cache_key -local key_chain = require("ledge.cache_key").key_chain +local ledge_cache_key = require("ledge.cache_key") local req_relative_uri = require("ledge.request").relative_uri local req_full_uri = require("ledge.request").full_uri @@ -90,8 +89,11 @@ local function new(config, events) esi_process_enabled = false, -- private: - _cache_key = "", + _root_key = "", + _vary_key = ngx_null, -- empty string is not the same as not set + _vary_spec = ngx_null, -- empty table is not the same as not set _cache_key_chain = {}, + _publish_key = "", }, get_fixed_field_metatable_proxy(_M)) @@ -184,41 +186,110 @@ end _M.emit = emit -local function 
cache_key(self) - if self._cache_key == "" then - self._cache_key = generate_cache_key( +function _M.entity_id(self, key_chain) + if not key_chain or not key_chain.main then return nil end + + local entity_id, err = self.redis:hget(key_chain.main, "entity") + if not entity_id or entity_id == ngx_null then + return nil, err + end + + return entity_id +end + + +local function root_key(self) + if self._root_key == "" then + self._root_key = ledge_cache_key.generate_root_key( self.config.cache_key_spec, self.config.max_uri_args ) end - return self._cache_key + + return self._root_key end -_M.cache_key = cache_key +_M.root_key = root_key + + +local function vary_spec(self, root_key) + if self._vary_spec == ngx_null then + local vary_spec, err = ledge_cache_key.read_vary_spec( + self.redis, + root_key + ) + if not vary_spec then + ngx_log(ngx_ERR, "Read vary spec: ", err) + return false + end + self._vary_spec = vary_spec + end + + return self._vary_spec +end +_M.vary_spec = vary_spec + + +local function create_vary_key_callback(self) + return function(vary_key) + -- TODO: gunzip? + emit(self, "before_vary", vary_key) + end +end +_M.create_vary_key_callback = create_vary_key_callback + + +local function vary_key(self, vary_spec) + if self._vary_key == ngx_null then + self._vary_key = ledge_cache_key.generate_vary_key( + vary_spec, + create_vary_key_callback(self) + ) + end + + return self._vary_key +end +_M.vary_key = vary_key local function cache_key_chain(self) if not next(self._cache_key_chain) then - self._cache_key_chain = key_chain(cache_key(self)) + if not self.redis or not next(self.redis) then + ngx_log(ngx_ERR, "Cannot get cache key without a redis connection") + return nil + end + + local rk = root_key(self) + + local vs = vary_spec(self, rk) + + local vk = vary_key(self, vs) +ngx.log(ngx.DEBUG, "Vary Key: ", vk) + self._cache_key_chain = ledge_cache_key.key_chain(rk, vk, vs) end + return self._cache_key_chain end _M.cache_key_chain = cache_key_chain -function _M.entity_id(self, key_chain) - if not key_chain or not key_chain.main then return nil end +local function reset_cache_key(self) + self._root_key = "" + self._vary_key = ngx_null + self._vary_spec = ngx_null + self._cache_key_chain = {} +end +_M.reset_cache_key = reset_cache_key - local entity_id, err = self.redis:hget(key_chain.main, "entity") - if not entity_id or entity_id == ngx_null then - return nil, err - end - return entity_id +local function set_vary_spec(self, vary_spec) + reset_cache_key(self) + self._vary_spec = vary_spec end +_M.set_vary_spec = set_vary_spec local function read_from_cache(self) - local res, err = response.new(self.redis, cache_key_chain(self)) + local res, err = response.new(self) if not res then return nil, err end local ok, err = res:read() @@ -285,7 +356,7 @@ local hop_by_hop_headers = { -- Fetches a resource from the origin server. local function fetch_from_origin(self) - local res, err = response.new(self.redis, cache_key_chain(self)) + local res, err = response.new(self) if not res then return nil, err end local method = ngx['HTTP_' .. ngx_req_get_method()] @@ -591,6 +662,10 @@ local function save_to_cache(self, res) local redis = self.redis redis:watch(key_chain.main) +ngx.log(ngx.DEBUG, "Saving: ", key_chain.main) + + local repset_ttl = redis:ttl(key_chain.repset) + -- We'll need to mark the old entity for expiration shortly, as reads -- could still be in progress. We need to know the previous entity keys -- and the size. 
@@ -658,6 +733,15 @@ local function save_to_cache(self, res)
     redis:expire(key_chain.reval_params, expiry)
     redis:expire(key_chain.reval_req_headers, expiry)
 
+
+    -- repset and vary TTL should be the same as the longest living representation
+    if repset_ttl < expiry then
+        repset_ttl = expiry
+    end
+
+    -- Save updates to cache key
+    ledge_cache_key.save_key_chain(redis, key_chain, repset_ttl)
+
     -- If we have a body, we need to attach the storage writer
     -- NOTE: res.has_body is false for known bodyless repsonse types
     -- (e.g. HEAD) but may be true and of zero length (commonly 301 etc).

diff --git a/lib/ledge/jobs/purge.lua b/lib/ledge/jobs/purge.lua
index 433f719d..34f8320b 100644
--- a/lib/ledge/jobs/purge.lua
+++ b/lib/ledge/jobs/purge.lua
@@ -1,6 +1,4 @@
 local ipairs, tonumber = ipairs, tonumber
-local str_len = string.len
-local str_sub = string.sub
 local ngx_log = ngx.log
 local ngx_DEBUG = ngx.DEBUG
 local ngx_ERR = ngx.ERR
@@ -10,16 +8,11 @@ local purge = require("ledge.purge").purge
 local create_redis_slave_connection = require("ledge").create_redis_slave_connection
 local close_redis_connection = require("ledge").close_redis_connection
 
-local key_chain = require("ledge.cache_key").key_chain
-
 local _M = {
     _VERSION = "2.0.0",
 }
 
 
-local magic_len = -(str_len("::main") + 1)
-
-
 -- Scans the keyspace for keys which match, and expires them. We do this against
 -- the slave Redis instance if available.
 function _M.perform(job)
@@ -73,7 +66,7 @@ function _M.expire_pattern(cursor, job, handler)
     -- Scan using the "main" key to get a single key per cache entry
     local res, err = job.redis_slave:scan(
         cursor,
-        "MATCH", job.data.key_chain.main,
+        "MATCH", job.data.key_chain.repset,
        "COUNT", job.data.keyspace_scan_count
     )
 
@@ -81,13 +74,11 @@
        return nil, "SCAN error: " .. 
tostring(err) else for _,key in ipairs(res[2]) do - -- Strip the "main" suffix to find the cache key - local cache_key = str_sub(key, 1, magic_len) + ngx_log(ngx_DEBUG, "Purging set: ", key) - ngx_log(ngx_DEBUG, "Purging key: ", cache_key) - - local ok, err = purge(handler, job.data.purge_mode, key_chain(cache_key)) + local ok, err = purge(handler, job.data.purge_mode, key) if ok == nil and err then ngx_log(ngx_ERR, tostring(err)) end + end local cursor = tonumber(res[1]) diff --git a/lib/ledge/purge.lua b/lib/ledge/purge.lua index ffd8bf6b..154a6ce7 100644 --- a/lib/ledge/purge.lua +++ b/lib/ledge/purge.lua @@ -1,6 +1,8 @@ local pcall, tonumber, tostring, pairs = pcall, tonumber, tostring, pairs +local tbl_insert = table.insert + local ngx_var = ngx.var local ngx_log = ngx.log local ngx_ERR = ngx.ERR @@ -9,6 +11,10 @@ local ngx_time = ngx.time local ngx_md5 = ngx.md5 local ngx_HTTP_BAD_REQUEST = ngx.HTTP_BAD_REQUEST +local str_find = string.find +local str_sub = string.sub +local str_len = string.len + local http = require("resty.http") local cjson_encode = require("cjson").encode @@ -17,17 +23,21 @@ local cjson_decode = require("cjson").decode local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable local put_background_job = require("ledge.background").put_background_job +local key_chain = require("ledge.cache_key").key_chain + local _M = { _VERSION = "2.0.0", } +local repset_len = -(str_len("::repset")+1) + -local function create_purge_response(purge_mode, result, qless_job) +local function create_purge_response(purge_mode, result, qless_jobs) local d = { purge_mode = purge_mode, result = result, } - if qless_job then d.qless_job = qless_job end + if qless_jobs then d.qless_jobs = qless_jobs end local ok, json = pcall(cjson_encode, d) @@ -97,7 +107,6 @@ local function expire_keys(redis, storage, key_chain, entity_id) end _M.expire_keys = expire_keys - -- Purges the cache item according to purge_mode which defaults to "invalidate". -- If there's nothing to do we return false which results in a 404. 
-- @param table handler instance @@ -106,7 +115,7 @@ _M.expire_keys = expire_keys -- @return boolean success -- @return string message -- @return table qless job (for revalidate only) -local function purge(handler, purge_mode, key_chain) +local function _purge(handler, purge_mode, key_chain) local redis = handler.redis local storage = handler.storage @@ -150,6 +159,55 @@ local function purge(handler, purge_mode, key_chain) end end + + +local function key_chain_from_rep(root_key, full_key) + local pos = str_find(full_key, "#") + if pos == nil then + return nil + end + + -- Remove the root_key from the start + local vary_key = str_sub(full_key, pos+1) + + local vary_spec = {} -- We don't need this + + + + return key_chain(root_key, vary_key, vary_spec) +end + + +-- Purges all representatinos of the cache item +local function purge(handler, purge_mode, repset) + local representations, err = handler.redis:smembers(repset) + if err then + return nil, err + end + + if #representations == 0 then + return false, "nothing to purge", nil + end + + local root_key = str_sub(repset, 1, repset_len) + + local res_ok, res_message + local jobs = {} + + for _, rep in ipairs(representations) do + + ngx.log(ngx.DEBUG, "Purging representation: ", rep) + local ok, message, job = _purge(handler, purge_mode, key_chain_from_rep(root_key, rep)) + + if res_ok == nil or ok == true then + res_ok = ok + res_message = message + end + + tbl_insert(jobs, job) + end + return res_ok, res_message, jobs +end _M.purge = purge @@ -174,7 +232,7 @@ local function purge_in_background(handler, purge_mode) if err then ngx_log(ngx_ERR, err) end -- Create a JSON payload for the response - local res = create_purge_response(purge_mode, "scheduled", job) + local res = create_purge_response(purge_mode, "scheduled", {job}) handler.response:set_body(res) return true diff --git a/lib/ledge/response.lua b/lib/ledge/response.lua index 6533f72d..47baf8fb 100644 --- a/lib/ledge/response.lua +++ b/lib/ledge/response.lua @@ -47,14 +47,18 @@ end _M.empty_body_reader = empty_body_reader -function _M.new(redis, key_chain) - if not redis or not next(redis) or not key_chain or not next(key_chain) then - return nil, "redis and key_chain args required" +function _M.new(handler) + if not handler or not next(handler) then + return nil, "Handler is required" + end + + if not handler.redis or not next(handler.redis) then + return nil, "Handler has no redis connection" end return setmetatable({ - redis = redis, - key_chain = key_chain, -- Cache key chain + redis = handler.redis, + handler = handler, -- Cache key chain uri = "", status = 0, @@ -131,6 +135,10 @@ function _M.is_cacheable(self) return false end + if h["Vary"] == "*" then + return false + end + if self:ttl() > 0 then return true else @@ -187,7 +195,9 @@ end -- so we MISS and update the entry. 
function _M.read(self) local redis = self.redis - local key_chain = self.key_chain + local key_chain = self.handler:cache_key_chain() + + ngx.log(ngx.DEBUG, "Response Reading: ", key_chain.main) -- Read main metdata local cache_parts, err = redis:hgetall(key_chain.main) @@ -352,8 +362,8 @@ function _M.save(self, keep_cache_for) local redis = self.redis if not next(redis) then return nil, "no redis" end - local key_chain = self.key_chain - + local key_chain = self.handler:cache_key_chain() +ngx.log(ngx.DEBUG, "RESPONSE saving: ", key_chain.main) if not self.header["Date"] then self.header["Date"] = ngx_http_time(ngx_time()) end @@ -403,7 +413,8 @@ end function _M.set_and_save(self, field, value) local redis = self.redis - local ok, err = redis:hset(self.key_chain.main, field, tostring(value)) +ngx.log(ngx.DEBUG, "RESPONSE set and saving: ", self.handler:cache_key_chain().main) + local ok, err = redis:hset(self.handler:cache_key_chain().main, field, tostring(value)) if not ok then if err then ngx_log(ngx_ERR, err) end return nil, err @@ -432,4 +443,19 @@ function _M.add_warning(self, code, name) end +function _M.process_vary(self) + local vary_hdr = self.header["Vary"] + local vary_spec + + if vary_hdr then + if type(vary_hdr) == "table" then + vary_hdr = tbl_concat(vary_hdr,",") + end + vary_spec = str_split(vary_hdr, ",") + end + + return vary_spec +end + + return _M diff --git a/lib/ledge/state_machine/actions.lua b/lib/ledge/state_machine/actions.lua index 4e5dab31..55e2e783 100644 --- a/lib/ledge/state_machine/actions.lua +++ b/lib/ledge/state_machine/actions.lua @@ -172,7 +172,7 @@ return { end, set_json_response = function(handler) - local res = response.new(handler.redis, handler:cache_key_chain()) + local res = response.new(handler) res.header["Content-Type"] = "application/json" handler.response = res end, @@ -203,6 +203,10 @@ return { handler.output_buffers_enabled = false end, + reset_cache_key = function(handler) + handler:reset_cache_key() + end, + set_http_ok = function() ngx.status = ngx.HTTP_OK end, diff --git a/lib/ledge/state_machine/events.lua b/lib/ledge/state_machine/events.lua index 848cc565..655f4a09 100644 --- a/lib/ledge/state_machine/events.lua +++ b/lib/ledge/state_machine/events.lua @@ -156,6 +156,12 @@ return { { begin = "considering_stale_error" }, }, + -- We were waiting on another request, but the vary key changed + -- Might still match so check the cache again + collapsed_forwarding_vary_modified = { + { begin = "checking_cache", but_first = "reset_cache_key" }, + }, + -- We need to fetch and nothing is telling us we shouldn't. -- Collapsed forwarding is not enabled. can_fetch = { @@ -165,9 +171,18 @@ return { -- We've fetched and got a response status and headers. We should consider -- potential for ESI before doing anything else. response_fetched = { + { in_case = "vary_modified", begin = "considering_esi_scan" }, + { begin = "considering_vary" }, + }, + + vary_modified = { { begin = "considering_esi_scan" }, }, + vary_unmodified = { + { begin = "considering_esi_scan" } + }, + partial_response_fetched = { { begin = "considering_background_fetch" }, }, @@ -271,6 +286,12 @@ return { -- served stale), we can just exit. Otherwise go back through validationg -- in case we can 304 to the client. 
response_cacheable = { + { + after = "fetching_as_surrogate", + in_case = "vary_modified", + begin = "publishing_collapse_vary_modified", + but_first = "save_to_cache" + }, { after = "fetching_as_surrogate", begin = "publishing_collapse_success", diff --git a/lib/ledge/state_machine/states.lua b/lib/ledge/state_machine/states.lua index d61a7641..1a126f2c 100644 --- a/lib/ledge/state_machine/states.lua +++ b/lib/ledge/state_machine/states.lua @@ -38,6 +38,8 @@ local acquire_lock = require("ledge.collapse").acquire_lock local parse_content_range = require("ledge.range").parse_content_range +local vary_spec_compare = require("ledge.cache_key").vary_spec_compare + local _M = { -- luacheck: no unused _VERSION = "2.0.0", @@ -262,7 +264,7 @@ return { -- In which case we have missed the publish event local redis_subscriber = ledge.create_redis_connection() - local ok, err = redis_subscriber:subscribe(key_chain.root) + local ok, err = redis_subscriber:subscribe(lock_key) if not ok or ok == ngx_null then -- Failed to enter subscribe mode if err then ngx_log(ngx_ERR, err) end @@ -287,32 +289,63 @@ return { publishing_collapse_success = function(sm, handler) local redis = handler.redis - local key_chain = handler:cache_key_chain() - redis:del(key_chain.fetching_lock) -- Clear the lock - redis:publish(key_chain.root, "collapsed_response_ready") + local key = handler._publish_key + redis:del(key) -- Clear the lock + redis:publish(key, "collapsed_response_ready") + return sm:e "published" end, publishing_collapse_failure = function(sm, handler) local redis = handler.redis - local key_chain = handler:cache_key_chain() - redis:del(key_chain.fetching_lock) -- Clear the lock - redis:publish(key_chain.root, "collapsed_forwarding_failed") + local key = handler._publish_key + redis:del(key) -- Clear the lock + redis:publish(key, "collapsed_forwarding_failed") + return sm:e "published" end, publishing_collapse_upstream_error = function(sm, handler) local redis = handler.redis - local key_chain = handler:cache_key_chain() - redis:del(key_chain.fetching_lock) -- Clear the lock - redis:publish(key_chain.root, "collapsed_forwarding_upstream_error") + local key = handler._publish_key + redis:del(key) -- Clear the lock + redis:publish(key, "collapsed_forwarding_upstream_error") + + return sm:e "published" + end, + + publishing_collapse_vary_modified = function(sm, handler) + local redis = handler.redis + local key = handler._publish_key + redis:del(key) -- Clear the lock + redis:publish(key, "collapsed_forwarding_vary_modified") + return sm:e "published" end, - fetching_as_surrogate = function(sm) + fetching_as_surrogate = function(sm, handler) + -- stash these because we might change the key + -- depending on vary response + local key_chain = handler:cache_key_chain() + handler._publish_key = key_chain.fetching_lock + return sm:e "can_fetch" end, + considering_vary = function(sm, handler) + local new_spec = handler.response:process_vary() + local key_chain = handler:cache_key_chain() + + if vary_spec_compare(new_spec, key_chain.vary_spec) then + handler:set_vary_spec(new_spec) + return sm:e "vary_modified" + + else + return sm:e "vary_unmodified" + + end + end, + waiting_on_collapsed_forwarding_channel = function(sm, handler) local redis = handler.redis_subscriber @@ -333,6 +366,8 @@ return { return sm:e "collapsed_response_ready" elseif res[3] == "collapsed_forwarding_upstream_error" then return sm:e "collapsed_forwarding_upstream_error" + elseif res[3] == "collapsed_forwarding_vary_modified" then + return sm:e 
"collapsed_forwarding_vary_modified" else return sm:e "collapsed_forwarding_failed" end @@ -386,7 +421,7 @@ return { purging = function(sm, handler) local mode = purge_mode() - local ok, message, job = purge(handler, mode, handler:cache_key_chain()) + local ok, message, job = purge(handler, mode, handler:cache_key_chain().repset) local json = create_purge_response(mode, message, job) handler.response:set_body(json) diff --git a/t/01-unit/cache_key.t b/t/01-unit/cache_key.t index 169c5884..d0d28c12 100644 --- a/t/01-unit/cache_key.t +++ b/t/01-unit/cache_key.t @@ -39,19 +39,19 @@ run_tests(); __DATA__ -=== TEST 1: Cache key is the same with nil ngx.var.args and empty string +=== TEST 1: Root key is the same with nil ngx.var.args and empty string --- http_config eval: $::HttpConfig --- config location /t { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { - local key_chain = require("ledge").create_handler():cache_key_chain() - local key1 = key_chain.main + local ledge_cache_key = require("ledge.cache_key") + + local key1 = ledge_cache_key.generate_root_key(nil, nil) ngx.req.set_uri_args({}) - key_chain = require("ledge").create_handler():cache_key_chain() - local key2 = key_chain.main + local key2 = ledge_cache_key.generate_root_key(nil, nil) assert(key1 == key2, "key1 should equal key2") } @@ -63,51 +63,50 @@ GET /t [error] -=== TEST 2: Custom cache key spec +=== TEST 2: Custom key spec --- http_config eval: $::HttpConfig --- config location /t { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { - local handler = require("ledge").create_handler() + local ledge_cache_key = require("ledge.cache_key") + + local root_key = ledge_cache_key.generate_root_key(nil, nil) - assert(handler:cache_key() == "ledge:cache:http:localhost:/t:a=1", - "cache_key should be ledge:cache:http:localhost:/t:a=1") + assert(root_key == "ledge:cache:http:localhost:/t:a=1", + "root_key should be ledge:cache:http:localhost:/t:a=1") - local handler = require("ledge").create_handler({ - cache_key_spec = { + local cache_key_spec = { "scheme", "host", "port", "uri", "args", } - }) + local root_key = ledge_cache_key.generate_root_key(cache_key_spec, nil) - assert(handler:cache_key() == "ledge:cache:http:localhost:1984:/t:a=1", - "cache_key should be ledge:cache:http:localhost:1984:/t:a=1") + assert(root_key == "ledge:cache:http:localhost:1984:/t:a=1", + "root_key should be ledge:cache:http:localhost:1984:/t:a=1") - local handler = require("ledge").create_handler({ - cache_key_spec = { + local cache_key_spec = { "host", "uri", } - }) + local root_key = ledge_cache_key.generate_root_key(cache_key_spec, nil) - assert(handler:cache_key() == "ledge:cache:localhost:/t", - "cache_key should be ledge:cache:localhost:/t") + assert(root_key == "ledge:cache:localhost:/t", + "root_key should be ledge:cache:localhost:/t") - local handler = require("ledge").create_handler({ - cache_key_spec = { + local cache_key_spec = { "host", "uri", function() return "hello" end, } - }) + local root_key = ledge_cache_key.generate_root_key(cache_key_spec, nil) - assert(handler:cache_key() == "ledge:cache:localhost:/t:hello", - "cache_key should be ledge:cache:localhost:/t:hello") + assert(root_key == "ledge:cache:localhost:/t:hello", + "root_key should be ledge:cache:localhost:/t:hello") } } @@ -123,27 +122,27 @@ GET /t?a=1 location /t { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { - local handler = require("ledge").create_handler({ - cache_key_spec = { + local ledge_cache_key = require("ledge.cache_key") + + local cache_key_spec = { "host", 
"uri", function() return 123 end, } - }) + local root_key = ledge_cache_key.generate_root_key(cache_key_spec, nil) - assert(handler:cache_key() == "ledge:cache:localhost:/t", + assert(root_key == "ledge:cache:localhost:/t", "cache_key should be ledge:cache:localhost:/t") - local handler = require("ledge").create_handler({ - cache_key_spec = { + local cache_key_spec = { "host", "uri", function() return foo() end, } - }) + local root_key = ledge_cache_key.generate_root_key(cache_key_spec, nil) - assert(handler:cache_key() == "ledge:cache:localhost:/t", + assert(root_key == "ledge:cache:localhost:/t", "cache_key should be ledge:cache:localhost:/t") } } @@ -161,8 +160,10 @@ error in function supplied to cache_key_spec location /t { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { - local handler = require("ledge").create_handler() - ngx.print(handler:cache_key()) + local ledge_cache_key = require("ledge.cache_key") + + local root_key = ledge_cache_key.generate_root_key(nil, nil) + ngx.print(root_key) } } --- request eval @@ -199,10 +200,10 @@ location /t { location /t { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { - local handler = require("ledge").create_handler({ - max_uri_args = 2, - }) - ngx.print(handler:cache_key()) + local ledge_cache_key = require("ledge.cache_key") + + local root_key = ledge_cache_key.generate_root_key(nil, 2) + ngx.print(root_key) } } --- request eval @@ -229,8 +230,10 @@ location /t { location /t { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { - local handler = require("ledge").create_handler() - ngx.print(handler:cache_key()) + local ledge_cache_key = require("ledge.cache_key") + + local root_key = ledge_cache_key.generate_root_key(nil, nil) + ngx.print(root_key) } } --- request eval @@ -249,3 +252,374 @@ location /t { ] --- no_error_log [error] + +=== TEST 7: Compare vary spec +--- http_config eval: $::HttpConfig +--- config +location /t { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local vary_spec_compare = require("ledge.cache_key").vary_spec_compare + + -- Compare vary specs + local changed = vary_spec_compare({}, {}) + assert(changed == false, "empty table == empty table") + + local changed = vary_spec_compare({}, nil) + assert(changed == false, "empty table == nil") + + local changed = vary_spec_compare(nil, {}) + assert(changed == false, "nil == empty table") + + local changed = vary_spec_compare({"Foo"}, {"Foo"}) + assert(changed == false, "table == table") + + local changed = vary_spec_compare({"Foo", "Bar"}, {"Foo", "Bar"}) + assert(changed == false, "table == table (multi-values") + + + local changed = vary_spec_compare({"Foo"}, {}) + assert(changed == true, "table ~= empty table") + + local changed = vary_spec_compare({}, {"Foo"}) + assert(changed == true, "empty table ~= table") + + local changed = vary_spec_compare({"Foo"}, nil) + assert(changed == true, "table ~= nil") + + local changed = vary_spec_compare(nil, {"Foo"}) + assert(changed == true, "nil ~= table") + + local changed = vary_spec_compare({"Foo"}, {}) + assert(changed == true, "table ~= empty table") + } +} +--- request +GET /t +--- no_error_log +[error] + +=== TEST 8: Generate vary key +--- http_config eval: $::HttpConfig +--- config +location /t { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local function log(...) + ngx.log(ngx.DEBUG, ...) 
+ end + + local generate_vary_key = require("ledge.cache_key").generate_vary_key + + local called_flag = false + local callback = function(vary_key) + assert(type(vary_key) == "table", "callback receives vary key_table") + called_flag = true + end + + + -- Set headers + ngx.req.set_header("Foo", "Bar") + ngx.req.set_header("X-Test", "value") + + called_flag = false + + -- Empty/nil spec + local vary_key = generate_vary_key(nil, nil, nil) + log(vary_key) + assert(vary_key == "", "Nil spec generates empty string") + + local vary_key = generate_vary_key({}, nil, nil) + log(vary_key) + assert(vary_key == "", "Empty spec generates empty string") + + local vary_key = generate_vary_key(nil, callback, nil) + log(vary_key) + assert(called_flag == true, "Callback is called with nil spec") + assert(vary_key == "", "Nil vary spec not modified with noop function") + called_flag = false + + local vary_key = generate_vary_key({}, callback, nil) + log(vary_key) + assert(called_flag == true, "Callback is called with empty spec") + assert(vary_key == "", "Empty vary spec not modified with noop function") + called_flag = false + + + -- With spec + local vary_key = generate_vary_key({"Foo"}, callback, nil) + log(vary_key) + assert(called_flag == true, "Callback is called") + assert(vary_key == "Foo:Bar", "Vary spec not modified with noop function") + called_flag = false + + local vary_key = generate_vary_key({"Foo", "X-Test"}, callback, nil) + log(vary_key) + assert(called_flag == true, "Callback is called - multivalue spec") + assert(vary_key == "Foo:Bar:X-Test:value", "Vary spec not modified with noop function - multivalue spec") + called_flag = false + + + -- Active callback + callback = function(vary_key) + vary_key["MyVal"] = "Arbitrary" + end + local vary_key = generate_vary_key(nil, callback, nil) + log(vary_key) + assert(vary_key == "MyVal:Arbitrary", "Callback modifies key with nil spec") + + local vary_key = generate_vary_key({}, callback, nil) + log(vary_key) + assert(vary_key == "MyVal:Arbitrary", "Callback modifies key with empty spec") + + local vary_key = generate_vary_key({"Foo"}, callback, nil) + log(vary_key) + assert(vary_key == "Foo:Bar:MyVal:Arbitrary", "Callback appends key with spec") + + local vary_key = generate_vary_key({"Foo", "X-Test"}, callback, nil) + log(vary_key) + assert(vary_key == "MyVal:Arbitrary:Foo:Bar:X-Test:value", "Callback appends key with spec - multi values") + + + callback = function(vary_key) + vary_key["Foo"] = "Arbitrary" + end + + local vary_key = generate_vary_key({"Foo"}, callback, nil) + log(vary_key) + assert(vary_key == "Foo:Arbitrary", "Callback overrides key spec") + + + callback = function(vary_key) + vary_key["Foo"] = nil + end + + local vary_key = generate_vary_key({"Foo"}, callback, nil) + log(vary_key) + assert(vary_key == "", "Callback removes from key spec") + + + callback = function(vary_key) + assert(vary_key["X-None"] == ngx.null, "Spec values with missing headers appear as null") + end + + local vary_key = generate_vary_key({"X-None"}, callback, nil) + log(vary_key) + assert(vary_key == "", "Missing values do not appear in key") + + + local vary_key = generate_vary_key({"A", "B"}, nil, {["A"] = "123", ["B"] = "xyz"}) + log(vary_key) + assert(vary_key == "A:123:B:xyz", "Vary key from arbitrary headers") + + local vary_key = generate_vary_key({"Foo", "B"}, nil, {["Foo"] = "123", ["B"] = "xyz"}) + log(vary_key) + assert(vary_key == "Foo:123:B:xyz", "Arbitrary headers take precendence") + + } +} +--- request +GET /t +--- no_error_log +[error] + 
+=== TEST 9: Read vary spec +--- http_config eval: $::HttpConfig +--- config +location /t { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local redis = require("ledge").create_redis_connection() + local read_vary_spec = require("ledge.cache_key").read_vary_spec + + local root_key = "ledge:dummy:root:" + local vary_spec_key = root_key.."::vary" + + local spec, err = read_vary_spec() + assert(spec == nil and err ~= nil, "Redis required to read spec") + + local spec, err = read_vary_spec(redis) + assert(spec == nil and err ~= nil, "Root key required to read spec") + + redis.smembers = function() return nil, "Redis Error" end + local spec, err = read_vary_spec(redis, root_key) + assert(spec == nil and err == "Redis Error", "Redis error returned") + redis.smembers = require("resty.redis").smembers + + + local exists = redis:exists(vary_spec_key) + local spec, err = read_vary_spec(redis, root_key) + assert(type(spec) == "table" and #spec == 0 and exists == 0, "Missing key returns empty table") + + + redis:sadd(vary_spec_key, "Foo") + redis:sadd(vary_spec_key, "Bar") + local spec, err = read_vary_spec(redis, root_key) + assert(type(spec) == "table" and #spec == 2 and spec[1] == "Foo", "Spec returned") + + } +} +--- request +GET /t +--- no_error_log +[error] + + +=== TEST 10: Key chain +--- http_config eval: $::HttpConfig +--- config +location /t { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local key_chain = require("ledge.cache_key").key_chain + + local root_key = "ledge:dummy:root:" + local vary_key = "Foo:Bar:Test:value" + local vary_spec = {"Foo", "Test"} + + local expected = { + vary = "ledge:dummy:root:::vary", + repset = "ledge:dummy:root:::repset", + main = "ledge:dummy:root:#Foo:Bar:Test:value::main", + entities = "ledge:dummy:root:#Foo:Bar:Test:value::entities", + headers = "ledge:dummy:root:#Foo:Bar:Test:value::headers", + reval_params = "ledge:dummy:root:#Foo:Bar:Test:value::reval_params", + reval_req_headers = "ledge:dummy:root:#Foo:Bar:Test:value::reval_req_headers", + } + local extra = { + root = "ledge:dummy:root:", + full = "ledge:dummy:root:#Foo:Bar:Test:value", + fetching_lock = "ledge:dummy:root:#Foo:Bar:Test:value::fetching", + } + + local chain, err = key_chain() + assert(chain == nil and err ~= nil, "Root key required") + + local chain, err = key_chain(root_key) + assert(chain == nil and err ~= nil, "Vary key required") + + local chain, err = key_chain(root_key, vary_key) + assert(chain == nil and err ~= nil, "Vary spec required") + + + local chain, err = key_chain(root_key, vary_key, vary_spec) + assert(type(chain) == "table", "key chain returned") + + local i = 0 + for k,v in pairs(chain) do + i = i +1 + ngx.log(ngx.DEBUG, k, ": ", v, " == ", expected[k]) + assert(expected[k] == v, k.." chain mismatch") + end + assert(i == 7, "7 keys: "..i) + + for k,v in pairs(expected) do + ngx.log(ngx.DEBUG, k,": ", v, " == ", chain[k]) + assert(chain[k] == v, k.." expected mismatch") + end + + for k,v in pairs(extra) do + ngx.log(ngx.DEBUG, k,": ", v, " == ", chain[k]) + assert(chain[k] == v, k.." 
extra mismatch") + end + + for i,v in ipairs(vary_spec) do + assert(chain.vary_spec[i] == v, " Vary spec mismatch") + end + + } +} +--- request +GET /t +--- no_error_log +[error] + + +=== TEST 11: Save key chain +--- http_config eval: $::HttpConfig +--- config +location /t { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local redis = require("ledge").create_redis_connection() + local key_chain = require("ledge.cache_key").key_chain + local save_key_chain = require("ledge.cache_key").save_key_chain + + local root_key = "ledge:dummy:root:" + local vary_key = "Foo:Bar:Test:value" + local vary_spec = {"Foo", "Test"} + + + local chain = key_chain(root_key, vary_key, vary_spec) + + local ok, err = save_key_chain() + assert(ok == nil and err ~= nil, "Redis required") + + local ok, err = save_key_chain(redis) + assert(ok == nil and err ~= nil, "Key chain required") + + local ok, err = save_key_chain(redis, "foo") + assert(ok == nil and err ~= nil, "Key chain must be a table") + + local ok, err = save_key_chain(redis, {}) + assert(ok == nil and err ~= nil, "Key chain must not be empty") + + local ok, err = save_key_chain(redis, chain) + assert(ok == nil and err ~= nil, "TTL required") + + local ok, err = save_key_chain(redis, chain, "foo") + assert(ok == nil and err ~= nil, "TTL must be a number") + + + -- Create main key + redis:set(chain.main, "foobar") + + local ok, err = save_key_chain(redis, chain, 3600) + assert(ok == true , "returns true") + + assert(redis:exists(chain.vary) == 1, "Vary spec key created") + assert(redis:exists(chain.repset) == 1, "Repset created") + + local vs = redis:smembers(chain.vary) + for i, v in pairs(vs) do + assert(vary_spec[i] == v, "Vary spec saved: "..i) + end + + local vs = redis:smembers(chain.repset) + for _, v in pairs(vs) do + assert(v == chain.full, "Full key added to repset") + end + + assert(redis:ttl(chain.vary) == 3600, "Vary spec expiry set") + assert(redis:ttl(chain.repset) == 3600, "Repset expiry set") + + local vary_spec = {"Baz", "Qux"} + local chain = key_chain(root_key, vary_key, vary_spec) + local ok, err = save_key_chain(redis, chain, 3600) + + local vs = redis:smembers(chain.vary) + for i, v in pairs(vs) do + assert(vary_spec[i] == v, "Vary spec overwritten: "..i) + end + + redis:sadd(chain.repset, "dummy_value") + local ok, err = save_key_chain(redis, chain, 3600) + + local vs = redis:smembers(chain.repset) + for _, v in pairs(vs) do + assert(v ~= "dummy_value", "Missing keys are removed from repset") + end + + redis:del(chain.repset) + + local chain = key_chain(root_key, vary_key, {}) + local ok, err = save_key_chain(redis, chain, 3600) + assert(redis:exists(chain.vary ) == 0, "Empty spec removes vary key") + assert(redis:exists(chain.repset) == 1, "Empty spec still creates repset") + + } +} +--- request +GET /t +--- no_error_log +[error] diff --git a/t/01-unit/handler.t b/t/01-unit/handler.t index ffd9c354..131787b7 100644 --- a/t/01-unit/handler.t +++ b/t/01-unit/handler.t @@ -199,12 +199,19 @@ GET /t location /t { content_by_lua_block { local handler = require("ledge").create_handler() + local redis = require("ledge").create_redis_connection() + -- Set redis and read the cache key + handler.redis = redis + handler:cache_key_chain() + + -- Unset redis again + handler.redis = {} local res, err = handler:read_from_cache() assert(res == nil and err ~= nil, "read_from_cache should error with no redis connections") - handler.redis = require("ledge").create_redis_connection() + handler.redis = redis handler.storage = 
require("ledge").create_storage_connection( handler.config.storage_driver, handler.config.storage_driver_config @@ -268,6 +275,11 @@ location /t_prx { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { local handler = require("ledge").create_handler() + local redis = require("ledge").create_redis_connection() + + handler.redis = redis + handler:cache_key_chain() + handler.redis = {} local res, err = handler:save_to_cache() assert(res == nil and err ~= nil, @@ -277,7 +289,7 @@ location /t_prx { assert(res == nil and err ~= nil, "fetch_from_origin should error with no redis") - handler.redis = require("ledge").create_redis_connection() + handler.redis = redis handler.storage = require("ledge").create_storage_connection( handler.config.storage_driver, handler.config.storage_driver_config diff --git a/t/01-unit/jobs.t b/t/01-unit/jobs.t index c22f4c7e..a1241536 100644 --- a/t/01-unit/jobs.t +++ b/t/01-unit/jobs.t @@ -51,6 +51,9 @@ Prime cache then collect the entity location /t { rewrite ^ /cache break; content_by_lua_block { + local redis = require("ledge").create_redis_connection() + redis:flushall() -- Previous tests create some odd keys + local collect_entity = require("ledge.jobs.collect_entity") local handler = require("ledge").create_handler() @@ -125,6 +128,7 @@ location /t { local redis = require("ledge").create_redis_connection() local handler = require("ledge").create_handler() + handler.redis = redis local job = { @@ -305,13 +309,16 @@ location /t { rewrite ^ /cache break; content_by_lua_block { local purge_job = require("ledge.jobs.purge") + local redis = require("ledge").create_redis_connection() + local handler = require("ledge").create_handler() + handler.redis = redis local heartbeat_flag = false local job = { - redis = require("ledge").create_redis_connection(), + redis = redis, data = { - key_chain = { main = "*::main" }, + key_chain = { repset = "*::repset" }, keyspace_scan_count = 2, purge_mode = "invalidate", storage_driver = handler.config.storage_driver, diff --git a/t/01-unit/purge.t b/t/01-unit/purge.t index f9263bba..23fe6841 100644 --- a/t/01-unit/purge.t +++ b/t/01-unit/purge.t @@ -63,7 +63,7 @@ location /t { assert(data.result == "purged", "result should be purged") - assert(not data.qless_job, "qless_job should be nil") + assert(not data.qless_jobs, "qless_jobs should be nil") local json, err = create_purge_response("revalidate", "scheduled", { @@ -73,7 +73,7 @@ location /t { assert(not err, "err should be nil") - assert(data.qless_job.jid == "12345", + assert(data.qless_jobs.jid == "12345", "qless_job.jid should be '12345'") @@ -191,16 +191,17 @@ location /t { handler.storage = storage local key_chain = handler:cache_key_chain() + ngx.log(ngx.DEBUG, require("cjson").encode(key_chain)) local purge = require("ledge.purge").purge -- invalidate - error - local ok, err = purge(handler, "invalidate", {main = "bogus_key"}) + local ok, err = purge(handler, "invalidate", "bad_key") if err then ngx.log(ngx.DEBUG, err) end assert(ok == false and err == "nothing to purge", "purge should return false - bad key") -- invalidate - local ok, err = purge(handler, "invalidate", key_chain) + local ok, err = purge(handler, "invalidate", key_chain.repset) if err then ngx.log(ngx.DEBUG, err) end assert(ok == true and err == "purged", "purge should return true - purged") @@ -211,26 +212,26 @@ location /t { return "job" end - local ok, err, job = purge(handler, "revalidate", key_chain) + local ok, err, job = purge(handler, "revalidate", key_chain.repset) if err then ngx.log(ngx.DEBUG, 
err) end assert(ok == false and err == "already expired", "purge should return false - already expired") assert(reval_job == true, "revalidate should schedule job") - assert(job == "job", "revalidate should return the job "..tostring(job)) + assert(job[1] == "job", "revalidate should return the job "..tostring(job)) -- delete, error handler.delete_from_cache = function() return nil, "delete error" end - local ok, err = purge(handler, "delete", key_chain) + local ok, err = purge(handler, "delete", key_chain.repset) if err then ngx.log(ngx.DEBUG, err) end assert(ok == nil and err == "delete error", "purge should return nil, error") handler.delete_from_cache = require("ledge.handler").delete_from_cache -- delete - local ok, err = purge(handler, "delete", key_chain) + local ok, err = purge(handler, "delete", key_chain.repset) if err then ngx.log(ngx.DEBUG, err) end assert(ok == true and err == "deleted", "purge should return true - deleted") -- delete, missing - local ok, err = purge(handler, "delete", key_chain) + local ok, err = purge(handler, "delete", key_chain.repset) if err then ngx.log(ngx.DEBUG, err) end assert(ok == false and err == "nothing to purge", "purge should return false - nothing to purge") } diff --git a/t/01-unit/response.t b/t/01-unit/response.t index d9b21a37..c4d6fcfc 100644 --- a/t/01-unit/response.t +++ b/t/01-unit/response.t @@ -53,17 +53,21 @@ location /t { content_by_lua_block { local res, err = require("ledge.response").new() assert(not res, "new with empty args should return negatively") - assert(string.find(err, "redis and key_chain args required"), - "err should contain 'redis and key_chian args required'") + assert(err ~= nil, "err not nil") + + local res, err = require("ledge.response").new({}) + assert(not res, "new with empty handler should return negatively") + assert(err ~= nil, "err not nil") + + local res, err = require("ledge.response").new({redis = {} }) + assert(not res, "new with empty handler redis should return negatively") + assert(err ~= nil, "err not nil") local handler = require("ledge").create_handler() local redis = require("ledge").create_redis_connection() handler.redis = redis - local res, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res, err = require("ledge.response").new(handler) assert(res and not err, "response object should be created without error") @@ -90,10 +94,7 @@ location /t { local redis = require("ledge").create_redis_connection() handler.redis = redis - local res, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res, err = require("ledge.response").new(handler) read_body(res) -- will be empty @@ -119,10 +120,7 @@ location /t { handler.redis = redis require("ledge.response").set_debug(true) - local res, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res, err = require("ledge.response").new(handler) res:set_body("foo") @@ -174,10 +172,7 @@ location /t { handler.redis = redis require("ledge.response").set_debug(true) - local res, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res, err = require("ledge.response").new(handler) assert(not res:is_cacheable()) @@ -211,6 +206,12 @@ location /t { ["Cache-Control"] = "max-age=60, no-cache=X-Foo", } assert(res:is_cacheable()) + + res.header = { + ["Cache-Control"] = "max-age=60", + ["Vary"] = "*", + } + assert(not res:is_cacheable()) } } --- request @@ -229,10 +230,7 @@ location /t { handler.redis = 
redis require("ledge.response").set_debug(true) - local res, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res, err = require("ledge.response").new(handler) assert(res:ttl() == 0, "ttl should be 0") @@ -266,10 +264,7 @@ location /t { local redis = require("ledge").create_redis_connection() handler.redis = redis - local res, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res, err = require("ledge.response").new(handler) res.uri = "http://example.com" res.status = 200 @@ -278,10 +273,7 @@ location /t { assert(ok and not err, "res should save without err") - local res2, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res2, err = require("ledge.response").new(handler) local ok, err = res2:read() assert(ok and not err, "res2 should save without err") @@ -291,10 +283,7 @@ location /t { res2.header["X-Save-Me"] = "ok" res2:save(60) - local res3, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res3, err = require("ledge.response").new(handler) res3:read() assert(res3.header["X-Save-Me"] == "ok", "res3 headers") @@ -304,10 +293,7 @@ location /t { assert(res3.size == 99, "res3.size should be 99") - local res4, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res4, err = require("ledge.response").new(handler) res4:read() assert(res4.size == 99, "res3.size should be 99") @@ -330,10 +316,7 @@ location /t { local redis = require("ledge").create_redis_connection() handler.redis = redis - local res, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res, err = require("ledge.response").new(handler) -- Ensure entry exists res.uri = "http://example.com" @@ -382,10 +365,7 @@ location /t { local redis = require("ledge").create_redis_connection() handler.redis = redis - local res, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res, err = require("ledge.response").new(handler) res.uri = "http://example.com" res.status = 200 @@ -395,10 +375,7 @@ location /t { res:set_and_save("has_esi", "dummy") - local res2, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res2, err = require("ledge.response").new(handler) local ok, err = res2:read() assert(ok and not err, "res2 should save without err") @@ -409,10 +386,7 @@ location /t { res2.header["X-Save-Me"] = "ok" res2:save(60) - local res3, err = require("ledge.response").new( - handler.redis, - handler:cache_key_chain() - ) + local res3, err = require("ledge.response").new(handler) res3:read() assert(res3.header["X-Save-Me"] == "ok", "res3 headers") diff --git a/t/02-integration/cache.t b/t/02-integration/cache.t index 058f2b28..26fa98d1 100644 --- a/t/02-integration/cache.t +++ b/t/02-integration/cache.t @@ -320,8 +320,9 @@ location /cache_6 { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { local handler = require("ledge").create_handler() - local key_chain = handler:cache_key_chain() local redis = require("ledge").create_redis_connection() + handler.redis = redis + local key_chain = handler:cache_key_chain() local res, err = redis:keys(key_chain.root .. 
"*") if res then @@ -686,6 +687,7 @@ location /cache_15_prx { location /cache_15 { content_by_lua_block { ngx.header["Cache-Control"] = "max-age=60" + ngx.header["Vary"] = "Foobar" ngx.say("TEST 15") } } @@ -706,8 +708,9 @@ location /cache_15_prx { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { local handler = require("ledge").create_handler() - local key_chain = handler:cache_key_chain() local redis = require("ledge").create_redis_connection() + handler.redis = redis + local key_chain = handler:cache_key_chain() local res, err = redis:keys(key_chain.root .. "*") if res then @@ -727,8 +730,8 @@ location /cache_15_prx { GET /cache_15_prx --- timeout: 5 --- response_body -Numkeys: 5 -Numkeys: 5 +Numkeys: 7 +Numkeys: 7 --- no_error_log [error] @@ -834,8 +837,9 @@ location /cache_16_prx { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { local handler = require("ledge").create_handler() - local key_chain = handler:cache_key_chain() local redis = require("ledge").create_redis_connection() + handler.redis = redis + local key_chain = handler:cache_key_chain() -- Break entities redis:del(handler:cache_key_chain().entities) @@ -866,8 +870,9 @@ location /cache_17_modify { rewrite ^(.*)_modify$ $1 break; content_by_lua_block { local handler = require("ledge").create_handler() - local key = handler:cache_key_chain().main local redis = require("ledge").create_redis_connection() + handler.redis = redis + local key = handler:cache_key_chain().main -- Add new field to main key redis:hset(key, "bogus_field", "foobar") @@ -883,8 +888,9 @@ location /cache_17_check { rewrite ^(.*)_check$ $1 break; content_by_lua_block { local handler = require("ledge").create_handler() - local key = handler:cache_key_chain().main local redis = require("ledge").create_redis_connection() + handler.redis = redis + local key = handler:cache_key_chain().main -- Print result from redis local main, err = redis:hgetall(key) @@ -921,9 +927,9 @@ location /cache_17 { --- response_body eval [ "TEST 17", -"ledge:cache:http:localhost:/cache_17:::main bogus_field: foobar", +"ledge:cache:http:localhost:/cache_17:#::main bogus_field: foobar", "TEST 17", -"ledge:cache:http:localhost:/cache_17:::main bogus_field: nil", +"ledge:cache:http:localhost:/cache_17:#::main bogus_field: nil", ] --- no_error_log diff --git a/t/02-integration/collapsed_forwarding.t b/t/02-integration/collapsed_forwarding.t index a89b30af..fa414b12 100644 --- a/t/02-integration/collapsed_forwarding.t +++ b/t/02-integration/collapsed_forwarding.t @@ -426,3 +426,113 @@ GET /concurrent_collapsed --- response_body OK 1 OK 2 + +=== TEST 9: Collapsing with vary +--- http_config eval: $::HttpConfig +--- config +location /prime { + rewrite ^ /collapsed9 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(false) + require("ledge").create_handler({ + enable_collapsed_forwarding = true, + }):run() + } +} +location /concurrent_collapsed { + rewrite_by_lua_block { + ngx.shared.test:set("test_9", 0) + } + + echo_location_async "/collapsed9_prx"; + echo_sleep 0.05; + echo_location_async "/collapsed9_prx"; +} +location /collapsed9_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler({ + enable_collapsed_forwarding = true, + }):run() + } +} +location /collapsed { + content_by_lua_block { + ngx.sleep(0.1) + local counter = ngx.shared.test:incr("test_9", 1) + ngx.header["Vary"] = "X-Test" + ngx.header["Cache-Control"] = "max-age=3600" + ngx.print("OK " .. 
tostring(counter)) + } +} +--- request eval +[ +"GET /prime", "PURGE /prime", +"GET /concurrent_collapsed" +] +--- error_code eval +[200, 200, 200] +--- response_body_like eval +[ +"OK nil", ".+", +"OK 1OK 1" +] + +=== TEST 10: Collapsing with vary - change in spec +--- http_config eval: $::HttpConfig +--- config +location /prime { + rewrite ^ /collapsed10 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(false) + require("ledge").create_handler({ + enable_collapsed_forwarding = false, + }):run() + } +} +location /concurrent_collapsed { + echo_location_async "/collapsed10_prx"; + echo_sleep 0.05; + echo_location_async "/collapsed10_prx"; +} +location /collapsed10_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler({ + enable_collapsed_forwarding = true, + }):run() + } +} +location /collapsed { + content_by_lua_block { + ngx.sleep(0.1) + local counter = ngx.shared.test:incr("test_10", 1, 0) + if counter == 1 then + ngx.header["Vary"] = "X-Test" -- Prime with this + else + ngx.header["Vary"] = "X-Test2" + end + ngx.header["Cache-Control"] = "max-age=3600" + ngx.print("OK " .. tostring(counter)) + } +} +--- request eval +[ +"GET /prime", "PURGE /prime", +"GET /concurrent_collapsed" +] +--- more_headers eval +[ +"X-Test: Foo","X-Test: Foo", +"X-Test: Foo", +] +--- error_code eval +[200, 200, 200] +--- response_body_like eval +[ +"OK 1", ".+", +"OK 2OK 2" +] + diff --git a/t/02-integration/esi.t b/t/02-integration/esi.t index f27004b0..ea688bb8 100644 --- a/t/02-integration/esi.t +++ b/t/02-integration/esi.t @@ -2675,8 +2675,10 @@ location /esi_36_break { rewrite ^(.*)_break$ $1 break; content_by_lua_block { local handler = require("ledge").create_handler() - local key = handler:cache_key_chain().main local redis = require("ledge").create_redis_connection() + handler.redis = redis + local key = handler:cache_key_chain().main + -- Incorrectly set has_esi flag on main key redis:hset(key, "has_esi", "ESI/1.0") diff --git a/t/02-integration/gc.t b/t/02-integration/gc.t index 220bf07b..6dd3fe22 100644 --- a/t/02-integration/gc.t +++ b/t/02-integration/gc.t @@ -89,6 +89,8 @@ location /gc_b { content_by_lua_block { local redis = require("ledge").create_redis_connection() local handler = require("ledge").create_handler() + handler.redis = redis + local key_chain = handler:cache_key_chain() local num_entities, err = redis:scard(key_chain.entities) ngx.say(num_entities) @@ -115,6 +117,8 @@ location /gc { content_by_lua_block { local redis = require("ledge").create_redis_connection() local handler = require("ledge").create_handler() + handler.redis = redis + local key_chain = handler:cache_key_chain() local num_entities, err = redis:scard(key_chain.entities) ngx.say(num_entities) @@ -136,8 +140,10 @@ location /gc { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { local redis = require("ledge").create_redis_connection() - local key_chain = require("ledge").create_handler():cache_key_chain() - local res, err = redis:keys(key_chain.root .. "*") + local handler = require("ledge").create_handler() + handler.redis = redis + local key_chain = handler:cache_key_chain() + local res, err = redis:keys(key_chain.full .. 
"*") assert(not next(res), "res should be empty") } } @@ -177,6 +183,7 @@ location /gc_5_prx { content_by_lua_block { local redis = require("ledge").create_redis_connection() local handler = require("ledge").create_handler() + handler.redis = redis local key_chain = handler:cache_key_chain() redis:del(key_chain.headers) handler:run() @@ -202,8 +209,10 @@ location /gc_5 { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { local redis = require("ledge").create_redis_connection() - local key_chain = require("ledge").create_handler():cache_key_chain() - local res, err = redis:keys(key_chain.root .. "*") + local handler = require("ledge").create_handler() + handler.redis = redis + local key_chain = handler:cache_key_chain() + local res, err = redis:keys(key_chain.full .. "*") if res then ngx.say(#res) end diff --git a/t/02-integration/memory_pressure.t b/t/02-integration/memory_pressure.t index 606d6049..90e60ca5 100644 --- a/t/02-integration/memory_pressure.t +++ b/t/02-integration/memory_pressure.t @@ -84,6 +84,7 @@ location "/mem_pressure_1_prx" { content_by_lua_block { local redis = require("ledge").create_redis_connection() local handler = require("ledge").create_handler() + handler.redis = redis local key_chain = handler:cache_key_chain() local evict = ngx.req.get_uri_args()["key"] @@ -245,6 +246,7 @@ location "/mem_pressure_4_prx" { content_by_lua_block { local redis = require("ledge").create_redis_connection() local handler = require("ledge").create_handler() + handler.redis = redis local key_chain = handler:cache_key_chain() local evict = ngx.req.get_uri_args()["key"] diff --git a/t/02-integration/purge.t b/t/02-integration/purge.t index cc880718..8479e106 100644 --- a/t/02-integration/purge.t +++ b/t/02-integration/purge.t @@ -218,11 +218,11 @@ PURGE /purge_cached* [error] --- response_body_like purge_mode: invalidate -qless_job.jid: [a-f0-9]{32} -qless_job.klass: ledge.jobs.purge -qless_job.options.jid: [a-f0-9]{32} -qless_job.options.priority: 5 -qless_job.options.tags.1: purge +qless_jobs.1.jid: [a-f0-9]{32} +qless_jobs.1.klass: ledge.jobs.purge +qless_jobs.1.options.jid: [a-f0-9]{32} +qless_jobs.1.options.priority: 5 +qless_jobs.1.options.tags.1: purge result: scheduled --- error_code: 200 @@ -295,11 +295,11 @@ PURGE /purge_c* --- error_code: 200 --- response_body_like purge_mode: invalidate -qless_job.jid: [a-f0-9]{32} -qless_job.klass: ledge.jobs.purge -qless_job.options.jid: [a-f0-9]{32} -qless_job.options.priority: 5 -qless_job.options.tags.1: purge +qless_jobs.1.jid: [a-f0-9]{32} +qless_jobs.1.klass: ledge.jobs.purge +qless_jobs.1.options.jid: [a-f0-9]{32} +qless_jobs.1.options.priority: 5 +qless_jobs.1.options.tags.1: purge --- no_error_log [error] @@ -311,6 +311,7 @@ location /purge_cached { content_by_lua_block { local redis = require("ledge").create_redis_connection() local handler = require("ledge").create_handler() + handler.redis = redis local key_chain = handler:cache_key_chain() local num_entities, err = redis:scard(key_chain.entities) @@ -367,11 +368,11 @@ PURGE /purge_ca*ed [error] --- response_body_like purge_mode: invalidate -qless_job.jid: [a-f0-9]{32} -qless_job.klass: ledge.jobs.purge -qless_job.options.jid: [a-f0-9]{32} -qless_job.options.priority: 5 -qless_job.options.tags.1: purge +qless_jobs.1.jid: [a-f0-9]{32} +qless_jobs.1.klass: ledge.jobs.purge +qless_jobs.1.options.jid: [a-f0-9]{32} +qless_jobs.1.options.priority: 5 +qless_jobs.1.options.tags.1: purge result: scheduled --- error_code: 200 @@ -439,11 +440,11 @@ PURGE /purge_cached_8* [error] --- 
response_body_like purge_mode: invalidate -qless_job.jid: [a-f0-9]{32} -qless_job.klass: ledge.jobs.purge -qless_job.options.jid: [a-f0-9]{32} -qless_job.options.priority: 5 -qless_job.options.tags.1: purge +qless_jobs.1.jid: [a-f0-9]{32} +qless_jobs.1.klass: ledge.jobs.purge +qless_jobs.1.options.jid: [a-f0-9]{32} +qless_jobs.1.options.priority: 5 +qless_jobs.1.options.tags.1: purge result: scheduled --- error_code: 200 @@ -527,11 +528,11 @@ PURGE /purge_cached_9_prx [error] --- response_body_like purge_mode: revalidate -qless_job.jid: [a-f0-9]{32} -qless_job.klass: ledge.jobs.revalidate -qless_job.options.jid: [a-f0-9]{32} -qless_job.options.priority: 4 -qless_job.options.tags.1: revalidate +qless_jobs.1.jid: [a-f0-9]{32} +qless_jobs.1.klass: ledge.jobs.revalidate +qless_jobs.1.options.jid: [a-f0-9]{32} +qless_jobs.1.options.priority: 4 +qless_jobs.1.options.tags.1: revalidate result: purged --- error_code: 200 @@ -608,11 +609,11 @@ PURGE /purge_cached_10_prx?* [error] --- response_body_like purge_mode: revalidate -qless_job.jid: [a-f0-9]{32} -qless_job.klass: ledge.jobs.purge -qless_job.options.jid: [a-f0-9]{32} -qless_job.options.priority: 5 -qless_job.options.tags.1: purge +qless_jobs.1.jid: [a-f0-9]{32} +qless_jobs.1.klass: ledge.jobs.purge +qless_jobs.1.options.jid: [a-f0-9]{32} +qless_jobs.1.options.priority: 5 +qless_jobs.1.options.tags.1: purge result: scheduled --- error_log TEST 10 Revalidated: 1 primed @@ -746,11 +747,11 @@ PURGE /purge_cached_12_prx?* [error] --- response_body_like purge_mode: delete -qless_job.jid: [a-f0-9]{32} -qless_job.klass: ledge.jobs.purge -qless_job.options.jid: [a-f0-9]{32} -qless_job.options.priority: 5 -qless_job.options.tags.1: purge +qless_jobs.1.jid: [a-f0-9]{32} +qless_jobs.1.klass: ledge.jobs.purge +qless_jobs.1.options.jid: [a-f0-9]{32} +qless_jobs.1.options.priority: 5 +qless_jobs.1.options.tags.1: purge result: scheduled --- error_code: 200 @@ -794,6 +795,7 @@ location /purge_cached_13_prx { local redis = require("ledge").create_redis_connection() local handler = require("ledge").create_handler() + handler.redis = redis local key_chain = handler:cache_key_chain() if sabotage == "uri" then @@ -862,11 +864,11 @@ PURGE /purge_cached_13_prx?* TEST 13 Revalidated: 2 primed --- response_body_like purge_mode: revalidate -qless_job.jid: [a-f0-9]{32} -qless_job.klass: ledge.jobs.purge -qless_job.options.jid: [a-f0-9]{32} -qless_job.options.priority: 5 -qless_job.options.tags.1: purge +qless_jobs.1.jid: [a-f0-9]{32} +qless_jobs.1.klass: ledge.jobs.purge +qless_jobs.1.options.jid: [a-f0-9]{32} +qless_jobs.1.options.priority: 5 +qless_jobs.1.options.tags.1: purge result: scheduled --- error_code: 200 @@ -980,11 +982,11 @@ qq(PURGE /purge_api "TEST 15: 1", "TEST 15: 2", qq(purge_mode: invalidate -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_job.jid: [a-f0-9]{32} -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_job.klass: ledge.jobs.purge -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_job.options.jid: [a-f0-9]{32} -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_job.options.priority: 5 -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_job.options.tags.1: purge +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_jobs.1.jid: [a-f0-9]{32} +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_jobs.1.klass: ledge.jobs.purge 
+result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_jobs.1.options.jid: [a-f0-9]{32} +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_jobs.1.options.priority: 5 +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.qless_jobs.1.options.tags.1: purge result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_15_prx\\?a\\*.result: scheduled ), ] @@ -1069,11 +1071,11 @@ qq(PURGE /purge_api "TEST 16: 1", "TEST 16: 2", qq(purge_mode: invalidate -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_job.jid: [a-f0-9]{32} -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_job.klass: ledge.jobs.purge -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_job.options.jid: [a-f0-9]{32} -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_job.options.priority: 5 -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_job.options.tags.1: purge +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_jobs.1.jid: [a-f0-9]{32} +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_jobs.1.klass: ledge.jobs.purge +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_jobs.1.options.jid: [a-f0-9]{32} +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_jobs.1.options.priority: 5 +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.qless_jobs.1.options.tags.1: purge result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_16_prx\\*.result: scheduled ), ] @@ -1215,13 +1217,75 @@ qq(PURGE /purge_api "TEST 17: 1", qq(purge_mode: revalidate -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_job.jid: [a-f0-9]{32} -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_job.klass: ledge.jobs.revalidate -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_job.options.jid: [a-f0-9]{32} -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_job.options.priority: 4 -result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_job.options.tags.1: revalidate +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_jobs.1.jid: [a-f0-9]{32} +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_jobs.1.klass: ledge.jobs.revalidate +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_jobs.1.options.jid: [a-f0-9]{32} +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_jobs.1.options.priority: 4 +result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.qless_jobs.1.options.tags.1: revalidate result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.result: purged ), ] --- wait: 1 + + +=== TEST 18: Purge clears all representations +--- http_config eval: $::HttpConfig +--- config +location /purge { + rewrite ^ /purge_cached_18 break; + content_by_lua_block { + require("ledge").create_handler():run() + } + body_filter_by_lua_block { + ngx.arg[1] = format_json(ngx.arg[1]) + ngx.arg[2] = true + } +} +location /purge_cached_18_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge").create_handler({ + keep_cache_for = 3600, + }):run() + } +} +location /purge_cached_18 { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.header["Vary"] = "X-Test" + 
ngx.print("TEST 18: ", ngx.req.get_headers()["X-Test"]) + } +} +--- request eval +[ +"GET /purge_cached_18_prx", "GET /purge_cached_18_prx", + +"PURGE /purge", + +"GET /purge_cached_18_prx", "GET /purge_cached_18_prx", +] +--- more_headers eval +[ +"X-Test: abc", "X-Test: xyz", +"", +"X-Test: abc", "X-Test: xyz", +] +--- response_body eval +[ +"TEST 18: abc", "TEST 18: xyz", + +"purge_mode: invalidate +result: purged +", + +"TEST 18: abc", "TEST 18: xyz", +] +--- response_headers_like eval +[ +"X-Cache: MISS from .+", "X-Cache: MISS from .+", +"", +"X-Cache: MISS from .+", "X-Cache: MISS from .+", +] +--- no_error_log +[error] diff --git a/t/02-integration/stale-while-revalidate.t b/t/02-integration/stale-while-revalidate.t index e2721d65..91712d50 100644 --- a/t/02-integration/stale-while-revalidate.t +++ b/t/02-integration/stale-while-revalidate.t @@ -489,6 +489,7 @@ location /stale_reval_params_remove { content_by_lua_block { local redis = require("ledge").create_redis_connection() local handler = require("ledge").create_handler() + handler.redis = redis local key_chain = handler:cache_key_chain() redis:del(key_chain.reval_req_headers) diff --git a/t/02-integration/vary.t b/t/02-integration/vary.t new file mode 100644 index 00000000..a4bfe516 --- /dev/null +++ b/t/02-integration/vary.t @@ -0,0 +1,339 @@ +use Test::Nginx::Socket 'no_plan'; +use Cwd qw(cwd); + +my $pwd = cwd(); + +$ENV{TEST_NGINX_PORT} |= 1984; +$ENV{TEST_LEDGE_REDIS_DATABASE} |= 2; +$ENV{TEST_LEDGE_REDIS_QLESS_DATABASE} |= 3; +$ENV{TEST_COVERAGE} ||= 0; + +our $HttpConfig = qq{ +lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;../lua-resty-http/lib/?.lua;../lua-ffi-zlib/lib/?.lua;;"; + +lua_shared_dict ledge_test 1m; +lua_check_client_abort on; + +init_by_lua_block { + if $ENV{TEST_COVERAGE} == 1 then + require("luacov.runner").init() + end + + require("ledge").configure({ + redis_connector_params = { + db = $ENV{TEST_LEDGE_REDIS_DATABASE}, + }, + qless_db = $ENV{TEST_LEDGE_REDIS_QLESS_DATABASE}, + }) + + require("ledge").set_handler_defaults({ + upstream_host = "127.0.0.1", + upstream_port = $ENV{TEST_NGINX_PORT}, + storage_driver_config = { + redis_connector_params = { + db = $ENV{TEST_LEDGE_REDIS_DATABASE}, + }, + } + }) +} + +init_worker_by_lua_block { + require("ledge").create_worker():run() +} + +}; + +no_long_string(); +no_diff(); +run_tests(); + +__DATA__ + +=== TEST 1: Vary +--- http_config eval: $::HttpConfig +--- config +location /vary_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler():run() + } +} + +location /vary { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.header["Vary"] = "X-Test" + ngx.print("TEST 1: ", ngx.req.get_headers()["X-Test"]) + } +} +--- request eval +["GET /vary_prx", "GET /vary_prx", "GET /vary_prx", "GET /vary_prx"] +--- more_headers eval +[ +"X-Test: testval", +"X-Test: anotherval", +"", +"X-Test: testval", +] +--- response_headers_like eval +[ +"X-Cache: MISS from .*", +"X-Cache: MISS from .*", +"X-Cache: MISS from .*", +"X-Cache: HIT from .*", +] +--- response_body eval +[ +"TEST 1: testval", +"TEST 1: anotherval", +"TEST 1: nil", +"TEST 1: testval", +] +--- no_error_log +[error] + +=== TEST 2: Vary change +--- http_config eval: $::HttpConfig +--- config +location /vary_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + 
require("ledge").create_handler():run() + } +} + +location /vary { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.header["Vary"] = "X-Test2" + ngx.print("TEST 2: ", ngx.req.get_headers()["X-Test2"], " ", ngx.req.get_headers()["X-Test"]) + } +} +--- request eval +["GET /vary_prx", "GET /vary_prx", "GET /vary_prx", "GET /vary_prx"] +--- more_headers eval +[ +"X-Test: testval +Cache-Control: no-cache", + +"X-Test2: newval", +"", + +"X-Test: testval +X-Test2: newval", +] +--- response_headers_like eval +[ +"X-Cache: MISS from .*", +"X-Cache: MISS from .*", +"X-Cache: HIT from .*", +"X-Cache: HIT from .*", +] +--- response_body eval +[ +"TEST 2: nil testval", +"TEST 2: newval nil", +"TEST 2: nil testval", +"TEST 2: newval nil", +] +--- no_error_log +[error] + + +=== TEST 3: Cache update changes 1 representation +--- http_config eval: $::HttpConfig +--- config +location /vary3_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler():run() + } +} + +location /vary { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.header["Vary"] = "X-Test" + ngx.print("TEST 3: ", ngx.req.get_headers()["X-Test"]) + } +} +--- request eval +["GET /vary3_prx", "GET /vary3_prx", "GET /vary3_prx", "GET /vary3_prx"] +--- more_headers eval +[ +"X-Test: testval", +"X-Test: value2", + +"X-Test: testval +Cache-Control: no-cache", + +"X-Test: value2", +] +--- response_headers_like eval +[ +"X-Cache: MISS from .*", +"X-Cache: MISS from .*", +"X-Cache: MISS from .*", +"X-Cache: HIT from .*", +] +--- response_body eval +[ +"TEST 3: testval", +"TEST 3: value2", +"TEST 3: testval", +"TEST 3: value2", +] +--- no_error_log +[error] + + +=== TEST 4: Missing keys are cleaned from repset +--- http_config eval: $::HttpConfig +--- config +location /check { + rewrite ^ /vary break; + content_by_lua_block { + local redis = require("ledge").create_redis_connection() + local handler = require("ledge").create_handler() + handler.redis = redis + local res, err = redis:smembers(handler:cache_key_chain().repset) + + for _, v in ipairs(res) do + assert(v ~= "foobar", "Key should have been cleaned") + end + ngx.print("OK") + } +} +location /vary_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + local redis = require("ledge").create_redis_connection() + local handler = require("ledge").create_handler() + handler.redis = redis + local ok, err = redis:sadd(handler:cache_key_chain().repset, "foobar") + handler:run() + } +} + +location /vary { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.header["Vary"] = "X-Test" + ngx.print("TEST 4") + } +} +--- request eval +["GET /vary_prx", "GET /check"] +--- more_headers eval +["Cache-Control: no-cache",""] +--- response_body eval +[ +"TEST 4", +"OK" +] +--- no_error_log +[error] + + +=== TEST 5: Repset TTL maintained +--- http_config eval: $::HttpConfig +--- config +location = /check { + rewrite ^ /vary5 break; + + content_by_lua_block { + local redis = require("ledge").create_redis_connection() + local handler = require("ledge").create_handler() + handler.redis = redis + + local repset_ttl, err = redis:ttl(handler:cache_key_chain().repset) + if err then ngx.log(ngx.ERR, err) end + + local vary_ttl, err = redis:ttl(handler:cache_key_chain().vary) + if err then ngx.log(ngx.ERR, err) end + + local count = ngx.shared.ledge_test:get("test5") + + if count < 3 then + if 
(repset_ttl - handler.config.keep_cache_for) <= 300 + or (vary_ttl - handler.config.keep_cache_for) <= 300 then + ngx.print("FAIL") + ngx.log(ngx.ERR, + (repset_ttl - handler.config.keep_cache_for), + " ", + (vary_ttl - handler.config.keep_cache_for) + ) + else + ngx.print("OK") + end + else + + if (repset_ttl - handler.config.keep_cache_for) < 7200 + or (vary_ttl - handler.config.keep_cache_for) < 7200 then + ngx.print("FAIL 2") + ngx.log(ngx.ERR, + (repset_ttl - handler.config.keep_cache_for), + " ", + (vary_ttl - handler.config.keep_cache_for) + ) + else + ngx.print("OK") + end + end + } +} +location /vary5_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(false) + require("ledge").create_handler():run() + } +} + +location /vary { + content_by_lua_block { + local incr = ngx.shared.ledge_test:incr("test5", 1, 0) + if incr == 1 then + ngx.header["Cache-Control"] = "max-age=3600" + elseif incr == 3 then + ngx.header["Cache-Control"] = "max-age=7200" + else + ngx.header["Cache-Control"] = "max-age=300" + end + ngx.header["Vary"] = "X-Test" + ngx.print("TEST 5") + } +} +--- request eval +["GET /vary5_prx", "GET /vary5_prx", "GET /check", "GET /vary5_prx", "GET /check"] +--- more_headers eval +[ +"Cache-Control: no-cache", +"Cache-Control: no-cache", +"", +"Cache-Control: no-cache", +"", +] +--- response_headers_like eval +[ +"X-Cache: MISS from .*", +"X-Cache: MISS from .*", +"", +"X-Cache: MISS from .*", +"", +] +--- response_body eval +[ +"TEST 5", +"TEST 5", +"OK", +"TEST 5", +"OK", +] +--- no_error_log +[error] + From 989d647452f606bcf94ae352ac41692b1f0b8d35 Mon Sep 17 00:00:00 2001 From: Hamish Date: Fri, 22 Sep 2017 11:51:29 +0100 Subject: [PATCH 61/90] Tests for case-insensitive vary --- t/01-unit/cache_key.t | 36 +++++++++++++++++------------------ t/02-integration/vary.t | 42 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 60 insertions(+), 18 deletions(-) diff --git a/t/01-unit/cache_key.t b/t/01-unit/cache_key.t index d0d28c12..15aecbd9 100644 --- a/t/01-unit/cache_key.t +++ b/t/01-unit/cache_key.t @@ -350,13 +350,13 @@ location /t { local vary_key = generate_vary_key({"Foo"}, callback, nil) log(vary_key) assert(called_flag == true, "Callback is called") - assert(vary_key == "Foo:Bar", "Vary spec not modified with noop function") + assert(vary_key == "foo:bar", "Vary spec not modified with noop function") called_flag = false local vary_key = generate_vary_key({"Foo", "X-Test"}, callback, nil) log(vary_key) assert(called_flag == true, "Callback is called - multivalue spec") - assert(vary_key == "Foo:Bar:X-Test:value", "Vary spec not modified with noop function - multivalue spec") + assert(vary_key == "foo:bar:x-test:value", "Vary spec not modified with noop function - multivalue spec") called_flag = false @@ -366,19 +366,19 @@ location /t { end local vary_key = generate_vary_key(nil, callback, nil) log(vary_key) - assert(vary_key == "MyVal:Arbitrary", "Callback modifies key with nil spec") + assert(vary_key == "myval:arbitrary", "Callback modifies key with nil spec") local vary_key = generate_vary_key({}, callback, nil) log(vary_key) - assert(vary_key == "MyVal:Arbitrary", "Callback modifies key with empty spec") + assert(vary_key == "myval:arbitrary", "Callback modifies key with empty spec") local vary_key = generate_vary_key({"Foo"}, callback, nil) log(vary_key) - assert(vary_key == "Foo:Bar:MyVal:Arbitrary", "Callback appends key with spec") + assert(vary_key == "foo:bar:myval:arbitrary", "Callback appends key 
with spec") local vary_key = generate_vary_key({"Foo", "X-Test"}, callback, nil) log(vary_key) - assert(vary_key == "MyVal:Arbitrary:Foo:Bar:X-Test:value", "Callback appends key with spec - multi values") + assert(vary_key == "myval:arbitrary:foo:bar:x-test:value", "Callback appends key with spec - multi values") callback = function(vary_key) @@ -387,7 +387,7 @@ location /t { local vary_key = generate_vary_key({"Foo"}, callback, nil) log(vary_key) - assert(vary_key == "Foo:Arbitrary", "Callback overrides key spec") + assert(vary_key == "foo:arbitrary", "Callback overrides key spec") callback = function(vary_key) @@ -410,11 +410,11 @@ location /t { local vary_key = generate_vary_key({"A", "B"}, nil, {["A"] = "123", ["B"] = "xyz"}) log(vary_key) - assert(vary_key == "A:123:B:xyz", "Vary key from arbitrary headers") + assert(vary_key == "a:123:b:xyz", "Vary key from arbitrary headers") local vary_key = generate_vary_key({"Foo", "B"}, nil, {["Foo"] = "123", ["B"] = "xyz"}) log(vary_key) - assert(vary_key == "Foo:123:B:xyz", "Arbitrary headers take precendence") + assert(vary_key == "foo:123:b:xyz", "Arbitrary headers take precendence") } } @@ -474,22 +474,22 @@ location /t { local key_chain = require("ledge.cache_key").key_chain local root_key = "ledge:dummy:root:" - local vary_key = "Foo:Bar:Test:value" + local vary_key = "foo:bar:test:value" local vary_spec = {"Foo", "Test"} local expected = { vary = "ledge:dummy:root:::vary", repset = "ledge:dummy:root:::repset", - main = "ledge:dummy:root:#Foo:Bar:Test:value::main", - entities = "ledge:dummy:root:#Foo:Bar:Test:value::entities", - headers = "ledge:dummy:root:#Foo:Bar:Test:value::headers", - reval_params = "ledge:dummy:root:#Foo:Bar:Test:value::reval_params", - reval_req_headers = "ledge:dummy:root:#Foo:Bar:Test:value::reval_req_headers", + main = "ledge:dummy:root:#foo:bar:test:value::main", + entities = "ledge:dummy:root:#foo:bar:test:value::entities", + headers = "ledge:dummy:root:#foo:bar:test:value::headers", + reval_params = "ledge:dummy:root:#foo:bar:test:value::reval_params", + reval_req_headers = "ledge:dummy:root:#foo:bar:test:value::reval_req_headers", } local extra = { root = "ledge:dummy:root:", - full = "ledge:dummy:root:#Foo:Bar:Test:value", - fetching_lock = "ledge:dummy:root:#Foo:Bar:Test:value::fetching", + full = "ledge:dummy:root:#foo:bar:test:value", + fetching_lock = "ledge:dummy:root:#foo:bar:test:value::fetching", } local chain, err = key_chain() @@ -546,7 +546,7 @@ location /t { local save_key_chain = require("ledge.cache_key").save_key_chain local root_key = "ledge:dummy:root:" - local vary_key = "Foo:Bar:Test:value" + local vary_key = "foo:bar:test:value" local vary_spec = {"Foo", "Test"} diff --git a/t/02-integration/vary.t b/t/02-integration/vary.t index a4bfe516..d89d2fb6 100644 --- a/t/02-integration/vary.t +++ b/t/02-integration/vary.t @@ -337,3 +337,45 @@ location /vary { --- no_error_log [error] + +=== TEST 6: Vary - case insensitive +--- http_config eval: $::HttpConfig +--- config +location /vary6_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler():run() + } +} + +location /vary { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.header["Vary"] = "X-Test" + ngx.print("TEST 6: ", ngx.req.get_headers()["X-Test"]) + } +} +--- request eval +["GET /vary6_prx", "GET /vary6_prx", "GET /vary6_prx"] +--- more_headers eval +[ +"X-Test: testval", +"X-test: TestVAL", +"X-teSt: foobar", +] +--- 
response_headers_like eval +[ +"X-Cache: MISS from .*", +"X-Cache: HIT from .*", +"X-Cache: MISS from .*", +] +--- response_body eval +[ +"TEST 6: testval", +"TEST 6: testval", +"TEST 6: foobar", + +] +--- no_error_log +[error] From 3269d0162865731354864b1c00d6da4691927010 Mon Sep 17 00:00:00 2001 From: Hamish Date: Fri, 22 Sep 2017 11:52:36 +0100 Subject: [PATCH 62/90] Vary key is always lowercase --- lib/ledge/cache_key.lua | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/ledge/cache_key.lua b/lib/ledge/cache_key.lua index 5ba3eb8a..60342002 100644 --- a/lib/ledge/cache_key.lua +++ b/lib/ledge/cache_key.lua @@ -1,6 +1,8 @@ local ipairs, next, type, pcall, setmetatable = ipairs, next, type, pcall, setmetatable +local str_lower = string.lower + local ngx_log = ngx.log local ngx_ERR = ngx.ERR local ngx_var = ngx.var @@ -157,7 +159,7 @@ local function generate_vary_key(vary_spec, callback, headers) end end - return tbl_concat(t, ":") + return str_lower(tbl_concat(t, ":")) end _M.generate_vary_key = generate_vary_key From 2acebf5434fe933bfce4c3721240cb76e65d1f41 Mon Sep 17 00:00:00 2001 From: Hamish Date: Fri, 22 Sep 2017 12:00:47 +0100 Subject: [PATCH 63/90] Tests for vary header case insensitivity --- t/01-unit/cache_key.t | 3 +++ t/02-integration/vary.t | 9 ++++++++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/t/01-unit/cache_key.t b/t/01-unit/cache_key.t index 15aecbd9..f63c20e9 100644 --- a/t/01-unit/cache_key.t +++ b/t/01-unit/cache_key.t @@ -277,6 +277,9 @@ location /t { local changed = vary_spec_compare({"Foo", "Bar"}, {"Foo", "Bar"}) assert(changed == false, "table == table (multi-values") + local changed = vary_spec_compare({"Foo", "bar"}, {"foo", "Bar"}) + assert(changed == false, "table == table (case)") + local changed = vary_spec_compare({"Foo"}, {}) assert(changed == true, "table ~= empty table") diff --git a/t/02-integration/vary.t b/t/02-integration/vary.t index d89d2fb6..26f68af8 100644 --- a/t/02-integration/vary.t +++ b/t/02-integration/vary.t @@ -352,7 +352,14 @@ location /vary6_prx { location /vary { content_by_lua_block { ngx.header["Cache-Control"] = "max-age=3600" - ngx.header["Vary"] = "X-Test" + local incr = ngx.shared.ledge_test:incr("test6", 1, 0) + if incr == 1 then + ngx.header["Vary"] = "X-Test" + elseif incr == 2 then + ngx.header["Vary"] = "X-test" + else + ngx.header["Vary"] = "x-Test" + end ngx.print("TEST 6: ", ngx.req.get_headers()["X-Test"]) } } From b2103c7f5ff5da8e28e687b95bb286092daa3c19 Mon Sep 17 00:00:00 2001 From: Hamish Date: Fri, 22 Sep 2017 12:01:23 +0100 Subject: [PATCH 64/90] Always lowercase vary spec, vary_spec_compare is case insensitive --- lib/ledge/cache_key.lua | 2 +- lib/ledge/response.lua | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/lib/ledge/cache_key.lua b/lib/ledge/cache_key.lua index 60342002..4d373a79 100644 --- a/lib/ledge/cache_key.lua +++ b/lib/ledge/cache_key.lua @@ -110,7 +110,7 @@ local function vary_spec_compare(spec_a, spec_b) elseif (spec_b and next(spec_b)) then -- TODO: looping here faster? 
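+        -- Compare the two specs lowercased: Vary header field names are
+        -- case-insensitive, so "X-Test" and "x-test" describe the same spec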
- if tbl_concat(spec_b, ",") == tbl_concat(spec_a, ",") then + if str_lower(tbl_concat(spec_b, ",")) == str_lower(tbl_concat(spec_a, ",")) then -- Current vary spec and new vary spec match return false end diff --git a/lib/ledge/response.lua b/lib/ledge/response.lua index 47baf8fb..88b99c30 100644 --- a/lib/ledge/response.lua +++ b/lib/ledge/response.lua @@ -451,7 +451,7 @@ function _M.process_vary(self) if type(vary_hdr) == "table" then vary_hdr = tbl_concat(vary_hdr,",") end - vary_spec = str_split(vary_hdr, ",") + vary_spec = str_split(str_lower(vary_hdr), ",") end return vary_spec From 1966ec62d9c058abbdca78418ebe28f954890d7e Mon Sep 17 00:00:00 2001 From: Hamish Date: Fri, 22 Sep 2017 12:18:04 +0100 Subject: [PATCH 65/90] Tests for duplicate vary fields (case insensitive) --- t/01-unit/cache_key.t | 23 ++++++++++++++++++++--- 1 file changed, 20 insertions(+), 3 deletions(-) diff --git a/t/01-unit/cache_key.t b/t/01-unit/cache_key.t index f63c20e9..1a87820d 100644 --- a/t/01-unit/cache_key.t +++ b/t/01-unit/cache_key.t @@ -584,8 +584,14 @@ location /t { assert(redis:exists(chain.repset) == 1, "Repset created") local vs = redis:smembers(chain.vary) - for i, v in pairs(vs) do - assert(vary_spec[i] == v, "Vary spec saved: "..i) + for _, v in pairs(vs) do + local match = false + for _, v2 in ipairs(vary_spec) do + if v2:lower() == v then + match = true + end + end + assert(match, "Vary spec saved: ") end local vs = redis:smembers(chain.repset) @@ -602,7 +608,13 @@ location /t { local vs = redis:smembers(chain.vary) for i, v in pairs(vs) do - assert(vary_spec[i] == v, "Vary spec overwritten: "..i) + local match = false + for _, v2 in ipairs(vary_spec) do + if v2:lower() == v then + match = true + end + end + assert(match, "Vary spec overwritten") end redis:sadd(chain.repset, "dummy_value") @@ -620,6 +632,11 @@ location /t { assert(redis:exists(chain.vary ) == 0, "Empty spec removes vary key") assert(redis:exists(chain.repset) == 1, "Empty spec still creates repset") + + local chain = key_chain(root_key, vary_key, {"Foo", "Bar", "Foo", "bar"}) + local ok, err = save_key_chain(redis, chain, 3600) + assert(redis:scard(chain.vary) == 2, "Deduplicate vary fields") + } } --- request From 3df3d3d37cbd263ff5277840129cb763c69f916f Mon Sep 17 00:00:00 2001 From: Hamish Date: Fri, 22 Sep 2017 12:18:23 +0100 Subject: [PATCH 66/90] Always save vary_spec lowercase --- lib/ledge/cache_key.lua | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/lib/ledge/cache_key.lua b/lib/ledge/cache_key.lua index 4d373a79..ed5da6fe 100644 --- a/lib/ledge/cache_key.lua +++ b/lib/ledge/cache_key.lua @@ -260,6 +260,12 @@ local function save_key_chain(redis, key_chain, ttl) local vary_spec = key_chain.vary_spec if next(vary_spec) then + -- Always lowercase all vary fields + -- key_chain.vary is a set so will deduplicate for us + for i,v in ipairs(vary_spec) do + vary_spec[i] = str_lower(v) + end + local _, e = redis:sadd(key_chain.vary, unpack(vary_spec)) if e then ngx_log(ngx_ERR, e) end From 0f201b8ac9e04fafd4a796956b66e21ef003f35e Mon Sep 17 00:00:00 2001 From: Hamish Date: Fri, 22 Sep 2017 15:00:34 +0100 Subject: [PATCH 67/90] Test: vary field ordering --- t/02-integration/vary.t | 69 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 69 insertions(+) diff --git a/t/02-integration/vary.t b/t/02-integration/vary.t index 26f68af8..46644381 100644 --- a/t/02-integration/vary.t +++ b/t/02-integration/vary.t @@ -386,3 +386,72 @@ location /vary { ] --- no_error_log [error] + +=== TEST 7: Vary - sort order +--- ONLY 
+--- http_config eval: $::HttpConfig +--- config +location /vary7_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler():run() + } +} + +location /vary { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3700" + local incr = ngx.shared.ledge_test:incr("test7", 1, 0) + if incr == 1 then + -- Prime with 1 order + ngx.header["Vary"] = "X-Test, X-Test2, X-Test3" + elseif incr == 2 then + -- Second request, different order, different values in request + ngx.header["Vary"] = "X-Test3, X-test, X-test2" + else + -- 3rd request, same values as request1, different values in vary + ngx.header["Vary"] = "X-Test2, X-test3, X-Test" + end + ngx.print("TEST 7: ", incr) + } +} +--- request eval +["GET /vary7_prx", "GET /vary7_prx", "GET /vary7_prx"] +--- more_headers eval +[ +"X-Test: abc +X-Test2: 123 +X-Test3: xyz +", + +"X-Test: abc2 +X-Test2: 123b +X-Test3: xyz2 +", + +"X-Test: abc +X-Test2: 123 +X-Test3: xyz +", + +] +--- response_headers_like eval +[ +"X-Cache: MISS from .* +Vary: X-Test, X-Test2, X-Test3", + +"X-Cache: MISS from .* +Vary: X-Test3, X-test, X-test2", + +"X-Cache: HIT from .* +Vary: X-Test, X-Test2, X-Test3", +] +--- response_body eval +[ +"TEST 7: 1", +"TEST 7: 2", +"TEST 7: 1", +] +--- no_error_log +[error] From 1e6f7ad3efa59379e94ae87b882efe4a5f454a38 Mon Sep 17 00:00:00 2001 From: Hamish Date: Fri, 22 Sep 2017 15:32:45 +0100 Subject: [PATCH 68/90] Unit tests for process_vary --- t/01-unit/response.t | 81 +++++++++++++++++++++++++++++++++++++++++ t/02-integration/vary.t | 1 - 2 files changed, 81 insertions(+), 1 deletion(-) diff --git a/t/01-unit/response.t b/t/01-unit/response.t index c4d6fcfc..e3be1102 100644 --- a/t/01-unit/response.t +++ b/t/01-unit/response.t @@ -398,3 +398,84 @@ location /t { GET /t --- no_error_log [error] + + +=== TEST 9: Process Vary +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local encode = require("cjson").encode + local handler = require("ledge").create_handler() + local redis = require("ledge").create_redis_connection() + handler.redis = redis + + local res, err = require("ledge.response").new(handler) + + local tests = { + { + hdr = nil, + res = nil, + msg = "Nil header, nil spec", + }, + { + hdr = "", + res = nil, + msg = "Empty header, nil spec", + }, + { + hdr = "foo", + res = {"foo"}, + msg = "Single field", + }, + { + hdr = "Foo", + res = {"foo"}, + msg = "Single field - case", + }, + { + hdr = "fOo,bar,Baz", + res = {"bar","baz","foo"}, + msg = "Multi field", + }, + { + hdr = "fOo, bar , Baz", + res = {"bar","baz","foo"}, + msg = "Multi field - whitespace", + }, + { + hdr = "bar,baz,foo", + res = {"bar","baz","foo"}, + msg = "Multi field - sort1", + }, + { + hdr = "foo,baz,bar", + res = {"bar","baz","foo"}, + msg = "Multi field - sort2", + }, + } + + for _, t in ipairs(tests) do + res.header["Vary"] = t["hdr"] + local vary_spec = res:process_vary() + ngx.log(ngx.DEBUG, "-----------------------------------------------") + ngx.log(ngx.DEBUG, "header: ", t["hdr"]) + ngx.log(ngx.DEBUG, "spec: ", encode(vary_spec)) + ngx.log(ngx.DEBUG, "expected: ", encode(t["res"])) + + if type(t["res"]) == "table" then + for i, v in ipairs(t["res"]) do + assert(vary_spec[i] == v, t["msg"]) + end + else + + assert(res:process_vary() == t["res"], t["msg"]) + end + + end + } +} +--- request +GET /t +--- no_error_log +[error] diff --git a/t/02-integration/vary.t b/t/02-integration/vary.t index 46644381..d5f942e3 
100644 --- a/t/02-integration/vary.t +++ b/t/02-integration/vary.t @@ -388,7 +388,6 @@ location /vary { [error] === TEST 7: Vary - sort order ---- ONLY --- http_config eval: $::HttpConfig --- config location /vary7_prx { From 71a92b3a1abbea5fd76bd00e733334735617e51b Mon Sep 17 00:00:00 2001 From: Hamish Date: Fri, 22 Sep 2017 16:20:24 +0100 Subject: [PATCH 69/90] Unit tests for deduplicating process_vary() --- t/01-unit/response.t | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/t/01-unit/response.t b/t/01-unit/response.t index e3be1102..62918ea9 100644 --- a/t/01-unit/response.t +++ b/t/01-unit/response.t @@ -453,6 +453,12 @@ location /t { res = {"bar","baz","foo"}, msg = "Multi field - sort2", }, + + { + hdr = "foo, bar, bar, foo, baz", + res = {"bar","baz","foo"}, + msg = "De-duplicate", + }, } for _, t in ipairs(tests) do From 52c0292a69777f62f6f331fcec99299e199e3824 Mon Sep 17 00:00:00 2001 From: Hamish Date: Fri, 22 Sep 2017 16:20:47 +0100 Subject: [PATCH 70/90] Strip whitespace, sort and deduplicate vary header when parsing --- lib/ledge/handler.lua | 4 +--- lib/ledge/response.lua | 36 +++++++++++++++++++++++++++++++----- 2 files changed, 32 insertions(+), 8 deletions(-) diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index 9ba07b86..cce9bdd0 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -263,7 +263,7 @@ local function cache_key_chain(self) local vs = vary_spec(self, rk) local vk = vary_key(self, vs) -ngx.log(ngx.DEBUG, "Vary Key: ", vk) + self._cache_key_chain = ledge_cache_key.key_chain(rk, vk, vs) end @@ -662,8 +662,6 @@ local function save_to_cache(self, res) local redis = self.redis redis:watch(key_chain.main) -ngx.log(ngx.DEBUG, "Saving: ", key_chain.main) - local repset_ttl = redis:ttl(key_chain.repset) -- We'll need to mark the old entity for expiration shortly, as reads diff --git a/lib/ledge/response.lua b/lib/ledge/response.lua index 88b99c30..d9ea1a28 100644 --- a/lib/ledge/response.lua +++ b/lib/ledge/response.lua @@ -7,6 +7,7 @@ local pairs, setmetatable, tonumber, unpack = local tbl_getn = table.getn local tbl_insert = table.insert local tbl_concat = table.concat +local tbl_sort = table.sort local str_lower = string.lower local str_find = string.find @@ -25,6 +26,7 @@ local ngx_parse_http_time = ngx.parse_http_time local ngx_http_time = ngx.http_time local ngx_time = ngx.time local ngx_re_find = ngx.re.find +local ngx_re_gsub = ngx.re.gsub local header_has_directive = require("ledge.header_util").header_has_directive @@ -197,8 +199,6 @@ function _M.read(self) local redis = self.redis local key_chain = self.handler:cache_key_chain() - ngx.log(ngx.DEBUG, "Response Reading: ", key_chain.main) - -- Read main metdata local cache_parts, err = redis:hgetall(key_chain.main) if not cache_parts or cache_parts == ngx_null then @@ -363,7 +363,7 @@ function _M.save(self, keep_cache_for) local redis = self.redis if not next(redis) then return nil, "no redis" end local key_chain = self.handler:cache_key_chain() -ngx.log(ngx.DEBUG, "RESPONSE saving: ", key_chain.main) + if not self.header["Date"] then self.header["Date"] = ngx_http_time(ngx_time()) end @@ -413,7 +413,7 @@ end function _M.set_and_save(self, field, value) local redis = self.redis -ngx.log(ngx.DEBUG, "RESPONSE set and saving: ", self.handler:cache_key_chain().main) + local ok, err = redis:hset(self.handler:cache_key_chain().main, field, tostring(value)) if not ok then if err then ngx_log(ngx_ERR, err) end @@ -443,15 +443,41 @@ function _M.add_warning(self, code, name) end +local 
function deduplicate_table(table) + -- Can't have duplicates if there's 1 or 0 entries! + if #table <= 1 then + return table + end + + local new_table = {} + local unique = {} + local i = 0 + + for _,v in ipairs(table) do + if not unique[v] then + unique[v] = true + i = i +1 + new_table[i] = v + end + end + + return new_table +end + + function _M.process_vary(self) local vary_hdr = self.header["Vary"] local vary_spec - if vary_hdr then + if vary_hdr and vary_hdr ~= "" then if type(vary_hdr) == "table" then vary_hdr = tbl_concat(vary_hdr,",") end + -- Remove whitespace around commas + vary_hdr = ngx_re_gsub(vary_hdr, [[\s*,\s*]], ",", "oj") vary_spec = str_split(str_lower(vary_hdr), ",") + tbl_sort(vary_spec) + vary_spec = deduplicate_table(vary_spec) end return vary_spec From 5ba236eabc74e63cec640b0011cd16697211b761 Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 25 Sep 2017 11:46:21 +0100 Subject: [PATCH 71/90] Rename response.process_vary -> response.parse_vary_header --- lib/ledge/response.lua | 2 +- lib/ledge/state_machine/states.lua | 2 +- t/01-unit/response.t | 6 ++-- t/02-integration/vary.t | 45 ++++++++++++++++++++++++++++++ 4 files changed, 50 insertions(+), 5 deletions(-) diff --git a/lib/ledge/response.lua b/lib/ledge/response.lua index d9ea1a28..b70c8fb8 100644 --- a/lib/ledge/response.lua +++ b/lib/ledge/response.lua @@ -465,7 +465,7 @@ local function deduplicate_table(table) end -function _M.process_vary(self) +function _M.parse_vary_header(self) local vary_hdr = self.header["Vary"] local vary_spec diff --git a/lib/ledge/state_machine/states.lua b/lib/ledge/state_machine/states.lua index 1a126f2c..c9e5e40f 100644 --- a/lib/ledge/state_machine/states.lua +++ b/lib/ledge/state_machine/states.lua @@ -333,7 +333,7 @@ return { end, considering_vary = function(sm, handler) - local new_spec = handler.response:process_vary() + local new_spec = handler.response:parse_vary_header() local key_chain = handler:cache_key_chain() if vary_spec_compare(new_spec, key_chain.vary_spec) then diff --git a/t/01-unit/response.t b/t/01-unit/response.t index 62918ea9..acdd12fe 100644 --- a/t/01-unit/response.t +++ b/t/01-unit/response.t @@ -400,7 +400,7 @@ GET /t [error] -=== TEST 9: Process Vary +=== TEST 9: Parse vary header --- http_config eval: $::HttpConfig --- config location /t { @@ -463,7 +463,7 @@ location /t { for _, t in ipairs(tests) do res.header["Vary"] = t["hdr"] - local vary_spec = res:process_vary() + local vary_spec = res:parse_vary_header() ngx.log(ngx.DEBUG, "-----------------------------------------------") ngx.log(ngx.DEBUG, "header: ", t["hdr"]) ngx.log(ngx.DEBUG, "spec: ", encode(vary_spec)) @@ -475,7 +475,7 @@ location /t { end else - assert(res:process_vary() == t["res"], t["msg"]) + assert(vary_spec == t["res"], t["msg"]) end end diff --git a/t/02-integration/vary.t b/t/02-integration/vary.t index d5f942e3..ea94ed34 100644 --- a/t/02-integration/vary.t +++ b/t/02-integration/vary.t @@ -454,3 +454,48 @@ Vary: X-Test, X-Test2, X-Test3", ] --- no_error_log [error] + + +=== TEST 8: Vary event +--- http_config eval: $::HttpConfig +--- config +location /vary_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler():run() + } +} + +location /vary { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.header["Vary"] = "X-Test" + ngx.print("TEST 1: ", ngx.req.get_headers()["X-Test"]) + } +} +--- request eval +["GET /vary_prx", "GET /vary_prx", "GET /vary_prx", "GET 
/vary_prx"] +--- more_headers eval +[ +"X-Test: testval", +"X-Test: anotherval", +"", +"X-Test: testval", +] +--- response_headers_like eval +[ +"X-Cache: MISS from .*", +"X-Cache: MISS from .*", +"X-Cache: MISS from .*", +"X-Cache: HIT from .*", +] +--- response_body eval +[ +"TEST 1: testval", +"TEST 1: anotherval", +"TEST 1: nil", +"TEST 1: testval", +] +--- no_error_log +[error] From 3dbd76d866db9d1e4e9524261850ac2d85773362 Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 25 Sep 2017 12:22:43 +0100 Subject: [PATCH 72/90] Tests for before_vary_selection event --- lib/ledge.lua | 2 +- lib/ledge/handler.lua | 2 +- t/01-unit/events.t | 3 +++ t/02-integration/vary.t | 53 +++++++++++++++++++++++++++++------------ 4 files changed, 43 insertions(+), 17 deletions(-) diff --git a/lib/ledge.lua b/lib/ledge.lua index 17907df6..2e32f582 100644 --- a/lib/ledge.lua +++ b/lib/ledge.lua @@ -101,7 +101,7 @@ local event_defaults = { before_upstream_connect = {}, before_upstream_request = {}, after_upstream_request = {}, - before_vary = {}, + before_vary_selection = {}, before_save = {}, before_save_revalidation_data = {}, before_serve = {}, diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index cce9bdd0..c7e303f8 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -232,7 +232,7 @@ _M.vary_spec = vary_spec local function create_vary_key_callback(self) return function(vary_key) -- TODO: gunzip? - emit(self, "before_vary", vary_key) + emit(self, "before_vary_selection", vary_key) end end _M.create_vary_key_callback = create_vary_key_callback diff --git a/t/01-unit/events.t b/t/01-unit/events.t index 99a711f3..d251d4e4 100644 --- a/t/01-unit/events.t +++ b/t/01-unit/events.t @@ -52,6 +52,7 @@ location /t { handler:bind("before_save_revalidation_data", say) handler:bind("before_serve", say) handler:bind("before_esi_include_request", say) + handler:bind("before_vary_selection", say) handler:emit("after_cache_read", "after_cache_read") handler:emit("before_upstream_request", "before_upstream_request") @@ -60,6 +61,7 @@ location /t { handler:emit("before_save_revalidation_data", "before_save_revalidation_data") handler:emit("before_serve", "before_serve") handler:emit("before_esi_include_request", "before_esi_include_request") + handler:emit("before_vary_selection", "before_vary_selection") } } @@ -73,6 +75,7 @@ before_save before_save_revalidation_data before_serve before_esi_include_request +before_vary_selection --- error_log no such event: non_event diff --git a/t/02-integration/vary.t b/t/02-integration/vary.t index ea94ed34..4ae0bd13 100644 --- a/t/02-integration/vary.t +++ b/t/02-integration/vary.t @@ -456,46 +456,69 @@ Vary: X-Test, X-Test2, X-Test3", [error] -=== TEST 8: Vary event +=== TEST 8: Vary event hook --- http_config eval: $::HttpConfig --- config -location /vary_prx { +location /vary8_prx { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { require("ledge.state_machine").set_debug(true) - require("ledge").create_handler():run() + local handler = require("ledge").create_handler() + + handler:bind("before_vary_selection", function(vary_key) + local x_vary = ngx.req.get_headers()["X-Vary"] + -- Do nothing if noop set + if x_vary ~= "noop" then + vary_key["x-test"] = nil + vary_key["X-Test2"] = x_vary + end + ngx.log(ngx.DEBUG, "Vary Key: ", require("cjson").encode(vary_key)) + end) + + handler:run() } } location /vary { content_by_lua_block { + local incr = ngx.shared.ledge_test:incr("test8", 1, 0) ngx.header["Cache-Control"] = "max-age=3600" - ngx.header["Vary"] = 
"X-Test" - ngx.print("TEST 1: ", ngx.req.get_headers()["X-Test"]) + if ngx.req.get_headers()["X-Vary"] == "noop" then + ngx.header["Vary"] = "X-Test2" + else + ngx.header["Vary"] = "X-Test" + end + ngx.print("TEST 8: ", incr) } } --- request eval -["GET /vary_prx", "GET /vary_prx", "GET /vary_prx", "GET /vary_prx"] +["GET /vary8_prx", "GET /vary8_prx", "GET /vary8_prx", "GET /vary8_prx"] --- more_headers eval [ -"X-Test: testval", -"X-Test: anotherval", -"", -"X-Test: testval", +"X-Test: testval +X-Vary: foo", + +"X-Test: anotherval +X-Vary: foo", + +"X-Test2: bar +X-Vary: noop", + +"X-Vary: bar", ] --- response_headers_like eval [ "X-Cache: MISS from .*", -"X-Cache: MISS from .*", +"X-Cache: HIT from .*", "X-Cache: MISS from .*", "X-Cache: HIT from .*", ] --- response_body eval [ -"TEST 1: testval", -"TEST 1: anotherval", -"TEST 1: nil", -"TEST 1: testval", +"TEST 8: 1", +"TEST 8: 1", +"TEST 8: 2", +"TEST 8: 2", ] --- no_error_log [error] From e063602b458b69e1cc8d0fe783c486b62d3d3e4c Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 25 Sep 2017 12:42:39 +0100 Subject: [PATCH 73/90] Test: collapsed forwarding, vary header changes and does not match child request --- t/02-integration/collapsed_forwarding.t | 67 +++++++++++++++++++++++++ 1 file changed, 67 insertions(+) diff --git a/t/02-integration/collapsed_forwarding.t b/t/02-integration/collapsed_forwarding.t index fa414b12..dd6c96ec 100644 --- a/t/02-integration/collapsed_forwarding.t +++ b/t/02-integration/collapsed_forwarding.t @@ -536,3 +536,70 @@ location /collapsed { "OK 2OK 2" ] +=== TEST 11: Collapsing with vary - change in spec mismatch +--- http_config eval: $::HttpConfig +--- config +location /prime { + rewrite ^ /collapsed11 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(false) + require("ledge").create_handler({ + enable_collapsed_forwarding = false, + }):run() + } +} +location /concurrent_collapsed { + echo_subrequest_async GET "/collapsed11a_prx"; # X-Test: Foo + echo_sleep 0.05; + echo_subrequest_async GET "/collapsed11b_prx"; # X-Test: Foo, X-Test2: Bar +} +location /collapsed11a_prx { + rewrite ^(.*)a_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler({ + enable_collapsed_forwarding = true, + }):run() + } +} +location /collapsed11b_prx { + rewrite ^(.*)b_prx$ $1 break; + content_by_lua_block { + ngx.req.set_header("X-Test2", "Bar") + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler({ + enable_collapsed_forwarding = true, + }):run() + } +} +location /collapsed { + content_by_lua_block { + ngx.sleep(0.1) + local counter = ngx.shared.test:incr("test_11", 1, 0) + if counter == 1 then + ngx.header["Vary"] = "X-Test" -- Prime with this + else + ngx.header["Vary"] = "X-Test2" + end + ngx.header["Cache-Control"] = "max-age=3600" + ngx.print("OK " .. 
tostring(counter)) + } +} +--- request eval +[ +"GET /prime", "PURGE /prime", +"GET /concurrent_collapsed" +] +--- more_headers eval +[ +"X-Test: Foo","X-Test: Foo", +"X-Test: Foo", +] +--- error_code eval +[200, 200, 200] +--- response_body_like eval +[ +"OK 1", ".+", +"OK 2OK 3" +] + From 26bdb3a7b10c5497d7a072cc470132a895c3dc9f Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 25 Sep 2017 15:34:11 +0100 Subject: [PATCH 74/90] Vary purge tweaks --- lib/ledge/cache_key.lua | 16 +++++++++------- lib/ledge/handler.lua | 5 ++++- lib/ledge/jobs/purge.lua | 2 +- lib/ledge/purge.lua | 25 +++++++++++++++---------- 4 files changed, 29 insertions(+), 19 deletions(-) diff --git a/lib/ledge/cache_key.lua b/lib/ledge/cache_key.lua index ed5da6fe..03d1ab28 100644 --- a/lib/ledge/cache_key.lua +++ b/lib/ledge/cache_key.lua @@ -181,12 +181,6 @@ local function key_chain(root_key, vary_key, vary_spec) -- Apply metatable local key_chain = setmetatable({ - -- set: headers upon which to vary - vary = root_key .. "::vary", - - -- set: representations for this root key - repset = root_key .. "::repset", - -- hash: cache key metadata main = full_key .. "::main", @@ -202,10 +196,18 @@ local function key_chain(root_key, vary_key, vary_spec) -- hash: request params for revalidation reval_req_headers = full_key .. "::reval_req_headers", }, get_fixed_field_metatable_proxy({ - -- Hide "root", "full", the "vary_spec" and "fetching_lock" from iterators. + -- Hide these keys from iterators + + -- These are not actual keys but useful to keep around root = root_key, full = full_key, vary_spec = vary_spec, + + -- set: headers upon which to vary + vary = root_key .. "::vary", + -- set: representations for this root key + repset = root_key .. "::repset", + -- Lock key for collapsed forwarding fetching_lock = full_key .. 
"::fetching", }) ) diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index c7e303f8..f8782ab0 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -218,7 +218,7 @@ local function vary_spec(self, root_key) root_key ) if not vary_spec then - ngx_log(ngx_ERR, "Read vary spec: ", err) + ngx_log(ngx_ERR, "Failed to read vary spec: ", err) return false end self._vary_spec = vary_spec @@ -846,6 +846,9 @@ local function delete_from_cache(self, key_chain, entity_id) ) end + -- Remove this representation from the repset + redis:srem(key_chain.repset, key_chain.full) + -- Delete everything in the keychain local keys = {} for _, v in pairs(key_chain) do diff --git a/lib/ledge/jobs/purge.lua b/lib/ledge/jobs/purge.lua index 34f8320b..f8501b15 100644 --- a/lib/ledge/jobs/purge.lua +++ b/lib/ledge/jobs/purge.lua @@ -66,7 +66,7 @@ function _M.expire_pattern(cursor, job, handler) -- Scan using the "main" key to get a single key per cache entry local res, err = job.redis_slave:scan( cursor, - "MATCH", job.data.key_chain.repset, + "MATCH", job.data.repset, "COUNT", job.data.keyspace_scan_count ) diff --git a/lib/ledge/purge.lua b/lib/ledge/purge.lua index 154a6ce7..a2828eb2 100644 --- a/lib/ledge/purge.lua +++ b/lib/ledge/purge.lua @@ -89,7 +89,6 @@ local function expire_keys(redis, storage, key_chain, entity_id) if e then ngx_log(ngx_ERR, e) end -- Set new TTLs for all keys in the key chain - key_chain.fetching_lock = nil -- this looks after itself for _,key in pairs(key_chain) do local _, e = redis:expire(key, new_ttl) if e then ngx_log(ngx_ERR, e) end @@ -161,7 +160,7 @@ local function _purge(handler, purge_mode, key_chain) end -local function key_chain_from_rep(root_key, full_key) +local function key_chain_from_full_key(root_key, full_key) local pos = str_find(full_key, "#") if pos == nil then return nil @@ -169,11 +168,8 @@ local function key_chain_from_rep(root_key, full_key) -- Remove the root_key from the start local vary_key = str_sub(full_key, pos+1) - local vary_spec = {} -- We don't need this - - return key_chain(root_key, vary_key, vary_spec) end @@ -194,11 +190,12 @@ local function purge(handler, purge_mode, repset) local res_ok, res_message local jobs = {} - for _, rep in ipairs(representations) do - - ngx.log(ngx.DEBUG, "Purging representation: ", rep) - local ok, message, job = _purge(handler, purge_mode, key_chain_from_rep(root_key, rep)) + local key_chain + for _, full_key in ipairs(representations) do + key_chain = key_chain_from_full_key(root_key, full_key) + local ok, message, job = _purge(handler, purge_mode, key_chain) + -- Set the overall response if any representation was purged if res_ok == nil or ok == true then res_ok = ok res_message = message @@ -206,6 +203,13 @@ local function purge(handler, purge_mode, repset) tbl_insert(jobs, job) end + + -- Clean up vary and repset keys if we're deleting + if purge_mode == "delete" and res_ok then + local _, e = handler.redis:del(key_chain.repset, key_chain.vary) + if e then ngx_log(ngx_ERR, e) end + end + return res_ok, res_message, jobs end _M.purge = purge @@ -213,11 +217,12 @@ _M.purge = purge local function purge_in_background(handler, purge_mode) local key_chain = handler:cache_key_chain() + local job, err = put_background_job( "ledge_purge", "ledge.jobs.purge", { - key_chain = key_chain, + repset = key_chain.repset, keyspace_scan_count = handler.config.keyspace_scan_count, purge_mode = purge_mode, storage_driver = handler.config.storage_driver, From a06c65067a49ee115757b77f3ef796bfe92bd3a8 Mon Sep 17 00:00:00 
2001 From: Hamish Date: Mon, 25 Sep 2017 15:34:25 +0100 Subject: [PATCH 75/90] Further vary tests --- t/01-unit/cache_key.t | 20 ++++++--- t/01-unit/jobs.t | 2 +- t/01-unit/purge.t | 89 ++++++++++++++++++++++++++++++++++++++++- t/01-unit/response.t | 12 +++++- t/02-integration/vary.t | 49 +++++++++++++++++++++++ 5 files changed, 164 insertions(+), 8 deletions(-) diff --git a/t/01-unit/cache_key.t b/t/01-unit/cache_key.t index 1a87820d..c6519c79 100644 --- a/t/01-unit/cache_key.t +++ b/t/01-unit/cache_key.t @@ -362,6 +362,14 @@ location /t { assert(vary_key == "foo:bar:x-test:value", "Vary spec not modified with noop function - multivalue spec") called_flag = false + ngx.req.set_header("Foo", {"Foo1", "Foo2"}) + local vary_key = generate_vary_key({"Foo", "X-Test"}, callback, nil) + log(vary_key) + assert(called_flag == true, "Callback is called - multivalue header") + assert(vary_key == "foo:foo1,foo2:x-test:value", "Vary spec - multivalue header") + called_flag = false + ngx.req.set_header("Foo", "Bar") + -- Active callback callback = function(vary_key) @@ -481,8 +489,6 @@ location /t { local vary_spec = {"Foo", "Test"} local expected = { - vary = "ledge:dummy:root:::vary", - repset = "ledge:dummy:root:::repset", main = "ledge:dummy:root:#foo:bar:test:value::main", entities = "ledge:dummy:root:#foo:bar:test:value::entities", headers = "ledge:dummy:root:#foo:bar:test:value::headers", @@ -490,8 +496,10 @@ location /t { reval_req_headers = "ledge:dummy:root:#foo:bar:test:value::reval_req_headers", } local extra = { - root = "ledge:dummy:root:", - full = "ledge:dummy:root:#foo:bar:test:value", + vary = "ledge:dummy:root:::vary", + repset = "ledge:dummy:root:::repset", + root = "ledge:dummy:root:", + full = "ledge:dummy:root:#foo:bar:test:value", fetching_lock = "ledge:dummy:root:#foo:bar:test:value::fetching", } @@ -514,7 +522,7 @@ location /t { ngx.log(ngx.DEBUG, k, ": ", v, " == ", expected[k]) assert(expected[k] == v, k.." chain mismatch") end - assert(i == 7, "7 keys: "..i) + assert(i == 5, "5 iterable keys: "..i) for k,v in pairs(expected) do ngx.log(ngx.DEBUG, k,": ", v, " == ", chain[k]) @@ -524,7 +532,9 @@ location /t { for k,v in pairs(extra) do ngx.log(ngx.DEBUG, k,": ", v, " == ", chain[k]) assert(chain[k] == v, k.." 
extra mismatch") + i = i +1 end + assert(i == 10, "10 total chain entries: "..i) for i,v in ipairs(vary_spec) do assert(chain.vary_spec[i] == v, " Vary spec mismatch") diff --git a/t/01-unit/jobs.t b/t/01-unit/jobs.t index a1241536..5140185f 100644 --- a/t/01-unit/jobs.t +++ b/t/01-unit/jobs.t @@ -318,7 +318,7 @@ location /t { local job = { redis = redis, data = { - key_chain = { repset = "*::repset" }, + repset = "*::repset", keyspace_scan_count = 2, purge_mode = "invalidate", storage_driver = handler.config.storage_driver, diff --git a/t/01-unit/purge.t b/t/01-unit/purge.t index 23fe6841..b9d24243 100644 --- a/t/01-unit/purge.t +++ b/t/01-unit/purge.t @@ -227,13 +227,18 @@ location /t { -- delete local ok, err = purge(handler, "delete", key_chain.repset) - if err then ngx.log(ngx.DEBUG, err) end + if err then ngx.log(ngx.DEBUG, "dekete: ",err) end assert(ok == true and err == "deleted", "purge should return true - deleted") -- delete, missing local ok, err = purge(handler, "delete", key_chain.repset) if err then ngx.log(ngx.DEBUG, err) end assert(ok == false and err == "nothing to purge", "purge should return false - nothing to purge") + + local keys = redis:keys(key_chain.root.."*") + ngx.log(ngx.DEBUG, require("cjson").encode(keys)) + + assert(#keys == 0, "Keys have all been removed") } } location /cache3_prx { @@ -258,6 +263,88 @@ location /cache { --- no_error_log [error] +=== TEST 3b: purge with vary +--- http_config eval: $::HttpConfig +--- config +location /t { + rewrite ^ /cache3 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + local redis = require("ledge").create_redis_connection() + handler.redis = redis + + local storage = require("ledge").create_storage_connection( + handler.config.storage_driver, + handler.config.storage_driver_config + ) + handler.storage = storage + + local key_chain = handler:cache_key_chain() + ngx.log(ngx.DEBUG, require("cjson").encode(key_chain)) + + local purge = require("ledge.purge").purge + + -- invalidate + local ok, err = purge(handler, "invalidate", key_chain.repset) + if err then ngx.log(ngx.DEBUG, err) end + assert(ok == true and err == "purged", "purge should return true - purged") + + -- revalidate + local reval_job = false + local jobcount = 0 + handler.revalidate_in_background = function() + jobcount = jobcount + 1 + reval_job = true + return "job"..jobcount + end + + local ok, err, job = purge(handler, "revalidate", key_chain.repset) + if err then ngx.log(ngx.DEBUG, err) end + assert(ok == false and err == "already expired", "purge should return false - already expired") + assert(reval_job == true, "revalidate should schedule job") + assert(job[1] == "job1" and job[2] == "job2", "revalidate should return the job "..tostring(job)) + assert(jobcount == 2, "Revalidate should schedule 1 job per representation") + + -- delete + local ok, err = purge(handler, "delete", key_chain.repset) + if err then ngx.log(ngx.DEBUG, "dekete: ",err) end + assert(ok == true and err == "deleted", "purge should return true - deleted") + + + local keys = redis:keys(key_chain.root.."*") + ngx.log(ngx.DEBUG, require("cjson").encode(keys)) + + assert(#keys == 0, "Keys have all been removed") + } +} +location /cache3_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + handler:run() + } +} + +location /cache { + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.header["Vary"] = "X-Test" + ngx.say("TEST 3b") + } +} +--- request eval +[ +"GET 
/cache3_prx", "GET /cache3_prx", +"GET /t" +] +--- more_headers eval +[ +"X-Test: foo", "X-Test: bar", +"" +] +--- no_error_log +[error] + === TEST 4: purge api --- http_config eval: $::HttpConfig --- config diff --git a/t/01-unit/response.t b/t/01-unit/response.t index acdd12fe..b85489a7 100644 --- a/t/01-unit/response.t +++ b/t/01-unit/response.t @@ -459,13 +459,23 @@ location /t { res = {"bar","baz","foo"}, msg = "De-duplicate", }, + { + hdr = {"foo", "Bar", "Baz, Qux"}, + res = {"bar", "baz", "foo", "qux"}, + msg = "Multiple vary headers", + }, + { + hdr = {"foo, bar", "foo", "bar, Qux", "bar, Foo"}, + res = {"bar", "foo", "qux"}, + msg = "Multiple vary headers - deduplicate", + }, } for _, t in ipairs(tests) do res.header["Vary"] = t["hdr"] local vary_spec = res:parse_vary_header() ngx.log(ngx.DEBUG, "-----------------------------------------------") - ngx.log(ngx.DEBUG, "header: ", t["hdr"]) + ngx.log(ngx.DEBUG, "header: ", encode(t["hdr"])) ngx.log(ngx.DEBUG, "spec: ", encode(vary_spec)) ngx.log(ngx.DEBUG, "expected: ", encode(t["res"])) diff --git a/t/02-integration/vary.t b/t/02-integration/vary.t index 4ae0bd13..e1a42caa 100644 --- a/t/02-integration/vary.t +++ b/t/02-integration/vary.t @@ -522,3 +522,52 @@ X-Vary: noop", ] --- no_error_log [error] + + +=== TEST 9: Other representations are preserved with a no-cache-response +--- http_config eval: $::HttpConfig +--- config +location /vary_9_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler():run() + } +} +location /vary_9 { + content_by_lua_block { + local incr = ngx.shared.ledge_test:incr("test9", 1, 0) + if incr == 3 then + ngx.header["Cache-Control"] = "no-cache" + else + ngx.header["Cache-Control"] = "max-age=60" + end + ngx.header["Vary"] = "X-Test" + ngx.print("TEST 9: ", incr) + } +} +--- request eval +[ +"GET /vary_9_prx", +"GET /vary_9_prx", +"GET /vary_9_prx", +"GET /vary_9_prx", +] +--- more_headers eval +[ +"X-Test: Foo", +"X-Test: Bar", +"X-Test: Foo +Cache-Control: no-cache", +"X-Test: Bar", +] +--- response_body eval +[ +"TEST 9: 1", +"TEST 9: 2", +"TEST 9: 3", +"TEST 9: 2", +] +--- no_error_log +[error] + From d81327fb36f29f08574807449531acd1aa91bcc2 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 26 Sep 2017 12:53:00 +0100 Subject: [PATCH 76/90] Test: can purge response with no body --- t/02-integration/purge.t | 59 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 59 insertions(+) diff --git a/t/02-integration/purge.t b/t/02-integration/purge.t index cc880718..275c2793 100644 --- a/t/02-integration/purge.t +++ b/t/02-integration/purge.t @@ -11,6 +11,8 @@ $ENV{TEST_COVERAGE} ||= 0; our $HttpConfig = qq{ lua_package_path "./lib/?.lua;../lua-resty-redis-connector/lib/?.lua;../lua-resty-qless/lib/?.lua;../lua-resty-http/lib/?.lua;../lua-ffi-zlib/lib/?.lua;;"; +lua_shared_dict ledge_test 1m; + init_by_lua_block { if $ENV{TEST_COVERAGE} == 1 then require("luacov.runner").init() @@ -1225,3 +1227,60 @@ result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.result: ] --- wait: 1 + +=== TEST 19: Purge response with no body +--- ONLY +--- http_config eval: $::HttpConfig +--- config +location /purge { + rewrite ^ /purge_cached_19 break; + content_by_lua_block { + require("ledge.state_machine").set_debug(true) + require("ledge").create_handler():run() + } + body_filter_by_lua_block { + ngx.arg[1] = format_json(ngx.arg[1]) + ngx.arg[2] = true + } +} +location /purge_cached_19_prx { + rewrite ^(.*)_prx$ $1 
break; + content_by_lua_block { + require("ledge.state_machine").set_debug(false) + require("ledge").create_handler({ + keep_cache_for = 3600, + }):run() + } +} +location /purge_cached_19 { + content_by_lua_block { + local incr = ngx.shared.ledge_test:incr("test19", 1, 0) + ngx.header["Cache-Control"] = "max-age=3600" + ngx.header["X-Incr"] = incr + } +} +--- request eval +[ +"GET /purge_cached_19_prx", "GET /purge_cached_19_prx", + +"PURGE /purge", + +"GET /purge_cached_19_prx" +] +--- error_code eval +[200, 200, 200, 200] +--- response_headers_like eval +[ +"X-Cache: MISS from .+ +X-Incr: 1", + +"X-Cache: HIT from .+ +X-Incr: 1", + +"", + +"X-Cache: MISS from .+ +X-Incr: 2" +] +--- no_error_log +[error] From 12c79cc7248509e5be0bca968daab021fcb2c9a3 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 26 Sep 2017 13:11:08 +0100 Subject: [PATCH 77/90] Fix test after purge bugfix --- t/01-unit/purge.t | 2 +- t/02-integration/purge.t | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/t/01-unit/purge.t b/t/01-unit/purge.t index f9263bba..4410a30f 100644 --- a/t/01-unit/purge.t +++ b/t/01-unit/purge.t @@ -195,7 +195,7 @@ location /t { local purge = require("ledge.purge").purge -- invalidate - error - local ok, err = purge(handler, "invalidate", {main = "bogus_key"}) + local ok, err = purge(handler, "invalidate", {main = "bogus_key3"}) if err then ngx.log(ngx.DEBUG, err) end assert(ok == false and err == "nothing to purge", "purge should return false - bad key") diff --git a/t/02-integration/purge.t b/t/02-integration/purge.t index 275c2793..2a55b442 100644 --- a/t/02-integration/purge.t +++ b/t/02-integration/purge.t @@ -324,7 +324,7 @@ GET /purge_cached --- no_error_log [error] --- response_body -entities: 1 +entities: 0 === TEST 7a: Prime another key with args @@ -1229,7 +1229,6 @@ result.http://localhost:$ENV{TEST_NGINX_PORT}/purge_cached_17_prx\\?a=1.result: --- wait: 1 === TEST 19: Purge response with no body ---- ONLY --- http_config eval: $::HttpConfig --- config location /purge { From 6c044c6f68b8356b0e0a127f4286057a6b93c154 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 26 Sep 2017 13:11:50 +0100 Subject: [PATCH 78/90] Change 'exists' check in purge from entity existing to main key existing --- lib/ledge/handler.lua | 4 ++-- lib/ledge/purge.lua | 17 +++++++++-------- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index fd3a54dd..98d3ed66 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -735,11 +735,11 @@ end _M.save_to_cache = save_to_cache -local function delete_from_cache(self, key_chain, entity_id) +local function delete_from_cache(self, key_chain) local redis = self.redis -- Get entity_id if not already provided - entity_id = entity_id or self:entity_id(key_chain) + local entity_id = self:entity_id(key_chain) -- Schedule entity collection if entity_id then diff --git a/lib/ledge/purge.lua b/lib/ledge/purge.lua index ffd8bf6b..65d2bcef 100644 --- a/lib/ledge/purge.lua +++ b/lib/ledge/purge.lua @@ -85,8 +85,10 @@ local function expire_keys(redis, storage, key_chain, entity_id) if e then ngx_log(ngx_ERR, e) end end - _, e = storage:set_ttl(entity_id, new_ttl) - if e then ngx_log(ngx_ERR, e) end + -- Reduce TTL on entity if there is one + if entity_id and entity_id ~= ngx_null then + storage:set_ttl(entity_id, new_ttl) + end local ok, err = redis:exec() -- luacheck: ignore ok if err then @@ -110,19 +112,18 @@ local function purge(handler, purge_mode, key_chain) local redis = handler.redis 
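    -- Existence is decided by the main metadata key rather than the body
    -- entity, so responses cached without a body can still be purged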
local storage = handler.storage - local entity_id, err = handler:entity_id(key_chain) + local exists, err = redis:exists(key_chain.main) if err then ngx_log(ngx_ERR, err) end -- We 404 if we have nothing - if not entity_id or entity_id == ngx_null - or not storage:exists(entity_id) then - + if not exists or exists == ngx_null or exists == 0 then return false, "nothing to purge", nil end + -- Delete mode overrides everything else, since you can't revalidate if purge_mode == "delete" then - local res, err = handler:delete_from_cache(key_chain, entity_id) + local res, err = handler:delete_from_cache(key_chain) if not res then return nil, err, nil else @@ -137,7 +138,7 @@ local function purge(handler, purge_mode, key_chain) end -- Invalidate the keys - local ok, err = expire_keys(redis, storage, key_chain, entity_id) + local ok, err = expire_keys(redis, storage, key_chain, handler:entity_id(key_chain)) if not ok and err then return nil, err, job From 280b07967f66dc602ff841bbce11eb6014051cad Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 9 Oct 2017 14:54:12 +0100 Subject: [PATCH 79/90] Additional test for broken entities with ESI active --- t/02-integration/memory_pressure.t | 41 ++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) diff --git a/t/02-integration/memory_pressure.t b/t/02-integration/memory_pressure.t index 606d6049..9134f64a 100644 --- a/t/02-integration/memory_pressure.t +++ b/t/02-integration/memory_pressure.t @@ -279,3 +279,44 @@ location "/mem_pressure_4" { "MISSED: entities"] --- no_error_log [error] + +=== TEST 5: Prime and break active entity during read - ESI +--- http_config eval: $::HttpConfig +--- config +location "/mem_pressure_5_prx" { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + local handler = require("ledge").create_handler() + if not ngx.req.get_uri_args()["prime"] then + handler:bind("before_serve", function(res) + ngx.log(ngx.DEBUG, "Deleting: ", res.entity_id) + handler.storage:delete(res.entity_id) + end) + else + -- Dummy log for prime request + require("ledge.state_machine").set_debug(true) + ngx.log(ngx.DEBUG, "entity removed during read") + end + ngx.req.set_uri_args({}) + handler:run() + } +} +location "/mem_pressure_5" { + default_type text/html; + content_by_lua_block { + ngx.header["Cache-Control"] = "max-age=3600" + ngx.header["Surrogate-Control"] = 'content="ESI/1.0"' + ngx.print("ORIGIN") + ngx.print("$(QUERY_STRING)") + } +} +--- request eval +["GET /mem_pressure_5_prx?prime=true", "GET /mem_pressure_5_prx"] +--- response_body eval +["ORIGIN", ""] +--- response_headers_like eval +["X-Cache: MISS from .*", "X-Cache: HIT from .*"] +--- no_error_log +[error] +--- error_log +entity removed during read From c753141ff4454394593c73ecd9f14cb1556d3ee5 Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 9 Oct 2017 16:02:27 +0100 Subject: [PATCH 80/90] Test: coroutine.wrap should not return errors as first value --- t/01-unit/util.t | 50 +++++++++++++++++++++++++++++++++++++++++++----- 1 file changed, 45 insertions(+), 5 deletions(-) diff --git a/t/01-unit/util.t b/t/01-unit/util.t index a283757d..1dc6254e 100644 --- a/t/01-unit/util.t +++ b/t/01-unit/util.t @@ -1,12 +1,9 @@ -use Test::Nginx::Socket; +use Test::Nginx::Socket 'no_plan'; use Cwd qw(cwd); - -plan tests => repeat_each() * (blocks() * 2); - my $pwd = cwd(); -$ENV{TEST_COVERAGE} ||= 0; $ENV{TEST_NGINX_PORT} |= 1984; +$ENV{TEST_COVERAGE} ||= 0; our $HttpConfig = qq{ lua_package_path "./lib/?.lua;;"; @@ -383,6 +380,49 @@ GET /t --- no_error_log [error] +=== TEST 8b: 
coroutine.wrap errors +--- http_config eval: $::HttpConfig +--- config +location /t { + content_by_lua_block { + local co_wrap = require("ledge.util").coroutine.wrap + + local co = co_wrap( + function() + for i = 1, 10 do + if i == 5 then + error("BOOM") + end + coroutine.yield(i) + end + end + ) + + function run() + local res = "" + repeat + local num, err = co() + if num then + res = res .. num .. "-" + elseif err then + ngx.log(ngx.DEBUG, "Coroutine error: ", err) + end + until not num + res = res .. "finished" + return res + end + + assert(run() == "1-2-3-4-finished", "Error was yielded!") + } +} +--- request +GET /t +--- error_log +Coroutine error: +BOOM +--- no_error_log +Error was yielded! + === TEST 9: get_hostname --- http_config eval: $::HttpConfig From e8908db3679d6d7cfee29bd36d8191a86ed6a509 Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 9 Oct 2017 16:14:41 +0100 Subject: [PATCH 81/90] Catch errors in coroutine.wrap --- lib/ledge/util.lua | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/lib/ledge/util.lua b/lib/ledge/util.lua index 7affdd3a..44690432 100644 --- a/lib/ledge/util.lua +++ b/lib/ledge/util.lua @@ -1,8 +1,8 @@ local ngx_var = ngx.var local ffi = require "ffi" -local type, next, setmetatable, getmetatable, error, tostring, select = - type, next, setmetatable, getmetatable, error, tostring, select +local type, next, setmetatable, getmetatable, error, tostring = + type, next, setmetatable, getmetatable, error, tostring local str_find = string.find local str_sub = string.sub @@ -205,7 +205,13 @@ local function co_wrap(func) else return function(...) if co_status(co) == "suspended" then - return select(2, co_resume(co, ...)) + -- Handle errors in coroutines + local ok, val1, val2, val3 = co_resume(co, ...) + if ok == true then + return val1, val2, val3 + else + return nil, val1 + end else return nil, "can't resume a " .. co_status(co) .. 
" coroutine" end From 0488e00a580d57f1160f776625db3c968a1d0d38 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 17 Oct 2017 11:51:42 +0100 Subject: [PATCH 82/90] Invert vary comparison return value --- t/01-unit/cache_key.t | 46 +++++++++++++++++++++---------------------- 1 file changed, 23 insertions(+), 23 deletions(-) diff --git a/t/01-unit/cache_key.t b/t/01-unit/cache_key.t index 5942eac8..b361cf8f 100644 --- a/t/01-unit/cache_key.t +++ b/t/01-unit/cache_key.t @@ -259,42 +259,42 @@ location /t { location /t { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { - local vary_spec_compare = require("ledge.cache_key").vary_spec_compare + local vary_compare = require("ledge.cache_key").vary_compare -- Compare vary specs - local changed = vary_spec_compare({}, {}) - assert(changed == false, "empty table == empty table") + local changed = vary_compare({}, {}) + assert(changed == true, "empty table == empty table") - local changed = vary_spec_compare({}, nil) - assert(changed == false, "empty table == nil") + local changed = vary_compare({}, nil) + assert(changed == true, "empty table == nil") - local changed = vary_spec_compare(nil, {}) - assert(changed == false, "nil == empty table") + local changed = vary_compare(nil, {}) + assert(changed == true, "nil == empty table") - local changed = vary_spec_compare({"Foo"}, {"Foo"}) - assert(changed == false, "table == table") + local changed = vary_compare({"Foo"}, {"Foo"}) + assert(changed == true, "table == table") - local changed = vary_spec_compare({"Foo", "Bar"}, {"Foo", "Bar"}) - assert(changed == false, "table == table (multi-values") + local changed = vary_compare({"Foo", "Bar"}, {"Foo", "Bar"}) + assert(changed == true, "table == table (multi-values") - local changed = vary_spec_compare({"Foo", "bar"}, {"foo", "Bar"}) - assert(changed == false, "table == table (case)") + local changed = vary_compare({"Foo", "bar"}, {"foo", "Bar"}) + --assert(changed == true, "table == table (case)") - local changed = vary_spec_compare({"Foo"}, {}) - assert(changed == true, "table ~= empty table") + local changed = vary_compare({"Foo"}, {}) + assert(changed == false, "table ~= empty table") - local changed = vary_spec_compare({}, {"Foo"}) - assert(changed == true, "empty table ~= table") + local changed = vary_compare({}, {"Foo"}) + assert(changed == false, "empty table ~= table") - local changed = vary_spec_compare({"Foo"}, nil) - assert(changed == true, "table ~= nil") + local changed = vary_compare({"Foo"}, nil) + assert(changed == false, "table ~= nil") - local changed = vary_spec_compare(nil, {"Foo"}) - assert(changed == true, "nil ~= table") + local changed = vary_compare(nil, {"Foo"}) + assert(changed == false, "nil ~= table") - local changed = vary_spec_compare({"Foo"}, {}) - assert(changed == true, "table ~= empty table") + local changed = vary_compare({"Foo"}, {}) + assert(changed == false, "table ~= empty table") } } --- request From 3e57ca9073f664e16f9a08044e24cc88fceea0aa Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 17 Oct 2017 11:52:08 +0100 Subject: [PATCH 83/90] vary_spec_compare -> vary_compare and inverted return value, optimise table comparison --- lib/ledge/cache_key.lua | 34 ++++++++++++++++++++++-------- lib/ledge/response.lua | 7 +++--- lib/ledge/state_machine/states.lua | 4 ++-- 3 files changed, 31 insertions(+), 14 deletions(-) diff --git a/lib/ledge/cache_key.lua b/lib/ledge/cache_key.lua index 03d1ab28..48cedb6c 100644 --- a/lib/ledge/cache_key.lua +++ b/lib/ledge/cache_key.lua @@ -98,28 +98,44 @@ end _M.read_vary_spec = 
read_vary_spec -local function vary_spec_compare(spec_a, spec_b) +local function vary_compare(spec_a, spec_b) if (not spec_a or not next(spec_a)) then if (not spec_b or not next(spec_b)) then -- both nil or empty - return false + return true else -- spec_b is set but spec_a is empty - return true + return false end elseif (spec_b and next(spec_b)) then - -- TODO: looping here faster? - if str_lower(tbl_concat(spec_b, ",")) == str_lower(tbl_concat(spec_a, ",")) then - -- Current vary spec and new vary spec match - return false + local outer_match = true + + -- Loop over all values in spec_a + for _, v in ipairs(spec_a) do + local match = false + -- Look for a match in spec_b + for _, v2 in ipairs(spec_b) do + if v == v2 then + match = true + break + end + end + + -- Didn't match any values in spec_b + if match == false then + outer_match = false + break + end end + + return outer_match end -- spec_a is a thing but spec_b is not - return true + return false end -_M.vary_spec_compare = vary_spec_compare +_M.vary_compare = vary_compare local function generate_vary_key(vary_spec, callback, headers) diff --git a/lib/ledge/response.lua b/lib/ledge/response.lua index b70c8fb8..2e49cf2f 100644 --- a/lib/ledge/response.lua +++ b/lib/ledge/response.lua @@ -473,13 +473,14 @@ function _M.parse_vary_header(self) if type(vary_hdr) == "table" then vary_hdr = tbl_concat(vary_hdr,",") end - -- Remove whitespace around commas - vary_hdr = ngx_re_gsub(vary_hdr, [[\s*,\s*]], ",", "oj") - vary_spec = str_split(str_lower(vary_hdr), ",") + -- Remove whitespace around commas and lowercase + vary_hdr = ngx_re_gsub(str_lower(vary_hdr), [[\s*,\s*]], ",", "oj") + vary_spec = str_split(vary_hdr, ",") tbl_sort(vary_spec) vary_spec = deduplicate_table(vary_spec) end + -- Return the new vary sepc table *and* the normalised header return vary_spec end diff --git a/lib/ledge/state_machine/states.lua b/lib/ledge/state_machine/states.lua index c9e5e40f..7ac667e7 100644 --- a/lib/ledge/state_machine/states.lua +++ b/lib/ledge/state_machine/states.lua @@ -38,7 +38,7 @@ local acquire_lock = require("ledge.collapse").acquire_lock local parse_content_range = require("ledge.range").parse_content_range -local vary_spec_compare = require("ledge.cache_key").vary_spec_compare +local vary_compare = require("ledge.cache_key").vary_compare local _M = { -- luacheck: no unused @@ -336,7 +336,7 @@ return { local new_spec = handler.response:parse_vary_header() local key_chain = handler:cache_key_chain() - if vary_spec_compare(new_spec, key_chain.vary_spec) then + if vary_compare(new_spec, key_chain.vary_spec) == false then handler:set_vary_spec(new_spec) return sm:e "vary_modified" From 7ae812d25b3006cd85f78ccbffc13942d6d41514 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 17 Oct 2017 12:41:56 +0100 Subject: [PATCH 84/90] Test: Vary key is removed with last representation --- t/02-integration/vary.t | 71 ++++++++++++++++++++++++++++++++++++++++- 1 file changed, 70 insertions(+), 1 deletion(-) diff --git a/t/02-integration/vary.t b/t/02-integration/vary.t index e1a42caa..c5069e77 100644 --- a/t/02-integration/vary.t +++ b/t/02-integration/vary.t @@ -530,7 +530,7 @@ X-Vary: noop", location /vary_9_prx { rewrite ^(.*)_prx$ $1 break; content_by_lua_block { - require("ledge.state_machine").set_debug(true) + require("ledge.state_machine").set_debug(true) require("ledge").create_handler():run() } } @@ -571,3 +571,72 @@ Cache-Control: no-cache", --- no_error_log [error] +=== TEST 10: Vary key cleaned up +--- http_config eval: $::HttpConfig +--- 
config +location /vary_10_check { + rewrite ^(.*)_check$ $1 break; + content_by_lua_block { + local redis = require("ledge").create_redis_connection() + local handler = require("ledge").create_handler() + handler.redis = redis + + local chain = handler:cache_key_chain() + + local res, err = redis:smembers(chain.repset) + local exists, err = redis:exists(chain.vary) + ngx.print(#res, " ", exists) + } +} +location /vary_10_prx { + rewrite ^(.*)_prx$ $1 break; + content_by_lua_block { + require("ledge").create_handler():run() + } +} +location /vary_10 { + content_by_lua_block { + local incr = ngx.shared.ledge_test:incr("test10", 1, 0) + if incr < 3 then + ngx.header["Cache-Control"] = "max-age=60" + else + ngx.header["Cache-Control"] = "no-cache" + end + ngx.header["Vary"] = "X-Test" + ngx.print("TEST 10: ", incr) + } +} +--- request eval +[ +"GET /vary_10_prx", +"GET /vary_10_prx", +"GET /vary_10_check", +"GET /vary_10_prx", +"GET /vary_10_check", +"GET /vary_10_prx", +"GET /vary_10_check", +] +--- more_headers eval +[ +"X-Test: Foo", +"X-Test: Bar", +"", +"X-Test: Foo +Cache-Control: no-cache", +"", +"X-Test: Bar +Cache-Control: no-cache", +"", +] +--- response_body eval +[ +"TEST 10: 1", +"TEST 10: 2", +"2 1", +"TEST 10: 3", +"1 1", +"TEST 10: 4", +"0 0", +] +--- no_error_log +[error] From f97b12185695ce1b39495981a986a1d2793be321 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 17 Oct 2017 12:42:06 +0100 Subject: [PATCH 85/90] Remove the vary key if the repset is empty --- lib/ledge/handler.lua | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index 1a18a2b7..14589fb6 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -854,6 +854,13 @@ local function delete_from_cache(self, key_chain) for _, v in pairs(key_chain) do tbl_insert(keys, v) end + + -- If there are no more entries in the repset clean up the vary key too + local exists = redis:exists(key_chain.repset) + if exists == 0 then + tbl_insert(keys, key_chain.vary) + end + return redis:del(unpack(keys)) end _M.delete_from_cache = delete_from_cache From daa0221ed816700004ed040963af9c8a11f005ee Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 23 Oct 2017 10:56:47 +0100 Subject: [PATCH 86/90] Doc: Vary support --- README.md | 73 +++++++++++++++++++++++++++++++++++++++++++++---------- 1 file changed, 60 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 21b50b68..053b9185 100644 --- a/README.md +++ b/README.md @@ -87,6 +87,7 @@ An `upstream` is the only thing which must be manually configured, and points to Cache body data is handled by the `storage` system, and as mentioned, by default shares the same Redis instance as the `metadata`. However, `storage` is abstracted via a [driver system](#storage_driver) making it possible to store cache body data in a separate Redis instance, or a group of horizontally scalable Redis instances via a [proxy](https://github.com/twitter/twemproxy), or to roll your own `storage` driver, for example targeting PostreSQL or even simply a filesystem. It's perhaps important to consider that by default all cache storage uses Redis, and as such is bound by system memory. +[Back to TOC](#table-of-contents) ### Cache keys @@ -102,8 +103,7 @@ URI arguments are sorted alphabetically by default, so `http://example.com?a=1&b HTTP response sizes can be wildly different, sometimes tiny and sometimes huge, and it's not always possible to know the total size up front. 
-To guarantee predictable memory usage regardless of response sizes Ledge operates a streaming design, meaning it only ever operates on a single `buffer` per request at a time. This is equally true when fetching upstream to when reading from cache or -serving to the client request. +To guarantee predictable memory usage regardless of response sizes Ledge operates a streaming design, meaning it only ever operates on a single `buffer` per request at a time. This is equally true when fetching upstream to when reading from cache or serving to the client request. It's also true (mostly) when processing [ESI](#edge-size-includes) instructions, except for in the case where an instruction is found to span multiple buffers. In this case, we continue buffering until a complete instruction can be understood, up to a [configurable limit](#esi_max_size). @@ -123,7 +123,6 @@ This is particularly useful to reduce upstream load if a spike of traffic occurs Beyond standard RFC compliant cache behaviours, Ledge has many features designed to maximise cache HIT rates and to reduce latency for requests. See the sections on [Edge Side Includes](#edge-side-includes), [serving stale](#serving-stale) and [revalidating on purge](#purging) for more information. - [Back to TOC](#table-of-contents) @@ -135,14 +134,14 @@ Assuming you have Redis running on `localhost:6379`, and your upstream is at `lo http { if_modified_since Off; lua_check_client_abort On; - + init_by_lua_block { require("ledge").configure({ redis_connector_params = { url = "redis://127.0.0.1:6379/0", }, }) - + require("ledge").set_handler_defaults({ upstream_host = "127.0.0.1", upstream_port = 8080, @@ -168,6 +167,7 @@ http { [Back to TOC](#table-of-contents) + ## Config systems There are four different layers to the configuration system. Firstly there is the main [Redis config](#ledgeconfigure) and [handler defaults](#ledgeset_handler_defaults) config, which are global and must be set during the Nginx `init` phase. @@ -221,11 +221,11 @@ More commonly, we just want to alter behaviour for a given Nginx `location`. location /foo_location { content_by_lua_block { local handler = require("ledge").create_handler() - + handler:bind("before_serve", function(res) res.header["X-Foo"] = "bar" -- only set X-Foo for this location end) - + handler:run() } } @@ -267,6 +267,8 @@ The goal is to be 100% RFC compliant, but with some extensions to allow more agr To manually invalidate a cache item (or purge), we support the non-standard `PURGE` method familiar to users of Squid. Send a HTTP request to the URI with the method set, and Ledge will attempt to invalidate the item, returning status `200` on success and `404` if the URI was not found in cache, along with a JSON body for more details. +A purge request will affect all representations associated with the cache key, for example compressed and uncompressed responses separated by the `Vary: Accept-Encoding` response header will all be purged. + `$> curl -X PURGE -H "Host: example.com" http://cache.example.com/page1 | jq .` ```json @@ -315,7 +317,6 @@ limit_except GET POST PUT DELETE { [Back to TOC](#table-of-contents) - ### JSON API A JSON based API is also available for purging cache multiple cache items at once. @@ -346,7 +347,6 @@ Returns a results hash keyed by URI or a JSON error response [Back to TOC](#table-of-contents) - ### Wildcard purging Wildcard (\*) patterns are also supported in `PURGE` URIs, which will always return a status of `200` and a JSON body detailing a background job. 
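For example, a request along these lines (reusing the hostnames from the earlier purge example; the `/pages/*` path is purely illustrative) queues a background purge for everything under `/pages/`:

`$> curl -X PURGE -H "Host: example.com" "http://cache.example.com/pages/*" | jq .`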
Wildcard purges involve scanning the entire keyspace, and so can take a little while. See [keyspace\_scan\_count](#keyspace_scan_count) for tuning help. @@ -397,7 +397,6 @@ In other words, set the TTL to the highest comfortable frequency of requests at All stale behaviours are constrained by normal cache control semantics. For example, if the origin is down, and the response could be served stale due to the upstream error, but the request contains `Cache-Control: no-cache` or even `Cache-Control: max-age=60` where the content is older than 60 seconds, they will be served the error, rather than the stale content. - [Back to TOC](#table-of-contents) @@ -559,6 +558,7 @@ init_by_lua_block { Ledge uses [lua-resty-redis-connector](https://github.com/pintsized/lua-resty-redis-connector) to handle all Redis connections. It simply passes anything given in `redis_connector_params` straight to [lua-resty-redis-connector](https://github.com/pintsized/lua-resty-redis-connector), so review the documentation there for options, including how to use [Redis Sentinel](https://redis.io/topics/sentinel). + #### qless_db `default: 1` @@ -667,7 +667,6 @@ syntax: `handler:run()` Must be called during the `init_worker` phase, otherwise background tasks will not be run, including garbage collection which is very importatnt. - [Back to TOC](#table-of-contents) @@ -726,6 +725,7 @@ This is a `string` value, which will be used to attempt to load a storage driver [Back to TOC](#handler-configuration-options) + #### storage_driver_config `default: {}` @@ -734,6 +734,7 @@ Storage configuration can vary based on the driver. Currently we only have a Red [Back to TOC](#handler-configuration-options) + ##### Redis storage driver config * `redis_connector_params` Redis params table, as per [lua-resty-redis-connector](https://github.com/pintsized/lua-resty-redis-connector) @@ -744,6 +745,7 @@ If `supports_transactions` is set to `false`, cache bodies are not written atomi [Back to TOC](#handler-configuration-options) + #### upstream_connect_timeout default: `1000 (ms)` @@ -752,6 +754,7 @@ Maximum time to wait for an upstream connection (in milliseconds). If it is exce [Back to TOC](#handler-configuration-options) + #### upstream_send_timeout default: `2000 (ms)` @@ -760,6 +763,7 @@ Maximum time to wait sending data on a connected upstream socket (in millisecond [Back to TOC](#handler-configuration-options) + #### upstream_read_timeout default: `10000 (ms)` @@ -768,18 +772,21 @@ Maximum time to wait on a connected upstream socket (in milliseconds). If it is [Back to TOC](#handler-configuration-options) + #### upstream_keepalive_timeout default: `75000` [Back to TOC](#handler-configuration-options) + #### upstream_keepalive_poolsize default: `64` [Back to TOC](#handler-configuration-options) + #### upstream_host default: `""` @@ -792,6 +799,7 @@ resolver 8.8.8.8; [Back to TOC](#handler-configuration-options) + #### upstream_port default: `80` @@ -800,6 +808,7 @@ Specifies the port of the upstream host. [Back to TOC](#handler-configuration-options) + #### upstream_use_ssl default: `false` @@ -808,6 +817,7 @@ Toggles the use of SSL on the upstream connection. Other `upstream_ssl_*` option [Back to TOC](#handler-configuration-options) + #### upstream_ssl_server_name default: `""` @@ -816,6 +826,7 @@ Specifies the SSL server name used for Server Name Indication (SNI). See [sslhan [Back to TOC](#handler-configuration-options) + #### upstream_ssl_verify default: `false` @@ -824,6 +835,7 @@ Toggles SSL verification. 
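Taken together, the `upstream_*` options above can be combined into a single set of handler defaults during the `init` phase (e.g. inside `init_by_lua_block`). This is only a sketch, and `origin.example.com` is a placeholder origin:

```lua
require("ledge").set_handler_defaults({
    upstream_host = "origin.example.com",
    upstream_port = 443,
    upstream_use_ssl = true,
    upstream_ssl_server_name = "origin.example.com",
    upstream_ssl_verify = true,
})
```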
See [sslhandshake](https://github.com/openresty/lua-ng [Back to TOC](#handler-configuration-options) + #### cache_key_spec `default: cache_key_spec = { "scheme", "host", "uri", "args" },` @@ -862,6 +874,8 @@ require("ledge").create_handler({ }):run() ``` +Consider leveraging vary, via the [before_vary_selection](#before_vary_selection) event, for separating cache entries rather than modifying the main `cache_key_spec` directly. + [Back to TOC](#handler-configuration-options) @@ -1042,6 +1056,7 @@ If set to false, disables advertising the software name and version, e.g. `(ledg * [before_save](#before_save) * [before_serve](#before_serve) * [before_save_revalidation_data](#before_save_revalidation_data) +* [before_vary_selection](#before_vary_selection) #### after_cache_read @@ -1049,7 +1064,7 @@ syntax: `bind("after_cache_read", function(res) -- end)` params: `res`. The cached response table. -Fires directly after the response was successfully loaded from cache. +Fires directly after the response was successfully loaded from cache. The `res` table given contains: @@ -1174,8 +1189,39 @@ The `reval_params` are values derived from the current running configuration for [Back to TOC](#events) -## Administration +#### before_vary_selection + +syntax: `bind("before_vary_selection", function(vary_key) -- end)` + +params: `vary_key` A table of selecting headers + +Fires when we're about to generate the vary key, used to select the correct cache representation. + +The `vary_key` table is a hash of header field names (lowercase) to values. +A field name which exists in the Vary response header but does not exist in the current request header will have a value of `ngx.null`. + +``` +Request Headers: + Accept-Encoding: gzip + X-Test: abc + X-test: def + +Response Headers: + Vary: Accept-Encoding, X-Test + Vary: X-Foo +vary_key table: +{ + ["accept-encoding"] = "gzip", + ["x-test"] = "abc,def", + ["x-foo"] = ngx.null +} +``` + +[Back to TOC](#events) + + +## Administration ### X-Cache @@ -1231,6 +1277,7 @@ You may also wish to tweak the [qless job history](https://github.com/pintsized/ [Back to TOC](#table-of-contents) + ## Author James Hurst From b7ebd15802ca18099b853fcfd0f73b5799d4f172 Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 23 Oct 2017 12:37:36 +0100 Subject: [PATCH 87/90] Test: Handle error in cache key generation --- t/01-unit/response.t | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/t/01-unit/response.t b/t/01-unit/response.t index b85489a7..fa726bd2 100644 --- a/t/01-unit/response.t +++ b/t/01-unit/response.t @@ -348,6 +348,12 @@ location /t { local ok, err = res:read() assert(ok or not err, "read should return error on redis error") + + + handler.cache_key_chain = function() return nil, "Dummy" end + + local ok, err = res:read() + assert(ok == nil and err == "Dummy", "read should return error when failing to get the key chain") } } --- request From 1c7db61c0f7a017b2a0b31f9d5406f6482409936 Mon Sep 17 00:00:00 2001 From: Hamish Date: Mon, 23 Oct 2017 12:38:46 +0100 Subject: [PATCH 88/90] Return error when key_chain generation fails --- lib/ledge/handler.lua | 8 +++++++- lib/ledge/response.lua | 6 +++++- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index 14589fb6..147fedbf 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -264,7 +264,13 @@ local function cache_key_chain(self) local vk = vary_key(self, vs) - self._cache_key_chain = ledge_cache_key.key_chain(rk, vk, vs) + local chain, err = 
ledge_cache_key.key_chain(rk, vk, vs) + + if not chain then + return nil, err + end + + self._cache_key_chain = chain end return self._cache_key_chain diff --git a/lib/ledge/response.lua b/lib/ledge/response.lua index 2e49cf2f..295eb425 100644 --- a/lib/ledge/response.lua +++ b/lib/ledge/response.lua @@ -196,8 +196,12 @@ end -- Return nil and *no* error if this is just a broken/partial cache entry -- so we MISS and update the entry. function _M.read(self) + local key_chain, err = self.handler:cache_key_chain() + if not key_chain then + return nil, err + end + local redis = self.redis - local key_chain = self.handler:cache_key_chain() -- Read main metdata local cache_parts, err = redis:hgetall(key_chain.main) From 3c02300fb1392d3b10ad36e48f9651aa4a7450e2 Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 28 Nov 2017 13:33:34 +0000 Subject: [PATCH 89/90] Never set a nil vary spec --- lib/ledge/handler.lua | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua index 147fedbf..aecfb657 100644 --- a/lib/ledge/handler.lua +++ b/lib/ledge/handler.lua @@ -289,7 +289,9 @@ _M.reset_cache_key = reset_cache_key local function set_vary_spec(self, vary_spec) reset_cache_key(self) - self._vary_spec = vary_spec + if vary_spec then + self._vary_spec = vary_spec + end end _M.set_vary_spec = set_vary_spec From 7892dcaa6520632cfa434898bef5e204d018e79b Mon Sep 17 00:00:00 2001 From: Hamish Date: Tue, 28 Nov 2017 13:35:30 +0000 Subject: [PATCH 90/90] Increment version --- lib/ledge.lua | 2 +- lib/ledge/background.lua | 2 +- lib/ledge/cache_key.lua | 2 +- lib/ledge/collapse.lua | 2 +- lib/ledge/esi.lua | 2 +- lib/ledge/esi/processor_1_0.lua | 2 +- lib/ledge/esi/tag_parser.lua | 2 +- lib/ledge/gzip.lua | 2 +- lib/ledge/handler.lua | 2 +- lib/ledge/header_util.lua | 2 +- lib/ledge/jobs/collect_entity.lua | 2 +- lib/ledge/jobs/purge.lua | 2 +- lib/ledge/jobs/revalidate.lua | 2 +- lib/ledge/purge.lua | 2 +- lib/ledge/range.lua | 2 +- lib/ledge/request.lua | 2 +- lib/ledge/response.lua | 2 +- lib/ledge/stale.lua | 2 +- lib/ledge/state_machine.lua | 2 +- lib/ledge/state_machine/actions.lua | 2 +- lib/ledge/state_machine/events.lua | 2 +- lib/ledge/state_machine/pre_transitions.lua | 2 +- lib/ledge/state_machine/states.lua | 2 +- lib/ledge/storage/redis.lua | 2 +- lib/ledge/util.lua | 2 +- lib/ledge/validation.lua | 2 +- lib/ledge/worker.lua | 2 +- 27 files changed, 27 insertions(+), 27 deletions(-) diff --git a/lib/ledge.lua b/lib/ledge.lua index 2e32f582..b3cb9faf 100644 --- a/lib/ledge.lua +++ b/lib/ledge.lua @@ -15,7 +15,7 @@ local redis_connector = require("resty.redis.connector") local _M = { - _VERSION = "2.0.0", + _VERSION = "2.1.0", ORIGIN_MODE_BYPASS = 1, -- Never go to the origin, serve from cache or 503 ORIGIN_MODE_AVOID = 2, -- Avoid the origin, serve from cache where possible diff --git a/lib/ledge/background.lua b/lib/ledge/background.lua index 85e25012..2a3fb5d3 100644 --- a/lib/ledge/background.lua +++ b/lib/ledge/background.lua @@ -3,7 +3,7 @@ local math_ceil = math.ceil local qless = require("resty.qless") local _M = { - _VERSION = "2.0.0", + _VERSION = "2.1.0", } local function put_background_job( queue, klass, data, options) diff --git a/lib/ledge/cache_key.lua b/lib/ledge/cache_key.lua index 48cedb6c..eb554526 100644 --- a/lib/ledge/cache_key.lua +++ b/lib/ledge/cache_key.lua @@ -19,7 +19,7 @@ local get_fixed_field_metatable_proxy = local _M = { - _VERSION = "2.0.0", + _VERSION = "2.1.0", } diff --git a/lib/ledge/collapse.lua 
b/lib/ledge/collapse.lua
index 7121531b..d6bf20d8 100644
--- a/lib/ledge/collapse.lua
+++ b/lib/ledge/collapse.lua
@@ -1,5 +1,5 @@
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 -- Attempts to set a lock key in redis. The lock will expire after
diff --git a/lib/ledge/esi.lua b/lib/ledge/esi.lua
index 00bd568b..2089aa33 100644
--- a/lib/ledge/esi.lua
+++ b/lib/ledge/esi.lua
@@ -19,7 +19,7 @@ local ngx_ERR = ngx.ERR
 
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/esi/processor_1_0.lua b/lib/ledge/esi/processor_1_0.lua
index a0c1eff1..2b7bd212 100644
--- a/lib/ledge/esi/processor_1_0.lua
+++ b/lib/ledge/esi/processor_1_0.lua
@@ -33,7 +33,7 @@ local get_fixed_field_metatable_proxy =
 
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/esi/tag_parser.lua b/lib/ledge/esi/tag_parser.lua
index 685c424d..b39bea64 100644
--- a/lib/ledge/esi/tag_parser.lua
+++ b/lib/ledge/esi/tag_parser.lua
@@ -13,7 +13,7 @@ local get_fixed_field_metatable_proxy =
 
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/gzip.lua b/lib/ledge/gzip.lua
index 7c680ad3..49a2489e 100644
--- a/lib/ledge/gzip.lua
+++ b/lib/ledge/gzip.lua
@@ -8,7 +8,7 @@ local zlib = require("ffi-zlib")
 
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/handler.lua b/lib/ledge/handler.lua
index aecfb657..7f0b8c7f 100644
--- a/lib/ledge/handler.lua
+++ b/lib/ledge/handler.lua
@@ -52,7 +52,7 @@ local response = require("ledge.response")
 
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/header_util.lua b/lib/ledge/header_util.lua
index e1b574d1..54677269 100644
--- a/lib/ledge/header_util.lua
+++ b/lib/ledge/header_util.lua
@@ -7,7 +7,7 @@ local tbl_concat = table.concat
 
 
 local _M = {
-    _VERSION = "2.0.0"
+    _VERSION = "2.1.0"
 }
 
 local mt = {
diff --git a/lib/ledge/jobs/collect_entity.lua b/lib/ledge/jobs/collect_entity.lua
index 86bd968c..1f8d50c2 100644
--- a/lib/ledge/jobs/collect_entity.lua
+++ b/lib/ledge/jobs/collect_entity.lua
@@ -5,7 +5,7 @@ local create_storage_connection = require("ledge").create_storage_connection
 
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/jobs/purge.lua b/lib/ledge/jobs/purge.lua
index f8501b15..ae3c7bf4 100644
--- a/lib/ledge/jobs/purge.lua
+++ b/lib/ledge/jobs/purge.lua
@@ -9,7 +9,7 @@ local create_redis_slave_connection = require("ledge").create_redis_slave_connec
 local close_redis_connection = require("ledge").close_redis_connection
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/jobs/revalidate.lua b/lib/ledge/jobs/revalidate.lua
index d497b2e6..3ab5a2be 100644
--- a/lib/ledge/jobs/revalidate.lua
+++ b/lib/ledge/jobs/revalidate.lua
@@ -3,7 +3,7 @@ local http_headers = require "resty.http_headers"
 local ngx_null = ngx.null
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/purge.lua b/lib/ledge/purge.lua
index 1b8b400b..a2450a63 100644
--- a/lib/ledge/purge.lua
+++ b/lib/ledge/purge.lua
@@ -26,7 +26,7 @@ local put_background_job = require("ledge.background").put_background_job
 local key_chain = require("ledge.cache_key").key_chain
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 local repset_len = -(str_len("::repset")+1)
diff --git a/lib/ledge/range.lua b/lib/ledge/range.lua
index dc35ff0c..e405c22c 100644
--- a/lib/ledge/range.lua
+++ b/lib/ledge/range.lua
@@ -29,7 +29,7 @@ local ngx_PARTIAL_CONTENT = 206
 
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/request.lua b/lib/ledge/request.lua
index 4aeb846b..94ef512f 100644
--- a/lib/ledge/request.lua
+++ b/lib/ledge/request.lua
@@ -14,7 +14,7 @@ local tbl_insert = table.insert
 
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/response.lua b/lib/ledge/response.lua
index 295eb425..f632436f 100644
--- a/lib/ledge/response.lua
+++ b/lib/ledge/response.lua
@@ -37,7 +37,7 @@ local get_fixed_field_metatable_proxy =
 local _DEBUG = false
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
     set_debug = function(debug) _DEBUG = debug end,
 }
 
diff --git a/lib/ledge/stale.lua b/lib/ledge/stale.lua
index d336a118..1158de24 100644
--- a/lib/ledge/stale.lua
+++ b/lib/ledge/stale.lua
@@ -8,7 +8,7 @@ local get_numeric_header_token =
 
 
 local _M = {
-    _VERSION = "2.0.0"
+    _VERSION = "2.1.0"
 }
 
 
diff --git a/lib/ledge/state_machine.lua b/lib/ledge/state_machine.lua
index 0dd8b9d4..8b467e09 100644
--- a/lib/ledge/state_machine.lua
+++ b/lib/ledge/state_machine.lua
@@ -15,7 +15,7 @@ local get_fixed_field_metatable_proxy =
 local _DEBUG = false
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
     set_debug = function(debug) _DEBUG = debug end,
 }
 
diff --git a/lib/ledge/state_machine/actions.lua b/lib/ledge/state_machine/actions.lua
index 55e2e783..ba1b0ce0 100644
--- a/lib/ledge/state_machine/actions.lua
+++ b/lib/ledge/state_machine/actions.lua
@@ -12,7 +12,7 @@ local ngx_req_set_header = ngx.req.set_header
 local get_gzip_decoder = require("ledge.gzip").get_gzip_decoder
 
 local _M = { -- luacheck: no unused
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/state_machine/events.lua b/lib/ledge/state_machine/events.lua
index 655f4a09..b3f0c849 100644
--- a/lib/ledge/state_machine/events.lua
+++ b/lib/ledge/state_machine/events.lua
@@ -1,5 +1,5 @@
 local _M = { -- luacheck: no unused
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/state_machine/pre_transitions.lua b/lib/ledge/state_machine/pre_transitions.lua
index c548dd60..517211c0 100644
--- a/lib/ledge/state_machine/pre_transitions.lua
+++ b/lib/ledge/state_machine/pre_transitions.lua
@@ -1,5 +1,5 @@
 local _M = { -- luacheck: no unused
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/state_machine/states.lua b/lib/ledge/state_machine/states.lua
index 7ac667e7..f440287f 100644
--- a/lib/ledge/state_machine/states.lua
+++ b/lib/ledge/state_machine/states.lua
@@ -42,7 +42,7 @@ local vary_compare = require("ledge.cache_key").vary_compare
 
 
 local _M = { -- luacheck: no unused
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/storage/redis.lua b/lib/ledge/storage/redis.lua
index 6062fbc7..122aa7a9 100644
--- a/lib/ledge/storage/redis.lua
+++ b/lib/ledge/storage/redis.lua
@@ -16,7 +16,7 @@ local get_fixed_field_metatable_proxy =
 
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/util.lua b/lib/ledge/util.lua
index 44690432..6154555b 100644
--- a/lib/ledge/util.lua
+++ b/lib/ledge/util.lua
@@ -30,7 +30,7 @@ if not ok then ngx.log(ngx.ERR, err) end
 
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
     string = {},
     table = {},
     mt = {},
diff --git a/lib/ledge/validation.lua b/lib/ledge/validation.lua
index d9179ffe..03a2be6a 100644
--- a/lib/ledge/validation.lua
+++ b/lib/ledge/validation.lua
@@ -8,7 +8,7 @@ local header_has_directive = require("ledge.header_util").header_has_directive
 
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }
 
 
diff --git a/lib/ledge/worker.lua b/lib/ledge/worker.lua
index 84f24682..06ef60fd 100644
--- a/lib/ledge/worker.lua
+++ b/lib/ledge/worker.lua
@@ -8,7 +8,7 @@ local fixed_field_metatable = require("ledge.util").mt.fixed_field_metatable
 
 
 local _M = {
-    _VERSION = "2.0.0",
+    _VERSION = "2.1.0",
 }