From 7e3c6b9ff021db7e3fd783a85c06ec872342d032 Mon Sep 17 00:00:00 2001
From: Vangelis Katikaridis
Date: Sun, 22 Mar 2020 15:55:19 +0100
Subject: [PATCH] #128 first iteration of server cache functionality

Signed-off-by: Vangelis Katikaridis
---
 README.md                     | 78 +-
 component/http/handler.go     | 2 +
 component/http/http.go        | 1 +
 component/http/route.go       | 109 ++-
 examples/sixth/main.go        | 2 +-
 sync/http/cache.go            | 424 +++++++----
 sync/http/cache_test.go       | 1318 ++++++++++++++++++++++++++++++---
 sync/http/route_cache_test.go | 1 +
 8 files changed, 1676 insertions(+), 259 deletions(-)
 create mode 100644 sync/http/route_cache_test.go

diff --git a/README.md b/README.md
index ffd47e5fee..98d00e3a2a 100644
--- a/README.md
+++ b/README.md
@@ -195,6 +195,82 @@ route := NewRoute("/index", "GET" ProcessorFunc, true, ...MiddlewareFunc)
 routeWithAuth := NewAuthRoute("/index", "GET" ProcessorFunc, true, Authendicator, ...MiddlewareFunc)
 ```
 
+### HTTP Caching
+
+The caching layer for HTTP routes is specified per route.
+
+```go
+type routeCache struct {
+	// path is the route path for which the cache is enabled
+	path string
+	// processor is the processor function for the route
+	processor sync.ProcessorFunc
+	// cache is the cache implementation to be used
+	cache cache.Cache
+	// ttl is the time to live for all cached objects
+	ttl time.Duration
+	// instant is the timing function for the cache expiry calculations
+	instant TimeInstant
+	// minAge specifies the minimum accepted max-age header value for client cache-control requests
+	minAge uint
+	// maxFresh specifies the maximum accepted min-fresh header value for client cache-control requests
+	maxFresh uint
+	// staleResponse specifies if the server is willing to send stale responses
+	// if a new response could not be generated for any reason
+	staleResponse bool
+}
+```
+
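+A cached route is then wired up through the builder and appended to the routes builder like any other route.
+A minimal sketch, mirroring `examples/sixth/main.go` from this patch (`sixth` stands for the route's `sync.ProcessorFunc`
+and `cache` for any `cache.Cache` implementation, both assumed to be constructed already):
+
+```go
+// build a GET route with the caching layer wrapped around the processor function
+cachedRoute := patronhttp.NewCachedRouteBuilder("/", sixth, cache).ToGetRouteBuilder()
+
+// register it alongside any other routes
+routesBuilder := patronhttp.NewRoutesBuilder().Append(cachedRoute)
+```
+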
+#### server cache
+- The **cache key** is based on the route path and the URL request parameters.
+- The server caches only **GET requests**.
+- The server implementation must specify a **time-to-live policy** upon construction.
+- The route should always return the freshest object instance.
+- An **ETag header** must always be present in responses that are part of the cache, representing the hash of the response.
+- Requests within the time-to-live threshold will be served from the cache.
+Otherwise the request will be handled as usual by the route processor function,
+and the resulting response will be cached for future requests.
+- Requests that cannot be processed due to any kind of error, but are found in the cache,
+will be returned to the client with a `Warning` header present in the response,
+ONLY IF this behaviour has been enabled on the server with the `staleResponse` parameter set to `true`.
+
+```
+Note : The server is unaware of the cache time-to-live policy itself.
+The cache might evict entries based on its internal configuration.
+This is transparent to the server. As long as a key cannot be found in the cache,
+the server will execute the route processor function and fill the corresponding cache entry.
+```
+
+#### client cache-control
+The client can control the cache with the appropriate headers:
+- `max-age=?`
+
+returns the cached instance only if the age of the instance is lower than the max-age parameter.
+This parameter is bounded from below by the server option `minAge`.
+This prevents chatty clients with no cache-control policy (or a very aggressive max-age policy) from effectively disabling the cache.
+- `min-fresh=?`
+
+returns the cached instance only if the time left until expiration is at least equal to the provided parameter.
+This parameter is bounded from above by the server option `maxFresh`.
+This prevents chatty clients with no cache-control policy (or a very aggressive min-fresh policy) from effectively disabling the cache.
+- `max-stale=?`
+
+returns the cached instance even if it has expired, as long as it has not exceeded the staleness bound provided by the client.
+Such a response is always accompanied by a `must-revalidate` response header.
+- `no-cache` / `no-store`
+
+returns a new response to the client by executing the route processing function,
+EXCEPT when a `minAge` or `maxFresh` parameter has been specified on the server.
+This is again a safety mechanism to prevent 'aggressive' clients from putting unexpected load on the server.
+The server is responsible for capping the refresh time, BUT it must respond with a `Warning` header in such a case.
+- `only-if-cached`
+
+returns a response only if it is found in the cache, otherwise returns an empty response.
+
+#### cache design reference
+- https://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html
+- https://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9
+
 ### Asynchronous
 
 The implementation of the async processor follows exactly the same principle as the sync processor.
@@ -370,4 +446,4 @@ GET /ready
 
 Both can return either a `200 OK` or a `503 Service Unavailable` status code (default: `200 OK`).
 
-It is possible to customize their behaviour by injecting an `http.AliveCheck` and/or an `http.ReadyCheck` `OptionFunc` to the HTTP component constructor.
\ No newline at end of file
+It is possible to customize their behaviour by injecting an `http.AliveCheck` and/or an `http.ReadyCheck` `OptionFunc` to the HTTP component constructor.
diff --git a/component/http/handler.go b/component/http/handler.go
index a87c398b9c..af28eae53c 100644
--- a/component/http/handler.go
+++ b/component/http/handler.go
@@ -36,7 +36,9 @@ func handler(hnd ProcessorFunc) http.HandlerFunc {
 
 		h := extractHeaders(r)
 
+		// TODO : pass url to the Request
 		req := NewRequest(f, r.Body, h, dec)
+		// TODO : manage warning error type by adding warning to headers
 		rsp, err := hnd(ctx, req)
 		if err != nil {
 			handleError(logger, w, enc, err)
diff --git a/component/http/http.go b/component/http/http.go
index 747dfa3abc..7bd7885b6d 100644
--- a/component/http/http.go
+++ b/component/http/http.go
@@ -28,6 +28,7 @@ func (r *Request) Decode(v interface{}) error {
 // Response definition of the sync response model.
 type Response struct {
 	Payload interface{}
+	Headers map[string]string
 }
 
 // NewResponse creates a new response.
diff --git a/component/http/route.go b/component/http/route.go
index bfb7e2a4b9..dfec72bdf5 100644
--- a/component/http/route.go
+++ b/component/http/route.go
@@ -214,38 +214,107 @@ func NewRoutesBuilder() *RoutesBuilder {
 }
 
 type CachedRouteBuilder struct {
-	path      string
-	processor sync.ProcessorFunc
-	cache     cache.Cache
-	instant   TimeInstant
-	errors    []error
+	path          string
+	processor     sync.ProcessorFunc
+	cache         cache.Cache
+	instant       TimeInstant
+	ttl           time.Duration
+	minAge        uint
+	maxFresh      uint
+	staleResponse bool
+	errors        []error
+}
+
+// WithTimeInstant specifies a time instant function for checking expiry.
+func (cb *CachedRouteBuilder) WithTimeInstant(instant TimeInstant) *CachedRouteBuilder {
+	if instant == nil {
+		cb.errors = append(cb.errors, errors.New("time instant is nil"))
+	}
+	cb.instant = instant
+	return cb
+}
+
+// WithTimeToLive adds a time to live parameter to control the cache expiry policy.
+func (cb *CachedRouteBuilder) WithTimeToLive(ttl time.Duration) *CachedRouteBuilder {
+	if ttl <= 0 {
+		cb.errors = append(cb.errors, errors.New("time to live must be greater than `0`"))
+	}
+	cb.ttl = ttl
+	return cb
+}
+
+// WithMinAge adds a minimum age for the cache responses, acting as a lower bound for the client max-age directive.
+// This avoids cases where a single client with a high request rate and no cache-control headers might effectively disable the cache.
+// If this parameter is missing (e.g. is equal to '0'), the cache can effectively be made obsolete in the above scenario.
+func (cb *CachedRouteBuilder) WithMinAge(minAge uint) *CachedRouteBuilder {
+	cb.minAge = minAge
+	return cb
 }
 
-func NewCachedRouteBuilder(path string, processor sync.ProcessorFunc) *CachedRouteBuilder {
+// WithMaxFresh adds an upper bound for the client min-fresh directive on the cache responses.
+// This avoids cases where a single client with a high request rate and no cache-control headers might effectively disable the cache.
+// If this parameter is missing (e.g. is equal to '0'), the cache can effectively be made obsolete in the above scenario.
+func (cb *CachedRouteBuilder) WithMaxFresh(maxFresh uint) *CachedRouteBuilder {
+	cb.maxFresh = maxFresh
+	return cb
+}
+
+// WithStaleResponse allows the cache to return stale responses.
+func (cb *CachedRouteBuilder) WithStaleResponse(staleResponse bool) *CachedRouteBuilder {
+	cb.staleResponse = staleResponse
+	return cb
+}
 
-	return &CachedRouteBuilder{
+func (cb *CachedRouteBuilder) Create() (*routeCache, error) {
+	//if len(cb.errors) > 0 {
+	//ttl > 0
+	//maxfresh < ttl
+	return &routeCache{}, nil
+	//}
+}
+
+func NewRouteCache(path string, processor sync.ProcessorFunc, cache cache.Cache) *routeCache {
+	if strings.ReplaceAll(path, " ", "") == "" {
+
+	}
+	return &routeCache{
 		path:      path,
 		processor: processor,
+		cache:     cache,
 		instant: func() int64 {
 			return time.Now().Unix()
 		},
 	}
 }
 
-// WithTimeInstant adds authenticator.
-func (cb *CachedRouteBuilder) WithTimeInstant(instant TimeInstant) *CachedRouteBuilder {
-	if instant == nil {
-		cb.errors = append(cb.errors, errors.New("time instant is nil"))
+// ToGetRouteBuilder transforms the cached builder to a GET endpoint builder
+// while propagating any errors.
+func (cb *CachedRouteBuilder) ToGetRouteBuilder() *RouteBuilder {
+	routeCache, err := cb.Create()
+	if err != nil {
+		cb.errors = append(cb.errors, err)
 	}
-	cb.instant = instant
-	return cb
+	rb := NewRouteBuilder(cb.path, cacheHandler(cb.processor, routeCache)).MethodGet()
+	rb.errors = append(rb.errors, cb.errors...)
+	return rb
 }
 
-// WithCache adds authenticator.
-func (cb *CachedRouteBuilder) WithCache(cache cache.Cache) *RouteBuilder { - if cache == nil { - // let it break later - return NewRouteBuilder(cb.path, nil) - } - return NewRouteBuilder(cb.path, cacheHandler(cb.processor, cb.cache, cb.instant)) +type routeCache struct { + // path is the route path, which the cache is enabled for + path string + // processor is the processor function for the route + processor sync.ProcessorFunc + // cache is the cache implementation to be used + cache cache.Cache + // ttl is the time to live for all cached objects + ttl time.Duration + // instant is the timing function for the cache expiry calculations + instant TimeInstant + // minAge specifies the minimum amount of max-age header value for client cache-control requests + minAge uint + // max-fresh specifies the maximum amount of min-fresh header value for client cache-control requests + maxFresh uint + // staleResponse specifies if the server is willing to send stale responses + // if a new response could not be generated for any reason + staleResponse bool } diff --git a/examples/sixth/main.go b/examples/sixth/main.go index 7ae013bc72..08f542b201 100644 --- a/examples/sixth/main.go +++ b/examples/sixth/main.go @@ -44,7 +44,7 @@ func main() { if err != nil { log.Fatalf("failed to init the cache %v", err) } - cachedRoute := patronhttp.NewCachedRouteBuilder("/", sixth).WithCache(cache).MethodGet() + cachedRoute := patronhttp.NewCachedRouteBuilder("/", sixth, cache).ToGetRouteBuilder() ctx := context.Background() err = patron.New(name, version).WithRoutesBuilder(patronhttp.NewRoutesBuilder().Append(cachedRoute)).Run(ctx) diff --git a/sync/http/cache.go b/sync/http/cache.go index 6b5df77b16..f598db19c8 100644 --- a/sync/http/cache.go +++ b/sync/http/cache.go @@ -3,10 +3,11 @@ package http import ( "context" "fmt" + "hash/crc32" "strconv" "strings" + "time" - "github.com/beatlabs/patron/cache" "github.com/beatlabs/patron/log" "github.com/beatlabs/patron/sync" ) @@ -16,168 +17,333 @@ import ( type CacheHeader int const ( - max_age CacheHeader = iota + 1 - max_stale - min_fresh - no_cache - no_store - no_transform - only_if_cached - public - private - - CacheControlHeader = "CACHE-CONTROL" -) + // cacheControlHeader is the header key for cache related values + // note : it is case-sensitive + cacheControlHeader = "Cache-Control" + + // cache control header values -var cacheHeaders = map[string]CacheHeader{"public": public, "private": private, "max-age": max_age, "max-stale": max_stale, "min-fresh": min_fresh, "no-cache": no_cache, "no-store": no_store, "no-transform": no_transform, "only-if-cached": only_if_cached} + // maxStale specifies the staleness in seconds + // that the client is willing to accept on a cached response + maxStale = "max-stale" + // minFresh specifies the minimum amount in seconds + // that the cached object should have before it expires + minFresh = "min-fresh" + // noCache specifies that the client does not expect to get a cached response + noCache = "no-cache" + // noStore specifies that the client does not expect to get a cached response + noStore = "no-store" + // onlyIfCached specifies that the client wants a response , + // only if it is present in the cache + onlyIfCached = "only-if-cached" + // mustRevalidate signals to the client that the response might be stale + mustRevalidate = "must-revalidate" + // maxAge specifies the maximum age in seconds + // - that the client is willing to accept cached objects for + // (if it s part of the request headers) + // - that the response 
object still has , before it expires in the cache + // (if it s part of the response headers) + maxAge = "max-age" + // other response headers + // eTagHeader specifies the hash of the cached object + eTagHeader = "ETag" + // warningHeader signals to the client that it's request , + // as defined by the headers , could not be served consistently. + // The client must assume the the best-effort approach has been used to return any response + // it can ignore the response or use it knowingly of the potential staleness involved + warningHeader = "Warning" +) +// TimeInstant is a timing function +// returns the current time instant of the system's clock +// by default it can be `tine.Now().Unix()` , +// but this abstraction allows also for non-linear implementations type TimeInstant func() int64 -func cacheHandler(hnd sync.ProcessorFunc, cache cache.Cache, instant TimeInstant) sync.ProcessorFunc { +// validator is a conditional function on an objects age and the configured ttl +type validator func(age, ttl int64) bool + +// expiryCheck is the main validator that checks that the entry has not expired e.g. is stale +var expiryCheck validator = func(age, ttl int64) bool { + return age <= ttl +} + +// cacheControl is the model of the request parameters regarding the cache control +type cacheControl struct { + noCache bool + forceCache bool + validators []validator + expiryValidator validator +} + +// cacheHandler wraps the handler func with a cache layer +// hnd is the processor func that the cache will wrap +// rc is the route cache implementation to be used +func cacheHandler(hnd sync.ProcessorFunc, rc *routeCache) sync.ProcessorFunc { + return func(ctx context.Context, request *sync.Request) (response *sync.Response, e error) { - now := instant() + responseHeaders := make(map[string]string) - skipCache, onlyIfCached, ttl := extractCacheHeaders(request) + now := rc.instant() - if skipCache { - return hnd(ctx, request) + cfg, warning := extractCacheHeaders(request, rc.minAge, rc.maxFresh) + if cfg.expiryValidator == nil { + cfg.expiryValidator = expiryCheck } + key := extractRequestKey(rc.path, request) // TODO : add metrics - key := createRequestKey(request) - if resp, ok, err := cache.Get(key); ok && err == nil { - // TODO : cache also errors ??? - if r, ok := resp.(cachedResponse); ok && notExpired(now, r.lastValid, ttl) { - println(fmt.Sprintf("cache = %v", cache)) - // TODO : set the headers - // ETag , Last-Modified - return r.response, r.err + var rsp *cachedResponse + var fromCache bool + + // explore the cache + if cfg.noCache && !rc.staleResponse { + // need to execute the handler always + rsp = handlerExecutor(ctx, request, hnd, now, key) + } else { + // lets check the cache if we have anything for the given key + if rsp = cacheRetriever(key, rc, now); rsp == nil { + // we have not encountered this key before + rsp = handlerExecutor(ctx, request, hnd, now, key) } else { - log.Errorf("could not parse cached response from %v", resp) + expiry := int64(rc.ttl / time.Second) + if !isValid(expiry, now, rsp.lastValid, append(cfg.validators, cfg.expiryValidator)...) 
{ + tmpRsp := handlerExecutor(ctx, request, hnd, now, key) + if rc.staleResponse && tmpRsp.err != nil { + warning = "last-valid" + fromCache = true + } else { + rsp = tmpRsp + } + } else { + fromCache = true + } } - } else if err != nil { - log.Debugf("could not read cache value for [ key = %v , err = %v ]", key, err) } - - if onlyIfCached { - // return empty response if we have cache-only header present + // TODO : use the forceCache parameter + if cfg.forceCache { + // return empty response if we have rc-only responseHeaders present return sync.NewResponse([]byte{}), nil } - // we have not encountered this key before - response, e = hnd(ctx, request) - resp := cachedResponse{ - response: response, - lastValid: now, - err: e, + response = rsp.response + e = rsp.err + + // TODO : abstract into method + if e == nil { + responseHeaders[eTagHeader] = rsp.etag + + responseHeaders[cacheControlHeader] = genCacheControlHeader(rc.ttl, now-rsp.lastValid) + + if warning != "" && fromCache { + responseHeaders[warningHeader] = warning + } + + response.Headers = responseHeaders + } + + // we cache response only if we did not retrieve it from the cache itself and error is nil + if !fromCache && e == nil { + if err := rc.cache.Set(key, rsp); err != nil { + log.Errorf("could not cache response for request key %s %v", key, err) + } } - err := cache.Set(key, resp) - log.Errorf("could not cache response for request key %s %w", key, err) + return } } -func extractCacheHeaders(request *sync.Request) (bool, bool, int64) { - var noCache bool - var forceCache bool - var ttl int64 - println(fmt.Sprintf("request = %v", request)) - if CacheControl, ok := request.Headers[CacheControlHeader]; ok { - // time to live threshold - for _, header := range strings.Split(CacheControl, ",") { - println(fmt.Sprintf("header = %v", header)) - keyValue := strings.Split(header, "=") - println(fmt.Sprintf("keyValue = %v", keyValue)) - if cacheHeader, ok := cacheHeaders[keyValue[0]]; ok { - switch cacheHeader { - case max_stale: - /** - Indicates that the client is willing to accept a response that has - exceeded its expiration time. If max-stale is assigned a value, - then the client is willing to accept a response that has exceeded - its expiration time by no more than the specified number of - seconds. If no value is assigned to max-stale, then the client is - willing to accept a stale response of any age. - */ - fallthrough - case max_age: - /** - Indicates that the client is willing to accept a response whose - age is no greater than the specified time in seconds. Unless max- - stale directive is also included, the client is not willing to - accept a stale response. - */ - expiration, err := strconv.Atoi(keyValue[1]) - if err == nil { - ttl -= int64(expiration) - } - case min_fresh: - /** - Indicates that the client is willing to accept a response whose - freshness lifetime is no less than its current age plus the - specified time in seconds. That is, the client wants a response - that will still be fresh for at least the specified number of - seconds. - */ - freshness, err := strconv.Atoi(keyValue[1]) - if err == nil { - ttl += int64(freshness) - } - case no_cache: - /** - return response if entity has changed - e.g. (304 response if nothing has changed : 304 Not Modified) - it SHOULD NOT include min-fresh, max-stale, or max-age. 
- reqeust should be accompanied by an ETag token - */ - fallthrough - case no_store: - /** - no storage whatsoever - */ - fallthrough - case private: - /** - server does not support private caching, - this should be the responsibility of the client - */ - noCache = true - case no_transform: - /** - response should be kept intact - */ - // we always use no-transform - case only_if_cached: - /** - return only if is in cache , otherwise 504 - */ - forceCache = true - default: - log.Warnf("unrecognised cache header %s", header) - } +// cacheRetriever is the implementation that will provide a cachedResponse instance from the cache, +// if it exists +var cacheRetriever = func(key string, rc *routeCache, now int64) *cachedResponse { + if resp, ok, err := rc.cache.Get(key); ok && err == nil { + if r, ok := resp.(*cachedResponse); ok { + return r + } else { + log.Errorf("could not parse cached response %v for key %s", resp, key) + } + } else if err != nil { + log.Debugf("could not read cache value for [ key = %v , err = %v ]", key, err) + } + return nil +} + +// handlerExecutor is the function that will create a new cachedResponse from based on the handler implementation +var handlerExecutor = func(ctx context.Context, request *sync.Request, hnd sync.ProcessorFunc, now int64, key string) *cachedResponse { + response, err := hnd(ctx, request) + return &cachedResponse{ + response: response, + lastValid: now, + etag: genETag([]byte(key), time.Now().Nanosecond()), + err: err, + } +} + +// extractCacheHeaders extracts the client request headers allowing the client some control over the cache +func extractCacheHeaders(request *sync.Request, minAge, maxFresh uint) (*cacheControl, string) { + if CacheControl, ok := request.Headers[cacheControlHeader]; ok { + return extractCacheHeader(CacheControl, minAge, maxFresh) + } + // if we have no headers we assume we dont want to cache, + return &cacheControl{noCache: minAge == 0 && maxFresh == 0}, "" +} + +func extractCacheHeader(headers string, minAge, maxFresh uint) (*cacheControl, string) { + cfg := cacheControl{ + validators: make([]validator, 0), + } + + var warning string + wrn := make([]string, 0) + + for _, header := range strings.Split(headers, ",") { + keyValue := strings.Split(header, "=") + headerKey := strings.ToLower(keyValue[0]) + switch headerKey { + case maxStale: + /** + Indicates that the client is willing to accept a response that has + exceeded its expiration time. If max-stale is assigned a value, + then the client is willing to accept a response that has exceeded + its expiration time by no more than the specified number of + seconds. If no value is assigned to max-stale, then the client is + willing to accept a stale response of any age. + */ + value, ok := parseValue(keyValue) + if !ok || value < 0 { + log.Debugf("invalid value for header '%s', defaulting to '0' ", keyValue) + value = 0 + } + cfg.expiryValidator = func(age, ttl int64) bool { + return ttl-age+value >= 0 + } + case maxAge: + /** + Indicates that the client is willing to accept a response whose + age is no greater than the specified time in seconds. Unless max- + stale directive is also included, the client is not willing to + accept a stale response. 
+ */ + value, ok := parseValue(keyValue) + if !ok || value < 0 { + log.Debugf("invalid value for header '%s', defaulting to '0' ", keyValue) + value = 0 } + value, adjusted := min(value, int64(minAge)) + if adjusted { + wrn = append(wrn, fmt.Sprintf("max-age=%d", minAge)) + } + cfg.validators = append(cfg.validators, func(age, ttl int64) bool { + return age <= value + }) + case minFresh: + /** + Indicates that the client is willing to accept a response whose + freshness lifetime is no less than its current age plus the + specified time in seconds. That is, the client wants a response + that will still be fresh for at least the specified number of + seconds. + */ + value, ok := parseValue(keyValue) + if !ok || value < 0 { + log.Debugf("invalid value for header '%s', defaulting to '0' ", keyValue) + value = 0 + } + value, adjusted := max(value, int64(maxFresh)) + if adjusted { + wrn = append(wrn, fmt.Sprintf("min-fresh=%d", maxFresh)) + } + cfg.validators = append(cfg.validators, func(age, ttl int64) bool { + return ttl-age >= value + }) + case noCache: + /** + return response if entity has changed + e.g. (304 response if nothing has changed : 304 Not Modified) + it SHOULD NOT include min-fresh, max-stale, or max-age. + request should be accompanied by an ETag token + */ + fallthrough + case noStore: + /** + no storage whatsoever + */ + cfg.noCache = true + case onlyIfCached: + /** + return only if is in cache , otherwise 504 + */ + cfg.forceCache = true + default: + log.Warnf("unrecognised cache header %s", header) + } + } + + if len(wrn) > 0 { + warning = strings.Join(wrn, ",") + } + + return &cfg, warning +} + +func min(value, threshold int64) (int64, bool) { + if value < threshold { + return threshold, true + } + return value, false +} + +func max(value, threshold int64) (int64, bool) { + if threshold > 0 && value > threshold { + return threshold, true + } + return value, false +} + +func parseValue(keyValue []string) (int64, bool) { + if len(keyValue) > 1 { + value, err := strconv.ParseInt(keyValue[1], 10, 64) + if err == nil { + return value, true } - } else { - // we dont have any cache-control headers, so no intention of caching anything - noCache = true } - return noCache, forceCache, ttl + return 0, false } type cachedResponse struct { response *sync.Response lastValid int64 + etag string err error } -func createRequestKey(request *sync.Request) string { - // TODO : define the key requirements in more detail - return fmt.Sprintf("%s:%s", request.Headers, request.Fields) +func extractRequestKey(path string, request *sync.Request) string { + return fmt.Sprintf("%s:%s", path, request.Fields) } -func notExpired(now, last, ttl int64) bool { - nilExp := now+ttl <= last - return nilExp +func isValid(ttl, now, last int64, validators ...validator) bool { + if len(validators) == 0 { + return false + } + age := now - last + for _, validator := range validators { + if !validator(age, ttl) { + return false + } + } + return true +} + +func genETag(key []byte, t int) string { + return fmt.Sprintf("%d-%d", crc32.ChecksumIEEE(key), t) +} + +func genCacheControlHeader(ttl time.Duration, lastValid int64) string { + maxAge := int64(ttl/time.Second) - lastValid + if maxAge <= 0 { + return fmt.Sprintf("%s", mustRevalidate) + } + return fmt.Sprintf("%s=%d", maxAge, int64(ttl/time.Second)-lastValid) } diff --git a/sync/http/cache_test.go b/sync/http/cache_test.go index 1e7f7971e0..dfbf9740e7 100644 --- a/sync/http/cache_test.go +++ b/sync/http/cache_test.go @@ -2,220 +2,1320 @@ package http import ( 
"context" - "fmt" + "errors" + "os" "strconv" "testing" + "time" + + "github.com/beatlabs/patron/cache" + + "github.com/beatlabs/patron/log" + "github.com/beatlabs/patron/log/zerolog" "github.com/stretchr/testify/assert" "github.com/beatlabs/patron/sync" ) +func TestMain(m *testing.M) { + + err := log.Setup(zerolog.Create(log.Level(log.DebugLevel)), make(map[string]interface{})) + + if err != nil { + os.Exit(1) + } + + exitVal := m.Run() + + os.Exit(exitVal) + +} + func TestExtractCacheHeaders(t *testing.T) { + type caheRequestCondition struct { + noCache bool + forceCache bool + validators int + expiryValidator bool + } + type args struct { - headers map[string]string - noCache bool - forceCache bool - ttl int64 + cfg caheRequestCondition + headers map[string]string + wrn string } // TODO : cover the extract headers functionality from 'real' http header samples + minAge := uint(0) + minFresh := uint(0) + params := []args{ { - headers: map[string]string{CacheControlHeader: "max-age=10"}, - noCache: false, - forceCache: false, - ttl: -10, + headers: map[string]string{cacheControlHeader: "max-age=10"}, + cfg: caheRequestCondition{ + noCache: false, + forceCache: false, + validators: 1, + }, + wrn: "", + }, + // header cannot be parsed + { + headers: map[string]string{cacheControlHeader: "maxage=10"}, + cfg: caheRequestCondition{ + noCache: false, + forceCache: false, + }, + wrn: "", + }, + // header resets to minAge + { + headers: map[string]string{cacheControlHeader: "max-age=twenty"}, + cfg: caheRequestCondition{ + noCache: false, + forceCache: false, + validators: 1, + }, + wrn: "", + }, + { + headers: map[string]string{cacheControlHeader: "min-fresh=10"}, + cfg: caheRequestCondition{ + noCache: false, + forceCache: false, + validators: 1, + }, + wrn: "", + }, + { + headers: map[string]string{cacheControlHeader: "min-fresh=5,max-age=5"}, + cfg: caheRequestCondition{ + noCache: false, + forceCache: false, + validators: 2, + }, + wrn: "", + }, + { + headers: map[string]string{cacheControlHeader: "max-stale=5"}, + cfg: caheRequestCondition{ + noCache: false, + forceCache: false, + expiryValidator: true, + }, + wrn: "", + }, + { + headers: map[string]string{cacheControlHeader: "no-cache"}, + cfg: caheRequestCondition{ + noCache: true, + forceCache: false, + }, + wrn: "", + }, + { + headers: map[string]string{cacheControlHeader: "no-store"}, + cfg: caheRequestCondition{ + noCache: true, + forceCache: false, + }, + wrn: "", }, } for _, param := range params { req := sync.NewRequest(map[string]string{}, nil, param.headers, nil) - noCache, forceCache, ttl := extractCacheHeaders(req) - assert.Equal(t, param.noCache, noCache) - assert.Equal(t, param.forceCache, forceCache) - assert.Equal(t, param.ttl, ttl) + cfg, wrn := extractCacheHeaders(req, minAge, minFresh) + assert.Equal(t, param.wrn, wrn) + assert.Equal(t, param.cfg.noCache, cfg.noCache) + assert.Equal(t, param.cfg.forceCache, cfg.forceCache) + assert.Equal(t, param.cfg.validators, len(cfg.validators)) + assert.Equal(t, param.cfg.expiryValidator, cfg.expiryValidator != nil) } } -type testArgs struct { +type routeConfig struct { + path string + ttl time.Duration + hnd sync.ProcessorFunc + minAge uint + maxFresh uint + staleResponse bool +} + +type requestParams struct { header map[string]string fields map[string]string - response *sync.Response timeInstance int64 - err error } -// TestCacheMaxAgeHeader tests the cache implementation -// for the same request, -// for header max-age -func TestCacheMaxAgeHeader(t *testing.T) { +type testArgs 
struct { + routeConfig routeConfig + cache cache.Cache + requestParams requestParams + response *sync.Response + err error +} + +func testHeader(maxAge int) map[string]string { + header := make(map[string]string) + header[cacheControlHeader] = genCacheControlHeader(time.Duration(maxAge)*time.Second, 0) + return header +} + +func testHeaderWithWarning(maxAge int, warning string) map[string]string { + h := testHeader(maxAge) + h[warningHeader] = warning + return h +} + +func TestMinAgeCache_WithoutClientHeader(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 1, // to avoid no-cache + staleResponse: false, + } args := [][]testArgs{ // cache expiration with max-age header { - // initial request + // initial request, will fill up the cache { - fields: map[string]string{"VALUE": "1"}, - header: map[string]string{CacheControlHeader: "max-age=10"}, - response: sync.NewResponse(10), - timeInstance: 1, - err: nil, + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 1, + }, + routeConfig: rc, + response: &sync.Response{Payload: 10, Headers: testHeader(10)}, + err: nil, }, // cache response { - fields: map[string]string{"VALUE": "1"}, - header: map[string]string{CacheControlHeader: "max-age=10"}, - response: sync.NewResponse(10), - timeInstance: 9, - err: nil, + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 9, + }, + routeConfig: rc, + response: &sync.Response{Payload: 10, Headers: testHeader(2)}, + err: nil, }, - // still cached response because we are at the edge of the expiry e.g. 11 - 1 = 10 + // still cached response { - fields: map[string]string{"VALUE": "1"}, - header: map[string]string{CacheControlHeader: "max-age=10"}, - response: sync.NewResponse(10), - timeInstance: 11, - err: nil, + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 11, + }, + routeConfig: rc, + response: &sync.Response{Payload: 10, Headers: testHeader(0)}, + err: nil, }, - // new response because cache has expired + // new response , due to expiry validator 10 + 1 - 12 < 0 { - fields: map[string]string{"VALUE": "1"}, - header: map[string]string{CacheControlHeader: "max-age=10"}, - response: sync.NewResponse(120), - timeInstance: 12, - err: nil, + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 12, + }, + routeConfig: rc, + response: &sync.Response{Payload: 120, Headers: testHeader(10)}, + err: nil, }, - // make an extra request with the new cache value + }, + } + run(t, args) +} + +func TestNoMinAgeCache_WithoutClientHeader(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 0, // min age is set to '0', + // this means , without client control headers we will always return a non-cached response + // despite the ttl parameter + staleResponse: false, + } + + args := [][]testArgs{ + // cache expiration with max-age header + { + // initial request, will fill up the cache { - fields: map[string]string{"VALUE": "1"}, - header: map[string]string{CacheControlHeader: "max-age=10"}, - response: sync.NewResponse(120), - timeInstance: 15, - err: nil, + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 1, + }, + routeConfig: rc, + response: &sync.Response{Payload: 10, Headers: testHeader(10)}, + err: nil, }, - // and another when the previous has expired 12 + 10 = 22 + // no cached response { - fields: map[string]string{"VALUE": "1"}, - header: 
map[string]string{CacheControlHeader: "max-age=10"}, - response: sync.NewResponse(230), - timeInstance: 23, - err: nil, + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 2, + }, + routeConfig: rc, + response: &sync.Response{Payload: 20, Headers: testHeader(10)}, + err: nil, }, }, } + run(t, args) +} + +func TestNoMinAgeCache_WithMaxAgeHeader(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 0, + staleResponse: false, + } + args := [][]testArgs{ + // cache expiration with max-age header + { + // initial request, will fill up the cache + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 1, + }, + routeConfig: rc, + response: &sync.Response{Payload: 10, Headers: testHeader(10)}, + err: nil, + }, + // cached response, because of the max-age header + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=10"}, + timeInstance: 3, + }, + routeConfig: rc, + response: &sync.Response{Payload: 10, Headers: testHeader(8)}, + err: nil, + }, + // new response, because of missing header, and minAge == 0 + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 9, + }, + routeConfig: rc, + response: &sync.Response{Payload: 90, Headers: testHeader(10)}, + err: nil, + }, + // new cached response , because max-age header again + // note : because of the cache refresh triggered by the previous call we see the last cached value + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=10"}, + timeInstance: 14, + }, + routeConfig: rc, + response: &sync.Response{Payload: 90, Headers: testHeader(5)}, + err: nil, + }, + }, + } run(t, args) +} +func TestCache_WithConstantMaxAgeHeader(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 5, + staleResponse: false, + } + + args := [][]testArgs{ + // cache expiration with max-age header + { + // initial request, will fill up the cache + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=5"}, + timeInstance: 1, + }, + routeConfig: rc, + response: &sync.Response{Payload: 10, Headers: testHeader(10)}, + err: nil, + }, + // cached response + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=5"}, + timeInstance: 3, + }, + routeConfig: rc, + response: &sync.Response{Payload: 10, Headers: testHeader(8)}, + err: nil, + }, + // new response, because max-age > 9 - 1 + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=5"}, + timeInstance: 9, + }, + routeConfig: rc, + response: &sync.Response{Payload: 90, Headers: testHeader(10)}, + err: nil, + }, + // cached response right before the age threshold max-age == 14 - 9 + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=5"}, + timeInstance: 14, + }, + routeConfig: rc, + response: &sync.Response{Payload: 90, Headers: testHeader(5)}, + err: nil, + }, + // new response, because max-age > 15 - 9 + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=5"}, + timeInstance: 15, + }, + 
routeConfig: rc, + response: &sync.Response{Payload: 150, Headers: testHeader(10)}, + err: nil, + }, + }, + } + run(t, args) } -// TestCacheMaxAgeHeader tests the cache implementation -// for the same request, -// for header max-age -func TestCacheMinFreshHeader(t *testing.T) { +func TestCache_WithMaxAgeHeaders(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 30 * time.Second, + staleResponse: false, + } args := [][]testArgs{ // cache expiration with max-age header { - // initial request + // initial request, will fill up the cache + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(30)}, + err: nil, + }, + // cached response + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=10"}, + timeInstance: 10, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(20)}, + err: nil, + }, + // cached response { - fields: map[string]string{"VALUE": "1"}, - header: map[string]string{CacheControlHeader: "min-fresh=10"}, - response: sync.NewResponse(10), - timeInstance: 1, - err: nil, + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=20"}, + timeInstance: 20, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(10)}, + err: nil, + }, + // new response + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=5"}, + timeInstance: 20, + }, + routeConfig: rc, + response: &sync.Response{Payload: 200, Headers: testHeader(30)}, + err: nil, }, // cache response { - fields: map[string]string{"VALUE": "1"}, - header: map[string]string{CacheControlHeader: "max-age=10"}, - response: sync.NewResponse(10), - timeInstance: 9, - err: nil, + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=25"}, + timeInstance: 25, + }, + routeConfig: rc, + response: &sync.Response{Payload: 200, Headers: testHeader(25)}, + err: nil, + }, + }, + } + run(t, args) +} + +func TestMinAgeCache_WithHighMaxAgeHeaders(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 5 * time.Second, + staleResponse: false, + } + + args := [][]testArgs{ + // cache expiration with max-age header + { + // initial request, will fill up the cache + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(5)}, + err: nil, + }, + // despite the max-age request, the cache will refresh because of it's ttl + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=100"}, + timeInstance: 6, + }, + routeConfig: rc, + response: &sync.Response{Payload: 60, Headers: testHeader(5)}, + err: nil, + }, + }, + } + run(t, args) +} + +func TestNoMinAgeCache_WithLowMaxAgeHeaders(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 30 * time.Second, + staleResponse: false, + } + + args := [][]testArgs{ + // cache expiration with max-age header + { + // initial request, will fill up the cache + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: 
testHeader(30)}, + err: nil, + }, + // a max-age=0 request will always refresh the cache, + // if there is not minAge limit set + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=0"}, + timeInstance: 1, + }, + routeConfig: rc, + response: &sync.Response{Payload: 10, Headers: testHeader(30)}, + err: nil, + }, + }, + } + run(t, args) +} + +func TestMinAgeCache_WithMaxAgeHeaders(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 30 * time.Second, + minAge: 5, + staleResponse: false, + } + + args := [][]testArgs{ + // cache expiration with max-age header + { + // initial request, will fill up the cache + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(30)}, + err: nil, + }, + // cached response still, because of minAge override + // note : max-age=2 gets ignored + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=2"}, + timeInstance: 4, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeaderWithWarning(26, "max-age=5")}, + err: nil, + }, + // cached response because of bigger max-age parameter + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=20"}, + timeInstance: 5, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(25)}, + err: nil, + }, + // new response because of minAge floor + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-age=3"}, + timeInstance: 6, + }, + routeConfig: rc, + // note : no warning because it s a new response + response: &sync.Response{Payload: 60, Headers: testHeader(30)}, + err: nil, + }, + }, + } + run(t, args) +} + +func TestCache_WithConstantMinFreshHeaders(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 10, + staleResponse: false, + } + + args := [][]testArgs{ + { + // initial request + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=5"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(10)}, + err: nil, + }, + // expecting cache response, as value is still fresh : 5 - 0 == 5 + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=5"}, + timeInstance: 5, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(5)}, + err: nil, + }, + // expecting new response, as value is not fresh enough + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=5"}, + timeInstance: 6, + }, + routeConfig: rc, + response: &sync.Response{Payload: 60, Headers: testHeader(10)}, + err: nil, + }, + // cache response, as value is expired : 11 - 6 <= 5 + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=5"}, + timeInstance: 11, + }, + routeConfig: rc, + response: &sync.Response{Payload: 60, Headers: testHeader(5)}, + err: nil, + }, + // expecting new response + { + requestParams: requestParams{ + fields: 
map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=5"}, + timeInstance: 12, + }, + routeConfig: rc, + response: &sync.Response{Payload: 120, Headers: testHeader(10)}, + err: nil, + }, + }, + } + run(t, args) +} + +func TestNoMaxFreshCache_WithExtremeMinFreshHeaders(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 10, + staleResponse: false, + } + + args := [][]testArgs{ + { + // initial request + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=5"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(10)}, + err: nil, }, - // still cached response because we are at the edge of the expiry e.g. 11 - 1 = 10 { - fields: map[string]string{"VALUE": "1"}, - header: map[string]string{CacheControlHeader: "max-age=10"}, - response: sync.NewResponse(10), - timeInstance: 11, - err: nil, + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=100"}, + timeInstance: 1, + }, + routeConfig: rc, + response: &sync.Response{Payload: 10, Headers: testHeader(10)}, + err: nil, }, - // new response because cache has expired + }, + } + run(t, args) +} + +func TestMaxFreshCache_WithMinFreshHeaders(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 10, + maxFresh: 5, + staleResponse: false, + } + + args := [][]testArgs{ + { + // initial request + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=5"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(10)}, + err: nil, + }, + // expecting cache response, as min-fresh is bounded by maxFresh configuration parameter + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=100"}, + timeInstance: 5, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeaderWithWarning(5, "min-fresh=5")}, + err: nil, + }, + }, + } + run(t, args) +} + +func TestCache_WithConstantMaxStaleHeader(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 10, + staleResponse: false, + } + + args := [][]testArgs{ + { + // initial request, will fill up the cache + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(10)}, + err: nil, + }, + // cached response { - fields: map[string]string{"VALUE": "1"}, - header: map[string]string{CacheControlHeader: "max-age=10"}, - response: sync.NewResponse(120), - timeInstance: 12, - err: nil, + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-stale=5"}, + timeInstance: 3, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(7)}, + err: nil, }, - // make an extra request with the new cache value + // cached response { - fields: map[string]string{"VALUE": "1"}, - header: map[string]string{CacheControlHeader: "max-age=10"}, - response: sync.NewResponse(120), - timeInstance: 15, - err: nil, + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-stale=5"}, + timeInstance: 
8, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(2)}, + err: nil, }, - // and another when the previous has expired 12 + 10 = 22 + // cached response , still stale threshold not breached , 12 - 0 <= 10 + 5 { - fields: map[string]string{"VALUE": "1"}, - header: map[string]string{CacheControlHeader: "max-age=10"}, - response: sync.NewResponse(230), - timeInstance: 23, - err: nil, + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-stale=5"}, + timeInstance: 15, + }, + routeConfig: rc, + // note : we are also getting a must-revalidate header + response: &sync.Response{Payload: 0, Headers: testHeader(-5)}, + err: nil, + }, + // new response + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "max-stale=5"}, + timeInstance: 16, + }, + routeConfig: rc, + response: &sync.Response{Payload: 160, Headers: testHeader(10)}, + err: nil, }, }, } + run(t, args) +} + +func TestCache_WithMixedHeaders(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 5, + staleResponse: false, + } + args := [][]testArgs{ + { + // initial request + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=5,max-age=5"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(10)}, + err: nil, + }, + // expecting cache response, as value is still fresh : 5 - 0 == min-fresh and still young : 5 - 0 < max-age + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=5,max-age=10"}, + timeInstance: 5, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(5)}, + err: nil, + }, + // new response, as value is not fresh enough : 6 - 0 > min-fresh + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=5,max-age=10"}, + timeInstance: 6, + }, + routeConfig: rc, + response: &sync.Response{Payload: 60, Headers: testHeader(10)}, + err: nil, + }, + // cached response, as value is still fresh enough and still young + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=10,max-age=8"}, + timeInstance: 6, + }, + routeConfig: rc, + response: &sync.Response{Payload: 60, Headers: testHeader(10)}, + err: nil, + }, + // new response, as value is still fresh enough but too old + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + header: map[string]string{cacheControlHeader: "min-fresh=10,max-age=8"}, + timeInstance: 15, + }, + routeConfig: rc, + response: &sync.Response{Payload: 150, Headers: testHeader(10)}, + err: nil, + }, + }, + } run(t, args) +} + +func TestStaleCache_WithoutHeaders(t *testing.T) { + + hndErr := errors.New("error encountered on handler") + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 10, + maxFresh: 10, + staleResponse: true, + } + args := [][]testArgs{ + { + // initial request + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(10)}, + }, + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 11, + 
}, + routeConfig: routeConfig{ + path: rc.path, + ttl: rc.ttl, + hnd: func(i context.Context, i2 *sync.Request) (response *sync.Response, e error) { + return nil, hndErr + }, + minAge: rc.minAge, + maxFresh: rc.maxFresh, + staleResponse: rc.staleResponse, + }, + response: &sync.Response{Payload: 0, Headers: testHeaderWithWarning(-1, "last-valid")}, + }, + }, + } + run(t, args) } + +func TestNoStaleCache_WithoutHeaders(t *testing.T) { + + hndErr := errors.New("error encountered on handler") + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 10, + maxFresh: 10, + staleResponse: false, + } + + args := [][]testArgs{ + { + // initial request + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(10)}, + }, + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 11, + }, + routeConfig: routeConfig{ + path: rc.path, + ttl: rc.ttl, + hnd: func(i context.Context, i2 *sync.Request) (response *sync.Response, e error) { + return nil, hndErr + }, + minAge: rc.minAge, + maxFresh: rc.maxFresh, + staleResponse: rc.staleResponse, + }, + err: hndErr, + }, + }, + } + run(t, args) +} + +// TODO : test stale response for error (with Warning) + +func TestCache_WithHandlerErr(t *testing.T) { + + hndErr := errors.New("error encountered on handler") + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 10, + maxFresh: 10, + staleResponse: false, + hnd: func(i context.Context, i2 *sync.Request) (response *sync.Response, e error) { + return nil, hndErr + }, + } + + args := [][]testArgs{ + { + // initial request + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 0, + }, + routeConfig: rc, + err: hndErr, + }, + }, + } + run(t, args) +} + +func TestCache_WithCacheGetErr(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 10, + maxFresh: 10, + staleResponse: false, + } + + cacheImpl := &testingCache{ + cache: make(map[string]interface{}), + getErr: errors.New("get error"), + } + + args := [][]testArgs{ + { + // initial request + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(10)}, + cache: cacheImpl, + }, + // new response, because of cache get error + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 1, + }, + routeConfig: rc, + response: &sync.Response{Payload: 10, Headers: testHeader(10)}, + cache: cacheImpl, + }, + }, + } + run(t, args) + + assert.Equal(t, 2, cacheImpl.getCount) + assert.Equal(t, 2, cacheImpl.setCount) + +} + +func TestCache_WithCacheSetErr(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 10, + maxFresh: 10, + staleResponse: false, + } + + cacheImpl := &testingCache{ + cache: make(map[string]interface{}), + setErr: errors.New("set error"), + } + + args := [][]testArgs{ + { + // initial request + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(10)}, + cache: cacheImpl, + }, + // new response, because of cache get error + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 1, + }, + routeConfig: rc, + response: &sync.Response{Payload: 10, Headers: 
testHeader(10)}, + cache: cacheImpl, + }, + }, + } + run(t, args) + + assert.Equal(t, 2, cacheImpl.getCount) + assert.Equal(t, 2, cacheImpl.setCount) + +} + +func TestCache_WithMixedPaths(t *testing.T) { + + rc1 := routeConfig{ + path: "/1", + ttl: 10 * time.Second, + minAge: 10, + maxFresh: 10, + staleResponse: false, + } + + rc2 := routeConfig{ + path: "/2", + ttl: 10 * time.Second, + minAge: 10, + maxFresh: 10, + staleResponse: false, + } + + args := [][]testArgs{ + { + // initial request + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 0, + }, + routeConfig: rc1, + response: &sync.Response{Payload: 0, Headers: testHeader(10)}, + err: nil, + }, + // cached response for the same path + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 1, + }, + routeConfig: rc1, + response: &sync.Response{Payload: 0, Headers: testHeader(9)}, + err: nil, + }, + // initial request for second path + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 1, + }, + routeConfig: rc2, + response: &sync.Response{Payload: 10, Headers: testHeader(10)}, + err: nil, + }, + // cached response for second path + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 2, + }, + routeConfig: rc2, + response: &sync.Response{Payload: 10, Headers: testHeader(9)}, + err: nil, + }, + }, + } + run(t, args) +} + +func TestCache_WithMixedRequestParameters(t *testing.T) { + + rc := routeConfig{ + path: "/", + ttl: 10 * time.Second, + minAge: 10, + maxFresh: 10, + staleResponse: false, + } + + args := [][]testArgs{ + { + // initial request + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 0, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(10)}, + err: nil, + }, + // cached response for same request parameter + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "1"}, + timeInstance: 1, + }, + routeConfig: rc, + response: &sync.Response{Payload: 0, Headers: testHeader(9)}, + err: nil, + }, + // new response for different request parameter + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "2"}, + timeInstance: 1, + }, + routeConfig: rc, + response: &sync.Response{Payload: 20, Headers: testHeader(10)}, + err: nil, + }, + // cached response for second request parameter + { + requestParams: requestParams{ + fields: map[string]string{"VALUE": "2"}, + timeInstance: 2, + }, + routeConfig: rc, + response: &sync.Response{Payload: 20, Headers: testHeader(9)}, + err: nil, + }, + }, + } + run(t, args) +} + +// TODO : test no-cache +// TODO : test no-store +// TODO : test only-if-cached + func run(t *testing.T, args [][]testArgs) { + + // create a test request handler + // that returns the current time instant times '10' multiplied by the VALUE parameter in the request handler := func(timeInstance int64) func(ctx context.Context, request *sync.Request) (*sync.Response, error) { return func(ctx context.Context, request *sync.Request) (*sync.Response, error) { i, err := strconv.Atoi(request.Fields["VALUE"]) if err != nil { return nil, err } - // return the specified parameter multiplied by the time instant return sync.NewResponse(i * 10 * int(timeInstance)), nil } } - cache := &testingCache{cache: make(map[string]interface{})} + // test cache implementation + cacheIml := &testingCache{cache: make(map[string]interface{})} for _, testArg := range args { for _, arg := 
range testArg { - request := sync.NewRequest(arg.fields, nil, arg.header, nil) - // initial request - response, err := cacheHandler(handler(arg.timeInstance), cache, func() int64 { - return arg.timeInstance + request := sync.NewRequest(arg.requestParams.fields, nil, arg.requestParams.header, nil) + + var hnd sync.ProcessorFunc + if arg.routeConfig.hnd != nil { + hnd = arg.routeConfig.hnd + } else { + hnd = handler(arg.requestParams.timeInstance) + } + + var ch cache.Cache + if arg.cache != nil { + ch = arg.cache + } else { + ch = cacheIml + } + + response, err := cacheHandler(hnd, &routeCache{ + cache: ch, + instant: func() int64 { + return arg.requestParams.timeInstance + }, + ttl: arg.routeConfig.ttl, + path: arg.routeConfig.path, + minAge: arg.routeConfig.minAge, + maxFresh: arg.routeConfig.maxFresh, + staleResponse: arg.routeConfig.staleResponse, })(context.Background(), request) + if arg.err != nil { assert.Error(t, err) - assert.NotNil(t, response) - // TODO : assert type of error + assert.Nil(t, response) + assert.Equal(t, err, arg.err) } else { assert.NoError(t, err) assert.NotNil(t, response) - assert.Equal(t, arg.response, response) + assert.Equal(t, arg.response.Payload, response.Payload) + assert.Equal(t, arg.response.Headers[cacheControlHeader], response.Headers[cacheControlHeader]) + assert.Equal(t, arg.response.Headers[warningHeader], response.Headers[warningHeader]) + assert.NotNil(t, arg.response.Headers[eTagHeader]) + assert.False(t, response.Headers[eTagHeader] == "") } } } } type testingCache struct { - cache map[string]interface{} + cache map[string]interface{} + getCount int + setCount int + getErr error + setErr error } func (t *testingCache) Get(key string) (interface{}, bool, error) { + t.getCount++ + if t.getErr != nil { + return nil, false, t.getErr + } r, ok := t.cache[key] - println(fmt.Sprintf("key = %v", key)) - println(fmt.Sprintf("r = %v", r)) return r, ok, nil } @@ -232,8 +1332,10 @@ func (t *testingCache) Remove(key string) error { } func (t *testingCache) Set(key string, value interface{}) error { + t.setCount++ + if t.setErr != nil { + return t.getErr + } t.cache[key] = value - println(fmt.Sprintf("key = %v", key)) - println(fmt.Sprintf("value = %v", value)) return nil } diff --git a/sync/http/route_cache_test.go b/sync/http/route_cache_test.go new file mode 100644 index 0000000000..d02cfda642 --- /dev/null +++ b/sync/http/route_cache_test.go @@ -0,0 +1 @@ +package http