index.d.ts
import { IncomingMessage, ServerResponse } from "http";
/**
* Cacheable response is an HTTP middleware for serving a pre-calculated response.
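*
* A minimal usage sketch, assuming a hypothetical async `renderPage(req)` helper that returns the HTML to cache:
*
* @example
* import http from "http";
* import cacheableResponse from "cacheable-response";
*
* const ssrCache = cacheableResponse({
*   get: async ({ req }) => ({ data: await renderPage(req), ttl: 7200000 }),
*   send: ({ data, res }) => res.end(data)
* });
*
* http.createServer((req, res) => ssrCache({ req, res })).listen(3000);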
*/
declare function CacheableResponse<
Options extends CacheableResponse.GetOpts = CacheableResponse.GetOpts,
Props extends CacheableResponse.Props = {},
Data extends {} = {}
>(
params: CacheableResponse.InputParams<Options, Props, Data>
): (opts: Options) => any;
/** Framework agnostic req and res object */
export interface HttpContext {
req: IncomingMessage;
res: ServerResponse;
}
declare namespace CacheableResponse {
export interface InputParams<
Options extends GetOpts,
GetReturnProps extends Props,
Data extends {}
> {
/**
* The name of the query parameter used for intentionally bypassing the cached copy.
*
* The default value is `'force'`.
*/
bypassQueryParameter?: string;
/**
* The cache instance used for backing your pre-calculated server-side response copies.
*
* The default value is an in-memory instance.
*/
cache?: CacheProvider<GetReturnProps, Data>;
/**
* Enable compressing/decompressing data using the Brotli compression format.
*
* The default value is `true`.
*/
compress?: boolean;
/**
* The method to be called for creating a fresh cacheable response associated with the current route path.
*/
get: (
opts: Options
) => Promise<
(Optional<Cache<Data>, "etag" | "ttl" | "createdAt"> & GetReturnProps) | null
>;
/**
* It determines how the cache key should be computed, receiving `req, res` as input.
*
* The default value derives the key from `req.url`.
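*
* A hypothetical sketch that ignores the query string when computing the key:
*
* @example
* key: ({ req }) => (req.url || "/").split("?")[0]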
*/
key?: (opts: Options) => string;
/**
* When present, a log entry is printed every time cacheable-response is called.
*
* The default value is a noop function, so nothing is logged.
*/
logger?: (payload: object) => void;
/**
* The method used to determine how the content should be rendered.
*/
send: (
opts: GetReturnProps & { data: Data } & Pick<Options, "req" | "res">
) => any;
/**
* Number of milliseconds of grace period after a cached response expires, during which it is refreshed in the background. The latency of the refresh is hidden from the user.
*
* The default value is `3600000`.
*/
staleTtl?: number | boolean;
/**
* Number of milliseconds a cached response is considered valid.
*
* The default value is `86400000`.
*/
ttl?: number;
/**
* It sets the serializer method to be used before compression.
*
* The default value is `JSON.stringify`.
*/
serialize?: (o: any) => string;
/**
* It sets the deserializer method to be used after decompression.
*
* The default value is `JSON.parse`.
*/
deserialize?: (o: string) => any;
}
export type GetOpts = HttpContext & Props;
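/**
* Minimal contract for a pluggable cache backend.
*
* A rough in-memory sketch backed by a `Map`, shown for illustration only
* (it evicts entries with `setTimeout`; a real backend would likely persist
* data and handle expiration more robustly):
*
* @example
* const store = new Map<string, any>();
* const cache = {
*   get: async (key: string) => store.get(key),
*   set: async (key: string, value: any, ttl: number) => {
*     store.set(key, value);
*     setTimeout(() => store.delete(key), ttl);
*     return true as const;
*   }
* };
*/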
export interface CacheProvider<P extends Props, V = any> {
/** Returns the cached value, or the compressed buffer when the `compress` option is set to `true`. */
get(key: string): Promise<Buffer | (Cache<V> & P) | undefined>;
/**
* Sets a value. You can implement an expiry TTL in milliseconds.
*/
set(
key: string,
value: Buffer | (Cache<V> & P),
ttl: number
): Promise<true>;
}
interface Props {
[key: string]: any;
}
interface Cache<T> {
etag: string;
/** JS timestamp */
createdAt: number;
/** ttl in milliseconds */
ttl: number;
/** cached value */
data: T;
}
type Optional<T extends object, K extends keyof T = keyof T> = Omit<T, K> &
Partial<Pick<T, K>>;
}
export default CacheableResponse;