/**
 * @module LRUCache
 */
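// Usage sketch (illustrative only, not part of this file's runtime code):
// assuming the package is consumed as `lru-cache`, a minimal bounded cache
// might look like this. Option values here are arbitrary examples.
//
//   import { LRUCache } from 'lru-cache'
//   const cache = new LRUCache({ max: 500, ttl: 1000 * 60 })
//   cache.set('key', { some: 'value' })
//   cache.get('key') // => { some: 'value' }, and marks 'key' most recently used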
const perf = typeof performance === 'object' &&
    performance &&
    typeof performance.now === 'function'
    ? performance
    : Date;
const warned = new Set();
/* c8 ignore start */
const PROCESS = (typeof process === 'object' && !!process ? process : {});
/* c8 ignore start */
const emitWarning = (msg, type, code, fn) => {
    typeof PROCESS.emitWarning === 'function'
        ? PROCESS.emitWarning(msg, type, code, fn)
        : console.error(`[${code}] ${type}: ${msg}`);
};
let AC = globalThis.AbortController;
let AS = globalThis.AbortSignal;
/* c8 ignore start */
if (typeof AC === 'undefined') {
    //@ts-ignore
    AS = class AbortSignal {
        onabort;
        _onabort = [];
        reason;
        aborted = false;
        addEventListener(_, fn) {
            this._onabort.push(fn);
        }
    };
    //@ts-ignore
    AC = class AbortController {
        constructor() {
            warnACPolyfill();
        }
        signal = new AS();
        abort(reason) {
            if (this.signal.aborted)
                return;
            //@ts-ignore
            this.signal.reason = reason;
            //@ts-ignore
            this.signal.aborted = true;
            //@ts-ignore
            for (const fn of this.signal._onabort) {
                fn(reason);
            }
            this.signal.onabort?.(reason);
        }
    };
    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
    const warnACPolyfill = () => {
        if (!printACPolyfillWarning)
            return;
        printACPolyfillWarning = false;
        emitWarning('AbortController is not defined. If using lru-cache in ' +
            'node 14, load an AbortController polyfill from the ' +
            '`node-abort-controller` package. A minimal polyfill is ' +
            'provided for use by LRUCache.fetch(), but it should not be ' +
            'relied upon in other contexts (eg, passing it to other APIs that ' +
            'use AbortController/AbortSignal might have undesirable effects). ' +
            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
    };
}
/* c8 ignore stop */
const shouldWarn = (code) => !warned.has(code);
const TYPE = Symbol('type');
const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
/* c8 ignore start */
// This is a little bit ridiculous, tbh.
// The maximum array length is 2^32-1 or thereabouts on most JS impls.
// And well before that point, you're caching the entire world, I mean,
// that's ~32GB of just integers for the next/prev links, plus whatever
// else to hold that many keys and values.  Just filling the memory with
// zeroes at init time is brutal when you get that big.
// But why not be complete?
// Maybe in the future, these limits will have expanded.
const getUintArray = (max) => !isPosInt(max)
    ? null
    : max <= Math.pow(2, 8)
        ? Uint8Array
        : max <= Math.pow(2, 16)
            ? Uint16Array
            : max <= Math.pow(2, 32)
                ? Uint32Array
                : max <= Number.MAX_SAFE_INTEGER
                    ? ZeroArray
                    : null;
/* c8 ignore stop */
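// For illustration, the thresholds above work out roughly as follows
// (the max values are arbitrary examples):
//   getUintArray(100)        // => Uint8Array  (100 <= 2**8)
//   getUintArray(10_000)     // => Uint16Array (10_000 <= 2**16)
//   getUintArray(1_000_000)  // => Uint32Array (1_000_000 <= 2**32)
//   getUintArray(2 ** 40)    // => ZeroArray   (still a safe integer)
//   getUintArray(0)          // => null        (not a positive integer)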
class ZeroArray extends Array {
    constructor(size) {
        super(size);
        this.fill(0);
    }
}
class Stack {
    heap;
    length;
    // private constructor
    static #constructing = false;
    static create(max) {
        const HeapCls = getUintArray(max);
        if (!HeapCls)
            return [];
        Stack.#constructing = true;
        const s = new Stack(max, HeapCls);
        Stack.#constructing = false;
        return s;
    }
    constructor(max, HeapCls) {
        /* c8 ignore start */
        if (!Stack.#constructing) {
            throw new TypeError('instantiate Stack using Stack.create(n)');
        }
        /* c8 ignore stop */
        this.heap = new HeapCls(max);
        this.length = 0;
    }
    push(n) {
        this.heap[this.length++] = n;
    }
    pop() {
        return this.heap[--this.length];
    }
}
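// Stack is only constructible through Stack.create(), which picks the
// smallest index storage for the given max. A hypothetical direct use:
//   const free = Stack.create(10)  // heap is a Uint8Array of length 10
//   free.push(3)
//   free.pop()                     // => 3
//   new Stack(10, Uint8Array)      // throws: instantiate Stack using Stack.create(n)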
/**
 * The main export of this module, the thing you're using this module to get.
 *
 * All properties from the options object (with the exception of
 * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
 * normal public members. (`max` and `maxSize` are read-only getters.)
 * Changing any of these will alter the defaults for subsequent method calls,
 * but is otherwise safe.
 */
export class LRUCache {
    // Properties coming in from the options. Of these, only max and maxSize
    // really *need* to be protected. The rest can be modified, as they just
    // set defaults for various methods.
    #max;
    #maxSize;
    #dispose;
    #disposeAfter;
    #fetchMethod;
    /**
     * {@link LRUCache.OptionsBase.ttl}
     */
    ttl;
    /**
     * {@link LRUCache.OptionsBase.ttlResolution}
     */
    ttlResolution;
    /**
     * {@link LRUCache.OptionsBase.ttlAutopurge}
     */
    ttlAutopurge;
    /**
     * {@link LRUCache.OptionsBase.updateAgeOnGet}
     */
    updateAgeOnGet;
    /**
     * {@link LRUCache.OptionsBase.updateAgeOnHas}
     */
    updateAgeOnHas;
    /**
     * {@link LRUCache.OptionsBase.allowStale}
     */
    allowStale;
    /**
     * {@link LRUCache.OptionsBase.noDisposeOnSet}
     */
    noDisposeOnSet;
    /**
     * {@link LRUCache.OptionsBase.noUpdateTTL}
     */
    noUpdateTTL;
    /**
     * {@link LRUCache.OptionsBase.maxEntrySize}
     */
    maxEntrySize;
    /**
     * {@link LRUCache.OptionsBase.sizeCalculation}
     */
    sizeCalculation;
    /**
     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
     */
    noDeleteOnFetchRejection;
    /**
     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
     */
    noDeleteOnStaleGet;
    /**
     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
     */
    allowStaleOnFetchAbort;
    /**
     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
     */
    allowStaleOnFetchRejection;
    /**
     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
     */
    ignoreFetchAbort;
    // computed properties
    #size;
    #calculatedSize;
    #keyMap;
    #keyList;
    #valList;
    #next;
    #prev;
    #head;
    #tail;
    #free;
    #disposed;
    #sizes;
    #starts;
    #ttls;
    #hasDispose;
    #hasFetchMethod;
    #hasDisposeAfter;
    /**
     * Do not call this method unless you need to inspect the
     * inner workings of the cache.  If anything returned by this
     * object is modified in any way, strange breakage may occur.
     *
     * These fields are private for a reason!
     *
     * @internal
     */
    static unsafeExposeInternals(c) {
        return {
            // properties
            starts: c.#starts,
            ttls: c.#ttls,
            sizes: c.#sizes,
            keyMap: c.#keyMap,
            keyList: c.#keyList,
            valList: c.#valList,
            next: c.#next,
            prev: c.#prev,
            get head() {
                return c.#head;
            },
            get tail() {
                return c.#tail;
            },
            free: c.#free,
            // methods
            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
            moveToTail: (index) => c.#moveToTail(index),
            indexes: (options) => c.#indexes(options),
            rindexes: (options) => c.#rindexes(options),
            isStale: (index) => c.#isStale(index),
        };
    }
    // Protected read-only members
    /**
     * {@link LRUCache.OptionsBase.max} (read-only)
     */
    get max() {
        return this.#max;
    }
    /**
     * {@link LRUCache.OptionsBase.maxSize} (read-only)
     */
    get maxSize() {
        return this.#maxSize;
    }
    /**
     * The total computed size of items in the cache (read-only)
     */
    get calculatedSize() {
        return this.#calculatedSize;
    }
    /**
     * The number of items stored in the cache (read-only)
     */
    get size() {
        return this.#size;
    }
    /**
     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
     */
    get fetchMethod() {
        return this.#fetchMethod;
    }
    /**
     * {@link LRUCache.OptionsBase.dispose} (read-only)
     */
    get dispose() {
        return this.#dispose;
    }
    /**
     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
     */
    get disposeAfter() {
        return this.#disposeAfter;
    }
    constructor(options) {
        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
        if (max !== 0 && !isPosInt(max)) {
            throw new TypeError('max option must be a nonnegative integer');
        }
        const UintArray = max ? getUintArray(max) : Array;
        if (!UintArray) {
            throw new Error('invalid max value: ' + max);
        }
        this.#max = max;
        this.#maxSize = maxSize;
        this.maxEntrySize = maxEntrySize || this.#maxSize;
        this.sizeCalculation = sizeCalculation;
        if (this.sizeCalculation) {
            if (!this.#maxSize && !this.maxEntrySize) {
                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
            }
            if (typeof this.sizeCalculation !== 'function') {
                throw new TypeError('sizeCalculation set to non-function');
            }
        }
        if (fetchMethod !== undefined &&
            typeof fetchMethod !== 'function') {
            throw new TypeError('fetchMethod must be a function if specified');
        }
        this.#fetchMethod = fetchMethod;
        this.#hasFetchMethod = !!fetchMethod;
        this.#keyMap = new Map();
        this.#keyList = new Array(max).fill(undefined);
        this.#valList = new Array(max).fill(undefined);
        this.#next = new UintArray(max);
        this.#prev = new UintArray(max);
        this.#head = 0;
        this.#tail = 0;
        this.#free = Stack.create(max);
        this.#size = 0;
        this.#calculatedSize = 0;
        if (typeof dispose === 'function') {
            this.#dispose = dispose;
        }
        if (typeof disposeAfter === 'function') {
            this.#disposeAfter = disposeAfter;
            this.#disposed = [];
        }
        else {
            this.#disposeAfter = undefined;
            this.#disposed = undefined;
        }
        this.#hasDispose = !!this.#dispose;
        this.#hasDisposeAfter = !!this.#disposeAfter;
        this.noDisposeOnSet = !!noDisposeOnSet;
        this.noUpdateTTL = !!noUpdateTTL;
        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
        this.ignoreFetchAbort = !!ignoreFetchAbort;
        // NB: maxEntrySize is set to maxSize if it's set
        if (this.maxEntrySize !== 0) {
            if (this.#maxSize !== 0) {
                if (!isPosInt(this.#maxSize)) {
                    throw new TypeError('maxSize must be a positive integer if specified');
                }
            }
            if (!isPosInt(this.maxEntrySize)) {
                throw new TypeError('maxEntrySize must be a positive integer if specified');
            }
            this.#initializeSizeTracking();
        }
        this.allowStale = !!allowStale;
        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
        this.updateAgeOnGet = !!updateAgeOnGet;
        this.updateAgeOnHas = !!updateAgeOnHas;
        this.ttlResolution =
            isPosInt(ttlResolution) || ttlResolution === 0
                ? ttlResolution
                : 1;
        this.ttlAutopurge = !!ttlAutopurge;
        this.ttl = ttl || 0;
        if (this.ttl) {
            if (!isPosInt(this.ttl)) {
                throw new TypeError('ttl must be a positive integer if specified');
            }
            this.#initializeTTLTracking();
        }
        // do not allow completely unbounded caches
        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
            throw new TypeError('At least one of max, maxSize, or ttl is required');
        }
        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
            const code = 'LRU_CACHE_UNBOUNDED';
            if (shouldWarn(code)) {
                warned.add(code);
                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
                    'result in unbounded memory consumption.';
                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
            }
        }
    }
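    // Constructor sketch (illustrative values): at least one of max, maxSize
    // (with sizeCalculation or per-set sizes), or ttl must be provided, or the
    // constructor throws to avoid an unbounded cache.
    //   new LRUCache({})                                  // throws TypeError
    //   new LRUCache({ max: 100 })                        // bounded by count
    //   new LRUCache({ maxSize: 1024, sizeCalculation: v => v.length })
    //   new LRUCache({ ttl: 60_000, ttlAutopurge: true }) // bounded by time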
    /**
     * Return the remaining TTL time for a given entry key
     */
    getRemainingTTL(key) {
        return this.#keyMap.has(key) ? Infinity : 0;
    }
    #initializeTTLTracking() {
        const ttls = new ZeroArray(this.#max);
        const starts = new ZeroArray(this.#max);
        this.#ttls = ttls;
        this.#starts = starts;
        this.#setItemTTL = (index, ttl, start = perf.now()) => {
            starts[index] = ttl !== 0 ? start : 0;
            ttls[index] = ttl;
            if (ttl !== 0 && this.ttlAutopurge) {
                const t = setTimeout(() => {
                    if (this.#isStale(index)) {
                        this.delete(this.#keyList[index]);
                    }
                }, ttl + 1);
                // unref() not supported on all platforms
                /* c8 ignore start */
                if (t.unref) {
                    t.unref();
                }
                /* c8 ignore stop */
            }
        };
        this.#updateItemAge = index => {
            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
        };
        this.#statusTTL = (status, index) => {
            if (ttls[index]) {
                const ttl = ttls[index];
                const start = starts[index];
                /* c8 ignore next */
                if (!ttl || !start)
                    return;
                status.ttl = ttl;
                status.start = start;
                status.now = cachedNow || getNow();
                const age = status.now - start;
                status.remainingTTL = ttl - age;
            }
        };
        // debounce calls to perf.now() to once per ttlResolution interval,
        // so we're not hitting that costly call repeatedly.
        let cachedNow = 0;
        const getNow = () => {
            const n = perf.now();
            if (this.ttlResolution > 0) {
                cachedNow = n;
                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
                // not available on all platforms
                /* c8 ignore start */
                if (t.unref) {
                    t.unref();
                }
                /* c8 ignore stop */
            }
            return n;
        };
        this.getRemainingTTL = key => {
            const index = this.#keyMap.get(key);
            if (index === undefined) {
                return 0;
            }
            const ttl = ttls[index];
            const start = starts[index];
            if (!ttl || !start) {
                return Infinity;
            }
            const age = (cachedNow || getNow()) - start;
            return ttl - age;
        };
        this.#isStale = index => {
            const s = starts[index];
            const t = ttls[index];
            return !!t && !!s && (cachedNow || getNow()) - s > t;
        };
    }
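    // TTL sketch (illustrative): once ttl tracking is initialized, remaining
    // TTL can be inspected without touching recency. Timings here are examples.
    //   const c = new LRUCache({ max: 10, ttl: 1000 })
    //   c.set('a', 1)
    //   c.getRemainingTTL('a') // => roughly 1000, counting down
    //   c.getRemainingTTL('b') // => 0, key not in cache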
    // conditionally set private methods related to TTL
    #updateItemAge = () => { };
    #statusTTL = () => { };
    #setItemTTL = () => { };
    /* c8 ignore stop */
    #isStale = () => false;
    #initializeSizeTracking() {
        const sizes = new ZeroArray(this.#max);
        this.#calculatedSize = 0;
        this.#sizes = sizes;
        this.#removeItemSize = index => {
            this.#calculatedSize -= sizes[index];
            sizes[index] = 0;
        };
        this.#requireSize = (k, v, size, sizeCalculation) => {
            // provisionally accept background fetches.
            // actual value size will be checked when they return.
            if (this.#isBackgroundFetch(v)) {
                return 0;
            }
            if (!isPosInt(size)) {
                if (sizeCalculation) {
                    if (typeof sizeCalculation !== 'function') {
                        throw new TypeError('sizeCalculation must be a function');
                    }
                    size = sizeCalculation(v, k);
                    if (!isPosInt(size)) {
                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
                    }
                }
                else {
                    throw new TypeError('invalid size value (must be positive integer). ' +
                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
                        'or size must be set.');
                }
            }
            return size;
        };
        this.#addItemSize = (index, size, status) => {
            sizes[index] = size;
            if (this.#maxSize) {
                const maxSize = this.#maxSize - sizes[index];
                while (this.#calculatedSize > maxSize) {
                    this.#evict(true);
                }
            }
            this.#calculatedSize += sizes[index];
            if (status) {
                status.entrySize = size;
                status.totalCalculatedSize = this.#calculatedSize;
            }
        };
    }
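    // Size-tracking sketch (illustrative): when maxSize is set, each entry
    // needs a size, supplied either per-set or via sizeCalculation.
    //   const c = new LRUCache({ maxSize: 100, sizeCalculation: s => s.length })
    //   c.set('greeting', 'hello')        // size 5 via sizeCalculation
    //   c.set('blob', 'x', { size: 50 })  // explicit size wins over the calculation
    //   c.calculatedSize                  // => 55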
    #removeItemSize = _i => { };
    #addItemSize = (_i, _s, _st) => { };
    #requireSize = (_k, _v, size, sizeCalculation) => {
        if (size || sizeCalculation) {
            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
        }
        return 0;
    };
    *#indexes({ allowStale = this.allowStale } = {}) {
        if (this.#size) {
            for (let i = this.#tail; true;) {
                if (!this.#isValidIndex(i)) {
                    break;
                }
                if (allowStale || !this.#isStale(i)) {
                    yield i;
                }
                if (i === this.#head) {
                    break;
                }
                else {
                    i = this.#prev[i];
                }
            }
        }
    }
    *#rindexes({ allowStale = this.allowStale } = {}) {
        if (this.#size) {
            for (let i = this.#head; true;) {
                if (!this.#isValidIndex(i)) {
                    break;
                }
                if (allowStale || !this.#isStale(i)) {
                    yield i;
                }
                if (i === this.#tail) {
                    break;
                }
                else {
                    i = this.#next[i];
                }
            }
        }
    }
    #isValidIndex(index) {
        return (index !== undefined &&
            this.#keyMap.get(this.#keyList[index]) === index);
    }
    /**
     * Return a generator yielding `[key, value]` pairs,
     * in order from most recently used to least recently used.
     */
    *entries() {
        for (const i of this.#indexes()) {
            if (this.#valList[i] !== undefined &&
                this.#keyList[i] !== undefined &&
                !this.#isBackgroundFetch(this.#valList[i])) {
                yield [this.#keyList[i], this.#valList[i]];
            }
        }
    }
    /**
     * Inverse order version of {@link LRUCache.entries}
     *
     * Return a generator yielding `[key, value]` pairs,
     * in order from least recently used to most recently used.
     */
    *rentries() {
        for (const i of this.#rindexes()) {
            if (this.#valList[i] !== undefined &&
                this.#keyList[i] !== undefined &&
                !this.#isBackgroundFetch(this.#valList[i])) {
                yield [this.#keyList[i], this.#valList[i]];
            }
        }
    }
    /**
     * Return a generator yielding the keys in the cache,
     * in order from most recently used to least recently used.
     */
    *keys() {
        for (const i of this.#indexes()) {
            const k = this.#keyList[i];
            if (k !== undefined &&
                !this.#isBackgroundFetch(this.#valList[i])) {
                yield k;
            }
        }
    }
    /**
     * Inverse order version of {@link LRUCache.keys}
     *
     * Return a generator yielding the keys in the cache,
     * in order from least recently used to most recently used.
     */
    *rkeys() {
        for (const i of this.#rindexes()) {
            const k = this.#keyList[i];
            if (k !== undefined &&
                !this.#isBackgroundFetch(this.#valList[i])) {
                yield k;
            }
        }
    }
    /**
     * Return a generator yielding the values in the cache,
     * in order from most recently used to least recently used.
     */
    *values() {
        for (const i of this.#indexes()) {
            const v = this.#valList[i];
            if (v !== undefined &&
                !this.#isBackgroundFetch(this.#valList[i])) {
                yield this.#valList[i];
            }
        }
    }
    /**
     * Inverse order version of {@link LRUCache.values}
     *
     * Return a generator yielding the values in the cache,
     * in order from least recently used to most recently used.
     */
    *rvalues() {
        for (const i of this.#rindexes()) {
            const v = this.#valList[i];
            if (v !== undefined &&
                !this.#isBackgroundFetch(this.#valList[i])) {
                yield this.#valList[i];
            }
        }
    }
    /**
     * Iterating over the cache itself yields the same results as
     * {@link LRUCache.entries}
     */
    [Symbol.iterator]() {
        return this.entries();
    }
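    // Iteration sketch (illustrative): all iterators walk from most to least
    // recently used; the r-prefixed variants walk the opposite direction.
    //   const c = new LRUCache({ max: 3 })
    //   c.set('a', 1).set('b', 2).set('c', 3)
    //   [...c.keys()]   // => ['c', 'b', 'a']
    //   [...c.rkeys()]  // => ['a', 'b', 'c']
    //   [...c]          // => [['c', 3], ['b', 2], ['a', 1]] (same as entries())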
    /**
     * A String value that is used in the creation of the default string description of an object.
     * Called by the built-in method Object.prototype.toString.
     */
    [Symbol.toStringTag] = 'LRUCache';
    /**
     * Find a value for which the supplied fn method returns a truthy value,
     * similar to Array.find().  fn is called as fn(value, key, cache).
     */
    find(fn, getOptions = {}) {
        for (const i of this.#indexes()) {
            const v = this.#valList[i];
            const value = this.#isBackgroundFetch(v)
                ? v.__staleWhileFetching
                : v;
            if (value === undefined)
                continue;
            if (fn(value, this.#keyList[i], this)) {
                return this.get(this.#keyList[i], getOptions);
            }
        }
    }
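    // find() sketch (illustrative): the result comes back via get(), so the
    // matching entry's recency is updated. Predicate arguments are (value, key, cache).
    //   const c = new LRUCache({ max: 5 })
    //   c.set('a', { n: 1 }).set('b', { n: 2 })
    //   c.find(v => v.n === 1) // => { n: 1 }, and 'a' becomes most recently used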
    /**
     * Call the supplied function on each item in the cache, in order from
     * most recently used to least recently used.  fn is called as
     * fn(value, key, cache).  Does not update age or recency of use.
     * Does not iterate over stale values.
     */
    forEach(fn, thisp = this) {
        for (const i of this.#indexes()) {
            const v = this.#valList[i];
            const value = this.#isBackgroundFetch(v)
                ? v.__staleWhileFetching
                : v;
            if (value === undefined)
                continue;
            fn.call(thisp, value, this.#keyList[i], this);
        }
    }
    /**
     * The same as {@link LRUCache.forEach} but items are iterated over in
     * reverse order.  (ie, less recently used items are iterated over first.)
     */
    rforEach(fn, thisp = this) {
        for (const i of this.#rindexes()) {
            const v = this.#valList[i];
            const value = this.#isBackgroundFetch(v)
                ? v.__staleWhileFetching
                : v;
            if (value === undefined)
                continue;
            fn.call(thisp, value, this.#keyList[i], this);
        }
    }
    /**
     * Delete any stale entries. Returns true if anything was removed,
     * false otherwise.
     */
    purgeStale() {
        let deleted = false;
        for (const i of this.#rindexes({ allowStale: true })) {
            if (this.#isStale(i)) {
                this.delete(this.#keyList[i]);
                deleted = true;
            }
        }
        return deleted;
    }
    /**
     * Get the extended info about a given entry, to get its value, size, and
     * TTL info simultaneously. Like {@link LRUCache#dump}, but just for a
     * single key. Always returns stale values, if their info is found in the
     * cache, so be sure to check for expired TTLs if relevant.
     */
    info(key) {
        const i = this.#keyMap.get(key);
        if (i === undefined)
            return undefined;
        const v = this.#valList[i];
        const value = this.#isBackgroundFetch(v)
            ? v.__staleWhileFetching
            : v;
        if (value === undefined)
            return undefined;
        const entry = { value };
        if (this.#ttls && this.#starts) {
            const ttl = this.#ttls[i];
            const start = this.#starts[i];
            if (ttl && start) {
                const remain = ttl - (perf.now() - start);
                entry.ttl = remain;
                entry.start = Date.now();
            }
        }
        if (this.#sizes) {
            entry.size = this.#sizes[i];
        }
        return entry;
    }
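    // info() sketch (illustrative): returns the value plus ttl/size details
    // when those features are enabled, without changing recency.
    //   const c = new LRUCache({ max: 10, ttl: 1000, maxEntrySize: 100, sizeCalculation: () => 1 })
    //   c.set('a', 'value')
    //   c.info('a') // => { value: 'value', ttl: <remaining ms>, start: <Date.now()-based>, size: 1 }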
    /**
     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
     * passed to cache.load()
     */
    dump() {
        const arr = [];
        for (const i of this.#indexes({ allowStale: true })) {
            const key = this.#keyList[i];
            const v = this.#valList[i];
            const value = this.#isBackgroundFetch(v)
                ? v.__staleWhileFetching
                : v;
            if (value === undefined || key === undefined)
                continue;
            const entry = { value };
            if (this.#ttls && this.#starts) {
                entry.ttl = this.#ttls[i];
                // always dump the start relative to a portable timestamp
                // it's ok for this to be a bit slow, it's a rare operation.
                const age = perf.now() - this.#starts[i];
                entry.start = Math.floor(Date.now() - age);
            }
            if (this.#sizes) {
                entry.size = this.#sizes[i];
            }
            arr.unshift([key, entry]);
        }
        return arr;
    }
    /**
     * Reset the cache and load in the items in entries in the order listed.
     * Note that the shape of the resulting cache may be different if the
     * same options are not used in both caches.
     */
    load(arr) {
        this.clear();
        for (const [key, entry] of arr) {
            if (entry.start) {
                // entry.start is a portable timestamp, but we may be using
                // node's performance.now(), so calculate the offset, so that
                // we get the intended remaining TTL, no matter how long it's
                // been on ice.
                //
                // it's ok for this to be a bit slow, it's a rare operation.
                const age = Date.now() - entry.start;
                entry.start = perf.now() - age;
            }
            this.set(key, entry.value, entry);
        }
    }
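    // dump()/load() sketch (illustrative): entries round-trip between caches,
    // preserving remaining TTLs via the portable start timestamp.
    //   const a = new LRUCache({ max: 10, ttl: 60_000 })
    //   a.set('k', 'v')
    //   const b = new LRUCache({ max: 10, ttl: 60_000 })
    //   b.load(a.dump())
    //   b.get('k') // => 'v'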
    /**
     * Add a value to the cache.
     *
     * Note: if `undefined` is specified as a value, this is an alias for
     * {@link LRUCache#delete}
     */
    set(k, v, setOptions = {}) {
        if (v === undefined) {
            this.delete(k);
            return this;
        }
        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
        // if the item doesn't fit, don't do anything
        // NB: maxEntrySize set to maxSize by default
        if (this.maxEntrySize && size > this.maxEntrySize) {
            if (status) {
                status.set = 'miss';
                status.maxEntrySizeExceeded = true;
            }
            // have to delete, in case something is there already.
            this.delete(k);
            return this;
        }
        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
        if (index === undefined) {
            // addition
            index = (this.#size === 0
                ? this.#tail
                : this.#free.length !== 0
                    ? this.#free.pop()
                    : this.#size === this.#max
                        ? this.#evict(false)
                        : this.#size);
            this.#keyList[index] = k;
            this.#valList[index] = v;
            this.#keyMap.set(k, index);
            this.#next[this.#tail] = index;
            this.#prev[index] = this.#tail;
            this.#tail = index;
            this.#size++;
            this.#addItemSize(index, size, status);
            if (status)
                status.set = 'add';
            noUpdateTTL = false;
        }
        else {
            // update
            this.#moveToTail(index);
            const oldVal = this.#valList[index];
            if (v !== oldVal) {
                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
                    oldVal.__abortController.abort(new Error('replaced'));
                    const { __staleWhileFetching: s } = oldVal;
                    if (s !== undefined && !noDisposeOnSet) {
                        if (this.#hasDispose) {
                            this.#dispose?.(s, k, 'set');
                        }
                        if (this.#hasDisposeAfter) {
                            this.#disposed?.push([s, k, 'set']);
                        }
                    }
                }
                else if (!noDisposeOnSet) {
                    if (this.#hasDispose) {
                        this.#dispose?.(oldVal, k, 'set');
                    }
                    if (this.#hasDisposeAfter) {
                        this.#disposed?.push([oldVal, k, 'set']);
                    }
                }
                this.#removeItemSize(index);
                this.#addItemSize(index, size, status);
                this.#valList[index] = v;
                if (status) {
                    status.set = 'replace';
                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
                        ? oldVal.__staleWhileFetching
                        : oldVal;
                    if (oldValue !== undefined)
                        status.oldValue = oldValue;
                }
            }
            else if (status) {
                status.set = 'update';
            }
        }
        if (ttl !== 0 && !this.#ttls) {
            this.#initializeTTLTracking();
        }
        if (this.#ttls) {
            if (!noUpdateTTL) {
                this.#setItemTTL(index, ttl, start);
            }
            if (status)
                this.#statusTTL(status, index);
        }
        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
        return this;
    }
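    // set() sketch (illustrative): returns the cache, so calls chain; setting
    // undefined is equivalent to deleting the key.
    //   const c = new LRUCache({ max: 10, ttl: 1000 })
    //   c.set('a', 1).set('b', 2, { ttl: 5000 }) // per-entry ttl override
    //   c.set('a', undefined)                    // same as c.delete('a')
    //   c.has('a')                               // => false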
    /**
     * Evict the least recently used item, returning its value or
     * `undefined` if cache is empty.
     */
    pop() {
        try {
            while (this.#size) {
                const val = this.#valList[this.#head];
                this.#evict(true);
                if (this.#isBackgroundFetch(val)) {
                    if (val.__staleWhileFetching) {
                        return val.__staleWhileFetching;
                    }
                }
                else if (val !== undefined) {
                    return val;
                }
            }
        }
        finally {
            if (this.#hasDisposeAfter && this.#disposed) {
                const dt = this.#disposed;
                let task;
                while ((task = dt?.shift())) {
                    this.#disposeAfter?.(...task);
                }
            }
        }
    }
    #evict(free) {
        const head = this.#head;
        const k = this.#keyList[head];
        const v = this.#valList[head];
        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
            v.__abortController.abort(new Error('evicted'));
        }
        else if (this.#hasDispose || this.#hasDisposeAfter) {
            if (this.#hasDispose) {
                this.#dispose?.(v, k, 'evict');
            }
            if (this.#hasDisposeAfter) {
                this.#disposed?.push([v, k, 'evict']);
            }
        }
        this.#removeItemSize(head);
        // if we aren't about to use the index, then null these out
        if (free) {
            this.#keyList[head] = undefined;
            this.#valList[head] = undefined;
            this.#free.push(head);
        }
        if (this.#size === 1) {
            this.#head = this.#tail = 0;
            this.#free.length = 0;
        }
        else {
            this.#head = this.#next[head];
        }
        this.#keyMap.delete(k);
        this.#size--;
        return head;
    }
    /**
     * Check if a key is in the cache, without updating the recency of use.
     * Will return false if the item is stale, even though it is technically
     * in the cache.
     *
     * Will not update item age unless
     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
     */
    has(k, hasOptions = {}) {
        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
        const index = this.#keyMap.get(k);
        if (index !== undefined) {
            const v = this.#valList[index];
            if (this.#isBackgroundFetch(v) &&
                v.__staleWhileFetching === undefined) {
                return false;
            }
            if (!this.#isStale(index)) {
                if (updateAgeOnHas) {
                    this.#updateItemAge(index);
                }
                if (status) {
                    status.has = 'hit';
                    this.#statusTTL(status, index);
                }
                return true;
            }
            else if (status) {
                status.has = 'stale';
                this.#statusTTL(status, index);
            }
        }
        else if (status) {
            status.has = 'miss';
        }
        return false;
    }
    /**
     * Like {@link LRUCache#get} but doesn't update recency or delete stale
     * items.
     *
     * Returns `undefined` if the item is stale, unless
     * {@link LRUCache.OptionsBase.allowStale} is set.
     */
    peek(k, peekOptions = {}) {
        const { allowStale = this.allowStale } = peekOptions;
        const index = this.#keyMap.get(k);
        if (index === undefined ||
            (!allowStale && this.#isStale(index))) {
            return;
        }
        const v = this.#valList[index];
        // either stale and allowed, or forcing a refresh of non-stale value
        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
    }
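    // has()/peek() sketch (illustrative): neither updates recency, and has()
    // reports false for stale entries even though they still occupy a slot.
    //   const c = new LRUCache({ max: 10, ttl: 5 })
    //   c.set('a', 1)
    //   c.has('a')  // => true while fresh
    //   c.peek('a') // => 1, without promoting 'a'
    //   // ...after the ttl elapses:
    //   c.has('a')  // => false, entry is stale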
    #backgroundFetch(k, index, options, context) {
        const v = index === undefined ? undefined : this.#valList[index];
        if (this.#isBackgroundFetch(v)) {
            return v;
        }
        const ac = new AC();
        const { signal } = options;
        // when/if our AC signals, then stop listening to theirs.
        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
            signal: ac.signal,
        });
        const fetchOpts = {
            signal: ac.signal,
            options,
            context,
        };
        const cb = (v, updateCache = false) => {
            const { aborted } = ac.signal;
            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
            if (options.status) {
                if (aborted && !updateCache) {
                    options.status.fetchAborted = true;
                    options.status.fetchError = ac.signal.reason;
                    if (ignoreAbort)
                        options.status.fetchAbortIgnored = true;
                }
                else {
                    options.status.fetchResolved = true;
                }
            }
            if (aborted && !ignoreAbort && !updateCache) {
                return fetchFail(ac.signal.reason);
            }
            // either we didn't abort, and are still here, or we did, and ignored
            const bf = p;
            if (this.#valList[index] === p) {
                if (v === undefined) {
                    if (bf.__staleWhileFetching) {
                        this.#valList[index] = bf.__staleWhileFetching;
                    }
                    else {
                        this.delete(k);
                    }
                }
                else {
                    if (options.status)
                        options.status.fetchUpdated = true;
                    this.set(k, v, fetchOpts.options);
                }
            }
            return v;
        };
        const eb = (er) => {
            if (options.status) {
                options.status.fetchRejected = true;
                options.status.fetchError = er;
            }
            return fetchFail(er);
        };
        const fetchFail = (er) => {
            const { aborted } = ac.signal;
            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
            const noDelete = allowStale || options.noDeleteOnFetchRejection;
            const bf = p;
            if (this.#valList[index] === p) {
                // if we allow stale on fetch rejections, then we need to ensure that
                // the stale value is not removed from the cache when the fetch fails.
                const del = !noDelete || bf.__staleWhileFetching === undefined;
                if (del) {
                    this.delete(k);
                }
                else if (!allowStaleAborted) {
                    // still replace the *promise* with the stale value,
                    // since we are done with the promise at this point.
                    // leave it untouched if we're still waiting for an
                    // aborted background fetch that hasn't yet returned.
                    this.#valList[index] = bf.__staleWhileFetching;
                }
            }
            if (allowStale) {
                if (options.status && bf.__staleWhileFetching !== undefined) {
                    options.status.returnedStale = true;
                }
                return bf.__staleWhileFetching;
            }
            else if (bf.__returned === bf) {
                throw er;
            }
        };
        const pcall = (res, rej) => {
            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
            if (fmp && fmp instanceof Promise) {
                fmp.then(v => res(v === undefined ? undefined : v), rej);
            }
            // ignored, we go until we finish, regardless.
            // defer check until we are actually aborting,
            // so fetchMethod can override.
            ac.signal.addEventListener('abort', () => {
                if (!options.ignoreFetchAbort ||
                    options.allowStaleOnFetchAbort) {
                    res(undefined);
                    // when it eventually resolves, update the cache.
                    if (options.allowStaleOnFetchAbort) {
                        res = v => cb(v, true);
                    }
                }
            });
        };
        if (options.status)
            options.status.fetchDispatched = true;
        const p = new Promise(pcall).then(cb, eb);
        const bf = Object.assign(p, {
            __abortController: ac,
            __staleWhileFetching: v,
            __returned: undefined,
        });
        if (index === undefined) {
            // internal, don't expose status.
            this.set(k, bf, { ...fetchOpts.options, status: undefined });
            index = this.#keyMap.get(k);
        }
        else {
            this.#valList[index] = bf;
        }
        return bf;
    }
    #isBackgroundFetch(p) {
        if (!this.#hasFetchMethod)
            return false;
        const b = p;
        return (!!b &&
            b instanceof Promise &&
            b.hasOwnProperty('__staleWhileFetching') &&
            b.__abortController instanceof AC);
    }
    async fetch(k, fetchOptions = {}) {
        const {
        // get options
        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet,
        // set options
        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL,
        // fetch exclusive options
        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
        if (!this.#hasFetchMethod) {
            if (status)
                status.fetch = 'get';
            return this.get(k, {
                allowStale,
                updateAgeOnGet,
                noDeleteOnStaleGet,
                status,
            });
        }
        const options = {
            allowStale,
            updateAgeOnGet,
            noDeleteOnStaleGet,
            ttl,
            noDisposeOnSet,
            size,
            sizeCalculation,
            noUpdateTTL,
            noDeleteOnFetchRejection,
            allowStaleOnFetchRejection,
            allowStaleOnFetchAbort,
            ignoreFetchAbort,
            status,
            signal,
        };
        let index = this.#keyMap.get(k);
        if (index === undefined) {
            if (status)
                status.fetch = 'miss';
            const p = this.#backgroundFetch(k, index, options, context);
            return (p.__returned = p);
        }
        else {
            // in cache, maybe already fetching
            const v = this.#valList[index];
            if (this.#isBackgroundFetch(v)) {
                const stale = allowStale && v.__staleWhileFetching !== undefined;
                if (status) {
                    status.fetch = 'inflight';
                    if (stale)
                        status.returnedStale = true;
                }
                return stale ? v.__staleWhileFetching : (v.__returned = v);
            }
            // if we force a refresh, that means do NOT serve the cached value,
            // unless we are already in the process of refreshing the cache.
            const isStale = this.#isStale(index);
            if (!forceRefresh && !isStale) {
                if (status)
                    status.fetch = 'hit';
                this.#moveToTail(index);
                if (updateAgeOnGet) {
                    this.#updateItemAge(index);
                }
                if (status)
                    this.#statusTTL(status, index);
                return v;
            }
            // ok, it is stale or a forced refresh, and not already fetching.
            // refresh the cache.
            const p = this.#backgroundFetch(k, index, options, context);
            const hasStale = p.__staleWhileFetching !== undefined;
            const staleVal = hasStale && allowStale;
            if (status) {
                status.fetch = isStale ? 'stale' : 'refresh';
                if (staleVal && isStale)
                    status.returnedStale = true;
            }
            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
        }
    }
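    // fetch() sketch (illustrative): requires a fetchMethod; concurrent calls
    // for the same key share one in-flight promise, and stale values can be
    // served while revalidating if allowStale is set. The URL is hypothetical.
    //   const c = new LRUCache({
    //     max: 100,
    //     ttl: 60_000,
    //     allowStale: true,
    //     fetchMethod: async (key, staleValue, { signal }) =>
    //       (await fetch(`https://example.com/${key}`, { signal })).json(),
    //   })
    //   const data = await c.fetch('resource') // miss -> calls fetchMethod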
    /**
     * Return a value from the cache. Will update the recency of the cache
     * entry found.
     *
     * If the key is not found, get() will return `undefined`.
     */
    get(k, getOptions = {}) {
        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
        const index = this.#keyMap.get(k);
        if (index !== undefined) {
            const value = this.#valList[index];
            const fetching = this.#isBackgroundFetch(value);
            if (status)
                this.#statusTTL(status, index);
            if (this.#isStale(index)) {
                if (status)
                    status.get = 'stale';
                // delete only if not an in-flight background fetch
                if (!fetching) {
                    if (!noDeleteOnStaleGet) {
                        this.delete(k);
                    }
                    if (status && allowStale)
                        status.returnedStale = true;
                    return allowStale ? value : undefined;
                }
                else {
                    if (status &&
                        allowStale &&
                        value.__staleWhileFetching !== undefined) {
                        status.returnedStale = true;
                    }
                    return allowStale ? value.__staleWhileFetching : undefined;
                }
            }
            else {
                if (status)
                    status.get = 'hit';
                // if we're currently fetching it, we don't actually have it yet
                // it's not stale, which means this isn't a staleWhileRefetching.
                // If it's not stale, and fetching, AND has a __staleWhileFetching
                // value, then that means the user fetched with {forceRefresh:true},
                // so it's safe to return that value.
                if (fetching) {
                    return value.__staleWhileFetching;
                }
                this.#moveToTail(index);
                if (updateAgeOnGet) {
                    this.#updateItemAge(index);
                }
                return value;
            }
        }
        else if (status) {
            status.get = 'miss';
        }
    }
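    // get() sketch (illustrative): a hit promotes the entry; a stale hit
    // normally deletes it and returns undefined unless allowStale is set.
    //   const c = new LRUCache({ max: 10, ttl: 5, allowStale: true })
    //   c.set('a', 1)
    //   c.get('a') // => 1 while fresh
    //   // ...after the ttl elapses:
    //   c.get('a') // => 1 (stale value returned once, then the entry is deleted)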
    #connect(p, n) {
        this.#prev[n] = p;
        this.#next[p] = n;
    }
    #moveToTail(index) {
        // if tail already, nothing to do
        // if head, move head to next[index]
        // else
        //   move next[prev[index]] to next[index] (head has no prev)
        //   move prev[next[index]] to prev[index]
        // prev[index] = tail
        // next[tail] = index
        // tail = index
        if (index !== this.#tail) {
            if (index === this.#head) {
                this.#head = this.#next[index];
            }
            else {
                this.#connect(this.#prev[index], this.#next[index]);
            }
            this.#connect(this.#tail, index);
            this.#tail = index;
        }
    }
    /**
     * Deletes a key out of the cache.
     * Returns true if the key was deleted, false otherwise.
     */
    delete(k) {
        let deleted = false;
        if (this.#size !== 0) {
            const index = this.#keyMap.get(k);
            if (index !== undefined) {
                deleted = true;
                if (this.#size === 1) {
                    this.clear();
                }
                else {
                    this.#removeItemSize(index);
                    const v = this.#valList[index];
                    if (this.#isBackgroundFetch(v)) {
                        v.__abortController.abort(new Error('deleted'));
                    }
                    else if (this.#hasDispose || this.#hasDisposeAfter) {
                        if (this.#hasDispose) {
                            this.#dispose?.(v, k, 'delete');
                        }
                        if (this.#hasDisposeAfter) {
                            this.#disposed?.push([v, k, 'delete']);
                        }
                    }
                    this.#keyMap.delete(k);
                    this.#keyList[index] = undefined;
                    this.#valList[index] = undefined;
                    if (index === this.#tail) {
                        this.#tail = this.#prev[index];
                    }
                    else if (index === this.#head) {
                        this.#head = this.#next[index];
                    }
                    else {
                        const pi = this.#prev[index];
                        this.#next[pi] = this.#next[index];
                        const ni = this.#next[index];
                        this.#prev[ni] = this.#prev[index];
                    }
                    this.#size--;
                    this.#free.push(index);
                }
            }
        }
        if (this.#hasDisposeAfter && this.#disposed?.length) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
        return deleted;
    }
    /**
     * Clear the cache entirely, throwing away all values.
     */
    clear() {
        for (const index of this.#rindexes({ allowStale: true })) {
            const v = this.#valList[index];
            if (this.#isBackgroundFetch(v)) {
                v.__abortController.abort(new Error('deleted'));
            }
            else {
                const k = this.#keyList[index];
                if (this.#hasDispose) {
                    this.#dispose?.(v, k, 'delete');
                }
                if (this.#hasDisposeAfter) {
                    this.#disposed?.push([v, k, 'delete']);
                }
            }
        }
        this.#keyMap.clear();
        this.#valList.fill(undefined);
        this.#keyList.fill(undefined);
        if (this.#ttls && this.#starts) {
            this.#ttls.fill(0);
            this.#starts.fill(0);
        }
        if (this.#sizes) {
            this.#sizes.fill(0);
        }
        this.#head = 0;
        this.#tail = 0;
        this.#free.length = 0;
        this.#calculatedSize = 0;
        this.#size = 0;
        if (this.#hasDisposeAfter && this.#disposed) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
    }
}
//# sourceMappingURL=index.js.map