1"use strict";
2/**
3 * @module LRUCache
4 */
5Object.defineProperty(exports, "__esModule", { value: true });
6exports.LRUCache = void 0;
7const perf = typeof performance === 'object' &&
8    performance &&
9    typeof performance.now === 'function'
10    ? performance
11    : Date;
12const warned = new Set();
13/* c8 ignore start */
14const PROCESS = (typeof process === 'object' && !!process ? process : {});
/* c8 ignore stop */
const emitWarning = (msg, type, code, fn) => {
    typeof PROCESS.emitWarning === 'function'
        ? PROCESS.emitWarning(msg, type, code, fn)
        : console.error(`[${code}] ${type}: ${msg}`);
};
let AC = globalThis.AbortController;
let AS = globalThis.AbortSignal;
/* c8 ignore start */
if (typeof AC === 'undefined') {
    //@ts-ignore
    AS = class AbortSignal {
        onabort;
        _onabort = [];
        reason;
        aborted = false;
        addEventListener(_, fn) {
            this._onabort.push(fn);
        }
    };
    //@ts-ignore
    AC = class AbortController {
        constructor() {
            warnACPolyfill();
        }
        signal = new AS();
        abort(reason) {
            if (this.signal.aborted)
                return;
            //@ts-ignore
            this.signal.reason = reason;
            //@ts-ignore
            this.signal.aborted = true;
            //@ts-ignore
            for (const fn of this.signal._onabort) {
                fn(reason);
            }
            this.signal.onabort?.(reason);
        }
    };
    let printACPolyfillWarning = PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1';
    const warnACPolyfill = () => {
        if (!printACPolyfillWarning)
            return;
        printACPolyfillWarning = false;
        emitWarning('AbortController is not defined. If using lru-cache in ' +
            'node 14, load an AbortController polyfill from the ' +
            '`node-abort-controller` package. A minimal polyfill is ' +
            'provided for use by LRUCache.fetch(), but it should not be ' +
            'relied upon in other contexts (eg, passing it to other APIs that ' +
            'use AbortController/AbortSignal might have undesirable effects). ' +
            'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', 'NO_ABORT_CONTROLLER', 'ENOTSUP', warnACPolyfill);
    };
}
/* c8 ignore stop */
const shouldWarn = (code) => !warned.has(code);
const TYPE = Symbol('type');
const isPosInt = (n) => n && n === Math.floor(n) && n > 0 && isFinite(n);
/* c8 ignore start */
// This is a little bit ridiculous, tbh.
// The maximum array length is 2^32-1 or thereabouts on most JS impls.
// And well before that point, you're caching the entire world, I mean,
// that's ~32GB of just integers for the next/prev links, plus whatever
// else to hold that many keys and values.  Just filling the memory with
// zeroes at init time is brutal when you get that big.
// But why not be complete?
// Maybe in the future, these limits will have expanded.
const getUintArray = (max) => !isPosInt(max)
    ? null
    : max <= Math.pow(2, 8)
        ? Uint8Array
        : max <= Math.pow(2, 16)
            ? Uint16Array
            : max <= Math.pow(2, 32)
                ? Uint32Array
                : max <= Number.MAX_SAFE_INTEGER
                    ? ZeroArray
                    : null;
/* c8 ignore stop */
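// A rough sketch (values chosen only for illustration) of which backing store
// getUintArray selects for a given `max`:
//
//   getUintArray(200)        // => Uint8Array   (max <= 2^8)
//   getUintArray(10000)      // => Uint16Array  (max <= 2^16)
//   getUintArray(5000000)    // => Uint32Array  (max <= 2^32)
//   getUintArray(2 ** 40)    // => ZeroArray    (max <= Number.MAX_SAFE_INTEGER)
//   getUintArray(0)          // => null         (not a positive integer)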
class ZeroArray extends Array {
    constructor(size) {
        super(size);
        this.fill(0);
    }
}
class Stack {
    heap;
    length;
    // private constructor
    static #constructing = false;
    static create(max) {
        const HeapCls = getUintArray(max);
        if (!HeapCls)
            return [];
        Stack.#constructing = true;
        const s = new Stack(max, HeapCls);
        Stack.#constructing = false;
        return s;
    }
    constructor(max, HeapCls) {
        /* c8 ignore start */
        if (!Stack.#constructing) {
            throw new TypeError('instantiate Stack using Stack.create(n)');
        }
        /* c8 ignore stop */
        this.heap = new HeapCls(max);
        this.length = 0;
    }
    push(n) {
        this.heap[this.length++] = n;
    }
    pop() {
        return this.heap[--this.length];
    }
}
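/*
 * A minimal usage sketch, assuming this file is consumed as the published
 * `lru-cache` CommonJS build (keys and values below are placeholders):
 *
 *   const { LRUCache } = require('lru-cache')
 *   const cache = new LRUCache({ max: 500, ttl: 1000 * 60 })
 *   cache.set('key', { some: 'value' })
 *   cache.get('key') // => { some: 'value' } until evicted or expired
 */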
/**
 * Default export, the thing you're using this module to get.
 *
 * All properties from the options object (with the exception of
 * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as
 * normal public members. (`max` and `maxSize` are read-only getters.)
 * Changing any of these will alter the defaults for subsequent method calls,
 * but is otherwise safe.
 */
class LRUCache {
    // Properties coming in from the options. Of these, only max and maxSize
    // really *need* to be protected. The rest can be modified, as they just
    // set defaults for various methods.
    #max;
    #maxSize;
    #dispose;
    #disposeAfter;
    #fetchMethod;
    /**
     * {@link LRUCache.OptionsBase.ttl}
     */
    ttl;
    /**
     * {@link LRUCache.OptionsBase.ttlResolution}
     */
    ttlResolution;
    /**
     * {@link LRUCache.OptionsBase.ttlAutopurge}
     */
    ttlAutopurge;
    /**
     * {@link LRUCache.OptionsBase.updateAgeOnGet}
     */
    updateAgeOnGet;
    /**
     * {@link LRUCache.OptionsBase.updateAgeOnHas}
     */
    updateAgeOnHas;
    /**
     * {@link LRUCache.OptionsBase.allowStale}
     */
    allowStale;
    /**
     * {@link LRUCache.OptionsBase.noDisposeOnSet}
     */
    noDisposeOnSet;
    /**
     * {@link LRUCache.OptionsBase.noUpdateTTL}
     */
    noUpdateTTL;
    /**
     * {@link LRUCache.OptionsBase.maxEntrySize}
     */
    maxEntrySize;
    /**
     * {@link LRUCache.OptionsBase.sizeCalculation}
     */
    sizeCalculation;
    /**
     * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}
     */
    noDeleteOnFetchRejection;
    /**
     * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}
     */
    noDeleteOnStaleGet;
    /**
     * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}
     */
    allowStaleOnFetchAbort;
    /**
     * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}
     */
    allowStaleOnFetchRejection;
    /**
     * {@link LRUCache.OptionsBase.ignoreFetchAbort}
     */
    ignoreFetchAbort;
    // computed properties
    #size;
    #calculatedSize;
    #keyMap;
    #keyList;
    #valList;
    #next;
    #prev;
    #head;
    #tail;
    #free;
    #disposed;
    #sizes;
    #starts;
    #ttls;
    #hasDispose;
    #hasFetchMethod;
    #hasDisposeAfter;
    /**
     * Do not call this method unless you need to inspect the
     * inner workings of the cache.  If anything returned by this
     * object is modified in any way, strange breakage may occur.
     *
     * These fields are private for a reason!
     *
     * @internal
     */
    static unsafeExposeInternals(c) {
        return {
            // properties
            starts: c.#starts,
            ttls: c.#ttls,
            sizes: c.#sizes,
            keyMap: c.#keyMap,
            keyList: c.#keyList,
            valList: c.#valList,
            next: c.#next,
            prev: c.#prev,
            get head() {
                return c.#head;
            },
            get tail() {
                return c.#tail;
            },
            free: c.#free,
            // methods
            isBackgroundFetch: (p) => c.#isBackgroundFetch(p),
            backgroundFetch: (k, index, options, context) => c.#backgroundFetch(k, index, options, context),
            moveToTail: (index) => c.#moveToTail(index),
            indexes: (options) => c.#indexes(options),
            rindexes: (options) => c.#rindexes(options),
            isStale: (index) => c.#isStale(index),
        };
    }
    // Protected read-only members
    /**
     * {@link LRUCache.OptionsBase.max} (read-only)
     */
    get max() {
        return this.#max;
    }
    /**
     * {@link LRUCache.OptionsBase.maxSize} (read-only)
     */
    get maxSize() {
        return this.#maxSize;
    }
    /**
     * The total computed size of items in the cache (read-only)
     */
    get calculatedSize() {
        return this.#calculatedSize;
    }
    /**
     * The number of items stored in the cache (read-only)
     */
    get size() {
        return this.#size;
    }
    /**
     * {@link LRUCache.OptionsBase.fetchMethod} (read-only)
     */
    get fetchMethod() {
        return this.#fetchMethod;
    }
    /**
     * {@link LRUCache.OptionsBase.dispose} (read-only)
     */
    get dispose() {
        return this.#dispose;
    }
    /**
     * {@link LRUCache.OptionsBase.disposeAfter} (read-only)
     */
    get disposeAfter() {
        return this.#disposeAfter;
    }
    constructor(options) {
        const { max = 0, ttl, ttlResolution = 1, ttlAutopurge, updateAgeOnGet, updateAgeOnHas, allowStale, dispose, disposeAfter, noDisposeOnSet, noUpdateTTL, maxSize = 0, maxEntrySize = 0, sizeCalculation, fetchMethod, noDeleteOnFetchRejection, noDeleteOnStaleGet, allowStaleOnFetchRejection, allowStaleOnFetchAbort, ignoreFetchAbort, } = options;
        if (max !== 0 && !isPosInt(max)) {
            throw new TypeError('max option must be a nonnegative integer');
        }
        const UintArray = max ? getUintArray(max) : Array;
        if (!UintArray) {
            throw new Error('invalid max value: ' + max);
        }
        this.#max = max;
        this.#maxSize = maxSize;
        this.maxEntrySize = maxEntrySize || this.#maxSize;
        this.sizeCalculation = sizeCalculation;
        if (this.sizeCalculation) {
            if (!this.#maxSize && !this.maxEntrySize) {
                throw new TypeError('cannot set sizeCalculation without setting maxSize or maxEntrySize');
            }
            if (typeof this.sizeCalculation !== 'function') {
                throw new TypeError('sizeCalculation set to non-function');
            }
        }
        if (fetchMethod !== undefined &&
            typeof fetchMethod !== 'function') {
            throw new TypeError('fetchMethod must be a function if specified');
        }
        this.#fetchMethod = fetchMethod;
        this.#hasFetchMethod = !!fetchMethod;
        this.#keyMap = new Map();
        this.#keyList = new Array(max).fill(undefined);
        this.#valList = new Array(max).fill(undefined);
        this.#next = new UintArray(max);
        this.#prev = new UintArray(max);
        this.#head = 0;
        this.#tail = 0;
        this.#free = Stack.create(max);
        this.#size = 0;
        this.#calculatedSize = 0;
        if (typeof dispose === 'function') {
            this.#dispose = dispose;
        }
        if (typeof disposeAfter === 'function') {
            this.#disposeAfter = disposeAfter;
            this.#disposed = [];
        }
        else {
            this.#disposeAfter = undefined;
            this.#disposed = undefined;
        }
        this.#hasDispose = !!this.#dispose;
        this.#hasDisposeAfter = !!this.#disposeAfter;
        this.noDisposeOnSet = !!noDisposeOnSet;
        this.noUpdateTTL = !!noUpdateTTL;
        this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection;
        this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection;
        this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort;
        this.ignoreFetchAbort = !!ignoreFetchAbort;
        // NB: maxEntrySize is set to maxSize if it's set
        if (this.maxEntrySize !== 0) {
            if (this.#maxSize !== 0) {
                if (!isPosInt(this.#maxSize)) {
                    throw new TypeError('maxSize must be a positive integer if specified');
                }
            }
            if (!isPosInt(this.maxEntrySize)) {
                throw new TypeError('maxEntrySize must be a positive integer if specified');
            }
            this.#initializeSizeTracking();
        }
        this.allowStale = !!allowStale;
        this.noDeleteOnStaleGet = !!noDeleteOnStaleGet;
        this.updateAgeOnGet = !!updateAgeOnGet;
        this.updateAgeOnHas = !!updateAgeOnHas;
        this.ttlResolution =
            isPosInt(ttlResolution) || ttlResolution === 0
                ? ttlResolution
                : 1;
        this.ttlAutopurge = !!ttlAutopurge;
        this.ttl = ttl || 0;
        if (this.ttl) {
            if (!isPosInt(this.ttl)) {
                throw new TypeError('ttl must be a positive integer if specified');
            }
            this.#initializeTTLTracking();
        }
        // do not allow completely unbounded caches
        if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {
            throw new TypeError('At least one of max, maxSize, or ttl is required');
        }
        if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {
            const code = 'LRU_CACHE_UNBOUNDED';
            if (shouldWarn(code)) {
                warned.add(code);
                const msg = 'TTL caching without ttlAutopurge, max, or maxSize can ' +
                    'result in unbounded memory consumption.';
                emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache);
            }
        }
    }
    /**
     * Return the remaining TTL time for a given entry key
     */
    getRemainingTTL(key) {
        return this.#keyMap.has(key) ? Infinity : 0;
    }
    #initializeTTLTracking() {
        const ttls = new ZeroArray(this.#max);
        const starts = new ZeroArray(this.#max);
        this.#ttls = ttls;
        this.#starts = starts;
        this.#setItemTTL = (index, ttl, start = perf.now()) => {
            starts[index] = ttl !== 0 ? start : 0;
            ttls[index] = ttl;
            if (ttl !== 0 && this.ttlAutopurge) {
                const t = setTimeout(() => {
                    if (this.#isStale(index)) {
                        this.delete(this.#keyList[index]);
                    }
                }, ttl + 1);
                // unref() not supported on all platforms
                /* c8 ignore start */
                if (t.unref) {
                    t.unref();
                }
                /* c8 ignore stop */
            }
        };
        this.#updateItemAge = index => {
            starts[index] = ttls[index] !== 0 ? perf.now() : 0;
        };
        this.#statusTTL = (status, index) => {
            if (ttls[index]) {
                const ttl = ttls[index];
                const start = starts[index];
                /* c8 ignore next */
                if (!ttl || !start)
                    return;
                status.ttl = ttl;
                status.start = start;
                status.now = cachedNow || getNow();
                const age = status.now - start;
                status.remainingTTL = ttl - age;
            }
        };
        // debounce calls to perf.now() to the ttlResolution interval so
        // we're not hitting that costly call repeatedly.
        let cachedNow = 0;
        const getNow = () => {
            const n = perf.now();
            if (this.ttlResolution > 0) {
                cachedNow = n;
                const t = setTimeout(() => (cachedNow = 0), this.ttlResolution);
                // not available on all platforms
                /* c8 ignore start */
                if (t.unref) {
                    t.unref();
                }
                /* c8 ignore stop */
            }
            return n;
        };
        this.getRemainingTTL = key => {
            const index = this.#keyMap.get(key);
            if (index === undefined) {
                return 0;
            }
            const ttl = ttls[index];
            const start = starts[index];
            if (!ttl || !start) {
                return Infinity;
            }
            const age = (cachedNow || getNow()) - start;
            return ttl - age;
        };
        this.#isStale = index => {
            const s = starts[index];
            const t = ttls[index];
            return !!t && !!s && (cachedNow || getNow()) - s > t;
        };
    }
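    /*
     * A rough sketch of the TTL machinery wired up above (values are
     * illustrative only):
     *
     *   const cache = new LRUCache({ max: 10, ttl: 5000 })
     *   cache.set('a', 1)                // expires ~5s from now
     *   cache.getRemainingTTL('a')       // => roughly 5000, counting down
     *   cache.set('b', 2, { ttl: 100 })  // per-entry TTL override
     *   // after ~100ms: cache.has('b') === false, cache.get('b') === undefined
     */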
    // conditionally set private methods related to TTL
    #updateItemAge = () => { };
    #statusTTL = () => { };
    #setItemTTL = () => { };
    /* c8 ignore stop */
    #isStale = () => false;
    #initializeSizeTracking() {
        const sizes = new ZeroArray(this.#max);
        this.#calculatedSize = 0;
        this.#sizes = sizes;
        this.#removeItemSize = index => {
            this.#calculatedSize -= sizes[index];
            sizes[index] = 0;
        };
        this.#requireSize = (k, v, size, sizeCalculation) => {
            // provisionally accept background fetches.
            // actual value size will be checked when they return.
            if (this.#isBackgroundFetch(v)) {
                return 0;
            }
            if (!isPosInt(size)) {
                if (sizeCalculation) {
                    if (typeof sizeCalculation !== 'function') {
                        throw new TypeError('sizeCalculation must be a function');
                    }
                    size = sizeCalculation(v, k);
                    if (!isPosInt(size)) {
                        throw new TypeError('sizeCalculation return invalid (expect positive integer)');
                    }
                }
                else {
                    throw new TypeError('invalid size value (must be positive integer). ' +
                        'When maxSize or maxEntrySize is used, sizeCalculation ' +
                        'or size must be set.');
                }
            }
            return size;
        };
        this.#addItemSize = (index, size, status) => {
            sizes[index] = size;
            if (this.#maxSize) {
                const maxSize = this.#maxSize - sizes[index];
                while (this.#calculatedSize > maxSize) {
                    this.#evict(true);
                }
            }
            this.#calculatedSize += sizes[index];
            if (status) {
                status.entrySize = size;
                status.totalCalculatedSize = this.#calculatedSize;
            }
        };
    }
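    /*
     * A rough sketch of size tracking as initialized above (the JSON-length
     * sizeCalculation is only an assumed example):
     *
     *   const cache = new LRUCache({
     *     maxSize: 1024,
     *     sizeCalculation: (value, key) => JSON.stringify(value).length,
     *   })
     *   cache.set('big', { blob: 'x'.repeat(2000) }) // over maxEntrySize, not stored
     *   cache.set('ok', { n: 1 }, { size: 8 })       // explicit per-entry size
     */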
    #removeItemSize = _i => { };
    #addItemSize = (_i, _s, _st) => { };
    #requireSize = (_k, _v, size, sizeCalculation) => {
        if (size || sizeCalculation) {
            throw new TypeError('cannot set size without setting maxSize or maxEntrySize on cache');
        }
        return 0;
    };
    *#indexes({ allowStale = this.allowStale } = {}) {
        if (this.#size) {
            for (let i = this.#tail; true;) {
                if (!this.#isValidIndex(i)) {
                    break;
                }
                if (allowStale || !this.#isStale(i)) {
                    yield i;
                }
                if (i === this.#head) {
                    break;
                }
                else {
                    i = this.#prev[i];
                }
            }
        }
    }
    *#rindexes({ allowStale = this.allowStale } = {}) {
        if (this.#size) {
            for (let i = this.#head; true;) {
                if (!this.#isValidIndex(i)) {
                    break;
                }
                if (allowStale || !this.#isStale(i)) {
                    yield i;
                }
                if (i === this.#tail) {
                    break;
                }
                else {
                    i = this.#next[i];
                }
            }
        }
    }
    #isValidIndex(index) {
        return (index !== undefined &&
            this.#keyMap.get(this.#keyList[index]) === index);
    }
    /**
     * Return a generator yielding `[key, value]` pairs,
     * in order from most recently used to least recently used.
     */
    *entries() {
        for (const i of this.#indexes()) {
            if (this.#valList[i] !== undefined &&
                this.#keyList[i] !== undefined &&
                !this.#isBackgroundFetch(this.#valList[i])) {
                yield [this.#keyList[i], this.#valList[i]];
            }
        }
    }
    /**
     * Inverse order version of {@link LRUCache.entries}
     *
     * Return a generator yielding `[key, value]` pairs,
     * in order from least recently used to most recently used.
     */
    *rentries() {
        for (const i of this.#rindexes()) {
            if (this.#valList[i] !== undefined &&
                this.#keyList[i] !== undefined &&
                !this.#isBackgroundFetch(this.#valList[i])) {
                yield [this.#keyList[i], this.#valList[i]];
            }
        }
    }
    /**
     * Return a generator yielding the keys in the cache,
     * in order from most recently used to least recently used.
     */
    *keys() {
        for (const i of this.#indexes()) {
            const k = this.#keyList[i];
            if (k !== undefined &&
                !this.#isBackgroundFetch(this.#valList[i])) {
                yield k;
            }
        }
    }
    /**
     * Inverse order version of {@link LRUCache.keys}
     *
     * Return a generator yielding the keys in the cache,
     * in order from least recently used to most recently used.
     */
    *rkeys() {
        for (const i of this.#rindexes()) {
            const k = this.#keyList[i];
            if (k !== undefined &&
                !this.#isBackgroundFetch(this.#valList[i])) {
                yield k;
            }
        }
    }
    /**
     * Return a generator yielding the values in the cache,
     * in order from most recently used to least recently used.
     */
    *values() {
        for (const i of this.#indexes()) {
            const v = this.#valList[i];
            if (v !== undefined &&
                !this.#isBackgroundFetch(this.#valList[i])) {
                yield this.#valList[i];
            }
        }
    }
    /**
     * Inverse order version of {@link LRUCache.values}
     *
     * Return a generator yielding the values in the cache,
     * in order from least recently used to most recently used.
     */
    *rvalues() {
        for (const i of this.#rindexes()) {
            const v = this.#valList[i];
            if (v !== undefined &&
                !this.#isBackgroundFetch(this.#valList[i])) {
                yield this.#valList[i];
            }
        }
    }
    /**
     * Iterating over the cache itself yields the same results as
     * {@link LRUCache.entries}
     */
    [Symbol.iterator]() {
        return this.entries();
    }
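    /*
     * Iteration sketch (illustrative only): entries/keys/values yield from
     * most to least recently used, and the cache itself is iterable.
     *
     *   const cache = new LRUCache({ max: 3 })
     *   cache.set('a', 1).set('b', 2).set('c', 3)
     *   [...cache.keys()]   // => ['c', 'b', 'a']
     *   [...cache.rkeys()]  // => ['a', 'b', 'c']
     *   for (const [key, value] of cache) { } // same order as entries()
     */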
    /**
     * A String value that is used in the creation of the default string description of an object.
     * Called by the built-in method Object.prototype.toString.
     */
    [Symbol.toStringTag] = 'LRUCache';
    /**
     * Find a value for which the supplied fn method returns a truthy value,
     * similar to Array.find().  fn is called as fn(value, key, cache).
     */
    find(fn, getOptions = {}) {
        for (const i of this.#indexes()) {
            const v = this.#valList[i];
            const value = this.#isBackgroundFetch(v)
                ? v.__staleWhileFetching
                : v;
            if (value === undefined)
                continue;
            if (fn(value, this.#keyList[i], this)) {
                return this.get(this.#keyList[i], getOptions);
            }
        }
    }
    /**
     * Call the supplied function on each item in the cache, in order from
     * most recently used to least recently used.  fn is called as
     * fn(value, key, cache).  Does not update age or recency of use.
     * Does not iterate over stale values.
     */
    forEach(fn, thisp = this) {
        for (const i of this.#indexes()) {
            const v = this.#valList[i];
            const value = this.#isBackgroundFetch(v)
                ? v.__staleWhileFetching
                : v;
            if (value === undefined)
                continue;
            fn.call(thisp, value, this.#keyList[i], this);
        }
    }
    /**
     * The same as {@link LRUCache.forEach} but items are iterated over in
     * reverse order.  (ie, less recently used items are iterated over first.)
     */
    rforEach(fn, thisp = this) {
        for (const i of this.#rindexes()) {
            const v = this.#valList[i];
            const value = this.#isBackgroundFetch(v)
                ? v.__staleWhileFetching
                : v;
            if (value === undefined)
                continue;
            fn.call(thisp, value, this.#keyList[i], this);
        }
    }
    /**
     * Delete any stale entries. Returns true if anything was removed,
     * false otherwise.
     */
    purgeStale() {
        let deleted = false;
        for (const i of this.#rindexes({ allowStale: true })) {
            if (this.#isStale(i)) {
                this.delete(this.#keyList[i]);
                deleted = true;
            }
        }
        return deleted;
    }
    /**
     * Get the extended info about a given entry, to get its value, size, and
     * TTL info simultaneously. Like {@link LRUCache#dump}, but just for a
     * single key. Always returns stale values, if their info is found in the
     * cache, so be sure to check for expired TTLs if relevant.
     */
    info(key) {
        const i = this.#keyMap.get(key);
        if (i === undefined)
            return undefined;
        const v = this.#valList[i];
        const value = this.#isBackgroundFetch(v)
            ? v.__staleWhileFetching
            : v;
        if (value === undefined)
            return undefined;
        const entry = { value };
        if (this.#ttls && this.#starts) {
            const ttl = this.#ttls[i];
            const start = this.#starts[i];
            if (ttl && start) {
                const remain = ttl - (perf.now() - start);
                entry.ttl = remain;
                entry.start = Date.now();
            }
        }
        if (this.#sizes) {
            entry.size = this.#sizes[i];
        }
        return entry;
    }
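    /*
     * Round-trip sketch (illustrative only): dump() produces portable
     * [key, entry] tuples that load() can replay into another cache
     * created with the same options.
     *
     *   const snapshot = cache.dump()
     *   const copy = new LRUCache({ max: cache.max, ttl: cache.ttl })
     *   copy.load(snapshot)
     */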
    /**
     * Return an array of [key, {@link LRUCache.Entry}] tuples which can be
     * passed to cache.load()
     */
    dump() {
        const arr = [];
        for (const i of this.#indexes({ allowStale: true })) {
            const key = this.#keyList[i];
            const v = this.#valList[i];
            const value = this.#isBackgroundFetch(v)
                ? v.__staleWhileFetching
                : v;
            if (value === undefined || key === undefined)
                continue;
            const entry = { value };
            if (this.#ttls && this.#starts) {
                entry.ttl = this.#ttls[i];
                // always dump the start relative to a portable timestamp
                // it's ok for this to be a bit slow, it's a rare operation.
                const age = perf.now() - this.#starts[i];
                entry.start = Math.floor(Date.now() - age);
            }
            if (this.#sizes) {
                entry.size = this.#sizes[i];
            }
            arr.unshift([key, entry]);
        }
        return arr;
    }
    /**
     * Reset the cache and load in the items in entries in the order listed.
     * Note that the shape of the resulting cache may be different if the
     * same options are not used in both caches.
     */
    load(arr) {
        this.clear();
        for (const [key, entry] of arr) {
            if (entry.start) {
                // entry.start is a portable timestamp, but we may be using
                // node's performance.now(), so calculate the offset, so that
                // we get the intended remaining TTL, no matter how long it's
                // been on ice.
                //
                // it's ok for this to be a bit slow, it's a rare operation.
                const age = Date.now() - entry.start;
                entry.start = perf.now() - age;
            }
            this.set(key, entry.value, entry);
        }
    }
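    /*
     * set() sketch (`token` is a placeholder value): per-call options
     * override the cache-level defaults for that entry only.
     *
     *   cache.set('session', token)                  // uses the default ttl
     *   cache.set('session', token, { ttl: 30000 })  // 30s for this entry only
     *   cache.set('session', undefined)              // alias for delete('session')
     */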
    /**
     * Add a value to the cache.
     *
     * Note: if `undefined` is specified as a value, this is an alias for
     * {@link LRUCache#delete}
     */
    set(k, v, setOptions = {}) {
        if (v === undefined) {
            this.delete(k);
            return this;
        }
        const { ttl = this.ttl, start, noDisposeOnSet = this.noDisposeOnSet, sizeCalculation = this.sizeCalculation, status, } = setOptions;
        let { noUpdateTTL = this.noUpdateTTL } = setOptions;
        const size = this.#requireSize(k, v, setOptions.size || 0, sizeCalculation);
        // if the item doesn't fit, don't do anything
        // NB: maxEntrySize set to maxSize by default
        if (this.maxEntrySize && size > this.maxEntrySize) {
            if (status) {
                status.set = 'miss';
                status.maxEntrySizeExceeded = true;
            }
            // have to delete, in case something is there already.
            this.delete(k);
            return this;
        }
        let index = this.#size === 0 ? undefined : this.#keyMap.get(k);
        if (index === undefined) {
            // addition
            index = (this.#size === 0
                ? this.#tail
                : this.#free.length !== 0
                    ? this.#free.pop()
                    : this.#size === this.#max
                        ? this.#evict(false)
                        : this.#size);
            this.#keyList[index] = k;
            this.#valList[index] = v;
            this.#keyMap.set(k, index);
            this.#next[this.#tail] = index;
            this.#prev[index] = this.#tail;
            this.#tail = index;
            this.#size++;
            this.#addItemSize(index, size, status);
            if (status)
                status.set = 'add';
            noUpdateTTL = false;
        }
        else {
            // update
            this.#moveToTail(index);
            const oldVal = this.#valList[index];
            if (v !== oldVal) {
                if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {
                    oldVal.__abortController.abort(new Error('replaced'));
                    const { __staleWhileFetching: s } = oldVal;
                    if (s !== undefined && !noDisposeOnSet) {
                        if (this.#hasDispose) {
                            this.#dispose?.(s, k, 'set');
                        }
                        if (this.#hasDisposeAfter) {
                            this.#disposed?.push([s, k, 'set']);
                        }
                    }
                }
                else if (!noDisposeOnSet) {
                    if (this.#hasDispose) {
                        this.#dispose?.(oldVal, k, 'set');
                    }
                    if (this.#hasDisposeAfter) {
                        this.#disposed?.push([oldVal, k, 'set']);
                    }
                }
                this.#removeItemSize(index);
                this.#addItemSize(index, size, status);
                this.#valList[index] = v;
                if (status) {
                    status.set = 'replace';
                    const oldValue = oldVal && this.#isBackgroundFetch(oldVal)
                        ? oldVal.__staleWhileFetching
                        : oldVal;
                    if (oldValue !== undefined)
                        status.oldValue = oldValue;
                }
            }
            else if (status) {
                status.set = 'update';
            }
        }
        if (ttl !== 0 && !this.#ttls) {
            this.#initializeTTLTracking();
        }
        if (this.#ttls) {
            if (!noUpdateTTL) {
                this.#setItemTTL(index, ttl, start);
            }
            if (status)
                this.#statusTTL(status, index);
        }
        if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
        return this;
    }
    /**
     * Evict the least recently used item, returning its value or
     * `undefined` if cache is empty.
     */
    pop() {
        try {
            while (this.#size) {
                const val = this.#valList[this.#head];
                this.#evict(true);
                if (this.#isBackgroundFetch(val)) {
                    if (val.__staleWhileFetching) {
                        return val.__staleWhileFetching;
                    }
                }
                else if (val !== undefined) {
                    return val;
                }
            }
        }
        finally {
            if (this.#hasDisposeAfter && this.#disposed) {
                const dt = this.#disposed;
                let task;
                while ((task = dt?.shift())) {
                    this.#disposeAfter?.(...task);
                }
            }
        }
    }
    #evict(free) {
        const head = this.#head;
        const k = this.#keyList[head];
        const v = this.#valList[head];
        if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {
            v.__abortController.abort(new Error('evicted'));
        }
        else if (this.#hasDispose || this.#hasDisposeAfter) {
            if (this.#hasDispose) {
                this.#dispose?.(v, k, 'evict');
            }
            if (this.#hasDisposeAfter) {
                this.#disposed?.push([v, k, 'evict']);
            }
        }
        this.#removeItemSize(head);
        // if we aren't about to use the index, then null these out
        if (free) {
            this.#keyList[head] = undefined;
            this.#valList[head] = undefined;
            this.#free.push(head);
        }
        if (this.#size === 1) {
            this.#head = this.#tail = 0;
            this.#free.length = 0;
        }
        else {
            this.#head = this.#next[head];
        }
        this.#keyMap.delete(k);
        this.#size--;
        return head;
    }
    /**
     * Check if a key is in the cache, without updating the recency of use.
     * Will return false if the item is stale, even though it is technically
     * in the cache.
     *
     * Will not update item age unless
     * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.
     */
    has(k, hasOptions = {}) {
        const { updateAgeOnHas = this.updateAgeOnHas, status } = hasOptions;
        const index = this.#keyMap.get(k);
        if (index !== undefined) {
            const v = this.#valList[index];
            if (this.#isBackgroundFetch(v) &&
                v.__staleWhileFetching === undefined) {
                return false;
            }
            if (!this.#isStale(index)) {
                if (updateAgeOnHas) {
                    this.#updateItemAge(index);
                }
                if (status) {
                    status.has = 'hit';
                    this.#statusTTL(status, index);
                }
                return true;
            }
            else if (status) {
                status.has = 'stale';
                this.#statusTTL(status, index);
            }
        }
        else if (status) {
            status.has = 'miss';
        }
        return false;
    }
    /**
     * Like {@link LRUCache#get} but doesn't update recency or delete stale
     * items.
     *
     * Returns `undefined` if the item is stale, unless
     * {@link LRUCache.OptionsBase.allowStale} is set.
     */
    peek(k, peekOptions = {}) {
        const { allowStale = this.allowStale } = peekOptions;
        const index = this.#keyMap.get(k);
        if (index === undefined ||
            (!allowStale && this.#isStale(index))) {
            return;
        }
        const v = this.#valList[index];
        // either stale and allowed, or forcing a refresh of non-stale value
        return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v;
    }
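    /*
     * has()/peek() sketch (illustrative only): neither bumps recency, and
     * has() reports false once an entry's TTL has expired.
     *
     *   cache.set('k', 'v', { ttl: 1000 })
     *   cache.has('k')   // => true (false after ~1s)
     *   cache.peek('k')  // => 'v', without marking it most-recently-used
     */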
    #backgroundFetch(k, index, options, context) {
        const v = index === undefined ? undefined : this.#valList[index];
        if (this.#isBackgroundFetch(v)) {
            return v;
        }
        const ac = new AC();
        const { signal } = options;
        // when/if our AC signals, then stop listening to theirs.
        signal?.addEventListener('abort', () => ac.abort(signal.reason), {
            signal: ac.signal,
        });
        const fetchOpts = {
            signal: ac.signal,
            options,
            context,
        };
        const cb = (v, updateCache = false) => {
            const { aborted } = ac.signal;
            const ignoreAbort = options.ignoreFetchAbort && v !== undefined;
            if (options.status) {
                if (aborted && !updateCache) {
                    options.status.fetchAborted = true;
                    options.status.fetchError = ac.signal.reason;
                    if (ignoreAbort)
                        options.status.fetchAbortIgnored = true;
                }
                else {
                    options.status.fetchResolved = true;
                }
            }
            if (aborted && !ignoreAbort && !updateCache) {
                return fetchFail(ac.signal.reason);
            }
            // either we didn't abort, and are still here, or we did, and ignored
            const bf = p;
            if (this.#valList[index] === p) {
                if (v === undefined) {
                    if (bf.__staleWhileFetching) {
                        this.#valList[index] = bf.__staleWhileFetching;
                    }
                    else {
                        this.delete(k);
                    }
                }
                else {
                    if (options.status)
                        options.status.fetchUpdated = true;
                    this.set(k, v, fetchOpts.options);
                }
            }
            return v;
        };
        const eb = (er) => {
            if (options.status) {
                options.status.fetchRejected = true;
                options.status.fetchError = er;
            }
            return fetchFail(er);
        };
        const fetchFail = (er) => {
            const { aborted } = ac.signal;
            const allowStaleAborted = aborted && options.allowStaleOnFetchAbort;
            const allowStale = allowStaleAborted || options.allowStaleOnFetchRejection;
            const noDelete = allowStale || options.noDeleteOnFetchRejection;
            const bf = p;
            if (this.#valList[index] === p) {
                // if we allow stale on fetch rejections, then we need to ensure that
                // the stale value is not removed from the cache when the fetch fails.
                const del = !noDelete || bf.__staleWhileFetching === undefined;
                if (del) {
                    this.delete(k);
                }
                else if (!allowStaleAborted) {
                    // still replace the *promise* with the stale value,
                    // since we are done with the promise at this point.
                    // leave it untouched if we're still waiting for an
                    // aborted background fetch that hasn't yet returned.
                    this.#valList[index] = bf.__staleWhileFetching;
                }
            }
            if (allowStale) {
                if (options.status && bf.__staleWhileFetching !== undefined) {
                    options.status.returnedStale = true;
                }
                return bf.__staleWhileFetching;
            }
            else if (bf.__returned === bf) {
                throw er;
            }
        };
        const pcall = (res, rej) => {
            const fmp = this.#fetchMethod?.(k, v, fetchOpts);
            if (fmp && fmp instanceof Promise) {
                fmp.then(v => res(v === undefined ? undefined : v), rej);
            }
            // ignored, we go until we finish, regardless.
            // defer check until we are actually aborting,
            // so fetchMethod can override.
            ac.signal.addEventListener('abort', () => {
                if (!options.ignoreFetchAbort ||
                    options.allowStaleOnFetchAbort) {
                    res(undefined);
                    // when it eventually resolves, update the cache.
                    if (options.allowStaleOnFetchAbort) {
                        res = v => cb(v, true);
                    }
                }
            });
        };
        if (options.status)
            options.status.fetchDispatched = true;
        const p = new Promise(pcall).then(cb, eb);
        const bf = Object.assign(p, {
            __abortController: ac,
            __staleWhileFetching: v,
            __returned: undefined,
        });
        if (index === undefined) {
            // internal, don't expose status.
            this.set(k, bf, { ...fetchOpts.options, status: undefined });
            index = this.#keyMap.get(k);
        }
        else {
            this.#valList[index] = bf;
        }
        return bf;
    }
    #isBackgroundFetch(p) {
        if (!this.#hasFetchMethod)
            return false;
        const b = p;
        return (!!b &&
            b instanceof Promise &&
            b.hasOwnProperty('__staleWhileFetching') &&
            b.__abortController instanceof AC);
    }
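    /*
     * fetch() sketch (the URL and fetchMethod body are placeholders, not part
     * of this library): with a fetchMethod configured, concurrent fetch()
     * calls for the same key share one in-flight request.
     *
     *   const cache = new LRUCache({
     *     max: 100,
     *     ttl: 60000,
     *     fetchMethod: async (key, staleValue, { signal }) => {
     *       const res = await fetch(`https://example.com/api/${key}`, { signal })
     *       return res.json()
     *     },
     *   })
     *   const data = await cache.fetch('users/1') // fetched once, then cached
     */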
    async fetch(k, fetchOptions = {}) {
        const {
        // get options
        allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet,
        // set options
        ttl = this.ttl, noDisposeOnSet = this.noDisposeOnSet, size = 0, sizeCalculation = this.sizeCalculation, noUpdateTTL = this.noUpdateTTL,
        // fetch exclusive options
        noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, ignoreFetchAbort = this.ignoreFetchAbort, allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, context, forceRefresh = false, status, signal, } = fetchOptions;
        if (!this.#hasFetchMethod) {
            if (status)
                status.fetch = 'get';
            return this.get(k, {
                allowStale,
                updateAgeOnGet,
                noDeleteOnStaleGet,
                status,
            });
        }
        const options = {
            allowStale,
            updateAgeOnGet,
            noDeleteOnStaleGet,
            ttl,
            noDisposeOnSet,
            size,
            sizeCalculation,
            noUpdateTTL,
            noDeleteOnFetchRejection,
            allowStaleOnFetchRejection,
            allowStaleOnFetchAbort,
            ignoreFetchAbort,
            status,
            signal,
        };
        let index = this.#keyMap.get(k);
        if (index === undefined) {
            if (status)
                status.fetch = 'miss';
            const p = this.#backgroundFetch(k, index, options, context);
            return (p.__returned = p);
        }
        else {
            // in cache, maybe already fetching
            const v = this.#valList[index];
            if (this.#isBackgroundFetch(v)) {
                const stale = allowStale && v.__staleWhileFetching !== undefined;
                if (status) {
                    status.fetch = 'inflight';
                    if (stale)
                        status.returnedStale = true;
                }
                return stale ? v.__staleWhileFetching : (v.__returned = v);
            }
            // if we force a refresh, that means do NOT serve the cached value,
            // unless we are already in the process of refreshing the cache.
            const isStale = this.#isStale(index);
            if (!forceRefresh && !isStale) {
                if (status)
                    status.fetch = 'hit';
                this.#moveToTail(index);
                if (updateAgeOnGet) {
                    this.#updateItemAge(index);
                }
                if (status)
                    this.#statusTTL(status, index);
                return v;
            }
            // ok, it is stale or a forced refresh, and not already fetching.
            // refresh the cache.
            const p = this.#backgroundFetch(k, index, options, context);
            const hasStale = p.__staleWhileFetching !== undefined;
            const staleVal = hasStale && allowStale;
            if (status) {
                status.fetch = isStale ? 'stale' : 'refresh';
                if (staleVal && isStale)
                    status.returnedStale = true;
            }
            return staleVal ? p.__staleWhileFetching : (p.__returned = p);
        }
    }
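    /*
     * get() sketch (illustrative only): a hit bumps the entry to
     * most-recently-used; a stale hit returns undefined unless allowStale
     * is set per call or cache-wide.
     *
     *   cache.get('k')                        // => value, or undefined on miss
     *   cache.get('k', { allowStale: true })  // may return a stale value
     */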
    /**
     * Return a value from the cache. Will update the recency of the cache
     * entry found.
     *
     * If the key is not found, get() will return `undefined`.
     */
    get(k, getOptions = {}) {
        const { allowStale = this.allowStale, updateAgeOnGet = this.updateAgeOnGet, noDeleteOnStaleGet = this.noDeleteOnStaleGet, status, } = getOptions;
        const index = this.#keyMap.get(k);
        if (index !== undefined) {
            const value = this.#valList[index];
            const fetching = this.#isBackgroundFetch(value);
            if (status)
                this.#statusTTL(status, index);
            if (this.#isStale(index)) {
                if (status)
                    status.get = 'stale';
                // delete only if not an in-flight background fetch
                if (!fetching) {
                    if (!noDeleteOnStaleGet) {
                        this.delete(k);
                    }
                    if (status && allowStale)
                        status.returnedStale = true;
                    return allowStale ? value : undefined;
                }
                else {
                    if (status &&
                        allowStale &&
                        value.__staleWhileFetching !== undefined) {
                        status.returnedStale = true;
                    }
                    return allowStale ? value.__staleWhileFetching : undefined;
                }
            }
            else {
                if (status)
                    status.get = 'hit';
                // if we're currently fetching it, we don't actually have it yet
                // it's not stale, which means this isn't a staleWhileRefetching.
                // If it's not stale, and fetching, AND has a __staleWhileFetching
                // value, then that means the user fetched with {forceRefresh:true},
                // so it's safe to return that value.
                if (fetching) {
                    return value.__staleWhileFetching;
                }
                this.#moveToTail(index);
                if (updateAgeOnGet) {
                    this.#updateItemAge(index);
                }
                return value;
            }
        }
        else if (status) {
            status.get = 'miss';
        }
    }
    #connect(p, n) {
        this.#prev[n] = p;
        this.#next[p] = n;
    }
    #moveToTail(index) {
        // if tail already, nothing to do
        // if head, move head to next[index]
        // else
        //   move next[prev[index]] to next[index] (head has no prev)
        //   move prev[next[index]] to prev[index]
        // prev[index] = tail
        // next[tail] = index
        // tail = index
        if (index !== this.#tail) {
            if (index === this.#head) {
                this.#head = this.#next[index];
            }
            else {
                this.#connect(this.#prev[index], this.#next[index]);
            }
            this.#connect(this.#tail, index);
            this.#tail = index;
        }
    }
    /**
     * Deletes a key out of the cache.
     * Returns true if the key was deleted, false otherwise.
     */
    delete(k) {
        let deleted = false;
        if (this.#size !== 0) {
            const index = this.#keyMap.get(k);
            if (index !== undefined) {
                deleted = true;
                if (this.#size === 1) {
                    this.clear();
                }
                else {
                    this.#removeItemSize(index);
                    const v = this.#valList[index];
                    if (this.#isBackgroundFetch(v)) {
                        v.__abortController.abort(new Error('deleted'));
                    }
                    else if (this.#hasDispose || this.#hasDisposeAfter) {
                        if (this.#hasDispose) {
                            this.#dispose?.(v, k, 'delete');
                        }
                        if (this.#hasDisposeAfter) {
                            this.#disposed?.push([v, k, 'delete']);
                        }
                    }
                    this.#keyMap.delete(k);
                    this.#keyList[index] = undefined;
                    this.#valList[index] = undefined;
                    if (index === this.#tail) {
                        this.#tail = this.#prev[index];
                    }
                    else if (index === this.#head) {
                        this.#head = this.#next[index];
                    }
                    else {
                        const pi = this.#prev[index];
                        this.#next[pi] = this.#next[index];
                        const ni = this.#next[index];
                        this.#prev[ni] = this.#prev[index];
                    }
                    this.#size--;
                    this.#free.push(index);
                }
            }
        }
        if (this.#hasDisposeAfter && this.#disposed?.length) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
        return deleted;
    }
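    /*
     * Disposal sketch (illustrative only): dispose/disposeAfter fire whenever
     * an entry leaves the cache, whether by delete(), clear(), eviction, or
     * replacement via set().
     *
     *   const cache = new LRUCache({
     *     max: 2,
     *     dispose: (value, key, reason) => {
     *       // reason is 'evict', 'set', or 'delete'
     *       value.close?.()
     *     },
     *   })
     */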
    /**
     * Clear the cache entirely, throwing away all values.
     */
    clear() {
        for (const index of this.#rindexes({ allowStale: true })) {
            const v = this.#valList[index];
            if (this.#isBackgroundFetch(v)) {
                v.__abortController.abort(new Error('deleted'));
            }
            else {
                const k = this.#keyList[index];
                if (this.#hasDispose) {
                    this.#dispose?.(v, k, 'delete');
                }
                if (this.#hasDisposeAfter) {
                    this.#disposed?.push([v, k, 'delete']);
                }
            }
        }
        this.#keyMap.clear();
        this.#valList.fill(undefined);
        this.#keyList.fill(undefined);
        if (this.#ttls && this.#starts) {
            this.#ttls.fill(0);
            this.#starts.fill(0);
        }
        if (this.#sizes) {
            this.#sizes.fill(0);
        }
        this.#head = 0;
        this.#tail = 0;
        this.#free.length = 0;
        this.#calculatedSize = 0;
        this.#size = 0;
        if (this.#hasDisposeAfter && this.#disposed) {
            const dt = this.#disposed;
            let task;
            while ((task = dt?.shift())) {
                this.#disposeAfter?.(...task);
            }
        }
    }
}
exports.LRUCache = LRUCache;
//# sourceMappingURL=index.js.map