
{
  "version": 3,
  "sources": ["../../src/index.ts", "../../src/index-cjs.ts"],
  "sourcesContent": ["/**\n * @module LRUCache\n */\n\n// module-private names and types\ntype Perf = { now: () => number }\nconst perf: Perf =\n  typeof performance === 'object' &&\n  performance &&\n  typeof performance.now === 'function'\n    ? performance\n    : Date\n\nconst warned = new Set<string>()\n\n// either a function or a class\ntype ForC = ((...a: any[]) => any) | { new (...a: any[]): any }\n\nconst emitWarning = (\n  msg: string,\n  type: string,\n  code: string,\n  fn: ForC\n) => {\n  typeof process === 'object' &&\n  process &&\n  typeof process.emitWarning === 'function'\n    ? process.emitWarning(msg, type, code, fn)\n    : console.error(`[${code}] ${type}: ${msg}`)\n}\n\nconst shouldWarn = (code: string) => !warned.has(code)\n\nconst TYPE = Symbol('type')\ntype PosInt = number & { [TYPE]: 'Positive Integer' }\ntype Index = number & { [TYPE]: 'LRUCache Index' }\n\nconst isPosInt = (n: any): n is PosInt =>\n  n && n === Math.floor(n) && n > 0 && isFinite(n)\n\ntype UintArray = Uint8Array | Uint16Array | Uint32Array\ntype NumberArray = UintArray | number[]\n\n/* c8 ignore start */\n// This is a little bit ridiculous, tbh.\n// The maximum array length is 2^32-1 or thereabouts on most JS impls.\n// And well before that point, you're caching the entire world, I mean,\n// that's ~32GB of just integers for the next/prev links, plus whatever\n// else to hold that many keys and values.  Just filling the memory with\n// zeroes at init time is brutal when you get that big.\n// But why not be complete?\n// Maybe in the future, these limits will have expanded.\nconst getUintArray = (max: number) =>\n  !isPosInt(max)\n    ? null\n    : max <= Math.pow(2, 8)\n    ? Uint8Array\n    : max <= Math.pow(2, 16)\n    ? Uint16Array\n    : max <= Math.pow(2, 32)\n    ? Uint32Array\n    : max <= Number.MAX_SAFE_INTEGER\n    ? 
ZeroArray\n    : null\n/* c8 ignore stop */\n\nclass ZeroArray extends Array<number> {\n  constructor(size: number) {\n    super(size)\n    this.fill(0)\n  }\n}\n\ntype StackLike = Stack | Index[]\nclass Stack {\n  heap: NumberArray\n  length: number\n  // private constructor\n  static #constructing: boolean = false\n  static create(max: number): StackLike {\n    const HeapCls = getUintArray(max)\n    if (!HeapCls) return []\n    Stack.#constructing = true\n    const s = new Stack(max, HeapCls)\n    Stack.#constructing = false\n    return s\n  }\n  constructor(\n    max: number,\n    HeapCls: { new (n: number): NumberArray }\n  ) {\n    /* c8 ignore start */\n    if (!Stack.#constructing) {\n      throw new TypeError('instantiate Stack using Stack.create(n)')\n    }\n    /* c8 ignore stop */\n    this.heap = new HeapCls(max)\n    this.length = 0\n  }\n  push(n: Index) {\n    this.heap[this.length++] = n\n  }\n  pop(): Index {\n    return this.heap[--this.length] as Index\n  }\n}\n\n/**\n * Promise representing an in-progress {@link LRUCache#fetch} call\n */\nexport type BackgroundFetch<V> = Promise<V | undefined | void> & {\n  __returned: BackgroundFetch<V> | undefined\n  __abortController: AbortController\n  __staleWhileFetching: V | undefined\n}\n\ntype DisposeTask<K, V> = [\n  value: V,\n  key: K,\n  reason: LRUCache.DisposeReason\n]\n\nexport namespace LRUCache {\n  /**\n   * An integer greater than 0, reflecting the calculated size of items\n   */\n  export type Size = number\n\n  /**\n   * Integer greater than 0, representing some number of milliseconds, or the\n   * time at which a TTL started counting from.\n   */\n  export type Milliseconds = number\n\n  /**\n   * An integer greater than 0, reflecting a number of items\n   */\n  export type Count = number\n\n  /**\n   * The reason why an item was removed from the cache, passed\n   * to the {@link Disposer} methods.\n   */\n  export type DisposeReason = 'evict' | 'set' | 'delete'\n  /**\n   * A method called upon item removal, passed as the\n   * {@link OptionsBase.dispose} and/or\n   * {@link OptionsBase.disposeAfter} options.\n   */\n  export type Disposer<K, V> = (\n    value: V,\n    key: K,\n    reason: DisposeReason\n  ) => void\n\n  /**\n   * A function that returns the effective calculated size\n   * of an entry in the cache.\n   */\n  export type SizeCalculator<K, V> = (value: V, key: K) => Size\n\n  /**\n   * Options provided to the\n   * {@link OptionsBase.fetchMethod} function.\n   */\n  export interface FetcherOptions<K, V, FC = unknown> {\n    signal: AbortSignal\n    options: FetcherFetchOptions<K, V, FC>\n    /**\n     * Object provided in the {@link FetchOptions.context} option to\n     * {@link LRUCache#fetch}\n     */\n    context: FC\n  }\n\n  /**\n   * Status object that may be passed to {@link LRUCache#fetch},\n   * {@link LRUCache#get}, {@link LRUCache#set}, and {@link LRUCache#has}.\n   */\n  export interface Status<V> {\n    /**\n     * The status of a set() operation.\n     *\n     * - add: the item was not found in the cache, and was added\n     * - update: the item was in the cache, with the same value provided\n     * - replace: the item was in the cache, and replaced\n     * - miss: the item was not added to the cache for some reason\n     */\n    set?: 'add' | 'update' | 'replace' | 'miss'\n\n    /**\n     * the ttl stored for the item, or undefined if ttls are not used.\n     */\n    ttl?: Milliseconds\n\n    /**\n     * the start time for the item, or undefined if ttls are not used.\n     */\n    
start?: Milliseconds\n\n    /**\n     * The timestamp used for TTL calculation\n     */\n    now?: Milliseconds\n\n    /**\n     * the remaining ttl for the item, or undefined if ttls are not used.\n     */\n    remainingTTL?: Milliseconds\n\n    /**\n     * The calculated size for the item, if sizes are used.\n     */\n    entrySize?: Size\n\n    /**\n     * The total calculated size of the cache, if sizes are used.\n     */\n    totalCalculatedSize?: Size\n\n    /**\n     * A flag indicating that the item was not stored, due to exceeding the\n     * {@link OptionsBase.maxEntrySize}\n     */\n    maxEntrySizeExceeded?: true\n\n    /**\n     * The old value, specified in the case of `set:'update'` or\n     * `set:'replace'`\n     */\n    oldValue?: V\n\n    /**\n     * The results of a {@link LRUCache#has} operation\n     *\n     * - hit: the item was found in the cache\n     * - stale: the item was found in the cache, but is stale\n     * - miss: the item was not found in the cache\n     */\n    has?: 'hit' | 'stale' | 'miss'\n\n    /**\n     * The status of a {@link LRUCache#fetch} operation.\n     * Note that this can change as the underlying fetch() moves through\n     * various states.\n     *\n     * - inflight: there is another fetch() for this key which is in process\n     * - get: there is no fetchMethod, so {@link LRUCache#get} was called.\n     * - miss: the item is not in cache, and will be fetched.\n     * - hit: the item is in the cache, and was resolved immediately.\n     * - stale: the item is in the cache, but stale.\n     * - refresh: the item is in the cache, and not stale, but\n     *   {@link FetchOptions.forceRefresh} was specified.\n     */\n    fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh'\n\n    /**\n     * The {@link OptionsBase.fetchMethod} was called\n     */\n    fetchDispatched?: true\n\n    /**\n     * The cached value was updated after a successful call to\n     * {@link OptionsBase.fetchMethod}\n     */\n    fetchUpdated?: true\n\n    /**\n     * The reason for a fetch() rejection.  Either the error raised by the\n     * {@link OptionsBase.fetchMethod}, or the reason for an\n     * AbortSignal.\n     */\n    fetchError?: Error\n\n    /**\n     * The fetch received an abort signal\n     */\n    fetchAborted?: true\n\n    /**\n     * The abort signal received was ignored, and the fetch was allowed to\n     * continue.\n     */\n    fetchAbortIgnored?: true\n\n    /**\n     * The fetchMethod promise resolved successfully\n     */\n    fetchResolved?: true\n\n    /**\n     * The fetchMethod promise was rejected\n     */\n    fetchRejected?: true\n\n    /**\n     * The status of a {@link LRUCache#get} operation.\n     *\n     * - fetching: The item is currently being fetched.  
If a previous value\n     *   is present and allowed, that will be returned.\n     * - stale: The item is in the cache, and is stale.\n     * - hit: the item is in the cache\n     * - miss: the item is not in the cache\n     */\n    get?: 'stale' | 'hit' | 'miss'\n\n    /**\n     * A fetch or get operation returned a stale value.\n     */\n    returnedStale?: true\n  }\n\n  /**\n   * options which override the options set in the LRUCache constructor\n   * when calling {@link LRUCache#fetch}.\n   *\n   * This is the union of {@link GetOptions} and {@link SetOptions}, plus\n   * {@link OptionsBase.noDeleteOnFetchRejection},\n   * {@link OptionsBase.allowStaleOnFetchRejection},\n   * {@link FetchOptions.forceRefresh}, and\n   * {@link OptionsBase.context}\n   *\n   * Any of these may be modified in the {@link OptionsBase.fetchMethod}\n   * function, but the {@link GetOptions} fields will of course have no\n   * effect, as the {@link LRUCache#get} call already happened by the time\n   * the fetchMethod is called.\n   */\n  export interface FetcherFetchOptions<K, V, FC = unknown>\n    extends Pick<\n      OptionsBase<K, V, FC>,\n      | 'allowStale'\n      | 'updateAgeOnGet'\n      | 'noDeleteOnStaleGet'\n      | 'sizeCalculation'\n      | 'ttl'\n      | 'noDisposeOnSet'\n      | 'noUpdateTTL'\n      | 'noDeleteOnFetchRejection'\n      | 'allowStaleOnFetchRejection'\n      | 'ignoreFetchAbort'\n      | 'allowStaleOnFetchAbort'\n    > {\n    status?: Status<V>\n    size?: Size\n  }\n\n  /**\n   * Options that may be passed to the {@link LRUCache#fetch} method.\n   */\n  export interface FetchOptions<K, V, FC>\n    extends FetcherFetchOptions<K, V, FC> {\n    /**\n     * Set to true to force a re-load of the existing data, even if it\n     * is not yet stale.\n     */\n    forceRefresh?: boolean\n    /**\n     * Context provided to the {@link OptionsBase.fetchMethod} as\n     * the {@link FetcherOptions.context} param.\n     *\n     * If the FC type is specified as unknown (the default),\n     * undefined or void, then this is optional.  
Otherwise, it will\n     * be required.\n     */\n    context?: FC\n    signal?: AbortSignal\n    status?: Status<V>\n  }\n  /**\n   * Options provided to {@link LRUCache#fetch} when the FC type is something\n   * other than `unknown`, `undefined`, or `void`\n   */\n  export interface FetchOptionsWithContext<K, V, FC>\n    extends FetchOptions<K, V, FC> {\n    context: FC\n  }\n  /**\n   * Options provided to {@link LRUCache#fetch} when the FC type is\n   * `undefined` or `void`\n   */\n  export interface FetchOptionsNoContext<K, V, FC>\n    extends FetchOptions<K, V, FC> {\n    context?: undefined\n  }\n\n  /**\n   * Options that may be passed to the {@link LRUCache#has} method.\n   */\n  export interface HasOptions<K, V, FC>\n    extends Pick<OptionsBase<K, V, FC>, 'updateAgeOnHas'> {\n    status?: Status<V>\n  }\n\n  /**\n   * Options that may be passed to the {@link LRUCache#get} method.\n   */\n  export interface GetOptions<K, V, FC>\n    extends Pick<\n      OptionsBase<K, V, FC>,\n      'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet'\n    > {\n    status?: Status<V>\n  }\n\n  /**\n   * Options that may be passed to the {@link LRUCache#peek} method.\n   */\n  export interface PeekOptions<K, V, FC>\n    extends Pick<OptionsBase<K, V, FC>, 'allowStale'> {}\n\n  /**\n   * Options that may be passed to the {@link LRUCache#set} method.\n   */\n  export interface SetOptions<K, V, FC>\n    extends Pick<\n      OptionsBase<K, V, FC>,\n      'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL'\n    > {\n    /**\n     * If size tracking is enabled, then setting an explicit size\n     * in the {@link LRUCache#set} call will prevent calling the\n     * {@link OptionsBase.sizeCalculation} function.\n     */\n    size?: Size\n    /**\n     * If TTL tracking is enabled, then setting an explicit start\n     * time in the {@link LRUCache#set} call will override the\n     * default time from `performance.now()` or `Date.now()`.\n     *\n     * Note that it must be a valid value for whichever time-tracking\n     * method is in use.\n     */\n    start?: Milliseconds\n    status?: Status<V>\n  }\n\n  /**\n   * The type signature for the {@link OptionsBase.fetchMethod} option.\n   */\n  export type Fetcher<K, V, FC = unknown> = (\n    key: K,\n    staleValue: V | undefined,\n    options: FetcherOptions<K, V, FC>\n  ) => Promise<V | void | undefined> | V | void | undefined\n\n  /**\n   * Options which may be passed to the {@link LRUCache} constructor.\n   *\n   * Most of these may be overridden in the various options that use\n   * them.\n   *\n   * Despite all being technically optional, the constructor requires that\n   * a cache is at minimum limited by one or more of {@link OptionsBase.max},\n   * {@link OptionsBase.ttl}, or {@link OptionsBase.maxSize}.\n   *\n   * If {@link OptionsBase.ttl} is used alone, then it is strongly advised\n   * (and in fact required by the type definitions here) that the cache\n   * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially\n   * unbounded storage.\n   */\n  export interface OptionsBase<K, V, FC> {\n    /**\n     * The maximum number of items to store in the cache before evicting\n     * old entries. 
This is read-only on the {@link LRUCache} instance,\n     * and may not be overridden.\n     *\n     * If set, then storage space will be pre-allocated at construction\n     * time, and the cache will perform significantly faster.\n     *\n     * Note that significantly fewer items may be stored, if\n     * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also\n     * set.\n     */\n    max?: Count\n\n    /**\n     * Max time in milliseconds for items to live in cache before they are\n     * considered stale.  Note that stale items are NOT preemptively removed\n     * by default, and MAY live in the cache long after they have expired.\n     *\n     * Also, as this cache is optimized for LRU/MRU operations, some of\n     * the staleness/TTL checks will reduce performance, as they will incur\n     * overhead by deleting items.\n     *\n     * Must be an integer number of ms. If set to 0, this indicates \"no TTL\"\n     *\n     * @default 0\n     */\n    ttl?: Milliseconds\n\n    /**\n     * Minimum amount of time in ms in which to check for staleness.\n     * Defaults to 1, which means that the current time is checked\n     * at most once per millisecond.\n     *\n     * Set to 0 to check the current time every time staleness is tested.\n     * (This reduces performance, and is theoretically unnecessary.)\n     *\n     * Setting this to a higher value will improve performance somewhat\n     * while using ttl tracking, albeit at the expense of keeping stale\n     * items around a bit longer than their TTLs would indicate.\n     *\n     * @default 1\n     */\n    ttlResolution?: Milliseconds\n\n    /**\n     * Preemptively remove stale items from the cache.\n     * Note that this may significantly degrade performance,\n     * especially if the cache is storing a large number of items.\n     * It is almost always best to just leave the stale items in\n     * the cache, and let them fall out as new items are added.\n     *\n     * Note that this means that {@link OptionsBase.allowStale} is a bit\n     * pointless, as stale items will be deleted almost as soon as they\n     * expire.\n     *\n     * @default false\n     */\n    ttlAutopurge?: boolean\n\n    /**\n     * Update the age of items on {@link LRUCache#get}, renewing their TTL\n     *\n     * Has no effect if {@link OptionsBase.ttl} is not set.\n     *\n     * @default false\n     */\n    updateAgeOnGet?: boolean\n\n    /**\n     * Update the age of items on {@link LRUCache#has}, renewing their TTL\n     *\n     * Has no effect if {@link OptionsBase.ttl} is not set.\n     *\n     * @default false\n     */\n    updateAgeOnHas?: boolean\n\n    /**\n     * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return\n     * stale data, if available.\n     */\n    allowStale?: boolean\n\n    /**\n     * Function that is called on items when they are dropped from the cache.\n     * This can be handy if you want to close file descriptors or do other\n     * cleanup tasks when items are no longer accessible. Called with `key,\n     * value`.  
It's called before actually removing the item from the\n     * internal cache, so it is *NOT* safe to re-add them.\n     *\n     * Use {@link OptionsBase.disposeAfter} if you wish to dispose items after\n     * they have been full removed, when it is safe to add them back to the\n     * cache.\n     */\n    dispose?: Disposer<K, V>\n\n    /**\n     * The same as {@link OptionsBase.dispose}, but called *after* the entry\n     * is completely removed and the cache is once again in a clean state.\n     * It is safe to add an item right back into the cache at this point.\n     * However, note that it is *very* easy to inadvertently create infinite\n     * recursion this way.\n     */\n    disposeAfter?: Disposer<K, V>\n\n    /**\n     * Set to true to suppress calling the\n     * {@link OptionsBase.dispose} function if the entry key is\n     * still accessible within the cache.\n     * This may be overridden by passing an options object to\n     * {@link LRUCache#set}.\n     */\n    noDisposeOnSet?: boolean\n\n    /**\n     * Boolean flag to tell the cache to not update the TTL when\n     * setting a new value for an existing key (ie, when updating a value\n     * rather than inserting a new value).  Note that the TTL value is\n     * _always_ set (if provided) when adding a new entry into the cache.\n     *\n     * Has no effect if a {@link OptionsBase.ttl} is not set.\n     */\n    noUpdateTTL?: boolean\n\n    /**\n     * If you wish to track item size, you must provide a maxSize\n     * note that we still will only keep up to max *actual items*,\n     * if max is set, so size tracking may cause fewer than max items\n     * to be stored.  At the extreme, a single item of maxSize size\n     * will cause everything else in the cache to be dropped when it\n     * is added.  
Use with caution!\n     *\n     * Note also that size tracking can negatively impact performance,\n     * though for most cases, only minimally.\n     */\n    maxSize?: Size\n\n    /**\n     * The maximum allowed size for any single item in the cache.\n     *\n     * If a larger item is passed to {@link LRUCache#set} or returned by a\n     * {@link OptionsBase.fetchMethod}, then it will not be stored in the\n     * cache.\n     */\n    maxEntrySize?: Size\n\n    /**\n     * A function that returns a number indicating the item's size.\n     *\n     * If not provided, and {@link OptionsBase.maxSize} or\n     * {@link OptionsBase.maxEntrySize} are set, then all\n     * {@link LRUCache#set} calls **must** provide an explicit\n     * {@link SetOptions.size} or sizeCalculation param.\n     */\n    sizeCalculation?: SizeCalculator<K, V>\n\n    /**\n     * Method that provides the implementation for {@link LRUCache#fetch}\n     */\n    fetchMethod?: Fetcher<K, V, FC>\n\n    /**\n     * Set to true to suppress the deletion of stale data when a\n     * {@link OptionsBase.fetchMethod} returns a rejected promise.\n     */\n    noDeleteOnFetchRejection?: boolean\n\n    /**\n     * Do not delete stale items when they are retrieved with\n     * {@link LRUCache#get}.\n     *\n     * Note that the `get` return value will still be `undefined`\n     * unless {@link OptionsBase.allowStale} is true.\n     */\n    noDeleteOnStaleGet?: boolean\n\n    /**\n     * Set to true to allow returning stale data when a\n     * {@link OptionsBase.fetchMethod} throws an error or returns a rejected\n     * promise.\n     *\n     * This differs from using {@link OptionsBase.allowStale} in that stale\n     * data will ONLY be returned in the case that the\n     * {@link LRUCache#fetch} fails, not any other times.\n     */\n    allowStaleOnFetchRejection?: boolean\n\n    /**\n     * Set to true to return a stale value from the cache when the\n     * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches an `'abort'`\n     * event, whether user-triggered, or due to internal cache behavior.\n     *\n     * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying\n     * {@link OptionsBase.fetchMethod} will still be considered canceled, and its return\n     * value will be ignored and not cached.\n     */\n    allowStaleOnFetchAbort?: boolean\n\n    /**\n     * Set to true to ignore the `abort` event emitted by the `AbortSignal`\n     * object passed to {@link OptionsBase.fetchMethod}, and still cache the\n     * resulting resolution value, as long as it is not `undefined`.\n     *\n     * When used on its own, this means aborted {@link LRUCache#fetch} calls are not\n     * immediately resolved or rejected when they are aborted, and instead\n     * take the full time to await.\n     *\n     * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted\n     * {@link LRUCache#fetch} calls will resolve immediately to their stale\n     * cached value or `undefined`, and will continue to process and eventually\n     * update the cache when they resolve, as long as the resulting value is\n     * not `undefined`, thus supporting a \"return stale on timeout while\n     * refreshing\" mechanism by passing `AbortSignal.timeout(n)` as the signal.\n     *\n     * **Note**: regardless of this setting, an `abort` event _is still\n     * emitted on the `AbortSignal` object_, so may result in invalid results\n     * when passed to other underlying APIs that use AbortSignals.\n     *\n     * This may be 
overridden in the {@link OptionsBase.fetchMethod} or the\n     * call to {@link LRUCache#fetch}.\n     */\n    ignoreFetchAbort?: boolean\n  }\n\n  export interface OptionsMaxLimit<K, V, FC>\n    extends OptionsBase<K, V, FC> {\n    max: Count\n  }\n  export interface OptionsTTLLimit<K, V, FC>\n    extends OptionsBase<K, V, FC> {\n    ttl: Milliseconds\n    ttlAutopurge: boolean\n  }\n  export interface OptionsSizeLimit<K, V, FC>\n    extends OptionsBase<K, V, FC> {\n    maxSize: Size\n  }\n\n  /**\n   * The valid safe options for the {@link LRUCache} constructor\n   */\n  export type Options<K, V, FC> =\n    | OptionsMaxLimit<K, V, FC>\n    | OptionsSizeLimit<K, V, FC>\n    | OptionsTTLLimit<K, V, FC>\n\n  /**\n   * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump}\n   */\n  export interface Entry<V> {\n    value: V\n    ttl?: Milliseconds\n    size?: Size\n    start?: Milliseconds\n  }\n}\n\n/**\n * Default export, the thing you're using this module to get.\n *\n * All properties from the options object (with the exception of\n * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as\n * normal public members. (`max` and `maxBase` are read-only getters.)\n * Changing any of these will alter the defaults for subsequent method calls,\n * but is otherwise safe.\n */\nexport class LRUCache<K extends {}, V extends {}, FC = unknown> {\n  // properties coming in from the options of these, only max and maxSize\n  // really *need* to be protected. The rest can be modified, as they just\n  // set defaults for various methods.\n  readonly #max: LRUCache.Count\n  readonly #maxSize: LRUCache.Size\n  readonly #dispose?: LRUCache.Disposer<K, V>\n  readonly #disposeAfter?: LRUCache.Disposer<K, V>\n  readonly #fetchMethod?: LRUCache.Fetcher<K, V, FC>\n\n  /**\n   * {@link LRUCache.OptionsBase.ttl}\n   */\n  ttl: LRUCache.Milliseconds\n\n  /**\n   * {@link LRUCache.OptionsBase.ttlResolution}\n   */\n  ttlResolution: LRUCache.Milliseconds\n  /**\n   * {@link LRUCache.OptionsBase.ttlAutopurge}\n   */\n  ttlAutopurge: boolean\n  /**\n   * {@link LRUCache.OptionsBase.updateAgeOnGet}\n   */\n  updateAgeOnGet: boolean\n  /**\n   * {@link LRUCache.OptionsBase.updateAgeOnHas}\n   */\n  updateAgeOnHas: boolean\n  /**\n   * {@link LRUCache.OptionsBase.allowStale}\n   */\n  allowStale: boolean\n\n  /**\n   * {@link LRUCache.OptionsBase.noDisposeOnSet}\n   */\n  noDisposeOnSet: boolean\n  /**\n   * {@link LRUCache.OptionsBase.noUpdateTTL}\n   */\n  noUpdateTTL: boolean\n  /**\n   * {@link LRUCache.OptionsBase.maxEntrySize}\n   */\n  maxEntrySize: LRUCache.Size\n  /**\n   * {@link LRUCache.OptionsBase.sizeCalculation}\n   */\n  sizeCalculation?: LRUCache.SizeCalculator<K, V>\n  /**\n   * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection}\n   */\n  noDeleteOnFetchRejection: boolean\n  /**\n   * {@link LRUCache.OptionsBase.noDeleteOnStaleGet}\n   */\n  noDeleteOnStaleGet: boolean\n  /**\n   * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}\n   */\n  allowStaleOnFetchAbort: boolean\n  /**\n   * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection}\n   */\n  allowStaleOnFetchRejection: boolean\n  /**\n   * {@link LRUCache.OptionsBase.ignoreFetchAbort}\n   */\n  ignoreFetchAbort: boolean\n\n  // computed properties\n  #size: LRUCache.Count\n  #calculatedSize: LRUCache.Size\n  #keyMap: Map<K, Index>\n  #keyList: (K | undefined)[]\n  #valList: (V | BackgroundFetch<V> | undefined)[]\n  #next: NumberArray\n  #prev: NumberArray\n  #head: Index\n  #tail: Index\n  #free: StackLike\n  
#disposed?: DisposeTask<K, V>[]\n  #sizes?: ZeroArray\n  #starts?: ZeroArray\n  #ttls?: ZeroArray\n\n  #hasDispose: boolean\n  #hasFetchMethod: boolean\n  #hasDisposeAfter: boolean\n\n  /**\n   * Do not call this method unless you need to inspect the\n   * inner workings of the cache.  If anything returned by this\n   * object is modified in any way, strange breakage may occur.\n   *\n   * These fields are private for a reason!\n   *\n   * @internal\n   */\n  static unsafeExposeInternals<\n    K extends {},\n    V extends {},\n    FC extends unknown = unknown\n  >(c: LRUCache<K, V, FC>) {\n    return {\n      // properties\n      starts: c.#starts,\n      ttls: c.#ttls,\n      sizes: c.#sizes,\n      keyMap: c.#keyMap as Map<K, number>,\n      keyList: c.#keyList,\n      valList: c.#valList,\n      next: c.#next,\n      prev: c.#prev,\n      get head() {\n        return c.#head\n      },\n      get tail() {\n        return c.#tail\n      },\n      free: c.#free,\n      // methods\n      isBackgroundFetch: (p: any) => c.#isBackgroundFetch(p),\n      backgroundFetch: (\n        k: K,\n        index: number | undefined,\n        options: LRUCache.FetchOptions<K, V, FC>,\n        context: any\n      ): BackgroundFetch<V> =>\n        c.#backgroundFetch(\n          k,\n          index as Index | undefined,\n          options,\n          context\n        ),\n      moveToTail: (index: number): void =>\n        c.#moveToTail(index as Index),\n      indexes: (options?: { allowStale: boolean }) =>\n        c.#indexes(options),\n      rindexes: (options?: { allowStale: boolean }) =>\n        c.#rindexes(options),\n      isStale: (index: number | undefined) =>\n        c.#isStale(index as Index),\n    }\n  }\n\n  // Protected read-only members\n\n  /**\n   * {@link LRUCache.OptionsBase.max} (read-only)\n   */\n  get max(): LRUCache.Count {\n    return this.#max\n  }\n  /**\n   * {@link LRUCache.OptionsBase.maxSize} (read-only)\n   */\n  get maxSize(): LRUCache.Count {\n    return this.#maxSize\n  }\n  /**\n   * The total computed size of items in the cache (read-only)\n   */\n  get calculatedSize(): LRUCache.Size {\n    return this.#calculatedSize\n  }\n  /**\n   * The number of items stored in the cache (read-only)\n   */\n  get size(): LRUCache.Count {\n    return this.#size\n  }\n  /**\n   * {@link LRUCache.OptionsBase.fetchMethod} (read-only)\n   */\n  get fetchMethod(): LRUCache.Fetcher<K, V, FC> | undefined {\n    return this.#fetchMethod\n  }\n  /**\n   * {@link LRUCache.OptionsBase.dispose} (read-only)\n   */\n  get dispose() {\n    return this.#dispose\n  }\n  /**\n   * {@link LRUCache.OptionsBase.disposeAfter} (read-only)\n   */\n  get disposeAfter() {\n    return this.#disposeAfter\n  }\n\n  constructor(\n    options: LRUCache.Options<K, V, FC> | LRUCache<K, V, FC>\n  ) {\n    const {\n      max = 0,\n      ttl,\n      ttlResolution = 1,\n      ttlAutopurge,\n      updateAgeOnGet,\n      updateAgeOnHas,\n      allowStale,\n      dispose,\n      disposeAfter,\n      noDisposeOnSet,\n      noUpdateTTL,\n      maxSize = 0,\n      maxEntrySize = 0,\n      sizeCalculation,\n      fetchMethod,\n      noDeleteOnFetchRejection,\n      noDeleteOnStaleGet,\n      allowStaleOnFetchRejection,\n      allowStaleOnFetchAbort,\n      ignoreFetchAbort,\n    } = options\n\n    if (max !== 0 && !isPosInt(max)) {\n      throw new TypeError('max option must be a nonnegative integer')\n    }\n\n    const UintArray = max ? 
getUintArray(max) : Array\n    if (!UintArray) {\n      throw new Error('invalid max value: ' + max)\n    }\n\n    this.#max = max\n    this.#maxSize = maxSize\n    this.maxEntrySize = maxEntrySize || this.#maxSize\n    this.sizeCalculation = sizeCalculation\n    if (this.sizeCalculation) {\n      if (!this.#maxSize && !this.maxEntrySize) {\n        throw new TypeError(\n          'cannot set sizeCalculation without setting maxSize or maxEntrySize'\n        )\n      }\n      if (typeof this.sizeCalculation !== 'function') {\n        throw new TypeError('sizeCalculation set to non-function')\n      }\n    }\n\n    if (\n      fetchMethod !== undefined &&\n      typeof fetchMethod !== 'function'\n    ) {\n      throw new TypeError(\n        'fetchMethod must be a function if specified'\n      )\n    }\n    this.#fetchMethod = fetchMethod\n    this.#hasFetchMethod = !!fetchMethod\n\n    this.#keyMap = new Map()\n    this.#keyList = new Array(max).fill(undefined)\n    this.#valList = new Array(max).fill(undefined)\n    this.#next = new UintArray(max)\n    this.#prev = new UintArray(max)\n    this.#head = 0 as Index\n    this.#tail = 0 as Index\n    this.#free = Stack.create(max)\n    this.#size = 0\n    this.#calculatedSize = 0\n\n    if (typeof dispose === 'function') {\n      this.#dispose = dispose\n    }\n    if (typeof disposeAfter === 'function') {\n      this.#disposeAfter = disposeAfter\n      this.#disposed = []\n    } else {\n      this.#disposeAfter = undefined\n      this.#disposed = undefined\n    }\n    this.#hasDispose = !!this.#dispose\n    this.#hasDisposeAfter = !!this.#disposeAfter\n\n    this.noDisposeOnSet = !!noDisposeOnSet\n    this.noUpdateTTL = !!noUpdateTTL\n    this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection\n    this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection\n    this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort\n    this.ignoreFetchAbort = !!ignoreFetchAbort\n\n    // NB: maxEntrySize is set to maxSize if it's set\n    if (this.maxEntrySize !== 0) {\n      if (this.#maxSize !== 0) {\n        if (!isPosInt(this.#maxSize)) {\n          throw new TypeError(\n            'maxSize must be a positive integer if specified'\n          )\n        }\n      }\n      if (!isPosInt(this.maxEntrySize)) {\n        throw new TypeError(\n          'maxEntrySize must be a positive integer if specified'\n        )\n      }\n      this.#initializeSizeTracking()\n    }\n\n    this.allowStale = !!allowStale\n    this.noDeleteOnStaleGet = !!noDeleteOnStaleGet\n    this.updateAgeOnGet = !!updateAgeOnGet\n    this.updateAgeOnHas = !!updateAgeOnHas\n    this.ttlResolution =\n      isPosInt(ttlResolution) || ttlResolution === 0\n        ? 
ttlResolution\n        : 1\n    this.ttlAutopurge = !!ttlAutopurge\n    this.ttl = ttl || 0\n    if (this.ttl) {\n      if (!isPosInt(this.ttl)) {\n        throw new TypeError(\n          'ttl must be a positive integer if specified'\n        )\n      }\n      this.#initializeTTLTracking()\n    }\n\n    // do not allow completely unbounded caches\n    if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) {\n      throw new TypeError(\n        'At least one of max, maxSize, or ttl is required'\n      )\n    }\n    if (!this.ttlAutopurge && !this.#max && !this.#maxSize) {\n      const code = 'LRU_CACHE_UNBOUNDED'\n      if (shouldWarn(code)) {\n        warned.add(code)\n        const msg =\n          'TTL caching without ttlAutopurge, max, or maxSize can ' +\n          'result in unbounded memory consumption.'\n        emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache)\n      }\n    }\n  }\n\n  /**\n   * Return the remaining TTL time for a given entry key\n   */\n  getRemainingTTL(key: K) {\n    return this.#keyMap.has(key) ? Infinity : 0\n  }\n\n  #initializeTTLTracking() {\n    const ttls = new ZeroArray(this.#max)\n    const starts = new ZeroArray(this.#max)\n    this.#ttls = ttls\n    this.#starts = starts\n\n    this.#setItemTTL = (index, ttl, start = perf.now()) => {\n      starts[index] = ttl !== 0 ? start : 0\n      ttls[index] = ttl\n      if (ttl !== 0 && this.ttlAutopurge) {\n        const t = setTimeout(() => {\n          if (this.#isStale(index)) {\n            this.delete(this.#keyList[index] as K)\n          }\n        }, ttl + 1)\n        // unref() not supported on all platforms\n        /* c8 ignore start */\n        if (t.unref) {\n          t.unref()\n        }\n        /* c8 ignore stop */\n      }\n    }\n\n    this.#updateItemAge = index => {\n      starts[index] = ttls[index] !== 0 ? perf.now() : 0\n    }\n\n    this.#statusTTL = (status, index) => {\n      if (ttls[index]) {\n        const ttl = ttls[index]\n        const start = starts[index]\n        status.ttl = ttl\n        status.start = start\n        status.now = cachedNow || getNow()\n        status.remainingTTL = status.now + ttl - start\n      }\n    }\n\n    // debounce calls to perf.now() to 1s so we're not hitting\n    // that costly call repeatedly.\n    let cachedNow = 0\n    const getNow = () => {\n      const n = perf.now()\n      if (this.ttlResolution > 0) {\n        cachedNow = n\n        const t = setTimeout(\n          () => (cachedNow = 0),\n          this.ttlResolution\n        )\n        // not available on all platforms\n        /* c8 ignore start */\n        if (t.unref) {\n          t.unref()\n        }\n        /* c8 ignore stop */\n      }\n      return n\n    }\n\n    this.getRemainingTTL = key => {\n      const index = this.#keyMap.get(key)\n      if (index === undefined) {\n        return 0\n      }\n      return ttls[index] === 0 || starts[index] === 0\n        ? 
Infinity\n        : starts[index] + ttls[index] - (cachedNow || getNow())\n    }\n\n    this.#isStale = index => {\n      return (\n        ttls[index] !== 0 &&\n        starts[index] !== 0 &&\n        (cachedNow || getNow()) - starts[index] > ttls[index]\n      )\n    }\n  }\n\n  // conditionally set private methods related to TTL\n  #updateItemAge: (index: Index) => void = () => {}\n  #statusTTL: (status: LRUCache.Status<V>, index: Index) => void =\n    () => {}\n  #setItemTTL: (\n    index: Index,\n    ttl: LRUCache.Milliseconds,\n    start?: LRUCache.Milliseconds\n    // ignore because we never call this if we're not already in TTL mode\n    /* c8 ignore start */\n  ) => void = () => {}\n  /* c8 ignore stop */\n\n  #isStale: (index: Index) => boolean = () => false\n\n  #initializeSizeTracking() {\n    const sizes = new ZeroArray(this.#max)\n    this.#calculatedSize = 0\n    this.#sizes = sizes\n    this.#removeItemSize = index => {\n      this.#calculatedSize -= sizes[index]\n      sizes[index] = 0\n    }\n    this.#requireSize = (k, v, size, sizeCalculation) => {\n      // provisionally accept background fetches.\n      // actual value size will be checked when they return.\n      if (this.#isBackgroundFetch(v)) {\n        return 0\n      }\n      if (!isPosInt(size)) {\n        if (sizeCalculation) {\n          if (typeof sizeCalculation !== 'function') {\n            throw new TypeError('sizeCalculation must be a function')\n          }\n          size = sizeCalculation(v, k)\n          if (!isPosInt(size)) {\n            throw new TypeError(\n              'sizeCalculation return invalid (expect positive integer)'\n            )\n          }\n        } else {\n          throw new TypeError(\n            'invalid size value (must be positive integer). 
' +\n              'When maxSize or maxEntrySize is used, sizeCalculation ' +\n              'or size must be set.'\n          )\n        }\n      }\n      return size\n    }\n    this.#addItemSize = (\n      index: Index,\n      size: LRUCache.Size,\n      status?: LRUCache.Status<V>\n    ) => {\n      sizes[index] = size\n      if (this.#maxSize) {\n        const maxSize = this.#maxSize - sizes[index]\n        while (this.#calculatedSize > maxSize) {\n          this.#evict(true)\n        }\n      }\n      this.#calculatedSize += sizes[index]\n      if (status) {\n        status.entrySize = size\n        status.totalCalculatedSize = this.#calculatedSize\n      }\n    }\n  }\n\n  #removeItemSize: (index: Index) => void = _i => {}\n  #addItemSize: (\n    index: Index,\n    size: LRUCache.Size,\n    status?: LRUCache.Status<V>\n  ) => void = (_i, _s, _st) => {}\n  #requireSize: (\n    k: K,\n    v: V | BackgroundFetch<V>,\n    size?: LRUCache.Size,\n    sizeCalculation?: LRUCache.SizeCalculator<K, V>\n  ) => LRUCache.Size = (\n    _k: K,\n    _v: V | BackgroundFetch<V>,\n    size?: LRUCache.Size,\n    sizeCalculation?: LRUCache.SizeCalculator<K, V>\n  ) => {\n    if (size || sizeCalculation) {\n      throw new TypeError(\n        'cannot set size without setting maxSize or maxEntrySize on cache'\n      )\n    }\n    return 0\n  };\n\n  *#indexes({ allowStale = this.allowStale } = {}) {\n    if (this.#size) {\n      for (let i = this.#tail; true; ) {\n        if (!this.#isValidIndex(i)) {\n          break\n        }\n        if (allowStale || !this.#isStale(i)) {\n          yield i\n        }\n        if (i === this.#head) {\n          break\n        } else {\n          i = this.#prev[i] as Index\n        }\n      }\n    }\n  }\n\n  *#rindexes({ allowStale = this.allowStale } = {}) {\n    if (this.#size) {\n      for (let i = this.#head; true; ) {\n        if (!this.#isValidIndex(i)) {\n          break\n        }\n        if (allowStale || !this.#isStale(i)) {\n          yield i\n        }\n        if (i === this.#tail) {\n          break\n        } else {\n          i = this.#next[i] as Index\n        }\n      }\n    }\n  }\n\n  #isValidIndex(index: Index) {\n    return (\n      index !== undefined &&\n      this.#keyMap.get(this.#keyList[index] as K) === index\n    )\n  }\n\n  /**\n   * Return a generator yielding `[key, value]` pairs,\n   * in order from most recently used to least recently used.\n   */\n  *entries() {\n    for (const i of this.#indexes()) {\n      if (\n        this.#valList[i] !== undefined &&\n        this.#keyList[i] !== undefined &&\n        !this.#isBackgroundFetch(this.#valList[i])\n      ) {\n        yield [this.#keyList[i], this.#valList[i]]\n      }\n    }\n  }\n\n  /**\n   * Inverse order version of {@link LRUCache.entries}\n   *\n   * Return a generator yielding `[key, value]` pairs,\n   * in order from least recently used to most recently used.\n   */\n  *rentries() {\n    for (const i of this.#rindexes()) {\n      if (\n        this.#valList[i] !== undefined &&\n        this.#keyList[i] !== undefined &&\n        !this.#isBackgroundFetch(this.#valList[i])\n      ) {\n        yield [this.#keyList[i], this.#valList[i]]\n      }\n    }\n  }\n\n  /**\n   * Return a generator yielding the keys in the cache,\n   * in order from most recently used to least recently used.\n   */\n  *keys() {\n    for (const i of this.#indexes()) {\n      const k = this.#keyList[i]\n      if (\n        k !== undefined &&\n        !this.#isBackgroundFetch(this.#valList[i])\n      ) {\n  
      yield k\n      }\n    }\n  }\n\n  /**\n   * Inverse order version of {@link LRUCache.keys}\n   *\n   * Return a generator yielding the keys in the cache,\n   * in order from least recently used to most recently used.\n   */\n  *rkeys() {\n    for (const i of this.#rindexes()) {\n      const k = this.#keyList[i]\n      if (\n        k !== undefined &&\n        !this.#isBackgroundFetch(this.#valList[i])\n      ) {\n        yield k\n      }\n    }\n  }\n\n  /**\n   * Return a generator yielding the values in the cache,\n   * in order from most recently used to least recently used.\n   */\n  *values() {\n    for (const i of this.#indexes()) {\n      const v = this.#valList[i]\n      if (\n        v !== undefined &&\n        !this.#isBackgroundFetch(this.#valList[i])\n      ) {\n        yield this.#valList[i]\n      }\n    }\n  }\n\n  /**\n   * Inverse order version of {@link LRUCache.values}\n   *\n   * Return a generator yielding the values in the cache,\n   * in order from least recently used to most recently used.\n   */\n  *rvalues() {\n    for (const i of this.#rindexes()) {\n      const v = this.#valList[i]\n      if (\n        v !== undefined &&\n        !this.#isBackgroundFetch(this.#valList[i])\n      ) {\n        yield this.#valList[i]\n      }\n    }\n  }\n\n  /**\n   * Iterating over the cache itself yields the same results as\n   * {@link LRUCache.entries}\n   */\n  [Symbol.iterator]() {\n    return this.entries()\n  }\n\n  /**\n   * Find a value for which the supplied fn method returns a truthy value,\n   * similar to Array.find().  fn is called as fn(value, key, cache).\n   */\n  find(\n    fn: (v: V, k: K, self: LRUCache<K, V, FC>) => boolean,\n    getOptions: LRUCache.GetOptions<K, V, FC> = {}\n  ) {\n    for (const i of this.#indexes()) {\n      const v = this.#valList[i]\n      const value = this.#isBackgroundFetch(v)\n        ? v.__staleWhileFetching\n        : v\n      if (value === undefined) continue\n      if (fn(value, this.#keyList[i] as K, this)) {\n        return this.get(this.#keyList[i] as K, getOptions)\n      }\n    }\n  }\n\n  /**\n   * Call the supplied function on each item in the cache, in order from\n   * most recently used to least recently used.  fn is called as\n   * fn(value, key, cache).  Does not update age or recenty of use.\n   * Does not iterate over stale values.\n   */\n  forEach(\n    fn: (v: V, k: K, self: LRUCache<K, V, FC>) => any,\n    thisp: any = this\n  ) {\n    for (const i of this.#indexes()) {\n      const v = this.#valList[i]\n      const value = this.#isBackgroundFetch(v)\n        ? v.__staleWhileFetching\n        : v\n      if (value === undefined) continue\n      fn.call(thisp, value, this.#keyList[i] as K, this)\n    }\n  }\n\n  /**\n   * The same as {@link LRUCache.forEach} but items are iterated over in\n   * reverse order.  (ie, less recently used items are iterated over first.)\n   */\n  rforEach(\n    fn: (v: V, k: K, self: LRUCache<K, V, FC>) => any,\n    thisp: any = this\n  ) {\n    for (const i of this.#rindexes()) {\n      const v = this.#valList[i]\n      const value = this.#isBackgroundFetch(v)\n        ? v.__staleWhileFetching\n        : v\n      if (value === undefined) continue\n      fn.call(thisp, value, this.#keyList[i] as K, this)\n    }\n  }\n\n  /**\n   * Delete any stale entries. 
Returns true if anything was removed,\n   * false otherwise.\n   */\n  purgeStale() {\n    let deleted = false\n    for (const i of this.#rindexes({ allowStale: true })) {\n      if (this.#isStale(i)) {\n        this.delete(this.#keyList[i] as K)\n        deleted = true\n      }\n    }\n    return deleted\n  }\n\n  /**\n   * Return an array of [key, {@link LRUCache.Entry}] tuples which can be\n   * passed to cache.load()\n   */\n  dump() {\n    const arr: [K, LRUCache.Entry<V>][] = []\n    for (const i of this.#indexes({ allowStale: true })) {\n      const key = this.#keyList[i]\n      const v = this.#valList[i]\n      const value: V | undefined = this.#isBackgroundFetch(v)\n        ? v.__staleWhileFetching\n        : v\n      if (value === undefined || key === undefined) continue\n      const entry: LRUCache.Entry<V> = { value }\n      if (this.#ttls && this.#starts) {\n        entry.ttl = this.#ttls[i]\n        // always dump the start relative to a portable timestamp\n        // it's ok for this to be a bit slow, it's a rare operation.\n        const age = perf.now() - this.#starts[i]\n        entry.start = Math.floor(Date.now() - age)\n      }\n      if (this.#sizes) {\n        entry.size = this.#sizes[i]\n      }\n      arr.unshift([key, entry])\n    }\n    return arr\n  }\n\n  /**\n   * Reset the cache and load in the items in entries in the order listed.\n   * Note that the shape of the resulting cache may be different if the\n   * same options are not used in both caches.\n   */\n  load(arr: [K, LRUCache.Entry<V>][]) {\n    this.clear()\n    for (const [key, entry] of arr) {\n      if (entry.start) {\n        // entry.start is a portable timestamp, but we may be using\n        // node's performance.now(), so calculate the offset, so that\n        // we get the intended remaining TTL, no matter how long it's\n        // been on ice.\n        //\n        // it's ok for this to be a bit slow, it's a rare operation.\n        const age = Date.now() - entry.start\n        entry.start = perf.now() - age\n      }\n      this.set(key, entry.value, entry)\n    }\n  }\n\n  /**\n   * Add a value to the cache.\n   */\n  set(\n    k: K,\n    v: V | BackgroundFetch<V>,\n    setOptions: LRUCache.SetOptions<K, V, FC> = {}\n  ) {\n    const {\n      ttl = this.ttl,\n      start,\n      noDisposeOnSet = this.noDisposeOnSet,\n      sizeCalculation = this.sizeCalculation,\n      status,\n    } = setOptions\n    let { noUpdateTTL = this.noUpdateTTL } = setOptions\n\n    const size = this.#requireSize(\n      k,\n      v,\n      setOptions.size || 0,\n      sizeCalculation\n    )\n    // if the item doesn't fit, don't do anything\n    // NB: maxEntrySize set to maxSize by default\n    if (this.maxEntrySize && size > this.maxEntrySize) {\n      if (status) {\n        status.set = 'miss'\n        status.maxEntrySizeExceeded = true\n      }\n      // have to delete, in case something is there already.\n      this.delete(k)\n      return this\n    }\n    let index = this.#size === 0 ? undefined : this.#keyMap.get(k)\n    if (index === undefined) {\n      // addition\n      index = (\n        this.#size === 0\n          ? this.#tail\n          : this.#free.length !== 0\n          ? this.#free.pop()\n          : this.#size === this.#max\n          ? 
this.#evict(false)\n          : this.#size\n      ) as Index\n      this.#keyList[index] = k\n      this.#valList[index] = v\n      this.#keyMap.set(k, index)\n      this.#next[this.#tail] = index\n      this.#prev[index] = this.#tail\n      this.#tail = index\n      this.#size++\n      this.#addItemSize(index, size, status)\n      if (status) status.set = 'add'\n      noUpdateTTL = false\n    } else {\n      // update\n      this.#moveToTail(index)\n      const oldVal = this.#valList[index] as V | BackgroundFetch<V>\n      if (v !== oldVal) {\n        if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) {\n          oldVal.__abortController.abort(new Error('replaced'))\n        } else if (!noDisposeOnSet) {\n          if (this.#hasDispose) {\n            this.#dispose?.(oldVal as V, k, 'set')\n          }\n          if (this.#hasDisposeAfter) {\n            this.#disposed?.push([oldVal as V, k, 'set'])\n          }\n        }\n        this.#removeItemSize(index)\n        this.#addItemSize(index, size, status)\n        this.#valList[index] = v\n        if (status) {\n          status.set = 'replace'\n          const oldValue =\n            oldVal && this.#isBackgroundFetch(oldVal)\n              ? oldVal.__staleWhileFetching\n              : oldVal\n          if (oldValue !== undefined) status.oldValue = oldValue\n        }\n      } else if (status) {\n        status.set = 'update'\n      }\n    }\n    if (ttl !== 0 && !this.#ttls) {\n      this.#initializeTTLTracking()\n    }\n    if (this.#ttls) {\n      if (!noUpdateTTL) {\n        this.#setItemTTL(index, ttl, start)\n      }\n      if (status) this.#statusTTL(status, index)\n    }\n    if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) {\n      const dt = this.#disposed\n      let task: DisposeTask<K, V> | undefined\n      while ((task = dt?.shift())) {\n        this.#disposeAfter?.(...task)\n      }\n    }\n    return this\n  }\n\n  /**\n   * Evict the least recently used item, returning its value or\n   * `undefined` if cache is empty.\n   */\n  pop(): V | undefined {\n    try {\n      while (this.#size) {\n        const val = this.#valList[this.#head]\n        this.#evict(true)\n        if (this.#isBackgroundFetch(val)) {\n          if (val.__staleWhileFetching) {\n            return val.__staleWhileFetching\n          }\n        } else if (val !== undefined) {\n          return val\n        }\n      }\n    } finally {\n      if (this.#hasDisposeAfter && this.#disposed) {\n        const dt = this.#disposed\n        let task: DisposeTask<K, V> | undefined\n        while ((task = dt?.shift())) {\n          this.#disposeAfter?.(...task)\n        }\n      }\n    }\n  }\n\n  #evict(free: boolean) {\n    const head = this.#head\n    const k = this.#keyList[head] as K\n    const v = this.#valList[head] as V\n    if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) {\n      v.__abortController.abort(new Error('evicted'))\n    } else if (this.#hasDispose || this.#hasDisposeAfter) {\n      if (this.#hasDispose) {\n        this.#dispose?.(v, k, 'evict')\n      }\n      if (this.#hasDisposeAfter) {\n        this.#disposed?.push([v, k, 'evict'])\n      }\n    }\n    this.#removeItemSize(head)\n    // if we aren't about to use the index, then null these out\n    if (free) {\n      this.#keyList[head] = undefined\n      this.#valList[head] = undefined\n      this.#free.push(head)\n    }\n    if (this.#size === 1) {\n      this.#head = this.#tail = 0 as Index\n      this.#free.length = 0\n    } else {\n      this.#head = 
this.#next[head] as Index\n    }\n    this.#keyMap.delete(k)\n    this.#size--\n    return head\n  }\n\n  /**\n   * Check if a key is in the cache, without updating the recency of use.\n   * Will return false if the item is stale, even though it is technically\n   * in the cache.\n   *\n   * Will not update item age unless\n   * {@link LRUCache.OptionsBase.updateAgeOnHas} is set.\n   */\n  has(k: K, hasOptions: LRUCache.HasOptions<K, V, FC> = {}) {\n    const { updateAgeOnHas = this.updateAgeOnHas, status } =\n      hasOptions\n    const index = this.#keyMap.get(k)\n    if (index !== undefined) {\n      const v = this.#valList[index]\n      if (\n        this.#isBackgroundFetch(v) &&\n        v.__staleWhileFetching === undefined\n      ) {\n        return false\n      }\n      if (!this.#isStale(index)) {\n        if (updateAgeOnHas) {\n          this.#updateItemAge(index)\n        }\n        if (status) {\n          status.has = 'hit'\n          this.#statusTTL(status, index)\n        }\n        return true\n      } else if (status) {\n        status.has = 'stale'\n        this.#statusTTL(status, index)\n      }\n    } else if (status) {\n      status.has = 'miss'\n    }\n    return false\n  }\n\n  /**\n   * Like {@link LRUCache#get} but doesn't update recency or delete stale\n   * items.\n   *\n   * Returns `undefined` if the item is stale, unless\n   * {@link LRUCache.OptionsBase.allowStale} is set.\n   */\n  peek(k: K, peekOptions: LRUCache.PeekOptions<K, V, FC> = {}) {\n    const { allowStale = this.allowStale } = peekOptions\n    const index = this.#keyMap.get(k)\n    if (\n      index !== undefined &&\n      (allowStale || !this.#isStale(index))\n    ) {\n      const v = this.#valList[index]\n      // either stale and allowed, or forcing a refresh of non-stale value\n      return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v\n    }\n  }\n\n  #backgroundFetch(\n    k: K,\n    index: Index | undefined,\n    options: LRUCache.FetchOptions<K, V, FC>,\n    context: any\n  ): BackgroundFetch<V> {\n    const v = index === undefined ? 
undefined : this.#valList[index]\n    if (this.#isBackgroundFetch(v)) {\n      return v\n    }\n\n    const ac = new AbortController()\n    const { signal } = options\n    // when/if our AC signals, then stop listening to theirs.\n    signal?.addEventListener('abort', () => ac.abort(signal.reason), {\n      signal: ac.signal,\n    })\n\n    const fetchOpts = {\n      signal: ac.signal,\n      options,\n      context,\n    }\n\n    const cb = (\n      v: V | void | undefined,\n      updateCache = false\n    ): V | undefined | void => {\n      const { aborted } = ac.signal\n      const ignoreAbort = options.ignoreFetchAbort && v !== undefined\n      if (options.status) {\n        if (aborted && !updateCache) {\n          options.status.fetchAborted = true\n          options.status.fetchError = ac.signal.reason\n          if (ignoreAbort) options.status.fetchAbortIgnored = true\n        } else {\n          options.status.fetchResolved = true\n        }\n      }\n      if (aborted && !ignoreAbort && !updateCache) {\n        return fetchFail(ac.signal.reason)\n      }\n      // either we didn't abort, and are still here, or we did, and ignored\n      const bf = p as BackgroundFetch<V>\n      if (this.#valList[index as Index] === p) {\n        if (v === undefined) {\n          if (bf.__staleWhileFetching) {\n            this.#valList[index as Index] = bf.__staleWhileFetching\n          } else {\n            this.delete(k)\n          }\n        } else {\n          if (options.status) options.status.fetchUpdated = true\n          this.set(k, v, fetchOpts.options)\n        }\n      }\n      return v\n    }\n\n    const eb = (er: any) => {\n      if (options.status) {\n        options.status.fetchRejected = true\n        options.status.fetchError = er\n      }\n      return fetchFail(er)\n    }\n\n    const fetchFail = (er: any): V | undefined => {\n      const { aborted } = ac.signal\n      const allowStaleAborted =\n        aborted && options.allowStaleOnFetchAbort\n      const allowStale =\n        allowStaleAborted || options.allowStaleOnFetchRejection\n      const noDelete = allowStale || options.noDeleteOnFetchRejection\n      const bf = p as BackgroundFetch<V>\n      if (this.#valList[index as Index] === p) {\n        // if we allow stale on fetch rejections, then we need to ensure that\n        // the stale value is not removed from the cache when the fetch fails.\n        const del = !noDelete || bf.__staleWhileFetching === undefined\n        if (del) {\n          this.delete(k)\n        } else if (!allowStaleAborted) {\n          // still replace the *promise* with the stale value,\n          // since we are done with the promise at this point.\n          // leave it untouched if we're still waiting for an\n          // aborted background fetch that hasn't yet returned.\n          this.#valList[index as Index] = bf.__staleWhileFetching\n        }\n      }\n      if (allowStale) {\n        if (options.status && bf.__staleWhileFetching !== undefined) {\n          options.status.returnedStale = true\n        }\n        return bf.__staleWhileFetching\n      } else if (bf.__returned === bf) {\n        throw er\n      }\n    }\n\n    const pcall = (\n      res: (v: V | void | undefined) => void,\n      rej: (e: any) => void\n    ) => {\n      const fmp = this.#fetchMethod?.(k, v, fetchOpts)\n      if (fmp && fmp instanceof Promise) {\n        fmp.then(v => res(v), rej)\n      }\n      // ignored, we go until we finish, regardless.\n      // defer check until we are actually aborting,\n      // 
so fetchMethod can override.\n      ac.signal.addEventListener('abort', () => {\n        if (\n          !options.ignoreFetchAbort ||\n          options.allowStaleOnFetchAbort\n        ) {\n          res()\n          // when it eventually resolves, update the cache.\n          if (options.allowStaleOnFetchAbort) {\n            res = v => cb(v, true)\n          }\n        }\n      })\n    }\n\n    if (options.status) options.status.fetchDispatched = true\n    const p = new Promise(pcall).then(cb, eb)\n    const bf = Object.assign(p, {\n      __abortController: ac,\n      __staleWhileFetching: v,\n      __returned: undefined,\n    })\n\n    if (index === undefined) {\n      // internal, don't expose status.\n      this.set(k, bf, { ...fetchOpts.options, status: undefined })\n      index = this.#keyMap.get(k)\n    } else {\n      this.#valList[index] = bf\n    }\n    return bf\n  }\n\n  #isBackgroundFetch(p: any): p is BackgroundFetch<V> {\n    if (!this.#hasFetchMethod) return false\n    const b = p as BackgroundFetch<V>\n    return (\n      !!b &&\n      b instanceof Promise &&\n      b.hasOwnProperty('__staleWhileFetching') &&\n      b.__abortController instanceof AbortController\n    )\n  }\n\n  /**\n   * Make an asynchronous cached fetch using the\n   * {@link LRUCache.OptionsBase.fetchMethod} function.\n   *\n   * If multiple fetches for the same key are issued, then they will all be\n   * coalesced into a single call to fetchMethod.\n   *\n   * Note that this means that handling options such as\n   * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort},\n   * {@link LRUCache.FetchOptions.signal},\n   * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be\n   * determined by the FIRST fetch() call for a given key.\n   *\n   * This is a known (fixable) shortcoming which will be addresed on when\n   * someone complains about it, as the fix would involve added complexity and\n   * may not be worth the costs for this edge case.\n   */\n  fetch(\n    k: K,\n    fetchOptions: unknown extends FC\n      ? LRUCache.FetchOptions<K, V, FC>\n      : FC extends undefined | void\n      ? LRUCache.FetchOptionsNoContext<K, V, FC>\n      : LRUCache.FetchOptionsWithContext<K, V, FC>\n  ): Promise<void | V>\n  // this overload not allowed if context is required\n  fetch(\n    k: unknown extends FC\n      ? K\n      : FC extends undefined | void\n      ? K\n      : never,\n    fetchOptions?: unknown extends FC\n      ? LRUCache.FetchOptions<K, V, FC>\n      : FC extends undefined | void\n      ? 
LRUCache.FetchOptionsNoContext<K, V, FC>\n      : never\n  ): Promise<void | V>\n  async fetch(\n    k: K,\n    fetchOptions: LRUCache.FetchOptions<K, V, FC> = {}\n  ): Promise<void | V> {\n    const {\n      // get options\n      allowStale = this.allowStale,\n      updateAgeOnGet = this.updateAgeOnGet,\n      noDeleteOnStaleGet = this.noDeleteOnStaleGet,\n      // set options\n      ttl = this.ttl,\n      noDisposeOnSet = this.noDisposeOnSet,\n      size = 0,\n      sizeCalculation = this.sizeCalculation,\n      noUpdateTTL = this.noUpdateTTL,\n      // fetch exclusive options\n      noDeleteOnFetchRejection = this.noDeleteOnFetchRejection,\n      allowStaleOnFetchRejection = this.allowStaleOnFetchRejection,\n      ignoreFetchAbort = this.ignoreFetchAbort,\n      allowStaleOnFetchAbort = this.allowStaleOnFetchAbort,\n      context,\n      forceRefresh = false,\n      status,\n      signal,\n    } = fetchOptions\n\n    if (!this.#hasFetchMethod) {\n      if (status) status.fetch = 'get'\n      return this.get(k, {\n        allowStale,\n        updateAgeOnGet,\n        noDeleteOnStaleGet,\n        status,\n      })\n    }\n\n    const options = {\n      allowStale,\n      updateAgeOnGet,\n      noDeleteOnStaleGet,\n      ttl,\n      noDisposeOnSet,\n      size,\n      sizeCalculation,\n      noUpdateTTL,\n      noDeleteOnFetchRejection,\n      allowStaleOnFetchRejection,\n      allowStaleOnFetchAbort,\n      ignoreFetchAbort,\n      status,\n      signal,\n    }\n\n    let index = this.#keyMap.get(k)\n    if (index === undefined) {\n      if (status) status.fetch = 'miss'\n      const p = this.#backgroundFetch(k, index, options, context)\n      return (p.__returned = p)\n    } else {\n      // in cache, maybe already fetching\n      const v = this.#valList[index]\n      if (this.#isBackgroundFetch(v)) {\n        const stale =\n          allowStale && v.__staleWhileFetching !== undefined\n        if (status) {\n          status.fetch = 'inflight'\n          if (stale) status.returnedStale = true\n        }\n        return stale ? v.__staleWhileFetching : (v.__returned = v)\n      }\n\n      // if we force a refresh, that means do NOT serve the cached value,\n      // unless we are already in the process of refreshing the cache.\n      const isStale = this.#isStale(index)\n      if (!forceRefresh && !isStale) {\n        if (status) status.fetch = 'hit'\n        this.#moveToTail(index)\n        if (updateAgeOnGet) {\n          this.#updateItemAge(index)\n        }\n        if (status) this.#statusTTL(status, index)\n        return v\n      }\n\n      // ok, it is stale or a forced refresh, and not already fetching.\n      // refresh the cache.\n      const p = this.#backgroundFetch(k, index, options, context)\n      const hasStale = p.__staleWhileFetching !== undefined\n      const staleVal = hasStale && allowStale\n      if (status) {\n        status.fetch = isStale ? 'stale' : 'refresh'\n        if (staleVal && isStale) status.returnedStale = true\n      }\n      return staleVal ? p.__staleWhileFetching : (p.__returned = p)\n    }\n  }\n\n  /**\n   * Return a value from the cache. 
Will update the recency of the cache\n   * entry found.\n   *\n   * If the key is not found, get() will return `undefined`.\n   */\n  get(k: K, getOptions: LRUCache.GetOptions<K, V, FC> = {}) {\n    const {\n      allowStale = this.allowStale,\n      updateAgeOnGet = this.updateAgeOnGet,\n      noDeleteOnStaleGet = this.noDeleteOnStaleGet,\n      status,\n    } = getOptions\n    const index = this.#keyMap.get(k)\n    if (index !== undefined) {\n      const value = this.#valList[index]\n      const fetching = this.#isBackgroundFetch(value)\n      if (status) this.#statusTTL(status, index)\n      if (this.#isStale(index)) {\n        if (status) status.get = 'stale'\n        // delete only if not an in-flight background fetch\n        if (!fetching) {\n          if (!noDeleteOnStaleGet) {\n            this.delete(k)\n          }\n          if (status && allowStale) status.returnedStale = true\n          return allowStale ? value : undefined\n        } else {\n          if (\n            status &&\n            allowStale &&\n            value.__staleWhileFetching !== undefined\n          ) {\n            status.returnedStale = true\n          }\n          return allowStale ? value.__staleWhileFetching : undefined\n        }\n      } else {\n        if (status) status.get = 'hit'\n        // if we're currently fetching it, we don't actually have it yet\n        // it's not stale, which means this isn't a staleWhileRefetching.\n        // If it's not stale, and fetching, AND has a __staleWhileFetching\n        // value, then that means the user fetched with {forceRefresh:true},\n        // so it's safe to return that value.\n        if (fetching) {\n          return value.__staleWhileFetching\n        }\n        this.#moveToTail(index)\n        if (updateAgeOnGet) {\n          this.#updateItemAge(index)\n        }\n        return value\n      }\n    } else if (status) {\n      status.get = 'miss'\n    }\n  }\n\n  #connect(p: Index, n: Index) {\n    this.#prev[n] = p\n    this.#next[p] = n\n  }\n\n  #moveToTail(index: Index): void {\n    // if tail already, nothing to do\n    // if head, move head to next[index]\n    // else\n    //   move next[prev[index]] to next[index] (head has no prev)\n    //   move prev[next[index]] to prev[index]\n    // prev[index] = tail\n    // next[tail] = index\n    // tail = index\n    if (index !== this.#tail) {\n      if (index === this.#head) {\n        this.#head = this.#next[index] as Index\n      } else {\n        this.#connect(\n          this.#prev[index] as Index,\n          this.#next[index] as Index\n        )\n      }\n      this.#connect(this.#tail, index)\n      this.#tail = index\n    }\n  }\n\n  /**\n   * Deletes a key out of the cache.\n   * Returns true if the key was deleted, false otherwise.\n   */\n  delete(k: K) {\n    let deleted = false\n    if (this.#size !== 0) {\n      const index = this.#keyMap.get(k)\n      if (index !== undefined) {\n        deleted = true\n        if (this.#size === 1) {\n          this.clear()\n        } else {\n          this.#removeItemSize(index)\n          const v = this.#valList[index]\n          if (this.#isBackgroundFetch(v)) {\n            v.__abortController.abort(new Error('deleted'))\n          } else if (this.#hasDispose || this.#hasDisposeAfter) {\n            if (this.#hasDispose) {\n              this.#dispose?.(v as V, k, 'delete')\n            }\n            if (this.#hasDisposeAfter) {\n              this.#disposed?.push([v as V, k, 'delete'])\n            }\n          }\n          
this.#keyMap.delete(k)\n          this.#keyList[index] = undefined\n          this.#valList[index] = undefined\n          if (index === this.#tail) {\n            this.#tail = this.#prev[index] as Index\n          } else if (index === this.#head) {\n            this.#head = this.#next[index] as Index\n          } else {\n            this.#next[this.#prev[index]] = this.#next[index]\n            this.#prev[this.#next[index]] = this.#prev[index]\n          }\n          this.#size--\n          this.#free.push(index)\n        }\n      }\n    }\n    if (this.#hasDisposeAfter && this.#disposed?.length) {\n      const dt = this.#disposed\n      let task: DisposeTask<K, V> | undefined\n      while ((task = dt?.shift())) {\n        this.#disposeAfter?.(...task)\n      }\n    }\n    return deleted\n  }\n\n  /**\n   * Clear the cache entirely, throwing away all values.\n   */\n  clear() {\n    for (const index of this.#rindexes({ allowStale: true })) {\n      const v = this.#valList[index]\n      if (this.#isBackgroundFetch(v)) {\n        v.__abortController.abort(new Error('deleted'))\n      } else {\n        const k = this.#keyList[index]\n        if (this.#hasDispose) {\n          this.#dispose?.(v as V, k as K, 'delete')\n        }\n        if (this.#hasDisposeAfter) {\n          this.#disposed?.push([v as V, k as K, 'delete'])\n        }\n      }\n    }\n\n    this.#keyMap.clear()\n    this.#valList.fill(undefined)\n    this.#keyList.fill(undefined)\n    if (this.#ttls && this.#starts) {\n      this.#ttls.fill(0)\n      this.#starts.fill(0)\n    }\n    if (this.#sizes) {\n      this.#sizes.fill(0)\n    }\n    this.#head = 0 as Index\n    this.#tail = 0 as Index\n    this.#free.length = 0\n    this.#calculatedSize = 0\n    this.#size = 0\n    if (this.#hasDisposeAfter && this.#disposed) {\n      const dt = this.#disposed\n      let task: DisposeTask<K, V> | undefined\n      while ((task = dt?.shift())) {\n        this.#disposeAfter?.(...task)\n      }\n    }\n  }\n}\n\nexport default LRUCache\n", "import LRUCache from './index.js'\n\nexport = Object.assign(LRUCache, { default: LRUCache, LRUCache })\n"],
  "mappings": "0fAMA,IAAMA,EACJ,OAAO,aAAgB,UACvB,aACA,OAAO,YAAY,KAAQ,WACvB,YACA,KAEAC,EAAS,IAAI,IAKbC,EAAc,CAClBC,EACAC,EACAC,EACAC,IACE,CACF,OAAO,SAAY,UACnB,SACA,OAAO,QAAQ,aAAgB,WAC3B,QAAQ,YAAYH,EAAKC,EAAMC,EAAMC,CAAE,EACvC,QAAQ,MAAM,IAAID,MAASD,MAASD,GAAK,CAC/C,EAEMI,EAAcF,GAAiB,CAACJ,EAAO,IAAII,CAAI,EAE/CG,EAAO,OAAO,MAAM,EAIpBC,EAAYC,GAChBA,GAAKA,IAAM,KAAK,MAAMA,CAAC,GAAKA,EAAI,GAAK,SAASA,CAAC,EAc3CC,EAAgBC,GACnBH,EAASG,CAAG,EAETA,GAAO,KAAK,IAAI,EAAG,CAAC,EACpB,WACAA,GAAO,KAAK,IAAI,EAAG,EAAE,EACrB,YACAA,GAAO,KAAK,IAAI,EAAG,EAAE,EACrB,YACAA,GAAO,OAAO,iBACdC,EACA,KATA,KAYAA,EAAN,cAAwB,KAAa,CACnC,YAAYC,EAAY,CACtB,MAAMA,CAAI,EACV,KAAK,KAAK,CAAC,CACb,KAIIC,EAAN,KAAW,CACT,KACA,OAGA,OAAO,OAAOH,EAAW,CACvB,IAAMI,EAAUL,EAAaC,CAAG,EAChC,GAAI,CAACI,EAAS,MAAO,CAAA,EACrBC,EAAAF,EAAMG,EAAgB,IACtB,IAAMC,EAAI,IAAIJ,EAAMH,EAAKI,CAAO,EAChC,OAAAC,EAAAF,EAAMG,EAAgB,IACfC,CACT,CACA,YACEP,EACAI,EAAyC,CAGzC,GAAI,CAACI,EAAAL,EAAMG,GACT,MAAM,IAAI,UAAU,yCAAyC,EAG/D,KAAK,KAAO,IAAIF,EAAQJ,CAAG,EAC3B,KAAK,OAAS,CAChB,CACA,KAAKF,EAAQ,CACX,KAAK,KAAK,KAAK,QAAQ,EAAIA,CAC7B,CACA,KAAG,CACD,OAAO,KAAK,KAAK,EAAE,KAAK,MAAM,CAChC,GA9BIW,EAANN,EAISG,EAAA,YAAPI,EAJID,EAIGH,EAAyB,IA0oBlC,IAAaK,EAAb,KAAqB,CAIVC,GACAC,GACAC,GACAC,GACAC,GAKT,IAKA,cAIA,aAIA,eAIA,eAIA,WAKA,eAIA,YAIA,aAIA,gBAIA,yBAIA,mBAIA,uBAIA,2BAIA,iBAGAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GACAC,GAEAC,GACAC,GACAC,GAWA,OAAO,sBAILC,EAAqB,CACrB,MAAO,CAEL,OAAQA,EAAEL,GACV,KAAMK,EAAEJ,GACR,MAAOI,EAAEN,GACT,OAAQM,EAAEf,GACV,QAASe,EAAEd,GACX,QAASc,EAAEb,GACX,KAAMa,EAAEZ,GACR,KAAMY,EAAEX,GACR,IAAI,MAAI,CACN,OAAOW,EAAEV,EACX,EACA,IAAI,MAAI,CACN,OAAOU,EAAET,EACX,EACA,KAAMS,EAAER,GAER,kBAAoBS,GAAWD,EAAEE,GAAmBD,CAAC,EACrD,gBAAiB,CACfE,EACAC,EACAC,EACAC,IAEAN,EAAEO,GACAJ,EACAC,EACAC,EACAC,CAAO,EAEX,WAAaF,GACXJ,EAAEQ,GAAYJ,CAAc,EAC9B,QAAUC,GACRL,EAAES,GAASJ,CAAO,EACpB,SAAWA,GACTL,EAAEU,GAAUL,CAAO,EACrB,QAAUD,GACRJ,EAAEW,GAASP,CAAc,EAE/B,CAOA,IAAI,KAAG,CACL,OAAO,KAAK1B,EACd,CAIA,IAAI,SAAO,CACT,OAAO,KAAKC,EACd,CAIA,IAAI,gBAAc,CAChB,OAAO,KAAKK,EACd,CAIA,IAAI,MAAI,CACN,OAAO,KAAKD,EACd,CAIA,IAAI,aAAW,CACb,OAAO,KAAKD,EACd,CAIA,IAAI,SAAO,CACT,OAAO,KAAKF,EACd,CAIA,IAAI,cAAY,CACd,OAAO,KAAKC,EACd,CAEA,YACEwB,EAAwD,CAExD,GAAM,CACJ,IAAAvC,EAAM,EACN,IAAA8C,EACA,cAAAC,EAAgB,EAChB,aAAAC,EACA,eAAAC,EACA,eAAAC,EACA,WAAAC,EACA,QAAAC,EACA,aAAAC,EACA,eAAAC,EACA,YAAAC,EACA,QAAAC,EAAU,EACV,aAAAC,EAAe,EACf,gBAAAC,EACA,YAAAC,EACA,yBAAAC,EACA,mBAAAC,EACA,2BAAAC,EACA,uBAAAC,EACA,iBAAAC,CAAgB,EACdzB,EAEJ,GAAIvC,IAAQ,GAAK,CAACH,EAASG,CAAG,EAC5B,MAAM,IAAI,UAAU,0CAA0C,EAGhE,IAAMiE,EAAYjE,EAAMD,EAAaC,CAAG,EAAI,MAC5C,GAAI,CAACiE,EACH,MAAM,IAAI,MAAM,sBAAwBjE,CAAG,EAO7C,GAJA,KAAKY,GAAOZ,EACZ,KAAKa,GAAW2C,EAChB,KAAK,aAAeC,GAAgB,KAAK5C,GACzC,KAAK,gBAAkB6C,EACnB,KAAK,gBAAiB,CACxB,GAAI,CAAC,KAAK7C,IAAY,CAAC,KAAK,aAC1B,MAAM,IAAI,UACR,oEAAoE,EAGxE,GAAI,OAAO,KAAK,iBAAoB,WAClC,MAAM,IAAI,UAAU,qCAAqC,EAI7D,GACE8C,IAAgB,QAChB,OAAOA,GAAgB,WAEvB,MAAM,IAAI,UACR,6CAA6C,EAsCjD,GAnCA,KAAK3C,GAAe2C,EACpB,KAAK3B,GAAkB,CAAC,CAAC2B,EAEzB,KAAKxC,GAAU,IAAI,IACnB,KAAKC,GAAW,IAAI,MAAMpB,CAAG,EAAE,KAAK,MAAS,EAC7C,KAAKqB,GAAW,IAAI,MAAMrB,CAAG,EAAE,KAAK,MAAS,EAC7C,KAAKsB,GAAQ,IAAI2C,EAAUjE,CAAG,EAC9B,KAAKuB,GAAQ,IAAI0C,EAAUjE,CAAG,EAC9B,KAAKwB,GAAQ,EACb,KAAKC,GAAQ,EACb,KAAKC,GAAQjB,EAAM,OAAOT,CAAG,EAC7B,KAAKiB,GAAQ,EACb,KAAKC,GAAkB,EAEnB,OAAOkC,GAAY,aACrB,KAAKtC,GAAWsC,GAEd,OAAOC,GAAiB,YAC1B,KAAKtC,GAAgBsC,EACrB,KAAK1B,GAAY,CAAA,IAEjB,KAAKZ,GAAgB,OACrB,KAAKY,GAAY,QAEnB,KAAKI,GAAc,CAAC,CAAC,KAAKjB,GAC1B,KAAKmB,GAAmB,CAAC,CAAC,KAAKlB,GAE/B,KAAK,eAAiB,CAAC,CAACuC,EACxB,KAAK,YAAc,CAAC,CAACC,EACrB,KAAK,yBAA2B,
CAAC,CAACK,EAClC,KAAK,2BAA6B,CAAC,CAACE,EACpC,KAAK,uBAAyB,CAAC,CAACC,EAChC,KAAK,iBAAmB,CAAC,CAACC,EAGtB,KAAK,eAAiB,EAAG,CAC3B,GAAI,KAAKnD,KAAa,GAChB,CAAChB,EAAS,KAAKgB,EAAQ,EACzB,MAAM,IAAI,UACR,iDAAiD,EAIvD,GAAI,CAAChB,EAAS,KAAK,YAAY,EAC7B,MAAM,IAAI,UACR,sDAAsD,EAG1D,KAAKqE,GAAuB,EAa9B,GAVA,KAAK,WAAa,CAAC,CAACf,EACpB,KAAK,mBAAqB,CAAC,CAACU,EAC5B,KAAK,eAAiB,CAAC,CAACZ,EACxB,KAAK,eAAiB,CAAC,CAACC,EACxB,KAAK,cACHrD,EAASkD,CAAa,GAAKA,IAAkB,EACzCA,EACA,EACN,KAAK,aAAe,CAAC,CAACC,EACtB,KAAK,IAAMF,GAAO,EACd,KAAK,IAAK,CACZ,GAAI,CAACjD,EAAS,KAAK,GAAG,EACpB,MAAM,IAAI,UACR,6CAA6C,EAGjD,KAAKsE,GAAsB,EAI7B,GAAI,KAAKvD,KAAS,GAAK,KAAK,MAAQ,GAAK,KAAKC,KAAa,EACzD,MAAM,IAAI,UACR,kDAAkD,EAGtD,GAAI,CAAC,KAAK,cAAgB,CAAC,KAAKD,IAAQ,CAAC,KAAKC,GAAU,CACtD,IAAMpB,EAAO,sBACTE,EAAWF,CAAI,IACjBJ,EAAO,IAAII,CAAI,EAIfH,EAFE,gGAEe,wBAAyBG,EAAMkB,CAAQ,GAG9D,CAKA,gBAAgByD,EAAM,CACpB,OAAO,KAAKjD,GAAQ,IAAIiD,CAAG,EAAI,IAAW,CAC5C,CAEAD,IAAsB,CACpB,IAAME,EAAO,IAAIpE,EAAU,KAAKW,EAAI,EAC9B0D,EAAS,IAAIrE,EAAU,KAAKW,EAAI,EACtC,KAAKkB,GAAQuC,EACb,KAAKxC,GAAUyC,EAEf,KAAKC,GAAc,CAACjC,EAAOQ,EAAK0B,EAAQpF,EAAK,IAAG,IAAM,CAGpD,GAFAkF,EAAOhC,CAAK,EAAIQ,IAAQ,EAAI0B,EAAQ,EACpCH,EAAK/B,CAAK,EAAIQ,EACVA,IAAQ,GAAK,KAAK,aAAc,CAClC,IAAM2B,EAAI,WAAW,IAAK,CACpB,KAAK5B,GAASP,CAAK,GACrB,KAAK,OAAO,KAAKlB,GAASkB,CAAK,CAAM,CAEzC,EAAGQ,EAAM,CAAC,EAGN2B,EAAE,OACJA,EAAE,MAAK,EAIb,EAEA,KAAKC,GAAiBpC,GAAQ,CAC5BgC,EAAOhC,CAAK,EAAI+B,EAAK/B,CAAK,IAAM,EAAIlD,EAAK,IAAG,EAAK,CACnD,EAEA,KAAKuF,GAAa,CAACC,EAAQtC,IAAS,CAClC,GAAI+B,EAAK/B,CAAK,EAAG,CACf,IAAMQ,EAAMuB,EAAK/B,CAAK,EAChBkC,EAAQF,EAAOhC,CAAK,EAC1BsC,EAAO,IAAM9B,EACb8B,EAAO,MAAQJ,EACfI,EAAO,IAAMC,GAAaC,EAAM,EAChCF,EAAO,aAAeA,EAAO,IAAM9B,EAAM0B,EAE7C,EAIA,IAAIK,EAAY,EACVC,EAAS,IAAK,CAClB,IAAM,EAAI1F,EAAK,IAAG,EAClB,GAAI,KAAK,cAAgB,EAAG,CAC1ByF,EAAY,EACZ,IAAMJ,EAAI,WACR,IAAOI,EAAY,EACnB,KAAK,aAAa,EAIhBJ,EAAE,OACJA,EAAE,MAAK,EAIX,OAAO,CACT,EAEA,KAAK,gBAAkBL,GAAM,CAC3B,IAAM9B,EAAQ,KAAKnB,GAAQ,IAAIiD,CAAG,EAClC,OAAI9B,IAAU,OACL,EAEF+B,EAAK/B,CAAK,IAAM,GAAKgC,EAAOhC,CAAK,IAAM,EAC1C,IACAgC,EAAOhC,CAAK,EAAI+B,EAAK/B,CAAK,GAAKuC,GAAaC,EAAM,EACxD,EAEA,KAAKjC,GAAWP,GAEZ+B,EAAK/B,CAAK,IAAM,GAChBgC,EAAOhC,CAAK,IAAM,IACjBuC,GAAaC,EAAM,GAAMR,EAAOhC,CAAK,EAAI+B,EAAK/B,CAAK,CAG1D,CAGAoC,GAAyC,IAAK,CAAE,EAChDC,GACE,IAAK,CAAE,EACTJ,GAMY,IAAK,CAAE,EAGnB1B,GAAsC,IAAM,GAE5CqB,IAAuB,CACrB,IAAMa,EAAQ,IAAI9E,EAAU,KAAKW,EAAI,EACrC,KAAKM,GAAkB,EACvB,KAAKU,GAASmD,EACd,KAAKC,GAAkB1C,GAAQ,CAC7B,KAAKpB,IAAmB6D,EAAMzC,CAAK,EACnCyC,EAAMzC,CAAK,EAAI,CACjB,EACA,KAAK2C,GAAe,CAAC5C,EAAG6C,EAAGhF,EAAMwD,IAAmB,CAGlD,GAAI,KAAKtB,GAAmB8C,CAAC,EAC3B,MAAO,GAET,GAAI,CAACrF,EAASK,CAAI,EAChB,GAAIwD,EAAiB,CACnB,GAAI,OAAOA,GAAoB,WAC7B,MAAM,IAAI,UAAU,oCAAoC,EAG1D,GADAxD,EAAOwD,EAAgBwB,EAAG7C,CAAC,EACvB,CAACxC,EAASK,CAAI,EAChB,MAAM,IAAI,UACR,0DAA0D,MAI9D,OAAM,IAAI,UACR,2HAEwB,EAI9B,OAAOA,CACT,EACA,KAAKiF,GAAe,CAClB7C,EACApC,EACA0E,IACE,CAEF,GADAG,EAAMzC,CAAK,EAAIpC,EACX,KAAKW,GAAU,CACjB,IAAM2C,EAAU,KAAK3C,GAAWkE,EAAMzC,CAAK,EAC3C,KAAO,KAAKpB,GAAkBsC,GAC5B,KAAK4B,GAAO,EAAI,EAGpB,KAAKlE,IAAmB6D,EAAMzC,CAAK,EAC/BsC,IACFA,EAAO,UAAY1E,EACnB0E,EAAO,oBAAsB,KAAK1D,GAEtC,CACF,CAEA8D,GAA0CK,GAAK,CAAE,EACjDF,GAIY,CAACE,EAAIC,EAAIC,IAAO,CAAE,EAC9BN,GAKqB,CACnBO,EACAC,EACAvF,EACAwD,IACE,CACF,GAAIxD,GAAQwD,EACV,MAAM,IAAI,UACR,kEAAkE,EAGtE,MAAO,EACT,EAEA,CAACf,GAAS,CAAE,WAAAQ,EAAa,KAAK,UAAU,EAAK,CAAA,EAAE,CAC7C,GAAI,KAAKlC,GACP,QAASyE,EAAI,KAAKjE,GACZ,GAAC,KAAKkE,GAAcD,CAAC,KAGrBvC,GAAc,CAAC,KAAKN,GAAS6C,CAAC,KAChC,MAAMA,GAEJA,IAAM,KAAKlE,MAGbkE,EAAI,KAAKnE,GAAMmE,CAAC,CAIxB,CAEA,CAAC9C,GAAU,CAAE,WAAAO,EAAa,KAAK,UAAU,EAAK,CAAA,EAAE,CAC9C,GAAI,KAAKlC,GACP,QAASyE,EAAI,K
AAKlE,GACZ,GAAC,KAAKmE,GAAcD,CAAC,KAGrBvC,GAAc,CAAC,KAAKN,GAAS6C,CAAC,KAChC,MAAMA,GAEJA,IAAM,KAAKjE,MAGbiE,EAAI,KAAKpE,GAAMoE,CAAC,CAIxB,CAEAC,GAAcrD,EAAY,CACxB,OACEA,IAAU,QACV,KAAKnB,GAAQ,IAAI,KAAKC,GAASkB,CAAK,CAAM,IAAMA,CAEpD,CAMA,CAAC,SAAO,CACN,QAAWoD,KAAK,KAAK/C,GAAQ,EAEzB,KAAKtB,GAASqE,CAAC,IAAM,QACrB,KAAKtE,GAASsE,CAAC,IAAM,QACrB,CAAC,KAAKtD,GAAmB,KAAKf,GAASqE,CAAC,CAAC,IAEzC,KAAM,CAAC,KAAKtE,GAASsE,CAAC,EAAG,KAAKrE,GAASqE,CAAC,CAAC,EAG/C,CAQA,CAAC,UAAQ,CACP,QAAWA,KAAK,KAAK9C,GAAS,EAE1B,KAAKvB,GAASqE,CAAC,IAAM,QACrB,KAAKtE,GAASsE,CAAC,IAAM,QACrB,CAAC,KAAKtD,GAAmB,KAAKf,GAASqE,CAAC,CAAC,IAEzC,KAAM,CAAC,KAAKtE,GAASsE,CAAC,EAAG,KAAKrE,GAASqE,CAAC,CAAC,EAG/C,CAMA,CAAC,MAAI,CACH,QAAWA,KAAK,KAAK/C,GAAQ,EAAI,CAC/B,IAAMN,EAAI,KAAKjB,GAASsE,CAAC,EAEvBrD,IAAM,QACN,CAAC,KAAKD,GAAmB,KAAKf,GAASqE,CAAC,CAAC,IAEzC,MAAMrD,GAGZ,CAQA,CAAC,OAAK,CACJ,QAAWqD,KAAK,KAAK9C,GAAS,EAAI,CAChC,IAAMP,EAAI,KAAKjB,GAASsE,CAAC,EAEvBrD,IAAM,QACN,CAAC,KAAKD,GAAmB,KAAKf,GAASqE,CAAC,CAAC,IAEzC,MAAMrD,GAGZ,CAMA,CAAC,QAAM,CACL,QAAWqD,KAAK,KAAK/C,GAAQ,EACjB,KAAKtB,GAASqE,CAAC,IAEjB,QACN,CAAC,KAAKtD,GAAmB,KAAKf,GAASqE,CAAC,CAAC,IAEzC,MAAM,KAAKrE,GAASqE,CAAC,EAG3B,CAQA,CAAC,SAAO,CACN,QAAWA,KAAK,KAAK9C,GAAS,EAClB,KAAKvB,GAASqE,CAAC,IAEjB,QACN,CAAC,KAAKtD,GAAmB,KAAKf,GAASqE,CAAC,CAAC,IAEzC,MAAM,KAAKrE,GAASqE,CAAC,EAG3B,CAMA,CAAC,OAAO,QAAQ,GAAC,CACf,OAAO,KAAK,QAAO,CACrB,CAMA,KACEhG,EACAkG,EAA4C,CAAA,EAAE,CAE9C,QAAW,KAAK,KAAKjD,GAAQ,EAAI,CAC/B,IAAMuC,EAAI,KAAK7D,GAAS,CAAC,EACnBwE,EAAQ,KAAKzD,GAAmB8C,CAAC,EACnCA,EAAE,qBACFA,EACJ,GAAIW,IAAU,QACVnG,EAAGmG,EAAO,KAAKzE,GAAS,CAAC,EAAQ,IAAI,EACvC,OAAO,KAAK,IAAI,KAAKA,GAAS,CAAC,EAAQwE,CAAU,EAGvD,CAQA,QACElG,EACAoG,EAAa,KAAI,CAEjB,QAAW,KAAK,KAAKnD,GAAQ,EAAI,CAC/B,IAAMuC,EAAI,KAAK7D,GAAS,CAAC,EACnBwE,EAAQ,KAAKzD,GAAmB8C,CAAC,EACnCA,EAAE,qBACFA,EACAW,IAAU,QACdnG,EAAG,KAAKoG,EAAOD,EAAO,KAAKzE,GAAS,CAAC,EAAQ,IAAI,EAErD,CAMA,SACE1B,EACAoG,EAAa,KAAI,CAEjB,QAAW,KAAK,KAAKlD,GAAS,EAAI,CAChC,IAAMsC,EAAI,KAAK7D,GAAS,CAAC,EACnBwE,EAAQ,KAAKzD,GAAmB8C,CAAC,EACnCA,EAAE,qBACFA,EACAW,IAAU,QACdnG,EAAG,KAAKoG,EAAOD,EAAO,KAAKzE,GAAS,CAAC,EAAQ,IAAI,EAErD,CAMA,YAAU,CACR,IAAI2E,EAAU,GACd,QAAWL,KAAK,KAAK9C,GAAU,CAAE,WAAY,EAAI,CAAE,EAC7C,KAAKC,GAAS6C,CAAC,IACjB,KAAK,OAAO,KAAKtE,GAASsE,CAAC,CAAM,EACjCK,EAAU,IAGd,OAAOA,CACT,CAMA,MAAI,CACF,IAAMC,EAAgC,CAAA,EACtC,QAAWN,KAAK,KAAK/C,GAAS,CAAE,WAAY,EAAI,CAAE,EAAG,CACnD,IAAMyB,EAAM,KAAKhD,GAASsE,CAAC,EACrBR,EAAI,KAAK7D,GAASqE,CAAC,EACnBG,EAAuB,KAAKzD,GAAmB8C,CAAC,EAClDA,EAAE,qBACFA,EACJ,GAAIW,IAAU,QAAazB,IAAQ,OAAW,SAC9C,IAAM6B,EAA2B,CAAE,MAAAJ,CAAK,EACxC,GAAI,KAAK/D,IAAS,KAAKD,GAAS,CAC9BoE,EAAM,IAAM,KAAKnE,GAAM4D,CAAC,EAGxB,IAAMQ,EAAM9G,EAAK,IAAG,EAAK,KAAKyC,GAAQ6D,CAAC,EACvCO,EAAM,MAAQ,KAAK,MAAM,KAAK,IAAG,EAAKC,CAAG,EAEvC,KAAKtE,KACPqE,EAAM,KAAO,KAAKrE,GAAO8D,CAAC,GAE5BM,EAAI,QAAQ,CAAC5B,EAAK6B,CAAK,CAAC,EAE1B,OAAOD,CACT,CAOA,KAAKA,EAA6B,CAChC,KAAK,MAAK,EACV,OAAW,CAAC5B,EAAK6B,CAAK,IAAKD,EAAK,CAC9B,GAAIC,EAAM,MAAO,CAOf,IAAMC,EAAM,KAAK,IAAG,EAAKD,EAAM,MAC/BA,EAAM,MAAQ7G,EAAK,IAAG,EAAK8G,EAE7B,KAAK,IAAI9B,EAAK6B,EAAM,MAAOA,CAAK,EAEpC,CAKA,IACE5D,EACA6C,EACAiB,EAA4C,CAAA,EAAE,CAE9C,GAAM,CACJ,IAAArD,EAAM,KAAK,IACX,MAAA0B,EACA,eAAAlB,EAAiB,KAAK,eACtB,gBAAAI,EAAkB,KAAK,gBACvB,OAAAkB,CAAM,EACJuB,EACA,CAAE,YAAA5C,EAAc,KAAK,WAAW,EAAK4C,EAEnCjG,EAAO,KAAK+E,GAChB5C,EACA6C,EACAiB,EAAW,MAAQ,EACnBzC,CAAe,EAIjB,GAAI,KAAK,cAAgBxD,EAAO,KAAK,aACnC,OAAI0E,IACFA,EAAO,IAAM,OACbA,EAAO,qBAAuB,IAGhC,KAAK,OAAOvC,CAAC,EACN,KAET,IAAIC,EAAQ,KAAKrB,KAAU,EAAI,OAAY,KAAKE,GAAQ,IAAIkB,CAAC,EAC7D,GAAIC,IAAU,OAEZA,EACE,KAAKrB,KAAU,EACX,KAAKQ,GACL,KAAKC,GAAM,SAAW,EACtB,KAAKA,GAAM,IAAG,EACd,KAAKT,KAAU,KAA
KL,GACpB,KAAKwE,GAAO,EAAK,EACjB,KAAKnE,GAEX,KAAKG,GAASkB,CAAK,EAAID,EACvB,KAAKhB,GAASiB,CAAK,EAAI4C,EACvB,KAAK/D,GAAQ,IAAIkB,EAAGC,CAAK,EACzB,KAAKhB,GAAM,KAAKG,EAAK,EAAIa,EACzB,KAAKf,GAAMe,CAAK,EAAI,KAAKb,GACzB,KAAKA,GAAQa,EACb,KAAKrB,KACL,KAAKkE,GAAa7C,EAAOpC,EAAM0E,CAAM,EACjCA,IAAQA,EAAO,IAAM,OACzBrB,EAAc,OACT,CAEL,KAAKb,GAAYJ,CAAK,EACtB,IAAM8D,EAAS,KAAK/E,GAASiB,CAAK,EAClC,GAAI4C,IAAMkB,GAcR,GAbI,KAAKpE,IAAmB,KAAKI,GAAmBgE,CAAM,EACxDA,EAAO,kBAAkB,MAAM,IAAI,MAAM,UAAU,CAAC,EAC1C9C,IACN,KAAKvB,IACP,KAAKjB,KAAWsF,EAAa/D,EAAG,KAAK,EAEnC,KAAKJ,IACP,KAAKN,IAAW,KAAK,CAACyE,EAAa/D,EAAG,KAAK,CAAC,GAGhD,KAAK2C,GAAgB1C,CAAK,EAC1B,KAAK6C,GAAa7C,EAAOpC,EAAM0E,CAAM,EACrC,KAAKvD,GAASiB,CAAK,EAAI4C,EACnBN,EAAQ,CACVA,EAAO,IAAM,UACb,IAAMyB,EACJD,GAAU,KAAKhE,GAAmBgE,CAAM,EACpCA,EAAO,qBACPA,EACFC,IAAa,SAAWzB,EAAO,SAAWyB,SAEvCzB,IACTA,EAAO,IAAM,UAYjB,GATI9B,IAAQ,GAAK,CAAC,KAAKhB,IACrB,KAAKqC,GAAsB,EAEzB,KAAKrC,KACFyB,GACH,KAAKgB,GAAYjC,EAAOQ,EAAK0B,CAAK,EAEhCI,GAAQ,KAAKD,GAAWC,EAAQtC,CAAK,GAEvC,CAACgB,GAAkB,KAAKrB,IAAoB,KAAKN,GAAW,CAC9D,IAAM2E,EAAK,KAAK3E,GACZ4E,EACJ,KAAQA,EAAOD,GAAI,MAAK,GACtB,KAAKvF,KAAgB,GAAGwF,CAAI,EAGhC,OAAO,IACT,CAMA,KAAG,CACD,GAAI,CACF,KAAO,KAAKtF,IAAO,CACjB,IAAMuF,EAAM,KAAKnF,GAAS,KAAKG,EAAK,EAEpC,GADA,KAAK4D,GAAO,EAAI,EACZ,KAAKhD,GAAmBoE,CAAG,GAC7B,GAAIA,EAAI,qBACN,OAAOA,EAAI,6BAEJA,IAAQ,OACjB,OAAOA,WAIX,GAAI,KAAKvE,IAAoB,KAAKN,GAAW,CAC3C,IAAM2E,EAAK,KAAK3E,GACZ4E,EACJ,KAAQA,EAAOD,GAAI,MAAK,GACtB,KAAKvF,KAAgB,GAAGwF,CAAI,GAIpC,CAEAnB,GAAOqB,EAAa,CAClB,IAAMC,EAAO,KAAKlF,GACZa,EAAI,KAAKjB,GAASsF,CAAI,EACtBxB,EAAI,KAAK7D,GAASqF,CAAI,EAC5B,OAAI,KAAK1E,IAAmB,KAAKI,GAAmB8C,CAAC,EACnDA,EAAE,kBAAkB,MAAM,IAAI,MAAM,SAAS,CAAC,GACrC,KAAKnD,IAAe,KAAKE,MAC9B,KAAKF,IACP,KAAKjB,KAAWoE,EAAG7C,EAAG,OAAO,EAE3B,KAAKJ,IACP,KAAKN,IAAW,KAAK,CAACuD,EAAG7C,EAAG,OAAO,CAAC,GAGxC,KAAK2C,GAAgB0B,CAAI,EAErBD,IACF,KAAKrF,GAASsF,CAAI,EAAI,OACtB,KAAKrF,GAASqF,CAAI,EAAI,OACtB,KAAKhF,GAAM,KAAKgF,CAAI,GAElB,KAAKzF,KAAU,GACjB,KAAKO,GAAQ,KAAKC,GAAQ,EAC1B,KAAKC,GAAM,OAAS,GAEpB,KAAKF,GAAQ,KAAKF,GAAMoF,CAAI,EAE9B,KAAKvF,GAAQ,OAAOkB,CAAC,EACrB,KAAKpB,KACEyF,CACT,CAUA,IAAIrE,EAAMsE,EAA4C,CAAA,EAAE,CACtD,GAAM,CAAE,eAAAzD,EAAiB,KAAK,eAAgB,OAAA0B,CAAM,EAClD+B,EACIrE,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAChC,GAAIC,IAAU,OAAW,CACvB,IAAM4C,EAAI,KAAK7D,GAASiB,CAAK,EAC7B,GACE,KAAKF,GAAmB8C,CAAC,GACzBA,EAAE,uBAAyB,OAE3B,MAAO,GAET,GAAK,KAAKrC,GAASP,CAAK,EASbsC,IACTA,EAAO,IAAM,QACb,KAAKD,GAAWC,EAAQtC,CAAK,OAV7B,QAAIY,GACF,KAAKwB,GAAepC,CAAK,EAEvBsC,IACFA,EAAO,IAAM,MACb,KAAKD,GAAWC,EAAQtC,CAAK,GAExB,QAKAsC,IACTA,EAAO,IAAM,QAEf,MAAO,EACT,CASA,KAAKvC,EAAMuE,EAA8C,CAAA,EAAE,CACzD,GAAM,CAAE,WAAAzD,EAAa,KAAK,UAAU,EAAKyD,EACnCtE,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAChC,GACEC,IAAU,SACTa,GAAc,CAAC,KAAKN,GAASP,CAAK,GACnC,CACA,IAAM4C,EAAI,KAAK7D,GAASiB,CAAK,EAE7B,OAAO,KAAKF,GAAmB8C,CAAC,EAAIA,EAAE,qBAAuBA,EAEjE,CAEAzC,GACEJ,EACAC,EACAC,EACAC,EAAY,CAEZ,IAAM0C,EAAI5C,IAAU,OAAY,OAAY,KAAKjB,GAASiB,CAAK,EAC/D,GAAI,KAAKF,GAAmB8C,CAAC,EAC3B,OAAOA,EAGT,IAAM2B,EAAK,IAAI,gBACT,CAAE,OAAAC,CAAM,EAAKvE,EAEnBuE,GAAQ,iBAAiB,QAAS,IAAMD,EAAG,MAAMC,EAAO,MAAM,EAAG,CAC/D,OAAQD,EAAG,OACZ,EAED,IAAME,EAAY,CAChB,OAAQF,EAAG,OACX,QAAAtE,EACA,QAAAC,GAGIwE,EAAK,CACT9B,EACA+B,EAAc,KACU,CACxB,GAAM,CAAE,QAAAC,CAAO,EAAKL,EAAG,OACjBM,EAAc5E,EAAQ,kBAAoB2C,IAAM,OAUtD,GATI3C,EAAQ,SACN2E,GAAW,CAACD,GACd1E,EAAQ,OAAO,aAAe,GAC9BA,EAAQ,OAAO,WAAasE,EAAG,OAAO,OAClCM,IAAa5E,EAAQ,OAAO,kBAAoB,KAEpDA,EAAQ,OAAO,cAAgB,IAG/B2E,GAAW,CAACC,GAAe,CAACF,EAC9B,OAAOG,EAAUP,EAAG,OAAO,MAAM,EAGnC,IAAMQ,EAAKlF,EACX,OAAI,KAAKd,GAASiB,CAAc,IAAMH,IAChC+C,IAAM,OACJmC,EAAG,qBACL,KAAKhG,GAASiB,CAAc,EAAI+E,EAAG,qBAEnC,KAAK,OAAO
hF,CAAC,GAGXE,EAAQ,SAAQA,EAAQ,OAAO,aAAe,IAClD,KAAK,IAAIF,EAAG6C,EAAG6B,EAAU,OAAO,IAG7B7B,CACT,EAEMoC,EAAMC,IACNhF,EAAQ,SACVA,EAAQ,OAAO,cAAgB,GAC/BA,EAAQ,OAAO,WAAagF,GAEvBH,EAAUG,CAAE,GAGfH,EAAaG,GAA0B,CAC3C,GAAM,CAAE,QAAAL,CAAO,EAAKL,EAAG,OACjBW,EACJN,GAAW3E,EAAQ,uBACfY,EACJqE,GAAqBjF,EAAQ,2BACzBkF,EAAWtE,GAAcZ,EAAQ,yBACjC8E,EAAKlF,EAeX,GAdI,KAAKd,GAASiB,CAAc,IAAMH,IAGxB,CAACsF,GAAYJ,EAAG,uBAAyB,OAEnD,KAAK,OAAOhF,CAAC,EACHmF,IAKV,KAAKnG,GAASiB,CAAc,EAAI+E,EAAG,uBAGnClE,EACF,OAAIZ,EAAQ,QAAU8E,EAAG,uBAAyB,SAChD9E,EAAQ,OAAO,cAAgB,IAE1B8E,EAAG,qBACL,GAAIA,EAAG,aAAeA,EAC3B,MAAME,CAEV,EAEMG,EAAQ,CACZC,EACAC,IACE,CACF,IAAMC,EAAM,KAAK7G,KAAeqB,EAAG6C,EAAG6B,CAAS,EAC3Cc,GAAOA,aAAe,SACxBA,EAAI,KAAK3C,GAAKyC,EAAIzC,CAAC,EAAG0C,CAAG,EAK3Bf,EAAG,OAAO,iBAAiB,QAAS,IAAK,EAErC,CAACtE,EAAQ,kBACTA,EAAQ,0BAERoF,EAAG,EAECpF,EAAQ,yBACVoF,EAAMzC,GAAK8B,EAAG9B,EAAG,EAAI,GAG3B,CAAC,CACH,EAEI3C,EAAQ,SAAQA,EAAQ,OAAO,gBAAkB,IACrD,IAAMJ,EAAI,IAAI,QAAQuF,CAAK,EAAE,KAAKV,EAAIM,CAAE,EAClCD,EAAK,OAAO,OAAOlF,EAAG,CAC1B,kBAAmB0E,EACnB,qBAAsB3B,EACtB,WAAY,OACb,EAED,OAAI5C,IAAU,QAEZ,KAAK,IAAID,EAAGgF,EAAI,CAAE,GAAGN,EAAU,QAAS,OAAQ,MAAS,CAAE,EAC3DzE,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,GAE1B,KAAKhB,GAASiB,CAAK,EAAI+E,EAElBA,CACT,CAEAjF,GAAmBD,EAAM,CACvB,GAAI,CAAC,KAAKH,GAAiB,MAAO,GAClC,IAAM8F,EAAI3F,EACV,MACE,CAAC,CAAC2F,GACFA,aAAa,SACbA,EAAE,eAAe,sBAAsB,GACvCA,EAAE,6BAA6B,eAEnC,CAwCA,MAAM,MACJzF,EACA0F,EAAgD,CAAA,EAAE,CAElD,GAAM,CAEJ,WAAA5E,EAAa,KAAK,WAClB,eAAAF,EAAiB,KAAK,eACtB,mBAAAY,EAAqB,KAAK,mBAE1B,IAAAf,EAAM,KAAK,IACX,eAAAQ,EAAiB,KAAK,eACtB,KAAApD,EAAO,EACP,gBAAAwD,EAAkB,KAAK,gBACvB,YAAAH,EAAc,KAAK,YAEnB,yBAAAK,EAA2B,KAAK,yBAChC,2BAAAE,EAA6B,KAAK,2BAClC,iBAAAE,EAAmB,KAAK,iBACxB,uBAAAD,EAAyB,KAAK,uBAC9B,QAAAvB,EACA,aAAAwF,EAAe,GACf,OAAApD,EACA,OAAAkC,CAAM,EACJiB,EAEJ,GAAI,CAAC,KAAK/F,GACR,OAAI4C,IAAQA,EAAO,MAAQ,OACpB,KAAK,IAAIvC,EAAG,CACjB,WAAAc,EACA,eAAAF,EACA,mBAAAY,EACA,OAAAe,EACD,EAGH,IAAMrC,EAAU,CACd,WAAAY,EACA,eAAAF,EACA,mBAAAY,EACA,IAAAf,EACA,eAAAQ,EACA,KAAApD,EACA,gBAAAwD,EACA,YAAAH,EACA,yBAAAK,EACA,2BAAAE,EACA,uBAAAC,EACA,iBAAAC,EACA,OAAAY,EACA,OAAAkC,GAGExE,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAC9B,GAAIC,IAAU,OAAW,CACnBsC,IAAQA,EAAO,MAAQ,QAC3B,IAAMzC,EAAI,KAAKM,GAAiBJ,EAAGC,EAAOC,EAASC,CAAO,EAC1D,OAAQL,EAAE,WAAaA,MAClB,CAEL,IAAM+C,EAAI,KAAK7D,GAASiB,CAAK,EAC7B,GAAI,KAAKF,GAAmB8C,CAAC,EAAG,CAC9B,IAAM+C,EACJ9E,GAAc+B,EAAE,uBAAyB,OAC3C,OAAIN,IACFA,EAAO,MAAQ,WACXqD,IAAOrD,EAAO,cAAgB,KAE7BqD,EAAQ/C,EAAE,qBAAwBA,EAAE,WAAaA,EAK1D,IAAMgD,EAAU,KAAKrF,GAASP,CAAK,EACnC,GAAI,CAAC0F,GAAgB,CAACE,EACpB,OAAItD,IAAQA,EAAO,MAAQ,OAC3B,KAAKlC,GAAYJ,CAAK,EAClBW,GACF,KAAKyB,GAAepC,CAAK,EAEvBsC,GAAQ,KAAKD,GAAWC,EAAQtC,CAAK,EAClC4C,EAKT,IAAM/C,EAAI,KAAKM,GAAiBJ,EAAGC,EAAOC,EAASC,CAAO,EAEpD2F,EADWhG,EAAE,uBAAyB,QACfgB,EAC7B,OAAIyB,IACFA,EAAO,MAAQsD,EAAU,QAAU,UAC/BC,GAAYD,IAAStD,EAAO,cAAgB,KAE3CuD,EAAWhG,EAAE,qBAAwBA,EAAE,WAAaA,EAE/D,CAQA,IAAIE,EAAMuD,EAA4C,CAAA,EAAE,CACtD,GAAM,CACJ,WAAAzC,EAAa,KAAK,WAClB,eAAAF,EAAiB,KAAK,eACtB,mBAAAY,EAAqB,KAAK,mBAC1B,OAAAe,CAAM,EACJgB,EACEtD,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAChC,GAAIC,IAAU,OAAW,CACvB,IAAMuD,EAAQ,KAAKxE,GAASiB,CAAK,EAC3B8F,EAAW,KAAKhG,GAAmByD,CAAK,EAE9C,OADIjB,GAAQ,KAAKD,GAAWC,EAAQtC,CAAK,EACrC,KAAKO,GAASP,CAAK,GACjBsC,IAAQA,EAAO,IAAM,SAEpBwD,GAQDxD,GACAzB,GACA0C,EAAM,uBAAyB,SAE/BjB,EAAO,cAAgB,IAElBzB,EAAa0C,EAAM,qBAAuB,SAb5ChC,GACH,KAAK,OAAOxB,CAAC,EAEXuC,GAAUzB,IAAYyB,EAAO,cAAgB,IAC1CzB,EAAa0C,EAAQ,UAY1BjB,IAAQA,EAAO,IAAM,OAMrBwD,EACKvC,EAAM,sBAEf,KAAKnD,GAAYJ,CAAK,EAClBW,GACF,KAAKyB,GAAepC,CAAK,EAEpBuD,SAEAjB,IACTA,EAAO,IAAM,OAEjB,CAEAyD,GAASlG,EAAUrC,EAAQ,CACzB,KAAKyB,GAAMzB,CAAC,EAAIqC,EAChB,KAAKb,G
AAMa,CAAC,EAAIrC,CAClB,CAEA4C,GAAYJ,EAAY,CASlBA,IAAU,KAAKb,KACba,IAAU,KAAKd,GACjB,KAAKA,GAAQ,KAAKF,GAAMgB,CAAK,EAE7B,KAAK+F,GACH,KAAK9G,GAAMe,CAAK,EAChB,KAAKhB,GAAMgB,CAAK,CAAU,EAG9B,KAAK+F,GAAS,KAAK5G,GAAOa,CAAK,EAC/B,KAAKb,GAAQa,EAEjB,CAMA,OAAOD,EAAI,CACT,IAAI0D,EAAU,GACd,GAAI,KAAK9E,KAAU,EAAG,CACpB,IAAMqB,EAAQ,KAAKnB,GAAQ,IAAIkB,CAAC,EAChC,GAAIC,IAAU,OAEZ,GADAyD,EAAU,GACN,KAAK9E,KAAU,EACjB,KAAK,MAAK,MACL,CACL,KAAK+D,GAAgB1C,CAAK,EAC1B,IAAM4C,EAAI,KAAK7D,GAASiB,CAAK,EACzB,KAAKF,GAAmB8C,CAAC,EAC3BA,EAAE,kBAAkB,MAAM,IAAI,MAAM,SAAS,CAAC,GACrC,KAAKnD,IAAe,KAAKE,MAC9B,KAAKF,IACP,KAAKjB,KAAWoE,EAAQ7C,EAAG,QAAQ,EAEjC,KAAKJ,IACP,KAAKN,IAAW,KAAK,CAACuD,EAAQ7C,EAAG,QAAQ,CAAC,GAG9C,KAAKlB,GAAQ,OAAOkB,CAAC,EACrB,KAAKjB,GAASkB,CAAK,EAAI,OACvB,KAAKjB,GAASiB,CAAK,EAAI,OACnBA,IAAU,KAAKb,GACjB,KAAKA,GAAQ,KAAKF,GAAMe,CAAK,EACpBA,IAAU,KAAKd,GACxB,KAAKA,GAAQ,KAAKF,GAAMgB,CAAK,GAE7B,KAAKhB,GAAM,KAAKC,GAAMe,CAAK,CAAC,EAAI,KAAKhB,GAAMgB,CAAK,EAChD,KAAKf,GAAM,KAAKD,GAAMgB,CAAK,CAAC,EAAI,KAAKf,GAAMe,CAAK,GAElD,KAAKrB,KACL,KAAKS,GAAM,KAAKY,CAAK,GAI3B,GAAI,KAAKL,IAAoB,KAAKN,IAAW,OAAQ,CACnD,IAAM2E,EAAK,KAAK3E,GACZ4E,EACJ,KAAQA,EAAOD,GAAI,MAAK,GACtB,KAAKvF,KAAgB,GAAGwF,CAAI,EAGhC,OAAOR,CACT,CAKA,OAAK,CACH,QAAWzD,KAAS,KAAKM,GAAU,CAAE,WAAY,EAAI,CAAE,EAAG,CACxD,IAAMsC,EAAI,KAAK7D,GAASiB,CAAK,EAC7B,GAAI,KAAKF,GAAmB8C,CAAC,EAC3BA,EAAE,kBAAkB,MAAM,IAAI,MAAM,SAAS,CAAC,MACzC,CACL,IAAM7C,EAAI,KAAKjB,GAASkB,CAAK,EACzB,KAAKP,IACP,KAAKjB,KAAWoE,EAAQ7C,EAAQ,QAAQ,EAEtC,KAAKJ,IACP,KAAKN,IAAW,KAAK,CAACuD,EAAQ7C,EAAQ,QAAQ,CAAC,GAoBrD,GAfA,KAAKlB,GAAQ,MAAK,EAClB,KAAKE,GAAS,KAAK,MAAS,EAC5B,KAAKD,GAAS,KAAK,MAAS,EACxB,KAAKU,IAAS,KAAKD,KACrB,KAAKC,GAAM,KAAK,CAAC,EACjB,KAAKD,GAAQ,KAAK,CAAC,GAEjB,KAAKD,IACP,KAAKA,GAAO,KAAK,CAAC,EAEpB,KAAKJ,GAAQ,EACb,KAAKC,GAAQ,EACb,KAAKC,GAAM,OAAS,EACpB,KAAKR,GAAkB,EACvB,KAAKD,GAAQ,EACT,KAAKgB,IAAoB,KAAKN,GAAW,CAC3C,IAAM2E,EAAK,KAAK3E,GACZ4E,EACJ,KAAQA,EAAOD,GAAI,MAAK,GACtB,KAAKvF,KAAgB,GAAGwF,CAAI,EAGlC,GAh9CF+B,EAAA,SAAA3H,EAm9CA2H,EAAA,QAAe3H,8FC3qEf4H,EAAAC,EAAA,GAAA,EAEA,OAAA,QAAS,OAAO,OAAOD,EAAA,QAAU,CAAE,QAASA,EAAA,QAAU,SAAAA,EAAA,OAAQ,CAAE",
  "names": ["perf", "warned", "emitWarning", "msg", "type", "code", "fn", "shouldWarn", "TYPE", "isPosInt", "n", "getUintArray", "max", "ZeroArray", "size", "_Stack", "HeapCls", "__privateSet", "_constructing", "s", "__privateGet", "Stack", "__privateAdd", "LRUCache", "#max", "#maxSize", "#dispose", "#disposeAfter", "#fetchMethod", "#size", "#calculatedSize", "#keyMap", "#keyList", "#valList", "#next", "#prev", "#head", "#tail", "#free", "#disposed", "#sizes", "#starts", "#ttls", "#hasDispose", "#hasFetchMethod", "#hasDisposeAfter", "c", "p", "#isBackgroundFetch", "k", "index", "options", "context", "#backgroundFetch", "#moveToTail", "#indexes", "#rindexes", "#isStale", "ttl", "ttlResolution", "ttlAutopurge", "updateAgeOnGet", "updateAgeOnHas", "allowStale", "dispose", "disposeAfter", "noDisposeOnSet", "noUpdateTTL", "maxSize", "maxEntrySize", "sizeCalculation", "fetchMethod", "noDeleteOnFetchRejection", "noDeleteOnStaleGet", "allowStaleOnFetchRejection", "allowStaleOnFetchAbort", "ignoreFetchAbort", "UintArray", "#initializeSizeTracking", "#initializeTTLTracking", "key", "ttls", "starts", "#setItemTTL", "start", "t", "#updateItemAge", "#statusTTL", "status", "cachedNow", "getNow", "sizes", "#removeItemSize", "#requireSize", "v", "#addItemSize", "#evict", "_i", "_s", "_st", "_k", "_v", "i", "#isValidIndex", "getOptions", "value", "thisp", "deleted", "arr", "entry", "age", "setOptions", "oldVal", "oldValue", "dt", "task", "val", "free", "head", "hasOptions", "peekOptions", "ac", "signal", "fetchOpts", "cb", "updateCache", "aborted", "ignoreAbort", "fetchFail", "bf", "eb", "er", "allowStaleAborted", "noDelete", "pcall", "res", "rej", "fmp", "b", "fetchOptions", "forceRefresh", "stale", "isStale", "staleVal", "fetching", "#connect", "exports", "index_js_1", "__importDefault"]
}
