diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml new file mode 100644 index 00000000..9414d715 --- /dev/null +++ b/.github/workflows/benchmark.yml @@ -0,0 +1,40 @@ +name: Benchmarks + +on: [push, pull_request] + +jobs: + build: + strategy: + matrix: + node-version: [16.x, 18.x, 19.x] + platform: + - os: ubuntu-latest + shell: bash + - os: macos-latest + shell: bash + - os: windows-latest + shell: bash + fail-fast: false + + runs-on: ${{ matrix.platform.os }} + defaults: + run: + shell: ${{ matrix.platform.shell }} + + steps: + - name: Checkout Repository + uses: actions/checkout@v3 + + - name: Use Nodejs ${{ matrix.node-version }} + uses: actions/setup-node@v3 + with: + node-version: ${{ matrix.node-version }} + + - name: Use latest npm + run: npm i -g npm@latest + + - name: Install dependencies + run: npm install + + - name: Run Benchmarks + run: npm run benchmark diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 863b4dd8..1c22fcc2 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -6,7 +6,7 @@ jobs: build: strategy: matrix: - node-version: [14.x, 16.x, 18.x, 19.x] + node-version: [16.x, 18.x, 20.x] platform: - os: ubuntu-latest shell: bash diff --git a/.github/workflows/typedoc.yml b/.github/workflows/typedoc.yml index e5bc0ef8..d8d6c8d6 100644 --- a/.github/workflows/typedoc.yml +++ b/.github/workflows/typedoc.yml @@ -38,7 +38,10 @@ jobs: run: npm install - name: Generate typedocs run: npm run typedoc - + - name: Generate Benchmarks + run: npm run benchmark + - name: Copy Benchmarks to Docs + run: npm run benchmark-results-typedoc - name: Setup Pages uses: actions/configure-pages@v3 - name: Upload artifact diff --git a/.gitignore b/.gitignore index 966e2e0e..a1965794 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,5 @@ /nyc_output /coverage /bundle -/index.mjs \ No newline at end of file +/index.mjs +/dist diff --git a/.prettierignore b/.prettierignore index c01ac688..4edd771b 100644 --- 
a/.prettierignore +++ b/.prettierignore @@ -2,8 +2,9 @@ /tap-snapshots /coverage /.nyc_output -/bench-lru +/benchmark /.github /scripts /CHANGELOG.md /docs +/dist diff --git a/CHANGELOG.md b/CHANGELOG.md index 22705b68..93ac2085 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,107 +1,140 @@ # cringe lorg +## 10.0.0 + +- `cache.fetch()` return type is now `Promise<V | undefined>` + instead of `Promise<V | void>`. This is an irrelevant change + practically speaking, but can require changes for TypeScript + users. + +## 9.1.0 + +- `cache.set(key, undefined)` is now an alias for + `cache.delete(key)` + +## 9.0.0 + +- Use named export only, no default export. +- Bring back minimal polyfill. If this polyfill ends up being + used, then a warning is printed, as it is not safe for use + outside of LRUCache. + +## 8.0.0 + +- The `fetchContext` option was renamed to `context`, and may no + longer be set on the cache instance itself. +- Rewritten in TypeScript, so pretty much all the types moved + around a lot. +- The AbortController/AbortSignal polyfill is removed. For this + reason, **Node version 16.14.0 or higher is now required**. +- Internal properties were moved to actual private class + properties. +- Keys and values must not be `null` or `undefined`. +- Minified export available at `'lru-cache/min'`, for both CJS + and MJS builds. + ## 7.18.0 -* Add support for internal state investigation through the use of +- Add support for internal state investigation through the use of a `status` option to `has()`, `set()`, `get()`, and `fetch()`. 
## 7.17.0 -* Add `signal` option for `fetch` to pass a user-supplied +- Add `signal` option for `fetch` to pass a user-supplied AbortSignal -* Add `ignoreFetchAbort` and `allowStaleOnFetchAbort` options +- Add `ignoreFetchAbort` and `allowStaleOnFetchAbort` options ## 7.16.2 -* Fail fetch() promises when they are aborted +- Fail fetch() promises when they are aborted ## 7.16.0 -* Add `allowStaleOnFetchRejection` option +- Add `allowStaleOnFetchRejection` option ## 7.15.0 -* Provide both ESM and CommonJS exports +- Provide both ESM and CommonJS exports ## 7.14.0 -* Add `maxEntrySize` option to prevent caching items above a +- Add `maxEntrySize` option to prevent caching items above a given calculated size. ## 7.13.0 -* Add `forceRefresh` option to trigger a call to the +- Add `forceRefresh` option to trigger a call to the `fetchMethod` even if the item is found in cache, and not older than its `ttl`. ## 7.12.0 -* Add `fetchContext` option to provide additional information to +- Add `fetchContext` option to provide additional information to the `fetchMethod` -* 7.12.1: Fix bug where adding an item with size greater than +- 7.12.1: Fix bug where adding an item with size greater than `maxSize` would cause bizarre behavior. ## 7.11.0 -* Add 'noDeleteOnStaleGet' option, to suppress behavior where a +- Add 'noDeleteOnStaleGet' option, to suppress behavior where a `get()` of a stale item would remove it from the cache. ## 7.10.0 -* Add `noDeleteOnFetchRejection` option, to suppress behavior +- Add `noDeleteOnFetchRejection` option, to suppress behavior where a failed `fetch` will delete a previous stale value. -* Ship types along with the package, rather than relying on +- Ship types along with the package, rather than relying on out of date types coming from DefinitelyTyped. ## 7.9.0 -* Better AbortController polyfill, supporting +- Better AbortController polyfill, supporting `signal.addEventListener('abort')` and `signal.onabort`. 
-* (7.9.1) Drop item from cache instead of crashing with an +- (7.9.1) Drop item from cache instead of crashing with an `unhandledRejection` when the `fetchMethod` throws an error or returns a rejected Promise. ## 7.8.0 -* add `updateAgeOnHas` option -* warnings sent to `console.error` if `process.emitWarning` unavailable +- add `updateAgeOnHas` option +- warnings sent to `console.error` if `process.emitWarning` unavailable ## 7.7.0 -* fetch: provide options and abort signal +- fetch: provide options and abort signal ## 7.6.0 -* add cache.getRemainingTTL(key) -* Add async cache.fetch() method, fetchMethod option -* Allow unbounded storage if maxSize or ttl set +- add cache.getRemainingTTL(key) +- Add async cache.fetch() method, fetchMethod option +- Allow unbounded storage if maxSize or ttl set ## 7.5.0 -* defend against mutation while iterating -* Add rentries, rkeys, rvalues -* remove bundler and unnecessary package.json fields +- defend against mutation while iterating +- Add rentries, rkeys, rvalues +- remove bundler and unnecessary package.json fields ## 7.4.0 -* Add browser optimized webpack bundle, exposed as `'lru-cache/browser'` -* Track size of compiled bundle in CI ([@SuperOleg39](https://github.com/SuperOleg39)) -* Add `noUpdateTTL` option for `set()` +- Add browser optimized webpack bundle, exposed as `'lru-cache/browser'` +- Track size of compiled bundle in CI ([@SuperOleg39](https://github.com/SuperOleg39)) +- Add `noUpdateTTL` option for `set()` ## 7.3.0 -* Add `disposeAfter()` -* `set()` returns the cache object -* `delete()` returns boolean indicating whether anything was deleted +- Add `disposeAfter()` +- `set()` returns the cache object +- `delete()` returns boolean indicating whether anything was deleted ## 7.2.0 -* Add reason to dispose() calls. +- Add reason to dispose() calls. 
## 7.1.0 -* Add `ttlResolution` option -* Add `ttlAutopurge` option +- Add `ttlResolution` option +- Add `ttlAutopurge` option ## v7 - 2022-02 @@ -119,61 +152,61 @@ For the most part, the feature set has been maintained as much as possible. However, some other cleanup and refactoring changes were made in v7 as well. -* The `set()`, `get()`, and `has()` functions take options objects +- The `set()`, `get()`, and `has()` functions take options objects instead of positional booleans/integers for optional parameters. -* `size` can be set explicitly on `set()`. -* `cache.length` was renamed to the more fitting `cache.size`. -* Deprecations: - * `stale` option -> `allowStale` - * `maxAge` option -> `ttl` - * `length` option -> `sizeCalculation` - * `length` property -> `size` - * `del()` method -> `delete()` - * `prune()` method -> `purgeStale()` - * `reset()` method -> `clear()` -* The objects used by `cache.load()` and `cache.dump()` are incompatible +- `size` can be set explicitly on `set()`. +- `cache.length` was renamed to the more fitting `cache.size`. +- Deprecations: + - `stale` option -> `allowStale` + - `maxAge` option -> `ttl` + - `length` option -> `sizeCalculation` + - `length` property -> `size` + - `del()` method -> `delete()` + - `prune()` method -> `purgeStale()` + - `reset()` method -> `clear()` +- The objects used by `cache.load()` and `cache.dump()` are incompatible with previous versions. -* `max` and `maxSize` are now two separate options. (Previously, they were +- `max` and `maxSize` are now two separate options. (Previously, they were a single `max` option, which would be based on either count or computed size.) -* The function assigned to the `dispose` option is now expected to have signature +- The function assigned to the `dispose` option is now expected to have signature `(value, key, reason)` rather than `(key, value)`, reversing the order of `value` and `key`. 
## v6 - 2020-07 -* Drop support for node v8 and earlier +- Drop support for node v8 and earlier ## v5 - 2018-11 -* Add updateAgeOnGet option -* Guards around setting max/maxAge to non-numbers -* Use classes, drop support for old nodes +- Add updateAgeOnGet option +- Guards around setting max/maxAge to non-numbers +- Use classes, drop support for old nodes ## v4 - 2015-12 -* Improve performance -* add noDisposeOnSet option -* feat(prune): allow users to proactively prune old entries -* Use Symbols for private members -* Add maxAge setter/getter +- Improve performance +- add noDisposeOnSet option +- feat(prune): allow users to proactively prune old entries +- Use Symbols for private members +- Add maxAge setter/getter ## v3 - 2015-11 -* Add cache.rforEach -* Allow non-string keys +- Add cache.rforEach +- Allow non-string keys ## v2 - 2012-08 -* add cache.pop() -* add cache.peek() -* add cache.keys() -* add cache.values() -* fix memory leak -* add `stale` option to return stale values before deleting -* use null-prototype object to avoid hazards -* make options argument an object +- add cache.pop() +- add cache.peek() +- add cache.keys() +- add cache.values() +- fix memory leak +- add `stale` option to return stale values before deleting +- use null-prototype object to avoid hazards +- make options argument an object ## v1 - 2010-05 -* initial implementation +- initial implementation diff --git a/README.md b/README.md index f1283307..c3d0f204 100644 --- a/README.md +++ b/README.md @@ -31,9 +31,11 @@ npm install lru-cache --save ```js // hybrid module, either works -import LRUCache from 'lru-cache' +import { LRUCache } from 'lru-cache' // or: -const LRUCache = require('lru-cache') +const { LRUCache } = require('lru-cache') +// or in minified form for web browsers: +import { LRUCache } from 'http://unpkg.com/lru-cache@9/dist/mjs/index.min.mjs' // At least one of 'max', 'ttl', or 'maxSize' is required, to prevent // unsafe unbounded storage. 
@@ -70,7 +72,11 @@ const options = { // async method to use for cache.fetch(), for // stale-while-revalidate type of behavior - fetchMethod: async (key, staleValue, { options, signal }) => {}, + fetchMethod: async ( + key, + staleValue, + { options, signal, context } + ) => {}, } const cache = new LRUCache(options) @@ -93,11 +99,34 @@ assert.equal(cache.get({ a: 1 }), undefined) cache.clear() // empty the cache ``` -If you put more stuff in it, then items will fall out. +If you put more stuff in the cache, then less recently used items +will fall out. That's what an LRU cache is. + +## `class LRUCache(options)` + +Create a new `LRUCache` object. + +When using TypeScript, set the `K` and `V` types to the `key` and +`value` types, respectively. + +The `FC` ("fetch context") generic type defaults to `unknown`. +If set to a value other than `void` or `undefined`, then any +calls to `cache.fetch()` _must_ provide a `context` option +matching the `FC` type. If `FC` is set to `void` or `undefined`, +then `cache.fetch()` _must not_ provide a `context` option. See +the documentation on `async fetch()` below. ## Options -### `max` +All options are available on the LRUCache instance, making it +safe to pass an LRUCache instance as the options argument to make +another empty cache of the same type. + +Some options are marked read-only because changing them after +instantiation is not safe. Changing any of the other options +will of course only have an effect on subsequent method calls. + +### `max` (read only) The maximum number of items that remain in the cache (assuming no TTL pruning or explicit deletions). Note that fewer items may be @@ -110,7 +139,7 @@ must be a positive integer if set. **It is strongly recommended to set a `max` to prevent unbounded growth of the cache.** See "Storage Bounds Safety" below. 
-### `maxSize` +### `maxSize` (read only) Set to a positive integer to track the sizes of items added to the cache, and automatically evict items in order to stay below @@ -160,9 +189,7 @@ If the `size` (or return value of `sizeCalculation`) for a given entry is greater than `maxEntrySize`, then the item will not be added to the cache. -Deprecated alias: `length` - -### `fetchMethod` +### `fetchMethod` (read only) Function that is used to make background asynchronous fetches. Called with `fetchMethod(key, staleValue, { signal, options, @@ -171,9 +198,6 @@ context })`. May return a Promise. If `fetchMethod` is not provided, then `cache.fetch(key)` is equivalent to `Promise.resolve(cache.get(key))`. -The `signal` object is an `AbortSignal` if that's available in -the global object, otherwise it's a pretty close polyfill. - If at any time, `signal.aborted` is set to `true`, or if the `signal.onabort` method is called, or if it emits an `'abort'` event which you can listen to with `addEventListener`, then that @@ -197,18 +221,6 @@ For example, a DNS cache may update the TTL based on the value returned from a remote DNS server by changing `options.ttl` in the `fetchMethod`. -### `fetchContext` - -Arbitrary data that can be passed to the `fetchMethod` as the -`context` option. - -Note that this will only be relevant when the `cache.fetch()` -call needs to call `fetchMethod()`. Thus, any data which will -meaningfully vary the fetch response needs to be present in the -key. This is primarily intended for including `x-request-id` -headers and the like for debugging purposes, which do not affect -the `fetchMethod()` response. - ### `noDeleteOnFetchRejection` If a `fetchMethod` throws an error or returns a rejected promise, @@ -252,8 +264,32 @@ Set to true to return a stale value from the cache when the event, whether user-triggered, or due to internal cache behavior. 
Unless `ignoreFetchAbort` is also set, the underlying -`fetchMethod` will still be considered canceled, and its return -value will be ignored and not cached. +`fetchMethod` will still be considered canceled, and any value +it returns will be ignored and not cached. + +Caveat: since fetches are aborted when a new value is explicitly +set in the cache, this can lead to fetch returning a stale value, +since that was the fallback value _at the moment the `fetch()` was +initiated_, even though the new updated value is now present in +the cache. + +For example: + +```ts +const cache = new LRUCache({ + ttl: 100, + fetchMethod: async (url, oldValue, { signal }) => { + const res = await fetch(url, { signal }) + return await res.json() + } +}) +cache.set('https://example.com/', { some: 'data' }) +// 100ms go by... +const result = cache.fetch('https://example.com/') +cache.set('https://example.com/', { other: 'thing' }) +console.log(await result) // { some: 'data' } +console.log(cache.get('https://example.com/')) // { other: 'thing' } +``` ### `ignoreFetchAbort` @@ -301,7 +337,7 @@ AbortSignals. This may be overridden on the `fetch()` call or in the `fetchMethod` itself. -### `dispose` +### `dispose` (read only) Function that is called on items when they are dropped from the cache, as `this.dispose(value, key, reason)`. @@ -316,10 +352,7 @@ the `dispose()` function call, it will break things in subtle and weird ways. Unlike several other options, this may _not_ be overridden by -passing an option to `set()`, for performance reasons. If -disposal functions may vary between cache entries, then the -entire list must be scanned on every cache swap, even if no -disposal function is in use. +passing an option to `set()`, for performance reasons. The `reason` will be one of the following strings, corresponding to the reason for the item's deletion: @@ -336,7 +369,7 @@ and deletes of in-flight asynchronous fetches, you must use the Optional, must be a function. 
-### `disposeAfter` +### `disposeAfter` (read only) The same as `dispose`, but called _after_ the entry is completely removed and the cache is once again in a clean state. @@ -391,8 +424,7 @@ set a `max` to prevent unbounded growth of the cache.** See If ttl tracking is enabled, and `max` and `maxSize` are not set, and `ttlAutopurge` is not set, then a warning will be emitted cautioning about the potential for unbounded memory consumption. - -Deprecated alias: `maxAge` +(The TypeScript definitions will also discourage this.) ### `noUpdateTTL` @@ -455,8 +487,6 @@ This may be overridden by passing an options object to Boolean, default false, only relevant if `ttl` is set. -Deprecated alias: `stale` - ### `noDeleteOnStaleGet` When using time-expiring entries with `ttl`, by default stale @@ -500,11 +530,17 @@ Boolean, default false, only relevant if `ttl` is set. ## API -### `new LRUCache(options)` +### `new LRUCache(options)` Create a new LRUCache. All options are documented above, and are on the cache as public members. +The `K` and `V` types define the key and value types, +respectively. The optional `FC` type defines the type of the +`context` object passed to `cache.fetch()`. + +Keys and values **must not** be `null` or `undefined`. + ### `cache.max`, `cache.maxSize`, `cache.allowStale`, `cache.noDisposeOnSet`, `cache.sizeCalculation`, `cache.dispose`, @@ -556,16 +592,17 @@ Returns the cache object. For the usage of the `status` option, see **Status Tracking** below. +If the value is `undefined`, then this is an alias for +`cache.delete(key)`. `undefined` is never stored in the cache. +See **Storing Undefined Values** below. + ### `get(key, { updateAgeOnGet, allowStale, status } = {}) => value` Return a value from the cache. Will update the recency of the cache entry found. -If the key is not found, `get()` will return `undefined`. This -can be confusing when setting values specifically to `undefined`, -as in `cache.set(key, undefined)`. 
Use `cache.has()` to -determine whether a key is present in the cache at all. +If the key is not found, `get()` will return `undefined`. For the usage of the `status` option, see **Status Tracking** below. @@ -587,8 +624,8 @@ The following options are supported: a different object, because it must also respond to internal cache state changes, but aborting this signal will abort the one passed to `fetchMethod` as well. -- `fetchContext` - sets the `context` option passed to the - underlying `fetchMethod`. +- `context` - sets the `context` option passed to the underlying + `fetchMethod`. If the value is in the cache and not stale, then the returned Promise resolves to the value. @@ -629,6 +666,39 @@ If a `signal` is passed to the `fetch()` call, then aborting the signal will abort the fetch and cause the `fetch()` promise to reject with the reason provided. +#### Setting `context` + +If an `FC` type is set to a type other than `unknown`, `void`, or +`undefined` in the LRUCache constructor, then all +calls to `cache.fetch()` _must_ provide a `context` option. If +set to `undefined` or `void`, then calls to fetch _must not_ +provide a `context` option. + +The `context` param allows you to provide arbitrary data that +might be relevant in the course of fetching the data. It is only +relevant for the course of a single `fetch()` operation, and +discarded afterwards. + +#### Note: `fetch()` calls are inflight-unique + +If you call `fetch()` multiple times with the same key value, +then every call after the first will resolve on the same +promise<sup>1</sup>, +_even if they have different settings that would otherwise change +the behavior of the fetch_, such as `noDeleteOnFetchRejection` +or `ignoreFetchAbort`. + +In most cases, this is not a problem (in fact, only fetching +something once is what you probably want, if you're caching in +the first place). 
If you are changing the fetch() options +dramatically between runs, there's a good chance that you might +be trying to fit divergent semantics into a single object, and +would be better off with multiple cache instances. + +**1**: Ie, they're not the "same Promise", but they resolve at +the same time, because they're both waiting on the same +underlying fetchMethod response. + ### `peek(key, { allowStale } = {}) => value` Like `get()` but doesn't update recency or delete stale items. @@ -643,7 +713,7 @@ use. Age is updated if `updateAgeOnHas` is set to `true` in either the options or the constructor. Will return `false` if the item is stale, even though it is -technically in the cache. The difference can be determined (if +technically in the cache. The difference can be determined (if it matters) by using a `status` argument, and inspecting the `has` field. @@ -660,8 +730,6 @@ Returns `true` if the key was deleted, `false` otherwise. Clear the cache entirely, throwing away all values. -Deprecated alias: `reset()` - ### `keys()` Return a generator yielding the keys in the cache, in order from @@ -731,8 +799,6 @@ available. Delete any stale entries. Returns `true` if anything was removed, `false` otherwise. -Deprecated alias: `prune` - ### `getRemainingTTL(key)` Return the number of ms left in the item's TTL. If item is not @@ -760,68 +826,11 @@ Evict the least recently used item, returning its value. Returns `undefined` if cache is empty. -### Internal Methods and Properties - -In order to optimize performance as much as possible, "private" -members and methods are exposed on the object as normal -properties, rather than being accessed via Symbols, private -members, or closure variables. - -**Do not use or rely on these.** They will change or be removed -without notice. They will cause undefined behavior if used -inappropriately. There is no need or reason to ever call them -directly. 
- -This documentation is here so that it is especially clear that -this not "undocumented" because someone forgot; it _is_ -documented, and the documentation is telling you not to do it. - -**Do not report bugs that stem from using these properties.** -They will be ignored. - -- `initializeTTLTracking()` Set up the cache for tracking TTLs -- `updateItemAge(index)` Called when an item age is updated, by - internal ID -- `setItemTTL(index)` Called when an item ttl is updated, by - internal ID -- `isStale(index)` Called to check an item's staleness, by - internal ID -- `initializeSizeTracking()` Set up the cache for tracking item - size. Called automatically when a size is specified. -- `removeItemSize(index)` Updates the internal size calculation - when an item is removed or modified, by internal ID -- `addItemSize(index)` Updates the internal size calculation when - an item is added or modified, by internal ID -- `indexes()` An iterator over the non-stale internal IDs, from - most recently to least recently used. -- `rindexes()` An iterator over the non-stale internal IDs, from - least recently to most recently used. -- `newIndex()` Create a new internal ID, either reusing a deleted - ID, evicting the least recently used ID, or walking to the end - of the allotted space. -- `evict()` Evict the least recently used internal ID, returning - its ID. Does not do any bounds checking. -- `connect(p, n)` Connect the `p` and `n` internal IDs in the - linked list. -- `moveToTail(index)` Move the specified internal ID to the most - recently used position. 
-- `keyMap` Map of keys to internal IDs -- `keyList` List of keys by internal ID -- `valList` List of values by internal ID -- `sizes` List of calculated sizes by internal ID -- `ttls` List of TTL values by internal ID -- `starts` List of start time values by internal ID -- `next` Array of "next" pointers by internal ID -- `prev` Array of "previous" pointers by internal ID -- `head` Internal ID of least recently used item -- `tail` Internal ID of most recently used item -- `free` Stack of deleted internal IDs - ## Status Tracking Occasionally, it may be useful to track the internal behavior of the cache, particularly for logging, debugging, or for behavior -within the `fetchMethod`. To do this, you can pass a `status` +within the `fetchMethod`. To do this, you can pass a `status` object to the `get()`, `set()`, `has()`, and `fetch()` methods. The `status` option should be a plain JavaScript object. @@ -1036,6 +1045,38 @@ const cache = { If that isn't to your liking, check out [@isaacs/ttlcache](http://npm.im/@isaacs/ttlcache). +## Storing Undefined Values + +This cache never stores undefined values, as `undefined` is used +internally in a few places to indicate that a key is not in the +cache. + +You may call `cache.set(key, undefined)`, but this is just +an alias for `cache.delete(key)`. Note that this has the effect +that `cache.has(key)` will return _false_ after setting it to +undefined. + +```js +cache.set(myKey, undefined) +cache.has(myKey) // false! +``` + +If you need to track `undefined` values, and still note that the +key is in the cache, an easy workaround is to use a sigil object +of your own. + +```js +import { LRUCache } from 'lru-cache' +const undefinedValue = Symbol('undefined') +const cache = new LRUCache(...) +const mySet = (key, value) => + cache.set(key, value === undefined ? undefinedValue : value) +const myGet = (key, value) => { + const v = cache.get(key) + return v === undefinedValue ? 
undefined : v +} +``` + ## Performance As of January 2022, version 7 of this library is one of the most @@ -1092,13 +1133,24 @@ If performance matters to you: [mnemonist's LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache) which uses an Object as its data store. + 2. Failing that, if at all possible, use short non-numeric strings (ie, less than 256 characters) as your keys, and use [mnemonist's LRUCache](https://yomguithereal.github.io/mnemonist/lru-cache). -3. If the types of your keys will be long strings, strings that - look like floats, `null`, objects, or some mix of types, or if - you aren't sure, then this library will work well for you. + +3. If the types of your keys will be anything else, especially + long strings, strings that look like floats, objects, or some + mix of types, or if you aren't sure, then this library will + work well for you. + + If you do not need the features that this library provides + (like asynchronous fetching, a variety of TTL staleness + options, and so on), then [mnemonist's + LRUMap](https://yomguithereal.github.io/mnemonist/lru-map) is + a very good option, and just slightly faster than this module + (since it does considerably less). + 4. Do not use a `dispose` function, size tracking, or especially ttl behavior, unless absolutely needed. These features are convenient, and necessary in some use cases, and every attempt @@ -1114,4 +1166,24 @@ performance, albeit with some subtle changes as a result. If you were relying on the internals of LRUCache in version 6 or before, it probably will not work in version 7 and above. +## Breaking Changes in Version 8 + +- The `fetchContext` option was renamed to `context`, and may no + longer be set on the cache instance itself. +- Rewritten in TypeScript, so pretty much all the types moved + around a lot. +- The AbortController/AbortSignal polyfill was removed. For this + reason, **Node version 16.14.0 or higher is now required**. 
+- Internal properties were moved to actual private class + properties. +- Keys and values must not be `null` or `undefined`. +- Minified export available at `'lru-cache/min'`, for both CJS + and MJS builds. + +## Changes in Version 9 + +- Named export only, no default export. +- AbortController polyfill returned, albeit with a warning when + used. + For more info, see the [change log](CHANGELOG.md). diff --git a/benchmark/.gitignore b/benchmark/.gitignore new file mode 100644 index 00000000..a1749a34 --- /dev/null +++ b/benchmark/.gitignore @@ -0,0 +1,10 @@ +.DS_Store +/node_modules +/package.json +/package-lock.json +/impls.txt +/results +/results.md +/profiles +/profile.txt +/isolate*.log diff --git a/benchmark/CHANGELOG.md b/benchmark/CHANGELOG.md new file mode 100644 index 00000000..56b2d33e --- /dev/null +++ b/benchmark/CHANGELOG.md @@ -0,0 +1,36 @@ + +# 1.1.0 (2017-10-02) + +* 1.0.0 ([4b43691](https://github.com/dominictarr/bench-lru/commit/4b43691)) +* Add bench specification ([c55b726](https://github.com/dominictarr/bench-lru/commit/c55b726)) +* add hashlru ([09e99a0](https://github.com/dominictarr/bench-lru/commit/09e99a0)) +* Add ignore ([4f8b103](https://github.com/dominictarr/bench-lru/commit/4f8b103)) +* Add linter and changelog automatization ([8eadb2d](https://github.com/dominictarr/bench-lru/commit/8eadb2d)) +* Add UI feedback ([d4a7977](https://github.com/dominictarr/bench-lru/commit/d4a7977)) +* Avoid store data ([bf49f44](https://github.com/dominictarr/bench-lru/commit/bf49f44)) +* Calculate bundle size ([685dbfa](https://github.com/dominictarr/bench-lru/commit/685dbfa)) +* deps ([4c8f827](https://github.com/dominictarr/bench-lru/commit/4c8f827)) +* Fix find ([f8c979e](https://github.com/dominictarr/bench-lru/commit/f8c979e)) +* Fix scope ([2be6027](https://github.com/dominictarr/bench-lru/commit/2be6027)) +* fix typo @chentsulin found ([46eead9](https://github.com/dominictarr/bench-lru/commit/46eead9)) +* Improve format 
([7dec452](https://github.com/dominictarr/bench-lru/commit/7dec452)) +* initial ([e945b02](https://github.com/dominictarr/bench-lru/commit/e945b02)) +* Moar runs ([00133bd](https://github.com/dominictarr/bench-lru/commit/00133bd)) +* Move round inside bench ([ba91f0c](https://github.com/dominictarr/bench-lru/commit/ba91f0c)) +* new results ([cf2a362](https://github.com/dominictarr/bench-lru/commit/cf2a362)) +* Re-testing with `tiny-lru`, fixes #4 ([8130d27](https://github.com/dominictarr/bench-lru/commit/8130d27)), closes [#4](https://github.com/dominictarr/bench-lru/issues/4) +* results and discussion ([f566cd2](https://github.com/dominictarr/bench-lru/commit/f566cd2)) +* Sort by name ([9c85fb2](https://github.com/dominictarr/bench-lru/commit/9c85fb2)) +* Sort results ([60dbed3](https://github.com/dominictarr/bench-lru/commit/60dbed3)) +* Sort results ([f294ccc](https://github.com/dominictarr/bench-lru/commit/f294ccc)) +* Update ([5c244a6](https://github.com/dominictarr/bench-lru/commit/5c244a6)) +* Update deps ([35ac9f7](https://github.com/dominictarr/bench-lru/commit/35ac9f7)) +* update readme ([df5c278](https://github.com/dominictarr/bench-lru/commit/df5c278)) +* Update README.md ([f5e6dd4](https://github.com/dominictarr/bench-lru/commit/f5e6dd4)) +* Updating `data.csv` ([6103c7c](https://github.com/dominictarr/bench-lru/commit/6103c7c)) +* Updating a typo ([8286afa](https://github.com/dominictarr/bench-lru/commit/8286afa)) +* Updating tiny-lru & re-enabling it's test ([954e28a](https://github.com/dominictarr/bench-lru/commit/954e28a)) +* use hashlru, and benchmark reads also ([6fea600](https://github.com/dominictarr/bench-lru/commit/6fea600)) + + + diff --git a/benchmark/LICENSE b/benchmark/LICENSE new file mode 100644 index 00000000..54a0f6b4 --- /dev/null +++ b/benchmark/LICENSE @@ -0,0 +1,22 @@ +Copyright (c) 2016 'Dominic Tarr' + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation 
files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/benchmark/Makefile b/benchmark/Makefile new file mode 100644 index 00000000..1f44fe16 --- /dev/null +++ b/benchmark/Makefile @@ -0,0 +1,12 @@ +all: package.json index.js worker.js + rm -rf results.txt results + npm run benchmark | tee results.md + +impls.txt: fetch-impls.sh + bash fetch-impls.sh + +profile: worker.js + bash profile.sh + +package.json: make-deps.sh impls.txt + bash make-deps.sh diff --git a/benchmark/README.md b/benchmark/README.md new file mode 100644 index 00000000..a475913c --- /dev/null +++ b/benchmark/README.md @@ -0,0 +1,533 @@ +# bench-lru + +benchmark the least-recently-used caches which are available on npm. + +## Update: March, 2023 + +Forked and ported over to be used within the lru-cache project +directly. Made a bunch of changes to make it easier to run this +on an ongoing basis and detect regressions. 
+ +More implementations can be added by adding them to the list in +the `make-deps.sh` script, but for my purposes, the only decently +fast and reasonably correct LRU implementations apart from this +one are hashlru, lru-fast, and especially, mnemonist. My purpose +is not to win a contest, it's to easily track and debug +performance characteristics of this library. + +Run the tests by running `make` in this directory. + +## Update: January, 2022 + +This is a fork of Dominic Tarr's original `bench-lru` script. I've made the +following modifications. + +First, I noted that cache performance and correctness in JavaScript is +highly dependent on the types of keys used to store items in the cache. + +Specifically: + +1. When using keys that aren't strings or symbols, it's possible for keys + to collide if using an `Object` as the backing store rather than a + `Map`. +2. When using integer numbers as object keys, V8 is extremely optimized for + `Object` data storage, especially if the values are also integers. + However, if the keys are numeric strings, especially numeric _float_ strings, + performance goes out the window. +3. Long strings are much slower Object keys than short strings. + +In the real world, it's quite rare to store 200k integers using the exact +same 200k integers as keys. This iteration of the benchmark uses a variety +of integers, floats, numeric integer strings, numeric float strings, long +strings, strings and integers that collide, objects, and so on, and +disqualifies caches that don't pass a basic correctness smoke test. + +Next, the weighting of scores doesn't much match real world use cases +either. In observing several production use cases of LRU caches, +some consistent patterns can be observed. + +Typically, an LRUCache is being used (if it is actually needed) for a case +where: + +1. The total data corpus is _very large_, and cannot comfortably fit in + memory. (If it isn't large, just save it all, don't bother with an + LRU.) +2. 
The time required to fetch any given item is significant. (If it isn't, + just fetch it each time, don't bother with an LRU.) +3. The time over which the data will be accessed is significant, and thus + the subset of the corpus of data which will _eventually_ need to be + accessed by the process is more than can comfortably fit in memory. +4. Items tend to spike in popularity for a while, then become less + frequently accessed. + +If these criteria are met, an LRUCache is a good fit. If a few of them are +likely, and the others _might_ be true, then it might still be a good fit +to be safe. It's a fairly common need, if somewhat specific. + +Given this behavior pattern, the weights in the benchmark were off. Simply +reporting updates per ms next to evictions per ms is a bit unhelpful. +Dominic was correct that evictions are important. + +However, an eviction _can only happen_ at the time of making a `set()` +call, which means that you just performed some expensive upstream action to +get the thing that you're caching from its origin. + +`update`s (that is, setting a key which is already in the cache) are +extremely rare in normal LRU-friendly workloads. If you already have it in +the cache, don't fetch it upstream or write it again, use the cached one. +That's the whole point! + +The _most_ frequent operation an LRUCache is normally called upon for is: +"fetching an item from the queue again". + +That is to say, to the greatest extent possible, `get()` performance should +be roughly equivalent, regardless of where in the hierarchy of recency a +given item is found. If fetching the most recently used item is fast, but +fetching the item 50% most recently used, or even least recently used, is +slow, then the cache will perform poorly (and unpredictably!) under real +workloads. 
+ +To account for the priorities (and the fact that eviction is much slower in +every cache measured), the following weights are applied to form a +final "score", which is used to sort the list: + +1. `evict * 5` +2. `get2 * 5` +3. `get1 * 3` +4. `set * 2` +5. `update * 1` + +Note that since `get2` tends to be much faster than `evict` in all caches +tested, this ends up being the most important metric. + +Also, I observed that some caches perform very well when `get()` calls are +made in the order in which they were inserted into the cache, but much more +poorly when `get()` calls are made out of order. Under real workloads, a +cache is rarely called upon to list its contents in insertion order, but +instead is used in an unpredictable order. + +To accomplish this, the ordering of the data used in the `update` and +`get2` benchmarks is randomized, so that the items need to be constantly +reshuffled, as they would be in a real use case. + +### Conclusions from this new approach, and my attempts to make lru-cache perform well + +1. Only caches with `Map`-based key stores are capable of handling keys + that are long string, numeric float strings, or `Symbol` objects with + adequate performance. + + This was surprising to me! I expected that `Symbol` objects would + perform well in an `Object` key store, and I suspect that future + versions of V8 may optimize this code path if more people use it. The + performance gains on long strings (and especially numeric float strings) + in `Map` key stores was somewhat surprising as well, but this just shows + the hazard of optimizing for a benchmark instead of making a benchmark + match real workloads. + +2. Only caches with `Map`-based key stores are capable of handling + non-string/symbol keys correctly. + +3. The garbage-collection penalty for throwing away an object (the approach + advocated below) is very low for an object full of integer keys and + numeric values. 
However, it rises dramatically for almost any other + shape of data, making linked-list style approaches more effective. + +4. Similarly, the gc penalty for object-based linked list approaches makes + them perform significantly worse than pointer-based linked list + approaches. + + That is, it's much faster to implement the linked list as two arrays of + integers and do `this.next[index]` and `this.previous[index]` rather + than an array of node objects and `node.next` and `node.previous`. No + amount of object optimization (reusing objects from a pool, etc.) + seemed able to get around this. + + This wasn't surprising, but it was disappointing. `node.next.value` is + much more ergonomic and readable than + `this.valueList[this.next[index]]`. + +Almost any of these cache implementations will perform well enough in any +situation where you find yourself with a problem that needs a cache. But +as always, if you are optimizing a hot path and performance matters, make +sure to test it against your actual scenario. If you are strictly using +integers as keys, it's worth using one of the "worse" caches on this list. 
+ +## Results + +``` +int: just an integer +| name | set | get1 | update | get2 | evict | score | +|----------------------------------------------------------------|-------|-------|--------|-------|-------|--------| +| [lru-fast](https://npmjs.com/package/lru-fast) | 27663 | 63492 | 8780 | 59880 | 6425 | 586107 | +| [tiny-lru](https://npmjs.com/package/tiny-lru) | 22396 | 55096 | 9639 | 51282 | 6359 | 507924 | +| [mnemonist-object](https://www.npmjs.com/package/mnemonist) | 37736 | 36765 | 15674 | 35778 | 15974 | 460201 | +| [simple-lru-cache](https://npmjs.com/package/simple-lru-cache) | 15723 | 46083 | 6388 | 44346 | 12225 | 458938 | +| [hashlru](https://npmjs.com/package/hashlru) | 29112 | 31696 | 12747 | 34130 | 12666 | 400039 | +| [hyperlru-object](https://npmjs.com/package/hyperlru-object) | 7313 | 30395 | 8547 | 25445 | 6398 | 273573 | +| [lru-cache-7](https://npmjs.com/package/lru-cache) | 10655 | 20471 | 6796 | 19084 | 5244 | 211159 | +| [lru-cache-7-dispose](https://npmjs.com/package/lru-cache) | 10395 | 20141 | 6662 | 18727 | 5417 | 208595 | +| [lru-cache-7-size](https://npmjs.com/package/lru-cache) | 9790 | 20346 | 4652 | 17809 | 5180 | 200215 | +| [mnemonist-map](https://www.npmjs.com/package/mnemonist) | 12547 | 16488 | 8921 | 16000 | 6002 | 193489 | +| [lru-cache-7-ttl](https://npmjs.com/package/lru-cache) | 7605 | 17513 | 4121 | 15911 | 4558 | 174215 | +| [lru](https://www.npmjs.com/package/lru) | 11779 | 14194 | 5732 | 14914 | 4168 | 167282 | +| [js-lru](https://www.npmjs.com/package/js-lru) | 8107 | 15373 | 7460 | 14296 | 3853 | 160538 | +| [secondary-cache](https://npmjs.com/package/secondary-cache) | 9195 | 11179 | 5258 | 15962 | 3995 | 156970 | +| [lru-cache](https://npmjs.com/package/lru-cache) | 5218 | 12247 | 4380 | 10422 | 3341 | 120372 | +| [hyperlru-map](https://npmjs.com/package/hyperlru-map) | 4985 | 11534 | 4858 | 11001 | 2582 | 117345 | +| [modern-lru](https://npmjs.com/package/modern-lru) | 7027 | 9896 | 4308 | 8803 | 2876 | 106445 | 
+ +strint: stringified integer +| name | set | get1 | update | get2 | evict | score | +|----------------------------------------------------------------|-------|-------|--------|-------|-------|--------| +| [hashlru](https://npmjs.com/package/hashlru) | 42373 | 37383 | 14025 | 37383 | 13889 | 467280 | +| [lru-cache-7](https://npmjs.com/package/lru-cache) | 18709 | 42105 | 7505 | 41322 | 17094 | 463318 | +| [lru-cache-7-dispose](https://npmjs.com/package/lru-cache) | 18570 | 42373 | 7767 | 40984 | 16807 | 460981 | +| [lru-cache-7-size](https://npmjs.com/package/lru-cache) | 16625 | 41408 | 7189 | 40486 | 16026 | 447223 | +| [tiny-lru](https://npmjs.com/package/tiny-lru) | 28050 | 42644 | 8120 | 40241 | 6020 | 423457 | +| [mnemonist-object](https://www.npmjs.com/package/mnemonist) | 26702 | 35273 | 11111 | 35273 | 14738 | 420389 | +| [mnemonist-map](https://www.npmjs.com/package/mnemonist) | 20222 | 33389 | 10352 | 32787 | 17969 | 404743 | +| [simple-lru-cache](https://npmjs.com/package/simple-lru-cache) | 12895 | 36166 | 7933 | 36496 | 10995 | 379676 | +| [lru-cache-7-ttl](https://npmjs.com/package/lru-cache) | 11587 | 36232 | 4918 | 36697 | 11461 | 377578 | +| [lru-fast](https://npmjs.com/package/lru-fast) | 23095 | 4836 | 8150 | 54795 | 6141 | 373528 | +| [js-lru](https://www.npmjs.com/package/js-lru) | 10554 | 29197 | 7719 | 28777 | 7669 | 298648 | +| [lru](https://www.npmjs.com/package/lru) | 20964 | 28818 | 5132 | 27894 | 4749 | 296729 | +| [hyperlru-object](https://npmjs.com/package/hyperlru-object) | 7997 | 29240 | 5983 | 28249 | 5267 | 277277 | +| [lru-cache](https://npmjs.com/package/lru-cache) | 6570 | 26774 | 5072 | 26385 | 6693 | 263924 | +| [hyperlru-map](https://npmjs.com/package/hyperlru-map) | 6037 | 22396 | 6244 | 20263 | 5520 | 214421 | +| [modern-lru](https://npmjs.com/package/modern-lru) | 8197 | 17668 | 6470 | 19436 | 6234 | 204218 | +| [secondary-cache](https://npmjs.com/package/secondary-cache) | 7496 | 15760 | 4614 | 13643 | 3716 | 153681 | + 
+str: string that is not a number +| name | set | get1 | update | get2 | evict | score | +|----------------------------------------------------------------|------|-------|--------|-------|-------|--------| +| [mnemonist-object](https://www.npmjs.com/package/mnemonist) | 7449 | 17637 | 7321 | 16260 | 5587 | 184365 | +| [lru-cache-7-dispose](https://npmjs.com/package/lru-cache) | 6691 | 12788 | 5349 | 11396 | 3661 | 132380 | +| [lru-cache-7-size](https://npmjs.com/package/lru-cache) | 6466 | 12821 | 3752 | 11409 | 3662 | 130502 | +| [lru-cache-7](https://npmjs.com/package/lru-cache) | 6568 | 12547 | 5362 | 11142 | 3604 | 129869 | +| [tiny-lru](https://npmjs.com/package/tiny-lru) | 7095 | 12531 | 6250 | 10256 | 3142 | 125023 | +| [mnemonist-map](https://www.npmjs.com/package/mnemonist) | 7278 | 10341 | 5919 | 9341 | 3801 | 117208 | +| [hashlru](https://npmjs.com/package/hashlru) | 9311 | 6517 | 4614 | 6307 | 8478 | 116712 | +| [simple-lru-cache](https://npmjs.com/package/simple-lru-cache) | 4241 | 9302 | 5745 | 8772 | 4632 | 109153 | +| [lru](https://www.npmjs.com/package/lru) | 6711 | 10449 | 5947 | 8937 | 2120 | 106001 | +| [lru-fast](https://npmjs.com/package/lru-fast) | 4379 | 9770 | 5583 | 9350 | 2702 | 103911 | +| [js-lru](https://www.npmjs.com/package/js-lru) | 5259 | 9315 | 4915 | 9166 | 2539 | 101903 | +| [lru-cache-7-ttl](https://npmjs.com/package/lru-cache) | 5302 | 8897 | 3049 | 8241 | 3269 | 97894 | +| [lru-cache](https://npmjs.com/package/lru-cache) | 4028 | 8214 | 3213 | 7339 | 2219 | 83701 | +| [hyperlru-object](https://npmjs.com/package/hyperlru-object) | 3716 | 7321 | 3600 | 7110 | 2236 | 79725 | +| [hyperlru-map](https://npmjs.com/package/hyperlru-map) | 3512 | 7800 | 3361 | 6698 | 2057 | 77560 | +| [secondary-cache](https://npmjs.com/package/secondary-cache) | 4660 | 6616 | 2740 | 4827 | 1910 | 65593 | +| [modern-lru](https://npmjs.com/package/modern-lru) | 4360 | 5792 | 2826 | 5330 | 1956 | 65352 | + +numstr: a mix of integers and strings that 
look like them +⠴ Benchmarking 1 of 17 caches [hashlru] failed correctness check at key="2" +⠧ Benchmarking 3 of 17 caches [hyperlru-object] failed correctness check at key="2" +⠋ Benchmarking 5 of 17 caches [lru] failed correctness check at key="2" +⠋ Benchmarking 11 of 17 caches [lru-fast] failed correctness check at key="2" +⠦ Benchmarking 13 of 17 caches [secondary-cache] failed correctness check at key="2" +⠹ Benchmarking 14 of 17 caches [simple-lru-cache] failed correctness check at key="2" +⠇ Benchmarking 15 of 17 caches [tiny-lru] failed correctness check at key="2" +⠼ Benchmarking 16 of 17 caches [mnemonist-object] failed correctness check at key="2" +| name | set | get1 | update | get2 | evict | score | +|----------------------------------------------------------------|-------|-------|--------|-------|-------|--------| +| [lru-cache-7-dispose](https://npmjs.com/package/lru-cache) | 10309 | 18519 | 6470 | 16736 | 6105 | 196850 | +| [lru-cache-7](https://npmjs.com/package/lru-cache) | 10194 | 18587 | 6112 | 16327 | 5863 | 193211 | +| [lru-cache-7-size](https://npmjs.com/package/lru-cache) | 9281 | 18382 | 5215 | 16779 | 5757 | 191603 | +| [mnemonist-map](https://www.npmjs.com/package/mnemonist) | 11211 | 17483 | 5025 | 15552 | 6085 | 188081 | +| [lru-cache-7-ttl](https://npmjs.com/package/lru-cache) | 7257 | 13996 | 3567 | 13477 | 4880 | 151854 | +| [js-lru](https://www.npmjs.com/package/js-lru) | 7070 | 13803 | 6260 | 12469 | 3387 | 141089 | +| [lru-cache](https://npmjs.com/package/lru-cache) | 5044 | 12682 | 3731 | 10667 | 3192 | 121160 | +| [hyperlru-map](https://npmjs.com/package/hyperlru-map) | 4302 | 11461 | 3851 | 9901 | 2620 | 109443 | +| [modern-lru](https://npmjs.com/package/modern-lru) | 6510 | 9276 | 4511 | 8969 | 3217 | 106289 | +| [hashlru](https://npmjs.com/package/hashlru) | 0 | 0 | 0 | 0 | 0 | 0 | +| [hyperlru-object](https://npmjs.com/package/hyperlru-object) | 0 | 0 | 0 | 0 | 0 | 0 | +| [lru](https://www.npmjs.com/package/lru) | 0 | 0 | 0 
| 0 | 0 | 0 | +| [lru-fast](https://npmjs.com/package/lru-fast) | 0 | 0 | 0 | 0 | 0 | 0 | +| [secondary-cache](https://npmjs.com/package/secondary-cache) | 0 | 0 | 0 | 0 | 0 | 0 | +| [simple-lru-cache](https://npmjs.com/package/simple-lru-cache) | 0 | 0 | 0 | 0 | 0 | 0 | +| [tiny-lru](https://npmjs.com/package/tiny-lru) | 0 | 0 | 0 | 0 | 0 | 0 | +| [mnemonist-object](https://www.npmjs.com/package/mnemonist) | 0 | 0 | 0 | 0 | 0 | 0 | + +pi: multiples of pi +| name | set | get1 | update | get2 | evict | score | +|----------------------------------------------------------------|------|-------|--------|-------|-------|--------| +| [lru-cache-7](https://npmjs.com/package/lru-cache) | 5588 | 11891 | 4519 | 10905 | 3064 | 121213 | +| [mnemonist-map](https://www.npmjs.com/package/mnemonist) | 7457 | 9980 | 5838 | 9579 | 3842 | 117797 | +| [lru-cache-7-size](https://npmjs.com/package/lru-cache) | 4700 | 10096 | 2950 | 11148 | 2719 | 111973 | +| [lru-cache-7-dispose](https://npmjs.com/package/lru-cache) | 5372 | 10256 | 4248 | 10262 | 2951 | 111825 | +| [js-lru](https://www.npmjs.com/package/js-lru) | 3807 | 8893 | 5237 | 8058 | 2400 | 91820 | +| [lru-cache-7-ttl](https://npmjs.com/package/lru-cache) | 3269 | 5313 | 3204 | 7968 | 2526 | 78151 | +| [lru-cache](https://npmjs.com/package/lru-cache) | 3309 | 6709 | 3230 | 6861 | 1837 | 73465 | +| [hyperlru-map](https://npmjs.com/package/hyperlru-map) | 3152 | 6234 | 3444 | 6246 | 1694 | 68150 | +| [modern-lru](https://npmjs.com/package/modern-lru) | 2642 | 4103 | 2452 | 4190 | 1508 | 48535 | +| [mnemonist-object](https://www.npmjs.com/package/mnemonist) | 1643 | 2244 | 2016 | 2315 | 836 | 27789 | +| [lru](https://www.npmjs.com/package/lru) | 1688 | 1980 | 1744 | 1891 | 1277 | 26900 | +| [hashlru](https://npmjs.com/package/hashlru) | 1801 | 1599 | 1362 | 1545 | 1655 | 25761 | +| [simple-lru-cache](https://npmjs.com/package/simple-lru-cache) | 1358 | 1948 | 1803 | 1998 | 789 | 24298 | +| 
[lru-fast](https://npmjs.com/package/lru-fast) | 1326 | 1944 | 1590 | 1994 | 681 | 23449 | +| [tiny-lru](https://npmjs.com/package/tiny-lru) | 1542 | 1917 | 1484 | 1803 | 749 | 23079 | +| [hyperlru-object](https://npmjs.com/package/hyperlru-object) | 1304 | 1755 | 1473 | 1871 | 663 | 22016 | +| [secondary-cache](https://npmjs.com/package/secondary-cache) | 1336 | 1597 | 1162 | 1661 | 610 | 19980 | + +float: floating point values +| name | set | get1 | update | get2 | evict | score | +|----------------------------------------------------------------|------|-------|--------|-------|-------|--------| +| [lru-cache-7-size](https://npmjs.com/package/lru-cache) | 5420 | 10493 | 3197 | 12255 | 3091 | 122246 | +| [lru-cache-7](https://npmjs.com/package/lru-cache) | 5478 | 11827 | 4633 | 10858 | 3048 | 120600 | +| [mnemonist-map](https://www.npmjs.com/package/mnemonist) | 7582 | 10325 | 6209 | 9766 | 3849 | 120423 | +| [lru-cache-7-dispose](https://npmjs.com/package/lru-cache) | 5318 | 8150 | 3664 | 11912 | 3293 | 114775 | +| [js-lru](https://www.npmjs.com/package/js-lru) | 4004 | 8610 | 5488 | 8407 | 2555 | 94136 | +| [lru-cache-7-ttl](https://npmjs.com/package/lru-cache) | 3327 | 7067 | 3033 | 7776 | 2532 | 82428 | +| [hyperlru-map](https://npmjs.com/package/hyperlru-map) | 3077 | 6609 | 3506 | 6414 | 1681 | 69962 | +| [lru-cache](https://npmjs.com/package/lru-cache) | 3151 | 6279 | 3212 | 6623 | 1698 | 69956 | +| [modern-lru](https://npmjs.com/package/modern-lru) | 2964 | 4551 | 2616 | 4665 | 1701 | 54027 | +| [mnemonist-object](https://www.npmjs.com/package/mnemonist) | 1657 | 2376 | 2028 | 2448 | 846 | 28940 | +| [lru](https://www.npmjs.com/package/lru) | 1749 | 2181 | 1821 | 1985 | 1207 | 27822 | +| [hashlru](https://npmjs.com/package/hashlru) | 1784 | 1675 | 1456 | 1604 | 1639 | 26264 | +| [tiny-lru](https://npmjs.com/package/tiny-lru) | 1662 | 2006 | 1612 | 1917 | 840 | 24739 | +| [lru-fast](https://npmjs.com/package/lru-fast) | 1367 | 2018 | 1788 | 2071 | 703 | 
24446 | +| [simple-lru-cache](https://npmjs.com/package/simple-lru-cache) | 1368 | 1919 | 1880 | 1975 | 797 | 24233 | +| [hyperlru-object](https://npmjs.com/package/hyperlru-object) | 1305 | 1866 | 1515 | 1886 | 684 | 22573 | +| [secondary-cache](https://npmjs.com/package/secondary-cache) | 1323 | 1662 | 1232 | 1690 | 634 | 20484 | + +obj: an object with a single key +⠴ Benchmarking 1 of 17 caches [hashlru] failed correctness check at key={"z":0} +⠧ Benchmarking 3 of 17 caches [hyperlru-object] failed correctness check at key={"z":0} +⠇ Benchmarking 5 of 17 caches [lru] failed correctness check at key={"z":0} +⠼ Benchmarking 11 of 17 caches [lru-fast] failed correctness check at key={"z":0} +⠋ Benchmarking 13 of 17 caches [secondary-cache] failed correctness check at key={"z":0} +⠴ Benchmarking 14 of 17 caches [simple-lru-cache] failed correctness check at key={"z":0} +⠙ Benchmarking 15 of 17 caches [tiny-lru] failed correctness check at key={"z":0} +⠧ Benchmarking 16 of 17 caches [mnemonist-object] failed correctness check at key={"z":0} +| name | set | get1 | update | get2 | evict | score | +|----------------------------------------------------------------|-------|-------|--------|-------|-------|--------| +| [lru-cache-7](https://npmjs.com/package/lru-cache) | 10215 | 19822 | 6581 | 19157 | 5623 | 210377 | +| [lru-cache-7-dispose](https://npmjs.com/package/lru-cache) | 9921 | 20429 | 6718 | 18349 | 5548 | 207332 | +| [lru-cache-7-size](https://npmjs.com/package/lru-cache) | 8913 | 20387 | 5954 | 18639 | 5366 | 204966 | +| [mnemonist-map](https://www.npmjs.com/package/mnemonist) | 11710 | 18100 | 5161 | 16273 | 5700 | 192746 | +| [lru-cache-7-ttl](https://npmjs.com/package/lru-cache) | 7055 | 17227 | 3566 | 16064 | 4551 | 172432 | +| [js-lru](https://www.npmjs.com/package/js-lru) | 7613 | 14286 | 6892 | 12723 | 3630 | 146741 | +| [lru-cache](https://npmjs.com/package/lru-cache) | 5061 | 12158 | 4043 | 10655 | 3644 | 122134 | +| 
[modern-lru](https://npmjs.com/package/modern-lru) | 6129 | 10616 | 5444 | 10304 | 2957 | 115855 | +| [hyperlru-map](https://npmjs.com/package/hyperlru-map) | 4537 | 11056 | 3974 | 9128 | 2557 | 104641 | +| [hashlru](https://npmjs.com/package/hashlru) | 0 | 0 | 0 | 0 | 0 | 0 | +| [hyperlru-object](https://npmjs.com/package/hyperlru-object) | 0 | 0 | 0 | 0 | 0 | 0 | +| [lru](https://www.npmjs.com/package/lru) | 0 | 0 | 0 | 0 | 0 | 0 | +| [lru-fast](https://npmjs.com/package/lru-fast) | 0 | 0 | 0 | 0 | 0 | 0 | +| [secondary-cache](https://npmjs.com/package/secondary-cache) | 0 | 0 | 0 | 0 | 0 | 0 | +| [simple-lru-cache](https://npmjs.com/package/simple-lru-cache) | 0 | 0 | 0 | 0 | 0 | 0 | +| [tiny-lru](https://npmjs.com/package/tiny-lru) | 0 | 0 | 0 | 0 | 0 | 0 | +| [mnemonist-object](https://www.npmjs.com/package/mnemonist) | 0 | 0 | 0 | 0 | 0 | 0 | + +rand: random floating point number +| name | set | get1 | update | get2 | evict | score | +|----------------------------------------------------------------|------|-------|--------|-------|-------|--------| +| [lru-cache-7-size](https://npmjs.com/package/lru-cache) | 4912 | 10644 | 3218 | 11744 | 3027 | 118829 | +| [lru-cache-7](https://npmjs.com/package/lru-cache) | 5789 | 11013 | 4197 | 10834 | 3099 | 118479 | +| [mnemonist-map](https://www.npmjs.com/package/mnemonist) | 7524 | 10050 | 5936 | 9398 | 3826 | 117254 | +| [lru-cache-7-dispose](https://npmjs.com/package/lru-cache) | 5640 | 9217 | 3837 | 10846 | 3061 | 112303 | +| [js-lru](https://www.npmjs.com/package/js-lru) | 3982 | 8052 | 5155 | 8651 | 2431 | 92685 | +| [lru-cache-7-ttl](https://npmjs.com/package/lru-cache) | 3176 | 7246 | 2881 | 7070 | 2488 | 78761 | +| [lru-cache](https://npmjs.com/package/lru-cache) | 3071 | 6810 | 3150 | 6481 | 1812 | 71187 | +| [hyperlru-map](https://npmjs.com/package/hyperlru-map) | 3175 | 6295 | 3386 | 6109 | 1712 | 67726 | +| [modern-lru](https://npmjs.com/package/modern-lru) | 3341 | 4206 | 2594 | 4619 | 1653 | 53254 | +| 
[mnemonist-object](https://www.npmjs.com/package/mnemonist) | 1669 | 2380 | 1984 | 2350 | 859 | 28507 | +| [lru](https://www.npmjs.com/package/lru) | 1723 | 1866 | 1668 | 1829 | 1265 | 26182 | +| [hashlru](https://npmjs.com/package/hashlru) | 1790 | 1610 | 1402 | 1546 | 1613 | 25607 | +| [tiny-lru](https://npmjs.com/package/tiny-lru) | 1667 | 2060 | 1572 | 1946 | 809 | 24861 | +| [simple-lru-cache](https://npmjs.com/package/simple-lru-cache) | 1368 | 1958 | 1791 | 1999 | 768 | 24236 | +| [lru-fast](https://npmjs.com/package/lru-fast) | 1360 | 1952 | 1760 | 1987 | 696 | 23751 | +| [hyperlru-object](https://npmjs.com/package/hyperlru-object) | 1205 | 1815 | 1509 | 1949 | 682 | 22519 | +| [secondary-cache](https://npmjs.com/package/secondary-cache) | 1391 | 1665 | 1217 | 1740 | 645 | 20919 | + +sym: a Symbol object +⠼ Benchmarking 5 of 17 caches [lru] failed correctness check TypeError: Cannot convert a Symbol value to a string + at LRU.set (/Users/isaacs/dev/isaacs/lru-cache/bench-lru/node_modules/lru/index.js:69:41) + at self.onmessage (evalmachine.:116:38) + at process. 
(/Users/isaacs/dev/isaacs/lru-cache/bench-lru/node_modules/tiny-worker/lib/worker.js:60:55) + at process.emit (node:events:520:28) + at emit (node:internal/child_process:936:14) + at processTicksAndRejections (node:internal/process/task_queues:84:21) +| name | set | get1 | update | get2 | evict | score | +|----------------------------------------------------------------|-------|-------|--------|-------|-------|--------| +| [lru-cache-7](https://npmjs.com/package/lru-cache) | 9886 | 19361 | 6991 | 17809 | 5445 | 201116 | +| [lru-cache-7-dispose](https://npmjs.com/package/lru-cache) | 9809 | 19455 | 6258 | 17794 | 5430 | 200361 | +| [lru-cache-7-size](https://npmjs.com/package/lru-cache) | 9074 | 19417 | 6450 | 17953 | 5359 | 199409 | +| [mnemonist-object](https://www.npmjs.com/package/mnemonist) | 7776 | 20492 | 6831 | 17391 | 5593 | 198779 | +| [mnemonist-map](https://www.npmjs.com/package/mnemonist) | 10893 | 17652 | 5375 | 15540 | 5616 | 185897 | +| [js-lru](https://www.npmjs.com/package/js-lru) | 7297 | 14399 | 6925 | 12650 | 3591 | 145921 | +| [lru-cache-7-ttl](https://npmjs.com/package/lru-cache) | 7174 | 14124 | 3938 | 12300 | 4535 | 144833 | +| [tiny-lru](https://npmjs.com/package/tiny-lru) | 7758 | 12845 | 6824 | 11682 | 3125 | 134910 | +| [lru-fast](https://npmjs.com/package/lru-fast) | 6028 | 3537 | 6916 | 16340 | 2874 | 125653 | +| [hashlru](https://npmjs.com/package/hashlru) | 9602 | 7148 | 5144 | 6761 | 8264 | 120917 | +| [lru-cache](https://npmjs.com/package/lru-cache) | 5179 | 12114 | 3814 | 10846 | 3207 | 120779 | +| [simple-lru-cache](https://npmjs.com/package/simple-lru-cache) | 4636 | 8957 | 6044 | 8981 | 4601 | 110097 | +| [hyperlru-map](https://npmjs.com/package/hyperlru-map) | 4540 | 10959 | 4344 | 8799 | 2696 | 103776 | +| [modern-lru](https://npmjs.com/package/modern-lru) | 6129 | 8421 | 5045 | 8460 | 2817 | 98951 | +| [hyperlru-object](https://npmjs.com/package/hyperlru-object) | 3577 | 7533 | 3856 | 6349 | 2258 | 76644 | +| 
[secondary-cache](https://npmjs.com/package/secondary-cache) | 5319 | 7573 | 3177 | 5238 | 2055 | 72999 | +| [lru](https://www.npmjs.com/package/lru) | 0 | 0 | 0 | 0 | 0 | 0 | + +longstr: a very long string +| name | set | get1 | update | get2 | evict | score | +|----------------------------------------------------------------|------|-------|--------|-------|-------|--------| +| [lru-cache-7](https://npmjs.com/package/lru-cache) | 5882 | 11044 | 5009 | 10147 | 3107 | 116175 | +| [lru-cache-7-size](https://npmjs.com/package/lru-cache) | 5828 | 11287 | 4102 | 10320 | 2738 | 114909 | +| [lru-cache-7-dispose](https://npmjs.com/package/lru-cache) | 5936 | 9960 | 4827 | 10020 | 3224 | 112799 | +| [mnemonist-map](https://www.npmjs.com/package/mnemonist) | 6464 | 9264 | 6291 | 8396 | 3193 | 104956 | +| [lru-cache-7-ttl](https://npmjs.com/package/lru-cache) | 4900 | 8834 | 3499 | 8094 | 2759 | 94066 | +| [js-lru](https://www.npmjs.com/package/js-lru) | 4700 | 8316 | 5185 | 7997 | 2298 | 91008 | +| [lru-cache](https://npmjs.com/package/lru-cache) | 3666 | 7862 | 3514 | 6991 | 2044 | 79607 | +| [mnemonist-object](https://www.npmjs.com/package/mnemonist) | 2949 | 6658 | 4930 | 6640 | 2690 | 77452 | +| [hyperlru-map](https://npmjs.com/package/hyperlru-map) | 3377 | 7283 | 3492 | 6607 | 1856 | 74410 | +| [simple-lru-cache](https://npmjs.com/package/simple-lru-cache) | 2248 | 4006 | 4218 | 3946 | 2547 | 53197 | +| [hashlru](https://npmjs.com/package/hashlru) | 4003 | 3281 | 2791 | 3078 | 3169 | 51875 | +| [lru-fast](https://npmjs.com/package/lru-fast) | 2261 | 4432 | 3907 | 4352 | 1616 | 51565 | +| [modern-lru](https://npmjs.com/package/modern-lru) | 3132 | 4287 | 2691 | 4307 | 1282 | 49761 | +| [tiny-lru](https://npmjs.com/package/tiny-lru) | 2502 | 3859 | 3854 | 3752 | 1826 | 48325 | +| [hyperlru-object](https://npmjs.com/package/hyperlru-object) | 1989 | 3715 | 2690 | 3430 | 1235 | 41138 | +| [secondary-cache](https://npmjs.com/package/secondary-cache) | 2500 | 3394 | 2086 | 
2845 | 1148 | 37233 | +| [lru](https://www.npmjs.com/package/lru) | 2345 | 3073 | 2714 | 2903 | 1042 | 36348 | + +mix: a mix of all the types +⠦ Benchmarking 1 of 17 caches [hashlru] failed correctness check at key={"z":3} +⠧ Benchmarking 3 of 17 caches [hyperlru-object] failed correctness check at key={"z":3} +⠏ Benchmarking 5 of 17 caches [lru] failed correctness check TypeError: Cannot convert a Symbol value to a string + at LRU.set (/Users/isaacs/dev/isaacs/lru-cache/bench-lru/node_modules/lru/index.js:69:41) + at self.onmessage (evalmachine.:116:38) + at process. (/Users/isaacs/dev/isaacs/lru-cache/bench-lru/node_modules/tiny-worker/lib/worker.js:60:55) + at process.emit (node:events:520:28) + at emit (node:internal/child_process:936:14) + at processTicksAndRejections (node:internal/process/task_queues:84:21) +⠼ Benchmarking 11 of 17 caches [lru-fast] failed correctness check at key={"z":3} +⠴ Benchmarking 13 of 17 caches [secondary-cache] failed correctness check at key={"z":3} +⠙ Benchmarking 14 of 17 caches [simple-lru-cache] failed correctness check at key={"z":3} +⠧ Benchmarking 15 of 17 caches [tiny-lru] failed correctness check at key={"z":3} +⠸ Benchmarking 16 of 17 caches [mnemonist-object] failed correctness check at key={"z":3} +| name | set | get1 | update | get2 | evict | score | +|----------------------------------------------------------------|------|-------|--------|-------|-------|--------| +| [lru-cache-7](https://npmjs.com/package/lru-cache) | 7457 | 13342 | 5802 | 12195 | 3979 | 141612 | +| [mnemonist-map](https://www.npmjs.com/package/mnemonist) | 8061 | 14015 | 6369 | 11312 | 3975 | 140971 | +| [lru-cache-7-dispose](https://npmjs.com/package/lru-cache) | 7138 | 13271 | 5321 | 12210 | 3953 | 140225 | +| [lru-cache-7-size](https://npmjs.com/package/lru-cache) | 6847 | 13405 | 4351 | 12070 | 3757 | 137395 | +| [js-lru](https://www.npmjs.com/package/js-lru) | 5316 | 9676 | 4381 | 9170 | 2900 | 104391 | +| 
[lru-cache-7-ttl](https://npmjs.com/package/lru-cache) | 5708 | 9833 | 3164 | 8333 | 3419 | 102839 | +| [lru-cache](https://npmjs.com/package/lru-cache) | 4165 | 8624 | 3328 | 7890 | 2557 | 89765 | +| [hyperlru-map](https://npmjs.com/package/hyperlru-map) | 3875 | 7758 | 3351 | 7179 | 2017 | 80355 | +| [modern-lru](https://npmjs.com/package/modern-lru) | 4919 | 6256 | 3227 | 6129 | 2147 | 73213 | +| [hashlru](https://npmjs.com/package/hashlru) | 0 | 0 | 0 | 0 | 0 | 0 | +| [hyperlru-object](https://npmjs.com/package/hyperlru-object) | 0 | 0 | 0 | 0 | 0 | 0 | +| [lru](https://www.npmjs.com/package/lru) | 0 | 0 | 0 | 0 | 0 | 0 | +| [lru-fast](https://npmjs.com/package/lru-fast) | 0 | 0 | 0 | 0 | 0 | 0 | +| [secondary-cache](https://npmjs.com/package/secondary-cache) | 0 | 0 | 0 | 0 | 0 | 0 | +| [simple-lru-cache](https://npmjs.com/package/simple-lru-cache) | 0 | 0 | 0 | 0 | 0 | 0 | +| [tiny-lru](https://npmjs.com/package/tiny-lru) | 0 | 0 | 0 | 0 | 0 | 0 | +| [mnemonist-object](https://www.npmjs.com/package/mnemonist) | 0 | 0 | 0 | 0 | 0 | 0 | +``` + +The best performers are `lru-cache` version 7 and `mnemonist`'s `LRUMap`, across +most categories. `mnemonist-map` seems to consistently have slightly +better eviction and set performance, and slightly worse get performance, +for many key types. The difference is small enough to be negligible, +which is to be expected. + +For object-friendly key spaces (strictly integers or strictly short strings), +`mnemonist`'s `LRUCache` and `hashlru` seem to do the best. + +For strictly integer key sets, `lru-fast` lives up to its name, blowing the +other implementations out of the water, but did not perform nearly as well +with other types of keys. + +--- + +What follows below is Dominic Tarr's original discussion from 2016. + +_[@isaacs](https://github.com/isaacs)_ + +--- + +## Introduction + +An LRU cache is a cache with bounded memory use. 
+The point of a cache is to improve performance,
+so how performant are the available implementations?
+
+LRUs achieve bounded memory use by removing the oldest items when a threshold number of items
+is reached. We measure 3 cases, adding an item, updating an item, and adding items
+which push other items out of the LRU.
+
+There is a [previous benchmark](https://www.npmjs.com/package/bench-cache)
+but it did not describe its methodology. (and since it measures the memory used,
+but tests everything in the same process, it does not get clear results)
+
+## Benchmark
+
+I run a very simple multi-process benchmark, with 5 iterations to get a median of ops/ms:
+
+1. Set the LRU to fit max N=200,000 items.
+2. Add N random numbers to the cache, with keys 0-N.
+3. Then update those keys with new random numbers.
+4. Then _evict_ those keys, by adding keys N-2N.
+
+### Results
+
+Operations per millisecond (_higher is better_):
+
+| name | set | get1 | update | get2 | evict |
+| -------------------------------------------------------------- | ----- | ----- | ------ | ----- | ----- |
+| [hashlru](https://npmjs.com/package/hashlru) | 18536 | 17590 | 17794 | 18332 | 9381 |
+| [mnemonist-object](https://www.npmjs.com/package/mnemonist) | 15314 | 69444 | 35026 | 68966 | 7949 |
+| [quick-lru](https://npmjs.com/package/quick-lru) | 8214 | 4572 | 6777 | 4608 | 6345 |
+| [tiny-lru](https://npmjs.com/package/tiny-lru) | 6530 | 46296 | 37244 | 42017 | 5961 |
+| [lru-fast](https://npmjs.com/package/lru-fast) | 5979 | 36832 | 32626 | 40900 | 5929 |
+| [mnemonist-map](https://www.npmjs.com/package/mnemonist) | 6272 | 15785 | 10923 | 16077 | 3738 |
+| [lru](https://www.npmjs.com/package/lru) | 3927 | 5454 | 5001 | 5366 | 2827 |
+| [simple-lru-cache](https://npmjs.com/package/simple-lru-cache) | 3393 | 3855 | 3701 | 3899 | 2496 |
+| [hyperlru-object](https://npmjs.com/package/hyperlru-object) | 3515 | 3953 | 4044 | 4102 | 2495 |
+| [js-lru](https://www.npmjs.com/package/js-lru) | 3813 
| 10010 | 9246 | 10309 | 1843 |
+| [secondary-cache](https://npmjs.com/package/secondary-cache) | 2780 | 5705 | 5790 | 10549 | 1727 |
+| [lru-cache](https://npmjs.com/package/lru-cache) | 2275 | 3388 | 3334 | 3301 | 1593 |
+| [hyperlru-map](https://npmjs.com/package/hyperlru-map) | 2424 | 2508 | 2443 | 2540 | 1552 |
+| [modern-lru](https://npmjs.com/package/modern-lru) | 2710 | 3946 | 3581 | 4021 | 1327 |
+| [mkc](https://npmjs.com/package/mkc) | 1559 | 2044 | 1178 | 2161 | 1037 |
+
+We can group the results in a few categories:
+
+- all rounders (mnemonist, lru_cache, tiny-lru, simple-lru-cache, lru-fast) where the performance to add update and evict are comparable.
+- fast-write, slow-evict (lru, hashlru, lru-native, modern-lru) these have better set/update times, but for some reason are quite slow to evict items!
+- slow in at least 2 categories (lru-cache, mkc, faster-lru-cache, secondary-cache)
+
+## Discussion
+
+It appears that all-round performance is the most difficult to achieve, in particular,
+performance on eviction is difficult to achieve. I think eviction performance is the most important
+consideration, because once the cache is _warm_ each subsequent addition causes an eviction,
+and an actively used, _hot_, cache will run close to its eviction performance.
+Also, some have faster add than update, and some faster update than add.
+
+`modern-lru` gets pretty close to `lru-native` perf.
+I wrote `hashlru` after seeing the other results from this benchmark; it's important to point
+out that it does not use the classic LRU algorithm, but has the important properties of the LRU
+(bounded memory use and O(1) time complexity)
+
+Splitting the benchmark into multiple processes helps minimize JIT state pollution (gc, turbofan opt/deopt, etc.), and we see a much clearer picture of performance per library.
+
+## Future work
+
+These are still pretty early results, take any difference smaller than an order of magnitude with a grain of salt. 
+ +It is necessary to measure the statistical significance of the results to know accurately the relative performance of two closely matched implementations. + +I also didn't test the memory usage. This should be done running the benchmarks each in a separate process, so that the memory used by each run is not left over while the next is running. + +## Conclusion + +Javascript is generally slow, so one of the best ways to make it fast is to write less of it. +LRUs are also quite difficult to implement (linked lists!). In trying to come up with a faster +LRU implementation I realized that something far simpler could do the same job. Especially +given the strengths and weaknesses of javascript, this is significantly faster than any of the +other implementations, _including_ the C implementation. Likely, the overhead of the C<->js boundry +is partly to blame here. + +## License + +MIT diff --git a/benchmark/fetch-impls.sh b/benchmark/fetch-impls.sh new file mode 100644 index 00000000..260d83f2 --- /dev/null +++ b/benchmark/fetch-impls.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +# get the latest patch in each lru-cache 7.x and up, +# plus mnemonist, hashlru, and lru-fast + +nvs=($( + npm view 'lru-cache@>=7' name | awk -F. '{print $1 "." 
$2}' | sort -r -V | uniq +) +'mnemonist@0.39' +'hashlru@2' +'lru-fast@0.2') + +echo "lru-cache_CURRENT" > impls.txt +for dep in "${nvs[@]}"; do + name=${dep/@/_} + echo $name >> impls.txt +done diff --git a/benchmark/impls.js b/benchmark/impls.js new file mode 100644 index 00000000..0a21f2f6 --- /dev/null +++ b/benchmark/impls.js @@ -0,0 +1,36 @@ +const { readFileSync } = require('fs') +const impls = readFileSync(__dirname + '/impls.txt', 'utf8') + .trim() + .split('\n') +for (const impl of impls) { + if (impl.startsWith('lru-cache_')) { + const LRUCache = require(impl) + exports[impl] = max => new LRUCache({ max }) + } else if (impl.startsWith('mnemonist_')) { + MnemonistLRUMap = require(impl + '/lru-map-with-delete') + MnemonistLRUCache = require(impl + '/lru-cache-with-delete') + exports[impl + '_obj'] = max => new MnemonistLRUCache(max) + exports[impl + '_map'] = max => new MnemonistLRUMap(max) + } else if (impl.startsWith('hashlru_')) { + exports[impl] = require(impl) + } else if (impl.startsWith('lru-fast_')) { + const { LRUCache } = require(impl) + exports[impl] = max => new LRUCache(max) + } else { + throw new Error( + 'found an impl i dont know how to create: ' + impl + ) + } +} + +exports['just a Map'] = _ => new Map() + +exports['just a null obj'] = _ => { + const data = Object.create(null) + return { set: (k, v) => (data[k] = v), get: k => data[k] } +} + +exports['just a {}'] = _ => { + const data = {} + return { set: (k, v) => (data[k] = v), get: k => data[k] } +} diff --git a/benchmark/index.js b/benchmark/index.js new file mode 100644 index 00000000..7d876b1f --- /dev/null +++ b/benchmark/index.js @@ -0,0 +1,109 @@ +'use strict' + +process.env.__LRU_BENCH_DIR = __dirname +require('mkdirp').sync(__dirname + '/results') + +const Worker = require('tiny-worker') +const ora = require('ora') +const caches = Object.keys(require('./impls.js')) +const nth = caches.length +const { writeFileSync } = require('fs') + +const types = { + int: 'just an integer', + 
strint: 'stringified integer', + str: 'string that is not a number', + numstr: 'a mix of integers and strings that look like them', + pi: 'multiples of pi', + float: 'floating point values', + obj: 'an object with a single key', + rand: 'random floating point number', + sym: 'a Symbol object', + longstr: 'a very long string', + mix: 'a mix of all the types', +} + +if (!process.env.TYPE) { + const spawn = require('child_process').spawn + const todo = Object.keys(types) + const run = () => + new Promise(res => { + const TYPE = todo.shift() + if (!TYPE) return res() + console.log(`${TYPE}: ${types[TYPE]}`) + const child = spawn(process.execPath, [__filename], { + env: { TYPE }, + stdio: 'inherit', + }) + child.on('close', () => res(run())) + }) + run() +} else { + const spinner = ora(`Starting benchmark of ${nth} caches`).start(), + promises = [] + + caches.forEach((i, idx) => { + promises.push( + new Promise((resolve, reject) => { + return (idx === 0 ? Promise.resolve() : promises[idx - 1]) + .then(() => { + const worker = new Worker('worker.js') + + worker.onmessage = ev => { + resolve(ev.data) + worker.terminate() + } + + worker.onerror = err => { + reject(err) + worker.terminate() + } + + spinner.text = `Benchmarking ${ + idx + 1 + } of ${nth} caches [${i}]` + worker.postMessage(i) + }) + .catch(reject) + }) + ) + }) + + Promise.all(promises) + .then(results => { + const toMD = require('markdown-tables') + const keysort = require('keysort') + spinner.stop() + const data = keysort( + results.map(i => { + const obj = JSON.parse(i) + obj.score = + obj.evict * 5 + + obj.get2 * 5 + + obj.get1 * 3 + + obj.set * 2 + + obj.update + return obj + }), + 'score desc' + ) + + const heading = 'name,set,get1,update,get2,evict,score' + const csv = + [heading] + .concat( + data.map( + i => + `${i.name},${i.set},${i.get1},${i.update},${i.get2},${i.evict},${i.score}` + ) + ) + .join('\n') + '\n' + const resultsFile = `${__dirname}/results/${process.env.TYPE}.csv` + 
writeFileSync(resultsFile, csv, 'utf8') + console.log(toMD(csv)) + }) + .catch(err => { + console.error(err.stack || err.message || err) + process.exit(1) + }) +} diff --git a/benchmark/make-deps.sh b/benchmark/make-deps.sh new file mode 100644 index 00000000..5e4d47ff --- /dev/null +++ b/benchmark/make-deps.sh @@ -0,0 +1,39 @@ +#!/usr/bin/env bash + +deps="" +install=() +for name in $(cat impls.txt); do + if [ "$name" = "lru-cache_CURRENT" ]; then + continue + fi + dep=${name/_/@} + deps="${deps}"' "'"$name"'": "'"npm:$dep"$'",\n' +done + +cat >package.json < $d +ln ${PWD}/$d profile.txt +cat profile.txt diff --git a/benchmark/worker.js b/benchmark/worker.js new file mode 100644 index 00000000..89ab774a --- /dev/null +++ b/benchmark/worker.js @@ -0,0 +1,189 @@ +'use strict' + +const precise = require('precise') +const retsu = require('retsu') +const dir = process.env.__LRU_BENCH_DIR || __dirname +const caches = require(dir + '/impls.js') +const num = +process.env.N || 10_000 +const evict = num * 2 +const times = 10 +const x = 1e6 +const dataOrder = [] +const data1 = new Array(evict) +const data2 = new Array(evict) +const data3 = new Array(evict) + +const typeGen = { + numstr: z => (z % 2 === 0 ? z : String(z + 1)), + pi: z => z * Math.PI, + float: z => z + z / (evict + 1), + obj: z => ({ z }), + strint: z => String(z), + str: z => 'foo' + z + 'bar', + rand: z => z * Math.random(), + sym: z => Symbol(String(z)), + longstr: z => z + 'z'.repeat(1024 * 4), + int: z => z, + mix: z => typeGen[typeKeys[z % (typeKeys.length - 1)]](z), +} +const typeKeys = Object.keys(typeGen) + +;(function seed() { + let z = -1 + + const t = process.env.TYPE || 'mix' + while (++z < evict) { + const x = typeGen[t](z) + data1[z] = [x, Math.floor(Math.random() * 1e7)] + dataOrder.push(z) + } + + // shuffle up the key orders, so we're not just walking down the list. 
+ for (const key of dataOrder.sort(() => Math.random() - 0.5)) { + data2[key] = [data1[key][0], Math.random() * 1e7] + } + + for (const key of dataOrder.sort(() => Math.random() - 0.5)) { + data3[key] = data1[key] + } +})() + +const runTest = id => { + const time = { + set: [], + get1: [], + update: [], + get2: [], + evict: [], + } + const results = { + name: id, + set: 0, + get1: 0, + update: 0, + get2: 0, + evict: 0, + } + + let n = -1 + + // super rudimentary correctness check + // make sure that 5 puts get back the same 5 items we put + // ignore stderr, some caches are complainy about some keys + let error = console.error + console.error = () => {} + try { + const s = Math.max(5, Math.min(Math.floor(num / 2), 50)) + const m = Math.min(s * 5, num) + const lru = caches[id](s) + for (let i = 0; i < s; i++) lru.set(data1[i][0], data1[i][1]) + for (let i = 0; i < s; i++) { + if (lru.get(data1[i][0]) !== data1[i][1]) { + if (!process.stdout.isTTY) process.stderr.write(id) + error(' failed correctness check at key=%j', data1[i][0]) + postMessage( + JSON.stringify({ + name: id, + set: 0, + get1: 0, + update: 0, + get2: 0, + evict: 0, + }) + ) + process.exit(1) + } + } + if (!/^just a/.test(id) && !/unbounded$/.test(id)) { + for (let i = s + 1; i < m; i++) + lru.set(data1[i][0], data1[i][1]) + if (lru.get(data1[0][0])) { + if (!process.stdout.isTTY) process.stderr.write(id) + error(' failed eviction correctness check') + postMessage( + JSON.stringify({ + name: id, + set: 0, + get1: 0, + update: 0, + get2: 0, + evict: 0, + }) + ) + process.exit(1) + } + } + lru.set('__proto__', { [__filename]: 'pwned' }) + if (lru.get(__filename)) { + error(' failed prototype pollution check') + if (!/^just a/.test(id)) { + postMessage( + JSON.stringify({ + name: id, + set: 0, + get1: 0, + update: 0, + get2: 0, + evict: 0, + }) + ) + process.exit(1) + } + } + } catch (er) { + if (!process.stdout.isTTY) process.stderr.write(id) + error(' failed correctness check', er.stack) + 
postMessage( + JSON.stringify({ + name: id, + set: 0, + get1: 0, + update: 0, + get2: 0, + evict: 0, + }) + ) + process.exit(1) + } + + console.error = error + + while (++n < times) { + const lru = caches[id](num) + const stimer = precise().start() + for (let i = 0; i < num; i++) lru.set(data1[i][0], data1[i][1]) + time.set.push(stimer.stop().diff() / x) + + const gtimer = precise().start() + for (let i = 0; i < num; i++) lru.get(data1[i][0]) + time.get1.push(gtimer.stop().diff() / x) + + const utimer = precise().start() + for (let i = 0; i < num; i++) lru.set(data2[i][0], data2[i][1]) + time.update.push(utimer.stop().diff() / x) + + const g2timer = precise().start() + for (let i = 0; i < num; i++) lru.get(data3[i][0]) + time.get2.push(g2timer.stop().diff() / x) + + const etimer = precise().start() + for (let i = num; i < evict; i++) + lru.set(data1[i][0], data1[i][1]) + time.evict.push(etimer.stop().diff() / x) + } + + ;['set', 'get1', 'update', 'get2', 'evict'].forEach(i => { + results[i] = Number( + (num / retsu.median(time[i]).toFixed(2)).toFixed(0) + ) + }) + + postMessage(JSON.stringify(results)) +} + +if (typeof self !== 'undefined') { + self.onmessage = ev => runTest(ev.data) +} else { + global.postMessage = console.log + runTest('lru-cache_CURRENT') +} diff --git a/fixup.sh b/fixup.sh new file mode 100644 index 00000000..9c14f86b --- /dev/null +++ b/fixup.sh @@ -0,0 +1,25 @@ +#!/usr/bin/env bash + +esbuild --minify \ + --sourcemap \ + --bundle dist/cjs/index.js \ + --outfile=dist/cjs/index.min.js \ + --format=cjs + +esbuild --minify \ + --sourcemap \ + --bundle dist/mjs/index.js \ + --outfile=dist/mjs/index.min.js \ + --format=esm + +cat >dist/cjs/package.json <dist/mjs/package.json < implements Iterable<[K, V]> { - constructor(options: LRUCache.Options) - - /** - * Number of items in the cache. 
- * Alias for {@link size} - * - * @deprecated since 7.0 use {@link size} instead - */ - public readonly length: LRUCount - - public readonly max: LRUCount - public readonly maxSize: LRUSize - public readonly maxEntrySize: LRUSize - public readonly sizeCalculation: - | LRUCache.SizeCalculator - | undefined - public readonly dispose: LRUCache.Disposer - /** - * @since 7.4.0 - */ - public readonly disposeAfter: LRUCache.Disposer | null - public readonly noDisposeOnSet: boolean - public readonly ttl: LRUMilliseconds - public readonly ttlResolution: LRUMilliseconds - public readonly ttlAutopurge: boolean - public readonly allowStale: boolean - public readonly updateAgeOnGet: boolean - /** - * @since 7.11.0 - */ - public readonly noDeleteOnStaleGet: boolean - /** - * @since 7.6.0 - */ - public readonly fetchMethod: LRUCache.Fetcher | null - - /** - * The total number of items held in the cache at the current moment. - */ - public readonly size: LRUCount - - /** - * The total size of items in cache when using size tracking. - */ - public readonly calculatedSize: LRUSize - - /** - * Add a value to the cache. - */ - public set( - key: K, - value: V, - options?: LRUCache.SetOptions - ): this - - /** - * Return a value from the cache. Will update the recency of the cache entry - * found. - * - * If the key is not found, {@link get} will return `undefined`. This can be - * confusing when setting values specifically to `undefined`, as in - * `cache.set(key, undefined)`. Use {@link has} to determine whether a key is - * present in the cache at all. - */ - public get(key: K, options?: LRUCache.GetOptions): V | undefined - - /** - * Like {@link get} but doesn't update recency or delete stale items. - * Returns `undefined` if the item is stale, unless {@link allowStale} is set - * either on the cache or in the options object. - */ - public peek(key: K, options?: LRUCache.PeekOptions): V | undefined - - /** - * Check if a key is in the cache, without updating the recency of use. 
- * Will return false if the item is stale, even though it is technically - * in the cache. - * - * Will not update item age unless {@link updateAgeOnHas} is set in the - * options or constructor. - */ - public has(key: K, options?: LRUCache.HasOptions): boolean - - /** - * Deletes a key out of the cache. - * Returns true if the key was deleted, false otherwise. - */ - public delete(key: K): boolean - - /** - * Clear the cache entirely, throwing away all values. - */ - public clear(): void - - /** - * Delete any stale entries. Returns true if anything was removed, false - * otherwise. - */ - public purgeStale(): boolean - - /** - * Find a value for which the supplied fn method returns a truthy value, - * similar to Array.find(). fn is called as fn(value, key, cache). - */ - public find( - callbackFn: ( - value: V, - key: K, - cache: this - ) => boolean | undefined | void, - options?: LRUCache.GetOptions - ): V | undefined - - /** - * Call the supplied function on each item in the cache, in order from - * most recently used to least recently used. fn is called as - * fn(value, key, cache). Does not update age or recenty of use. - */ - public forEach( - callbackFn: (this: T, value: V, key: K, cache: this) => void, - thisArg?: T - ): void - - /** - * The same as {@link forEach} but items are iterated over in reverse - * order. (ie, less recently used items are iterated over first.) - */ - public rforEach( - callbackFn: (this: T, value: V, key: K, cache: this) => void, - thisArg?: T - ): void - - /** - * Return a generator yielding the keys in the cache, - * in order from most recently used to least recently used. - */ - public keys(): Generator - - /** - * Inverse order version of {@link keys} - * - * Return a generator yielding the keys in the cache, - * in order from least recently used to most recently used. - */ - public rkeys(): Generator - - /** - * Return a generator yielding the values in the cache, - * in order from most recently used to least recently used. 
- */ - public values(): Generator - - /** - * Inverse order version of {@link values} - * - * Return a generator yielding the values in the cache, - * in order from least recently used to most recently used. - */ - public rvalues(): Generator - - /** - * Return a generator yielding `[key, value]` pairs, - * in order from most recently used to least recently used. - */ - public entries(): Generator<[K, V], void, void> - - /** - * Inverse order version of {@link entries} - * - * Return a generator yielding `[key, value]` pairs, - * in order from least recently used to most recently used. - */ - public rentries(): Generator<[K, V], void, void> - - /** - * Iterating over the cache itself yields the same results as - * {@link entries} - */ - public [Symbol.iterator](): Generator<[K, V], void, void> - - /** - * Return an array of [key, entry] objects which can be passed to - * cache.load() - */ - public dump(): Array<[K, LRUCache.Entry]> - - /** - * Reset the cache and load in the items in entries in the order listed. - * Note that the shape of the resulting cache may be different if the - * same options are not used in both caches. - */ - public load( - cacheEntries: ReadonlyArray<[K, LRUCache.Entry]> - ): void - - /** - * Evict the least recently used item, returning its value or `undefined` - * if cache is empty. - */ - public pop(): V | undefined - - /** - * Deletes a key out of the cache. - * - * @deprecated since 7.0 use delete() instead - */ - public del(key: K): boolean - - /** - * Clear the cache entirely, throwing away all values. - * - * @deprecated since 7.0 use clear() instead - */ - public reset(): void - - /** - * Manually iterates over the entire cache proactively pruning old entries. - * - * @deprecated since 7.0 use purgeStale() instead - */ - public prune(): boolean - - /** - * Make an asynchronous cached fetch using the {@link fetchMethod} function. 
- * - * If multiple fetches for the same key are issued, then they will all be - * coalesced into a single call to fetchMethod. - * - * Note that this means that handling options such as - * {@link allowStaleOnFetchAbort}, {@link signal}, and - * {@link allowStaleOnFetchRejection} will be determined by the FIRST fetch() - * call for a given key. - * - * This is a known (fixable) shortcoming which will be addresed on when - * someone complains about it, as the fix would involve added complexity and - * may not be worth the costs for this edge case. - * - * since: 7.6.0 - */ - public fetch( - key: K, - options?: LRUCache.FetchOptions - ): Promise - - /** - * since: 7.6.0 - */ - public getRemainingTTL(key: K): LRUMilliseconds -} - -declare namespace LRUCache { - type DisposeReason = 'evict' | 'set' | 'delete' - - type SizeCalculator = (value: V, key: K) => LRUSize - type Disposer = ( - value: V, - key: K, - reason: DisposeReason - ) => void - type Fetcher = ( - key: K, - staleValue: V | undefined, - options: FetcherOptions - ) => Promise | V | void | undefined - - interface DeprecatedOptions { - /** - * alias for ttl - * - * @deprecated since 7.0 use options.ttl instead - */ - maxAge?: LRUMilliseconds - - /** - * alias for {@link sizeCalculation} - * - * @deprecated since 7.0 use {@link sizeCalculation} instead - */ - length?: SizeCalculator - - /** - * alias for allowStale - * - * @deprecated since 7.0 use options.allowStale instead - */ - stale?: boolean - } - - interface LimitedByCount { - /** - * The number of most recently used items to keep. - * Note that we may store fewer items than this if maxSize is hit. - */ - max: LRUCount - } - - type MaybeMaxEntrySizeLimit = - | { - /** - * The maximum allowed size for any single item in the cache. - * - * If a larger item is passed to {@link set} or returned by a - * {@link fetchMethod}, then it will not be stored in the cache. 
- */ - maxEntrySize: LRUSize - sizeCalculation?: SizeCalculator - } - | {} - - interface LimitedBySize { - /** - * If you wish to track item size, you must provide a maxSize - * note that we still will only keep up to max *actual items*, - * if max is set, so size tracking may cause fewer than max items - * to be stored. At the extreme, a single item of maxSize size - * will cause everything else in the cache to be dropped when it - * is added. Use with caution! - * - * Note also that size tracking can negatively impact performance, - * though for most cases, only minimally. - */ - maxSize: LRUSize - - /** - * Function to calculate size of items. Useful if storing strings or - * buffers or other items where memory size depends on the object itself. - * - * Items larger than {@link maxEntrySize} will not be stored in the cache. - * - * Note that when {@link maxSize} or {@link maxEntrySize} are set, every - * item added MUST have a size specified, either via a `sizeCalculation` in - * the constructor, or `sizeCalculation` or {@link size} options to - * {@link set}. - */ - sizeCalculation?: SizeCalculator - } - - interface LimitedByTTL { - /** - * Max time in milliseconds for items to live in cache before they are - * considered stale. Note that stale items are NOT preemptively removed - * by default, and MAY live in the cache, contributing to its LRU max, - * long after they have expired. - * - * Also, as this cache is optimized for LRU/MRU operations, some of - * the staleness/TTL checks will reduce performance, as they will incur - * overhead by deleting items. - * - * Must be an integer number of ms, defaults to 0, which means "no TTL" - */ - ttl: LRUMilliseconds - - /** - * Boolean flag to tell the cache to not update the TTL when - * setting a new value for an existing key (ie, when updating a value - * rather than inserting a new value). Note that the TTL value is - * _always_ set (if provided) when adding a new entry into the cache. 
- * - * @default false - * @since 7.4.0 - */ - noUpdateTTL?: boolean - - /** - * Minimum amount of time in ms in which to check for staleness. - * Defaults to 1, which means that the current time is checked - * at most once per millisecond. - * - * Set to 0 to check the current time every time staleness is tested. - * (This reduces performance, and is theoretically unnecessary.) - * - * Setting this to a higher value will improve performance somewhat - * while using ttl tracking, albeit at the expense of keeping stale - * items around a bit longer than their TTLs would indicate. - * - * @default 1 - * @since 7.1.0 - */ - ttlResolution?: LRUMilliseconds - - /** - * Preemptively remove stale items from the cache. - * Note that this may significantly degrade performance, - * especially if the cache is storing a large number of items. - * It is almost always best to just leave the stale items in - * the cache, and let them fall out as new items are added. - * - * Note that this means that {@link allowStale} is a bit pointless, - * as stale items will be deleted almost as soon as they expire. - * - * Use with caution! - * - * @default false - * @since 7.1.0 - */ - ttlAutopurge?: boolean - - /** - * Return stale items from {@link get} before disposing of them. - * Return stale values from {@link fetch} while performing a call - * to the {@link fetchMethod} in the background. - * - * @default false - */ - allowStale?: boolean - - /** - * Update the age of items on {@link get}, renewing their TTL - * - * @default false - */ - updateAgeOnGet?: boolean - - /** - * Do not delete stale items when they are retrieved with {@link get}. - * Note that the {@link get} return value will still be `undefined` unless - * allowStale is true. 
- * - * @default false - * @since 7.11.0 - */ - noDeleteOnStaleGet?: boolean - - /** - * Update the age of items on {@link has}, renewing their TTL - * - * @default false - */ - updateAgeOnHas?: boolean - } - - type SafetyBounds = - | LimitedByCount - | LimitedBySize - | LimitedByTTL - - // options shared by all three of the limiting scenarios - interface SharedOptions { - /** - * Function that is called on items when they are dropped from the cache. - * This can be handy if you want to close file descriptors or do other - * cleanup tasks when items are no longer accessible. Called with `key, - * value`. It's called before actually removing the item from the - * internal cache, so it is *NOT* safe to re-add them. - * Use {@link disposeAfter} if you wish to dispose items after they have - * been full removed, when it is safe to add them back to the cache. - */ - dispose?: Disposer - - /** - * The same as dispose, but called *after* the entry is completely - * removed and the cache is once again in a clean state. It is safe to - * add an item right back into the cache at this point. - * However, note that it is *very* easy to inadvertently create infinite - * recursion this way. - * - * @since 7.3.0 - */ - disposeAfter?: Disposer - - /** - * Set to true to suppress calling the dispose() function if the entry - * key is still accessible within the cache. - * This may be overridden by passing an options object to {@link set}. - * - * @default false - */ - noDisposeOnSet?: boolean - - /** - * Function that is used to make background asynchronous fetches. Called - * with `fetchMethod(key, staleValue, { signal, options, context })`. - * - * If `fetchMethod` is not provided, then {@link fetch} is - * equivalent to `Promise.resolve(cache.get(key))`. - * - * The `fetchMethod` should ONLY return `undefined` in cases where the - * abort controller has sent an abort signal. 
- * - * @since 7.6.0 - */ - fetchMethod?: LRUCache.Fetcher - - /** - * Set to true to suppress the deletion of stale data when a - * {@link fetchMethod} throws an error or returns a rejected promise - * - * This may be overridden in the {@link fetchMethod}. - * - * @default false - * @since 7.10.0 - */ - noDeleteOnFetchRejection?: boolean - - /** - * Set to true to allow returning stale data when a {@link fetchMethod} - * throws an error or returns a rejected promise. Note that this - * differs from using {@link allowStale} in that stale data will - * ONLY be returned in the case that the fetch fails, not any other - * times. - * - * This may be overridden in the {@link fetchMethod}. - * - * @default false - * @since 7.16.0 - */ - allowStaleOnFetchRejection?: boolean - - /** - * - * Set to true to ignore the `abort` event emitted by the `AbortSignal` - * object passed to {@link fetchMethod}, and still cache the - * resulting resolution value, as long as it is not `undefined`. - * - * When used on its own, this means aborted {@link fetch} calls are not - * immediately resolved or rejected when they are aborted, and instead take - * the full time to await. - * - * When used with {@link allowStaleOnFetchAbort}, aborted {@link fetch} - * calls will resolve immediately to their stale cached value or - * `undefined`, and will continue to process and eventually update the - * cache when they resolve, as long as the resulting value is not - * `undefined`, thus supporting a "return stale on timeout while - * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal. - * - * **Note**: regardless of this setting, an `abort` event _is still emitted - * on the `AbortSignal` object_, so may result in invalid results when - * passed to other underlying APIs that use AbortSignals. - * - * This may be overridden in the {@link fetchMethod} or the call to - * {@link fetch}. 
- * - * @default false - * @since 7.17.0 - */ - ignoreFetchAbort?: boolean - - /** - * Set to true to return a stale value from the cache when the - * `AbortSignal` passed to the {@link fetchMethod} dispatches an `'abort'` - * event, whether user-triggered, or due to internal cache behavior. - * - * Unless {@link ignoreFetchAbort} is also set, the underlying - * {@link fetchMethod} will still be considered canceled, and its return - * value will be ignored and not cached. - * - * This may be overridden in the {@link fetchMethod} or the call to - * {@link fetch}. - * - * @default false - * @since 7.17.0 - */ - allowStaleOnFetchAbort?: boolean - - /** - * Set to any value in the constructor or {@link fetch} options to - * pass arbitrary data to the {@link fetchMethod} in the {@link context} - * options field. - * - * @since 7.12.0 - */ - fetchContext?: any - } - - type Options = SharedOptions & - DeprecatedOptions & - SafetyBounds & - MaybeMaxEntrySizeLimit - - /** - * options which override the options set in the LRUCache constructor - * when making calling {@link set}. - */ - interface SetOptions { - /** - * A value for the size of the entry, prevents calls to - * {@link sizeCalculation}. - * - * Items larger than {@link maxEntrySize} will not be stored in the cache. - * - * Note that when {@link maxSize} or {@link maxEntrySize} are set, every - * item added MUST have a size specified, either via a `sizeCalculation` in - * the constructor, or {@link sizeCalculation} or `size` options to - * {@link set}. - */ - size?: LRUSize - /** - * Overrides the {@link sizeCalculation} method set in the constructor. - * - * Items larger than {@link maxEntrySize} will not be stored in the cache. - * - * Note that when {@link maxSize} or {@link maxEntrySize} are set, every - * item added MUST have a size specified, either via a `sizeCalculation` in - * the constructor, or `sizeCalculation` or {@link size} options to - * {@link set}. 
- */ - sizeCalculation?: SizeCalculator - ttl?: LRUMilliseconds - start?: LRUMilliseconds - noDisposeOnSet?: boolean - noUpdateTTL?: boolean - status?: Status - } - - /** - * options which override the options set in the LRUCAche constructor - * when calling {@link has}. - */ - interface HasOptions { - updateAgeOnHas?: boolean - status: Status - } - - /** - * options which override the options set in the LRUCache constructor - * when calling {@link get}. - */ - interface GetOptions { - allowStale?: boolean - updateAgeOnGet?: boolean - noDeleteOnStaleGet?: boolean - status?: Status - } - - /** - * options which override the options set in the LRUCache constructor - * when calling {@link peek}. - */ - interface PeekOptions { - allowStale?: boolean - } - - /** - * Options object passed to the {@link fetchMethod} - * - * May be mutated by the {@link fetchMethod} to affect the behavior of the - * resulting {@link set} operation on resolution, or in the case of - * {@link noDeleteOnFetchRejection}, {@link ignoreFetchAbort}, and - * {@link allowStaleOnFetchRejection}, the handling of failure. - */ - interface FetcherFetchOptions { - allowStale?: boolean - updateAgeOnGet?: boolean - noDeleteOnStaleGet?: boolean - size?: LRUSize - sizeCalculation?: SizeCalculator - ttl?: LRUMilliseconds - noDisposeOnSet?: boolean - noUpdateTTL?: boolean - noDeleteOnFetchRejection?: boolean - allowStaleOnFetchRejection?: boolean - ignoreFetchAbort?: boolean - allowStaleOnFetchAbort?: boolean - status?: Status - } - - /** - * Status object that may be passed to {@link fetch}, {@link get}, - * {@link set}, and {@link has}. - */ - interface Status { - /** - * The status of a set() operation. 
- * - * - add: the item was not found in the cache, and was added - * - update: the item was in the cache, with the same value provided - * - replace: the item was in the cache, and replaced - * - miss: the item was not added to the cache for some reason - */ - set?: 'add' | 'update' | 'replace' | 'miss' - - /** - * the ttl stored for the item, or undefined if ttls are not used. - */ - ttl?: LRUMilliseconds - - /** - * the start time for the item, or undefined if ttls are not used. - */ - start?: LRUMilliseconds - - /** - * The timestamp used for TTL calculation - */ - now?: LRUMilliseconds - - /** - * the remaining ttl for the item, or undefined if ttls are not used. - */ - remainingTTL?: LRUMilliseconds - - /** - * The calculated size for the item, if sizes are used. - */ - size?: LRUSize - - /** - * A flag indicating that the item was not stored, due to exceeding the - * {@link maxEntrySize} - */ - maxEntrySizeExceeded?: true - - /** - * The old value, specified in the case of `set:'update'` or - * `set:'replace'` - */ - oldValue?: V - - /** - * The results of a {@link has} operation - * - * - hit: the item was found in the cache - * - stale: the item was found in the cache, but is stale - * - miss: the item was not found in the cache - */ - has?: 'hit' | 'stale' | 'miss' - - /** - * The status of a {@link fetch} operation. - * Note that this can change as the underlying fetch() moves through - * various states. - * - * - inflight: there is another fetch() for this key which is in process - * - get: there is no fetchMethod, so {@link get} was called. - * - miss: the item is not in cache, and will be fetched. - * - hit: the item is in the cache, and was resolved immediately. - * - stale: the item is in the cache, but stale. - * - refresh: the item is in the cache, and not stale, but - * {@link forceRefresh} was specified. 
- */ - fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh' - - /** - * The {@link fetchMethod} was called - */ - fetchDispatched?: true - - /** - * The cached value was updated after a successful call to fetchMethod - */ - fetchUpdated?: true - - /** - * The reason for a fetch() rejection. Either the error raised by the - * {@link fetchMethod}, or the reason for an AbortSignal. - */ - fetchError?: Error - - /** - * The fetch received an abort signal - */ - fetchAborted?: true - - /** - * The abort signal received was ignored, and the fetch was allowed to - * continue. - */ - fetchAbortIgnored?: true - - /** - * The fetchMethod promise resolved successfully - */ - fetchResolved?: true - - /** - * The fetchMethod promise was rejected - */ - fetchRejected?: true - - /** - * The status of a {@link get} operation. - * - * - fetching: The item is currently being fetched. If a previous value is - * present and allowed, that will be returned. - * - stale: The item is in the cache, and is stale. - * - hit: the item is in the cache - * - miss: the item is not in the cache - */ - get?: 'stale' | 'hit' | 'miss' - - /** - * A fetch or get operation returned a stale value. - */ - returnedStale?: true - } - - /** - * options which override the options set in the LRUCache constructor - * when calling {@link fetch}. 
- * - * This is the union of GetOptions and SetOptions, plus - * {@link noDeleteOnFetchRejection}, {@link allowStaleOnFetchRejection}, - * {@link forceRefresh}, and {@link fetchContext} - */ - interface FetchOptions extends FetcherFetchOptions { - forceRefresh?: boolean - fetchContext?: any - signal?: AbortSignal - status?: Status - } - - interface FetcherOptions { - signal: AbortSignal - options: FetcherFetchOptions - /** - * Object provided in the {@link fetchContext} option - */ - context: any - } - - interface Entry { - value: V - ttl?: LRUMilliseconds - size?: LRUSize - start?: LRUMilliseconds - } -} - -export = LRUCache diff --git a/index.js b/index.js deleted file mode 100644 index 48e99fe5..00000000 --- a/index.js +++ /dev/null @@ -1,1227 +0,0 @@ -const perf = - typeof performance === 'object' && - performance && - typeof performance.now === 'function' - ? performance - : Date - -const hasAbortController = typeof AbortController === 'function' - -// minimal backwards-compatibility polyfill -// this doesn't have nearly all the checks and whatnot that -// actual AbortController/Signal has, but it's enough for -// our purposes, and if used properly, behaves the same. -const AC = hasAbortController - ? AbortController - : class AbortController { - constructor() { - this.signal = new AS() - } - abort(reason = new Error('This operation was aborted')) { - this.signal.reason = this.signal.reason || reason - this.signal.aborted = true - this.signal.dispatchEvent({ - type: 'abort', - target: this.signal, - }) - } - } - -const hasAbortSignal = typeof AbortSignal === 'function' -// Some polyfills put this on the AC class, not global -const hasACAbortSignal = typeof AC.AbortSignal === 'function' -const AS = hasAbortSignal - ? AbortSignal - : hasACAbortSignal - ? 
AC.AbortController - : class AbortSignal { - constructor() { - this.reason = undefined - this.aborted = false - this._listeners = [] - } - dispatchEvent(e) { - if (e.type === 'abort') { - this.aborted = true - this.onabort(e) - this._listeners.forEach(f => f(e), this) - } - } - onabort() {} - addEventListener(ev, fn) { - if (ev === 'abort') { - this._listeners.push(fn) - } - } - removeEventListener(ev, fn) { - if (ev === 'abort') { - this._listeners = this._listeners.filter(f => f !== fn) - } - } - } - -const warned = new Set() -const deprecatedOption = (opt, instead) => { - const code = `LRU_CACHE_OPTION_${opt}` - if (shouldWarn(code)) { - warn(code, `${opt} option`, `options.${instead}`, LRUCache) - } -} -const deprecatedMethod = (method, instead) => { - const code = `LRU_CACHE_METHOD_${method}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, method) - warn(code, `${method} method`, `cache.${instead}()`, get) - } -} -const deprecatedProperty = (field, instead) => { - const code = `LRU_CACHE_PROPERTY_${field}` - if (shouldWarn(code)) { - const { prototype } = LRUCache - const { get } = Object.getOwnPropertyDescriptor(prototype, field) - warn(code, `${field} property`, `cache.${instead}`, get) - } -} - -const emitWarning = (...a) => { - typeof process === 'object' && - process && - typeof process.emitWarning === 'function' - ? process.emitWarning(...a) - : console.error(...a) -} - -const shouldWarn = code => !warned.has(code) - -const warn = (code, what, instead, fn) => { - warned.add(code) - const msg = `The ${what} is deprecated. Please use ${instead} instead.` - emitWarning(msg, 'DeprecationWarning', code, fn) -} - -const isPosInt = n => n && n === Math.floor(n) && n > 0 && isFinite(n) - -/* istanbul ignore next - This is a little bit ridiculous, tbh. - * The maximum array length is 2^32-1 or thereabouts on most JS impls. 
- * And well before that point, you're caching the entire world, I mean, - * that's ~32GB of just integers for the next/prev links, plus whatever - * else to hold that many keys and values. Just filling the memory with - * zeroes at init time is brutal when you get that big. - * But why not be complete? - * Maybe in the future, these limits will have expanded. */ -const getUintArray = max => - !isPosInt(max) - ? null - : max <= Math.pow(2, 8) - ? Uint8Array - : max <= Math.pow(2, 16) - ? Uint16Array - : max <= Math.pow(2, 32) - ? Uint32Array - : max <= Number.MAX_SAFE_INTEGER - ? ZeroArray - : null - -class ZeroArray extends Array { - constructor(size) { - super(size) - this.fill(0) - } -} - -class Stack { - constructor(max) { - if (max === 0) { - return [] - } - const UintArray = getUintArray(max) - this.heap = new UintArray(max) - this.length = 0 - } - push(n) { - this.heap[this.length++] = n - } - pop() { - return this.heap[--this.length] - } -} - -class LRUCache { - constructor(options = {}) { - const { - max = 0, - ttl, - ttlResolution = 1, - ttlAutopurge, - updateAgeOnGet, - updateAgeOnHas, - allowStale, - dispose, - disposeAfter, - noDisposeOnSet, - noUpdateTTL, - maxSize = 0, - maxEntrySize = 0, - sizeCalculation, - fetchMethod, - fetchContext, - noDeleteOnFetchRejection, - noDeleteOnStaleGet, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - } = options - - // deprecated options, don't trigger a warning for getting them if - // the thing being passed in is another LRUCache we're copying. - const { length, maxAge, stale } = - options instanceof LRUCache ? {} : options - - if (max !== 0 && !isPosInt(max)) { - throw new TypeError('max option must be a nonnegative integer') - } - - const UintArray = max ? 
getUintArray(max) : Array - if (!UintArray) { - throw new Error('invalid max value: ' + max) - } - - this.max = max - this.maxSize = maxSize - this.maxEntrySize = maxEntrySize || this.maxSize - this.sizeCalculation = sizeCalculation || length - if (this.sizeCalculation) { - if (!this.maxSize && !this.maxEntrySize) { - throw new TypeError( - 'cannot set sizeCalculation without setting maxSize or maxEntrySize' - ) - } - if (typeof this.sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation set to non-function') - } - } - - this.fetchMethod = fetchMethod || null - if (this.fetchMethod && typeof this.fetchMethod !== 'function') { - throw new TypeError( - 'fetchMethod must be a function if specified' - ) - } - - this.fetchContext = fetchContext - if (!this.fetchMethod && fetchContext !== undefined) { - throw new TypeError( - 'cannot set fetchContext without fetchMethod' - ) - } - - this.keyMap = new Map() - this.keyList = new Array(max).fill(null) - this.valList = new Array(max).fill(null) - this.next = new UintArray(max) - this.prev = new UintArray(max) - this.head = 0 - this.tail = 0 - this.free = new Stack(max) - this.initialFill = 1 - this.size = 0 - - if (typeof dispose === 'function') { - this.dispose = dispose - } - if (typeof disposeAfter === 'function') { - this.disposeAfter = disposeAfter - this.disposed = [] - } else { - this.disposeAfter = null - this.disposed = null - } - this.noDisposeOnSet = !!noDisposeOnSet - this.noUpdateTTL = !!noUpdateTTL - this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection - this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection - this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort - this.ignoreFetchAbort = !!ignoreFetchAbort - - // NB: maxEntrySize is set to maxSize if it's set - if (this.maxEntrySize !== 0) { - if (this.maxSize !== 0) { - if (!isPosInt(this.maxSize)) { - throw new TypeError( - 'maxSize must be a positive integer if specified' - ) - } - } - if (!isPosInt(this.maxEntrySize)) { - 
throw new TypeError( - 'maxEntrySize must be a positive integer if specified' - ) - } - this.initializeSizeTracking() - } - - this.allowStale = !!allowStale || !!stale - this.noDeleteOnStaleGet = !!noDeleteOnStaleGet - this.updateAgeOnGet = !!updateAgeOnGet - this.updateAgeOnHas = !!updateAgeOnHas - this.ttlResolution = - isPosInt(ttlResolution) || ttlResolution === 0 - ? ttlResolution - : 1 - this.ttlAutopurge = !!ttlAutopurge - this.ttl = ttl || maxAge || 0 - if (this.ttl) { - if (!isPosInt(this.ttl)) { - throw new TypeError( - 'ttl must be a positive integer if specified' - ) - } - this.initializeTTLTracking() - } - - // do not allow completely unbounded caches - if (this.max === 0 && this.ttl === 0 && this.maxSize === 0) { - throw new TypeError( - 'At least one of max, maxSize, or ttl is required' - ) - } - if (!this.ttlAutopurge && !this.max && !this.maxSize) { - const code = 'LRU_CACHE_UNBOUNDED' - if (shouldWarn(code)) { - warned.add(code) - const msg = - 'TTL caching without ttlAutopurge, max, or maxSize can ' + - 'result in unbounded memory consumption.' - emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) - } - } - - if (stale) { - deprecatedOption('stale', 'allowStale') - } - if (maxAge) { - deprecatedOption('maxAge', 'ttl') - } - if (length) { - deprecatedOption('length', 'sizeCalculation') - } - } - - getRemainingTTL(key) { - return this.has(key, { updateAgeOnHas: false }) ? Infinity : 0 - } - - initializeTTLTracking() { - this.ttls = new ZeroArray(this.max) - this.starts = new ZeroArray(this.max) - - this.setItemTTL = (index, ttl, start = perf.now()) => { - this.starts[index] = ttl !== 0 ? 
start : 0 - this.ttls[index] = ttl - if (ttl !== 0 && this.ttlAutopurge) { - const t = setTimeout(() => { - if (this.isStale(index)) { - this.delete(this.keyList[index]) - } - }, ttl + 1) - /* istanbul ignore else - unref() not supported on all platforms */ - if (t.unref) { - t.unref() - } - } - } - - this.updateItemAge = index => { - this.starts[index] = this.ttls[index] !== 0 ? perf.now() : 0 - } - - this.statusTTL = (status, index) => { - if (status) { - status.ttl = this.ttls[index] - status.start = this.starts[index] - status.now = cachedNow || getNow() - status.remainingTTL = status.now + status.ttl - status.start - } - } - - // debounce calls to perf.now() to 1s so we're not hitting - // that costly call repeatedly. - let cachedNow = 0 - const getNow = () => { - const n = perf.now() - if (this.ttlResolution > 0) { - cachedNow = n - const t = setTimeout( - () => (cachedNow = 0), - this.ttlResolution - ) - /* istanbul ignore else - not available on all platforms */ - if (t.unref) { - t.unref() - } - } - return n - } - - this.getRemainingTTL = key => { - const index = this.keyMap.get(key) - if (index === undefined) { - return 0 - } - return this.ttls[index] === 0 || this.starts[index] === 0 - ? Infinity - : this.starts[index] + - this.ttls[index] - - (cachedNow || getNow()) - } - - this.isStale = index => { - return ( - this.ttls[index] !== 0 && - this.starts[index] !== 0 && - (cachedNow || getNow()) - this.starts[index] > - this.ttls[index] - ) - } - } - updateItemAge(_index) {} - statusTTL(_status, _index) {} - setItemTTL(_index, _ttl, _start) {} - isStale(_index) { - return false - } - - initializeSizeTracking() { - this.calculatedSize = 0 - this.sizes = new ZeroArray(this.max) - this.removeItemSize = index => { - this.calculatedSize -= this.sizes[index] - this.sizes[index] = 0 - } - this.requireSize = (k, v, size, sizeCalculation) => { - // provisionally accept background fetches. - // actual value size will be checked when they return. 
- if (this.isBackgroundFetch(v)) { - return 0 - } - if (!isPosInt(size)) { - if (sizeCalculation) { - if (typeof sizeCalculation !== 'function') { - throw new TypeError('sizeCalculation must be a function') - } - size = sizeCalculation(v, k) - if (!isPosInt(size)) { - throw new TypeError( - 'sizeCalculation return invalid (expect positive integer)' - ) - } - } else { - throw new TypeError( - 'invalid size value (must be positive integer). ' + - 'When maxSize or maxEntrySize is used, sizeCalculation or size ' + - 'must be set.' - ) - } - } - return size - } - this.addItemSize = (index, size, status) => { - this.sizes[index] = size - if (this.maxSize) { - const maxSize = this.maxSize - this.sizes[index] - while (this.calculatedSize > maxSize) { - this.evict(true) - } - } - this.calculatedSize += this.sizes[index] - if (status) { - status.entrySize = size - status.totalCalculatedSize = this.calculatedSize - } - } - } - removeItemSize(_index) {} - addItemSize(_index, _size) {} - requireSize(_k, _v, size, sizeCalculation) { - if (size || sizeCalculation) { - throw new TypeError( - 'cannot set size without setting maxSize or maxEntrySize on cache' - ) - } - } - - *indexes({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.tail; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.head) { - break - } else { - i = this.prev[i] - } - } - } - } - - *rindexes({ allowStale = this.allowStale } = {}) { - if (this.size) { - for (let i = this.head; true; ) { - if (!this.isValidIndex(i)) { - break - } - if (allowStale || !this.isStale(i)) { - yield i - } - if (i === this.tail) { - break - } else { - i = this.next[i] - } - } - } - } - - isValidIndex(index) { - return ( - index !== undefined && - this.keyMap.get(this.keyList[index]) === index - ) - } - - *entries() { - for (const i of this.indexes()) { - if ( - this.valList[i] !== undefined && - this.keyList[i] !== undefined && - 
!this.isBackgroundFetch(this.valList[i]) - ) { - yield [this.keyList[i], this.valList[i]] - } - } - } - *rentries() { - for (const i of this.rindexes()) { - if ( - this.valList[i] !== undefined && - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield [this.keyList[i], this.valList[i]] - } - } - } - - *keys() { - for (const i of this.indexes()) { - if ( - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.keyList[i] - } - } - } - *rkeys() { - for (const i of this.rindexes()) { - if ( - this.keyList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.keyList[i] - } - } - } - - *values() { - for (const i of this.indexes()) { - if ( - this.valList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.valList[i] - } - } - } - *rvalues() { - for (const i of this.rindexes()) { - if ( - this.valList[i] !== undefined && - !this.isBackgroundFetch(this.valList[i]) - ) { - yield this.valList[i] - } - } - } - - [Symbol.iterator]() { - return this.entries() - } - - find(fn, getOptions) { - for (const i of this.indexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - if (fn(value, this.keyList[i], this)) { - return this.get(this.keyList[i], getOptions) - } - } - } - - forEach(fn, thisp = this) { - for (const i of this.indexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - fn.call(thisp, value, this.keyList[i], this) - } - } - - rforEach(fn, thisp = this) { - for (const i of this.rindexes()) { - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? 
v.__staleWhileFetching - : v - if (value === undefined) continue - fn.call(thisp, value, this.keyList[i], this) - } - } - - get prune() { - deprecatedMethod('prune', 'purgeStale') - return this.purgeStale - } - - purgeStale() { - let deleted = false - for (const i of this.rindexes({ allowStale: true })) { - if (this.isStale(i)) { - this.delete(this.keyList[i]) - deleted = true - } - } - return deleted - } - - dump() { - const arr = [] - for (const i of this.indexes({ allowStale: true })) { - const key = this.keyList[i] - const v = this.valList[i] - const value = this.isBackgroundFetch(v) - ? v.__staleWhileFetching - : v - if (value === undefined) continue - const entry = { value } - if (this.ttls) { - entry.ttl = this.ttls[i] - // always dump the start relative to a portable timestamp - // it's ok for this to be a bit slow, it's a rare operation. - const age = perf.now() - this.starts[i] - entry.start = Math.floor(Date.now() - age) - } - if (this.sizes) { - entry.size = this.sizes[i] - } - arr.unshift([key, entry]) - } - return arr - } - - load(arr) { - this.clear() - for (const [key, entry] of arr) { - if (entry.start) { - // entry.start is a portable timestamp, but we may be using - // node's performance.now(), so calculate the offset. - // it's ok for this to be a bit slow, it's a rare operation. 
- const age = Date.now() - entry.start - entry.start = perf.now() - age - } - this.set(key, entry.value, entry) - } - } - - dispose(_v, _k, _reason) {} - - set( - k, - v, - { - ttl = this.ttl, - start, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - status, - } = {} - ) { - size = this.requireSize(k, v, size, sizeCalculation) - // if the item doesn't fit, don't do anything - // NB: maxEntrySize set to maxSize by default - if (this.maxEntrySize && size > this.maxEntrySize) { - if (status) { - status.set = 'miss' - status.maxEntrySizeExceeded = true - } - // have to delete, in case a background fetch is there already. - // in non-async cases, this is a no-op - this.delete(k) - return this - } - let index = this.size === 0 ? undefined : this.keyMap.get(k) - if (index === undefined) { - // addition - index = this.newIndex() - this.keyList[index] = k - this.valList[index] = v - this.keyMap.set(k, index) - this.next[this.tail] = index - this.prev[index] = this.tail - this.tail = index - this.size++ - this.addItemSize(index, size, status) - if (status) { - status.set = 'add' - } - noUpdateTTL = false - } else { - // update - this.moveToTail(index) - const oldVal = this.valList[index] - if (v !== oldVal) { - if (this.isBackgroundFetch(oldVal)) { - oldVal.__abortController.abort(new Error('replaced')) - } else { - if (!noDisposeOnSet) { - this.dispose(oldVal, k, 'set') - if (this.disposeAfter) { - this.disposed.push([oldVal, k, 'set']) - } - } - } - this.removeItemSize(index) - this.valList[index] = v - this.addItemSize(index, size, status) - if (status) { - status.set = 'replace' - const oldValue = - oldVal && this.isBackgroundFetch(oldVal) - ? 
oldVal.__staleWhileFetching - : oldVal - if (oldValue !== undefined) status.oldValue = oldValue - } - } else if (status) { - status.set = 'update' - } - } - if (ttl !== 0 && this.ttl === 0 && !this.ttls) { - this.initializeTTLTracking() - } - if (!noUpdateTTL) { - this.setItemTTL(index, ttl, start) - } - this.statusTTL(status, index) - if (this.disposeAfter) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return this - } - - newIndex() { - if (this.size === 0) { - return this.tail - } - if (this.size === this.max && this.max !== 0) { - return this.evict(false) - } - if (this.free.length !== 0) { - return this.free.pop() - } - // initial fill, just keep writing down the list - return this.initialFill++ - } - - pop() { - if (this.size) { - const val = this.valList[this.head] - this.evict(true) - return val - } - } - - evict(free) { - const head = this.head - const k = this.keyList[head] - const v = this.valList[head] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('evicted')) - } else { - this.dispose(v, k, 'evict') - if (this.disposeAfter) { - this.disposed.push([v, k, 'evict']) - } - } - this.removeItemSize(head) - // if we aren't about to use the index, then null these out - if (free) { - this.keyList[head] = null - this.valList[head] = null - this.free.push(head) - } - this.head = this.next[head] - this.keyMap.delete(k) - this.size-- - return head - } - - has(k, { updateAgeOnHas = this.updateAgeOnHas, status } = {}) { - const index = this.keyMap.get(k) - if (index !== undefined) { - if (!this.isStale(index)) { - if (updateAgeOnHas) { - this.updateItemAge(index) - } - if (status) status.has = 'hit' - this.statusTTL(status, index) - return true - } else if (status) { - status.has = 'stale' - this.statusTTL(status, index) - } - } else if (status) { - status.has = 'miss' - } - return false - } - - // like get(), but without any LRU updating or TTL expiration - peek(k, { allowStale = this.allowStale } 
= {}) { - const index = this.keyMap.get(k) - if (index !== undefined && (allowStale || !this.isStale(index))) { - const v = this.valList[index] - // either stale and allowed, or forcing a refresh of non-stale value - return this.isBackgroundFetch(v) ? v.__staleWhileFetching : v - } - } - - backgroundFetch(k, index, options, context) { - const v = index === undefined ? undefined : this.valList[index] - if (this.isBackgroundFetch(v)) { - return v - } - const ac = new AC() - if (options.signal) { - options.signal.addEventListener('abort', () => - ac.abort(options.signal.reason) - ) - } - const fetchOpts = { - signal: ac.signal, - options, - context, - } - const cb = (v, updateCache = false) => { - const { aborted } = ac.signal - const ignoreAbort = options.ignoreFetchAbort && v !== undefined - if (options.status) { - if (aborted && !updateCache) { - options.status.fetchAborted = true - options.status.fetchError = ac.signal.reason - if (ignoreAbort) options.status.fetchAbortIgnored = true - } else { - options.status.fetchResolved = true - } - } - if (aborted && !ignoreAbort && !updateCache) { - return fetchFail(ac.signal.reason) - } - // either we didn't abort, and are still here, or we did, and ignored - if (this.valList[index] === p) { - if (v === undefined) { - if (p.__staleWhileFetching) { - this.valList[index] = p.__staleWhileFetching - } else { - this.delete(k) - } - } else { - if (options.status) options.status.fetchUpdated = true - this.set(k, v, fetchOpts.options) - } - } - return v - } - const eb = er => { - if (options.status) { - options.status.fetchRejected = true - options.status.fetchError = er - } - return fetchFail(er) - } - const fetchFail = er => { - const { aborted } = ac.signal - const allowStaleAborted = - aborted && options.allowStaleOnFetchAbort - const allowStale = - allowStaleAborted || options.allowStaleOnFetchRejection - const noDelete = allowStale || options.noDeleteOnFetchRejection - if (this.valList[index] === p) { - // if we allow stale 
on fetch rejections, then we need to ensure that - // the stale value is not removed from the cache when the fetch fails. - const del = !noDelete || p.__staleWhileFetching === undefined - if (del) { - this.delete(k) - } else if (!allowStaleAborted) { - // still replace the *promise* with the stale value, - // since we are done with the promise at this point. - // leave it untouched if we're still waiting for an - // aborted background fetch that hasn't yet returned. - this.valList[index] = p.__staleWhileFetching - } - } - if (allowStale) { - if (options.status && p.__staleWhileFetching !== undefined) { - options.status.returnedStale = true - } - return p.__staleWhileFetching - } else if (p.__returned === p) { - throw er - } - } - const pcall = (res, rej) => { - this.fetchMethod(k, v, fetchOpts).then(v => res(v), rej) - // ignored, we go until we finish, regardless. - // defer check until we are actually aborting, - // so fetchMethod can override. - ac.signal.addEventListener('abort', () => { - if ( - !options.ignoreFetchAbort || - options.allowStaleOnFetchAbort - ) { - res() - // when it eventually resolves, update the cache. - if (options.allowStaleOnFetchAbort) { - res = v => cb(v, true) - } - } - }) - } - if (options.status) options.status.fetchDispatched = true - const p = new Promise(pcall).then(cb, eb) - p.__abortController = ac - p.__staleWhileFetching = v - p.__returned = null - if (index === undefined) { - // internal, don't expose status. 
- this.set(k, p, { ...fetchOpts.options, status: undefined }) - index = this.keyMap.get(k) - } else { - this.valList[index] = p - } - return p - } - - isBackgroundFetch(p) { - return ( - p && - typeof p === 'object' && - typeof p.then === 'function' && - Object.prototype.hasOwnProperty.call( - p, - '__staleWhileFetching' - ) && - Object.prototype.hasOwnProperty.call(p, '__returned') && - (p.__returned === p || p.__returned === null) - ) - } - - // this takes the union of get() and set() opts, because it does both - async fetch( - k, - { - // get options - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - noDeleteOnStaleGet = this.noDeleteOnStaleGet, - // set options - ttl = this.ttl, - noDisposeOnSet = this.noDisposeOnSet, - size = 0, - sizeCalculation = this.sizeCalculation, - noUpdateTTL = this.noUpdateTTL, - // fetch exclusive options - noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, - allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, - ignoreFetchAbort = this.ignoreFetchAbort, - allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, - fetchContext = this.fetchContext, - forceRefresh = false, - status, - signal, - } = {} - ) { - if (!this.fetchMethod) { - if (status) status.fetch = 'get' - return this.get(k, { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - status, - }) - } - - const options = { - allowStale, - updateAgeOnGet, - noDeleteOnStaleGet, - ttl, - noDisposeOnSet, - size, - sizeCalculation, - noUpdateTTL, - noDeleteOnFetchRejection, - allowStaleOnFetchRejection, - allowStaleOnFetchAbort, - ignoreFetchAbort, - status, - signal, - } - - let index = this.keyMap.get(k) - if (index === undefined) { - if (status) status.fetch = 'miss' - const p = this.backgroundFetch(k, index, options, fetchContext) - return (p.__returned = p) - } else { - // in cache, maybe already fetching - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - const stale = - allowStale && v.__staleWhileFetching !== undefined 
- if (status) { - status.fetch = 'inflight' - if (stale) status.returnedStale = true - } - return stale ? v.__staleWhileFetching : (v.__returned = v) - } - - // if we force a refresh, that means do NOT serve the cached value, - // unless we are already in the process of refreshing the cache. - const isStale = this.isStale(index) - if (!forceRefresh && !isStale) { - if (status) status.fetch = 'hit' - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - this.statusTTL(status, index) - return v - } - - // ok, it is stale or a forced refresh, and not already fetching. - // refresh the cache. - const p = this.backgroundFetch(k, index, options, fetchContext) - const hasStale = p.__staleWhileFetching !== undefined - const staleVal = hasStale && allowStale - if (status) { - status.fetch = hasStale && isStale ? 'stale' : 'refresh' - if (staleVal && isStale) status.returnedStale = true - } - return staleVal ? p.__staleWhileFetching : (p.__returned = p) - } - } - - get( - k, - { - allowStale = this.allowStale, - updateAgeOnGet = this.updateAgeOnGet, - noDeleteOnStaleGet = this.noDeleteOnStaleGet, - status, - } = {} - ) { - const index = this.keyMap.get(k) - if (index !== undefined) { - const value = this.valList[index] - const fetching = this.isBackgroundFetch(value) - this.statusTTL(status, index) - if (this.isStale(index)) { - if (status) status.get = 'stale' - // delete only if not an in-flight background fetch - if (!fetching) { - if (!noDeleteOnStaleGet) { - this.delete(k) - } - if (status) status.returnedStale = allowStale - return allowStale ? value : undefined - } else { - if (status) { - status.returnedStale = - allowStale && value.__staleWhileFetching !== undefined - } - return allowStale ? value.__staleWhileFetching : undefined - } - } else { - if (status) status.get = 'hit' - // if we're currently fetching it, we don't actually have it yet - // it's not stale, which means this isn't a staleWhileRefetching. 
- // If it's not stale, and fetching, AND has a __staleWhileFetching - // value, then that means the user fetched with {forceRefresh:true}, - // so it's safe to return that value. - if (fetching) { - return value.__staleWhileFetching - } - this.moveToTail(index) - if (updateAgeOnGet) { - this.updateItemAge(index) - } - return value - } - } else if (status) { - status.get = 'miss' - } - } - - connect(p, n) { - this.prev[n] = p - this.next[p] = n - } - - moveToTail(index) { - // if tail already, nothing to do - // if head, move head to next[index] - // else - // move next[prev[index]] to next[index] (head has no prev) - // move prev[next[index]] to prev[index] - // prev[index] = tail - // next[tail] = index - // tail = index - if (index !== this.tail) { - if (index === this.head) { - this.head = this.next[index] - } else { - this.connect(this.prev[index], this.next[index]) - } - this.connect(this.tail, index) - this.tail = index - } - } - - get del() { - deprecatedMethod('del', 'delete') - return this.delete - } - - delete(k) { - let deleted = false - if (this.size !== 0) { - const index = this.keyMap.get(k) - if (index !== undefined) { - deleted = true - if (this.size === 1) { - this.clear() - } else { - this.removeItemSize(index) - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')) - } else { - this.dispose(v, k, 'delete') - if (this.disposeAfter) { - this.disposed.push([v, k, 'delete']) - } - } - this.keyMap.delete(k) - this.keyList[index] = null - this.valList[index] = null - if (index === this.tail) { - this.tail = this.prev[index] - } else if (index === this.head) { - this.head = this.next[index] - } else { - this.next[this.prev[index]] = this.next[index] - this.prev[this.next[index]] = this.prev[index] - } - this.size-- - this.free.push(index) - } - } - } - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - return deleted - } - - clear() { 
- for (const index of this.rindexes({ allowStale: true })) { - const v = this.valList[index] - if (this.isBackgroundFetch(v)) { - v.__abortController.abort(new Error('deleted')) - } else { - const k = this.keyList[index] - this.dispose(v, k, 'delete') - if (this.disposeAfter) { - this.disposed.push([v, k, 'delete']) - } - } - } - - this.keyMap.clear() - this.valList.fill(null) - this.keyList.fill(null) - if (this.ttls) { - this.ttls.fill(0) - this.starts.fill(0) - } - if (this.sizes) { - this.sizes.fill(0) - } - this.head = 0 - this.tail = 0 - this.initialFill = 1 - this.free.length = 0 - this.calculatedSize = 0 - this.size = 0 - if (this.disposed) { - while (this.disposed.length) { - this.disposeAfter(...this.disposed.shift()) - } - } - } - - get reset() { - deprecatedMethod('reset', 'clear') - return this.clear - } - - get length() { - deprecatedProperty('length', 'size') - return this.size - } - - static get AbortController() { - return AC - } - static get AbortSignal() { - return AS - } -} - -module.exports = LRUCache diff --git a/package-lock.json b/package-lock.json index 2157c89d..b9361a8b 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,31 +1,34 @@ { "name": "lru-cache", - "version": "7.18.3", + "version": "10.0.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "lru-cache", - "version": "7.18.3", + "version": "10.0.1", "license": "ISC", "devDependencies": { "@size-limit/preset-small-lib": "^7.0.8", - "@types/node": "^17.0.31", + "@types/node": "^20.2.5", "@types/tap": "^15.0.6", "benchmark": "^2.1.4", "c8": "^7.11.2", "clock-mock": "^1.0.6", + "esbuild": "^0.17.11", "eslint-config-prettier": "^8.5.0", + "marked": "^4.2.12", + "mkdirp": "^2.1.5", "prettier": "^2.6.2", "size-limit": "^7.0.8", "tap": "^16.3.4", - "ts-node": "^10.7.0", + "ts-node": "^10.9.1", "tslib": "^2.4.0", - "typedoc": "^0.23.24", - "typescript": "^4.6.4" + "typedoc": "^0.24.6", + "typescript": "^5.0.4" }, "engines": { - "node": ">=12" + "node": "14 || 
>=16.14" } }, "node_modules/@ampproject/remapping": { @@ -54,30 +57,30 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.20.14", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.20.14.tgz", - "integrity": "sha512-0YpKHD6ImkWMEINCyDAD0HLLUH/lPCefG8ld9it8DJB2wnApraKuhgYTvTY1z7UFIfBTGy5LwncZ+5HWWGbhFw==", + "version": "7.21.0", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.21.0.tgz", + "integrity": "sha512-gMuZsmsgxk/ENC3O/fRw5QY8A9/uxQbbCEypnLIiYYc/qVJtEV7ouxC3EllIIwNzMqAQee5tanFabWsUOutS7g==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/core": { - "version": "7.20.12", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.20.12.tgz", - "integrity": "sha512-XsMfHovsUYHFMdrIHkZphTN/2Hzzi78R08NuHfDBehym2VsPDL6Zn/JAD/JQdnRvbSsbQc4mVaU1m6JgtTEElg==", + "version": "7.21.0", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.21.0.tgz", + "integrity": "sha512-PuxUbxcW6ZYe656yL3EAhpy7qXKq0DmYsrJLpbB8XrsCP9Nm+XCg9XFMb5vIDliPD7+U/+M+QJlH17XOcB7eXA==", "dev": true, "dependencies": { - "@ampproject/remapping": "^2.1.0", + "@ampproject/remapping": "^2.2.0", "@babel/code-frame": "^7.18.6", - "@babel/generator": "^7.20.7", + "@babel/generator": "^7.21.0", "@babel/helper-compilation-targets": "^7.20.7", - "@babel/helper-module-transforms": "^7.20.11", - "@babel/helpers": "^7.20.7", - "@babel/parser": "^7.20.7", + "@babel/helper-module-transforms": "^7.21.0", + "@babel/helpers": "^7.21.0", + "@babel/parser": "^7.21.0", "@babel/template": "^7.20.7", - "@babel/traverse": "^7.20.12", - "@babel/types": "^7.20.7", + "@babel/traverse": "^7.21.0", + "@babel/types": "^7.21.0", "convert-source-map": "^1.7.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -102,13 +105,14 @@ } }, "node_modules/@babel/generator": { - "version": "7.20.14", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.20.14.tgz", - "integrity": 
"sha512-AEmuXHdcD3A52HHXxaTmYlb8q/xMEhoRP67B3T4Oq7lbmSoqroMZzjnGj3+i1io3pdnF8iBYVu4Ilj+c4hBxYg==", + "version": "7.21.1", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.21.1.tgz", + "integrity": "sha512-1lT45bAYlQhFn/BHivJs43AiW2rg3/UbLyShGfF3C0KmHvO5fSghWd5kBJy30kpRRucGzXStvnnCFniCR2kXAA==", "dev": true, "dependencies": { - "@babel/types": "^7.20.7", + "@babel/types": "^7.21.0", "@jridgewell/gen-mapping": "^0.3.2", + "@jridgewell/trace-mapping": "^0.3.17", "jsesc": "^2.5.1" }, "engines": { @@ -182,13 +186,13 @@ } }, "node_modules/@babel/helper-function-name": { - "version": "7.19.0", - "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.19.0.tgz", - "integrity": "sha512-WAwHBINyrpqywkUH0nTnNgI5ina5TFn85HKS0pbPDfxFfhyR/aNQEn4hGi1P1JyT//I0t4OgXUlofzWILRvS5w==", + "version": "7.21.0", + "resolved": "https://registry.npmjs.org/@babel/helper-function-name/-/helper-function-name-7.21.0.tgz", + "integrity": "sha512-HfK1aMRanKHpxemaY2gqBmL04iAPOPRj7DxtNbiDOrJK+gdwkiNRVpCpUJYbUT+aZyemKN8brqTOxzCaG6ExRg==", "dev": true, "dependencies": { - "@babel/template": "^7.18.10", - "@babel/types": "^7.19.0" + "@babel/template": "^7.20.7", + "@babel/types": "^7.21.0" }, "engines": { "node": ">=6.9.0" @@ -219,9 +223,9 @@ } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.20.11", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.20.11.tgz", - "integrity": "sha512-uRy78kN4psmji1s2QtbtcCSaj/LILFDp0f/ymhpQH5QY3nljUZCaNWz9X1dEj/8MBdBEFECs7yRhKn8i7NjZgg==", + "version": "7.21.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.21.2.tgz", + "integrity": "sha512-79yj2AR4U/Oqq/WOV7Lx6hUjau1Zfo4cI+JLAVYeMV5XIlbOhmjEk5ulbTc9fMpmlojzZHkUUxAiK+UKn+hNQQ==", "dev": true, "dependencies": { "@babel/helper-environment-visitor": "^7.18.9", @@ -230,8 +234,8 @@ "@babel/helper-split-export-declaration": "^7.18.6", 
"@babel/helper-validator-identifier": "^7.19.1", "@babel/template": "^7.20.7", - "@babel/traverse": "^7.20.10", - "@babel/types": "^7.20.7" + "@babel/traverse": "^7.21.2", + "@babel/types": "^7.21.2" }, "engines": { "node": ">=6.9.0" @@ -280,23 +284,23 @@ } }, "node_modules/@babel/helper-validator-option": { - "version": "7.18.6", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.18.6.tgz", - "integrity": "sha512-XO7gESt5ouv/LRJdrVjkShckw6STTaB7l9BrpBaAHDeF5YZT+01PCwmR0SJHnkW6i8OwW/EVWRShfi4j2x+KQw==", + "version": "7.21.0", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.21.0.tgz", + "integrity": "sha512-rmL/B8/f0mKS2baE9ZpyTcTavvEuWhTTW8amjzXNvYG4AwBsqTLikfXsEofsJEfKHf+HQVQbFOHy6o+4cnC/fQ==", "dev": true, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helpers": { - "version": "7.20.13", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.20.13.tgz", - "integrity": "sha512-nzJ0DWCL3gB5RCXbUO3KIMMsBY2Eqbx8mBpKGE/02PgyRQFcPQLbkQ1vyy596mZLaP+dAfD+R4ckASzNVmW3jg==", + "version": "7.21.0", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.21.0.tgz", + "integrity": "sha512-XXve0CBtOW0pd7MRzzmoyuSj0e3SEzj8pgyFxnTT1NJZL38BD1MK7yYrm8yefRPIDvNNe14xR4FdbHwpInD4rA==", "dev": true, "dependencies": { "@babel/template": "^7.20.7", - "@babel/traverse": "^7.20.13", - "@babel/types": "^7.20.7" + "@babel/traverse": "^7.21.0", + "@babel/types": "^7.21.0" }, "engines": { "node": ">=6.9.0" @@ -388,9 +392,9 @@ } }, "node_modules/@babel/parser": { - "version": "7.20.13", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.20.13.tgz", - "integrity": "sha512-gFDLKMfpiXCsjt4za2JA9oTMn70CeseCehb11kRZgvd7+F67Hih3OHOK24cRrWECJ/ljfPGac6ygXAs/C8kIvw==", + "version": "7.21.2", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.21.2.tgz", + "integrity": 
"sha512-URpaIJQwEkEC2T9Kn+Ai6Xe/02iNaVCuT/PtoRz3GPVJVDpPd7mLo+VddTbhCRU9TXqW5mSrQfXZyi8kDKOVpQ==", "dev": true, "bin": { "parser": "bin/babel-parser.js" @@ -414,19 +418,19 @@ } }, "node_modules/@babel/traverse": { - "version": "7.20.13", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.20.13.tgz", - "integrity": "sha512-kMJXfF0T6DIS9E8cgdLCSAL+cuCK+YEZHWiLK0SXpTo8YRj5lpJu3CDNKiIBCne4m9hhTIqUg6SYTAI39tAiVQ==", + "version": "7.21.2", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.21.2.tgz", + "integrity": "sha512-ts5FFU/dSUPS13tv8XiEObDu9K+iagEKME9kAbaP7r0Y9KtZJZ+NGndDvWoRAYNpeWafbpFeki3q9QoMD6gxyw==", "dev": true, "dependencies": { "@babel/code-frame": "^7.18.6", - "@babel/generator": "^7.20.7", + "@babel/generator": "^7.21.1", "@babel/helper-environment-visitor": "^7.18.9", - "@babel/helper-function-name": "^7.19.0", + "@babel/helper-function-name": "^7.21.0", "@babel/helper-hoist-variables": "^7.18.6", "@babel/helper-split-export-declaration": "^7.18.6", - "@babel/parser": "^7.20.13", - "@babel/types": "^7.20.7", + "@babel/parser": "^7.21.2", + "@babel/types": "^7.21.2", "debug": "^4.1.0", "globals": "^11.1.0" }, @@ -444,9 +448,9 @@ } }, "node_modules/@babel/types": { - "version": "7.20.7", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.20.7.tgz", - "integrity": "sha512-69OnhBxSSgK0OzTJai4kyPDiKTIe3j+ctaHdIGVbRahTLAT7L3R9oeXHC2aVSuGYt3cVnoAMDmOCgJ2yaiLMvg==", + "version": "7.21.2", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.21.2.tgz", + "integrity": "sha512-3wRZSs7jiFaB8AjxiiD+VqN5DTG2iRvJGQ+qYFrs/654lg6kGTQWIOFjlBo5RaXuAZjBmP3+OQH4dmhqiiyYxw==", "dev": true, "dependencies": { "@babel/helper-string-parser": "^7.19.4", @@ -485,10 +489,170 @@ "@jridgewell/sourcemap-codec": "^1.4.10" } }, + "node_modules/@esbuild/android-arm": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.17.11.tgz", + "integrity": 
"sha512-CdyX6sRVh1NzFCsf5vw3kULwlAhfy9wVt8SZlrhQ7eL2qBjGbFhRBWkkAzuZm9IIEOCKJw4DXA6R85g+qc8RDw==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.17.11.tgz", + "integrity": "sha512-QnK4d/zhVTuV4/pRM4HUjcsbl43POALU2zvBynmrrqZt9LPcLA3x1fTZPBg2RRguBQnJcnU059yKr+bydkntjg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.17.11.tgz", + "integrity": "sha512-3PL3HKtsDIXGQcSCKtWD/dy+mgc4p2Tvo2qKgKHj9Yf+eniwFnuoQ0OUhlSfAEpKAFzF9N21Nwgnap6zy3L3MQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.17.11.tgz", + "integrity": "sha512-pJ950bNKgzhkGNO3Z9TeHzIFtEyC2GDQL3wxkMApDEghYx5Qers84UTNc1bAxWbRkuJOgmOha5V0WUeh8G+YGw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.17.11.tgz", + "integrity": "sha512-iB0dQkIHXyczK3BZtzw1tqegf0F0Ab5texX2TvMQjiJIWXAfM4FQl7D909YfXWnB92OQz4ivBYQ2RlxBJrMJOw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.17.11.tgz", + "integrity": 
"sha512-7EFzUADmI1jCHeDRGKgbnF5sDIceZsQGapoO6dmw7r/ZBEKX7CCDnIz8m9yEclzr7mFsd+DyasHzpjfJnmBB1Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.17.11.tgz", + "integrity": "sha512-iPgenptC8i8pdvkHQvXJFzc1eVMR7W2lBPrTE6GbhR54sLcF42mk3zBOjKPOodezzuAz/KSu8CPyFSjcBMkE9g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.17.11.tgz", + "integrity": "sha512-M9iK/d4lgZH0U5M1R2p2gqhPV/7JPJcRz+8O8GBKVgqndTzydQ7B2XGDbxtbvFkvIs53uXTobOhv+RyaqhUiMg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.17.11.tgz", + "integrity": "sha512-Qxth3gsWWGKz2/qG2d5DsW/57SeA2AmpSMhdg9TSB5Svn2KDob3qxfQSkdnWjSd42kqoxIPy3EJFs+6w1+6Qjg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.17.11.tgz", + "integrity": "sha512-dB1nGaVWtUlb/rRDHmuDQhfqazWE0LMro/AIbT2lWM3CDMHJNpLckH+gCddQyhhcLac2OYw69ikUMO34JLt3wA==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, "node_modules/@esbuild/linux-loong64": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.14.54.tgz", - "integrity": 
"sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw==", + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.17.11.tgz", + "integrity": "sha512-aCWlq70Q7Nc9WDnormntGS1ar6ZFvUpqr8gXtO+HRejRYPweAFQN615PcgaSJkZjhHp61+MNLhzyVALSF2/Q0g==", "cpu": [ "loong64" ], @@ -501,10 +665,186 @@ "node": ">=12" } }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.17.11.tgz", + "integrity": "sha512-cGeGNdQxqY8qJwlYH1BP6rjIIiEcrM05H7k3tR7WxOLmD1ZxRMd6/QIOWMb8mD2s2YJFNRuNQ+wjMhgEL2oCEw==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.17.11.tgz", + "integrity": "sha512-BdlziJQPW/bNe0E8eYsHB40mYOluS+jULPCjlWiHzDgr+ZBRXPtgMV1nkLEGdpjrwgmtkZHEGEPaKdS/8faLDA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.17.11.tgz", + "integrity": "sha512-MDLwQbtF+83oJCI1Cixn68Et/ME6gelmhssPebC40RdJaect+IM+l7o/CuG0ZlDs6tZTEIoxUe53H3GmMn8oMA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.17.11.tgz", + "integrity": "sha512-4N5EMESvws0Ozr2J94VoUD8HIRi7X0uvUv4c0wpTHZyZY9qpaaN7THjosdiW56irQ4qnJ6Lsc+i+5zGWnyqWqQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } 
+ }, + "node_modules/@esbuild/linux-x64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.17.11.tgz", + "integrity": "sha512-rM/v8UlluxpytFSmVdbCe1yyKQd/e+FmIJE2oPJvbBo+D0XVWi1y/NQ4iTNx+436WmDHQBjVLrbnAQLQ6U7wlw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.17.11.tgz", + "integrity": "sha512-4WaAhuz5f91h3/g43VBGdto1Q+X7VEZfpcWGtOFXnggEuLvjV+cP6DyLRU15IjiU9fKLLk41OoJfBFN5DhPvag==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.17.11.tgz", + "integrity": "sha512-UBj135Nx4FpnvtE+C8TWGp98oUgBcmNmdYgl5ToKc0mBHxVVqVE7FUS5/ELMImOp205qDAittL6Ezhasc2Ev/w==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.17.11.tgz", + "integrity": "sha512-1/gxTifDC9aXbV2xOfCbOceh5AlIidUrPsMpivgzo8P8zUtczlq1ncFpeN1ZyQJ9lVs2hILy1PG5KPp+w8QPPg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.17.11.tgz", + "integrity": "sha512-vtSfyx5yRdpiOW9yp6Ax0zyNOv9HjOAw8WaZg3dF5djEHKKm3UnoohftVvIJtRh0Ec7Hso0RIdTqZvPXJ7FdvQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + 
"node_modules/@esbuild/win32-ia32": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.17.11.tgz", + "integrity": "sha512-GFPSLEGQr4wHFTiIUJQrnJKZhZjjq4Sphf+mM76nQR6WkQn73vm7IsacmBRPkALfpOCHsopSvLgqdd4iUW2mYw==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.17.11.tgz", + "integrity": "sha512-N9vXqLP3eRL8BqSy8yn4Y98cZI2pZ8fyuHx6lKjiG2WABpT2l01TXdzq5Ma2ZUBzfB7tx5dXVhge8X9u0S70ZQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, "node_modules/@eslint/eslintrc": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-1.4.1.tgz", - "integrity": "sha512-XXrH9Uarn0stsyldqDYq8r++mROmWRI1xKMXa640Bb//SY1+ECYX6VzT6Lcx5frD0V30XieqJ0oX9I2Xj5aoMA==", + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-2.0.0.tgz", + "integrity": "sha512-fluIaaV+GyV24CCu/ggiHdV+j4RNh85yQnAYS/G2mZODZgGmmlrgCydjUcV3YvxCm9x8nMAfThsqTni4KiXT4A==", "dev": true, "peer": true, "dependencies": { @@ -525,6 +865,16 @@ "url": "https://opencollective.com/eslint" } }, + "node_modules/@eslint/js": { + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.35.0.tgz", + "integrity": "sha512-JXdzbRiWclLVoD8sNUjR443VVlYqiYmDVT6rGUEIEHU5YJW0gaVZwV2xgM7D4arkvASqD0IlLUVjHiFuxaftRw==", + "dev": true, + "peer": true, + "engines": { + "node": "^12.22.0 || ^14.17.0 || >=16.0.0" + } + }, "node_modules/@humanwhocodes/config-array": { "version": "0.11.8", "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.8.tgz", @@ -767,6 +1117,58 @@ "size-limit": "7.0.8" } }, + "node_modules/@size-limit/esbuild/node_modules/@esbuild/linux-loong64": { + 
"version": "0.14.54", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.14.54.tgz", + "integrity": "sha512-bZBrLAIX1kpWelV0XemxBZllyRmM6vgFQQG2GdNb+r3Fkp0FOh1NJSvekXDs7jq70k4euu1cryLMfU+mTXlEpw==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@size-limit/esbuild/node_modules/esbuild": { + "version": "0.14.54", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.54.tgz", + "integrity": "sha512-Cy9llcy8DvET5uznocPyqL3BFRrFXSVqbgpMJ9Wz8oVjZlh/zUSNbPRbov0VX7VxN2JH1Oa0uNxZ7eLRb62pJA==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/linux-loong64": "0.14.54", + "esbuild-android-64": "0.14.54", + "esbuild-android-arm64": "0.14.54", + "esbuild-darwin-64": "0.14.54", + "esbuild-darwin-arm64": "0.14.54", + "esbuild-freebsd-64": "0.14.54", + "esbuild-freebsd-arm64": "0.14.54", + "esbuild-linux-32": "0.14.54", + "esbuild-linux-64": "0.14.54", + "esbuild-linux-arm": "0.14.54", + "esbuild-linux-arm64": "0.14.54", + "esbuild-linux-mips64le": "0.14.54", + "esbuild-linux-ppc64le": "0.14.54", + "esbuild-linux-riscv64": "0.14.54", + "esbuild-linux-s390x": "0.14.54", + "esbuild-netbsd-64": "0.14.54", + "esbuild-openbsd-64": "0.14.54", + "esbuild-sunos-64": "0.14.54", + "esbuild-windows-32": "0.14.54", + "esbuild-windows-64": "0.14.54", + "esbuild-windows-arm64": "0.14.54" + } + }, "node_modules/@size-limit/file": { "version": "7.0.8", "resolved": "https://registry.npmjs.org/@size-limit/file/-/file-7.0.8.tgz", @@ -826,15 +1228,15 @@ "dev": true }, "node_modules/@types/node": { - "version": "17.0.45", - "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", - "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", + "version": "20.2.5", + "resolved": 
"https://registry.npmjs.org/@types/node/-/node-20.2.5.tgz", + "integrity": "sha512-JJulVEQXmiY9Px5axXHeYGLSjhkZEnD+MDPDGbCbIAbMslkKwmygtZFy1X6s/075Yo94sf8GuSlFfPzysQrWZQ==", "dev": true }, "node_modules/@types/tap": { - "version": "15.0.7", - "resolved": "https://registry.npmjs.org/@types/tap/-/tap-15.0.7.tgz", - "integrity": "sha512-TTMajw4gxQfFgYbhXhy/Tb2OiNcwS+4oP/9yp1/GdU0pFJo3wtnkYhRgmQy39ksh+rnoa0VrPHJ4Tuv2cLNQ5A==", + "version": "15.0.8", + "resolved": "https://registry.npmjs.org/@types/tap/-/tap-15.0.8.tgz", + "integrity": "sha512-ZfeoiZlLIaFi4t6wccwbTEicrHREkP0bOq8dZVi/nWvG5F8O7LlS2cSUZBiOW/D4cgWS/p2uhM3lJoyzFAl80w==", "dev": true, "dependencies": { "@types/node": "*" @@ -910,6 +1312,12 @@ "node": ">=8" } }, + "node_modules/ansi-sequence-parser": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/ansi-sequence-parser/-/ansi-sequence-parser-1.1.0.tgz", + "integrity": "sha512-lEm8mt52to2fT8GhciPCGeCXACSz2UwIN4X2e2LJSnZ5uAbn2/dsYdOmUXq0AtWS5cpAupysIneExOgH0Vd2TQ==", + "dev": true + }, "node_modules/ansi-styles": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", @@ -1044,9 +1452,9 @@ } }, "node_modules/browserslist": { - "version": "4.21.4", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.4.tgz", - "integrity": "sha512-CBHJJdDmgjl3daYjN5Cp5kbTf1mUhZoS+beLklHIvkOWscs83YAhLlF3Wsh/lciQYAcbBJgTOD44VtG31ZM4Hw==", + "version": "4.21.5", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.21.5.tgz", + "integrity": "sha512-tUkiguQGW7S3IhB7N+c2MV/HZPSCPAAiYBZXLsBhFB/PCy6ZKKsZrmBayHV9fdGV/ARIfJ14NkxKzRDjvp7L6w==", "dev": true, "funding": [ { @@ -1059,10 +1467,10 @@ } ], "dependencies": { - "caniuse-lite": "^1.0.30001400", - "electron-to-chromium": "^1.4.251", - "node-releases": "^2.0.6", - "update-browserslist-db": "^1.0.9" + "caniuse-lite": "^1.0.30001449", + "electron-to-chromium": "^1.4.284", + "node-releases": "^2.0.8", + "update-browserslist-db": 
"^1.0.10" }, "bin": { "browserslist": "cli.js" @@ -1087,9 +1495,9 @@ } }, "node_modules/c8": { - "version": "7.12.0", - "resolved": "https://registry.npmjs.org/c8/-/c8-7.12.0.tgz", - "integrity": "sha512-CtgQrHOkyxr5koX1wEUmN/5cfDa2ckbHRA4Gy5LAL0zaCFtVWJS5++n+w4/sr2GWGerBxgTjpKeDclk/Qk6W/A==", + "version": "7.13.0", + "resolved": "https://registry.npmjs.org/c8/-/c8-7.13.0.tgz", + "integrity": "sha512-/NL4hQTv1gBL6J6ei80zu3IiTrmePDKXKXOTLpHvcIWZTVYQlDhVWjjWvkhICylE8EwwnMVzDZugCvdx0/DIIA==", "dev": true, "dependencies": { "@bcoe/v8-coverage": "^0.2.3", @@ -1147,9 +1555,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001449", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001449.tgz", - "integrity": "sha512-CPB+UL9XMT/Av+pJxCKGhdx+yg1hzplvFJQlJ2n68PyQGMz9L/E2zCyLdOL8uasbouTUgnPl+y0tccI/se+BEw==", + "version": "1.0.30001460", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001460.tgz", + "integrity": "sha512-Bud7abqjvEjipUkpLs4D7gR0l8hBYBHoa+tGtKJHvT2AYzLp1z7EmVkUT4ERpVUfca8S2HGIVs883D8pUH1ZzQ==", "dev": true, "funding": [ { @@ -1245,9 +1653,9 @@ } }, "node_modules/clock-mock": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/clock-mock/-/clock-mock-1.0.6.tgz", - "integrity": "sha512-mPXwJHSN8pWm5H8l42taxldL+LA5dTHd/S7TldqvJy7RkMM3+HwsvLt3g1mQTFcu1oK9nV+9+laKf1hSM9EiVQ==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/clock-mock/-/clock-mock-1.1.0.tgz", + "integrity": "sha512-YL1fqZqrdZ9jHzBvwcuG2oFdfJ4Mkh4H8t+ue3ZIdKmWUWkkUMMNUnZ1akLyfC1rFwdVt0lVE0llDh742K0fcg==", "dev": true, "engines": { "node": ">= 12" @@ -1401,9 +1809,9 @@ } }, "node_modules/electron-to-chromium": { - "version": "1.4.284", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz", - "integrity": "sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA==", + "version": "1.4.320", + "resolved": 
"https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.4.320.tgz", + "integrity": "sha512-h70iRscrNluMZPVICXYl5SSB+rBKo22XfuIS1ER0OQxQZpKTnFpuS6coj7wY9M/3trv7OR88rRMOlKmRvDty7Q==", "dev": true }, "node_modules/emoji-regex": { @@ -1419,9 +1827,9 @@ "dev": true }, "node_modules/esbuild": { - "version": "0.14.54", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.14.54.tgz", - "integrity": "sha512-Cy9llcy8DvET5uznocPyqL3BFRrFXSVqbgpMJ9Wz8oVjZlh/zUSNbPRbov0VX7VxN2JH1Oa0uNxZ7eLRb62pJA==", + "version": "0.17.11", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.17.11.tgz", + "integrity": "sha512-pAMImyokbWDtnA/ufPxjQg0fYo2DDuzAlqwnDvbXqHLphe+m80eF++perYKVm8LeTuj2zUuFXC+xgSVxyoHUdg==", "dev": true, "hasInstallScript": true, "bin": { @@ -1431,27 +1839,28 @@ "node": ">=12" }, "optionalDependencies": { - "@esbuild/linux-loong64": "0.14.54", - "esbuild-android-64": "0.14.54", - "esbuild-android-arm64": "0.14.54", - "esbuild-darwin-64": "0.14.54", - "esbuild-darwin-arm64": "0.14.54", - "esbuild-freebsd-64": "0.14.54", - "esbuild-freebsd-arm64": "0.14.54", - "esbuild-linux-32": "0.14.54", - "esbuild-linux-64": "0.14.54", - "esbuild-linux-arm": "0.14.54", - "esbuild-linux-arm64": "0.14.54", - "esbuild-linux-mips64le": "0.14.54", - "esbuild-linux-ppc64le": "0.14.54", - "esbuild-linux-riscv64": "0.14.54", - "esbuild-linux-s390x": "0.14.54", - "esbuild-netbsd-64": "0.14.54", - "esbuild-openbsd-64": "0.14.54", - "esbuild-sunos-64": "0.14.54", - "esbuild-windows-32": "0.14.54", - "esbuild-windows-64": "0.14.54", - "esbuild-windows-arm64": "0.14.54" + "@esbuild/android-arm": "0.17.11", + "@esbuild/android-arm64": "0.17.11", + "@esbuild/android-x64": "0.17.11", + "@esbuild/darwin-arm64": "0.17.11", + "@esbuild/darwin-x64": "0.17.11", + "@esbuild/freebsd-arm64": "0.17.11", + "@esbuild/freebsd-x64": "0.17.11", + "@esbuild/linux-arm": "0.17.11", + "@esbuild/linux-arm64": "0.17.11", + "@esbuild/linux-ia32": "0.17.11", + "@esbuild/linux-loong64": 
"0.17.11", + "@esbuild/linux-mips64el": "0.17.11", + "@esbuild/linux-ppc64": "0.17.11", + "@esbuild/linux-riscv64": "0.17.11", + "@esbuild/linux-s390x": "0.17.11", + "@esbuild/linux-x64": "0.17.11", + "@esbuild/netbsd-x64": "0.17.11", + "@esbuild/openbsd-x64": "0.17.11", + "@esbuild/sunos-x64": "0.17.11", + "@esbuild/win32-arm64": "0.17.11", + "@esbuild/win32-ia32": "0.17.11", + "@esbuild/win32-x64": "0.17.11" } }, "node_modules/esbuild-android-64": { @@ -1797,13 +2206,14 @@ } }, "node_modules/eslint": { - "version": "8.33.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.33.0.tgz", - "integrity": "sha512-WjOpFQgKK8VrCnAtl8We0SUOy/oVZ5NHykyMiagV1M9r8IFpIJX7DduK6n1mpfhlG7T1NLWm2SuD8QB7KFySaA==", + "version": "8.35.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.35.0.tgz", + "integrity": "sha512-BxAf1fVL7w+JLRQhWl2pzGeSiGqbWumV4WNvc9Rhp6tiCtm4oHnyPBSEtMGZwrQgudFQ+otqzWoPB7x+hxoWsw==", "dev": true, "peer": true, "dependencies": { - "@eslint/eslintrc": "^1.4.1", + "@eslint/eslintrc": "^2.0.0", + "@eslint/js": "8.35.0", "@humanwhocodes/config-array": "^0.11.8", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", @@ -1817,7 +2227,7 @@ "eslint-utils": "^3.0.0", "eslint-visitor-keys": "^3.3.0", "espree": "^9.4.0", - "esquery": "^1.4.0", + "esquery": "^1.4.2", "esutils": "^2.0.2", "fast-deep-equal": "^3.1.3", "file-entry-cache": "^6.0.1", @@ -1950,9 +2360,9 @@ } }, "node_modules/esquery": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.4.0.tgz", - "integrity": "sha512-cCDispWt5vHHtwMY2YrAQ4ibFkAL8RbH5YGBnZBc90MolvvfkkQcJro/aZiAQUlQ3qgrYS6D6v8Gc5G5CQsc9w==", + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.5.0.tgz", + "integrity": "sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==", "dev": true, "peer": true, "dependencies": { @@ -2743,9 +3153,9 @@ } }, "node_modules/lilconfig": { - "version": "2.0.6", - 
"resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.0.6.tgz", - "integrity": "sha512-9JROoBW7pobfsx+Sq2JsASvCo6Pfo6WWoUW79HuB1BCoBXD4PLWJPqDF6fNj67pqBYTbAHkE57M1kS/+L1neOg==", + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/lilconfig/-/lilconfig-2.1.0.tgz", + "integrity": "sha512-utWOt/GHzuUxnLKxB6dk81RoOeoNeHgbrXiuGk4yyF5qlRz+iIVWu56E2fqGHFrXz0QNUhLB/8nKqvRH66JKGQ==", "dev": true, "engines": { "node": ">=10" @@ -2834,9 +3244,9 @@ "dev": true }, "node_modules/marked": { - "version": "4.2.12", - "resolved": "https://registry.npmjs.org/marked/-/marked-4.2.12.tgz", - "integrity": "sha512-yr8hSKa3Fv4D3jdZmtMMPghgVt6TWbk86WQaWhDloQjRSQhMMYCAro7jP7VDJrjjdV8pxVxMssXS8B8Y5DZ5aw==", + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/marked/-/marked-4.3.0.tgz", + "integrity": "sha512-PRsaiG84bK+AMvxziE/lCFss8juXjNaWzVbN5tXAm4XjeaS9NAHhop+PjQxz2A9h8Q4M/xGmzP8vqNwy6JeK0A==", "dev": true, "bin": { "marked": "bin/marked.js" @@ -2892,15 +3302,18 @@ } }, "node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-2.1.5.tgz", + "integrity": "sha512-jbjfql+shJtAPrFoKxHOXip4xS+kul9W3OzfzzrqueWK2QMGon2bFH2opl6W9EagBThjEz+iysyi/swOoVfB/w==", "dev": true, "bin": { - "mkdirp": "bin/cmd.js" + "mkdirp": "dist/cjs/src/bin.js" }, "engines": { "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" } }, "node_modules/ms": { @@ -2950,9 +3363,9 @@ } }, "node_modules/node-releases": { - "version": "2.0.8", - "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.8.tgz", - "integrity": "sha512-dFSmB8fFHEH/s81Xi+Y/15DQY6VHW81nXRj86EMSL3lmuTmK1e+aT4wrFCkTbm+gSwkw4KpX+rT/pMM2c1mF+A==", + "version": "2.0.10", + "resolved": 
"https://registry.npmjs.org/node-releases/-/node-releases-2.0.10.tgz", + "integrity": "sha512-5GFldHPXVG/YZmFzJvKK2zDSzPKhEp0+ZR5SVaoSag9fsL5YgHbUHDfnG5494ISANDcK4KwPXAx2xqVEydmd7w==", "dev": true }, "node_modules/normalize-path": { @@ -3397,9 +3810,9 @@ } }, "node_modules/prettier": { - "version": "2.8.3", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.3.tgz", - "integrity": "sha512-tJ/oJ4amDihPoufT5sM0Z1SKEuKay8LfVAMlbbhnnkvt6BUserZylqo2PN+p9KeljLr0OHa2rXHU1T8reeoTrw==", + "version": "2.8.4", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-2.8.4.tgz", + "integrity": "sha512-vIS4Rlc2FNh0BySk3Wkd6xmwxB0FpOndW5fisM5H8hsZSxU2VWVB5CWIkIjWvrHjIhxk2g3bfMKM87zNTrZddw==", "dev": true, "bin": { "prettier": "bin-prettier.js" @@ -3605,11 +4018,12 @@ } }, "node_modules/shiki": { - "version": "0.12.1", - "resolved": "https://registry.npmjs.org/shiki/-/shiki-0.12.1.tgz", - "integrity": "sha512-aieaV1m349rZINEBkjxh2QbBvFFQOlgqYTNtCal82hHj4dDZ76oMlQIX+C7ryerBTDiga3e5NfH6smjdJ02BbQ==", + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/shiki/-/shiki-0.14.1.tgz", + "integrity": "sha512-+Jz4nBkCBe0mEDqo1eKRcCdjRtrCjozmcbTUjbPTX7OOJfEbTZzlUWlZtGe3Gb5oV1/jnojhG//YZc3rs9zSEw==", "dev": true, "dependencies": { + "ansi-sequence-parser": "^1.1.0", "jsonc-parser": "^3.2.0", "vscode-oniguruma": "^1.7.0", "vscode-textmate": "^8.0.0" @@ -3643,6 +4057,18 @@ "node": "^12.0.0 || ^14.0.0 || >=16.0.0" } }, + "node_modules/size-limit/node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/slash": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", @@ -5119,6 +5545,18 @@ "node": ">=8" } }, + "node_modules/tap/node_modules/mkdirp": { + 
"version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, "node_modules/tap/node_modules/ms": { "version": "2.1.2", "dev": true, @@ -5914,15 +6352,15 @@ } }, "node_modules/typedoc": { - "version": "0.23.24", - "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.23.24.tgz", - "integrity": "sha512-bfmy8lNQh+WrPYcJbtjQ6JEEsVl/ce1ZIXyXhyW+a1vFrjO39t6J8sL/d6FfAGrJTc7McCXgk9AanYBSNvLdIA==", + "version": "0.24.6", + "resolved": "https://registry.npmjs.org/typedoc/-/typedoc-0.24.6.tgz", + "integrity": "sha512-c3y3h45xJv3qYwKDAwU6Cl+26CjT0ZvblHzfHJ+SjQDM4p1mZxtgHky4lhmG0+nNarRht8kADfZlbspJWdZarQ==", "dev": true, "dependencies": { "lunr": "^2.3.9", - "marked": "^4.2.5", - "minimatch": "^5.1.2", - "shiki": "^0.12.1" + "marked": "^4.3.0", + "minimatch": "^9.0.0", + "shiki": "^0.14.1" }, "bin": { "typedoc": "bin/typedoc" @@ -5931,7 +6369,7 @@ "node": ">= 14.14" }, "peerDependencies": { - "typescript": "4.6.x || 4.7.x || 4.8.x || 4.9.x" + "typescript": "4.6.x || 4.7.x || 4.8.x || 4.9.x || 5.0.x" } }, "node_modules/typedoc/node_modules/brace-expansion": { @@ -5944,28 +6382,31 @@ } }, "node_modules/typedoc/node_modules/minimatch": { - "version": "5.1.6", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-5.1.6.tgz", - "integrity": "sha512-lKwV/1brpG6mBUFHtb7NUmtABCb2WZZmm2wNiOA5hAb8VdCS4B3dtMWyvcoViccwAW/COERjXLt0zP1zXUN26g==", + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.0.tgz", + "integrity": "sha512-0jJj8AvgKqWN05mrwuqi8QYKx1WmYSUoKSxu5Qhs9prezTz10sxAHGNZe9J9cqIJzta8DWsleh2KaVaLl6Ru2w==", "dev": true, "dependencies": { "brace-expansion": "^2.0.1" }, "engines": { - "node": ">=10" + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" 
} }, "node_modules/typescript": { - "version": "4.9.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.9.4.tgz", - "integrity": "sha512-Uz+dTXYzxXXbsFpM86Wh3dKCxrQqUcVMxwU54orwlJjOpO3ao8L7j5lH+dWfTwgCwIuM9GQ2kvVotzYJMXTBZg==", + "version": "5.0.4", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.0.4.tgz", + "integrity": "sha512-cW9T5W9xY37cc+jfEnaUvX91foxtHkza3Nw3wkoF4sSlKn0MONdkdEndig/qPBWXNkmplh3NzayQzCiHM4/hqw==", "dev": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" }, "engines": { - "node": ">=4.2.0" + "node": ">=12.20" } }, "node_modules/unicode-length": { @@ -6029,9 +6470,9 @@ "dev": true }, "node_modules/v8-to-istanbul": { - "version": "9.0.1", - "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.0.1.tgz", - "integrity": "sha512-74Y4LqY74kLE6IFyIjPtkSTWzUZmj8tdHT9Ii/26dvQ6K9Dl2NbEfj0XgU2sHCtKgt5VupqhlO/5aWuqS+IY1w==", + "version": "9.1.0", + "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.1.0.tgz", + "integrity": "sha512-6z3GW9x8G1gd+JIIgQQQxXuiJtCXeAjp6RaPEPLv62mH3iPHPxV6W3robxtCzNErRo6ZwTmzWhsbNvjyEBKzKA==", "dev": true, "dependencies": { "@jridgewell/trace-mapping": "^0.3.12", diff --git a/package.json b/package.json index 96849917..bae4a048 100644 --- a/package.json +++ b/package.json @@ -1,7 +1,7 @@ { "name": "lru-cache", "description": "A cache object that deletes the least-recently-used items.", - "version": "7.18.3", + "version": "10.0.1", "author": "Isaac Z. 
Schlueter ", "keywords": [ "mru", @@ -11,60 +11,74 @@ "sideEffects": false, "scripts": { "build": "npm run prepare", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", + "postprepare": "bash fixup.sh", "pretest": "npm run prepare", "presnap": "npm run prepare", - "prepare": "node ./scripts/transpile-to-esm.js", - "size": "size-limit", - "test": "tap", - "snap": "tap", + "test": "c8 tap", + "snap": "c8 tap", "preversion": "npm test", "postversion": "npm publish", "prepublishOnly": "git push origin --follow-tags", "format": "prettier --write .", - "typedoc": "typedoc ./index.d.ts" + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts", + "benchmark-results-typedoc": "bash scripts/benchmark-results-typedoc.sh", + "prebenchmark": "npm run prepare", + "benchmark": "make -C benchmark", + "preprofile": "npm run prepare", + "profile": "make -C benchmark profile" }, - "type": "commonjs", - "main": "./index.js", - "module": "./index.mjs", - "types": "./index.d.ts", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", "exports": { - ".": { + "./min": { "import": { - "types": "./index.d.ts", - "default": "./index.mjs" + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.min.js" }, "require": { - "types": "./index.d.ts", - "default": "./index.js" + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.min.js" } }, - "./package.json": "./package.json" + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + } }, "repository": "git://github.com/isaacs/node-lru-cache.git", "devDependencies": { "@size-limit/preset-small-lib": "^7.0.8", - "@types/node": "^17.0.31", + "@types/node": "^20.2.5", "@types/tap": "^15.0.6", "benchmark": "^2.1.4", "c8": "^7.11.2", "clock-mock": "^1.0.6", + "esbuild": "^0.17.11", "eslint-config-prettier": "^8.5.0", + "marked": 
"^4.2.12", + "mkdirp": "^2.1.5", "prettier": "^2.6.2", "size-limit": "^7.0.8", "tap": "^16.3.4", - "ts-node": "^10.7.0", + "ts-node": "^10.9.1", "tslib": "^2.4.0", - "typedoc": "^0.23.24", - "typescript": "^4.6.4" + "typedoc": "^0.24.6", + "typescript": "^5.0.4" }, "license": "ISC", "files": [ - "index.js", - "index.mjs", - "index.d.ts" + "dist" ], "engines": { - "node": ">=12" + "node": "14 || >=16.14" }, "prettier": { "semi": false, @@ -78,19 +92,17 @@ "endOfLine": "lf" }, "tap": { - "nyc-arg": [ - "--include=index.js" - ], + "coverage": false, "node-arg": [ "--expose-gc", - "--require", + "-r", "ts-node/register" ], "ts": false }, "size-limit": [ { - "path": "./index.js" + "path": "./dist/mjs/index.js" } ] } diff --git a/scripts/benchmark-results-typedoc.sh b/scripts/benchmark-results-typedoc.sh new file mode 100644 index 00000000..3a25e656 --- /dev/null +++ b/scripts/benchmark-results-typedoc.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash +set -x +set -e +mkdir -p docs/benchmark/results +cp -r benchmark/results/* docs/benchmark/results/ +echo 'benchmark results overview' > docs/benchmark/index.html +echo '' >> docs/benchmark/index.html +echo '

raw CSV results

' >> docs/benchmark/index.html +marked < benchmark/results.md >> docs/benchmark/index.html +echo '' > docs/benchmark/results/index.html +echo 'benchmark results' >> docs/benchmark/results/index.html +echo '
    ' >> docs/benchmark/results/index.html +ls docs/benchmark/results | while read p; do + f=$(basename "$p") + echo '
  • '$f'
  • ' >> docs/benchmark/results/index.html +done +echo '
' >> docs/benchmark/results/index.html diff --git a/src/index.ts b/src/index.ts new file mode 100644 index 00000000..400f4cc6 --- /dev/null +++ b/src/index.ts @@ -0,0 +1,2322 @@ +/** + * @module LRUCache + */ + +// module-private names and types +type Perf = { now: () => number } +const perf: Perf = + typeof performance === 'object' && + performance && + typeof performance.now === 'function' + ? performance + : Date + +const warned = new Set() + +// either a function or a class +type ForC = ((...a: any[]) => any) | { new (...a: any[]): any } + +/* c8 ignore start */ +const PROCESS = ( + typeof process === 'object' && !!process ? process : {} +) as { [k: string]: any } +/* c8 ignore start */ + +const emitWarning = ( + msg: string, + type: string, + code: string, + fn: ForC +) => { + typeof PROCESS.emitWarning === 'function' + ? PROCESS.emitWarning(msg, type, code, fn) + : console.error(`[${code}] ${type}: ${msg}`) +} + +let AC = globalThis.AbortController +let AS = globalThis.AbortSignal + +/* c8 ignore start */ +if (typeof AC === 'undefined') { + //@ts-ignore + AS = class AbortSignal { + onabort?: (...a: any[]) => any + _onabort: ((...a: any[]) => any)[] = [] + reason?: any + aborted: boolean = false + addEventListener(_: string, fn: (...a: any[]) => any) { + this._onabort.push(fn) + } + } + //@ts-ignore + AC = class AbortController { + constructor() { + warnACPolyfill() + } + signal = new AS() + abort(reason: any) { + if (this.signal.aborted) return + //@ts-ignore + this.signal.reason = reason + //@ts-ignore + this.signal.aborted = true + //@ts-ignore + for (const fn of this.signal._onabort) { + fn(reason) + } + this.signal.onabort?.(reason) + } + } + let printACPolyfillWarning = + PROCESS.env?.LRU_CACHE_IGNORE_AC_WARNING !== '1' + const warnACPolyfill = () => { + if (!printACPolyfillWarning) return + printACPolyfillWarning = false + emitWarning( + 'AbortController is not defined. 
If using lru-cache in ' + + 'node 14, load an AbortController polyfill from the ' + + '`node-abort-controller` package. A minimal polyfill is ' + + 'provided for use by LRUCache.fetch(), but it should not be ' + + 'relied upon in other contexts (eg, passing it to other APIs that ' + + 'use AbortController/AbortSignal might have undesirable effects). ' + + 'You may disable this with LRU_CACHE_IGNORE_AC_WARNING=1 in the env.', + 'NO_ABORT_CONTROLLER', + 'ENOTSUP', + warnACPolyfill + ) + } +} +/* c8 ignore stop */ + +const shouldWarn = (code: string) => !warned.has(code) + +const TYPE = Symbol('type') +type PosInt = number & { [TYPE]: 'Positive Integer' } +type Index = number & { [TYPE]: 'LRUCache Index' } + +const isPosInt = (n: any): n is PosInt => + n && n === Math.floor(n) && n > 0 && isFinite(n) + +type UintArray = Uint8Array | Uint16Array | Uint32Array +type NumberArray = UintArray | number[] + +/* c8 ignore start */ +// This is a little bit ridiculous, tbh. +// The maximum array length is 2^32-1 or thereabouts on most JS impls. +// And well before that point, you're caching the entire world, I mean, +// that's ~32GB of just integers for the next/prev links, plus whatever +// else to hold that many keys and values. Just filling the memory with +// zeroes at init time is brutal when you get that big. +// But why not be complete? +// Maybe in the future, these limits will have expanded. +const getUintArray = (max: number) => + !isPosInt(max) + ? null + : max <= Math.pow(2, 8) + ? Uint8Array + : max <= Math.pow(2, 16) + ? Uint16Array + : max <= Math.pow(2, 32) + ? Uint32Array + : max <= Number.MAX_SAFE_INTEGER + ? 
ZeroArray + : null +/* c8 ignore stop */ + +class ZeroArray extends Array { + constructor(size: number) { + super(size) + this.fill(0) + } +} + +type StackLike = Stack | Index[] +class Stack { + heap: NumberArray + length: number + // private constructor + static #constructing: boolean = false + static create(max: number): StackLike { + const HeapCls = getUintArray(max) + if (!HeapCls) return [] + Stack.#constructing = true + const s = new Stack(max, HeapCls) + Stack.#constructing = false + return s + } + constructor( + max: number, + HeapCls: { new (n: number): NumberArray } + ) { + /* c8 ignore start */ + if (!Stack.#constructing) { + throw new TypeError('instantiate Stack using Stack.create(n)') + } + /* c8 ignore stop */ + this.heap = new HeapCls(max) + this.length = 0 + } + push(n: Index) { + this.heap[this.length++] = n + } + pop(): Index { + return this.heap[--this.length] as Index + } +} + +/** + * Promise representing an in-progress {@link LRUCache#fetch} call + */ +export type BackgroundFetch = Promise & { + __returned: BackgroundFetch | undefined + __abortController: AbortController + __staleWhileFetching: V | undefined +} + +type DisposeTask = [ + value: V, + key: K, + reason: LRUCache.DisposeReason +] + +export namespace LRUCache { + /** + * An integer greater than 0, reflecting the calculated size of items + */ + export type Size = number + + /** + * Integer greater than 0, representing some number of milliseconds, or the + * time at which a TTL started counting from. + */ + export type Milliseconds = number + + /** + * An integer greater than 0, reflecting a number of items + */ + export type Count = number + + /** + * The reason why an item was removed from the cache, passed + * to the {@link Disposer} methods. + */ + export type DisposeReason = 'evict' | 'set' | 'delete' + /** + * A method called upon item removal, passed as the + * {@link OptionsBase.dispose} and/or + * {@link OptionsBase.disposeAfter} options. 
+ */ + export type Disposer = ( + value: V, + key: K, + reason: DisposeReason + ) => void + + /** + * A function that returns the effective calculated size + * of an entry in the cache. + */ + export type SizeCalculator = (value: V, key: K) => Size + + /** + * Options provided to the + * {@link OptionsBase.fetchMethod} function. + */ + export interface FetcherOptions { + signal: AbortSignal + options: FetcherFetchOptions + /** + * Object provided in the {@link FetchOptions.context} option to + * {@link LRUCache#fetch} + */ + context: FC + } + + /** + * Status object that may be passed to {@link LRUCache#fetch}, + * {@link LRUCache#get}, {@link LRUCache#set}, and {@link LRUCache#has}. + */ + export interface Status { + /** + * The status of a set() operation. + * + * - add: the item was not found in the cache, and was added + * - update: the item was in the cache, with the same value provided + * - replace: the item was in the cache, and replaced + * - miss: the item was not added to the cache for some reason + */ + set?: 'add' | 'update' | 'replace' | 'miss' + + /** + * the ttl stored for the item, or undefined if ttls are not used. + */ + ttl?: Milliseconds + + /** + * the start time for the item, or undefined if ttls are not used. + */ + start?: Milliseconds + + /** + * The timestamp used for TTL calculation + */ + now?: Milliseconds + + /** + * the remaining ttl for the item, or undefined if ttls are not used. + */ + remainingTTL?: Milliseconds + + /** + * The calculated size for the item, if sizes are used. + */ + entrySize?: Size + + /** + * The total calculated size of the cache, if sizes are used. 
+ */ + totalCalculatedSize?: Size + + /** + * A flag indicating that the item was not stored, due to exceeding the + * {@link OptionsBase.maxEntrySize} + */ + maxEntrySizeExceeded?: true + + /** + * The old value, specified in the case of `set:'update'` or + * `set:'replace'` + */ + oldValue?: V + + /** + * The results of a {@link LRUCache#has} operation + * + * - hit: the item was found in the cache + * - stale: the item was found in the cache, but is stale + * - miss: the item was not found in the cache + */ + has?: 'hit' | 'stale' | 'miss' + + /** + * The status of a {@link LRUCache#fetch} operation. + * Note that this can change as the underlying fetch() moves through + * various states. + * + * - inflight: there is another fetch() for this key which is in process + * - get: there is no fetchMethod, so {@link LRUCache#get} was called. + * - miss: the item is not in cache, and will be fetched. + * - hit: the item is in the cache, and was resolved immediately. + * - stale: the item is in the cache, but stale. + * - refresh: the item is in the cache, and not stale, but + * {@link FetchOptions.forceRefresh} was specified. + */ + fetch?: 'get' | 'inflight' | 'miss' | 'hit' | 'stale' | 'refresh' + + /** + * The {@link OptionsBase.fetchMethod} was called + */ + fetchDispatched?: true + + /** + * The cached value was updated after a successful call to + * {@link OptionsBase.fetchMethod} + */ + fetchUpdated?: true + + /** + * The reason for a fetch() rejection. Either the error raised by the + * {@link OptionsBase.fetchMethod}, or the reason for an + * AbortSignal. + */ + fetchError?: Error + + /** + * The fetch received an abort signal + */ + fetchAborted?: true + + /** + * The abort signal received was ignored, and the fetch was allowed to + * continue. 
+ */ + fetchAbortIgnored?: true + + /** + * The fetchMethod promise resolved successfully + */ + fetchResolved?: true + + /** + * The fetchMethod promise was rejected + */ + fetchRejected?: true + + /** + * The status of a {@link LRUCache#get} operation. + * + * - fetching: The item is currently being fetched. If a previous value + * is present and allowed, that will be returned. + * - stale: The item is in the cache, and is stale. + * - hit: the item is in the cache + * - miss: the item is not in the cache + */ + get?: 'stale' | 'hit' | 'miss' + + /** + * A fetch or get operation returned a stale value. + */ + returnedStale?: true + } + + /** + * options which override the options set in the LRUCache constructor + * when calling {@link LRUCache#fetch}. + * + * This is the union of {@link GetOptions} and {@link SetOptions}, plus + * {@link OptionsBase.noDeleteOnFetchRejection}, + * {@link OptionsBase.allowStaleOnFetchRejection}, + * {@link FetchOptions.forceRefresh}, and + * {@link OptionsBase.context} + * + * Any of these may be modified in the {@link OptionsBase.fetchMethod} + * function, but the {@link GetOptions} fields will of course have no + * effect, as the {@link LRUCache#get} call already happened by the time + * the fetchMethod is called. + */ + export interface FetcherFetchOptions + extends Pick< + OptionsBase, + | 'allowStale' + | 'updateAgeOnGet' + | 'noDeleteOnStaleGet' + | 'sizeCalculation' + | 'ttl' + | 'noDisposeOnSet' + | 'noUpdateTTL' + | 'noDeleteOnFetchRejection' + | 'allowStaleOnFetchRejection' + | 'ignoreFetchAbort' + | 'allowStaleOnFetchAbort' + > { + status?: Status + size?: Size + } + + /** + * Options that may be passed to the {@link LRUCache#fetch} method. + */ + export interface FetchOptions + extends FetcherFetchOptions { + /** + * Set to true to force a re-load of the existing data, even if it + * is not yet stale. 
+ */ + forceRefresh?: boolean + /** + * Context provided to the {@link OptionsBase.fetchMethod} as + * the {@link FetcherOptions.context} param. + * + * If the FC type is specified as unknown (the default), + * undefined or void, then this is optional. Otherwise, it will + * be required. + */ + context?: FC + signal?: AbortSignal + status?: Status + } + /** + * Options provided to {@link LRUCache#fetch} when the FC type is something + * other than `unknown`, `undefined`, or `void` + */ + export interface FetchOptionsWithContext + extends FetchOptions { + context: FC + } + /** + * Options provided to {@link LRUCache#fetch} when the FC type is + * `undefined` or `void` + */ + export interface FetchOptionsNoContext + extends FetchOptions { + context?: undefined + } + + /** + * Options that may be passed to the {@link LRUCache#has} method. + */ + export interface HasOptions + extends Pick, 'updateAgeOnHas'> { + status?: Status + } + + /** + * Options that may be passed to the {@link LRUCache#get} method. + */ + export interface GetOptions + extends Pick< + OptionsBase, + 'allowStale' | 'updateAgeOnGet' | 'noDeleteOnStaleGet' + > { + status?: Status + } + + /** + * Options that may be passed to the {@link LRUCache#peek} method. + */ + export interface PeekOptions + extends Pick, 'allowStale'> {} + + /** + * Options that may be passed to the {@link LRUCache#set} method. + */ + export interface SetOptions + extends Pick< + OptionsBase, + 'sizeCalculation' | 'ttl' | 'noDisposeOnSet' | 'noUpdateTTL' + > { + /** + * If size tracking is enabled, then setting an explicit size + * in the {@link LRUCache#set} call will prevent calling the + * {@link OptionsBase.sizeCalculation} function. + */ + size?: Size + /** + * If TTL tracking is enabled, then setting an explicit start + * time in the {@link LRUCache#set} call will override the + * default time from `performance.now()` or `Date.now()`. 
+ * + * Note that it must be a valid value for whichever time-tracking + * method is in use. + */ + start?: Milliseconds + status?: Status + } + + /** + * The type signature for the {@link OptionsBase.fetchMethod} option. + */ + export type Fetcher = ( + key: K, + staleValue: V | undefined, + options: FetcherOptions + ) => Promise | V | undefined | void + + /** + * Options which may be passed to the {@link LRUCache} constructor. + * + * Most of these may be overridden in the various options that use + * them. + * + * Despite all being technically optional, the constructor requires that + * a cache is at minimum limited by one or more of {@link OptionsBase.max}, + * {@link OptionsBase.ttl}, or {@link OptionsBase.maxSize}. + * + * If {@link OptionsBase.ttl} is used alone, then it is strongly advised + * (and in fact required by the type definitions here) that the cache + * also set {@link OptionsBase.ttlAutopurge}, to prevent potentially + * unbounded storage. + */ + export interface OptionsBase { + /** + * The maximum number of items to store in the cache before evicting + * old entries. This is read-only on the {@link LRUCache} instance, + * and may not be overridden. + * + * If set, then storage space will be pre-allocated at construction + * time, and the cache will perform significantly faster. + * + * Note that significantly fewer items may be stored, if + * {@link OptionsBase.maxSize} and/or {@link OptionsBase.ttl} are also + * set. + */ + max?: Count + + /** + * Max time in milliseconds for items to live in cache before they are + * considered stale. Note that stale items are NOT preemptively removed + * by default, and MAY live in the cache long after they have expired. + * + * Also, as this cache is optimized for LRU/MRU operations, some of + * the staleness/TTL checks will reduce performance, as they will incur + * overhead by deleting items. + * + * Must be an integer number of ms. 
If set to 0, this indicates "no TTL" + * + * @default 0 + */ + ttl?: Milliseconds + + /** + * Minimum amount of time in ms in which to check for staleness. + * Defaults to 1, which means that the current time is checked + * at most once per millisecond. + * + * Set to 0 to check the current time every time staleness is tested. + * (This reduces performance, and is theoretically unnecessary.) + * + * Setting this to a higher value will improve performance somewhat + * while using ttl tracking, albeit at the expense of keeping stale + * items around a bit longer than their TTLs would indicate. + * + * @default 1 + */ + ttlResolution?: Milliseconds + + /** + * Preemptively remove stale items from the cache. + * Note that this may significantly degrade performance, + * especially if the cache is storing a large number of items. + * It is almost always best to just leave the stale items in + * the cache, and let them fall out as new items are added. + * + * Note that this means that {@link OptionsBase.allowStale} is a bit + * pointless, as stale items will be deleted almost as soon as they + * expire. + * + * @default false + */ + ttlAutopurge?: boolean + + /** + * Update the age of items on {@link LRUCache#get}, renewing their TTL + * + * Has no effect if {@link OptionsBase.ttl} is not set. + * + * @default false + */ + updateAgeOnGet?: boolean + + /** + * Update the age of items on {@link LRUCache#has}, renewing their TTL + * + * Has no effect if {@link OptionsBase.ttl} is not set. + * + * @default false + */ + updateAgeOnHas?: boolean + + /** + * Allow {@link LRUCache#get} and {@link LRUCache#fetch} calls to return + * stale data, if available. + */ + allowStale?: boolean + + /** + * Function that is called on items when they are dropped from the cache. + * This can be handy if you want to close file descriptors or do other + * cleanup tasks when items are no longer accessible. Called with `key, + * value`. 
It's called before actually removing the item from the + * internal cache, so it is *NOT* safe to re-add them. + * + * Use {@link OptionsBase.disposeAfter} if you wish to dispose items after + * they have been full removed, when it is safe to add them back to the + * cache. + */ + dispose?: Disposer + + /** + * The same as {@link OptionsBase.dispose}, but called *after* the entry + * is completely removed and the cache is once again in a clean state. + * It is safe to add an item right back into the cache at this point. + * However, note that it is *very* easy to inadvertently create infinite + * recursion this way. + */ + disposeAfter?: Disposer + + /** + * Set to true to suppress calling the + * {@link OptionsBase.dispose} function if the entry key is + * still accessible within the cache. + * This may be overridden by passing an options object to + * {@link LRUCache#set}. + */ + noDisposeOnSet?: boolean + + /** + * Boolean flag to tell the cache to not update the TTL when + * setting a new value for an existing key (ie, when updating a value + * rather than inserting a new value). Note that the TTL value is + * _always_ set (if provided) when adding a new entry into the cache. + * + * Has no effect if a {@link OptionsBase.ttl} is not set. + */ + noUpdateTTL?: boolean + + /** + * If you wish to track item size, you must provide a maxSize + * note that we still will only keep up to max *actual items*, + * if max is set, so size tracking may cause fewer than max items + * to be stored. At the extreme, a single item of maxSize size + * will cause everything else in the cache to be dropped when it + * is added. Use with caution! + * + * Note also that size tracking can negatively impact performance, + * though for most cases, only minimally. + */ + maxSize?: Size + + /** + * The maximum allowed size for any single item in the cache. 
+ * + * If a larger item is passed to {@link LRUCache#set} or returned by a + * {@link OptionsBase.fetchMethod}, then it will not be stored in the + * cache. + */ + maxEntrySize?: Size + + /** + * A function that returns a number indicating the item's size. + * + * If not provided, and {@link OptionsBase.maxSize} or + * {@link OptionsBase.maxEntrySize} are set, then all + * {@link LRUCache#set} calls **must** provide an explicit + * {@link SetOptions.size} or sizeCalculation param. + */ + sizeCalculation?: SizeCalculator + + /** + * Method that provides the implementation for {@link LRUCache#fetch} + */ + fetchMethod?: Fetcher + + /** + * Set to true to suppress the deletion of stale data when a + * {@link OptionsBase.fetchMethod} returns a rejected promise. + */ + noDeleteOnFetchRejection?: boolean + + /** + * Do not delete stale items when they are retrieved with + * {@link LRUCache#get}. + * + * Note that the `get` return value will still be `undefined` + * unless {@link OptionsBase.allowStale} is true. + */ + noDeleteOnStaleGet?: boolean + + /** + * Set to true to allow returning stale data when a + * {@link OptionsBase.fetchMethod} throws an error or returns a rejected + * promise. + * + * This differs from using {@link OptionsBase.allowStale} in that stale + * data will ONLY be returned in the case that the + * {@link LRUCache#fetch} fails, not any other times. + */ + allowStaleOnFetchRejection?: boolean + + /** + * Set to true to return a stale value from the cache when the + * `AbortSignal` passed to the {@link OptionsBase.fetchMethod} dispatches an `'abort'` + * event, whether user-triggered, or due to internal cache behavior. + * + * Unless {@link OptionsBase.ignoreFetchAbort} is also set, the underlying + * {@link OptionsBase.fetchMethod} will still be considered canceled, and + * any value it returns will be ignored and not cached. 
+ * + * Caveat: since fetches are aborted when a new value is explicitly + * set in the cache, this can lead to fetch returning a stale value, + * since that was the fallback value _at the moment the `fetch()` was + * initiated_, even though the new updated value is now present in + * the cache. + * + * For example: + * + * ```ts + * const cache = new LRUCache({ + * ttl: 100, + * fetchMethod: async (url, oldValue, { signal }) => { + * const res = await fetch(url, { signal }) + * return await res.json() + * } + * }) + * cache.set('https://example.com/', { some: 'data' }) + * // 100ms go by... + * const result = cache.fetch('https://example.com/') + * cache.set('https://example.com/', { other: 'thing' }) + * console.log(await result) // { some: 'data' } + * console.log(cache.get('https://example.com/')) // { other: 'thing' } + * ``` + */ + allowStaleOnFetchAbort?: boolean + + /** + * Set to true to ignore the `abort` event emitted by the `AbortSignal` + * object passed to {@link OptionsBase.fetchMethod}, and still cache the + * resulting resolution value, as long as it is not `undefined`. + * + * When used on its own, this means aborted {@link LRUCache#fetch} calls are not + * immediately resolved or rejected when they are aborted, and instead + * take the full time to await. + * + * When used with {@link OptionsBase.allowStaleOnFetchAbort}, aborted + * {@link LRUCache#fetch} calls will resolve immediately to their stale + * cached value or `undefined`, and will continue to process and eventually + * update the cache when they resolve, as long as the resulting value is + * not `undefined`, thus supporting a "return stale on timeout while + * refreshing" mechanism by passing `AbortSignal.timeout(n)` as the signal. + * + * **Note**: regardless of this setting, an `abort` event _is still + * emitted on the `AbortSignal` object_, so may result in invalid results + * when passed to other underlying APIs that use AbortSignals. 
+ * + * This may be overridden in the {@link OptionsBase.fetchMethod} or the + * call to {@link LRUCache#fetch}. + */ + ignoreFetchAbort?: boolean + } + + export interface OptionsMaxLimit + extends OptionsBase { + max: Count + } + export interface OptionsTTLLimit + extends OptionsBase { + ttl: Milliseconds + ttlAutopurge: boolean + } + export interface OptionsSizeLimit + extends OptionsBase { + maxSize: Size + } + + /** + * The valid safe options for the {@link LRUCache} constructor + */ + export type Options = + | OptionsMaxLimit + | OptionsSizeLimit + | OptionsTTLLimit + + /** + * Entry objects used by {@link LRUCache#load} and {@link LRUCache#dump} + */ + export interface Entry { + value: V + ttl?: Milliseconds + size?: Size + start?: Milliseconds + } +} + +/** + * Default export, the thing you're using this module to get. + * + * All properties from the options object (with the exception of + * {@link OptionsBase.max} and {@link OptionsBase.maxSize}) are added as + * normal public members. (`max` and `maxBase` are read-only getters.) + * Changing any of these will alter the defaults for subsequent method calls, + * but is otherwise safe. + */ +export class LRUCache { + // properties coming in from the options of these, only max and maxSize + // really *need* to be protected. The rest can be modified, as they just + // set defaults for various methods. 
+ readonly #max: LRUCache.Count + readonly #maxSize: LRUCache.Size + readonly #dispose?: LRUCache.Disposer + readonly #disposeAfter?: LRUCache.Disposer + readonly #fetchMethod?: LRUCache.Fetcher + + /** + * {@link LRUCache.OptionsBase.ttl} + */ + ttl: LRUCache.Milliseconds + + /** + * {@link LRUCache.OptionsBase.ttlResolution} + */ + ttlResolution: LRUCache.Milliseconds + /** + * {@link LRUCache.OptionsBase.ttlAutopurge} + */ + ttlAutopurge: boolean + /** + * {@link LRUCache.OptionsBase.updateAgeOnGet} + */ + updateAgeOnGet: boolean + /** + * {@link LRUCache.OptionsBase.updateAgeOnHas} + */ + updateAgeOnHas: boolean + /** + * {@link LRUCache.OptionsBase.allowStale} + */ + allowStale: boolean + + /** + * {@link LRUCache.OptionsBase.noDisposeOnSet} + */ + noDisposeOnSet: boolean + /** + * {@link LRUCache.OptionsBase.noUpdateTTL} + */ + noUpdateTTL: boolean + /** + * {@link LRUCache.OptionsBase.maxEntrySize} + */ + maxEntrySize: LRUCache.Size + /** + * {@link LRUCache.OptionsBase.sizeCalculation} + */ + sizeCalculation?: LRUCache.SizeCalculator + /** + * {@link LRUCache.OptionsBase.noDeleteOnFetchRejection} + */ + noDeleteOnFetchRejection: boolean + /** + * {@link LRUCache.OptionsBase.noDeleteOnStaleGet} + */ + noDeleteOnStaleGet: boolean + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort} + */ + allowStaleOnFetchAbort: boolean + /** + * {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} + */ + allowStaleOnFetchRejection: boolean + /** + * {@link LRUCache.OptionsBase.ignoreFetchAbort} + */ + ignoreFetchAbort: boolean + + // computed properties + #size: LRUCache.Count + #calculatedSize: LRUCache.Size + #keyMap: Map + #keyList: (K | undefined)[] + #valList: (V | BackgroundFetch | undefined)[] + #next: NumberArray + #prev: NumberArray + #head: Index + #tail: Index + #free: StackLike + #disposed?: DisposeTask[] + #sizes?: ZeroArray + #starts?: ZeroArray + #ttls?: ZeroArray + + #hasDispose: boolean + #hasFetchMethod: boolean + #hasDisposeAfter: boolean + + 
/** + * Do not call this method unless you need to inspect the + * inner workings of the cache. If anything returned by this + * object is modified in any way, strange breakage may occur. + * + * These fields are private for a reason! + * + * @internal + */ + static unsafeExposeInternals< + K extends {}, + V extends {}, + FC extends unknown = unknown + >(c: LRUCache) { + return { + // properties + starts: c.#starts, + ttls: c.#ttls, + sizes: c.#sizes, + keyMap: c.#keyMap as Map, + keyList: c.#keyList, + valList: c.#valList, + next: c.#next, + prev: c.#prev, + get head() { + return c.#head + }, + get tail() { + return c.#tail + }, + free: c.#free, + // methods + isBackgroundFetch: (p: any) => c.#isBackgroundFetch(p), + backgroundFetch: ( + k: K, + index: number | undefined, + options: LRUCache.FetchOptions, + context: any + ): BackgroundFetch => + c.#backgroundFetch( + k, + index as Index | undefined, + options, + context + ), + moveToTail: (index: number): void => + c.#moveToTail(index as Index), + indexes: (options?: { allowStale: boolean }) => + c.#indexes(options), + rindexes: (options?: { allowStale: boolean }) => + c.#rindexes(options), + isStale: (index: number | undefined) => + c.#isStale(index as Index), + } + } + + // Protected read-only members + + /** + * {@link LRUCache.OptionsBase.max} (read-only) + */ + get max(): LRUCache.Count { + return this.#max + } + /** + * {@link LRUCache.OptionsBase.maxSize} (read-only) + */ + get maxSize(): LRUCache.Count { + return this.#maxSize + } + /** + * The total computed size of items in the cache (read-only) + */ + get calculatedSize(): LRUCache.Size { + return this.#calculatedSize + } + /** + * The number of items stored in the cache (read-only) + */ + get size(): LRUCache.Count { + return this.#size + } + /** + * {@link LRUCache.OptionsBase.fetchMethod} (read-only) + */ + get fetchMethod(): LRUCache.Fetcher | undefined { + return this.#fetchMethod + } + /** + * {@link LRUCache.OptionsBase.dispose} (read-only) + */ 
+ get dispose() { + return this.#dispose + } + /** + * {@link LRUCache.OptionsBase.disposeAfter} (read-only) + */ + get disposeAfter() { + return this.#disposeAfter + } + + constructor( + options: LRUCache.Options | LRUCache + ) { + const { + max = 0, + ttl, + ttlResolution = 1, + ttlAutopurge, + updateAgeOnGet, + updateAgeOnHas, + allowStale, + dispose, + disposeAfter, + noDisposeOnSet, + noUpdateTTL, + maxSize = 0, + maxEntrySize = 0, + sizeCalculation, + fetchMethod, + noDeleteOnFetchRejection, + noDeleteOnStaleGet, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + } = options + + if (max !== 0 && !isPosInt(max)) { + throw new TypeError('max option must be a nonnegative integer') + } + + const UintArray = max ? getUintArray(max) : Array + if (!UintArray) { + throw new Error('invalid max value: ' + max) + } + + this.#max = max + this.#maxSize = maxSize + this.maxEntrySize = maxEntrySize || this.#maxSize + this.sizeCalculation = sizeCalculation + if (this.sizeCalculation) { + if (!this.#maxSize && !this.maxEntrySize) { + throw new TypeError( + 'cannot set sizeCalculation without setting maxSize or maxEntrySize' + ) + } + if (typeof this.sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation set to non-function') + } + } + + if ( + fetchMethod !== undefined && + typeof fetchMethod !== 'function' + ) { + throw new TypeError( + 'fetchMethod must be a function if specified' + ) + } + this.#fetchMethod = fetchMethod + this.#hasFetchMethod = !!fetchMethod + + this.#keyMap = new Map() + this.#keyList = new Array(max).fill(undefined) + this.#valList = new Array(max).fill(undefined) + this.#next = new UintArray(max) + this.#prev = new UintArray(max) + this.#head = 0 as Index + this.#tail = 0 as Index + this.#free = Stack.create(max) + this.#size = 0 + this.#calculatedSize = 0 + + if (typeof dispose === 'function') { + this.#dispose = dispose + } + if (typeof disposeAfter === 'function') { + this.#disposeAfter = disposeAfter + 
this.#disposed = [] + } else { + this.#disposeAfter = undefined + this.#disposed = undefined + } + this.#hasDispose = !!this.#dispose + this.#hasDisposeAfter = !!this.#disposeAfter + + this.noDisposeOnSet = !!noDisposeOnSet + this.noUpdateTTL = !!noUpdateTTL + this.noDeleteOnFetchRejection = !!noDeleteOnFetchRejection + this.allowStaleOnFetchRejection = !!allowStaleOnFetchRejection + this.allowStaleOnFetchAbort = !!allowStaleOnFetchAbort + this.ignoreFetchAbort = !!ignoreFetchAbort + + // NB: maxEntrySize is set to maxSize if it's set + if (this.maxEntrySize !== 0) { + if (this.#maxSize !== 0) { + if (!isPosInt(this.#maxSize)) { + throw new TypeError( + 'maxSize must be a positive integer if specified' + ) + } + } + if (!isPosInt(this.maxEntrySize)) { + throw new TypeError( + 'maxEntrySize must be a positive integer if specified' + ) + } + this.#initializeSizeTracking() + } + + this.allowStale = !!allowStale + this.noDeleteOnStaleGet = !!noDeleteOnStaleGet + this.updateAgeOnGet = !!updateAgeOnGet + this.updateAgeOnHas = !!updateAgeOnHas + this.ttlResolution = + isPosInt(ttlResolution) || ttlResolution === 0 + ? ttlResolution + : 1 + this.ttlAutopurge = !!ttlAutopurge + this.ttl = ttl || 0 + if (this.ttl) { + if (!isPosInt(this.ttl)) { + throw new TypeError( + 'ttl must be a positive integer if specified' + ) + } + this.#initializeTTLTracking() + } + + // do not allow completely unbounded caches + if (this.#max === 0 && this.ttl === 0 && this.#maxSize === 0) { + throw new TypeError( + 'At least one of max, maxSize, or ttl is required' + ) + } + if (!this.ttlAutopurge && !this.#max && !this.#maxSize) { + const code = 'LRU_CACHE_UNBOUNDED' + if (shouldWarn(code)) { + warned.add(code) + const msg = + 'TTL caching without ttlAutopurge, max, or maxSize can ' + + 'result in unbounded memory consumption.' 
+ emitWarning(msg, 'UnboundedCacheWarning', code, LRUCache) + } + } + } + + /** + * Return the remaining TTL time for a given entry key + */ + getRemainingTTL(key: K) { + return this.#keyMap.has(key) ? Infinity : 0 + } + + #initializeTTLTracking() { + const ttls = new ZeroArray(this.#max) + const starts = new ZeroArray(this.#max) + this.#ttls = ttls + this.#starts = starts + + this.#setItemTTL = (index, ttl, start = perf.now()) => { + starts[index] = ttl !== 0 ? start : 0 + ttls[index] = ttl + if (ttl !== 0 && this.ttlAutopurge) { + const t = setTimeout(() => { + if (this.#isStale(index)) { + this.delete(this.#keyList[index] as K) + } + }, ttl + 1) + // unref() not supported on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref() + } + /* c8 ignore stop */ + } + } + + this.#updateItemAge = index => { + starts[index] = ttls[index] !== 0 ? perf.now() : 0 + } + + this.#statusTTL = (status, index) => { + if (ttls[index]) { + const ttl = ttls[index] + const start = starts[index] + status.ttl = ttl + status.start = start + status.now = cachedNow || getNow() + const age = status.now - start + status.remainingTTL = ttl - age + } + } + + // debounce calls to perf.now() to 1s so we're not hitting + // that costly call repeatedly. 
+ let cachedNow = 0 + const getNow = () => { + const n = perf.now() + if (this.ttlResolution > 0) { + cachedNow = n + const t = setTimeout( + () => (cachedNow = 0), + this.ttlResolution + ) + // not available on all platforms + /* c8 ignore start */ + if (t.unref) { + t.unref() + } + /* c8 ignore stop */ + } + return n + } + + this.getRemainingTTL = key => { + const index = this.#keyMap.get(key) + if (index === undefined) { + return 0 + } + const ttl = ttls[index] + const start = starts[index] + if (ttl === 0 || start === 0) { + return Infinity + } + const age = (cachedNow || getNow()) - start + return ttl - age + } + + this.#isStale = index => { + return ( + ttls[index] !== 0 && + starts[index] !== 0 && + (cachedNow || getNow()) - starts[index] > ttls[index] + ) + } + } + + // conditionally set private methods related to TTL + #updateItemAge: (index: Index) => void = () => {} + #statusTTL: (status: LRUCache.Status, index: Index) => void = + () => {} + #setItemTTL: ( + index: Index, + ttl: LRUCache.Milliseconds, + start?: LRUCache.Milliseconds + // ignore because we never call this if we're not already in TTL mode + /* c8 ignore start */ + ) => void = () => {} + /* c8 ignore stop */ + + #isStale: (index: Index) => boolean = () => false + + #initializeSizeTracking() { + const sizes = new ZeroArray(this.#max) + this.#calculatedSize = 0 + this.#sizes = sizes + this.#removeItemSize = index => { + this.#calculatedSize -= sizes[index] + sizes[index] = 0 + } + this.#requireSize = (k, v, size, sizeCalculation) => { + // provisionally accept background fetches. + // actual value size will be checked when they return. 
+ if (this.#isBackgroundFetch(v)) { + return 0 + } + if (!isPosInt(size)) { + if (sizeCalculation) { + if (typeof sizeCalculation !== 'function') { + throw new TypeError('sizeCalculation must be a function') + } + size = sizeCalculation(v, k) + if (!isPosInt(size)) { + throw new TypeError( + 'sizeCalculation return invalid (expect positive integer)' + ) + } + } else { + throw new TypeError( + 'invalid size value (must be positive integer). ' + + 'When maxSize or maxEntrySize is used, sizeCalculation ' + + 'or size must be set.' + ) + } + } + return size + } + this.#addItemSize = ( + index: Index, + size: LRUCache.Size, + status?: LRUCache.Status + ) => { + sizes[index] = size + if (this.#maxSize) { + const maxSize = this.#maxSize - sizes[index] + while (this.#calculatedSize > maxSize) { + this.#evict(true) + } + } + this.#calculatedSize += sizes[index] + if (status) { + status.entrySize = size + status.totalCalculatedSize = this.#calculatedSize + } + } + } + + #removeItemSize: (index: Index) => void = _i => {} + #addItemSize: ( + index: Index, + size: LRUCache.Size, + status?: LRUCache.Status + ) => void = (_i, _s, _st) => {} + #requireSize: ( + k: K, + v: V | BackgroundFetch, + size?: LRUCache.Size, + sizeCalculation?: LRUCache.SizeCalculator + ) => LRUCache.Size = ( + _k: K, + _v: V | BackgroundFetch, + size?: LRUCache.Size, + sizeCalculation?: LRUCache.SizeCalculator + ) => { + if (size || sizeCalculation) { + throw new TypeError( + 'cannot set size without setting maxSize or maxEntrySize on cache' + ) + } + return 0 + }; + + *#indexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#tail; true; ) { + if (!this.#isValidIndex(i)) { + break + } + if (allowStale || !this.#isStale(i)) { + yield i + } + if (i === this.#head) { + break + } else { + i = this.#prev[i] as Index + } + } + } + } + + *#rindexes({ allowStale = this.allowStale } = {}) { + if (this.#size) { + for (let i = this.#head; true; ) { + if (!this.#isValidIndex(i)) { + 
break + } + if (allowStale || !this.#isStale(i)) { + yield i + } + if (i === this.#tail) { + break + } else { + i = this.#next[i] as Index + } + } + } + } + + #isValidIndex(index: Index) { + return ( + index !== undefined && + this.#keyMap.get(this.#keyList[index] as K) === index + ) + } + + /** + * Return a generator yielding `[key, value]` pairs, + * in order from most recently used to least recently used. + */ + *entries() { + for (const i of this.#indexes()) { + if ( + this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i]) + ) { + yield [this.#keyList[i], this.#valList[i]] + } + } + } + + /** + * Inverse order version of {@link LRUCache.entries} + * + * Return a generator yielding `[key, value]` pairs, + * in order from least recently used to most recently used. + */ + *rentries() { + for (const i of this.#rindexes()) { + if ( + this.#valList[i] !== undefined && + this.#keyList[i] !== undefined && + !this.#isBackgroundFetch(this.#valList[i]) + ) { + yield [this.#keyList[i], this.#valList[i]] + } + } + } + + /** + * Return a generator yielding the keys in the cache, + * in order from most recently used to least recently used. + */ + *keys() { + for (const i of this.#indexes()) { + const k = this.#keyList[i] + if ( + k !== undefined && + !this.#isBackgroundFetch(this.#valList[i]) + ) { + yield k + } + } + } + + /** + * Inverse order version of {@link LRUCache.keys} + * + * Return a generator yielding the keys in the cache, + * in order from least recently used to most recently used. + */ + *rkeys() { + for (const i of this.#rindexes()) { + const k = this.#keyList[i] + if ( + k !== undefined && + !this.#isBackgroundFetch(this.#valList[i]) + ) { + yield k + } + } + } + + /** + * Return a generator yielding the values in the cache, + * in order from most recently used to least recently used. 
+ */ + *values() { + for (const i of this.#indexes()) { + const v = this.#valList[i] + if ( + v !== undefined && + !this.#isBackgroundFetch(this.#valList[i]) + ) { + yield this.#valList[i] + } + } + } + + /** + * Inverse order version of {@link LRUCache.values} + * + * Return a generator yielding the values in the cache, + * in order from least recently used to most recently used. + */ + *rvalues() { + for (const i of this.#rindexes()) { + const v = this.#valList[i] + if ( + v !== undefined && + !this.#isBackgroundFetch(this.#valList[i]) + ) { + yield this.#valList[i] + } + } + } + + /** + * Iterating over the cache itself yields the same results as + * {@link LRUCache.entries} + */ + [Symbol.iterator]() { + return this.entries() + } + + /** + * Find a value for which the supplied fn method returns a truthy value, + * similar to Array.find(). fn is called as fn(value, key, cache). + */ + find( + fn: (v: V, k: K, self: LRUCache) => boolean, + getOptions: LRUCache.GetOptions = {} + ) { + for (const i of this.#indexes()) { + const v = this.#valList[i] + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + if (fn(value, this.#keyList[i] as K, this)) { + return this.get(this.#keyList[i] as K, getOptions) + } + } + } + + /** + * Call the supplied function on each item in the cache, in order from + * most recently used to least recently used. fn is called as + * fn(value, key, cache). Does not update age or recency of use. + * Does not iterate over stale values. + */ + forEach( + fn: (v: V, k: K, self: LRUCache) => any, + thisp: any = this + ) { + for (const i of this.#indexes()) { + const v = this.#valList[i] + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.#keyList[i] as K, this) + } + } + + /** + * The same as {@link LRUCache.forEach} but items are iterated over in + * reverse order. 
(ie, less recently used items are iterated over first.) + */ + rforEach( + fn: (v: V, k: K, self: LRUCache) => any, + thisp: any = this + ) { + for (const i of this.#rindexes()) { + const v = this.#valList[i] + const value = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined) continue + fn.call(thisp, value, this.#keyList[i] as K, this) + } + } + + /** + * Delete any stale entries. Returns true if anything was removed, + * false otherwise. + */ + purgeStale() { + let deleted = false + for (const i of this.#rindexes({ allowStale: true })) { + if (this.#isStale(i)) { + this.delete(this.#keyList[i] as K) + deleted = true + } + } + return deleted + } + + /** + * Return an array of [key, {@link LRUCache.Entry}] tuples which can be + * passed to cache.load() + */ + dump() { + const arr: [K, LRUCache.Entry][] = [] + for (const i of this.#indexes({ allowStale: true })) { + const key = this.#keyList[i] + const v = this.#valList[i] + const value: V | undefined = this.#isBackgroundFetch(v) + ? v.__staleWhileFetching + : v + if (value === undefined || key === undefined) continue + const entry: LRUCache.Entry = { value } + if (this.#ttls && this.#starts) { + entry.ttl = this.#ttls[i] + // always dump the start relative to a portable timestamp + // it's ok for this to be a bit slow, it's a rare operation. + const age = perf.now() - this.#starts[i] + entry.start = Math.floor(Date.now() - age) + } + if (this.#sizes) { + entry.size = this.#sizes[i] + } + arr.unshift([key, entry]) + } + return arr + } + + /** + * Reset the cache and load in the items in entries in the order listed. + * Note that the shape of the resulting cache may be different if the + * same options are not used in both caches. 
+ */ + load(arr: [K, LRUCache.Entry][]) { + this.clear() + for (const [key, entry] of arr) { + if (entry.start) { + // entry.start is a portable timestamp, but we may be using + // node's performance.now(), so calculate the offset, so that + // we get the intended remaining TTL, no matter how long it's + // been on ice. + // + // it's ok for this to be a bit slow, it's a rare operation. + const age = Date.now() - entry.start + entry.start = perf.now() - age + } + this.set(key, entry.value, entry) + } + } + + /** + * Add a value to the cache. + * + * Note: if `undefined` is specified as a value, this is an alias for + * {@link LRUCache#delete} + */ + set( + k: K, + v: V | BackgroundFetch | undefined, + setOptions: LRUCache.SetOptions = {} + ) { + if (v === undefined) { + this.delete(k) + return this + } + const { + ttl = this.ttl, + start, + noDisposeOnSet = this.noDisposeOnSet, + sizeCalculation = this.sizeCalculation, + status, + } = setOptions + let { noUpdateTTL = this.noUpdateTTL } = setOptions + + const size = this.#requireSize( + k, + v, + setOptions.size || 0, + sizeCalculation + ) + // if the item doesn't fit, don't do anything + // NB: maxEntrySize set to maxSize by default + if (this.maxEntrySize && size > this.maxEntrySize) { + if (status) { + status.set = 'miss' + status.maxEntrySizeExceeded = true + } + // have to delete, in case something is there already. + this.delete(k) + return this + } + let index = this.#size === 0 ? undefined : this.#keyMap.get(k) + if (index === undefined) { + // addition + index = ( + this.#size === 0 + ? this.#tail + : this.#free.length !== 0 + ? this.#free.pop() + : this.#size === this.#max + ? 
this.#evict(false) + : this.#size + ) as Index + this.#keyList[index] = k + this.#valList[index] = v + this.#keyMap.set(k, index) + this.#next[this.#tail] = index + this.#prev[index] = this.#tail + this.#tail = index + this.#size++ + this.#addItemSize(index, size, status) + if (status) status.set = 'add' + noUpdateTTL = false + } else { + // update + this.#moveToTail(index) + const oldVal = this.#valList[index] as V | BackgroundFetch + if (v !== oldVal) { + if (this.#hasFetchMethod && this.#isBackgroundFetch(oldVal)) { + oldVal.__abortController.abort(new Error('replaced')) + const { __staleWhileFetching: s } = oldVal + if (s !== undefined && !noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(s as V, k, 'set') + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([s as V, k, 'set']) + } + } + } else if (!noDisposeOnSet) { + if (this.#hasDispose) { + this.#dispose?.(oldVal as V, k, 'set') + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([oldVal as V, k, 'set']) + } + } + this.#removeItemSize(index) + this.#addItemSize(index, size, status) + this.#valList[index] = v + if (status) { + status.set = 'replace' + const oldValue = + oldVal && this.#isBackgroundFetch(oldVal) + ? oldVal.__staleWhileFetching + : oldVal + if (oldValue !== undefined) status.oldValue = oldValue + } + } else if (status) { + status.set = 'update' + } + } + if (ttl !== 0 && !this.#ttls) { + this.#initializeTTLTracking() + } + if (this.#ttls) { + if (!noUpdateTTL) { + this.#setItemTTL(index, ttl, start) + } + if (status) this.#statusTTL(status, index) + } + if (!noDisposeOnSet && this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed + let task: DisposeTask | undefined + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task) + } + } + return this + } + + /** + * Evict the least recently used item, returning its value or + * `undefined` if cache is empty. 
+ */ + pop(): V | undefined { + try { + while (this.#size) { + const val = this.#valList[this.#head] + this.#evict(true) + if (this.#isBackgroundFetch(val)) { + if (val.__staleWhileFetching) { + return val.__staleWhileFetching + } + } else if (val !== undefined) { + return val + } + } + } finally { + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed + let task: DisposeTask | undefined + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task) + } + } + } + } + + #evict(free: boolean) { + const head = this.#head + const k = this.#keyList[head] as K + const v = this.#valList[head] as V + if (this.#hasFetchMethod && this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('evicted')) + } else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v, k, 'evict') + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v, k, 'evict']) + } + } + this.#removeItemSize(head) + // if we aren't about to use the index, then null these out + if (free) { + this.#keyList[head] = undefined + this.#valList[head] = undefined + this.#free.push(head) + } + if (this.#size === 1) { + this.#head = this.#tail = 0 as Index + this.#free.length = 0 + } else { + this.#head = this.#next[head] as Index + } + this.#keyMap.delete(k) + this.#size-- + return head + } + + /** + * Check if a key is in the cache, without updating the recency of use. + * Will return false if the item is stale, even though it is technically + * in the cache. + * + * Will not update item age unless + * {@link LRUCache.OptionsBase.updateAgeOnHas} is set. 
+ */ + has(k: K, hasOptions: LRUCache.HasOptions = {}) { + const { updateAgeOnHas = this.updateAgeOnHas, status } = + hasOptions + const index = this.#keyMap.get(k) + if (index !== undefined) { + const v = this.#valList[index] + if ( + this.#isBackgroundFetch(v) && + v.__staleWhileFetching === undefined + ) { + return false + } + if (!this.#isStale(index)) { + if (updateAgeOnHas) { + this.#updateItemAge(index) + } + if (status) { + status.has = 'hit' + this.#statusTTL(status, index) + } + return true + } else if (status) { + status.has = 'stale' + this.#statusTTL(status, index) + } + } else if (status) { + status.has = 'miss' + } + return false + } + + /** + * Like {@link LRUCache#get} but doesn't update recency or delete stale + * items. + * + * Returns `undefined` if the item is stale, unless + * {@link LRUCache.OptionsBase.allowStale} is set. + */ + peek(k: K, peekOptions: LRUCache.PeekOptions = {}) { + const { allowStale = this.allowStale } = peekOptions + const index = this.#keyMap.get(k) + if ( + index !== undefined && + (allowStale || !this.#isStale(index)) + ) { + const v = this.#valList[index] + // either stale and allowed, or forcing a refresh of non-stale value + return this.#isBackgroundFetch(v) ? v.__staleWhileFetching : v + } + } + + #backgroundFetch( + k: K, + index: Index | undefined, + options: LRUCache.FetchOptions, + context: any + ): BackgroundFetch { + const v = index === undefined ? undefined : this.#valList[index] + if (this.#isBackgroundFetch(v)) { + return v + } + + const ac = new AC() + const { signal } = options + // when/if our AC signals, then stop listening to theirs. 
+ signal?.addEventListener('abort', () => ac.abort(signal.reason), { + signal: ac.signal, + }) + + const fetchOpts = { + signal: ac.signal, + options, + context, + } + + const cb = ( + v: V | undefined, + updateCache = false + ): V | undefined => { + const { aborted } = ac.signal + const ignoreAbort = options.ignoreFetchAbort && v !== undefined + if (options.status) { + if (aborted && !updateCache) { + options.status.fetchAborted = true + options.status.fetchError = ac.signal.reason + if (ignoreAbort) options.status.fetchAbortIgnored = true + } else { + options.status.fetchResolved = true + } + } + if (aborted && !ignoreAbort && !updateCache) { + return fetchFail(ac.signal.reason) + } + // either we didn't abort, and are still here, or we did, and ignored + const bf = p as BackgroundFetch + if (this.#valList[index as Index] === p) { + if (v === undefined) { + if (bf.__staleWhileFetching) { + this.#valList[index as Index] = bf.__staleWhileFetching + } else { + this.delete(k) + } + } else { + if (options.status) options.status.fetchUpdated = true + this.set(k, v, fetchOpts.options) + } + } + return v + } + + const eb = (er: any) => { + if (options.status) { + options.status.fetchRejected = true + options.status.fetchError = er + } + return fetchFail(er) + } + + const fetchFail = (er: any): V | undefined => { + const { aborted } = ac.signal + const allowStaleAborted = + aborted && options.allowStaleOnFetchAbort + const allowStale = + allowStaleAborted || options.allowStaleOnFetchRejection + const noDelete = allowStale || options.noDeleteOnFetchRejection + const bf = p as BackgroundFetch + if (this.#valList[index as Index] === p) { + // if we allow stale on fetch rejections, then we need to ensure that + // the stale value is not removed from the cache when the fetch fails. 
+ const del = !noDelete || bf.__staleWhileFetching === undefined + if (del) { + this.delete(k) + } else if (!allowStaleAborted) { + // still replace the *promise* with the stale value, + // since we are done with the promise at this point. + // leave it untouched if we're still waiting for an + // aborted background fetch that hasn't yet returned. + this.#valList[index as Index] = bf.__staleWhileFetching + } + } + if (allowStale) { + if (options.status && bf.__staleWhileFetching !== undefined) { + options.status.returnedStale = true + } + return bf.__staleWhileFetching + } else if (bf.__returned === bf) { + throw er + } + } + + const pcall = ( + res: (v: V | undefined) => void, + rej: (e: any) => void + ) => { + const fmp = this.#fetchMethod?.(k, v, fetchOpts) + if (fmp && fmp instanceof Promise) { + fmp.then(v => res(v === undefined ? undefined : v), rej) + } + // ignored, we go until we finish, regardless. + // defer check until we are actually aborting, + // so fetchMethod can override. + ac.signal.addEventListener('abort', () => { + if ( + !options.ignoreFetchAbort || + options.allowStaleOnFetchAbort + ) { + res(undefined) + // when it eventually resolves, update the cache. + if (options.allowStaleOnFetchAbort) { + res = v => cb(v, true) + } + } + }) + } + + if (options.status) options.status.fetchDispatched = true + const p = new Promise(pcall).then(cb, eb) + const bf: BackgroundFetch = Object.assign(p, { + __abortController: ac, + __staleWhileFetching: v, + __returned: undefined, + }) + + if (index === undefined) { + // internal, don't expose status. 
+ this.set(k, bf, { ...fetchOpts.options, status: undefined }) + index = this.#keyMap.get(k) + } else { + this.#valList[index] = bf + } + return bf + } + + #isBackgroundFetch(p: any): p is BackgroundFetch { + if (!this.#hasFetchMethod) return false + const b = p as BackgroundFetch + return ( + !!b && + b instanceof Promise && + b.hasOwnProperty('__staleWhileFetching') && + b.__abortController instanceof AC + ) + } + + /** + * Make an asynchronous cached fetch using the + * {@link LRUCache.OptionsBase.fetchMethod} function. + * + * If multiple fetches for the same key are issued, then they will all be + * coalesced into a single call to fetchMethod. + * + * Note that this means that handling options such as + * {@link LRUCache.OptionsBase.allowStaleOnFetchAbort}, + * {@link LRUCache.FetchOptions.signal}, + * and {@link LRUCache.OptionsBase.allowStaleOnFetchRejection} will be + * determined by the FIRST fetch() call for a given key. + * + * This is a known (fixable) shortcoming which will be addressed when + * someone complains about it, as the fix would involve added complexity and + * may not be worth the costs for this edge case. + */ + fetch( + k: K, + fetchOptions: unknown extends FC + ? LRUCache.FetchOptions + : FC extends undefined | void + ? LRUCache.FetchOptionsNoContext + : LRUCache.FetchOptionsWithContext + ): Promise + // this overload not allowed if context is required + fetch( + k: unknown extends FC + ? K + : FC extends undefined | void + ? K + : never, + fetchOptions?: unknown extends FC + ? LRUCache.FetchOptions + : FC extends undefined | void + ? 
LRUCache.FetchOptionsNoContext + : never + ): Promise + async fetch( + k: K, + fetchOptions: LRUCache.FetchOptions = {} + ): Promise { + const { + // get options + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + // set options + ttl = this.ttl, + noDisposeOnSet = this.noDisposeOnSet, + size = 0, + sizeCalculation = this.sizeCalculation, + noUpdateTTL = this.noUpdateTTL, + // fetch exclusive options + noDeleteOnFetchRejection = this.noDeleteOnFetchRejection, + allowStaleOnFetchRejection = this.allowStaleOnFetchRejection, + ignoreFetchAbort = this.ignoreFetchAbort, + allowStaleOnFetchAbort = this.allowStaleOnFetchAbort, + context, + forceRefresh = false, + status, + signal, + } = fetchOptions + + if (!this.#hasFetchMethod) { + if (status) status.fetch = 'get' + return this.get(k, { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + status, + }) + } + + const options = { + allowStale, + updateAgeOnGet, + noDeleteOnStaleGet, + ttl, + noDisposeOnSet, + size, + sizeCalculation, + noUpdateTTL, + noDeleteOnFetchRejection, + allowStaleOnFetchRejection, + allowStaleOnFetchAbort, + ignoreFetchAbort, + status, + signal, + } + + let index = this.#keyMap.get(k) + if (index === undefined) { + if (status) status.fetch = 'miss' + const p = this.#backgroundFetch(k, index, options, context) + return (p.__returned = p) + } else { + // in cache, maybe already fetching + const v = this.#valList[index] + if (this.#isBackgroundFetch(v)) { + const stale = + allowStale && v.__staleWhileFetching !== undefined + if (status) { + status.fetch = 'inflight' + if (stale) status.returnedStale = true + } + return stale ? v.__staleWhileFetching : (v.__returned = v) + } + + // if we force a refresh, that means do NOT serve the cached value, + // unless we are already in the process of refreshing the cache. 
+ const isStale = this.#isStale(index) + if (!forceRefresh && !isStale) { + if (status) status.fetch = 'hit' + this.#moveToTail(index) + if (updateAgeOnGet) { + this.#updateItemAge(index) + } + if (status) this.#statusTTL(status, index) + return v + } + + // ok, it is stale or a forced refresh, and not already fetching. + // refresh the cache. + const p = this.#backgroundFetch(k, index, options, context) + const hasStale = p.__staleWhileFetching !== undefined + const staleVal = hasStale && allowStale + if (status) { + status.fetch = isStale ? 'stale' : 'refresh' + if (staleVal && isStale) status.returnedStale = true + } + return staleVal ? p.__staleWhileFetching : (p.__returned = p) + } + } + + /** + * Return a value from the cache. Will update the recency of the cache + * entry found. + * + * If the key is not found, get() will return `undefined`. + */ + get(k: K, getOptions: LRUCache.GetOptions = {}) { + const { + allowStale = this.allowStale, + updateAgeOnGet = this.updateAgeOnGet, + noDeleteOnStaleGet = this.noDeleteOnStaleGet, + status, + } = getOptions + const index = this.#keyMap.get(k) + if (index !== undefined) { + const value = this.#valList[index] + const fetching = this.#isBackgroundFetch(value) + if (status) this.#statusTTL(status, index) + if (this.#isStale(index)) { + if (status) status.get = 'stale' + // delete only if not an in-flight background fetch + if (!fetching) { + if (!noDeleteOnStaleGet) { + this.delete(k) + } + if (status && allowStale) status.returnedStale = true + return allowStale ? value : undefined + } else { + if ( + status && + allowStale && + value.__staleWhileFetching !== undefined + ) { + status.returnedStale = true + } + return allowStale ? value.__staleWhileFetching : undefined + } + } else { + if (status) status.get = 'hit' + // if we're currently fetching it, we don't actually have it yet + // it's not stale, which means this isn't a staleWhileRefetching. 
+ // If it's not stale, and fetching, AND has a __staleWhileFetching + // value, then that means the user fetched with {forceRefresh:true}, + // so it's safe to return that value. + if (fetching) { + return value.__staleWhileFetching + } + this.#moveToTail(index) + if (updateAgeOnGet) { + this.#updateItemAge(index) + } + return value + } + } else if (status) { + status.get = 'miss' + } + } + + #connect(p: Index, n: Index) { + this.#prev[n] = p + this.#next[p] = n + } + + #moveToTail(index: Index): void { + // if tail already, nothing to do + // if head, move head to next[index] + // else + // move next[prev[index]] to next[index] (head has no prev) + // move prev[next[index]] to prev[index] + // prev[index] = tail + // next[tail] = index + // tail = index + if (index !== this.#tail) { + if (index === this.#head) { + this.#head = this.#next[index] as Index + } else { + this.#connect( + this.#prev[index] as Index, + this.#next[index] as Index + ) + } + this.#connect(this.#tail, index) + this.#tail = index + } + } + + /** + * Deletes a key out of the cache. + * Returns true if the key was deleted, false otherwise. 
+ */ + delete(k: K) { + let deleted = false + if (this.#size !== 0) { + const index = this.#keyMap.get(k) + if (index !== undefined) { + deleted = true + if (this.#size === 1) { + this.clear() + } else { + this.#removeItemSize(index) + const v = this.#valList[index] + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else if (this.#hasDispose || this.#hasDisposeAfter) { + if (this.#hasDispose) { + this.#dispose?.(v as V, k, 'delete') + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v as V, k, 'delete']) + } + } + this.#keyMap.delete(k) + this.#keyList[index] = undefined + this.#valList[index] = undefined + if (index === this.#tail) { + this.#tail = this.#prev[index] as Index + } else if (index === this.#head) { + this.#head = this.#next[index] as Index + } else { + this.#next[this.#prev[index]] = this.#next[index] + this.#prev[this.#next[index]] = this.#prev[index] + } + this.#size-- + this.#free.push(index) + } + } + } + if (this.#hasDisposeAfter && this.#disposed?.length) { + const dt = this.#disposed + let task: DisposeTask | undefined + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task) + } + } + return deleted + } + + /** + * Clear the cache entirely, throwing away all values. 
+ */ + clear() { + for (const index of this.#rindexes({ allowStale: true })) { + const v = this.#valList[index] + if (this.#isBackgroundFetch(v)) { + v.__abortController.abort(new Error('deleted')) + } else { + const k = this.#keyList[index] + if (this.#hasDispose) { + this.#dispose?.(v as V, k as K, 'delete') + } + if (this.#hasDisposeAfter) { + this.#disposed?.push([v as V, k as K, 'delete']) + } + } + } + + this.#keyMap.clear() + this.#valList.fill(undefined) + this.#keyList.fill(undefined) + if (this.#ttls && this.#starts) { + this.#ttls.fill(0) + this.#starts.fill(0) + } + if (this.#sizes) { + this.#sizes.fill(0) + } + this.#head = 0 as Index + this.#tail = 0 as Index + this.#free.length = 0 + this.#calculatedSize = 0 + this.#size = 0 + if (this.#hasDisposeAfter && this.#disposed) { + const dt = this.#disposed + let task: DisposeTask | undefined + while ((task = dt?.shift())) { + this.#disposeAfter?.(...task) + } + } + } +} diff --git a/tap-snapshots/test/ttl.ts.test.cjs b/tap-snapshots/test/ttl.ts.test.cjs index 019d56a2..0459cf25 100644 --- a/tap-snapshots/test/ttl.ts.test.cjs +++ b/tap-snapshots/test/ttl.ts.test.cjs @@ -45,29 +45,28 @@ Array [ Object { "get": "hit", "now": 1969, - "remainingTTL": 15, + "remainingTTL": 5, "start": 1964, "ttl": 10, }, Object { "get": "hit", "now": 1974, - "remainingTTL": 20, + "remainingTTL": 0, "start": 1964, "ttl": 10, }, Object { "has": "stale", "now": 1975, - "remainingTTL": 21, + "remainingTTL": -1, "start": 1964, "ttl": 10, }, Object { "get": "stale", "now": 1975, - "remainingTTL": 21, - "returnedStale": false, + "remainingTTL": -1, "start": 1964, "ttl": 10, }, @@ -81,29 +80,28 @@ Array [ Object { "has": "hit", "now": 2025, - "remainingTTL": 150, + "remainingTTL": 50, "start": 1975, "ttl": 100, }, Object { "get": "hit", "now": 2025, - "remainingTTL": 150, + "remainingTTL": 50, "start": 1975, "ttl": 100, }, Object { "has": "stale", "now": 2076, - "remainingTTL": 201, + "remainingTTL": -1, "start": 1975, "ttl": 100, }, 
Object { "get": "stale", "now": 2076, - "remainingTTL": 201, - "returnedStale": false, + "remainingTTL": -1, "start": 1975, "ttl": 100, }, @@ -173,15 +171,14 @@ Array [ Object { "has": "stale", "now": 2087, - "remainingTTL": 21, + "remainingTTL": -1, "start": 2076, "ttl": 10, }, Object { "get": "stale", "now": 2087, - "remainingTTL": 21, - "returnedStale": false, + "remainingTTL": -1, "start": 2076, "ttl": 10, }, @@ -207,51 +204,49 @@ Array [ Object { "get": "hit", "now": 1522, - "remainingTTL": 15, + "remainingTTL": 5, "start": 1517, "ttl": 10, }, Object { "get": "hit", "now": 1527, - "remainingTTL": 20, + "remainingTTL": 0, "start": 1517, "ttl": 10, }, Object { "has": "stale", "now": 1529, - "remainingTTL": 22, + "remainingTTL": -2, "start": 1517, "ttl": 10, }, Object { "get": "stale", "now": 1529, - "remainingTTL": 22, - "returnedStale": false, + "remainingTTL": -2, "start": 1517, "ttl": 10, }, Object { "has": "hit", "now": 1579, - "remainingTTL": 150, + "remainingTTL": 50, "start": 1529, "ttl": 100, }, Object { "get": "hit", "now": 1579, - "remainingTTL": 150, + "remainingTTL": 50, "start": 1529, "ttl": 100, }, Object { "get": "stale", "now": 1630, - "remainingTTL": 201, - "returnedStale": false, + "remainingTTL": -1, "start": 1529, "ttl": 100, }, @@ -321,31 +316,22 @@ Array [ Object { "has": "stale", "now": 1641, - "remainingTTL": 21, + "remainingTTL": -1, "start": 1630, "ttl": 10, }, Object { "get": "stale", "now": 1641, - "remainingTTL": 21, - "returnedStale": false, + "remainingTTL": -1, "start": 1630, "ttl": 10, }, Object { "get": "hit", - "now": 1741, - "remainingTTL": 1741, - "start": 0, - "ttl": 0, }, Object { "get": "hit", - "now": 1841, - "remainingTTL": 1841, - "start": 0, - "ttl": 0, }, ] ` @@ -397,15 +383,14 @@ Array [ Object { "has": "stale", "now": 1952, - "remainingTTL": 121, + "remainingTTL": -101, "start": 1841, "ttl": 10, }, Object { "get": "stale", "now": 1952, - "remainingTTL": 121, - "returnedStale": false, + "remainingTTL": -101, "start": 
1841, "ttl": 10, }, @@ -479,29 +464,28 @@ Array [ Object { "get": "hit", "now": 453, - "remainingTTL": 15, + "remainingTTL": 5, "start": 448, "ttl": 10, }, Object { "get": "hit", "now": 458, - "remainingTTL": 20, + "remainingTTL": 0, "start": 448, "ttl": 10, }, Object { "has": "stale", "now": 459, - "remainingTTL": 21, + "remainingTTL": -1, "start": 448, "ttl": 10, }, Object { "get": "stale", "now": 459, - "remainingTTL": 21, - "returnedStale": false, + "remainingTTL": -1, "start": 448, "ttl": 10, }, @@ -515,29 +499,28 @@ Array [ Object { "has": "hit", "now": 509, - "remainingTTL": 150, + "remainingTTL": 50, "start": 459, "ttl": 100, }, Object { "get": "hit", "now": 509, - "remainingTTL": 150, + "remainingTTL": 50, "start": 459, "ttl": 100, }, Object { "has": "stale", "now": 560, - "remainingTTL": 201, + "remainingTTL": -1, "start": 459, "ttl": 100, }, Object { "get": "stale", "now": 560, - "remainingTTL": 201, - "returnedStale": false, + "remainingTTL": -1, "start": 459, "ttl": 100, }, @@ -607,15 +590,14 @@ Array [ Object { "has": "stale", "now": 571, - "remainingTTL": 21, + "remainingTTL": -1, "start": 560, "ttl": 10, }, Object { "get": "stale", "now": 571, - "remainingTTL": 21, - "returnedStale": false, + "remainingTTL": -1, "start": 560, "ttl": 10, }, @@ -641,51 +623,49 @@ Array [ Object { "get": "hit", "now": 6, - "remainingTTL": 15, + "remainingTTL": 5, "start": 1, "ttl": 10, }, Object { "get": "hit", "now": 11, - "remainingTTL": 20, + "remainingTTL": 0, "start": 1, "ttl": 10, }, Object { "has": "stale", "now": 13, - "remainingTTL": 22, + "remainingTTL": -2, "start": 1, "ttl": 10, }, Object { "get": "stale", "now": 13, - "remainingTTL": 22, - "returnedStale": false, + "remainingTTL": -2, "start": 1, "ttl": 10, }, Object { "has": "hit", "now": 63, - "remainingTTL": 150, + "remainingTTL": 50, "start": 13, "ttl": 100, }, Object { "get": "hit", "now": 63, - "remainingTTL": 150, + "remainingTTL": 50, "start": 13, "ttl": 100, }, Object { "get": "stale", "now": 114, 
- "remainingTTL": 201, - "returnedStale": false, + "remainingTTL": -1, "start": 13, "ttl": 100, }, @@ -755,31 +735,22 @@ Array [ Object { "has": "stale", "now": 125, - "remainingTTL": 21, + "remainingTTL": -1, "start": 114, "ttl": 10, }, Object { "get": "stale", "now": 125, - "remainingTTL": 21, - "returnedStale": false, + "remainingTTL": -1, "start": 114, "ttl": 10, }, Object { "get": "hit", - "now": 225, - "remainingTTL": 225, - "start": 0, - "ttl": 0, }, Object { "get": "hit", - "now": 325, - "remainingTTL": 325, - "start": 0, - "ttl": 0, }, ] ` @@ -831,15 +802,14 @@ Array [ Object { "has": "stale", "now": 436, - "remainingTTL": 121, + "remainingTTL": -101, "start": 325, "ttl": 10, }, Object { "get": "stale", "now": 436, - "remainingTTL": 121, - "returnedStale": false, + "remainingTTL": -101, "start": 325, "ttl": 10, }, diff --git a/test/abort-controller.js b/test/abort-controller.js deleted file mode 100644 index 13caa962..00000000 --- a/test/abort-controller.js +++ /dev/null @@ -1,47 +0,0 @@ -// this is just a test of the AbortController polyfill -// which is a little bit weird, since that's not about lru caching -// at all, so it's tempting to think that this module should -// pull it in as a dep or something. that would be the -// javascripty thing to do, right? but it would mean that -// this is no longer a zero-deps module, so meh. it's fine. -// This is a JS test rather than TS, because we have to do some -// improper things with types in order to make the polyfill load. 
-global.AbortController = null -global.AbortSignal = null - -const t = require('tap') - -const LRUCache = require('../') -const { AbortController, AbortSignal } = LRUCache - -t.type(AbortController, 'function') -t.type(AbortSignal, 'function') - -t.test('onabort method', t => { - const ac = new AbortController() - t.type(ac.signal, AbortSignal) - - let calledOnAbort = false - ac.signal.onabort = () => (calledOnAbort = true) - ac.abort() - t.equal(calledOnAbort, true, 'called onabort method') - - t.end() -}) - -t.test('add/remove event listener', t => { - const ac = new AbortController() - let receivedEvent = null - ac.signal.addEventListener('abort', e => (receivedEvent = e)) - const nope = () => { - throw 'nope' - } - ac.signal.addEventListener('abort', nope) - ac.signal.removeEventListener('abort', nope) - ac.signal.addEventListener('foo', nope) - ac.signal.dispatchEvent({ type: 'foo', target: ac.signal }) - ac.signal.removeEventListener('foo', nope) - ac.abort() - t.match(receivedEvent, { type: 'abort', target: ac.signal }) - t.end() -}) diff --git a/test/avoid-memory-leak.ts b/test/avoid-memory-leak.ts index 07b542fa..51a37e77 100644 --- a/test/avoid-memory-leak.ts +++ b/test/avoid-memory-leak.ts @@ -1,4 +1,4 @@ -#!/usr/bin/env node --expose-gc +#!/usr/bin/env node --no-warnings --loader=ts-node/esm --expose-gc // https://github.com/isaacs/node-lru-cache/issues/227 @@ -26,7 +26,7 @@ const tryReq = (mod: string) => { const v8 = tryReq('v8') -import LRUCache from '../' +import { LRUCache } from '../' const expectItemCount = Math.ceil(maxSize / itemSize) const max = expectItemCount + 1 const keyRange = expectItemCount * 2 @@ -55,7 +55,7 @@ const runTest = async (t: Tap.Test, cache: LRUCache) => { } // now start the setting and profiling - const profiles = [] + const profiles: ReturnType[] = [] for (let i = 0; i < n; i++) { if (i % profEvery === 0) { const profile = prof(i, cache) diff --git a/test/basic.ts b/test/basic.ts index 06f4dca6..e7542d0b 100644 --- 
a/test/basic.ts +++ b/test/basic.ts @@ -3,14 +3,27 @@ if (typeof performance === 'undefined') { } import t from 'tap' -import LRU from '../' +import { LRUCache as LRU } from '../' import { expose } from './fixtures/expose' +t.test('verify require works as expected', t => { + t.equal( + require.resolve('../'), + require.resolve('../dist/cjs/index.js'), + 'require resolves to expected module' + ) + const { LRUCache } = t.mock('../dist/cjs/index.js', {}) + t.equal( + LRUCache.toString().split(/\r?\n/)[0].trim(), + 'class LRUCache {' + ) + t.end() +}) t.test('basic operation', t => { - const statuses:LRU.Status[] = [] - const s = ():LRU.Status => { - const status:LRU.Status = {} + const statuses: LRU.Status[] = [] + const s = (): LRU.Status => { + const status: LRU.Status = {} statuses.push(status) return status } @@ -93,7 +106,7 @@ t.test('basic operation', t => { c.set(true, 'true', { status: s() }) t.equal(c.has(true, { status: s() }), true) t.equal(c.get(true, { status: s() }), 'true') - c.delete(true) + c.set(true, undefined) t.equal(c.has(true, { status: s() }), false) t.matchSnapshot(statuses, 'status tracking') @@ -106,6 +119,8 @@ t.test('bad max values', t => { // @ts-expect-error t.throws(() => new LRU(123)) // @ts-expect-error + t.throws(() => new LRU({})) + // @ts-expect-error t.throws(() => new LRU(null)) t.throws(() => new LRU({ max: -123 })) t.throws(() => new LRU({ max: 0 })) @@ -208,9 +223,9 @@ t.test('peek does not disturb order', t => { }) t.test('re-use key before initial fill completed', t => { - const statuses:LRU.Status[] = [] - const s = ():LRU.Status => { - const status:LRU.Status = {} + const statuses: LRU.Status[] = [] + const s = (): LRU.Status => { + const status: LRU.Status = {} statuses.push(status) return status } diff --git a/test/delete-while-iterating.ts b/test/delete-while-iterating.ts index a6ab6458..33298253 100644 --- a/test/delete-while-iterating.ts +++ b/test/delete-while-iterating.ts @@ -1,5 +1,5 @@ import t from 'tap' -import 
LRU from '../' +import { LRUCache as LRU } from '../' t.beforeEach(t => { const c = new LRU({ max: 5 }) diff --git a/test/deprecations.ts b/test/deprecations.ts deleted file mode 100644 index 2faf4a6f..00000000 --- a/test/deprecations.ts +++ /dev/null @@ -1,85 +0,0 @@ -import t from 'tap' -import LRU from '../' - -const warnings: any[] = [] -process.emitWarning = (...w) => warnings.push(w) - -t.test('warns exactly once for a given deprecation', t => { - const c = new LRU({ - max: 100, - maxSize: 100, - maxAge: 1000, - stale: true, - length: () => 1, - }) - c.reset() - t.equal(c.length, 0) - t.equal(c.prune, c.purgeStale) - t.equal(c.reset, c.clear) - t.equal(c.del, c.delete) - - // not technically a "deprecation" but similar - new LRU({ ttl: 10 }) - - t.matchSnapshot(warnings) - - warnings.length = 0 - const d = new LRU({ - max: 100, - maxSize: 100, - maxAge: 1000, - stale: true, - length: () => 1, - }) - d.reset() - - t.equal(d.length, 0) - t.equal(d.prune, d.purgeStale) - t.equal(d.reset, d.clear) - new LRU({ ttl: 10 }) - - t.strictSame(warnings, [], 'only warn once') - - warnings.length = 0 - t.end() -}) - -t.test( - 'does not do deprecation warning without process object', - t => { - // set process to null (emulate a browser) - const proc = global.process - const { error } = console - t.teardown(() => { - global.process = proc - console.error = error - }) - const consoleErrors: any[] = [] - console.error = (...a) => consoleErrors.push(a) - // @ts-ignore - global.process = { - ...proc, - // @ts-ignore - emitWarning: null, - } - const LRU = t.mock('../', {}) - const c = new LRU({ - max: 100, - maxSize: 100, - maxAge: 1000, - stale: true, - length: () => 1, - }) - c.reset() - t.equal(c.length, 0) - t.equal(c.prune, c.purgeStale) - t.equal(c.reset, c.clear) - t.equal(c.del, c.delete) - global.process = proc - - t.strictSame(warnings, [], 'no process exists') - t.matchSnapshot(consoleErrors, 'warnings sent to console.error') - - t.end() - } -) diff --git 
a/test/dispose.ts b/test/dispose.ts index 8283f527..d487f711 100644 --- a/test/dispose.ts +++ b/test/dispose.ts @@ -1,5 +1,5 @@ import t from 'tap' -import LRU from '../' +import { LRUCache as LRU } from '../' t.test('disposal', t => { const disposed: any[] = [] @@ -90,7 +90,6 @@ t.test('disposal', t => { } t.strictSame(disposed, [[2, 2, 'set']]) - // @ts-expect-error c.noDisposeOnSet = true c.clear() disposed.length = 0 diff --git a/test/esm-load.mjs b/test/esm-load.mjs index 40739fdc..5f113162 100644 --- a/test/esm-load.mjs +++ b/test/esm-load.mjs @@ -1,5 +1,5 @@ import t from 'tap' -import LRUCache from '../index.mjs' +import { LRUCache } from '../dist/mjs/index.js' const c = new LRUCache({ max: 2 }) t.type(c, LRUCache) c.set(1, 1) diff --git a/test/fetch.ts b/test/fetch.ts index 7c6be80c..984e78d5 100644 --- a/test/fetch.ts +++ b/test/fetch.ts @@ -2,11 +2,10 @@ if (typeof performance === 'undefined') { global.performance = require('perf_hooks').performance } import t from 'tap' -import type { Fetcher, Status } from '../' -import LRUCache from '../' -import { expose, exposeStatics } from './fixtures/expose' +import { BackgroundFetch, LRUCache } from '../' +import { expose } from './fixtures/expose' -const fn: Fetcher = async (_, v) => +const fn: LRUCache.Fetcher = async (_, v) => new Promise(res => setImmediate(() => res(v === undefined ? 0 : v + 1)) ) @@ -18,14 +17,6 @@ clock.advance(1) let LRU = LRUCache -// if we're on a version that *doesn't* have a native AbortController, -// put the polyfill in there to start with, so LRU covers both cases. 
-if (!global.AbortController || !global.AbortSignal) { - global.AbortController = exposeStatics(LRU).AbortController - global.AbortSignal = exposeStatics(LRU).AbortSignal - LRU = t.mock('../', {}) as typeof LRUCache -} - const c = new LRU({ fetchMethod: fn, max: 5, @@ -85,7 +76,7 @@ t.test('asynchronous fetching', async t => { t.matchSnapshot(JSON.stringify(dump), 'safe to stringify dump') t.equal(e.isBackgroundFetch(v), true) - t.equal(e.backgroundFetch('key', 0, {}), v) + t.equal(e.backgroundFetch('key', 0, {}, undefined), v) await v const v7 = await c.fetch('key', { allowStale: true, @@ -165,10 +156,6 @@ t.test('fetchMethod must be a function', async t => { t.throws(() => new LRU({ fetchMethod: true, max: 2 })) }) -t.test('no fetchContext without fetchMethod', async t => { - t.throws(() => new LRU({ fetchContext: true, max: 2 })) -}) - t.test('fetch without fetch method', async t => { const c = new LRU({ max: 3 }) c.set(0, 0) @@ -262,170 +249,6 @@ t.test('fetch options, signal', async t => { t.matchSnapshot(statuses, 'status updates') }) -t.test('fetch options, signal, with polyfill', async t => { - const { AbortController, AbortSignal } = global - t.teardown(() => { - Object.assign(global, { AbortController, AbortSignal }) - }) - // @ts-expect-error - global.AbortController = undefined - // @ts-expect-error - global.AbortSignal = undefined - const LRU = t.mock('../', {}) as typeof LRUCache - let aborted = false - const disposed: any[] = [] - const disposedAfter: any[] = [] - const c = new LRU({ - max: 3, - ttl: 100, - fetchMethod: async (k, oldVal, { signal, options }) => { - // do something async - await new Promise(res => setImmediate(res)) - if (signal.aborted) { - aborted = true - return - } - if (k === 2) { - options.ttl = 25 - } - return (oldVal || 0) + 1 - }, - dispose: (v, k, reason) => { - disposed.push([v, k, reason]) - }, - disposeAfter: (v, k, reason) => { - disposedAfter.push([v, k, reason]) - }, - }) - - const v1 = c.fetch(2) - const testp1 = 
t.rejects(v1, 'aborted by delete') - c.delete(2) - await testp1 - await new Promise(res => setImmediate(res)) - t.equal(aborted, true) - t.same(disposed, [], 'no disposals for aborted promises') - t.same(disposedAfter, [], 'no disposals for aborted promises') - - aborted = false - const v2 = c.fetch(2) - const testp2 = t.rejects(v2, 'aborted by set') - c.set(2, 2) - await testp2 - await new Promise(res => setImmediate(res)) - t.equal(aborted, true) - t.same(disposed, [], 'no disposals for aborted promises') - t.same(disposedAfter, [], 'no disposals for aborted promises') - c.delete(2) - disposed.length = 0 - disposedAfter.length = 0 - - aborted = false - const v3 = c.fetch(2) - const testp3 = t.rejects(v3, 'aborted by evict') - c.set(3, 3) - c.set(4, 4) - c.set(5, 5) - await testp3 - await new Promise(res => setImmediate(res)) - t.equal(aborted, true) - t.same(disposed, [], 'no disposals for aborted promises') - t.same(disposedAfter, [], 'no disposals for aborted promises') - - aborted = false - await c.fetch(6, { ttl: 1000 }) - t.equal( - c.getRemainingTTL(6), - 1000, - 'overridden ttl in fetch() opts' - ) - await c.fetch(2, { ttl: 1 }) - t.equal(c.getRemainingTTL(2), 25, 'overridden ttl in fetchMethod') -}) - -t.test('fetch options, signal, with half polyfill', async t => { - const { AbortController, AbortSignal } = global - t.teardown(() => { - global.AbortSignal = AbortSignal - //@ts-expect-error - delete AbortController.AbortSignal - }) - // @ts-expect-error - global.AbortController.AbortSignal = AbortSignal - // @ts-expect-error - global.AbortSignal = undefined - const LRU = t.mock('../', {}) as typeof LRUCache - let aborted = false - const disposed: any[] = [] - const disposedAfter: any[] = [] - const c = new LRU({ - max: 3, - ttl: 100, - fetchMethod: async (k, oldVal, { signal, options }) => { - // do something async - await new Promise(res => setImmediate(res)) - if (signal.aborted) { - aborted = true - return - } - if (k === 2) { - options.ttl = 25 - } - 
return (oldVal || 0) + 1 - }, - dispose: (v, k, reason) => { - disposed.push([v, k, reason]) - }, - disposeAfter: (v, k, reason) => { - disposedAfter.push([v, k, reason]) - }, - }) - - const v1 = c.fetch(2) - const testp1 = t.rejects(v1, 'aborted by delete') - c.delete(2) - await testp1 - await new Promise(res => setImmediate(res)) - t.equal(aborted, true) - t.same(disposed, [], 'no disposals for aborted promises') - t.same(disposedAfter, [], 'no disposals for aborted promises') - - aborted = false - const v2 = c.fetch(2) - const testp2 = t.rejects(v2, 'aborted by set') - c.set(2, 2) - await testp2 - await new Promise(res => setImmediate(res)) - t.equal(aborted, true) - t.same(disposed, [], 'no disposals for aborted promises') - t.same(disposedAfter, [], 'no disposals for aborted promises') - c.delete(2) - disposed.length = 0 - disposedAfter.length = 0 - - aborted = false - const v3 = c.fetch(2) - const testp3 = t.rejects(v3, 'aborted by evict') - c.set(3, 3) - c.set(4, 4) - c.set(5, 5) - await testp3 - await new Promise(res => setImmediate(res)) - t.equal(aborted, true) - t.same(disposed, [], 'no disposals for aborted promises') - t.same(disposedAfter, [], 'no disposals for aborted promises') - - aborted = false - await c.fetch(6, { ttl: 1000 }) - t.equal( - c.getRemainingTTL(6), - 1000, - 'overridden ttl in fetch() opts' - ) - await c.fetch(2, { ttl: 1 }) - t.equal(c.getRemainingTTL(2), 25, 'overridden ttl in fetchMethod') -}) - t.test('fetchMethod throws', async t => { const statuses: LRUCache.Status[] = [] const s = (): LRUCache.Status => { @@ -556,36 +379,38 @@ t.test( t.equal(cache.get('a'), 99, 'did not delete, was replaced') await t.rejects(cache.fetch('b'), { message: 'fetch failure' }) t.equal(e.keyMap.get('b'), undefined, 'not in cache') - t.equal(e.valList[1], null, 'not in cache') + t.equal(e.valList[1], undefined, 'not in cache') } ) -t.test('fetchContext', async t => { - const cache = new LRU({ +t.test('fetch context', async t => { + const cache = 
new LRU({ max: 10, ttl: 10, allowStale: true, noDeleteOnFetchRejection: true, - fetchContext: 'default context', fetchMethod: async (k, _, { context, options }) => { //@ts-expect-error - t.equal(options.fetchContext, undefined) + t.equal(options.context, undefined) t.equal(context, expectContext) return [k, context] }, }) - let expectContext = 'default context' - t.strictSame(await cache.fetch('x'), ['x', 'default context']) - expectContext = 'overridden' - t.strictSame( - await cache.fetch('y', { fetchContext: 'overridden' }), - ['y', 'overridden'] - ) + let expectContext = 'overridden' + t.strictSame(await cache.fetch('y', { context: 'overridden' }), [ + 'y', + 'overridden', + ]) + expectContext = 'first context' + t.strictSame(await cache.fetch('x', { context: 'first context' }), [ + 'x', + 'first context', + ]) // if still in cache, doesn't call fetchMethod again - t.strictSame(await cache.fetch('x', { fetchContext: 'ignored' }), [ + t.strictSame(await cache.fetch('x', { context: 'ignored' }), [ 'x', - 'default context', + 'first context', ]) }) @@ -619,7 +444,11 @@ t.test('forceRefresh', async t => { // still there, because we're allowing stale, and it's not stale const status: LRUCache.Status = {} t.equal( - await cache.fetch(2, { forceRefresh: true, allowStale: false, status }), + await cache.fetch(2, { + forceRefresh: true, + allowStale: false, + status, + }), 2 ) t.equal(status.fetch, 'refresh', 'status reflects forced refresh') @@ -662,7 +491,7 @@ t.test('allowStaleOnFetchRejection', async t => { t.equal(await c.fetch(1), 1) clock.advance(11) fetchFail = true - const status: Status = {} + const status: LRUCache.Status = {} t.equal(await c.fetch(1, { status }), 1) t.equal( status.returnedStale, @@ -818,7 +647,8 @@ t.test('abort, but then keep on fetching anyway', async t => { return new Promise(res => setTimeout(() => { resolved = true - res(returnUndefined ? 
undefined : k) + if (returnUndefined) res() + else res(k) }, 100) ) }, @@ -830,7 +660,11 @@ t.test('abort, but then keep on fetching anyway', async t => { ac.abort(er) clock.advance(100) t.equal(await p, 1) - t.equal(status.fetchAbortIgnored, true, 'status reflects ignored abort') + t.equal( + status.fetchAbortIgnored, + true, + 'status reflects ignored abort' + ) t.equal(status.fetchError, er) t.equal(status.fetchUpdated, true) @@ -866,14 +700,17 @@ t.test('abort, but then keep on fetching anyway', async t => { }) t.test('allowStaleOnFetchAbort', async t => { - const c = new LRUCache({ + const c = new LRUCache({ ttl: 10, max: 10, allowStaleOnFetchAbort: true, fetchMethod: async (k, _, { signal }) => { return new Promise(res => { const t = setTimeout(() => res(k), 100) - signal.addEventListener('abort', () => clearTimeout(t)) + signal.addEventListener('abort', () => { + clearTimeout(t) + res() + }) }) }, }) @@ -883,12 +720,19 @@ t.test('allowStaleOnFetchAbort', async t => { const p = c.fetch(1, { signal: ac.signal }) ac.abort(new Error('gimme the stale value')) t.equal(await p, 10) - t.equal(c.get(1, { allowStale: true }), 10) + t.equal( + c.get(1, { allowStale: true, noDeleteOnStaleGet: true }), + 10 + ) + const p2 = c.fetch(1) + c.set(1, 100) + t.equal(await p2, 10) + t.equal(c.get(1), 100) }) t.test('background update on timeout, return stale', async t => { let returnUndefined = false - const c = new LRUCache({ + const c = new LRUCache({ ttl: 10, max: 10, ignoreFetchAbort: true, @@ -931,3 +775,94 @@ t.test('background update on timeout, return stale', async t => { await new Promise(res => setImmediate(res)) t.equal(e.valList[0], 99) }) + +t.test('fetch context required if set in ctor type', async t => { + const c = new LRUCache({ + max: 5, + fetchMethod: async (k, _, { context }) => { + if (k === 'y') t.equal(context, undefined) + else if (k === 'z') t.same(context, { x: 1 }) + else t.same(context, { a: 1 }) + return k + }, + }) + c.fetch('x', { context: { a: 1 
} }) + //@ts-expect-error + c.fetch('y') + //@ts-expect-error + c.fetch('z', { context: { x: 1 } }) + + const c2 = new LRUCache({ + max: 5, + fetchMethod: async (k, _, { context }) => { + if (k === 'y') t.equal(context, undefined) + else if (k === 'z') t.same(context, { x: 1 }) + else t.same(context, { a: 1 }) + return k + }, + }) + //@ts-expect-error + c2.fetch('x', { context: { a: 1 } }) + c2.fetch('y') + c2.fetch('y', { allowStale: true }) + //@ts-expect-error + c2.fetch('z', { context: { x: 1 } }) + + t.end() +}) + +t.test('has false for pending fetch without stale val', async t => { + const c = new LRUCache({ + max: 10, + fetchMethod: async (key: number) => + new Promise(r => setTimeout(() => r(key), 10)), + }) + const e = expose(c) + { + const p = c.fetch(1) + const index = e.keyMap.get(1) as number + t.not(index, undefined) + const bf = e.valList[index] as BackgroundFetch + t.type(bf, Promise, 'pending fetch') + t.equal(bf.hasOwnProperty('__staleWhileFetching'), true) + t.equal(c.has(1), false) + clock.advance(10) + const res = await p + t.equal(res, 1) + t.equal(c.has(1), true) + } + + { + // background fetch that DOES have a __staleWhileFetching value + const p = c.fetch(1, { forceRefresh: true }) + const index = e.keyMap.get(1) as number + t.not(index, undefined) + const bf = e.valList[index] as BackgroundFetch + t.type(bf, Promise, 'pending fetch') + t.equal(bf.__staleWhileFetching, 1) + t.equal(c.has(1), true) + clock.advance(10) + const res = await p + t.equal(res, 1) + t.equal(c.has(1), true) + } +}) + +t.test('properly dispose when using fetch', async t => { + const disposes: [number, number, string][] = [] + const disposeAfters: [number, number, string][] = [] + let i = 0 + const c = new LRUCache({ + max: 3, + ttl: 10, + dispose: (key, val, reason) => disposes.push([key, val, reason]), + disposeAfter: (key, val, reason) => + disposeAfters.push([key, val, reason]), + fetchMethod: async () => Promise.resolve(i++), + }) + t.equal(await c.fetch(1), 0) + 
clock.advance(20) + t.equal(await c.fetch(1), 1) + t.strictSame(disposes, [[0, 1, 'set']]) + t.strictSame(disposeAfters, [[0, 1, 'set']]) +}) diff --git a/test/find.ts b/test/find.ts index b6a89c3b..d1bf7853 100644 --- a/test/find.ts +++ b/test/find.ts @@ -1,5 +1,5 @@ import t from 'tap' -import LRU from '../' +import { LRUCache as LRU } from '../' const resolves: Record< number, diff --git a/test/fixtures/expose.ts b/test/fixtures/expose.ts index 7bcfc3f9..044777e5 100644 --- a/test/fixtures/expose.ts +++ b/test/fixtures/expose.ts @@ -1,33 +1,11 @@ -import LRUCache from '../../index' -export const exposeStatics = (LRU: typeof LRUCache) => { - return LRU as unknown as { - AbortController: any - AbortSignal: any - } -} -export const expose = (cache: LRUCache) => { - return cache as unknown as { - isBackgroundFetch: (v: any) => boolean - backgroundFetch: ( - v: any, - index: number, - options: { [k: string]: any }, - context?: any - ) => Promise - isStale: (index?: number) => boolean - valList: any[] - keyList: any[] - free: number[] - keyMap: Map - starts: number[] - ttls: number[] - sizes: number[] - indexes: (...a: any[]) => Iterable - rindexes: (...a: any[]) => Iterable - next: number[] - prev: number[] - head: number - tail: number - moveToTail: (i: number) => void - } +import { LRUCache } from '../../' +export const expose = < + K extends {}, + V extends {}, + FC extends unknown = unknown +>( + cache: LRUCache, + LRU = LRUCache +) => { + return Object.assign(LRU.unsafeExposeInternals(cache), cache) } diff --git a/test/import.mjs b/test/import.mjs new file mode 100644 index 00000000..285b4219 --- /dev/null +++ b/test/import.mjs @@ -0,0 +1,14 @@ +import t from 'tap' +t.test('import', async t => { + const imp = await import('../dist/mjs/index.js') + t.equal(Object.getPrototypeOf(imp), null, 'import returns null obj') + t.equal( + typeof imp.LRUCache, + 'function', + 'LRUCache export is function' + ) + t.equal( + imp.LRUCache.toString().split(/\r?\n/)[0].trim(), + 
'class LRUCache {' + ) +}) diff --git a/test/load-check.ts b/test/load-check.ts index ad54ef17..fddc9130 100644 --- a/test/load-check.ts +++ b/test/load-check.ts @@ -1,6 +1,6 @@ process.env.TAP_BAIL = '1' import t from 'tap' -import LRU from '../' +import { LRUCache as LRU } from '../' import { expose } from './fixtures/expose' const max = 10000 @@ -27,7 +27,7 @@ const verifyCache = () => { // index in the keyMap, and the value matches. const e = expose(cache) for (const [k, i] of e.keyMap.entries()) { - const v = e.valList[i] + const v = e.valList[i] as number[] const key = e.keyList[i] if (k !== key) { t.equal(k, key, 'key at proper index', { k, i }) diff --git a/test/load.ts b/test/load.ts index 72ecebc3..007ca8a5 100644 --- a/test/load.ts +++ b/test/load.ts @@ -1,12 +1,12 @@ import t from 'tap' -import LRU from '../' +import { LRUCache as LRU } from '../' const c = new LRU({ max: 5 }) for (let i = 0; i < 9; i++) { c.set(i, i) } -const d = new LRU(c as unknown as LRU.Options) +const d = new LRU(c) d.load(c.dump()) t.strictSame(d, c) diff --git a/test/map-like.ts b/test/map-like.ts index 27e576bc..c25c62bb 100644 --- a/test/map-like.ts +++ b/test/map-like.ts @@ -2,24 +2,30 @@ if (typeof global.performance === 'undefined') { global.performance = require('perf_hooks').performance } import t from 'tap' -const Clock = require('clock-mock') +import Clock from 'clock-mock' const clock = new Clock() const { performance, Date } = global // @ts-ignore t.teardown(() => Object.assign(global, { performance, Date })) +//@ts-ignore global.Date = clock.Date +//@ts-ignore global.performance = clock -import LRU from '../' +import { LRUCache as LRU } from '../' import { expose } from './fixtures/expose' -const entriesFromForeach = (c: LRU):[k:K,v:V][] => { - const e:[k:K,v:V][] = [] +const entriesFromForeach = ( + c: LRU +): [k: K, v: V][] => { + const e: [k: K, v: V][] = [] c.forEach((v, k) => e.push([k, v])) return e } -const entriesFromRForeach = (c: LRU):[k:K,v:V][] => { - 
const e:[k:K,v:V][] = [] +const entriesFromRForeach = ( + c: LRU +): [k: K, v: V][] => { + const e: [k: K, v: V][] = [] c.rforEach((v, k) => e.push([k, v])) return e } @@ -67,11 +73,17 @@ t.test('bunch of iteration things', async t => { t.matchSnapshot(c.keys(), 'fetch 123 resolved, keys') t.matchSnapshot(c.values(), 'fetch 123 resolved, values') t.matchSnapshot(c.entries(), 'fetch 123 resolved, entries') - t.matchSnapshot(entriesFromForeach(c), 'fetch 123 resolved, foreach') + t.matchSnapshot( + entriesFromForeach(c), + 'fetch 123 resolved, foreach' + ) t.matchSnapshot(c.rkeys(), 'fetch 123 resolved, rkeys') t.matchSnapshot(c.rvalues(), 'fetch 123 resolved, rvalues') t.matchSnapshot(c.rentries(), 'fetch 123 resolved, rentries') - t.matchSnapshot(entriesFromRForeach(c), 'fetch 123 resolved, rforeach') + t.matchSnapshot( + entriesFromRForeach(c), + 'fetch 123 resolved, rforeach' + ) t.matchSnapshot(c.dump(), 'fetch 123 resolved, dump') for (let i = 3; i < 8; i++) { @@ -102,13 +114,17 @@ t.test('bunch of iteration things', async t => { t.matchSnapshot(c.entries(), 'entries, resolved fetch 99 too late') t.matchSnapshot(c.rkeys(), 'rkeys, resolved fetch 99 too late') t.matchSnapshot(c.rvalues(), 'rvalues, resolved fetch 99 too late') - t.matchSnapshot(c.rentries(), 'rentries, resolved fetch 99 too late') + t.matchSnapshot( + c.rentries(), + 'rentries, resolved fetch 99 too late' + ) t.matchSnapshot(c.dump(), 'dump, resolved fetch 99 too late') // pretend an entry is stale for some reason c.set(7, 'stale', { ttl: 1, size: 1 }) const e = expose(c) const idx = e.keyMap.get(7) + if (!e.starts) throw new Error('no starts??') e.starts[idx as number] = clock.now() - 10000 const seen: number[] = [] for (const i of e.indexes()) { @@ -142,13 +158,13 @@ t.test('bunch of iteration things', async t => { t.matchSnapshot(rfeArr, 'rforEach, no thisp') const feArrThisp: any[] = [] const thisp = { a: 1 } - c.forEach(function (value, key) { + c.forEach(function (this: typeof thisp, 
value, key) { feArrThisp.push([value, key, this]) }, thisp) t.matchSnapshot(feArrThisp, 'forEach, with thisp') const rfeArrThisp: any[] = [] const rthisp = { r: 1 } - c.rforEach(function (value, key) { + c.rforEach(function (this: typeof thisp, value, key) { rfeArrThisp.push([value, key, this]) }, rthisp) t.matchSnapshot(rfeArrThisp, 'forEach, with thisp') diff --git a/test/move-to-tail.ts b/test/move-to-tail.ts index 93e35f14..3ce769fe 100644 --- a/test/move-to-tail.ts +++ b/test/move-to-tail.ts @@ -1,5 +1,5 @@ import t from 'tap' -import LRU from '../' +import { LRUCache as LRU } from '../' import { expose } from './fixtures/expose' const c = new LRU({ max: 5 }) @@ -20,7 +20,7 @@ t.test('list integrity', { bail: true }, t => { tail: exp.tail, }) const snap = () => { - const a = [] + const a: ReturnType[] = [] for (let i = 0; i < 5; i++) { a.push(e(i)) } diff --git a/test/pop.ts b/test/pop.ts index 960497c0..bb8e07e8 100644 --- a/test/pop.ts +++ b/test/pop.ts @@ -1,15 +1,78 @@ import t from 'tap' -import LRU from '../' +import { LRUCache as LRU } from '../' -const cache = new LRU({ max: 5 }) +const cache = new LRU({ max: 5 }) for (let i = 0; i < 5; i++) { cache.set(i, i) } cache.get(2) -const popped = [] -let p +const popped: (number | undefined)[] = [] +let p: number | undefined do { p = cache.pop() popped.push(p) } while (p !== undefined) t.same(popped, [0, 1, 3, 4, 2, undefined]) + +t.test('pop with background fetches', async t => { + const resolves: Record void> = {} + let aborted = false + const f = new LRU({ + max: 5, + ttl: 10, + fetchMethod: (k: number, _v, { signal }) => { + signal.addEventListener('abort', () => (aborted = true)) + return new Promise(res => (resolves[k] = res)) + }, + }) + + // a fetch that's in progress with no stale val gets popped + // without returning anything + f.set(0, 0) + let pf = f.fetch(1) + f.set(2, 2) + t.equal(f.size, 3) + t.equal(f.pop(), 0) + t.equal(f.size, 2) + t.equal(f.pop(), 2) + t.equal(f.size, 0) + t.equal(aborted, 
true) + resolves[1](1) + await t.rejects(pf) + + f.set(0, 0, { ttl: 0 }) + f.set(1, 111) + await new Promise(r => setTimeout(r, 20)) + pf = f.fetch(1) + f.set(2, 2, { ttl: 0 }) + t.equal(f.size, 3) + t.equal(f.pop(), 0) + t.equal(f.size, 2) + t.equal(f.pop(), 111) + t.equal(f.size, 1) + t.equal(f.pop(), 2) + t.equal(f.size, 0) + resolves[1](1) + await t.rejects(pf) +}) + +t.test('pop calls dispose and disposeAfter', t => { + let disposeCalled = 0 + let disposeAfterCalled = 0 + const c = new LRU({ + max: 5, + dispose: () => disposeCalled++, + disposeAfter: () => disposeAfterCalled++, + }) + c.set(0, 0) + c.set(1, 1) + c.set(2, 2) + t.equal(c.pop(), 0) + t.equal(c.pop(), 1) + t.equal(c.pop(), 2) + t.equal(c.pop(), undefined) + t.equal(c.size, 0) + t.equal(disposeCalled, 3) + t.equal(disposeAfterCalled, 3) + t.end() +}) diff --git a/test/purge-stale-exhaustive.ts b/test/purge-stale-exhaustive.ts index f2da9f0c..dc185c18 100644 --- a/test/purge-stale-exhaustive.ts +++ b/test/purge-stale-exhaustive.ts @@ -3,7 +3,7 @@ if (typeof performance === 'undefined') { } import t from 'tap' -import LRU from '../' +import { LRUCache as LRU } from '../' import { expose } from './fixtures/expose' const Clock = require('clock-mock') diff --git a/test/reverse-iterate-delete-all.ts b/test/reverse-iterate-delete-all.ts index da0ea72a..690855bb 100644 --- a/test/reverse-iterate-delete-all.ts +++ b/test/reverse-iterate-delete-all.ts @@ -1,10 +1,10 @@ // https://github.com/isaacs/node-lru-cache/issues/278 import t from 'tap' -import LRU from '../' +import { LRUCache as LRU } from '../' const lru = new LRU({ - maxSize:2, + maxSize: 2, sizeCalculation: () => 1, -}); +}) lru.set('x', 'x') lru.set('y', 'y') for (const key of lru.rkeys()) { diff --git a/test/size-calculation.ts b/test/size-calculation.ts index 2a3e2158..7c371d9c 100644 --- a/test/size-calculation.ts +++ b/test/size-calculation.ts @@ -1,12 +1,16 @@ import t from 'tap' -import LRU from '../' +import { LRUCache as LRU } from '../' 
+ +import { expose } from './fixtures/expose' const checkSize = (c: LRU) => { - const sizes = (c as unknown as { sizes: number[] }).sizes + const e = expose(c) + const sizes = e.sizes + if (!sizes) throw new Error('no sizes??') const { calculatedSize, maxSize } = c const sum = [...sizes].reduce((a, b) => a + b, 0) if (sum !== calculatedSize) { - console.error({ sum, calculatedSize, sizes }) + console.error({ sum, calculatedSize, sizes }, c, e) throw new Error('calculatedSize does not equal sum of sizes') } if (calculatedSize > maxSize) { @@ -153,9 +157,9 @@ t.test('delete while empty, or missing key, is no-op', t => { }) t.test('large item falls out of cache, sizes are kept correct', t => { - const statuses:LRU.Status[] = [] - const s = ():LRU.Status => { - const status:LRU.Status = {} + const statuses: LRU.Status[] = [] + const s = (): LRU.Status => { + const status: LRU.Status = {} statuses.push(status) return status } @@ -164,7 +168,7 @@ t.test('large item falls out of cache, sizes are kept correct', t => { maxSize: 10, sizeCalculation: () => 100, }) - const sizes: number[] = (c as unknown as { sizes: number[] }).sizes + const sizes = expose(c).sizes checkSize(c) t.equal(c.size, 0) @@ -206,9 +210,9 @@ t.test('large item falls out of cache, sizes are kept correct', t => { }) t.test('large item falls out of cache because maxEntrySize', t => { - const statuses:LRU.Status[] = [] - const s = ():LRU.Status => { - const status:LRU.Status = {} + const statuses: LRU.Status[] = [] + const s = (): LRU.Status => { + const status: LRU.Status = {} statuses.push(status) return status } @@ -218,7 +222,7 @@ t.test('large item falls out of cache because maxEntrySize', t => { maxEntrySize: 10, sizeCalculation: () => 100, }) - const sizes: number[] = (c as unknown as { sizes: number[] }).sizes + const sizes = expose(c).sizes checkSize(c) t.equal(c.size, 0) diff --git a/test/ttl.ts b/test/ttl.ts index 1a0d51c0..b9e9cd97 100644 --- a/test/ttl.ts +++ b/test/ttl.ts @@ -2,7 +2,7 @@ if 
(typeof performance === 'undefined') { global.performance = require('perf_hooks').performance } import t from 'tap' -import LRUCache from '../index.js' +import { LRUCache } from '../' import { expose } from './fixtures/expose' import Clock from 'clock-mock' @@ -35,7 +35,7 @@ const runTests = (LRU: typeof LRUCache, t: Tap.Test) => { // This is a known bug that I am ok with. clock.advance(1) const c = new LRU({ max: 5, ttl: 10, ttlResolution: 0 }) - const e = expose(c) + const e = expose(c, LRU) c.set(1, 1, { status: s() }) t.equal(c.get(1, { status: s() }), 1, '1 get not stale', { now: clock._now, @@ -62,8 +62,6 @@ const runTests = (LRU: typeof LRUCache, t: Tap.Test) => { t.equal(c.size, 1, 'still there though') t.equal(c.has(1, { status: s() }), false, '1 has stale', { now: clock._now, - ttls: e.ttls, - starts: e.starts, index: e.keyMap.get(1), stale: e.isStale(e.keyMap.get(1)), }) @@ -104,7 +102,7 @@ const runTests = (LRU: typeof LRUCache, t: Tap.Test) => { t.test('ttl tests with ttlResolution=100', t => { statuses.length = 0 const c = new LRU({ ttl: 10, ttlResolution: 100, max: 10 }) - const e = expose(c) + const e = expose(c, LRU) c.set(1, 1, { status: s() }) t.equal(c.get(1, { status: s() }), 1, '1 get not stale', { now: clock._now, @@ -225,6 +223,8 @@ const runTests = (LRU: typeof LRUCache, t: Tap.Test) => { t.equal(c.get(1), 1) clock.advance(1) t.equal(c.has(1), false) + + t.equal(c.get(1, { status: s(), noDeleteOnStaleGet: true }), 1) t.equal(c.get(1), 1) t.equal(c.get(1), undefined) t.equal(c.size, 0) @@ -374,7 +374,7 @@ const runTests = (LRU: typeof LRUCache, t: Tap.Test) => { // https://github.com/isaacs/node-lru-cache/issues/203 t.test('indexes/rindexes can walk over stale entries', t => { const c = new LRU({ max: 10, ttl: 10 }) - const e = expose(c) + const e = expose(c, LRU) for (let i = 0; i < 3; i++) { c.set(i, i) } @@ -509,7 +509,7 @@ t.test('tests with perf_hooks.performance.now()', t => { global.Date = clock.Date // @ts-ignore global.performance = 
clock - const LRU = t.mock('../', {}) + const { LRUCache: LRU } = t.mock('../', {}) runTests(LRU, t) }) @@ -521,6 +521,6 @@ t.test('tests using Date.now()', t => { global.Date = clock.Date // @ts-ignore global.performance = null - const LRU = t.mock('../', {}) + const { LRUCache: LRU } = t.mock('../', {}) runTests(LRU, t) }) diff --git a/test/unbounded-warning.ts b/test/unbounded-warning.ts new file mode 100644 index 00000000..457dd592 --- /dev/null +++ b/test/unbounded-warning.ts @@ -0,0 +1,68 @@ +import t from 'tap' +import { LRUCache } from '../' + +t.test('emits warning', t => { + const { emitWarning } = process + t.teardown(() => { + process.emitWarning = emitWarning + }) + const warnings: [string, string, string][] = [] + Object.defineProperty(process, 'emitWarning', { + value: (msg: string, type: string, code: string) => { + warnings.push([msg, type, code]) + }, + configurable: true, + writable: true, + }) + //@ts-expect-error + new LRUCache({ + ttl: 100, + }) + t.same(warnings, [ + [ + 'TTL caching without ttlAutopurge, max, or maxSize can result in unbounded memory consumption.', + 'UnboundedCacheWarning', + 'LRU_CACHE_UNBOUNDED', + ], + ]) + t.end() +}) + +t.test('prints to stderr if no process.emitWarning', t => { + const { LRUCache: LRU } = t.mock('../', {}) as { + LRUCache: typeof LRUCache + } + const { error } = console + const { emitWarning } = process + t.teardown(() => { + console.error = error + process.emitWarning = emitWarning + }) + const warnings: [string][] = [] + Object.defineProperty(console, 'error', { + value: (msg: string) => { + warnings.push([msg]) + }, + configurable: true, + writable: true, + }) + Object.defineProperty(process, 'emitWarning', { + value: undefined, + configurable: true, + writable: true, + }) + //@ts-expect-error + new LRU({ + ttl: 100, + }) + //@ts-expect-error + new LRU({ + ttl: 100, + }) + t.same(warnings, [ + [ + '[LRU_CACHE_UNBOUNDED] UnboundedCacheWarning: TTL caching without ttlAutopurge, max, or maxSize can 
result in unbounded memory consumption.', + ], + ]) + t.end() +}) diff --git a/test/warn-missing-ac.ts b/test/warn-missing-ac.ts new file mode 100644 index 00000000..c4d3e1de --- /dev/null +++ b/test/warn-missing-ac.ts @@ -0,0 +1,82 @@ +export {} +const main = async () => { + const { default: t } = await import('tap') + const { spawn } = await import('child_process') + + // need to run both tests in parallel so we don't miss the close event + t.jobs = 3 + + const tsNode = + process.platform === 'win32' ? 'ts-node.cmd' : 'ts-node' + + const warn = spawn(tsNode, [__filename, 'child']) + const warnErr: Buffer[] = [] + warn.stderr.on('data', c => warnErr.push(c)) + + const noWarn = spawn(tsNode, [__filename, 'child'], { + env: { + ...process.env, + LRU_CACHE_IGNORE_AC_WARNING: '1', + }, + }) + const noWarnErr: Buffer[] = [] + noWarn.stderr.on('data', c => noWarnErr.push(c)) + + const noFetch = spawn(tsNode, [__filename, 'child-no-fetch']) + const noFetchErr: Buffer[] = [] + noFetch.stderr.on('data', c => noFetchErr.push(c)) + + t.test('no warning', async t => { + await new Promise(r => + noWarn.on('close', (code, signal) => { + t.equal(code, 0) + t.equal(signal, null) + r() + }) + ) + t.equal(Buffer.concat(noWarnErr).toString().trim(), '') + }) + + t.test('no warning (because no fetch)', async t => { + await new Promise(r => + noFetch.on('close', (code, signal) => { + t.equal(code, 0) + t.equal(signal, null) + r() + }) + ) + t.equal(Buffer.concat(noFetchErr).toString().trim(), '') + }) + + t.test('warning', async t => { + await new Promise(r => + warn.on('close', (code, signal) => { + t.equal(code, 0) + t.equal(signal, null) + r() + }) + ) + t.not(Buffer.concat(warnErr).toString().trim(), '') + }) +} + +switch (process.argv[2]) { + case 'child': + //@ts-ignore + globalThis.AbortController = undefined + //@ts-ignore + globalThis.AbortSignal = undefined + import('../').then(({ LRUCache }) => { + new LRUCache({ max: 1, fetchMethod: async () => 1 }).fetch(1) + }) + break + 
case 'child-no-fetch': + //@ts-ignore + globalThis.AbortController = undefined + //@ts-ignore + globalThis.AbortSignal = undefined + import('../') + break + default: + main() +} diff --git a/tsconfig-base.json b/tsconfig-base.json new file mode 100644 index 00000000..4e8b9da0 --- /dev/null +++ b/tsconfig-base.json @@ -0,0 +1,17 @@ +{ + "include": ["src/**/*.ts"], + "compilerOptions": { + "allowSyntheticDefaultImports": true, + "declaration": true, + "declarationMap": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "isolatedModules": true, + "moduleResolution": "node", + "resolveJsonModule": true, + "sourceMap": true, + "inlineSources": true, + "strict": true, + "target": "es2022" + } +} diff --git a/tsconfig-esm.json b/tsconfig-esm.json new file mode 100644 index 00000000..9a571575 --- /dev/null +++ b/tsconfig-esm.json @@ -0,0 +1,7 @@ +{ + "extends": "./tsconfig-base.json", + "compilerOptions": { + "module": "esnext", + "outDir": "dist/mjs" + } +} diff --git a/tsconfig.json b/tsconfig.json index a1fde280..13690021 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -1,18 +1,8 @@ { - "exclude": ["./tap-snapshots"], - "include": ["test/**/*.ts"], + "extends": "./tsconfig-base.json", "compilerOptions": { - "declaration": true, - "sourceMap": true, - "target": "ES6", - "forceConsistentCasingInFileNames": true, - "esModuleInterop": true, - "moduleResolution": "node", - "module": "CommonJS", - "resolveJsonModule": true, - "strict": true, - "skipLibCheck": true, - "noEmit": true, - "allowSyntheticDefaultImports": true + "module": "commonjs", + "outDir": "dist/cjs", + "moduleResolution": "Node" } } diff --git a/typedoc.json b/typedoc.json index 74ce9869..92e10e75 100644 --- a/typedoc.json +++ b/typedoc.json @@ -1,5 +1,7 @@ { "navigationLinks": { - "isaacs projects": "https://isaacs.github.io/" + "isaacs projects": "https://isaacs.github.io/", + "benchmark summary": "https://isaacs.github.io/node-lru-cache/benchmark/", + "benchmark details": 
"https://isaacs.github.io/node-lru-cache/benchmark/results/" } }