aboutsummaryrefslogtreecommitdiffstats
path: root/vanilla/node_modules/undici/lib/cache
diff options
context:
space:
mode:
authorAdam Mathes <adam@adammathes.com>2026-02-13 21:34:48 -0800
committerAdam Mathes <adam@adammathes.com>2026-02-13 21:34:48 -0800
commit76cb9c2a39d477a64824a985ade40507e3bbade1 (patch)
tree41e997aa9c6f538d3a136af61dae9424db2005a9 /vanilla/node_modules/undici/lib/cache
parent819a39a21ac992b1393244a4c283bbb125208c69 (diff)
downloadneko-76cb9c2a39d477a64824a985ade40507e3bbade1.tar.gz
neko-76cb9c2a39d477a64824a985ade40507e3bbade1.tar.bz2
neko-76cb9c2a39d477a64824a985ade40507e3bbade1.zip
feat(vanilla): add testing infrastructure and tests (NK-wjnczv)
Diffstat (limited to 'vanilla/node_modules/undici/lib/cache')
-rw-r--r--vanilla/node_modules/undici/lib/cache/memory-cache-store.js234
-rw-r--r--vanilla/node_modules/undici/lib/cache/sqlite-cache-store.js461
2 files changed, 695 insertions, 0 deletions
diff --git a/vanilla/node_modules/undici/lib/cache/memory-cache-store.js b/vanilla/node_modules/undici/lib/cache/memory-cache-store.js
new file mode 100644
index 0000000..dba29ae
--- /dev/null
+++ b/vanilla/node_modules/undici/lib/cache/memory-cache-store.js
@@ -0,0 +1,234 @@
+'use strict'
+
+const { Writable } = require('node:stream')
+const { EventEmitter } = require('node:events')
+const { assertCacheKey, assertCacheValue } = require('../util/cache.js')
+
+/**
+ * @typedef {import('../../types/cache-interceptor.d.ts').default.CacheKey} CacheKey
+ * @typedef {import('../../types/cache-interceptor.d.ts').default.CacheValue} CacheValue
+ * @typedef {import('../../types/cache-interceptor.d.ts').default.CacheStore} CacheStore
+ * @typedef {import('../../types/cache-interceptor.d.ts').default.GetResult} GetResult
+ */
+
/**
 * In-memory cache store for undici's cache interceptor.
 *
 * Responses are stored in buckets keyed by `origin:path`; each bucket is an
 * array of entries that differ by method and vary headers. When the configured
 * size/count limits are exceeded the store emits a `'maxSizeExceeded'` event
 * and evicts at least the older half of every bucket.
 *
 * @implements {CacheStore}
 * @extends {EventEmitter}
 */
class MemoryCacheStore extends EventEmitter {
  #maxCount = 1024
  #maxSize = 104857600 // 100MB
  #maxEntrySize = 5242880 // 5MB

  #size = 0 // total byte size of all stored bodies
  #count = 0 // total number of entries across all buckets
  /** @type {Map<string, Array<object>>} `origin:path` -> entries */
  #entries = new Map()
  // Ensures 'maxSizeExceeded' fires only once per overflow episode.
  #hasEmittedMaxSizeEvent = false

  /**
   * @param {import('../../types/cache-interceptor.d.ts').default.MemoryCacheStoreOpts | undefined} [opts]
   * @throws {TypeError} if opts is not an object or a limit option is invalid
   */
  constructor (opts) {
    super()
    if (opts) {
      if (typeof opts !== 'object') {
        throw new TypeError('MemoryCacheStore options must be an object')
      }

      this.#maxCount = MemoryCacheStore.#readLimit(opts.maxCount, 'maxCount', this.#maxCount)
      this.#maxSize = MemoryCacheStore.#readLimit(opts.maxSize, 'maxSize', this.#maxSize)
      this.#maxEntrySize = MemoryCacheStore.#readLimit(opts.maxEntrySize, 'maxEntrySize', this.#maxEntrySize)
    }
  }

  /**
   * Validates an optional non-negative-integer option (shared by the three
   * limit options, which previously repeated identical validation blocks).
   * @param {unknown} value user-supplied option value
   * @param {string} name option name, used in the error message
   * @param {number} fallback default kept when value is undefined
   * @returns {number}
   * @throws {TypeError} when value is defined but not a non-negative integer
   */
  static #readLimit (value, name, fallback) {
    if (value === undefined) {
      return fallback
    }
    if (typeof value !== 'number' || !Number.isInteger(value) || value < 0) {
      throw new TypeError(`MemoryCacheStore options.${name} must be a non-negative integer`)
    }
    return value
  }

  /**
   * Get the current size of the cache in bytes
   * @returns {number} The current size of the cache in bytes
   */
  get size () {
    return this.#size
  }

  /**
   * Check if the cache is full (either max size or max count reached)
   * @returns {boolean} True if the cache is full, false otherwise
   */
  isFull () {
    return this.#size >= this.#maxSize || this.#count >= this.#maxCount
  }

  /**
   * Look up a cached response matching the request key (method + vary headers,
   * not yet past its deleteAt time).
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @returns {import('../../types/cache-interceptor.d.ts').default.GetResult | undefined}
   */
  get (key) {
    assertCacheKey(key)

    const topLevelKey = `${key.origin}:${key.path}`

    const now = Date.now()
    const entries = this.#entries.get(topLevelKey)

    const entry = entries ? findEntry(key, entries, now) : null

    return entry == null
      ? undefined
      : {
          statusMessage: entry.statusMessage,
          statusCode: entry.statusCode,
          headers: entry.headers,
          body: entry.body,
          vary: entry.vary ? entry.vary : undefined,
          etag: entry.etag,
          cacheControlDirectives: entry.cacheControlDirectives,
          cachedAt: entry.cachedAt,
          staleAt: entry.staleAt,
          deleteAt: entry.deleteAt
        }
  }

  /**
   * Create a stream the interceptor writes the response body into; the entry
   * is committed to the cache when the stream finishes. Bodies that reach
   * maxEntrySize destroy the stream, so they are never committed.
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheValue} val
   * @returns {Writable | undefined}
   */
  createWriteStream (key, val) {
    assertCacheKey(key)
    assertCacheValue(val)

    const topLevelKey = `${key.origin}:${key.path}`

    const store = this
    const entry = { ...key, ...val, body: [], size: 0 }

    return new Writable({
      write (chunk, encoding, callback) {
        if (typeof chunk === 'string') {
          chunk = Buffer.from(chunk, encoding)
        }

        entry.size += chunk.byteLength

        if (entry.size >= store.#maxEntrySize) {
          // Too large to cache: abort the stream so final() never commits it.
          this.destroy()
        } else {
          entry.body.push(chunk)
        }

        callback(null)
      },
      final (callback) {
        let entries = store.#entries.get(topLevelKey)
        if (!entries) {
          entries = []
          store.#entries.set(topLevelKey, entries)
        }
        // Replace a previous entry for the same method/vary combination.
        const previousEntry = findEntry(key, entries, Date.now())
        if (previousEntry) {
          const index = entries.indexOf(previousEntry)
          entries.splice(index, 1, entry)
          store.#size -= previousEntry.size
        } else {
          entries.push(entry)
          store.#count += 1
        }

        store.#size += entry.size

        // Check if cache is full and emit event if needed
        if (store.#size > store.#maxSize || store.#count > store.#maxCount) {
          // Emit maxSizeExceeded event if we haven't already
          if (!store.#hasEmittedMaxSizeEvent) {
            store.emit('maxSizeExceeded', {
              size: store.#size,
              maxSize: store.#maxSize,
              count: store.#count,
              maxCount: store.#maxCount
            })
            store.#hasEmittedMaxSizeEvent = true
          }

          // Evict at least the older half of every bucket. Math.ceil is
          // required for progress: with `entries.length / 2` splice truncates
          // the fractional delete-count, so a one-entry bucket would splice 0
          // items and a cache made of single-entry buckets could never shrink.
          for (const [bucketKey, bucketEntries] of store.#entries) {
            for (const evicted of bucketEntries.splice(0, Math.ceil(bucketEntries.length / 2))) {
              store.#size -= evicted.size
              store.#count -= 1
            }
            if (bucketEntries.length === 0) {
              store.#entries.delete(bucketKey)
            }
          }

          // Re-arm the event once we are back under both limits.
          if (store.#size < store.#maxSize && store.#count < store.#maxCount) {
            store.#hasEmittedMaxSizeEvent = false
          }
        }

        callback(null)
      }
    })
  }

  /**
   * Drop every cached entry for the key's origin/path bucket.
   * @param {CacheKey} key
   * @throws {TypeError} if key is not an object
   */
  delete (key) {
    if (typeof key !== 'object') {
      throw new TypeError(`expected key to be object, got ${typeof key}`)
    }

    const topLevelKey = `${key.origin}:${key.path}`

    for (const entry of this.#entries.get(topLevelKey) ?? []) {
      this.#size -= entry.size
      this.#count -= 1
    }
    this.#entries.delete(topLevelKey)
  }
}
+
/**
 * Selects the first cached entry in a bucket that is still fresh, matches the
 * request method, and whose vary headers agree with the request. A vary value
 * of `null` means the request must not send that header at all.
 * @param {object} key request key (`method`, `headers`)
 * @param {Array<object>} entries bucket of candidate entries
 * @param {number} now current timestamp in milliseconds
 * @returns {object | undefined} the matching entry, or undefined
 */
function findEntry (key, entries, now) {
  for (const candidate of entries) {
    // Skip expired entries and entries stored for a different method.
    if (candidate.deleteAt <= now || candidate.method !== key.method) {
      continue
    }

    // No vary data means the response matches any request headers.
    if (candidate.vary == null) {
      return candidate
    }

    let varyMatches = true
    for (const headerName of Object.keys(candidate.vary)) {
      const expected = candidate.vary[headerName]
      const actual = key.headers[headerName]

      if (expected === null ? actual !== undefined : expected !== actual) {
        varyMatches = false
        break
      }
    }

    if (varyMatches) {
      return candidate
    }
  }

  return undefined
}
+
+module.exports = MemoryCacheStore
diff --git a/vanilla/node_modules/undici/lib/cache/sqlite-cache-store.js b/vanilla/node_modules/undici/lib/cache/sqlite-cache-store.js
new file mode 100644
index 0000000..7cb4aa7
--- /dev/null
+++ b/vanilla/node_modules/undici/lib/cache/sqlite-cache-store.js
@@ -0,0 +1,461 @@
+'use strict'
+
+const { Writable } = require('node:stream')
+const { assertCacheKey, assertCacheValue } = require('../util/cache.js')
+
// Lazily assigned in the constructor so merely requiring this module does not
// fail on Node builds where `node:sqlite` is unavailable.
let DatabaseSync

// Schema version; the table name embeds it, so bumping VERSION starts a fresh table.
const VERSION = 3

// 2gb
const MAX_ENTRY_SIZE = 2 * 1000 * 1000 * 1000
+
+/**
+ * @typedef {import('../../types/cache-interceptor.d.ts').default.CacheStore} CacheStore
+ * @implements {CacheStore}
+ *
+ * @typedef {{
+ * id: Readonly<number>,
+ * body?: Uint8Array
+ * statusCode: number
+ * statusMessage: string
+ * headers?: string
+ * vary?: string
+ * etag?: string
+ * cacheControlDirectives?: string
+ * cachedAt: number
+ * staleAt: number
+ * deleteAt: number
+ * }} SqliteStoreValue
+ */
/**
 * SQLite-backed cache store built on Node's synchronous `node:sqlite` API.
 * Rows live in the table `cacheInterceptorV<VERSION>`, keyed by a URL string
 * (`origin/path`) plus the request method; vary headers are matched in JS
 * after the indexed lookup.
 */
module.exports = class SqliteCacheStore {
  #maxEntrySize = MAX_ENTRY_SIZE // largest body (bytes) this store will persist
  #maxCount = Infinity // row limit; Infinity disables count-based pruning

  /**
   * @type {import('node:sqlite').DatabaseSync}
   */
  #db

  /**
   * Fetches all rows for a url+method, ordered by deleteAt ascending.
   * @type {import('node:sqlite').StatementSync}
   */
  #getValuesQuery

  /**
   * Overwrites an existing row in place by id.
   * @type {import('node:sqlite').StatementSync}
   */
  #updateValueQuery

  /**
   * Inserts a brand-new row.
   * @type {import('node:sqlite').StatementSync}
   */
  #insertValueQuery

  /**
   * Deletes every row whose deleteAt has passed.
   * @type {import('node:sqlite').StatementSync}
   */
  #deleteExpiredValuesQuery

  /**
   * Deletes every row for one URL (all methods/varies).
   * @type {import('node:sqlite').StatementSync}
   */
  #deleteByUrlQuery

  /**
   * Counts all rows (backs the `size` getter).
   * @type {import('node:sqlite').StatementSync}
   */
  #countEntriesQuery

  /**
   * Count-based eviction statement; null when maxCount is Infinity.
   * @type {import('node:sqlite').StatementSync | null}
   */
  #deleteOldValuesQuery

  /**
   * Opens (or creates) the database, creates the schema if needed, and
   * prepares every statement used by the store.
   * @param {import('../../types/cache-interceptor.d.ts').default.SqliteCacheStoreOpts | undefined} opts
   * @throws {TypeError} on invalid options
   */
  constructor (opts) {
    if (opts) {
      if (typeof opts !== 'object') {
        throw new TypeError('SqliteCacheStore options must be an object')
      }

      if (opts.maxEntrySize !== undefined) {
        if (
          typeof opts.maxEntrySize !== 'number' ||
          !Number.isInteger(opts.maxEntrySize) ||
          opts.maxEntrySize < 0
        ) {
          throw new TypeError('SqliteCacheStore options.maxEntrySize must be a non-negative integer')
        }

        // Hard cap below SQLite's practical blob limit (~2gb).
        if (opts.maxEntrySize > MAX_ENTRY_SIZE) {
          throw new TypeError('SqliteCacheStore options.maxEntrySize must be less than 2gb')
        }

        this.#maxEntrySize = opts.maxEntrySize
      }

      if (opts.maxCount !== undefined) {
        if (
          typeof opts.maxCount !== 'number' ||
          !Number.isInteger(opts.maxCount) ||
          opts.maxCount < 0
        ) {
          throw new TypeError('SqliteCacheStore options.maxCount must be a non-negative integer')
        }
        this.#maxCount = opts.maxCount
      }
    }

    // Lazy module load: only touch node:sqlite when a store is actually built.
    if (!DatabaseSync) {
      DatabaseSync = require('node:sqlite').DatabaseSync
    }
    this.#db = new DatabaseSync(opts?.location ?? ':memory:')

    // WAL + NORMAL sync favors write throughput over durability on crash.
    this.#db.exec(`
      PRAGMA journal_mode = WAL;
      PRAGMA synchronous = NORMAL;
      PRAGMA temp_store = memory;
      PRAGMA optimize;

      CREATE TABLE IF NOT EXISTS cacheInterceptorV${VERSION} (
        -- Data specific to us
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        url TEXT NOT NULL,
        method TEXT NOT NULL,

        -- Data returned to the interceptor
        body BUF NULL,
        deleteAt INTEGER NOT NULL,
        statusCode INTEGER NOT NULL,
        statusMessage TEXT NOT NULL,
        headers TEXT NULL,
        cacheControlDirectives TEXT NULL,
        etag TEXT NULL,
        vary TEXT NULL,
        cachedAt INTEGER NOT NULL,
        staleAt INTEGER NOT NULL
      );

      CREATE INDEX IF NOT EXISTS idx_cacheInterceptorV${VERSION}_getValuesQuery ON cacheInterceptorV${VERSION}(url, method, deleteAt);
      CREATE INDEX IF NOT EXISTS idx_cacheInterceptorV${VERSION}_deleteByUrlQuery ON cacheInterceptorV${VERSION}(deleteAt);
    `)

    this.#getValuesQuery = this.#db.prepare(`
      SELECT
        id,
        body,
        deleteAt,
        statusCode,
        statusMessage,
        headers,
        etag,
        cacheControlDirectives,
        vary,
        cachedAt,
        staleAt
      FROM cacheInterceptorV${VERSION}
      WHERE
        url = ?
        AND method = ?
      ORDER BY
        deleteAt ASC
    `)

    this.#updateValueQuery = this.#db.prepare(`
      UPDATE cacheInterceptorV${VERSION} SET
        body = ?,
        deleteAt = ?,
        statusCode = ?,
        statusMessage = ?,
        headers = ?,
        etag = ?,
        cacheControlDirectives = ?,
        cachedAt = ?,
        staleAt = ?
      WHERE
        id = ?
    `)

    this.#insertValueQuery = this.#db.prepare(`
      INSERT INTO cacheInterceptorV${VERSION} (
        url,
        method,
        body,
        deleteAt,
        statusCode,
        statusMessage,
        headers,
        etag,
        cacheControlDirectives,
        vary,
        cachedAt,
        staleAt
      ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    `)

    this.#deleteByUrlQuery = this.#db.prepare(
      `DELETE FROM cacheInterceptorV${VERSION} WHERE url = ?`
    )

    this.#countEntriesQuery = this.#db.prepare(
      `SELECT COUNT(*) AS total FROM cacheInterceptorV${VERSION}`
    )

    this.#deleteExpiredValuesQuery = this.#db.prepare(
      `DELETE FROM cacheInterceptorV${VERSION} WHERE deleteAt <= ?`
    )

    // NOTE(review): ORDER BY cachedAt DESC selects the *newest* rows for
    // deletion, which contradicts the name "deleteOldValues" — ASC
    // (oldest-first) looks intended. Confirm against upstream before changing.
    this.#deleteOldValuesQuery = this.#maxCount === Infinity
      ? null
      : this.#db.prepare(`
        DELETE FROM cacheInterceptorV${VERSION}
        WHERE id IN (
          SELECT
            id
          FROM cacheInterceptorV${VERSION}
          ORDER BY cachedAt DESC
          LIMIT ?
        )
      `)
  }

  /**
   * Closes the underlying database handle.
   */
  close () {
    this.#db.close()
  }

  /**
   * Looks up a fresh, vary-matching response for the request key and
   * deserializes the JSON-encoded columns back into objects.
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @returns {(import('../../types/cache-interceptor.d.ts').default.GetResult & { body?: Buffer }) | undefined}
   */
  get (key) {
    assertCacheKey(key)

    const value = this.#findValue(key)
    return value
      ? {
          // Wrap the stored bytes without copying: the Buffer is a view onto
          // the Uint8Array returned by node:sqlite.
          body: value.body ? Buffer.from(value.body.buffer, value.body.byteOffset, value.body.byteLength) : undefined,
          statusCode: value.statusCode,
          statusMessage: value.statusMessage,
          headers: value.headers ? JSON.parse(value.headers) : undefined,
          etag: value.etag ? value.etag : undefined,
          vary: value.vary ? JSON.parse(value.vary) : undefined,
          cacheControlDirectives: value.cacheControlDirectives
            ? JSON.parse(value.cacheControlDirectives)
            : undefined,
          cachedAt: value.cachedAt,
          staleAt: value.staleAt,
          deleteAt: value.deleteAt
        }
      : undefined
  }

  /**
   * Stores a response, overwriting any existing row for the same key (even an
   * expired one — see the `canBeExpired` argument to #findValue). Bodies
   * larger than maxEntrySize are silently dropped.
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheValue & { body: null | Buffer | Array<Buffer>}} value
   */
  set (key, value) {
    assertCacheKey(key)

    const url = this.#makeValueUrl(key)
    const body = Array.isArray(value.body) ? Buffer.concat(value.body) : value.body
    const size = body?.byteLength

    if (size && size > this.#maxEntrySize) {
      return
    }

    const existingValue = this.#findValue(key, true)
    if (existingValue) {
      // Updating an existing response, let's overwrite it
      this.#updateValueQuery.run(
        body,
        value.deleteAt,
        value.statusCode,
        value.statusMessage,
        value.headers ? JSON.stringify(value.headers) : null,
        value.etag ? value.etag : null,
        value.cacheControlDirectives ? JSON.stringify(value.cacheControlDirectives) : null,
        value.cachedAt,
        value.staleAt,
        existingValue.id
      )
    } else {
      this.#prune()
      // New response, let's insert it
      this.#insertValueQuery.run(
        url,
        key.method,
        body,
        value.deleteAt,
        value.statusCode,
        value.statusMessage,
        value.headers ? JSON.stringify(value.headers) : null,
        value.etag ? value.etag : null,
        value.cacheControlDirectives ? JSON.stringify(value.cacheControlDirectives) : null,
        value.vary ? JSON.stringify(value.vary) : null,
        value.cachedAt,
        value.staleAt
      )
    }
  }

  /**
   * Buffers the response body in memory and commits it via set() when the
   * stream finishes. Bodies reaching maxEntrySize destroy the stream, so
   * final() never runs and nothing is stored.
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheValue} value
   * @returns {Writable | undefined}
   */
  createWriteStream (key, value) {
    assertCacheKey(key)
    assertCacheValue(value)

    let size = 0
    /**
     * @type {Buffer[] | null}
     */
    const body = []
    const store = this

    return new Writable({
      decodeStrings: true,
      write (chunk, encoding, callback) {
        size += chunk.byteLength

        if (size < store.#maxEntrySize) {
          body.push(chunk)
        } else {
          this.destroy()
        }

        callback()
      },
      final (callback) {
        store.set(key, { ...value, body })
        callback()
      }
    })
  }

  /**
   * Deletes every cached row for the key's URL (all methods and varies).
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @throws {TypeError} if key is not an object
   */
  delete (key) {
    if (typeof key !== 'object') {
      throw new TypeError(`expected key to be object, got ${typeof key}`)
    }

    this.#deleteByUrlQuery.run(this.#makeValueUrl(key))
  }

  /**
   * Makes room before an insert: skipped while under a finite maxCount;
   * otherwise drops expired rows first, then falls back to the count-based
   * eviction statement (10% of maxCount, at least 1 row).
   * NOTE(review): when maxCount is Infinity the guard never fires, so expired
   * rows are purged on every insert — confirm that is intentional.
   * @returns {number} number of rows removed
   */
  #prune () {
    if (Number.isFinite(this.#maxCount) && this.size <= this.#maxCount) {
      return 0
    }

    {
      const removed = this.#deleteExpiredValuesQuery.run(Date.now()).changes
      if (removed) {
        return removed
      }
    }

    {
      // #deleteOldValuesQuery is null when maxCount is Infinity; ?. makes
      // `removed` undefined in that case and we fall through to 0.
      const removed = this.#deleteOldValuesQuery?.run(Math.max(Math.floor(this.#maxCount * 0.1), 1)).changes
      if (removed) {
        return removed
      }
    }

    return 0
  }

  /**
   * Counts the number of rows in the cache
   * @returns {Number}
   */
  get size () {
    const { total } = this.#countEntriesQuery.get()
    return total
  }

  /**
   * Builds the `url` column value. Note the `/` separator: origin and path are
   * joined as `origin/path`.
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @returns {string}
   */
  #makeValueUrl (key) {
    return `${key.origin}/${key.path}`
  }

  /**
   * Returns the first row for url+method whose vary headers match the request.
   * @param {import('../../types/cache-interceptor.d.ts').default.CacheKey} key
   * @param {boolean} [canBeExpired=false] when true (used by set()), expired
   *   rows are still eligible so they can be overwritten in place
   * @returns {SqliteStoreValue | undefined}
   */
  #findValue (key, canBeExpired = false) {
    const url = this.#makeValueUrl(key)
    const { headers, method } = key

    /**
     * @type {SqliteStoreValue[]}
     */
    const values = this.#getValuesQuery.all(url, method)

    if (values.length === 0) {
      return undefined
    }

    const now = Date.now()
    for (const value of values) {
      // Rows are ordered by deleteAt ASC; this returns undefined as soon as
      // the earliest-expiring row is expired, without checking later rows.
      // NOTE(review): later rows could still be fresh — confirm intended.
      if (now >= value.deleteAt && !canBeExpired) {
        return undefined
      }

      let matches = true

      if (value.vary) {
        const vary = JSON.parse(value.vary)

        for (const header in vary) {
          if (!headerValueEquals(headers[header], vary[header])) {
            matches = false
            break
          }
        }
      }

      if (matches) {
        return value
      }
    }

    return undefined
  }
}
+
/**
 * Compares a request header value against a stored vary value. Treats null
 * and undefined as the same "missing" state, compares arrays element-wise,
 * and everything else with strict equality.
 * @param {string|string[]|null|undefined} lhs
 * @param {string|string[]|null|undefined} rhs
 * @returns {boolean}
 */
function headerValueEquals (lhs, rhs) {
  const lhsMissing = lhs == null
  const rhsMissing = rhs == null

  // Equal only if both are missing; unequal if exactly one is.
  if (lhsMissing || rhsMissing) {
    return lhsMissing && rhsMissing
  }

  if (Array.isArray(lhs) && Array.isArray(rhs)) {
    return lhs.length === rhs.length &&
      lhs.every((value, index) => value === rhs[index])
  }

  return lhs === rhs
}