Auto-commit 2026-04-29 16:31

This commit is contained in:
2026-04-29 16:31:27 -04:00
parent e8687bb6b2
commit 0495ee5bd2
19691 changed files with 3272886 additions and 138 deletions

41
node_modules/lru-memoizer/.jshintrc generated vendored Normal file
View File

@@ -0,0 +1,41 @@
{
"camelcase": false,
"curly": false,
"node": true,
"esnext": true,
"bitwise": true,
"eqeqeq": true,
"immed": true,
"indent": 2,
"latedef": false,
"newcap": true,
"noarg": true,
"regexp": true,
"undef": true,
"strict": false,
"smarttabs": true,
"expr": true,
"evil": true,
"browser": true,
"regexdash": true,
"wsh": true,
"trailing": true,
"sub": true,
"unused": true,
"laxcomma": true,
"nonbsp": true,
"newcap": false,
"globals": {
"after": false,
"before": false,
"afterEach": false,
"beforeEach": false,
"describe": false,
"it": false,
"escape": false
}
}

21
node_modules/lru-memoizer/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,21 @@
The MIT License (MIT)
Copyright (c) 2016 JOSE FERNANDO ROMANIELLO <jfromaniello@gmail.com> (http://joseoncode.com)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

88
node_modules/lru-memoizer/README.md generated vendored Normal file
View File

@@ -0,0 +1,88 @@
Memoize functions results using an lru-cache.
## Installation
```
npm i lru-memoizer --save
```
## Intro
This module uses an [lru-cache](https://github.com/isaacs/node-lru-cache) internally to cache the results of an async function.
The `load` function can have N parameters and the last one must be a callback. The callback should be an errback (first parameter is `err`).
The `hash` function purpose is generate a custom hash for storing results. It has all the arguments applied to it minus the callback, and must return an string synchronous.
The `disable` function allows you to conditionally disable the use of the cache. Useful for test environments.
The `freeze` option (defaults to **false**) allows you to deep-freeze the result of the async function.
The `clone` option (defaults to **false**) allows you to deep-clone the result every time is returned from the cache.
## Usage
```javascript
var memoizer = require('lru-memoizer');
var memoizedGet = memoizer({
//defines how to load the resource when
//it is not in the cache.
load: function (options, callback) {
request.get(options, callback);
},
//defines how to create a cache key from the params.
hash: function (options) {
return options.url + qs.stringify(options.qs);
},
//don't cache in test environment
disable: isTestEnv(),
//all other params for the LRU cache.
max: 100,
maxAge: 1000 * 60
});
memoizedGet({
url: 'https://google.com',
qs: { foo: 123 }
}, function (err, result, body) {
//console.log(body);
})
```
## Sync lru-memoizer
Use `memoizer.sync` to cache things that are slow to calculate or methods returning promises.
```
var memoizer = require('lru-memoizer');
var memoizedGet = memoizer.sync({
//defines how to load the resource when
//it is not in the cache.
load: function (params) {
//return something_hard_to_compute;s
},
//defines how to create a cache key from the params.
hash: function (params) {
return params.foo;
},
//all other params for the LRU cache.
max: 100,
maxAge: 1000 * 60
});
```
## Similar modules
This module is very similar to [async-cache](https://github.com/isaacs/async-cache), the main difference is the `hash` function.
## License
MIT 2016 - José F. Romaniello

85
node_modules/lru-memoizer/lib/async.d.ts generated vendored Normal file
View File

@@ -0,0 +1,85 @@
import { syncMemoizer } from './sync';
import { INodeStyleCallBack as CB, ResultBase, IParamsBase0, IParamsBase1, IParamsBase2, IParamsBase3, IParamsBase4, IParamsBase5, IParamsBase6 } from './util';
export interface IMemoized<T1, T2, T3, T4, T5, T6, TResult> extends ResultBase {
(cb: CB<TResult>): void;
(a1: T1, cb: CB<TResult>): void;
(a1: T1, a2: T2, cb: CB<TResult>): void;
(a1: T1, a2: T2, a3: T3, cb: CB<TResult>): void;
(a1: T1, a2: T2, a3: T3, a4: T4, cb: CB<TResult>): void;
(a1: T1, a2: T2, a3: T3, a4: T4, a5: T5, cb: CB<TResult>): void;
(a1: T1, a2: T2, a3: T3, a4: T4, a5: T5, a6: T6, cb: CB<TResult>): void;
}
interface IMemoizableFunction0<TResult> {
(cb: CB<TResult>): void;
}
interface IMemoizableFunction1<T1, TResult> {
(a1: T1, cb: CB<TResult>): void;
}
interface IMemoizableFunction2<T1, T2, TResult> {
(a1: T1, a2: T2, cb: CB<TResult>): void;
}
interface IMemoizableFunction3<T1, T2, T3, TResult> {
(a1: T1, a2: T2, a3: T3, cb: CB<TResult>): void;
}
interface IMemoizableFunction4<T1, T2, T3, T4, TResult> {
(a1: T1, a2: T2, a3: T3, a4: T4, cb: CB<TResult>): void;
}
interface IMemoizableFunction5<T1, T2, T3, T4, T5, TResult> {
(a1: T1, a2: T2, a3: T3, a4: T4, a5: T5, cb: CB<TResult>): void;
}
interface IMemoizableFunction6<T1, T2, T3, T4, T5, T6, TResult> {
(a1: T1, a2: T2, a3: T3, a4: T4, a5: T5, a6: T6, cb: CB<TResult>): void;
}
interface AsyncParams0<TResult> extends IParamsBase0<TResult> {
load: IMemoizableFunction0<TResult>;
}
interface AsyncParams1<T1, TResult> extends IParamsBase1<T1, TResult> {
load: IMemoizableFunction1<T1, TResult>;
}
interface AsyncParams2<T1, T2, TResult> extends IParamsBase2<T1, T2, TResult> {
load: IMemoizableFunction2<T1, T2, TResult>;
}
interface AsyncParams3<T1, T2, T3, TResult> extends IParamsBase3<T1, T2, T3, TResult> {
load: IMemoizableFunction3<T1, T2, T3, TResult>;
}
interface AsyncParams4<T1, T2, T3, T4, TResult> extends IParamsBase4<T1, T2, T3, T4, TResult> {
load: IMemoizableFunction4<T1, T2, T3, T4, TResult>;
}
interface AsyncParams5<T1, T2, T3, T4, T5, TResult> extends IParamsBase5<T1, T2, T3, T4, T5, TResult> {
load: IMemoizableFunction5<T1, T2, T3, T4, T5, TResult>;
}
interface AsyncParams6<T1, T2, T3, T4, T5, T6, TResult> extends IParamsBase6<T1, T2, T3, T4, T5, T6, TResult> {
/**
* The function that loads the resource when is not in the cache.
*/
load: IMemoizableFunction6<T1, T2, T3, T4, T5, T6, TResult>;
}
declare function asyncMemoizer<TResult>(options: AsyncParams0<TResult>): IMemoized<unknown, unknown, unknown, unknown, unknown, unknown, TResult>;
declare namespace asyncMemoizer {
var sync: typeof syncMemoizer;
}
declare function asyncMemoizer<T1, TResult>(options: AsyncParams1<T1, TResult>): IMemoized<T1, unknown, unknown, unknown, unknown, unknown, TResult>;
declare namespace asyncMemoizer {
var sync: typeof syncMemoizer;
}
declare function asyncMemoizer<T1, T2, TResult>(options: AsyncParams2<T1, T2, TResult>): IMemoized<T1, T2, unknown, unknown, unknown, unknown, TResult>;
declare namespace asyncMemoizer {
var sync: typeof syncMemoizer;
}
declare function asyncMemoizer<T1, T2, T3, TResult>(options: AsyncParams3<T1, T2, T3, TResult>): IMemoized<T1, T2, T3, unknown, unknown, unknown, TResult>;
declare namespace asyncMemoizer {
var sync: typeof syncMemoizer;
}
declare function asyncMemoizer<T1, T2, T3, T4, TResult>(options: AsyncParams4<T1, T2, T3, T4, TResult>): IMemoized<T1, T2, T3, T4, unknown, unknown, TResult>;
declare namespace asyncMemoizer {
var sync: typeof syncMemoizer;
}
declare function asyncMemoizer<T1, T2, T3, T4, T5, TResult>(options: AsyncParams5<T1, T2, T3, T4, T5, TResult>): IMemoized<T1, T2, T3, T4, T5, unknown, TResult>;
declare namespace asyncMemoizer {
var sync: typeof syncMemoizer;
}
declare function asyncMemoizer<T1, T2, T3, T4, T5, T6, TResult>(options: AsyncParams6<T1, T2, T3, T4, T5, T6, TResult>): IMemoized<T1, T2, T3, T4, T5, T6, TResult>;
declare namespace asyncMemoizer {
var sync: typeof syncMemoizer;
}
export { asyncMemoizer };

178
node_modules/lru-memoizer/lib/async.js generated vendored Normal file

File diff suppressed because one or more lines are too long

1
node_modules/lru-memoizer/lib/freeze.d.ts generated vendored Normal file
View File

@@ -0,0 +1 @@
export declare function deepFreeze(o: any): any;

23
node_modules/lru-memoizer/lib/freeze.js generated vendored Normal file
View File

@@ -0,0 +1,23 @@
"use strict";
// From https://raw.githubusercontent.com/nikoskalogridis/deep-freeze/fb921b32064dce1645197be2bf975fe0385450b0/index.js
// which is sadly, no longer maintained
Object.defineProperty(exports, "__esModule", { value: true });
exports.deepFreeze = void 0;
function deepFreeze(o) {
if (o) {
Object.freeze(o);
Object.getOwnPropertyNames(o).forEach(function (prop) {
if (o.hasOwnProperty(prop)
&& o[prop] !== null
&& (typeof o[prop] === 'object' || typeof o[prop] === 'function')
&& (o[prop].constructor !== Buffer)
&& !Object.isFrozen(o[prop])) {
deepFreeze(o[prop]);
}
});
}
return o;
}
exports.deepFreeze = deepFreeze;
;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiZnJlZXplLmpzIiwic291cmNlUm9vdCI6IiIsInNvdXJjZXMiOlsiLi4vc3JjL2ZyZWV6ZS50cyJdLCJuYW1lcyI6W10sIm1hcHBpbmdzIjoiO0FBQUEsdUhBQXVIO0FBQ3ZILHVDQUF1Qzs7O0FBRXZDLFNBQWdCLFVBQVUsQ0FBRSxDQUFNO0lBQ2hDLElBQUksQ0FBQyxFQUFFO1FBQ0wsTUFBTSxDQUFDLE1BQU0sQ0FBQyxDQUFDLENBQUMsQ0FBQztRQUVqQixNQUFNLENBQUMsbUJBQW1CLENBQUMsQ0FBQyxDQUFDLENBQUMsT0FBTyxDQUFDLFVBQVUsSUFBSTtZQUNsRCxJQUFJLENBQUMsQ0FBQyxjQUFjLENBQUMsSUFBSSxDQUFDO21CQUNyQixDQUFDLENBQUMsSUFBSSxDQUFDLEtBQUssSUFBSTttQkFDaEIsQ0FBQyxPQUFPLENBQUMsQ0FBQyxJQUFJLENBQUMsS0FBSyxRQUFRLElBQUksT0FBTyxDQUFDLENBQUMsSUFBSSxDQUFDLEtBQUssVUFBVSxDQUFDO21CQUM5RCxDQUFDLENBQUMsQ0FBQyxJQUFJLENBQUMsQ0FBQyxXQUFXLEtBQUssTUFBTSxDQUFDO21CQUNoQyxDQUFDLE1BQU0sQ0FBQyxRQUFRLENBQUMsQ0FBQyxDQUFDLElBQUksQ0FBQyxDQUFDLEVBQUU7Z0JBQzVCLFVBQVUsQ0FBQyxDQUFDLENBQUMsSUFBSSxDQUFDLENBQUMsQ0FBQzthQUNyQjtRQUNMLENBQUMsQ0FBQyxDQUFDO0tBQ0o7SUFFRCxPQUFPLENBQUMsQ0FBQztBQUNYLENBQUM7QUFoQkQsZ0NBZ0JDO0FBQUEsQ0FBQyJ9

2
node_modules/lru-memoizer/lib/index.d.ts generated vendored Normal file
View File

@@ -0,0 +1,2 @@
import { asyncMemoizer } from './async';
export = asyncMemoizer;

4
node_modules/lru-memoizer/lib/index.js generated vendored Normal file
View File

@@ -0,0 +1,4 @@
"use strict";
var async_1 = require("./async");
module.exports = async_1.asyncMemoizer;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoiaW5kZXguanMiLCJzb3VyY2VSb290IjoiIiwic291cmNlcyI6WyIuLi9zcmMvaW5kZXgudHMiXSwibmFtZXMiOltdLCJtYXBwaW5ncyI6IjtBQUFBLGlDQUF3QztBQUV4QyxpQkFBUyxxQkFBYSxDQUFDIn0=

65
node_modules/lru-memoizer/lib/sync.d.ts generated vendored Normal file
View File

@@ -0,0 +1,65 @@
import { ResultBase, IParamsBase0, IParamsBase1, IParamsBase2, IParamsBase3, IParamsBase4, IParamsBase5, IParamsBase6, IParamsBasePlus } from './util';
interface IMemoizedSync<T1, T2, T3, T4, T5, T6, TResult> extends ResultBase {
(arg1: T1): TResult;
(arg1: T1, arg2: T2): TResult;
(arg1: T1, arg2: T2, arg3: T3): TResult;
(arg1: T1, arg2: T2, arg3: T3, arg4: T4): TResult;
(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5): TResult;
(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5, arg6: T6): TResult;
}
interface IMemoizableFunctionSync0<TResult> {
(): TResult;
}
interface IMemoizableFunctionSync1<T1, TResult> {
(arg1: T1): TResult;
}
interface IMemoizableFunctionSync2<T1, T2, TResult> {
(arg1: T1, arg2: T2): TResult;
}
interface IMemoizableFunctionSync3<T1, T2, T3, TResult> {
(arg1: T1, arg2: T2, arg3: T3): TResult;
}
interface IMemoizableFunctionSync4<T1, T2, T3, T4, TResult> {
(arg1: T1, arg2: T2, arg3: T3, arg4: T4): TResult;
}
interface IMemoizableFunctionSync5<T1, T2, T3, T4, T5, TResult> {
(arg1: T1, arg2: T2, arg3: T3, arg4: T4, arg5: T5): TResult;
}
interface IMemoizableFunctionSync6<T1, T2, T3, T4, T5, T6, TResult> {
(a1: T1, a2: T2, a3: T3, a4: T4, a5: T5, a6: T6): TResult;
}
interface IMemoizableFunctionSyncPlus<TResult> {
(...args: any[]): TResult;
}
export interface SyncParams0<TResult> extends IParamsBase0<TResult> {
load: IMemoizableFunctionSync0<TResult>;
}
export interface SyncParams1<T1, TResult> extends IParamsBase1<T1, TResult> {
load: IMemoizableFunctionSync1<T1, TResult>;
}
export interface SyncParams2<T1, T2, TResult> extends IParamsBase2<T1, T2, TResult> {
load: IMemoizableFunctionSync2<T1, T2, TResult>;
}
export interface SyncParams3<T1, T2, T3, TResult> extends IParamsBase3<T1, T2, T3, TResult> {
load: IMemoizableFunctionSync3<T1, T2, T3, TResult>;
}
export interface SyncParams4<T1, T2, T3, T4, TResult> extends IParamsBase4<T1, T2, T3, T4, TResult> {
load: IMemoizableFunctionSync4<T1, T2, T3, T4, TResult>;
}
export interface SyncParams5<T1, T2, T3, T4, T5, TResult> extends IParamsBase5<T1, T2, T3, T4, T5, TResult> {
load: IMemoizableFunctionSync5<T1, T2, T3, T4, T5, TResult>;
}
export interface SyncParams6<T1, T2, T3, T4, T5, T6, TResult> extends IParamsBase6<T1, T2, T3, T4, T5, T6, TResult> {
load: IMemoizableFunctionSync6<T1, T2, T3, T4, T5, T6, TResult>;
}
export interface SyncParamsPlus<TResult> extends IParamsBasePlus {
load: IMemoizableFunctionSyncPlus<TResult>;
}
export declare function syncMemoizer<TResult>(options: SyncParams0<TResult>): IMemoizedSync<unknown, unknown, unknown, unknown, unknown, unknown, TResult>;
export declare function syncMemoizer<T1, TResult>(options: SyncParams1<T1, TResult>): IMemoizedSync<T1, unknown, unknown, unknown, unknown, unknown, TResult>;
export declare function syncMemoizer<T1, T2, TResult>(options: SyncParams2<T1, T2, TResult>): IMemoizedSync<T1, T2, unknown, unknown, unknown, unknown, TResult>;
export declare function syncMemoizer<T1, T2, T3, TResult>(options: SyncParams3<T1, T2, T3, TResult>): IMemoizedSync<T1, T2, T3, unknown, unknown, unknown, TResult>;
export declare function syncMemoizer<T1, T2, T3, T4, TResult>(options: SyncParams4<T1, T2, T3, T4, TResult>): IMemoizedSync<T1, T2, T3, T4, unknown, unknown, TResult>;
export declare function syncMemoizer<T1, T2, T3, T4, T5, TResult>(options: SyncParams5<T1, T2, T3, T4, T5, TResult>): IMemoizedSync<T1, T2, T3, T4, T5, unknown, TResult>;
export declare function syncMemoizer<T1, T2, T3, T4, T5, T6, TResult>(options: SyncParams6<T1, T2, T3, T4, T5, T6, TResult>): IMemoizedSync<T1, T2, T3, T4, T5, T6, TResult>;
export {};

114
node_modules/lru-memoizer/lib/sync.js generated vendored Normal file
View File

@@ -0,0 +1,114 @@
"use strict";
var __read = (this && this.__read) || function (o, n) {
var m = typeof Symbol === "function" && o[Symbol.iterator];
if (!m) return o;
var i = m.call(o), r, ar = [], e;
try {
while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value);
}
catch (error) { e = { error: error }; }
finally {
try {
if (r && !r.done && (m = i["return"])) m.call(i);
}
finally { if (e) throw e.error; }
}
return ar;
};
var __spread = (this && this.__spread) || function () {
for (var ar = [], i = 0; i < arguments.length; i++) ar = ar.concat(__read(arguments[i]));
return ar;
};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.syncMemoizer = void 0;
var lru_cache_1 = __importDefault(require("lru-cache"));
var events_1 = require("events");
var lodash_clonedeep_1 = __importDefault(require("lodash.clonedeep"));
var freeze_1 = require("./freeze");
function syncMemoizer(options) {
var cache = new lru_cache_1.default(options);
var load = options.load;
var hash = options.hash;
var bypass = options.bypass;
var itemMaxAge = options.itemMaxAge;
var freeze = options.freeze;
var clone = options.clone;
var emitter = new events_1.EventEmitter();
var defaultResult = Object.assign({
del: del,
reset: function () { return cache.reset(); },
keys: cache.keys.bind(cache),
on: emitter.on.bind(emitter),
once: emitter.once.bind(emitter),
}, options);
if (options.disable) {
return Object.assign(load, defaultResult);
}
function del() {
var key = hash.apply(void 0, __spread(arguments));
cache.del(key);
}
function emit(event) {
var parameters = [];
for (var _i = 1; _i < arguments.length; _i++) {
parameters[_i - 1] = arguments[_i];
}
emitter.emit.apply(emitter, __spread([event], parameters));
}
function isPromise(result) {
// detect native, bluebird, A+ promises
return result && result.then && typeof result.then === 'function';
}
function processResult(result) {
var res = result;
if (clone) {
if (isPromise(res)) {
res = res.then(lodash_clonedeep_1.default);
}
else {
res = lodash_clonedeep_1.default(res);
}
}
if (freeze) {
if (isPromise(res)) {
res = res.then(freeze_1.deepFreeze);
}
else {
freeze_1.deepFreeze(res);
}
}
return res;
}
var result = function () {
var args = [];
for (var _i = 0; _i < arguments.length; _i++) {
args[_i] = arguments[_i];
}
if (bypass && bypass.apply(void 0, __spread(args))) {
emit.apply(void 0, __spread(['miss'], args));
return load.apply(void 0, __spread(args));
}
var key = hash.apply(void 0, __spread(args));
var fromCache = cache.get(key);
if (fromCache) {
emit.apply(void 0, __spread(['hit'], args));
return processResult(fromCache);
}
emit.apply(void 0, __spread(['miss'], args));
var result = load.apply(void 0, __spread(args));
if (itemMaxAge) {
// @ts-ignore
cache.set(key, result, itemMaxAge.apply(void 0, __spread(args.concat([result]))));
}
else {
cache.set(key, result);
}
return processResult(result);
};
return Object.assign(result, defaultResult);
}
exports.syncMemoizer = syncMemoizer;
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoic3luYy5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3NyYy9zeW5jLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiI7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7Ozs7O0FBQUEsd0RBQTRCO0FBQzVCLGlDQUFzQztBQUN0QyxzRUFBeUM7QUFDekMsbUNBQXNDO0FBaUd0QyxTQUFnQixZQUFZLENBQzFCLE9BQWdDO0lBRWhDLElBQU0sS0FBSyxHQUFRLElBQUksbUJBQUcsQ0FBQyxPQUFPLENBQUMsQ0FBQztJQUNwQyxJQUFNLElBQUksR0FBUyxPQUFPLENBQUMsSUFBSSxDQUFDO0lBQ2hDLElBQU0sSUFBSSxHQUFTLE9BQU8sQ0FBQyxJQUFJLENBQUM7SUFDaEMsSUFBTSxNQUFNLEdBQU8sT0FBTyxDQUFDLE1BQU0sQ0FBQztJQUNsQyxJQUFNLFVBQVUsR0FBRyxPQUFPLENBQUMsVUFBVSxDQUFDO0lBQ3RDLElBQU0sTUFBTSxHQUFPLE9BQU8sQ0FBQyxNQUFNLENBQUM7SUFDbEMsSUFBTSxLQUFLLEdBQVEsT0FBTyxDQUFDLEtBQUssQ0FBQztJQUNqQyxJQUFNLE9BQU8sR0FBTSxJQUFJLHFCQUFZLEVBQUUsQ0FBQztJQUV0QyxJQUFNLGFBQWEsR0FBRyxNQUFNLENBQUMsTUFBTSxDQUFDO1FBQ2xDLEdBQUcsS0FBQTtRQUNILEtBQUssRUFBRSxjQUFNLE9BQUEsS0FBSyxDQUFDLEtBQUssRUFBRSxFQUFiLENBQWE7UUFDMUIsSUFBSSxFQUFFLEtBQUssQ0FBQyxJQUFJLENBQUMsSUFBSSxDQUFDLEtBQUssQ0FBQztRQUM1QixFQUFFLEVBQUUsT0FBTyxDQUFDLEVBQUUsQ0FBQyxJQUFJLENBQUMsT0FBTyxDQUFDO1FBQzVCLElBQUksRUFBRSxPQUFPLENBQUMsSUFBSSxDQUFDLElBQUksQ0FBQyxPQUFPLENBQUM7S0FDakMsRUFBRSxPQUFPLENBQUMsQ0FBQztJQUVaLElBQUksT0FBTyxDQUFDLE9BQU8sRUFBRTtRQUNuQixPQUFPLE1BQU0sQ0FBQyxNQUFNLENBQUMsSUFBSSxFQUFFLGFBQWEsQ0FBQyxDQUFDO0tBQzNDO0lBRUQsU0FBUyxHQUFHO1FBQ1YsSUFBTSxHQUFHLEdBQUcsSUFBSSx3QkFBSSxTQUFTLEVBQUMsQ0FBQztRQUMvQixLQUFLLENBQUMsR0FBRyxDQUFDLEdBQUcsQ0FBQyxDQUFDO0lBQ2pCLENBQUM7SUFFRCxTQUFTLElBQUksQ0FBQyxLQUFhO1FBQUUsb0JBQW9CO2FBQXBCLFVBQW9CLEVBQXBCLHFCQUFvQixFQUFwQixJQUFvQjtZQUFwQixtQ0FBb0I7O1FBQy9DLE9BQU8sQ0FBQyxJQUFJLE9BQVosT0FBTyxZQUFNLEtBQUssR0FBSyxVQUFVLEdBQUU7SUFDckMsQ0FBQztJQUVELFNBQVMsU0FBUyxDQUFDLE1BQVc7UUFDNUIsdUNBQXVDO1FBQ3ZDLE9BQU8sTUFBTSxJQUFJLE1BQU0sQ0FBQyxJQUFJLElBQUksT0FBTyxNQUFNLENBQUMsSUFBSSxLQUFLLFVBQVUsQ0FBQztJQUNwRSxDQUFDO0lBRUQsU0FBUyxhQUFhLENBQUMsTUFBVztRQUNoQyxJQUFJLEdBQUcsR0FBRyxNQUFNLENBQUM7UUFFakIsSUFBSSxLQUFLLEVBQUU7WUFDVCxJQUFJLFNBQVMsQ0FBQyxHQUFHLENBQUMsRUFBRTtnQkFDbEIsR0FBRyxHQU
FHLEdBQUcsQ0FBQyxJQUFJLENBQUMsMEJBQVMsQ0FBQyxDQUFDO2FBQzNCO2lCQUFNO2dCQUNMLEdBQUcsR0FBRywwQkFBUyxDQUFDLEdBQUcsQ0FBQyxDQUFDO2FBQ3RCO1NBQ0Y7UUFFRCxJQUFJLE1BQU0sRUFBRTtZQUNWLElBQUksU0FBUyxDQUFDLEdBQUcsQ0FBQyxFQUFFO2dCQUNsQixHQUFHLEdBQUcsR0FBRyxDQUFDLElBQUksQ0FBQyxtQkFBVSxDQUFDLENBQUM7YUFDNUI7aUJBQU07Z0JBQ0wsbUJBQVUsQ0FBQyxHQUFHLENBQUMsQ0FBQzthQUNqQjtTQUNGO1FBRUQsT0FBTyxHQUFHLENBQUM7SUFDYixDQUFDO0lBRUQsSUFBTSxNQUFNLEdBQThEO1FBQ3hFLGNBQWM7YUFBZCxVQUFjLEVBQWQscUJBQWMsRUFBZCxJQUFjO1lBQWQseUJBQWM7O1FBRWQsSUFBSSxNQUFNLElBQUksTUFBTSx3QkFBSSxJQUFJLEVBQUMsRUFBRTtZQUM3QixJQUFJLHlCQUFDLE1BQU0sR0FBSyxJQUFJLEdBQUU7WUFDdEIsT0FBTyxJQUFJLHdCQUFJLElBQUksR0FBRTtTQUN0QjtRQUVELElBQUksR0FBRyxHQUFHLElBQUksd0JBQUksSUFBSSxFQUFDLENBQUM7UUFFeEIsSUFBSSxTQUFTLEdBQUcsS0FBSyxDQUFDLEdBQUcsQ0FBQyxHQUFHLENBQUMsQ0FBQztRQUUvQixJQUFJLFNBQVMsRUFBRTtZQUNiLElBQUkseUJBQUMsS0FBSyxHQUFLLElBQUksR0FBRTtZQUVyQixPQUFPLGFBQWEsQ0FBQyxTQUFTLENBQUMsQ0FBQztTQUNqQztRQUVELElBQUkseUJBQUMsTUFBTSxHQUFLLElBQUksR0FBRTtRQUN0QixJQUFNLE1BQU0sR0FBRyxJQUFJLHdCQUFJLElBQUksRUFBQyxDQUFDO1FBRTdCLElBQUksVUFBVSxFQUFFO1lBQ2QsYUFBYTtZQUNiLEtBQUssQ0FBQyxHQUFHLENBQUMsR0FBRyxFQUFFLE1BQU0sRUFBRSxVQUFVLHdCQUFJLElBQUksQ0FBQyxNQUFNLENBQUMsQ0FBRSxNQUFNLENBQUUsQ0FBQyxHQUFFLENBQUM7U0FDaEU7YUFBTTtZQUNMLEtBQUssQ0FBQyxHQUFHLENBQUMsR0FBRyxFQUFFLE1BQU0sQ0FBQyxDQUFDO1NBQ3hCO1FBRUQsT0FBTyxhQUFhLENBQUMsTUFBTSxDQUFDLENBQUM7SUFDL0IsQ0FBQyxDQUFDO0lBRUYsT0FBTyxNQUFNLENBQUMsTUFBTSxDQUFDLE1BQU0sRUFBRSxhQUFhLENBQVEsQ0FBQztBQUNyRCxDQUFDO0FBNUZELG9DQTRGQyJ9

162
node_modules/lru-memoizer/lib/util.d.ts generated vendored Normal file
View File

@@ -0,0 +1,162 @@
import LRU from 'lru-cache';
export declare type Listener = (...as: any[]) => void;
export declare type INodeStyleCallBack<Success> = (err: Error | null, result?: Success) => void;
export interface ResultBase {
/**
* Returns all keys in the cache.
*/
keys: () => string[];
/**
* Clear the cache.
*/
reset: () => void;
/**
* Delete an item given the parameters.
*/
del: <T1, T2, T3, T4, T5, T6>(a1?: T1, a2?: T2, a3?: T3, a4?: T4, a5?: T5, a6?: T6) => void;
on(event: 'hit', handler: Listener): void;
on(event: 'miss', handler: Listener): void;
on(event: 'queue', handler: Listener): void;
}
export interface IHashingFunction0 {
(): string;
}
export interface IHashingFunction1<T1> {
(a1: T1): string;
}
export interface IHashingFunction2<T1, T2> {
(a1: T1, a2: T2): string;
}
export interface IHashingFunction3<T1, T2, T3> {
(a1: T1, a2: T2, a3: T3): string;
}
export interface IHashingFunction4<T1, T2, T3, T4> {
(a1: T1, a2: T2, a3: T3, a4: T4): string;
}
export interface IHashingFunction5<T1, T2, T3, T4, T5> {
(a1: T1, a2: T2, a3: T3, a4: T4, a5: T5): string;
}
export interface IHashingFunction6<T1, T2, T3, T4, T5, T6> {
(a1: T1, a2: T2, a3: T3, a4: T4, a5: T5, a6: T6): string;
}
export interface IHashingFunctionPlus {
(...rest: any[]): string;
}
export interface IBypassFunction0 {
(): boolean;
}
export interface IBypassFunction1<T1> {
(a1: T1): boolean;
}
export interface IBypassFunction2<T1, T2> {
(a1: T1, a2: T2): boolean;
}
export interface IBypassFunction3<T1, T2, T3> {
(a1: T1, a2: T2, a3: T3): boolean;
}
export interface IBypassFunction4<T1, T2, T3, T4> {
(a1: T1, a2: T2, a3: T3, a4: T4): boolean;
}
export interface IBypassFunction5<T1, T2, T3, T4, T5> {
(a1: T1, a2: T2, a3: T3, a4: T4, a5: T5): boolean;
}
export interface IBypassFunction6<T1, T2, T3, T4, T5, T6> {
(a1: T1, a2: T2, a3: T3, a4: T4, a5: T5, a6: T6): boolean;
}
export interface IBypassFunctionPlus {
(...rest: any[]): boolean;
}
export interface IMaxAgeFunction0<TResult> {
(res: TResult): number;
}
export interface IMaxAgeFunction1<T1, TResult> {
(a1: T1, res: TResult): number;
}
export interface IMaxAgeFunction2<T1, T2, TResult> {
(a1: T1, a2: T2, res: TResult): number;
}
export interface IMaxAgeFunction3<T1, T2, T3, TResult> {
(a1: T1, a2: T2, a3: T3, res: TResult): number;
}
export interface IMaxAgeFunction4<T1, T2, T3, T4, TResult> {
(a1: T1, a2: T2, a3: T3, a4: T4, res: TResult): number;
}
export interface IMaxAgeFunction5<T1, T2, T3, T4, T5, TResult> {
(a1: T1, a2: T2, a3: T3, a4: T4, a5: T5, res: TResult): number;
}
export interface IMaxAgeFunction6<T1, T2, T3, T4, T5, T6, TResult> {
(a1: T1, a2: T2, a3: T3, a4: T4, a5: T5, a6: T6, res: TResult): number;
}
export interface IMaxAgeFunctionPlus {
(...rest: any[]): number;
}
export interface IParamsBase0<TResult> extends IParamsBaseCommons {
hash: IHashingFunction0;
bypass?: IBypassFunction0;
itemMaxAge?: IMaxAgeFunction0<TResult>;
}
export interface IParamsBase1<T1, TResult> extends IParamsBaseCommons {
hash: IHashingFunction1<T1>;
bypass?: IBypassFunction1<T1>;
itemMaxAge?: IMaxAgeFunction1<T1, TResult>;
}
export interface IParamsBase2<T1, T2, TResult> extends IParamsBaseCommons {
hash: IHashingFunction2<T1, T2>;
bypass?: IBypassFunction2<T1, T2>;
itemMaxAge?: IMaxAgeFunction2<T1, T2, TResult>;
}
export interface IParamsBase3<T1, T2, T3, TResult> extends IParamsBaseCommons {
hash: IHashingFunction3<T1, T2, T3>;
bypass?: IBypassFunction3<T1, T2, T3>;
itemMaxAge?: IMaxAgeFunction3<T1, T2, T3, TResult>;
}
export interface IParamsBase4<T1, T2, T3, T4, TResult> extends IParamsBaseCommons {
hash: IHashingFunction4<T1, T2, T3, T4>;
bypass?: IBypassFunction4<T1, T2, T3, T4>;
itemMaxAge?: IMaxAgeFunction4<T1, T2, T3, T4, TResult>;
}
export interface IParamsBase5<T1, T2, T3, T4, T5, TResult> extends IParamsBaseCommons {
hash: IHashingFunction5<T1, T2, T3, T4, T5>;
bypass?: IBypassFunction5<T1, T2, T3, T4, T5>;
itemMaxAge?: IMaxAgeFunction5<T1, T2, T3, T4, T5, TResult>;
}
export interface IParamsBase6<T1, T2, T3, T4, T5, T6, TResult> extends IParamsBaseCommons {
/**
* A function to generate the key of the cache.
*/
hash: IHashingFunction6<T1, T2, T3, T4, T5, T6>;
/**
* Return true if the result should not be retrieved from the cache.
*/
bypass?: IBypassFunction6<T1, T2, T3, T4, T5, T6>;
/**
* An optional function to indicate the maxAge of an specific item.
*/
itemMaxAge?: IMaxAgeFunction6<T1, T2, T3, T4, T5, T6, TResult>;
}
export interface IParamsBasePlus extends IParamsBaseCommons {
hash: IHashingFunctionPlus;
bypass?: IBypassFunctionPlus;
itemMaxAge?: IMaxAgeFunctionPlus;
}
interface IParamsBaseCommons extends LRU.Options<string, any> {
/**
* Indicates if the resource should be freezed.
*/
freeze?: boolean;
/**
* Indicates if the resource should be cloned before is returned.
*/
clone?: boolean;
/**
* Disable the cache and executes the load logic directly.
*/
disable?: boolean;
/**
* Do not queue requests if initial call is more than `queueMaxAge` milliseconds old.
* Instead, invoke `load` again and create a new queue.
* Defaults to 1000ms.
*/
queueMaxAge?: number;
}
export {};

3
node_modules/lru-memoizer/lib/util.js generated vendored Normal file
View File

@@ -0,0 +1,3 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJmaWxlIjoidXRpbC5qcyIsInNvdXJjZVJvb3QiOiIiLCJzb3VyY2VzIjpbIi4uL3NyYy91dGlsLnRzIl0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiIifQ==

View File

@@ -0,0 +1,15 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View File

@@ -0,0 +1,166 @@
# lru cache
A cache object that deletes the least-recently-used items.
[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache)
## Installation:
```javascript
npm install lru-cache --save
```
## Usage:
```javascript
var LRU = require("lru-cache")
, options = { max: 500
, length: function (n, key) { return n * 2 + key.length }
, dispose: function (key, n) { n.close() }
, maxAge: 1000 * 60 * 60 }
, cache = new LRU(options)
, otherCache = new LRU(50) // sets just the max size
cache.set("key", "value")
cache.get("key") // "value"
// non-string keys ARE fully supported
// but note that it must be THE SAME object, not
// just a JSON-equivalent object.
var someObject = { a: 1 }
cache.set(someObject, 'a value')
// Object keys are not toString()-ed
cache.set('[object Object]', 'a different value')
assert.equal(cache.get(someObject), 'a value')
// A similar object with same keys/values won't work,
// because it's a different object identity
assert.equal(cache.get({ a: 1 }), undefined)
cache.reset() // empty the cache
```
If you put more stuff in it, then items will fall out.
If you try to put an oversized thing in it, then it'll fall out right
away.
## Options
* `max` The maximum size of the cache, checked by applying the length
function to all values in the cache. Not setting this is kind of
silly, since that's the whole purpose of this lib, but it defaults
to `Infinity`. Setting it to a non-number or negative number will
throw a `TypeError`. Setting it to 0 makes it be `Infinity`.
* `maxAge` Maximum age in ms. Items are not pro-actively pruned out
as they age, but if you try to get an item that is too old, it'll
drop it and return undefined instead of giving it to you.
Setting this to a negative value will make everything seem old!
Setting it to a non-number will throw a `TypeError`.
* `length` Function that is used to calculate the length of stored
items. If you're storing strings or buffers, then you probably want
to do something like `function(n, key){return n.length}`. The default is
`function(){return 1}`, which is fine if you want to store `max`
like-sized things. The item is passed as the first argument, and
the key is passed as the second argumnet.
* `dispose` Function that is called on items when they are dropped
from the cache. This can be handy if you want to close file
descriptors or do other cleanup tasks when items are no longer
accessible. Called with `key, value`. It's called *before*
actually removing the item from the internal cache, so if you want
to immediately put it back in, you'll have to do that in a
`nextTick` or `setTimeout` callback or it won't do anything.
* `stale` By default, if you set a `maxAge`, it'll only actually pull
stale items out of the cache when you `get(key)`. (That is, it's
not pre-emptively doing a `setTimeout` or anything.) If you set
`stale:true`, it'll return the stale value before deleting it. If
you don't set this, then it'll return `undefined` when you try to
get a stale entry, as if it had already been deleted.
* `noDisposeOnSet` By default, if you set a `dispose()` method, then
it'll be called whenever a `set()` operation overwrites an existing
key. If you set this option, `dispose()` will only be called when a
key falls out of the cache, not when it is overwritten.
* `updateAgeOnGet` When using time-expiring entries with `maxAge`,
setting this to `true` will make each item's effective time update
to the current time whenever it is retrieved from cache, causing it
to not expire. (It can still fall out of cache based on recency of
use, of course.)
## API
* `set(key, value, maxAge)`
* `get(key) => value`
Both of these will update the "recently used"-ness of the key.
They do what you think. `maxAge` is optional and overrides the
cache `maxAge` option if provided.
If the key is not found, `get()` will return `undefined`.
The key and val can be any value.
* `peek(key)`
Returns the key value (or `undefined` if not found) without
updating the "recently used"-ness of the key.
(If you find yourself using this a lot, you *might* be using the
wrong sort of data structure, but there are some use cases where
it's handy.)
* `del(key)`
Deletes a key out of the cache.
* `reset()`
Clear the cache entirely, throwing away all values.
* `has(key)`
Check if a key is in the cache, without updating the recent-ness
or deleting it for being stale.
* `forEach(function(value,key,cache), [thisp])`
Just like `Array.prototype.forEach`. Iterates over all the keys
in the cache, in order of recent-ness. (Ie, more recently used
items are iterated over first.)
* `rforEach(function(value,key,cache), [thisp])`
The same as `cache.forEach(...)` but items are iterated over in
reverse order. (ie, less recently used items are iterated over
first.)
* `keys()`
Return an array of the keys in the cache.
* `values()`
Return an array of the values in the cache.
* `length`
Return total length of objects in cache taking into account
`length` options function.
* `itemCount`
Return total quantity of objects currently in cache. Note, that
`stale` (see options) items are returned as part of this item
count.
* `dump()`
Return an array of the cache entries ready for serialization and usage
with `destinationCache.load(arr)`.
* `load(cacheEntriesArray)`
Loads another cache entries array, obtained with `sourceCache.dump()`,
into the cache. The destination cache is reset before loading new entries.
* `prune()`
Manually iterates over the entire cache, proactively pruning old entries.

View File

@@ -0,0 +1,334 @@
'use strict'
// A linked list to keep track of recently-used-ness
const Yallist = require('yallist')
const MAX = Symbol('max')
const LENGTH = Symbol('length')
const LENGTH_CALCULATOR = Symbol('lengthCalculator')
const ALLOW_STALE = Symbol('allowStale')
const MAX_AGE = Symbol('maxAge')
const DISPOSE = Symbol('dispose')
const NO_DISPOSE_ON_SET = Symbol('noDisposeOnSet')
const LRU_LIST = Symbol('lruList')
const CACHE = Symbol('cache')
const UPDATE_AGE_ON_GET = Symbol('updateAgeOnGet')
// Default length calculator: every entry counts as size 1, so `max`
// becomes a plain item count.
const naiveLength = () => 1
// lruList is a yallist where the head is the youngest
// item, and the tail is the oldest. the list contains the Hit
// objects as the entries.
// Each Hit object has a reference to its Yallist.Node. This
// never changes.
//
// cache is a Map (or PseudoMap) that matches the keys to
// the Yallist.Node object.
// A least-recently-used cache with optional per-item and cache-wide
// max-age expiry, a pluggable size calculator, and a dispose hook.
// All internal state is stored behind module-local Symbol keys so it
// cannot collide with user code.
class LRUCache {
  // options: a number (shorthand for { max: n }) or an options object
  // with max, maxAge, length, dispose, stale, noDisposeOnSet and
  // updateAgeOnGet (see the README for semantics).
  constructor (options) {
    if (typeof options === 'number')
      options = { max: options }

    if (!options)
      options = {}

    if (options.max && (typeof options.max !== 'number' || options.max < 0))
      throw new TypeError('max must be a non-negative number')
    // Kind of weird to have a default max of Infinity, but oh well.
    // (`max` is assigned but intentionally unused beyond this line.)
    const max = this[MAX] = options.max || Infinity

    const lc = options.length || naiveLength
    this[LENGTH_CALCULATOR] = (typeof lc !== 'function') ? naiveLength : lc
    this[ALLOW_STALE] = options.stale || false
    if (options.maxAge && typeof options.maxAge !== 'number')
      throw new TypeError('maxAge must be a number')
    this[MAX_AGE] = options.maxAge || 0
    this[DISPOSE] = options.dispose
    this[NO_DISPOSE_ON_SET] = options.noDisposeOnSet || false
    this[UPDATE_AGE_ON_GET] = options.updateAgeOnGet || false
    this.reset()
  }

  // resize the cache when the max changes.
  // Note: 0 (and any falsy value) maps to Infinity, mirroring the
  // constructor's behavior.
  set max (mL) {
    if (typeof mL !== 'number' || mL < 0)
      throw new TypeError('max must be a non-negative number')

    this[MAX] = mL || Infinity
    trim(this)
  }
  get max () {
    return this[MAX]
  }

  set allowStale (allowStale) {
    this[ALLOW_STALE] = !!allowStale
  }
  get allowStale () {
    return this[ALLOW_STALE]
  }

  // NOTE(review): the message says "non-negative" but only the type is
  // checked — negative values are accepted (and make everything stale).
  set maxAge (mA) {
    if (typeof mA !== 'number')
      throw new TypeError('maxAge must be a non-negative number')

    this[MAX_AGE] = mA
    trim(this)
  }
  get maxAge () {
    return this[MAX_AGE]
  }

  // resize the cache when the lengthCalculator changes.
  // Re-measures every stored entry with the new calculator, then trims.
  set lengthCalculator (lC) {
    if (typeof lC !== 'function')
      lC = naiveLength

    if (lC !== this[LENGTH_CALCULATOR]) {
      this[LENGTH_CALCULATOR] = lC
      this[LENGTH] = 0
      this[LRU_LIST].forEach(hit => {
        hit.length = this[LENGTH_CALCULATOR](hit.value, hit.key)
        this[LENGTH] += hit.length
      })
    }
    trim(this)
  }
  get lengthCalculator () { return this[LENGTH_CALCULATOR] }

  // Total calculated length of everything stored (not the item count).
  get length () { return this[LENGTH] }
  // Number of entries, including ones that may already be stale.
  get itemCount () { return this[LRU_LIST].length }

  // Iterate least-recently-used first. Stale entries encountered along
  // the way are pruned (see forEachStep).
  rforEach (fn, thisp) {
    thisp = thisp || this
    for (let walker = this[LRU_LIST].tail; walker !== null;) {
      // Capture prev before fn may delete the current node.
      const prev = walker.prev
      forEachStep(this, fn, walker, thisp)
      walker = prev
    }
  }

  // Iterate most-recently-used first.
  forEach (fn, thisp) {
    thisp = thisp || this
    for (let walker = this[LRU_LIST].head; walker !== null;) {
      // Capture next before fn may delete the current node.
      const next = walker.next
      forEachStep(this, fn, walker, thisp)
      walker = next
    }
  }

  // Keys in most-recently-used-first order (stale entries included).
  keys () {
    return this[LRU_LIST].toArray().map(k => k.key)
  }

  // Values in most-recently-used-first order (stale entries included).
  values () {
    return this[LRU_LIST].toArray().map(k => k.value)
  }

  // Empty the cache, calling dispose on every entry first (if set).
  reset () {
    if (this[DISPOSE] &&
        this[LRU_LIST] &&
        this[LRU_LIST].length) {
      this[LRU_LIST].forEach(hit => this[DISPOSE](hit.key, hit.value))
    }

    this[CACHE] = new Map() // hash of items by key
    this[LRU_LIST] = new Yallist() // list of items in order of use recency
    this[LENGTH] = 0 // length of items in the list
  }

  // Serialize to an array of { k, v, e } records suitable for load().
  // `e` is the absolute expiry timestamp (0 means "never expires").
  // Stale entries are filtered out.
  dump () {
    return this[LRU_LIST].map(hit =>
      isStale(this, hit) ? false : {
        k: hit.key,
        v: hit.value,
        e: hit.now + (hit.maxAge || 0)
      }).toArray().filter(h => h)
  }

  // Expose the raw internal yallist (debugging aid).
  dumpLru () {
    return this[LRU_LIST]
  }

  // Add/overwrite an entry. Returns true on success; returns false —
  // evicting any existing entry for the key — when the item's
  // calculated length exceeds max.
  set (key, value, maxAge) {
    maxAge = maxAge || this[MAX_AGE]

    if (maxAge && typeof maxAge !== 'number')
      throw new TypeError('maxAge must be a number')

    // Timestamps are only recorded when some maxAge applies.
    const now = maxAge ? Date.now() : 0
    const len = this[LENGTH_CALCULATOR](value, key)

    if (this[CACHE].has(key)) {
      if (len > this[MAX]) {
        // Oversized replacement: drop the old entry entirely.
        del(this, this[CACHE].get(key))
        return false
      }

      const node = this[CACHE].get(key)
      const item = node.value

      // dispose of the old one before overwriting
      // split out into 2 ifs for better coverage tracking
      if (this[DISPOSE]) {
        if (!this[NO_DISPOSE_ON_SET])
          this[DISPOSE](key, item.value)
      }

      item.now = now
      item.maxAge = maxAge
      item.value = value
      this[LENGTH] += len - item.length
      item.length = len
      this.get(key) // promotes the entry to most-recently-used
      trim(this)
      return true
    }

    const hit = new Entry(key, value, len, now, maxAge)

    // oversized objects fall out of cache automatically.
    if (hit.length > this[MAX]) {
      if (this[DISPOSE])
        this[DISPOSE](key, value)

      return false
    }

    this[LENGTH] += hit.length
    this[LRU_LIST].unshift(hit)
    this[CACHE].set(key, this[LRU_LIST].head)
    trim(this)
    return true
  }

  // True if the key is present and not stale. Does not update recency
  // and does not evict stale entries.
  has (key) {
    if (!this[CACHE].has(key)) return false
    const hit = this[CACHE].get(key).value
    return !isStale(this, hit)
  }

  // Look up a value and promote it to most-recently-used.
  get (key) {
    return get(this, key, true)
  }

  // Look up a value without touching recency.
  peek (key) {
    return get(this, key, false)
  }

  // Evict the least-recently-used entry. Returns the internal Entry
  // record (not the raw value), or null when the cache is empty.
  pop () {
    const node = this[LRU_LIST].tail
    if (!node)
      return null

    del(this, node)
    return node.value
  }

  // Remove a key (no-op when absent).
  del (key) {
    del(this, this[CACHE].get(key))
  }

  // Restore entries produced by dump(). Resets the cache first;
  // already-expired records are skipped.
  load (arr) {
    // reset the cache
    this.reset()

    const now = Date.now()
    // A previous serialized cache has the most recent items first
    for (let l = arr.length - 1; l >= 0; l--) {
      const hit = arr[l]
      const expiresAt = hit.e || 0
      if (expiresAt === 0)
        // the item was created without expiration in a non aged cache
        this.set(hit.k, hit.v)
      else {
        const maxAge = expiresAt - now
        // dont add already expired items
        if (maxAge > 0) {
          this.set(hit.k, hit.v, maxAge)
        }
      }
    }
  }

  // Walk every key through a non-promoting get(), which evicts any
  // entry that has gone stale.
  prune () {
    this[CACHE].forEach((value, key) => get(this, key, false))
  }
}
// Shared lookup behind get()/peek(). Returns the cached value, the
// stale value when allowStale is set, or undefined. When doUse is true
// the entry is promoted to most-recently-used (and its timestamp is
// refreshed under updateAgeOnGet).
const get = (self, key, doUse) => {
  const node = self[CACHE].get(key)
  if (!node)
    return undefined

  const hit = node.value
  if (isStale(self, hit)) {
    // Expired: evict it, and only hand the value back in stale mode.
    del(self, node)
    return self[ALLOW_STALE] ? hit.value : undefined
  }

  if (doUse) {
    if (self[UPDATE_AGE_ON_GET])
      hit.now = Date.now()
    self[LRU_LIST].unshiftNode(node)
  }
  return hit.value
}
// True when a hit has outlived its per-item maxAge or, failing that,
// the cache-wide maxAge. Entries with no maxAge at all never go stale.
const isStale = (self, hit) => {
  if (!hit)
    return false
  if (!hit.maxAge && !self[MAX_AGE])
    return false

  const age = Date.now() - hit.now
  if (hit.maxAge)
    return age > hit.maxAge
  return self[MAX_AGE] && (age > self[MAX_AGE])
}
// Evict from the cold (tail) end of the LRU list until the total
// calculated length fits under max.
const trim = self => {
  let walker = self[LRU_LIST].tail
  while (walker !== null && self[LENGTH] > self[MAX]) {
    // Grab the next-coldest node before this one is unlinked by del().
    const prev = walker.prev
    del(self, walker)
    walker = prev
  }
}
// Fully remove a yallist node from the cache: fire dispose, adjust the
// length accounting, and drop it from both the map and the LRU list.
// Safe to call with a missing (undefined) node.
const del = (self, node) => {
  if (!node)
    return

  const hit = node.value
  if (self[DISPOSE])
    self[DISPOSE](hit.key, hit.value)

  self[LENGTH] -= hit.length
  self[CACHE].delete(hit.key)
  self[LRU_LIST].removeNode(node)
}
// A single cache record: the user's key/value plus the bookkeeping
// needed for size accounting (length) and expiry (now + maxAge).
class Entry {
  constructor (key, value, length, now, maxAge) {
    Object.assign(this, {
      key,
      value,
      length,
      now,
      // Normalize a missing/falsy maxAge to 0 ("no per-item expiry").
      maxAge: maxAge || 0
    })
  }
}
const forEachStep = (self, fn, node, thisp) => {
let hit = node.value
if (isStale(self, hit)) {
del(self, node)
if (!self[ALLOW_STALE])
hit = undefined
}
if (hit)
fn.call(thisp, hit.value, hit.key, self)
}
module.exports = LRUCache

View File

@@ -0,0 +1,34 @@
{
"name": "lru-cache",
"description": "A cache object that deletes the least-recently-used items.",
"version": "6.0.0",
"author": "Isaac Z. Schlueter <i@izs.me>",
"keywords": [
"mru",
"lru",
"cache"
],
"scripts": {
"test": "tap",
"snap": "tap",
"preversion": "npm test",
"postversion": "npm publish",
"prepublishOnly": "git push origin --follow-tags"
},
"main": "index.js",
"repository": "git://github.com/isaacs/node-lru-cache.git",
"devDependencies": {
"benchmark": "^2.1.4",
"tap": "^14.10.7"
},
"license": "ISC",
"dependencies": {
"yallist": "^4.0.0"
},
"files": [
"index.js"
],
"engines": {
"node": ">=10"
}
}

15
node_modules/lru-memoizer/node_modules/yallist/LICENSE generated vendored Normal file
View File

@@ -0,0 +1,15 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View File

@@ -0,0 +1,204 @@
# yallist
Yet Another Linked List
There are many doubly-linked list implementations like it, but this
one is mine.
For when an array would be too big, and a Map can't be iterated in
reverse order.
[![Build Status](https://travis-ci.org/isaacs/yallist.svg?branch=master)](https://travis-ci.org/isaacs/yallist) [![Coverage Status](https://coveralls.io/repos/isaacs/yallist/badge.svg?service=github)](https://coveralls.io/github/isaacs/yallist)
## basic usage
```javascript
var yallist = require('yallist')
var myList = yallist.create([1, 2, 3])
myList.push('foo')
myList.unshift('bar')
// of course pop() and shift() are there, too
console.log(myList.toArray()) // ['bar', 1, 2, 3, 'foo']
myList.forEach(function (k) {
// walk the list head to tail
})
myList.forEachReverse(function (k, index, list) {
// walk the list tail to head
})
var myDoubledList = myList.map(function (k) {
return k + k
})
// now myDoubledList contains ['barbar', 2, 4, 6, 'foofoo']
// mapReverse is also a thing
var myDoubledListReverse = myList.mapReverse(function (k) {
return k + k
}) // ['foofoo', 6, 4, 2, 'barbar']
var reduced = myList.reduce(function (set, entry) {
set += entry
return set
}, 'start')
console.log(reduced) // 'startfoo123bar'
```
## api
The whole API is considered "public".
Functions with the same name as an Array method work more or less the
same way.
There's reverse versions of most things because that's the point.
### Yallist
Default export, the class that holds and manages a list.
Call it with either a forEach-able (like an array) or a set of
arguments, to initialize the list.
The Array-ish methods all act like you'd expect. No magic length,
though, so if you change that it won't automatically prune or add
empty spots.
### Yallist.create(..)
Alias for Yallist function. Some people like factories.
#### yallist.head
The first node in the list
#### yallist.tail
The last node in the list
#### yallist.length
The number of nodes in the list. (Change this at your peril. It is
not magic like Array length.)
#### yallist.toArray()
Convert the list to an array.
#### yallist.forEach(fn, [thisp])
Call a function on each item in the list.
#### yallist.forEachReverse(fn, [thisp])
Call a function on each item in the list, in reverse order.
#### yallist.get(n)
Get the data at position `n` in the list. If you use this a lot,
probably better off just using an Array.
#### yallist.getReverse(n)
Get the data at position `n`, counting from the tail.
#### yallist.map(fn, thisp)
Create a new Yallist with the result of calling the function on each
item.
#### yallist.mapReverse(fn, thisp)
Same as `map`, but in reverse.
#### yallist.pop()
Get the data from the list tail, and remove the tail from the list.
#### yallist.push(item, ...)
Insert one or more items to the tail of the list.
#### yallist.reduce(fn, initialValue)
Like Array.reduce.
#### yallist.reduceReverse
Like Array.reduce, but in reverse.
#### yallist.reverse
Reverse the list in place.
#### yallist.shift()
Get the data from the list head, and remove the head from the list.
#### yallist.slice([from], [to])
Just like Array.slice, but returns a new Yallist.
#### yallist.sliceReverse([from], [to])
Just like yallist.slice, but the result is returned in reverse.
#### yallist.toArray()
Create an array representation of the list.
#### yallist.toArrayReverse()
Create a reversed array representation of the list.
#### yallist.unshift(item, ...)
Insert one or more items to the head of the list.
#### yallist.unshiftNode(node)
Move a Node object to the front of the list. (That is, pull it out of
wherever it lives, and make it the new head.)
If the node belongs to a different list, then that list will remove it
first.
#### yallist.pushNode(node)
Move a Node object to the end of the list. (That is, pull it out of
wherever it lives, and make it the new tail.)
If the node belongs to a list already, then that list will remove it
first.
#### yallist.removeNode(node)
Remove a node from the list, preserving referential integrity of head
and tail and other nodes.
Will throw an error if you try to have a list remove a node that
doesn't belong to it.
### Yallist.Node
The class that holds the data and the links to the adjacent nodes in the list.
Call with `var n = new Node(value, previousNode, nextNode)`
Note that if you do direct operations on Nodes themselves, it's very
easy to get into weird states where the list is broken. Be careful :)
#### node.next
The next node in the list.
#### node.prev
The previous node in the list.
#### node.value
The data the node contains.
#### node.list
The list to which this node belongs. (Null if it does not belong to
any list.)

View File

@@ -0,0 +1,8 @@
'use strict'
module.exports = function (Yallist) {
Yallist.prototype[Symbol.iterator] = function* () {
for (let walker = this.head; walker; walker = walker.next) {
yield walker.value
}
}
}

View File

@@ -0,0 +1,29 @@
{
"name": "yallist",
"version": "4.0.0",
"description": "Yet Another Linked List",
"main": "yallist.js",
"directories": {
"test": "test"
},
"files": [
"yallist.js",
"iterator.js"
],
"dependencies": {},
"devDependencies": {
"tap": "^12.1.0"
},
"scripts": {
"test": "tap test/*.js --100",
"preversion": "npm test",
"postversion": "npm publish",
"postpublish": "git push origin --all; git push origin --tags"
},
"repository": {
"type": "git",
"url": "git+https://github.com/isaacs/yallist.git"
},
"author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)",
"license": "ISC"
}

View File

@@ -0,0 +1,426 @@
'use strict'
// Public surface: the Yallist constructor itself, the Node class, and
// a `create` factory alias for callers that prefer not to use `new`.
module.exports = Yallist

Yallist.Node = Node
Yallist.create = Yallist
// Doubly-linked list constructor; works with or without `new`.
// Accepts either a forEach-able collection (e.g. an array) or the
// initial items as individual arguments.
function Yallist (list) {
  const self = (this instanceof Yallist) ? this : new Yallist()

  self.tail = null
  self.head = null
  self.length = 0

  if (list && typeof list.forEach === 'function') {
    list.forEach(function (item) {
      self.push(item)
    })
  } else if (arguments.length > 0) {
    for (let i = 0; i < arguments.length; i++) {
      self.push(arguments[i])
    }
  }

  return self
}
// Unlink `node` from this list, repairing neighbour/head/tail
// pointers, and return the node that followed it (or null).
// Throws if the node belongs to a different list.
Yallist.prototype.removeNode = function (node) {
  if (node.list !== this) {
    throw new Error('removing node which does not belong to this list')
  }

  const { next, prev } = node

  if (next) next.prev = prev
  if (prev) prev.next = next
  if (node === this.head) this.head = next
  if (node === this.tail) this.tail = prev

  // node.list === this (checked above), so this is the right counter.
  this.length--
  node.next = null
  node.prev = null
  node.list = null

  return next
}
// Make `node` the new head of this list, detaching it first from any
// list it currently belongs to. No-op when it is already the head.
Yallist.prototype.unshiftNode = function (node) {
  if (node === this.head) {
    return
  }

  if (node.list) {
    node.list.removeNode(node)
  }

  const oldHead = this.head
  node.list = this
  node.next = oldHead
  if (oldHead) {
    oldHead.prev = node
  }

  this.head = node
  if (!this.tail) {
    this.tail = node
  }
  this.length++
}
// Make `node` the new tail of this list, detaching it first from any
// list it currently belongs to. No-op when it is already the tail.
Yallist.prototype.pushNode = function (node) {
  if (node === this.tail) {
    return
  }

  if (node.list) {
    node.list.removeNode(node)
  }

  const oldTail = this.tail
  node.list = this
  node.prev = oldTail
  if (oldTail) {
    oldTail.next = node
  }

  this.tail = node
  if (!this.head) {
    this.head = node
  }
  this.length++
}
// Append every argument to the tail; returns the new length.
Yallist.prototype.push = function () {
  for (const item of arguments) {
    push(this, item)
  }
  return this.length
}
// Prepend every argument to the head; returns the new length.
// Arguments are processed left-to-right, so the last one ends up
// at the very front.
Yallist.prototype.unshift = function () {
  for (const item of arguments) {
    unshift(this, item)
  }
  return this.length
}
// Remove and return the tail value; undefined on an empty list.
Yallist.prototype.pop = function () {
  if (!this.tail) {
    return undefined
  }

  const value = this.tail.value
  this.tail = this.tail.prev
  if (this.tail) {
    this.tail.next = null
  } else {
    // List is now empty.
    this.head = null
  }
  this.length--
  return value
}
// Remove and return the head value; undefined on an empty list.
Yallist.prototype.shift = function () {
  if (!this.head) {
    return undefined
  }

  const value = this.head.value
  this.head = this.head.next
  if (this.head) {
    this.head.prev = null
  } else {
    // List is now empty.
    this.tail = null
  }
  this.length--
  return value
}
// Call fn(value, index, list) for each entry, head to tail.
// `thisp` (default: the list) is the callback's `this`.
Yallist.prototype.forEach = function (fn, thisp) {
  const ctx = thisp || this
  let index = 0
  for (let walker = this.head; walker !== null; walker = walker.next) {
    fn.call(ctx, walker.value, index, this)
    index++
  }
}
// Call fn(value, index, list) for each entry, tail to head.
// Indices count down from length - 1 to 0.
Yallist.prototype.forEachReverse = function (fn, thisp) {
  const ctx = thisp || this
  let index = this.length - 1
  for (let walker = this.tail; walker !== null; walker = walker.prev) {
    fn.call(ctx, walker.value, index, this)
    index--
  }
}
// Return the value at 0-based position `n` from the head, or
// undefined when the list is shorter than n + 1.
Yallist.prototype.get = function (n) {
  let walker = this.head
  let i = 0
  while (walker !== null && i < n) {
    walker = walker.next
    i++
  }
  if (i === n && walker !== null) {
    return walker.value
  }
}
// Return the value at 0-based position `n` counting from the tail, or
// undefined when the list is shorter than n + 1.
Yallist.prototype.getReverse = function (n) {
  let walker = this.tail
  let i = 0
  while (walker !== null && i < n) {
    walker = walker.prev
    i++
  }
  if (i === n && walker !== null) {
    return walker.value
  }
}
// Build a new Yallist holding fn(value, list) for each entry,
// walking head to tail.
Yallist.prototype.map = function (fn, thisp) {
  const ctx = thisp || this
  const out = new Yallist()
  let walker = this.head
  while (walker !== null) {
    out.push(fn.call(ctx, walker.value, this))
    walker = walker.next
  }
  return out
}
// Same as map(), but walking tail to head, so the result comes back
// in reverse order.
Yallist.prototype.mapReverse = function (fn, thisp) {
  const ctx = thisp || this
  const out = new Yallist()
  let walker = this.tail
  while (walker !== null) {
    out.push(fn.call(ctx, walker.value, this))
    walker = walker.prev
  }
  return out
}
// Like Array#reduce, head to tail. When no initial value is given the
// head seeds the accumulator; throws TypeError on an empty list with
// no initial value. (Note: the index passed to fn always starts at 0,
// even when the head was consumed as the seed — upstream behavior.)
Yallist.prototype.reduce = function (fn, initial) {
  let acc
  let walker = this.head

  if (arguments.length > 1) {
    acc = initial
  } else if (this.head) {
    walker = this.head.next
    acc = this.head.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }

  let i = 0
  while (walker !== null) {
    acc = fn(acc, walker.value, i)
    walker = walker.next
    i++
  }

  return acc
}
// Like Array#reduce, but tail to head. When no initial value is given
// the tail seeds the accumulator; throws TypeError on an empty list
// with no initial value. (Note: the index passed to fn always starts
// at length - 1, even when the tail was the seed — upstream behavior.)
Yallist.prototype.reduceReverse = function (fn, initial) {
  let acc
  let walker = this.tail

  if (arguments.length > 1) {
    acc = initial
  } else if (this.tail) {
    walker = this.tail.prev
    acc = this.tail.value
  } else {
    throw new TypeError('Reduce of empty list with no initial value')
  }

  let i = this.length - 1
  while (walker !== null) {
    acc = fn(acc, walker.value, i)
    walker = walker.prev
    i--
  }

  return acc
}
// Snapshot the list, head to tail, into a plain array.
Yallist.prototype.toArray = function () {
  const arr = new Array(this.length)
  let walker = this.head
  for (let i = 0; walker !== null; i++) {
    arr[i] = walker.value
    walker = walker.next
  }
  return arr
}
// Snapshot the list, tail to head, into a plain array (reversed).
Yallist.prototype.toArrayReverse = function () {
  const arr = new Array(this.length)
  let walker = this.tail
  for (let i = 0; walker !== null; i++) {
    arr[i] = walker.value
    walker = walker.prev
  }
  return arr
}
// Like Array#slice: returns a new Yallist of positions [from, to).
// Negative indices count from the end; out-of-range bounds are
// clamped; an inverted range yields an empty list.
Yallist.prototype.slice = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }
  // `var` hoisting is load-bearing here: `i` and `walker` carry over
  // from this skip-ahead loop into the collect loop below.
  for (var i = 0, walker = this.head; walker !== null && i < from; i++) {
    walker = walker.next
  }
  for (; walker !== null && i < to; i++, walker = walker.next) {
    ret.push(walker.value)
  }
  return ret
}
// Same bounds handling as slice(), but values are collected walking
// tail to head, so the resulting Yallist is in reverse order.
Yallist.prototype.sliceReverse = function (from, to) {
  to = to || this.length
  if (to < 0) {
    to += this.length
  }
  from = from || 0
  if (from < 0) {
    from += this.length
  }
  var ret = new Yallist()
  if (to < from || to < 0) {
    return ret
  }
  if (from < 0) {
    from = 0
  }
  if (to > this.length) {
    to = this.length
  }
  // `i` and `walker` intentionally leak from this skip loop into the
  // collect loop below (var hoisting).
  for (var i = this.length, walker = this.tail; walker !== null && i > to; i--) {
    walker = walker.prev
  }
  for (; walker !== null && i > from; i--, walker = walker.prev) {
    ret.push(walker.value)
  }
  return ret
}
// Like Array#splice: remove `deleteCount` values starting at `start`
// (negative counts from the end) and insert `nodes` (values) there.
// Returns an array of the removed values.
Yallist.prototype.splice = function (start, deleteCount, ...nodes) {
  if (start > this.length) {
    start = this.length - 1
  }
  if (start < 0) {
    start = this.length + start;
  }
  // Walk to the first node affected by the deletion.
  for (var i = 0, walker = this.head; walker !== null && i < start; i++) {
    walker = walker.next
  }
  var ret = []
  for (var i = 0; walker && i < deleteCount; i++) {
    ret.push(walker.value)
    // removeNode returns the following node, keeping the walk valid.
    walker = this.removeNode(walker)
  }
  // Re-anchor the insertion point for insert() below.
  // NOTE(review): this rewinding looks fragile for edge cases (e.g.
  // deleting through to the tail, or splicing near the head) — confirm
  // against upstream yallist tests before relying on it.
  if (walker === null) {
    walker = this.tail
  }
  if (walker !== this.head && walker !== this.tail) {
    walker = walker.prev
  }
  for (var i = 0; i < nodes.length; i++) {
    walker = insert(this, walker, nodes[i])
  }
  return ret;
}
// Reverse the list in place by swapping prev/next on every node and
// then swapping head/tail. Returns the list itself.
Yallist.prototype.reverse = function () {
  const oldHead = this.head
  const oldTail = this.tail

  let walker = oldHead
  while (walker !== null) {
    const tmp = walker.prev
    walker.prev = walker.next
    walker.next = tmp
    // After the swap, `prev` holds what used to be `next`.
    walker = walker.prev
  }

  this.head = oldTail
  this.tail = oldHead
  return this
}
// Create a node holding `value` adjacent to `node`: before it when
// `node` is the current head, after it otherwise. The Node constructor
// wires up the neighbour pointers; head/tail are patched here.
function insert (self, node, value) {
  const inserted = (node === self.head)
    ? new Node(value, null, node, self)
    : new Node(value, node, node.next, self)

  if (inserted.next === null) {
    self.tail = inserted
  }
  if (inserted.prev === null) {
    self.head = inserted
  }

  self.length++
  return inserted
}
// Append one value: the new node becomes the tail (and the head too,
// when the list was empty).
function push (self, item) {
  const node = new Node(item, self.tail, null, self)
  self.tail = node
  if (!self.head) {
    self.head = node
  }
  self.length++
}
// Prepend one value: the new node becomes the head (and the tail too,
// when the list was empty).
function unshift (self, item) {
  const node = new Node(item, null, self.head, self)
  self.head = node
  if (!self.tail) {
    self.tail = node
  }
  self.length++
}
// A doubly-linked node; usable with or without `new`. Linking into the
// given prev/next neighbours happens eagerly at construction time.
function Node (value, prev, next, list) {
  if (!(this instanceof Node)) {
    return new Node(value, prev, next, list)
  }

  this.list = list
  this.value = value

  this.prev = prev || null
  if (prev) {
    prev.next = this
  }

  this.next = next || null
  if (next) {
    next.prev = this
  }
}
try {
// add if support for Symbol.iterator is present
require('./iterator.js')(Yallist)
} catch (er) {}

34
node_modules/lru-memoizer/package.json generated vendored Normal file
View File

@@ -0,0 +1,34 @@
{
"name": "lru-memoizer",
"description": "Memoize functions results using an lru-cache.",
"version": "2.3.0",
"author": "José F. Romaniello <jfromaniello@gmail.com> (http://joseoncode.com)",
"repository": {
"url": "git://github.com/jfromaniello/lru-memoizer.git"
},
"keywords": [
"cache",
"memoize",
"lru"
],
"main": "./lib/index.js",
"types": "./lib/index.d.ts",
"scripts": {
"prepare": "tsc",
"test": "npm run prepare && mocha"
},
"dependencies": {
"lodash.clonedeep": "^4.5.0",
"lru-cache": "6.0.0"
},
"license": "MIT",
"devDependencies": {
"@types/lodash.clonedeep": "^4.5.9",
"@types/lru-cache": "^5.1.0",
"@types/node": "^12.0.10",
"chai": "^3.5.0",
"mocha": "^10.4.0",
"sinon": "^7.3.2",
"typescript": "^3.5.2"
}
}

View File

@@ -0,0 +1,39 @@
// Tests the `bypass` option: when bypass(...) returns true, the cache
// is skipped and `load` runs on every call.
const memoizer = require('../lib/index.js');
const assert = require('chai').assert;

describe('lru-memoizer (bypass)', function () {
  var loadTimes = 0, memoized;

  beforeEach(function () {
    loadTimes = 0;
    memoized = memoizer({
      load: function (a, b, callback) {
        loadTimes++;
        callback(null, a + b);
      },
      hash: function (a, b) {
        return a + '-' + b;
      },
      // Always true for the (1, 2) inputs used below, so every call
      // bypasses the cache.
      bypass: function (a, b) {
        return a < b;
      },
      max: 10
    });
  });

  it('should call the load function every time', function (done) {
    memoized(1, 2, function (err) {
      assert.isNull(err);
      assert.strictEqual(loadTimes, 1);
      memoized(1, 2, function (err) {
        assert.isNull(err);
        // Second call also hit `load` — no caching happened.
        assert.strictEqual(loadTimes, 2);
        done();
      });
    });
  });
});

View File

@@ -0,0 +1,44 @@
// Tests the `clone` option: each caller gets a deep copy of the cached
// result, so mutating one result does not affect later calls.
const memoizer = require('./..');
const assert = require('chai').assert;

describe('lru-memoizer (clone)', () => {
  let loadTimes = 0, memoized;

  beforeEach(() => {
    loadTimes = 0;
    memoized = memoizer({
      load: (key, callback) => {
        loadTimes++;
        callback(null, { foo: key, buffer: Buffer.from('1234') });
      },
      hash: (key) => {
        return key;
      },
      clone: true
    });
  });

  it('should return a clone every time with the same cached structure', (done) => {
    memoized('bar', (err, r1) => {
      assert.isNull(err);
      assert.strictEqual(loadTimes, 1);
      assert.equal(r1.foo, 'bar');
      // Mutate the first clone; the cached original must be unaffected.
      r1.foo = 'bax';
      memoized('bar', (err, r2) => {
        assert.isNull(err);
        // Served from cache — load was not called again.
        assert.strictEqual(loadTimes, 1);
        assert.equal(r2.foo, 'bar');
        // Different object identity: each call gets a fresh clone.
        assert.notStrictEqual(r1, r2);
        assert.notEqual(r1, r2);
        done();
      });
    });
  });
});

View File

@@ -0,0 +1,49 @@
// Tests the `disable` option: caching is turned off entirely, so the
// memoized wrapper always calls through to `load`.
const memoizer = require('./..');
const assert = require('chai').assert;

describe('lru-memoizer (disabled)', function () {
  var loadTimes = 0, memoized;

  beforeEach(function () {
    loadTimes = 0;
    memoized = memoizer({
      disable: true,
      load: function (a, b, callback) {
        loadTimes++;
        return setTimeout(function () {
          if (a === 0) {
            return callback(new Error('a cant be 0'));
          }
          callback(null, a+b);
        }, 10);
      },
      hash: function (a, b) {
        return a + '-' + b;
      },
      max: 10
    });
  });

  it('should call the load function every time', function (done) {
    memoized(1,2, function (err, result) {
      assert.isNull(err);
      assert.strictEqual(result, 3);
      assert.strictEqual(loadTimes, 1);
      memoized(1,2, function (err, result) {
        assert.isNull(err);
        assert.strictEqual(result, 3);
        // Second call hit `load` again — nothing was cached.
        assert.strictEqual(loadTimes, 2);
        done();
      });
    });
  });

  // Even when disabled, the hash function is still exposed on the wrapper.
  it('should expose hash function', function() {
    assert.equal(memoized.hash(1, 2), '1-2');
  });
});

View File

@@ -0,0 +1,132 @@
// Tests the 'hit' / 'miss' / 'queue' events emitted by a memoized
// function: miss on first load, hit on cached reads, queue when a
// second call arrives while the first load is still in flight.
const memoizer = require('./..');
const sinon = require('sinon');

describe('lru-memoizer (events)', function () {
  let memoized;
  let onMiss, onHit, onQueue;

  beforeEach(function () {
    // (Fixed: removed a stray `loadTimes = 0` that created an unused
    // implicit global — the variable was never declared or read here.)
    onMiss = sinon.stub();
    onHit = sinon.stub();
    onQueue = sinon.stub();
    memoized = memoizer({
      // Async load with a small delay so the "pending" case below can
      // queue a second caller; errors when a === 0 (not exercised here).
      load: function (a, b, bypass, callback) {
        return setTimeout(function () {
          if (a === 0) {
            return callback(new Error('a cant be 0'));
          }
          callback(null, a+b);
        }, 10);
      },
      hash: function (a, b) {
        return a + '-' + b;
      },
      bypass: function(a, b, bypass) {
        return bypass;
      },
      max: 10
    });
    memoized.on('hit', onHit);
    memoized.on('miss', onMiss);
    memoized.on('queue', onQueue);
  });

  describe('when the result is not in the cache', () => {
    beforeEach((done) => {
      memoized(1, 2, false, done);
    });

    it('should not call onHit', () => {
      sinon.assert.notCalled(onHit);
    });

    it('should not call onQueue', () => {
      sinon.assert.notCalled(onQueue);
    });

    it('should call onMiss with the load arguments', () => {
      sinon.assert.calledOnce(onMiss);
      sinon.assert.calledWith(onMiss, 1, 2, false);
    });
  });

  describe('when the result is in the cache', () => {
    beforeEach((done) => {
      memoized(1,2, false, () => {
        onHit.reset();
        onMiss.reset();
        onQueue.reset();
        memoized(1, 2, false, done);
      });
    });

    it('should call onHit with the load arguments', () => {
      sinon.assert.calledOnce(onHit);
      sinon.assert.calledWith(onHit, 1, 2, false);
    });

    it('should not call onQueue', () => {
      sinon.assert.notCalled(onQueue);
    });

    it('should not call onMiss', () => {
      // Fixed: this previously asserted notCalled(onQueue) — a
      // copy-paste bug that left onMiss unverified.
      sinon.assert.notCalled(onMiss);
    });
  });

  describe('when the cache is by passed', () => {
    beforeEach((done) => {
      memoized(1,2, false, () => {
        onHit.reset();
        onMiss.reset();
        onQueue.reset();
        // bypass flag true: this call must skip the cached value.
        memoized(1, 2, true, done);
      });
    });

    it('should not call onHit', () => {
      sinon.assert.notCalled(onHit);
    });

    it('should not call onQueue', () => {
      sinon.assert.notCalled(onQueue);
    });

    it('should call onMiss with the load arguments', () => {
      sinon.assert.calledOnce(onMiss);
      sinon.assert.calledWith(onMiss, 1, 2, true);
    });
  });

  describe('when the result is pending', () => {
    beforeEach((done) => {
      let pending = 2;
      function onDone() {
        pending -= 1;
        if (pending === 0) {
          done();
        }
      }
      // Second call is issued while the first load is still running,
      // so it should be queued rather than re-loaded.
      memoized(1, 2, false, onDone);
      onHit.reset();
      onMiss.reset();
      onQueue.reset();
      memoized(1, 2, false, onDone);
    });

    it('should not call onHit', () => {
      sinon.assert.notCalled(onHit);
    });

    it('should call onQueue with the load arguments', () => {
      sinon.assert.calledOnce(onQueue);
      sinon.assert.calledWith(onQueue, 1, 2, false);
    });

    it('should not call onMiss', () => {
      sinon.assert.notCalled(onMiss);
    });
  });
});

View File

@@ -0,0 +1,43 @@
// Tests the `freeze` option: cached results are Object.frozen and the
// same frozen instance is handed to every caller.
const memoizer = require("./..");
const assert = require("chai").assert;

describe("lru-memoizer (freeze)", function () {
  var loadTimes = 0,
    memoized;

  beforeEach(function () {
    loadTimes = 0;
    memoized = memoizer({
      load: function (key, callback) {
        loadTimes++;
        callback(null, { foo: "bar", buffer: Buffer.from("1234") });
      },
      hash: function (key) {
        return key;
      },
      freeze: true,
    });
  });

  it("should return a freeze every time with the same cached structure", function (done) {
    memoized("test", function (err, r1) {
      assert.isNull(err);
      assert.strictEqual(loadTimes, 1);
      assert.equal(r1.foo, "bar");
      // Attempted mutation on a frozen object is a silent no-op here
      // (sloppy mode), so foo must still read "bar" below.
      r1.foo = "bax";
      assert.isFrozen(r1);
      memoized("test", function (err, r2) {
        assert.isNull(err);
        // Served from cache — load was not called again.
        assert.strictEqual(loadTimes, 1);
        assert.equal(r2.foo, "bar");
        // Unlike clone mode, freeze mode returns the SAME instance.
        assert.strictEqual(r1, r2);
        assert.isFrozen(r2);
        done();
      });
    });
  });
});

View File

@@ -0,0 +1,204 @@
var memoizer = require('./..');
var assert = require('chai').assert;
// Tests for the per-item expiration hook (`itemMaxAge`), which lets each
// cached entry compute its own TTL instead of using the global `maxAge`.
// NOTE(review): these tests are timing-based (setTimeout) and assume the
// event loop is not delayed by more than the slack built into the delays.
describe('lru-memoizer (itemMaxAge)', function () {
  var loadTimes = 0, memoized;

  beforeEach(function () {
    loadTimes = 0;
  });

  it('should use default behavior if not configured', function (done) {
    memoized = memoizer({
      // Async load: resolves a + b after 100ms; loadTimes counts real loads.
      load: function (a, b, callback) {
        loadTimes++;
        setTimeout(function () {
          callback(null, a + b);
        }, 100);
      },
      hash: function (a, b) {
        return a + '-' + b;
      },
      max: 10,
      maxAge: 500
    });

    memoized(1,2, function (err, result) {
      assert.isNull(err);
      assert.strictEqual(result, 3);
      assert.strictEqual(loadTimes, 1);
      // Not expired yet (400ms < maxAge of 500ms): still a cache hit.
      setTimeout(function() {
        memoized(1,2, function (err, result) {
          assert.isNull(err);
          assert.strictEqual(result, 3);
          assert.strictEqual(loadTimes, 1);
          // Expired, load times will increase.
          setTimeout(function() {
            memoized(1,2, function (err, result) {
              assert.isNull(err);
              assert.strictEqual(result, 3);
              assert.strictEqual(loadTimes, 2);
              done();
            });
          }, 200);
        });
      }, 400);
    });
  });

  it('should return all args and the result in the itemMaxAge function', function (done) {
    var args;
    memoized = memoizer({
      load: function (a, b, callback) {
        loadTimes++;
        setTimeout(function () {
          callback(null, a + b);
        }, 100);
      },
      // Captures whatever itemMaxAge is invoked with for the assertions below.
      itemMaxAge: function (a, b, result) {
        args = arguments;
        return 1000;
      },
      hash: function (a, b) {
        return a + '-' + b;
      },
      max: 10,
      maxAge: 600
    });

    memoized(1,2, function (err, result) {
      assert.isNull(err);
      // itemMaxAge receives the original call args followed by the result.
      assert.strictEqual(args[0], 1);
      assert.strictEqual(args[1], 2);
      assert.strictEqual(args[2], 3);
      done();
    });
  });

  it('should overwrite the default behavior if configured', function (done) {
    var maxAge = 0;
    var lastKey = null;
    memoized = memoizer({
      load: function (a, b, callback) {
        loadTimes++;
        setTimeout(function () {
          callback(null, a + b);
        }, 100);
      },
      itemMaxAge: function (a, b, result) {
        lastKey = a + '-' + b;
        // In this test, we set the maxAge of the current item to (result*100).
        // If the result is 3, the max age of this item will be 300.
        maxAge = result * 100;
        return maxAge;
      },
      hash: function (a, b) {
        return a + '-' + b;
      },
      max: 10,
      maxAge: 600
    });

    memoized(1,2, function (err, result) {
      assert.isNull(err);
      assert.strictEqual(maxAge, 300);
      assert.strictEqual(lastKey, '1-2');
      assert.strictEqual(result, 3);
      assert.strictEqual(loadTimes, 1);
      // Not expired yet after 200 ms, because the expiration is 300
      setTimeout(function() {
        memoized(1,2, function (err, result) {
          assert.isNull(err);
          assert.strictEqual(maxAge, 300);
          assert.strictEqual(lastKey, '1-2');
          assert.strictEqual(result, 3);
          assert.strictEqual(loadTimes, 1);
          // Expired because now we are at 350 ms (even though the global
          // expiration has been set to 600): itemMaxAge takes precedence.
          setTimeout(function() {
            memoized(1,2, function (err, result) {
              assert.isNull(err);
              assert.strictEqual(maxAge, 300);
              assert.strictEqual(lastKey, '1-2');
              assert.strictEqual(result, 3);
              assert.strictEqual(loadTimes, 2);
              // Expired again, because 350ms have passed again.
              setTimeout(function() {
                memoized(1,2, function (err, result) {
                  assert.isNull(err);
                  assert.strictEqual(maxAge, 300);
                  assert.strictEqual(lastKey, '1-2');
                  assert.strictEqual(result, 3);
                  assert.strictEqual(loadTimes, 3);
                  done();
                });
              }, 350);
            });
          }, 150);
        });
      }, 200);
    });
  });

  it('should overwrite the default behavior if configured (sync)', function (done) {
    var maxAge = 0;
    var lastKey = null;
    memoized = memoizer.sync({
      // Synchronous variant: load returns directly instead of via callback.
      load: function (a, b) {
        loadTimes++;
        return a + b;
      },
      itemMaxAge: function (a, b, result) {
        lastKey = a + '-' + b;
        // In this test, we set the maxAge of the current item to (result*100).
        // If the result is 3, the max age of this item will be 300.
        maxAge = result * 100;
        return maxAge;
      },
      hash: function (a, b) {
        return a + '-' + b;
      },
      max: 10,
      maxAge: 600
    });

    var result = memoized(1, 2);
    assert.strictEqual(maxAge, 300);
    assert.strictEqual(lastKey, '1-2');
    assert.strictEqual(result, 3);
    assert.strictEqual(loadTimes, 1);
    // Not expired yet after 200 ms, because the expiration is 300
    setTimeout(function() {
      result = memoized(1, 2);
      assert.strictEqual(maxAge, 300);
      assert.strictEqual(lastKey, '1-2');
      assert.strictEqual(result, 3);
      assert.strictEqual(loadTimes, 1);
      // Expired because now we are at 350 ms (even though the global
      // expiration has been set to 600).
      setTimeout(function() {
        result = memoized(1,2);
        assert.strictEqual(maxAge, 300);
        assert.strictEqual(lastKey, '1-2');
        assert.strictEqual(result, 3);
        assert.strictEqual(loadTimes, 2);
        // Expired again, because 350ms have passed again.
        setTimeout(function() {
          result = memoized(1,2);
          assert.strictEqual(maxAge, 300);
          assert.strictEqual(lastKey, '1-2');
          assert.strictEqual(result, 3);
          assert.strictEqual(loadTimes, 3);
          done();
        }, 350);
      }, 150);
    }, 200);
  });
});

View File

@@ -0,0 +1,36 @@
const memoizer = require('./..');
const assert = require('chai').assert;
const _ = require('lodash');
// Overlapping calls with the same key must coalesce into a single load.
describe('lru-simultaneos calls', function () {
  let loadTimes = 0;
  let memoized;

  beforeEach(function () {
    loadTimes = 0;
    memoized = memoizer({
      // Simulated async load: resolves a + b after 100ms and records how
      // many times it actually ran.
      load: (a, b, callback) => {
        loadTimes++;
        setTimeout(() => {
          callback(null, a + b);
        }, 100);
      },
      hash: (a, b) => a + '-' + b,
      max: 10
    });
  });

  it('should call once', function (done) {
    // Two fire-and-forget calls plus one asserting call, all in flight at
    // the same time — only the first may reach `load`.
    memoized(1, 2, _.noop);
    memoized(1, 2, _.noop);
    memoized(1, 2, (err, result) => {
      if (err) { return done(err); }
      assert.strictEqual(loadTimes, 1);
      assert.strictEqual(result, 3);
      done();
    });
  });
});

View File

@@ -0,0 +1,43 @@
var memoizer = require('./..');
var assert = require('chai').assert;
// When no `hash` function is configured, the memoizer stores everything
// under a single internal key ('_').
describe('lru-memoizer (no key)', function () {
  let loadTimes = 0;
  let memoized;

  beforeEach(function () {
    loadTimes = 0;
    memoized = memoizer({
      // The load result is the call count, so a cached hit (still 1) is
      // distinguishable from a fresh load.
      load: function (callback) {
        loadTimes++;
        return setTimeout(function () {
          callback(null, loadTimes);
        }, 10);
      }
    });
  });

  it('should cache the result of an async function', function (done) {
    // Shared expectations for both the initial load and the cached hit.
    const expectSingleLoad = function (err, result) {
      assert.isNull(err);
      assert.equal(result, 1);
      assert.equal(loadTimes, 1);
    };
    memoized(function (err, result) {
      expectSingleLoad(err, result);
      memoized(function (err, result) {
        expectSingleLoad(err, result);
        done();
      });
    });
  });

  it('should use the hash function for keys', function (done) {
    memoized(function () {
      memoized(function () {
        // With no hash configured, the fallback key is '_'.
        assert.includeMembers(memoized.keys(), ['_']);
        done();
      });
    });
  });
});

View File

@@ -0,0 +1,110 @@
var memoizer = require('./..');
var assert = require('chai').assert;
// Tests for `queueMaxAge`: when the queue behind a pending load outlives
// this age, later callers start a brand-new queue (and a new load) instead
// of waiting on the stale one.
describe('lru-memoizer (queueMaxAge)', function () {
  var loadTimes = 0, memoized;

  beforeEach(function () {
    loadTimes = 0;
  });

  // Minimal pub/sub helper: the load function parks its resolution behind
  // `listen`, and the test releases a whole queue by calling `trigger`.
  function observer() {
    const listeners = [];
    return {
      listen(listener) {
        listeners.push(listener);
      },
      trigger() {
        listeners.forEach(listener => listener());
      }
    };
  }

  it('should create a new queue once expired', function (done) {
    memoized = memoizer({
      // Only the first caller of each queue reaches `load`; it registers
      // its resolution with the observer passed as `onResolve`.
      load: function (a, b, onResolve, callback) {
        loadTimes++;
        onResolve(() => callback(null, a + b));
      },
      queueMaxAge: 10,
      hash: function (a, b) {
        return a + '-' + b;
      }
    });

    const observer1 = observer();
    const observer2 = observer();
    const observer3 = observer();
    const resolved = [];

    memoized(1, 2, observer1.listen, function (err, result) {
      assert.isNull(err);
      assert.strictEqual(result, 3);
      resolved.push('A');
    });
    assert.strictEqual(loadTimes, 1);

    // Queued behind the pending load: `load` must not run again, so this
    // caller's onResolve must never be invoked (assert.fail if it is).
    memoized(1, 2, assert.fail, function (err, result) {
      assert.isNull(err);
      assert.strictEqual(result, 3);
      resolved.push('B');
    });
    assert.strictEqual(loadTimes, 1);

    setTimeout(() => {
      // previous queue expired; these calls will be added to a new queue.
      memoized(1, 2, observer2.listen, function (err, result) {
        assert.isNull(err);
        assert.strictEqual(result, 3);
        resolved.push('C');
      });
      memoized(1, 2, assert.fail, function (err, result) {
        assert.isNull(err);
        assert.strictEqual(result, 3);
        resolved.push('D');
      });
      // only one new invocation to load
      assert.strictEqual(loadTimes, 2);

      setTimeout(() => {
        // second queue expired; these calls will be added to a third queue.
        memoized(1, 2, observer3.listen, function (err, result) {
          assert.isNull(err);
          assert.strictEqual(result, 3);
          resolved.push('E');
        });
        // Fixed: was `assert.fail.listen` (undefined) — queued callers must
        // pass `assert.fail`, matching the other queue members above.
        memoized(1, 2, assert.fail, function (err, result) {
          assert.isNull(err);
          assert.strictEqual(result, 3);
          resolved.push('F');
        });
        assert.strictEqual(loadTimes, 3);

        // Queues resolve independently and in the order they are triggered,
        // regardless of creation order.
        observer1.trigger();
        setImmediate(() => {
          // first queue was resolved
          assert.deepEqual(['A', 'B'], resolved);
          observer3.trigger();
          setImmediate(() => {
            // third queue was resolved
            assert.deepEqual(['A', 'B', 'E', 'F'], resolved);
            observer2.trigger();
            setImmediate(() => {
              // second queue was resolved
              assert.deepEqual(['A', 'B', 'E', 'F', 'C', 'D'], resolved);
              done();
            });
          });
        });
      }, 100);
    }, 100);
  });
});

View File

@@ -0,0 +1,76 @@
const memoizer = require('./..');
const assert = require('chai').assert;
// With `clone: true` the sync memoizer caches one structure but returns a
// deep copy on every call, so callers can mutate results safely.
describe('lru-memoizer sync (clone)', () => {
  describe('call', () => {
    let loadTimes = 0;
    let memoized;

    beforeEach(() => {
      loadTimes = 0;
      memoized = memoizer.sync({
        load: (key) => {
          loadTimes++;
          return { foo: key, buffer: Buffer.from('1234') };
        },
        hash: (key) => key,
        clone: true
      });
    });

    it('should return a clone every time with the same cached structure', () => {
      const first = memoized('bar');
      assert.strictEqual(loadTimes, 1);
      assert.equal(first.foo, 'bar');
      // Mutating one clone must not leak into the cached original.
      first.foo = 'bax';
      const second = memoized('bar');
      assert.strictEqual(loadTimes, 1);
      assert.equal(second.foo, 'bar');
      assert.notStrictEqual(first, second);
      assert.notEqual(first, second);
    });
  });

  describe('Promise', () => {
    let loadTimes = 0;
    let memoized;

    beforeEach(() => {
      loadTimes = 0;
      memoized = memoizer.sync({
        // Promise-returning load: the resolved value is what gets cached.
        load: (key) => {
          loadTimes++;
          return Promise.resolve({ foo: key, buffer: Buffer.from('1234') });
        },
        hash: (key) => key,
        clone: true
      });
    });

    it('should return a clone every time with the same cached structure', (done) => {
      memoized('bar').then((first) => {
        assert.strictEqual(loadTimes, 1);
        assert.equal(first.foo, 'bar');
        first.foo = 'bax';
        memoized('bar').then((second) => {
          assert.strictEqual(loadTimes, 1);
          assert.equal(second.foo, 'bar');
          assert.notStrictEqual(first, second);
          assert.notEqual(first, second);
          done();
        });
      })
      .catch(done);
    });
  });
});

View File

@@ -0,0 +1,95 @@
const memoizer = require('./..');
const sinon = require('sinon');
// Event emission tests for the sync memoizer: 'miss' on a fresh load,
// 'hit' on a cache hit, and never 'queue' (sync calls cannot overlap).
describe('lru-memoizer sync (events)', function () {
  let memoized;
  let onMiss, onHit, onQueue;

  beforeEach(function () {
    // Fixed: removed stray `loadTimes = 0;` — it was an undeclared (implicit
    // global) variable and is never read in this suite.
    onMiss = sinon.stub();
    onHit = sinon.stub();
    onQueue = sinon.stub();
    memoized = memoizer.sync({
      load: function (a, b, bypass) {
        return a + b;
      },
      hash: function (a, b, bypass) {
        return a + '-' + b;
      },
      // When this returns true the cache is skipped and `load` runs again.
      bypass: function(a, b, bypass) {
        return bypass;
      },
      max: 10
    });
    memoized.on('hit', onHit);
    memoized.on('miss', onMiss);
    memoized.on('queue', onQueue);
  });

  describe('when the result is not in the cache', () => {
    beforeEach(() => {
      memoized(1, 2, false);
    });
    it('should not call onHit', () => {
      sinon.assert.notCalled(onHit);
    });
    it('should not call onQueue', () => {
      sinon.assert.notCalled(onQueue);
    });
    it('should call onMiss with the load arguments', () => {
      sinon.assert.calledOnce(onMiss);
      sinon.assert.calledWith(onMiss, 1, 2, false);
    });
  });

  describe('when the result is in the cache', () => {
    beforeEach(() => {
      // Warm the cache, then reset the stubs so only the second call is
      // observed by the assertions.
      memoized(1,2, false);
      onHit.reset();
      onMiss.reset();
      onQueue.reset();
      memoized(1, 2, false);
    });
    it('should call onHit with the load arguments', () => {
      sinon.assert.calledOnce(onHit);
      sinon.assert.calledWith(onHit, 1, 2, false);
    });
    it('should not call onQueue', () => {
      sinon.assert.notCalled(onQueue);
    });
    it('should not call onMiss', () => {
      // Fixed: this test asserted `onQueue` instead of `onMiss` (copy-paste
      // bug), so a spurious 'miss' event would have gone undetected.
      sinon.assert.notCalled(onMiss);
    });
  });

  describe('when the cache is bypassed', () => {
    beforeEach(() => {
      memoized(1,2, false);
      onHit.reset();
      onMiss.reset();
      onQueue.reset();
      // bypass=true: the cached entry for '1-2' is ignored.
      memoized(1, 2, true);
    });
    it('should not call onHit', () => {
      sinon.assert.notCalled(onHit);
    });
    it('should not call onQueue', () => {
      sinon.assert.notCalled(onQueue);
    });
    it('should call onMiss with the load arguments', () => {
      sinon.assert.calledOnce(onMiss);
      sinon.assert.calledWith(onMiss, 1, 2, true);
    });
  });
});

View File

@@ -0,0 +1,74 @@
const memoizer = require('./..');
const assert = require('chai').assert;
// With `freeze: true` the sync memoizer returns the same frozen cached
// structure on every call (direct value and Promise-based loads).
describe('lru-memoizer sync (freeze)', () => {
  describe('call', () => {
    let loadTimes = 0;
    let memoized;

    beforeEach(() => {
      loadTimes = 0;
      memoized = memoizer.sync({
        load: (key) => {
          loadTimes++;
          return { foo: key, buffer: Buffer.from('1234') };
        },
        hash: (key) => key,
        freeze: true
      });
    });

    it('should return a freeze every time with the same cached structure', () => {
      const first = memoized('bar');
      assert.strictEqual(loadTimes, 1);
      assert.equal(first.foo, 'bar');
      assert.isFrozen(first);
      const second = memoized('bar');
      assert.strictEqual(loadTimes, 1);
      assert.equal(second.foo, 'bar');
      assert.isFrozen(second);
    });
  });

  describe('Promise', () => {
    let loadTimes = 0;
    let memoized;

    beforeEach(() => {
      loadTimes = 0;
      memoized = memoizer.sync({
        // Promise-returning load: the resolved value is frozen and cached.
        load: (key) => {
          loadTimes++;
          return Promise.resolve({ foo: key, buffer: Buffer.from('1234') });
        },
        hash: (key) => key,
        freeze: true
      });
    });

    it('should return a freeze every time with the same cached structure', (done) => {
      memoized('bar').then((first) => {
        assert.strictEqual(loadTimes, 1);
        assert.equal(first.foo, 'bar');
        assert.isFrozen(first);
        memoized('bar').then((second) => {
          assert.strictEqual(loadTimes, 1);
          assert.equal(second.foo, 'bar');
          assert.isFrozen(second);
          done();
        });
      })
      .catch(done);
    });
  });
});

View File

@@ -0,0 +1,47 @@
var memoizer = require('./..');
var assert = require('chai').assert;
// Core behavior of the synchronous memoizer: caching, key hashing, and
// never caching a load that threw.
describe('lru-memoizer sync', function () {
  var loadTimes = 0, memoized;

  beforeEach(function () {
    loadTimes = 0;
    memoized = memoizer.sync({
      load: function (a, b) {
        loadTimes++;
        if (a === 0) {
          throw new Error('a cant be 0');
        }
        return a + b;
      },
      hash: function (a, b) {
        return a + '-' + b;
      },
      max: 10
    });
  });

  // Fixed description: this suite tests the sync memoizer ("async" was a
  // copy-paste from the async suite).
  it('should cache the result of a sync function', function () {
    var result = memoized(1, 2);
    assert.equal(result, 3);
    assert.equal(loadTimes, 1);
    var result2 = memoized(1,2);
    assert.equal(result2, 3);
    assert.equal(loadTimes, 1);
  });

  it('should use the hash function for keys', function () {
    memoized(1, 2);
    memoized(2, 3);
    assert.includeMembers(memoized.keys(), ['1-2', '2-3']);
  });

  it('should not cache errored funcs', function () {
    try {
      memoized(0, 2);
    } catch(err) {
      // expected: load throws when a === 0
    }
    // Fixed: the needle must be the string key, not an array — `keys()` can
    // never contain the array ['0-2'], so the old assertion was vacuous.
    assert.notInclude(memoized.keys(), '0-2');
  });
});

88
node_modules/lru-memoizer/test/lru-memoizer.test.js generated vendored Normal file
View File

@@ -0,0 +1,88 @@
var memoizer = require('./..');
var assert = require('chai').assert;
// Core behavior of the async memoizer: caching, hashing, error handling,
// and the exposed helpers (hash, load, max, del).
describe('lru-memoizer', function () {
  var loadTimes = 0, memoized;

  beforeEach(function () {
    loadTimes = 0;
    memoized = memoizer({
      // Async load with a simulated error path: a === 0 fails.
      load: function (a, b, callback) {
        loadTimes++;
        return setTimeout(function () {
          if (a === 0) {
            return callback(new Error('a cant be 0'));
          }
          callback(null, a+b);
        }, 10);
      },
      hash: function (a, b) {
        return a + '-' + b;
      },
      max: 10
    });
  });

  it('should cache the result of an async function', function (done) {
    memoized(1,2, function (err, result) {
      assert.isNull(err);
      assert.strictEqual(result, 3);
      assert.strictEqual(loadTimes, 1);
      memoized(1,2, function (err, result) {
        assert.isNull(err);
        assert.strictEqual(result, 3);
        assert.strictEqual(loadTimes, 1);
        done();
      });
    });
  });

  it('should use the hash function for keys', function (done) {
    memoized(1, 2, function () {
      memoized(2,3, function () {
        assert.includeMembers(memoized.keys(), ['1-2', '2-3']);
        done();
      });
    });
  });

  it('should not cache errored funcs', function (done) {
    memoized(0, 2, function (err) {
      assert.isNotNull(err);
      // Fixed: the needle must be the string key, not an array — `keys()`
      // can never contain ['0-2'], so the old assertion was vacuous.
      assert.notInclude(memoized.keys(), '0-2');
      done();
    });
  });

  it('should expose the hash function', function() {
    assert.equal(memoized.hash(0, 2), '0-2');
  });

  it('should expose the load function', function(done) {
    memoized.load(1, 2, (err, result) => {
      // Added err check: a failing load previously passed this test with
      // result === undefined going unnoticed until the equal() below.
      assert.isNull(err);
      assert.equal(result, 3);
      done();
    });
  });

  it('should expose the max prop', function() {
    assert.equal(memoized.max, 10);
  });

  it('should allow to del a key', function(done) {
    memoized(1,2, () => {
      assert.strictEqual(loadTimes, 1);
      // After del, the same key must trigger a fresh load.
      memoized.del(1,2);
      memoized(1,2, (err, result) => {
        assert.isNull(err);
        assert.strictEqual(result, 3);
        assert.strictEqual(loadTimes, 2);
        done();
      });
    });
  });
});

59
node_modules/lru-memoizer/tsconfig.json generated vendored Normal file
View File

@@ -0,0 +1,59 @@
{
"compilerOptions": {
/* Basic Options */
"target": "ES5", /* Specify ECMAScript target version: 'ES3' (default), 'ES5', 'ES2015', 'ES2016', 'ES2017','ES2018' or 'ESNEXT'. */
"module": "commonjs", /* Specify module code generation: 'none', 'commonjs', 'amd', 'system', 'umd', 'es2015', or 'ESNext'. */
"lib": ["es2015", "es2017"], /* Specify library files to be included in the compilation. */
// "allowJs": true, /* Allow javascript files to be compiled. */
// "checkJs": true, /* Report errors in .js files. */
// "jsx": "preserve", /* Specify JSX code generation: 'preserve', 'react-native', or 'react'. */
"declaration": true, /* Generates corresponding '.d.ts' file. */
// "declarationMap": true, /* Generates a sourcemap for each corresponding '.d.ts' file. */
// "sourceMap": true, /* Generates corresponding '.map' file. */
// "outFile": "./", /* Concatenate and emit output to single file. */
"outDir": "./lib", /* Redirect output structure to the directory. */
// "rootDir": "./", /* Specify the root directory of input files. Use to control the output directory structure with --outDir. */
// "composite": true, /* Enable project compilation */
// "removeComments": true, /* Do not emit comments to output. */
// "noEmit": true, /* Do not emit outputs. */
// "importHelpers": true, /* Import emit helpers from 'tslib'. */
"downlevelIteration": true, /* Provide full support for iterables in 'for-of', spread, and destructuring when targeting 'ES5' or 'ES3'. */
// "isolatedModules": true, /* Transpile each file as a separate module (similar to 'ts.transpileModule'). */
/* Strict Type-Checking Options */
"strict": true, /* Enable all strict type-checking options. */
// "noImplicitAny": true, /* Raise error on expressions and declarations with an implied 'any' type. */
"strictNullChecks": false, /* Enable strict null checks. */
// "strictFunctionTypes": true, /* Enable strict checking of function types. */
// "strictPropertyInitialization": false, /* Enable strict checking of property initialization in classes. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */
/* Additional Checks */
// "noUnusedLocals": true, /* Report errors on unused locals. */
// "noUnusedParameters": true, /* Report errors on unused parameters. */
// "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
// "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
/* Module Resolution Options */
// "moduleResolution": "node", /* Specify module resolution strategy: 'node' (Node.js) or 'classic' (TypeScript pre-1.6). */
// "baseUrl": "./", /* Base directory to resolve non-absolute module names. */
// "paths": {}, /* A series of entries which re-map imports to lookup locations relative to the 'baseUrl'. */
// "rootDirs": [], /* List of root folders whose combined content represents the structure of the project at runtime. */
// "typeRoots": [], /* List of folders to include type definitions from. */
// "types": [], /* Type declaration files to be included in compilation. */
// "allowSyntheticDefaultImports": true, /* Allow default imports from modules with no default export. This does not affect code emit, just typechecking. */
"esModuleInterop": true, /* Enables emit interoperability between CommonJS and ES Modules via creation of namespace objects for all imports. Implies 'allowSyntheticDefaultImports'. */
// "preserveSymlinks": true, /* Do not resolve the real path of symlinks. */
/* Source Map Options */
// "sourceRoot": "", /* Specify the location where debugger should locate TypeScript files instead of source locations. */
// "mapRoot": "", /* Specify the location where debugger should locate map files instead of generated locations. */
"inlineSourceMap": true, /* Emit a single file with source maps instead of having a separate file. */
// "inlineSources": true, /* Emit the source alongside the sourcemaps within a single file; requires '--inlineSourceMap' or '--sourceMap' to be set. */
/* Experimental Options */
// "experimentalDecorators": true, /* Enables experimental support for ES7 decorators. */
// "emitDecoratorMetadata": true, /* Enables experimental support for emitting type metadata for decorators. */
}
}