WIP - add extractor, generate snippet_data

This commit is contained in:
Stefan Fejes
2019-08-20 15:52:05 +02:00
parent 88084d3d30
commit cc8f1d8a7a
37396 changed files with 4588842 additions and 133 deletions

1
node_modules/cache-manager/.coveralls.yml generated vendored Normal file

@@ -0,0 +1 @@
service_name: travis-ci

106
node_modules/cache-manager/.jscs.json generated vendored Normal file

@@ -0,0 +1,106 @@
{
"requireCurlyBraces": ["if", "else", "for", "while", "do", "try", "catch"],
"requireSpaceAfterKeywords": ["if", "else", "for", "while", "do", "switch", "return", "try", "catch"],
"requireSpaceBeforeKeywords": [
"else",
"while",
"catch"
],
"requireSpaceBeforeBinaryOperators": ["?", "+", "/", "*", "=", "==", "===", "!=", "!==", ">", ">=", "<", "<="],
"requireSpaceAfterBinaryOperators": ["?", "+", "/", "*", "=", "==", "===", "!=", "!==", ">", ">=", "<", "<="],
"disallowSpaceAfterBinaryOperators": ["!"],
"disallowSpaceBeforeBinaryOperators": [","],
"requireCamelCaseOrUpperCaseIdentifiers": true,
"disallowMultipleVarDecl": true,
"disallowEmptyBlocks": true,
"disallowKeywords": ["with"],
"disallowKeywordsOnNewLine": ["else"],
"disallowSpacesInsideObjectBrackets": true,
"disallowSpacesInsideArrayBrackets": true,
"disallowSpacesInsideParentheses": true,
"disallowSpaceBeforePostfixUnaryOperators": ["++", "--"],
"disallowMultipleLineStrings": true,
"disallowTrailingWhitespace": true,
"disallowPaddingNewlinesInBlocks": true,
"requireCommaBeforeLineBreak": true,
"requireParenthesesAroundIIFE": true,
"requireSpaceBeforeBlockStatements": true,
"disallowNewlineBeforeBlockStatements": true,
"requireSpacesInConditionalExpression": true,
"requireSpacesInFunctionExpression": {
"beforeOpeningCurlyBrace": true
},
"disallowSpacesInAnonymousFunctionExpression": {
"beforeOpeningRoundBrace": true
},
"disallowSpacesInFunctionDeclaration": {
"beforeOpeningRoundBrace": true
},
"disallowSpacesInFunctionExpression": {
"beforeOpeningRoundBrace": true
},
"requireSpaceBeforeBinaryOperators": [
"+",
"-",
"/",
"*",
"=",
"==",
"===",
"!=",
"!=="
],
"requireOperatorBeforeLineBreak": [
"?",
"+",
"-",
"/",
"*",
"=",
"==",
"===",
"!=",
"!==",
">",
">=",
"<",
"<="
],
"safeContextKeyword": "self",
"maximumLineLength": {
"value": 120,
"allowUrlComments": true,
"allowRegex": true
},
"validateIndentation": 4,
"validateParameterSeparator": ", ",
"excludeFiles": [
"test/data/**",
"test/support/fakes/**"
],
"jsDoc": {
"checkParamNames": true,
"requireParamTypes": true
},
"disallowMultipleLineBreaks": true,
"validateLineBreaks": "LF",
"disallowYodaConditions": true
}

87
node_modules/cache-manager/.jshintrc generated vendored Normal file

@@ -0,0 +1,87 @@
{
// Settings
"passfail" : false, // Stop on first error.
"maxerr" : 500, // Maximum errors before stopping.
"multistr" : true,
// Predefined globals that JSHint will ignore.
"browser" : true, // Standard browser globals e.g. `window`, `document`.
"node" : false,
"rhino" : false,
"couch" : false,
"wsh" : true, // Windows Scripting Host.
"jquery" : true,
"prototypejs" : false,
"mootools" : false,
"dojo" : false,
"predef" : [ // Extra globals.
"__dirname",
"Buffer",
"event",
"exports",
"global",
"module",
"process",
"require",
"daisyjs",
"after",
"afterEach",
"before",
"beforeEach",
"context",
"describe",
"it"
],
// Development.
"debug" : false, // Allow debugger statements e.g. browser breakpoints.
"devel" : true, // Allow development statements e.g. `console.log();`.
// EcmaScript 5.
"strict" : false, // Require `use strict` pragma in every file.
"globalstrict" : false, // Allow global "use strict" (also enables 'strict').
"asi" : false, // Tolerate Automatic Semicolon Insertion (no semicolons).
"laxbreak" : false, // Tolerate unsafe line breaks e.g. `return [\n] x` without semicolons.
"bitwise" : true, // Prohibit bitwise operators (&, |, ^, etc.).
"boss" : false, // Tolerate assignments inside if, for & while. Usually conditions & loops are for comparison, not assignments.
"curly" : true, // Require {} for every new block or scope.
"eqeqeq" : false, // Require triple equals i.e. `===`.
"eqnull" : false, // Tolerate use of `== null`.
"evil" : false, // Tolerate use of `eval`.
"expr" : false, // Tolerate `ExpressionStatement` as Programs.
"forin" : false, // Tolerate `for in` loops without `hasOwnProperty`.
"immed" : true, // Require immediate invocations to be wrapped in parens e.g. `( function(){}() );`
"latedef" : "nofunc", // Prohibit variable use before definition.
"loopfunc" : true, // Allow functions to be defined within loops.
"maxparams" : 4,
"maxdepth" : 5,
"maxcomplexity" : 8,
"maxstatements" : 40,
"noarg" : true, // Prohibit use of `arguments.caller` and `arguments.callee`.
"regexp" : false, // Prohibit `.` and `[^...]` in regular expressions.
"regexdash" : false, // Tolerate unescaped last dash i.e. `[-...]`.
"scripturl" : true, // Tolerate script-targeted URLs.
"shadow" : false, // Allows re-define variables later in code e.g. `var x=1; x=2;`.
"supernew" : false, // Tolerate `new function () { ... };` and `new Object;`.
"undef" : true, // Require all non-global variables be declared before they are used.
"newcap" : false, // Require capitalization of all constructor functions e.g. `new F()`.
"noempty" : true, // Prohibit use of empty blocks.
"nonew" : false, // Prohibit use of constructors for side-effects.
"nomen" : false, // Prohibit use of initial or trailing underbars in names.
"onevar" : false, // Allow only one `var` statement per function.
"plusplus" : false, // Prohibit use of `++` & `--`.
"sub" : true, // Tolerate all forms of subscript notation besides dot notation e.g. `dict['key']` instead of `dict.key`.
"trailing" : true, // Prohibit trailing whitespaces. (only works if white is 'true')
"white" : true, // Check against strict whitespace and indentation rules.
"indent" : 4,
"unused" : true
}

8
node_modules/cache-manager/.travis.yml generated vendored Normal file

@@ -0,0 +1,8 @@
language: node_js
node_js:
- '0.12'
- '5.5'
script: 'make test-travis'
branches:
only:
- master

192
node_modules/cache-manager/History.md generated vendored Normal file

@@ -0,0 +1,192 @@
- 2.10.0 2019-07-11
- Add development store "none" (#129). - @R3VoLuT1OneR
- 2.9.1 2019-05-28
- Bind isCacheableValue to its underlying store (#126). - @arash16
- 2.9.0 2018-03-26
- Fix store's this context for isCacheableValue method (#119). - @alexandrusavin
- 2.8.0 2018-02-28
- Added link to node-cache-manager-fs-hash - @rolandstarke
- Bugfix for options object in multiCache.set (#115) - @sebelga
- Bugfix for keysToFetch in getFromHighestPriorityCache (#117) - @sebelga
- 2.7.0 2018-02-13
- allow setting, getting and deleting multiple keys (#114) - @sebelga
- allow passing in a function to determine TTL based on store - @sebelga
- 2.6.0 2017-12-08
- fix multicaching when result is not cacheable (#106) - @gswalden
- 2.5.0 2017-10-09
- Add explicit return in wrapPromise (#109) - @jeff-kilbride
- 2.4.0 2017-01-17
- Added ability to use a dynamic cache ttl (#65) - @philippeauriach
- 2.3.0 2016-12-22
- Updating isCacheableValue description in README; README syntax error fix (#70, #71) - @lukechilds
- Calling back with null as first argument in memory store to keep style consistent (#72) - @defcc
- 2.2.0 2016-10-19
- Adding multi_caching.reset() (#63) - @disjunction
- 2.1.2 2016-06-08
- Checking that callback array exists before iterating over it (#57).
- 2.1.1 2016-05-24
- Fixing version number in package.json.
- 2.1.0 2016-05-24
- Allow passing in a promise dependency (#55). - @siddharthkchatterjee
- 2.0.1 2016-04-18
- Fixed triggering callback in wrap after it was queued (#48) - @theorm
- 2.0.0 2016-03-13
- Removing domain integration (#38), no longer actively supporting node 0.10.x.
- 1.5.0 2016-03-13
- npm bumps, making sure global.Promise is not defined in node 0.10 memory store test.
- 1.4.1 2016-03-13
- Fixing backward-compatibility Promise issue with node 0.10.x in memory store.
- 1.4.0 2016-02-03
- Passing ttl of 0 to lruCache, upgrading to lru-cache 4.0.0
- 1.3.0 2016-01-26
- Promise support (#39, #24) - @PuKoren
- 1.2.2 2015-10-19
- Bugfix: Fixing domain error issues when error is thrown inside 'work' function (#28).
- 1.2.1 2015-10-17
- Bugfix: multi-caching: using underlying store's isCacheableValue function when it exists (#34).
- 1.2.0 2015-10-07
- using `isCacheableValue` in `getFromHighestPriorityCache` and `getAndPassUp` (#32).
- 1.1.0 2015-07-22
- Allow stores to override isCacheableValue. - @PuKoren
- Allow overriding ttl in memory cache's set function. - @zhudan
- 1.0.0 2015-05-23
- Added JSDOC generation (`make docs`)
- (Breaking change) By default, cache falsey values like `false`, `0`, and `null`, but not `undefined` (#25).
- Allow users to pass in callback function `isCacheableValue` to specify what to cache.
- (Breaking change) Removed deprecated lower-case `multi_caching` export (use `multiCaching` instead).
- (Breaking change) Removed `multiCaching#get_and_pass_up` (use `getAndPassUp` instead).
- (Breaking change) Cache store methods must accept an `options` param (which can be ignored). Eg.,
`function set(key, val, options, cb) { }`
- (Breaking change) caching/multicaching methods no longer accept a `ttl` param. You must instead pass
in an options object which will be passed to the cache store's `set` method.
- (Breaking change) caching.js no longer accepts a path to cache store. Pass in an object or 'memory' instead.
- 0.19.0 2015-03-29
- Pass dispose, length & stale options to lru-cache (#22). - @gmaclennan
- 0.18.0 2015-02-12
- Minor changes and refactorings including:
- converting to camelcase
- hiding queues inside CallbackFiller
- general example updates
- updated redis example to use latest redis npm
- not trying to pass ttl into cache.set() in getAndPassUp() (this didn't
work anyway)
- 0.17.0 2015-02-05
- Add Additional Options Parameter (#20) - @seanzx85
- Fixing bug with nested calls to wrap() (#21)
- 0.16.0 2015-01-07
- Get and pass up feature to update higher caches. (#19) - raadad
- Minor style tweaks/jscs update.
- 0.15.0 2014-12-18
- Moved cache queue before the store get function (up to 2x performance boost). (#18) - aletorrado
- Added domain support to make sure the wrap callback function is always called - aletorrado
- 0.14.0 2014-10-15
- Set ttl in wrap #14 - nguyenchr
- Added JSCS for style checking
- 0.13.0 2014-10-14
- Applied work function locking for multi_caching (#13). -aletorrado
- 0.12.0 2014-10-09
- Checking for existence of del() method before binding to it. Fixes #11.
- 0.11.0 2014-09-18
- Prevent stalemate by executing callbacks on error. Fixes #10 - elliotttf
- 0.10.1 2014-09-10
- Fixed tag/version mismatch
- 0.10.0 2014-09-10
- Fixing Use call instead of apply for cached results, issue #9 (thanks elliotttf)
- 0.9.0 2014-08-19
- Fixing issue #8 - parallel requests to a wrapped function were calling the
function multiple times. (Thanks alex-whitney).
- 0.8.0 2014-07-07
- Adding setex() (Thanks evanlucas)
- 0.7.1 2014-06-15
- Adding link to Express.js cache-manager example app
- 0.7.0 2014-06-15
- Bumping package versions, mostly devDependencies
- 0.6.0 2014-06-15
- Adding caching.keys() function (issue #6)
- Updating examples/redis_example/example.js with cache.keys() usage
- Allow calling memory store get() without callback
- 0.5.0 2014-05-02
- Adding reset() function to caching.js. Closes #5.
- 0.4.0 2014-05-02
- New arg to ignore cache errors. if set cache errors will be ignored
and the cache_manager will go to the backing store. (Thanks londonjamo).
- 0.3.0 2013-12-08
- Bound the get, set and del functions to their original “this” context when assigning a store.
(Thanks to Boyan Rabchev)
- 0.2.0 2013-10-31
- Better examples, version bump.
- 0.1.3 2013-10-31
- Fixing unreleased connection in redis example.
- 0.1.2 2013-10-13
- Wrapping synchronous memory cache callbacks in process.nextTick() for the purists.
- 0.1.1 2013-10-13
- Travis and Coveralls integration testing.
- 0.1.0 2013-10-13
- Removing built-in Redis store to emphasize that you should plug in your own
cache store.
- 0.0.5 2013-10-13
- Removing hiredis requirement.
- 0.0.4 2013-08-01
- Better error checking in multi_cache.wrap();
- 0.0.3 2013-07-10
- Better error checking in cache.wrap();
- 0.0.2 2013-04-08
- Added ability to pass in a store module that isn't already instantiated. E.g.,
```javascript
var store = require('/path/to/my_memory_store');
cache = caching({store: store});
```
- 0.0.1 2013-04-08
- Initial release.

23
node_modules/cache-manager/LICENSE generated vendored Normal file

@@ -0,0 +1,23 @@
Copyrights for code authored by MOG Inc. is licensed under the following terms:
MIT License
Copyright (c) 2011 MOG Inc. All Rights Reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to
deal in the Software without restriction, including without limitation the
rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
sell copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.

35
node_modules/cache-manager/Makefile generated vendored Normal file

@@ -0,0 +1,35 @@
BASE = .
ISTANBUL = ./node_modules/.bin/istanbul
COVERAGE_OPTS = --lines 99 --statements 95 --branches 90 --functions 95
main: lint test docs
cover:
$(ISTANBUL) cover test/run.js
check-coverage:
$(ISTANBUL) check-coverage $(COVERAGE_OPTS)
test: cover check-coverage
test-cov: cover check-coverage
open coverage/lcov-report/index.html
test-travis: lint
./node_modules/.bin/istanbul cover test/run.js --report lcovonly \
-- -T unit,functional -R spec && cat ./coverage/lcov.info | ./node_modules/coveralls/bin/coveralls.js && rm -rf ./coverage
lint:
./node_modules/.bin/jscs ./lib && \
./node_modules/.bin/jscs ./test && \
./node_modules/.bin/jscs ./examples && \
./node_modules/.bin/jshint ./lib --config $(BASE)/.jshintrc && \
./node_modules/.bin/jshint ./test --config $(BASE)/.jshintrc
./node_modules/.bin/jshint ./examples --config $(BASE)/.jshintrc
docs:
./node_modules/.bin/jsdoc lib --recurse --readme README.md --package package.json
echo docs available in ./out/index.html
.PHONY: test

442
node_modules/cache-manager/README.md generated vendored Normal file

@@ -0,0 +1,442 @@
[![build status](https://secure.travis-ci.org/BryanDonovan/node-cache-manager.svg)](http://travis-ci.org/BryanDonovan/node-cache-manager)
[![Coverage Status](https://coveralls.io/repos/BryanDonovan/node-cache-manager/badge.svg?branch=master)](https://coveralls.io/r/BryanDonovan/node-cache-manager?branch=master)
node-cache-manager
======================
# Flexible NodeJS cache module
A cache module for nodejs that allows easy wrapping of functions in cache,
tiered caches, and a consistent interface.
## Features
* Easy way to wrap any function in cache.
* Tiered caches -- data gets stored in each cache and fetched from the highest
priority cache(s) first.
* Use any cache you want, as long as it has the same API.
* 100% test coverage via [mocha](https://github.com/visionmedia/mocha),
[istanbul](https://github.com/yahoo/istanbul), and [sinon](http://sinonjs.org).
## Express.js Example
See the [Express.js cache-manager example app](https://github.com/BryanDonovan/node-cache-manager-express-example) to see how to use
``node-cache-manager`` in your applications.
## Installation
npm install cache-manager
## Store Engines
* [node-cache-manager-redis](https://github.com/dial-once/node-cache-manager-redis) (uses [sol-redis-pool](https://github.com/joshuah/sol-redis-pool))
* [node-cache-manager-redis-store](https://github.com/dabroek/node-cache-manager-redis-store) (uses [node_redis](https://github.com/NodeRedis/node_redis))
* [node-cache-manager-ioredis](https://github.com/dabroek/node-cache-manager-ioredis) (uses [ioredis](https://github.com/luin/ioredis))
* [node-cache-manager-mongodb](https://github.com/v4l3r10/node-cache-manager-mongodb)
* [node-cache-manager-mongoose](https://github.com/disjunction/node-cache-manager-mongoose)
* [node-cache-manager-fs](https://github.com/hotelde/node-cache-manager-fs)
* [node-cache-manager-fs-binary](https://github.com/sheershoff/node-cache-manager-fs-binary)
* [node-cache-manager-fs-hash](https://github.com/rolandstarke/node-cache-manager-fs-hash)
* [node-cache-manager-hazelcast](https://github.com/marudor/node-cache-manager-hazelcast)
* [node-cache-manager-memcached-store](https://github.com/theogravity/node-cache-manager-memcached-store)
* [node-cache-manager-memory-store](https://github.com/theogravity/node-cache-manager-memory-store)
## Overview
**First**, it includes a `wrap` function that lets you wrap any function in cache.
(Note, this was inspired by [node-caching](https://github.com/mape/node-caching).)
This is probably the feature you're looking for. As an example, where you might have to do this:
```javascript
function getCachedUser(id, cb) {
memoryCache.get(id, function (err, result) {
if (err) { return cb(err); }
if (result) {
return cb(null, result);
}
getUser(id, function (err, result) {
if (err) { return cb(err); }
memoryCache.set(id, result);
cb(null, result);
});
});
}
```
... you can instead use the `wrap` function:
```javascript
function getCachedUser(id, cb) {
memoryCache.wrap(id, function (cacheCallback) {
getUser(id, cacheCallback);
}, {ttl: ttl}, cb);
}
```
**Second**, node-cache-manager features a built-in memory cache (using [node-lru-cache](https://github.com/isaacs/node-lru-cache)),
with the standard functions you'd expect in most caches:
set(key, val, {ttl: ttl}, cb) // * see note below
get(key, cb)
del(key, cb)
mset(key1, val1, key2, val2, {ttl: ttl}, cb) // set several keys at once
mget(key1, key2, key3, cb) // get several keys at once
// * Note that depending on the underlying store, you may be able to pass the
// ttl as the third param, like this:
set(key, val, ttl, cb)
// ... or pass no ttl at all:
set(key, val, cb)
**Third**, node-cache-manager lets you set up a tiered cache strategy. This may be of
limited use in most cases, but imagine a scenario where you expect tons of
traffic, and don't want to hit your primary cache (like Redis) for every request.
You decide to store the most commonly-requested data in an in-memory cache,
perhaps with a very short timeout and/or a small data size limit. But you
still want to store the data in Redis for backup, and for the requests that
aren't as common as the ones you want to store in memory. This is something
node-cache-manager handles easily and transparently.
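A minimal sketch of that scenario, using a second in-memory cache as a stand-in for the Redis tier (`hotCache`, `backupCache`, and `getUserFromDb` are illustrative names, not part of the library):
```javascript
var cacheManager = require('cache-manager');
// Small, short-lived tier in front of a larger, longer-lived backup tier.
// In production the backup tier would typically be one of the Redis stores listed above.
var hotCache = cacheManager.caching({store: 'memory', max: 100, ttl: 10/*seconds*/});
var backupCache = cacheManager.caching({store: 'memory', max: 100000, ttl: 600});
var tieredCache = cacheManager.multiCaching([hotCache, backupCache]);
// Stand-in for a slow database call.
function getUserFromDb(id, cb) {
    setTimeout(function() { cb(null, {id: id, name: 'Bob'}); }, 100);
}
// Reads check hotCache first and fall back to backupCache; values fetched
// by the wrapped function are written to every tier.
tieredCache.wrap('user_123', function(cb) {
    getUserFromDb(123, cb);
}, {ttl: 10}, function(err, user) {
    console.log(user);
});
```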
**Fourth**, it allows you to get and set multiple keys at once for caching stores that support it. This means that when getting multiple keys it will go through the different caches, starting from the highest-priority one (see multi store below), and merge the values it finds at each level.
## Usage Examples
See examples below and in the examples directory. See ``examples/redis_example`` for an example of how to implement a
Redis cache store with connection pooling.
### Single Store
```javascript
var cacheManager = require('cache-manager');
var memoryCache = cacheManager.caching({store: 'memory', max: 100, ttl: 10/*seconds*/});
var ttl = 5;
// Note: callback is optional in set() and del().
memoryCache.set('foo', 'bar', {ttl: ttl}, function(err) {
if (err) { throw err; }
memoryCache.get('foo', function(err, result) {
console.log(result);
// >> 'bar'
memoryCache.del('foo', function(err) {});
});
});
function getUser(id, cb) {
setTimeout(function () {
console.log("Returning user from slow database.");
cb(null, {id: id, name: 'Bob'});
}, 100);
}
var userId = 123;
var key = 'user_' + userId;
// Note: ttl is optional in wrap()
memoryCache.wrap(key, function (cb) {
getUser(userId, cb);
}, {ttl: ttl}, function (err, user) {
console.log(user);
// Second time fetches user from memoryCache
memoryCache.wrap(key, function (cb) {
getUser(userId, cb);
}, function (err, user) {
console.log(user);
});
});
// Outputs:
// Returning user from slow database.
// { id: 123, name: 'Bob' }
// { id: 123, name: 'Bob' }
```
The `ttl` can also be computed dynamically by passing in a function. E.g.,
```javascript
var opts = {
ttl: function(user) {
if (user.id === 1) {
return 0.1;
} else {
return 0.5;
}
}
};
memoryCache.wrap(key, function(cb) {
getUser(userId, cb);
}, opts, function(err, user) {
console.log(user);
});
```
You can get several keys at once. E.g.
```js
var key1 = 'user_1';
var key2 = 'user_2';
memoryCache.wrap(key1, key2, function (cb) {
getManyUser([key1, key2], cb);
}, function (err, users) {
console.log(users[0]);
console.log(users[1]);
});
```
#### Example setting/getting several keys with mset() and mget()
```js
memoryCache.mset('foo', 'bar', 'foo2', 'bar2', {ttl: ttl}, function(err) {
if (err) { throw err; }
memoryCache.mget('foo', 'foo2', function(err, result) {
console.log(result);
// >> ['bar', 'bar2']
// Delete keys with del() passing arguments...
memoryCache.del('foo', 'foo2', function(err) {});
// ...passing an Array of keys
memoryCache.del(['foo', 'foo2'], function(err) {});
});
});
```
#### Example Using Promises
```javascript
memoryCache.wrap(key, function() {
return getUserPromise(userId);
})
.then(function(user) {
console.log('User:', user);
});
```
If you are using a Node version that does not include native promises, you can
specify your promise dependency in the options passed to the cache module.
E.g.,
```javascript
var Promise = require('es6-promise').Promise;
cache = caching({store: store, promiseDependency: Promise});
```
#### Example Using async/await
```javascript
try {
let user = await memoryCache.wrap(key, function() {
return getUserPromise(userId);
});
} catch (err) {
// error handling
}
```
Hint: you should wrap `await` calls in a `try`/`catch` block to handle promise rejections.
#### Example Express App Usage
(Also see the [Express.js cache-manager example app](https://github.com/BryanDonovan/node-cache-manager-express-example)).
```javascript
function respond(res, err, data) {
if (err) {
res.json(500, err);
} else {
res.json(200, data);
}
}
app.get('/foo/bar', function(req, res) {
var cacheKey = 'foo-bar:' + JSON.stringify(req.query);
var ttl = 10;
memoryCache.wrap(cacheKey, function(cacheCallback) {
DB.find(req.query, cacheCallback);
}, {ttl: ttl}, function(err, result) {
respond(res, err, result);
});
});
```
#### Custom Stores
You can use your own custom store by creating one with the same API as the
built-in memory stores (such as a redis or memcached store). To use your own store just pass
in an instance of it.
E.g.,
```javascript
var myStore = require('your-homemade-store');
var cache = cacheManager.caching({store: myStore});
```
### Multi-Store
```javascript
var multiCache = cacheManager.multiCaching([memoryCache, someOtherCache]);
userId2 = 456;
key2 = 'user_' + userId2;
ttl = 5;
// Sets in all caches.
// The "ttl" option can also be a function (see example below)
multiCache.set('foo2', 'bar2', {ttl: ttl}, function(err) {
if (err) { throw err; }
// Fetches from highest priority cache that has the key.
multiCache.get('foo2', function(err, result) {
console.log(result);
// >> 'bar2'
// Delete from all caches
multiCache.del('foo2');
});
});
// Set the ttl value by context depending on the store.
function getTTL(data, store) {
if (store === 'redis') {
return 6000;
}
return 3000;
}
// Sets multiple keys in all caches.
// You can pass as many key/value pairs as you want
multiCache.mset('key', 'value', 'key2', 'value2', {ttl: getTTL}, function(err) {
if (err) { throw err; }
// mget() fetches from highest priority cache.
// If the first cache does not return all the keys,
// the next cache is fetched with the keys that were not found.
// This is done recursively until either:
// - all have been found
// - all caches have been fetched
multiCache.mget('key', 'key2', function(err, result) {
console.log(result[0]);
console.log(result[1]);
// >> 'value'
// >> 'value2'
// Delete from all caches
multiCache.del('key', 'key2');
// ...or with an Array
multiCache.del(['key', 'key2']);
});
});
// Note: options with ttl are optional in wrap()
multiCache.wrap(key2, function (cb) {
getUser(userId2, cb);
}, {ttl: ttl}, function (err, user) {
console.log(user);
// Second time fetches user from memoryCache, since it's highest priority.
// If the data expires in the memory cache, the next fetch would pull it from
// the 'someOtherCache', and set the data in memory again.
multiCache.wrap(key2, function (cb) {
getUser(userId2, cb);
}, function (err, user) {
console.log(user);
});
});
// Multiple keys
multiCache.wrap('key1', 'key2', function (cb) {
getManyUser(['key1', 'key2'], cb);
}, {ttl: ttl}, function (err, users) {
console.log(users[0]);
console.log(users[1]);
});
```
### Specifying What to Cache in `wrap` Function
Both the `caching` and `multicaching` modules allow you to pass in a callback function named
`isCacheableValue` which is called by the `wrap` function with every value returned from cache or from the wrapped function.
This lets you specify which values should and should not be cached by `wrap`. If the function returns true, it will be
stored in cache. By default the caches cache everything except `undefined`.
NOTE: The `set` functions in `caching` and `multicaching` do *not* use `isCacheableValue`.
For example, if you don't want to cache `false` and `null`, you can pass in a function like this:
```javascript
var isCacheableValue = function(value) {
return value !== null && value !== false && value !== undefined;
};
```
Then pass it to `caching` like this:
```javascript
var memoryCache = cacheManager.caching({store: 'memory', isCacheableValue: isCacheableValue});
```
And pass it to `multicaching` like this:
```javascript
var multiCache = cacheManager.multiCaching([memoryCache, someOtherCache], {
isCacheableValue: isCacheableValue
});
```
### Development environment
You can disable real caching but still keep all of the callback functionality working by using the `none` store.
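For example, re-using the `getUser` helper from the Single Store example above (a minimal sketch, assuming the `none` store accepts the same `caching()` options shape as `'memory'`):
```javascript
var devCache = cacheManager.caching({store: 'none', ttl: 10});
// Nothing is ever stored, but get/set/wrap still invoke their callbacks,
// so application code runs unchanged while caching is effectively disabled.
devCache.wrap('user_' + userId, function(cb) {
    getUser(userId, cb); // always called, since no value is ever cached
}, function(err, user) {
    console.log(user);
});
```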
## Docs
To generate JSDOC 3 documentation:
make docs
## Tests
To run tests, first run:
npm install -d
Run the tests and JShint:
make
## Contribute
If you would like to contribute to the project, please fork it and send us a pull request. Please add tests
for any new features or bug fixes. Also run `make` before submitting the pull request.
## License
node-cache-manager is licensed under the MIT license.

132
node_modules/cache-manager/examples/example.js generated vendored Normal file

@@ -0,0 +1,132 @@
/*jshint unused:false*/
// Note: ttls are in seconds
var cacheManager = require('../');
var memoryCache = cacheManager.caching({store: 'memory', max: 100, ttl: 10});
var memoryCache2 = cacheManager.caching({store: 'memory', max: 100, ttl: 100});
var ttl; //Can't use a different ttl per set() call with memory cache
//
// Basic usage
//
memoryCache.set('foo', 'bar', function(err) {
if (err) { throw err; }
memoryCache.get('foo', function(err, result) {
console.log(result);
// >> 'bar'
memoryCache.del('foo', function(err) {
if (err) {
console.log(err);
}
});
});
});
function getUser(id, cb) {
setTimeout(function() {
console.log("Fetching user from slow database.");
cb(null, {id: id, name: 'Bob'});
}, 100);
}
var userId = 123;
var key = 'user_' + userId;
//
// wrap() example
//
// Instead of manually managing the cache like this:
function getCachedUserManually(id, cb) {
memoryCache.get(id, function(err, result) {
if (err) { return cb(err); }
if (result) {
return cb(null, result);
}
getUser(id, function(err, result) {
if (err) { return cb(err); }
memoryCache.set(id, result);
cb(null, result);
});
});
}
// ... you can instead use the `wrap` function:
function getCachedUser(id, cb) {
memoryCache.wrap(id, function(cacheCallback) {
getUser(id, cacheCallback);
}, cb);
}
getCachedUser(userId, function(err, user) {
// First time fetches the user from the (fake) database:
console.log(user);
getCachedUser(userId, function(err, user) {
// Second time fetches from cache.
console.log(user);
});
});
// Outputs:
// Returning user from slow database.
// { id: 123, name: 'Bob' }
// { id: 123, name: 'Bob' }
// Same as above, but written differently:
memoryCache.wrap(key, function(cb) {
getUser(userId, cb);
}, function(err, user) {
console.log(user);
// Second time fetches user from memoryCache
memoryCache.wrap(key, function(cb) {
getUser(userId, cb);
}, function(err, user) {
console.log(user);
});
});
//
// multi-cache example
//
var multiCache = cacheManager.multiCaching([memoryCache, memoryCache2]);
var userId2 = 456;
var key2 = 'user_' + userId;
var ttl2; //Can't use a different ttl per set() call with memory cache
multiCache.wrap(key2, function(cb) {
getUser(userId2, cb);
}, function(err, user) {
console.log(user);
// Second time fetches user from memoryCache, since it's highest priority.
// If the data expires in memoryCache, the next fetch would pull it from
// the memoryCache2, and set the data in memoryCache.
multiCache.wrap(key2, function(cb) {
getUser(userId2, cb);
}, function(err, user) {
console.log(user);
});
// Sets in all caches.
multiCache.set('foo2', 'bar2', {ttl: ttl2}, function(err) {
if (err) { throw err; }
// Fetches from highest priority cache that has the key.
multiCache.get('foo2', function(err, result) {
console.log(result);
// >> 'bar2'
// Delete from all caches
multiCache.del('foo2', function(err) {
if (err) {
console.log(err);
}
process.exit();
});
});
});
});

147
node_modules/cache-manager/examples/redis_example/example.js generated vendored Normal file

@@ -0,0 +1,147 @@
// Setup:
// npm install redis@0.12.1 sol-redis-pool@0.2.0
// node examples/redis_example/example.js
var util = require('util');
var assert = require('assert');
var cacheManager = require('../../');
var redisStore = require('./redis_store');
// Note: ttl is in seconds
var redisCache = cacheManager.caching({store: redisStore, db: 0, ttl: 100});
var ttl = 60;
console.log("set/get/del example:");
redisCache.set('foo', 'bar', {ttl: ttl}, function(err) {
if (err) { throw err; }
redisCache.get('foo', function(err, result) {
if (err) { throw err; }
console.log("result fetched from cache: " + result);
// >> 'bar'
redisCache.ttl('foo', function(err, result) {
if (err) { throw err; }
assert.ok(result > 59 && result < 61);
redisCache.del('foo', function(err) {
if (err) { throw err; }
});
});
});
});
// TTL defaults to what we passed into the caching function (100)
redisCache.set('foo-no-ttl', 'bar-no-ttl', function(err) {
if (err) { throw err; }
redisCache.get('foo-no-ttl', function(err, result) {
if (err) { throw err; }
console.log("result fetched from cache: " + result);
// >> 'bar'
redisCache.ttl('foo-no-ttl', function(err, result) {
if (err) { throw err; }
assert.ok(result > 99 && result < 101);
redisCache.del('foo-no-ttl', function(err) {
if (err) { throw err; }
});
});
});
});
// Calls Redis 'set' instead of 'setex'
redisCache.set('foo-zero-ttl', 'bar-zero-ttl', {ttl: 0}, function(err) {
if (err) { throw err; }
redisCache.get('foo-zero-ttl', function(err, result) {
if (err) { throw err; }
console.log("result fetched from cache: " + result);
// >> 'bar'
redisCache.ttl('foo-zero-ttl', function(err, result) {
if (err) { throw err; }
assert.ok(result < 0);
redisCache.del('foo-zero-ttl', function(err) {
if (err) { throw err; }
});
});
});
});
var userId = 123;
function createKey(id) {
return 'user_' + id;
}
function getUser(id, cb) {
setTimeout(function() {
console.log("\n\nReturning user from slow database.");
cb(null, {id: id, name: 'Bob'});
}, 100);
}
function getUserFromCache(id, cb) {
var key = createKey(id);
redisCache.wrap(key, function(cacheCb) {
getUser(userId, cacheCb);
}, {ttl: ttl}, cb);
}
getUserFromCache(userId, function(err, user) {
console.log(user);
// Second time fetches user from redisCache
getUserFromCache(userId, function(err, user) {
console.log("user from second cache request:");
console.log(user);
redisCache.keys(function(err, keys) {
console.log("keys: " + util.inspect(keys));
var key = createKey(userId);
redisCache.del(key, function(err) {
if (err) { throw err; }
});
});
});
});
// Outputs:
// { id: 123, name: 'Bob' }
// user from second cache request:
// { id: 123, name: 'Bob' }
// keys: [ 'user_123' ]
var redisCache2 = cacheManager.caching({store: redisStore, db: 1, ttl: 100});
var multiCache = cacheManager.multiCaching([redisCache, redisCache2]);
var userId2 = 456;
var key2 = 'user_' + userId;
var ttl2 = 50;
multiCache.wrap(key2, function(cb) {
getUser(userId2, cb);
}, {ttl: ttl2}, function(err, user) {
console.log("user: ", user);
// Second time fetches user from redisCache, since it's highest priority.
// If the data expires in the redisCache, the next fetch would pull it from
// redisCache2, and set the data in redisCache again.
multiCache.wrap(key2, function(cb) {
getUser(userId2, cb);
}, function(err, user) {
console.log("user, second fetch:", user);
});
multiCache.getAndPassUp(key2, function(err, result) {
console.log("\ngetAndPassUp result: ", result);
multiCache.del(key2, function(err) {
if (err) { throw err; }
process.exit();
});
});
});

124
node_modules/cache-manager/examples/redis_example/redis_store.js generated vendored Normal file

@@ -0,0 +1,124 @@
/**
* This is a very basic example of how you can implement your own Redis-based
* cache store with connection pooling.
*/
var RedisPool = require('sol-redis-pool');
function redisStore(args) {
args = args || {};
var self = {};
var ttlDefault = args.ttl;
self.name = 'redis';
var redisOptions = {
host: args.host || '127.0.0.1',
port: args.port || 6379
};
var pool = new RedisPool(redisOptions, {});
function connect(cb) {
pool.acquire(function(err, conn) {
if (err) {
pool.release(conn);
return cb(err);
}
if (args.db || args.db === 0) {
conn.select(args.db);
}
cb(null, conn);
});
}
function handleResponse(conn, cb, opts) {
opts = opts || {};
return function(err, result) {
pool.release(conn);
if (err) { return cb(err); }
if (opts.parse) {
result = JSON.parse(result);
}
cb(null, result);
};
}
self.get = function(key, options, cb) {
if (typeof options === 'function') {
cb = options;
}
connect(function(err, conn) {
if (err) { return cb(err); }
conn.get(key, handleResponse(conn, cb, {parse: true}));
});
};
self.set = function(key, value, options, cb) {
if (typeof options === 'function') {
cb = options;
options = {};
}
options = options || {};
var ttl = (options.ttl || options.ttl === 0) ? options.ttl : ttlDefault;
connect(function(err, conn) {
if (err) { return cb(err); }
var val = JSON.stringify(value);
if (ttl) {
conn.setex(key, ttl, val, handleResponse(conn, cb));
} else {
conn.set(key, val, handleResponse(conn, cb));
}
});
};
self.del = function(key, options, cb) {
if (typeof options === 'function') {
cb = options;
}
connect(function(err, conn) {
if (err) { return cb(err); }
conn.del(key, handleResponse(conn, cb));
});
};
self.ttl = function(key, cb) {
connect(function(err, conn) {
if (err) { return cb(err); }
conn.ttl(key, handleResponse(conn, cb));
});
};
self.keys = function(pattern, cb) {
if (typeof pattern === 'function') {
cb = pattern;
pattern = '*';
}
connect(function(err, conn) {
if (err) { return cb(err); }
conn.keys(pattern, handleResponse(conn, cb));
});
};
self.isCacheableValue = function(value) {
return value !== null && value !== undefined;
};
return self;
}
module.exports = {
create: function(args) {
return redisStore(args);
}
};

1
node_modules/cache-manager/index.js generated vendored Normal file

@@ -0,0 +1 @@
module.exports = require('./lib');

321
node_modules/cache-manager/lib/caching.js generated vendored Normal file

@@ -0,0 +1,321 @@
/** @module cacheManager/caching */
/*jshint maxcomplexity:16*/
var CallbackFiller = require('./callback_filler');
var utils = require('./utils');
var parseWrapArguments = utils.parseWrapArguments;
/**
* Generic caching interface that wraps any caching library with a compatible interface.
*
* @param {object} args
* @param {object|string} args.store - The store must at least have `set` and `get` functions.
* @param {function} [args.isCacheableValue] - A callback function which is called
* with every value returned from cache or from a wrapped function. This lets you specify
* which values should and should not be cached. If the function returns true, it will be
* stored in cache. By default it caches everything except undefined.
*/
var caching = function(args) {
args = args || {};
var self = {};
if (typeof args.store === 'object') {
if (args.store.create) {
self.store = args.store.create(args);
} else {
self.store = args.store;
}
} else {
var storeName = args.store || 'memory';
self.store = require('./stores/' + storeName).create(args);
}
// do we handle a cache error the same as a cache miss?
self.ignoreCacheErrors = args.ignoreCacheErrors || false;
var Promise = args.promiseDependency || global.Promise;
var callbackFiller = new CallbackFiller();
if (typeof args.isCacheableValue === 'function') {
self._isCacheableValue = args.isCacheableValue;
} else if (typeof self.store.isCacheableValue === 'function') {
self._isCacheableValue = self.store.isCacheableValue.bind(self.store);
} else {
self._isCacheableValue = function(value) {
return value !== undefined;
};
}
function wrapPromise(key, promise, options) {
return new Promise(function(resolve, reject) {
self.wrap(key, function(cb) {
Promise.resolve()
.then(promise)
.then(function(result) {
cb(null, result);
return null;
})
.catch(cb);
}, options, function(err, result) {
if (err) {
return reject(err);
}
resolve(result);
});
});
}
/**
* Wraps a function in cache. I.e., the first time the function is run,
* its results are stored in cache so subsequent calls retrieve from cache
* instead of calling the function.
* You can pass any number of keys as long as the wrapped function returns
* an array with the same number of values and in the same order.
*
* @function
* @name wrap
*
* @param {string} key - The cache key to use in cache operations. Can be one or many.
* @param {function} work - The function to wrap
* @param {object} [options] - options passed to `set` function
* @param {function} cb
*
* @example
* var key = 'user_' + userId;
* cache.wrap(key, function(cb) {
* User.get(userId, cb);
* }, function(err, user) {
* console.log(user);
* });
*
* // Multiple keys
* var key = 'user_' + userId;
* var key2 = 'user_' + userId2;
* cache.wrap(key, key2, function(cb) {
* User.getMany([userId, userId2], cb);
* }, function(err, users) {
* console.log(users[0]);
* console.log(users[1]);
* });
*/
self.wrap = function() {
var parsedArgs = parseWrapArguments(Array.prototype.slice.apply(arguments));
var keys = parsedArgs.keys;
var work = parsedArgs.work;
var options = parsedArgs.options;
var cb = parsedArgs.cb;
if (!cb) {
keys.push(work);
keys.push(options);
return wrapPromise.apply(this, keys);
}
if (keys.length > 1) {
/**
* Handle more than 1 key
*/
return wrapMultiple(keys, work, options, cb);
}
var key = keys[0];
var hasKey = callbackFiller.has(key);
callbackFiller.add(key, {cb: cb});
if (hasKey) { return; }
self.store.get(key, options, function(err, result) {
if (err && (!self.ignoreCacheErrors)) {
callbackFiller.fill(key, err);
} else if (self._isCacheableValue(result)) {
callbackFiller.fill(key, null, result);
} else {
work(function(err, data) {
if (err) {
callbackFiller.fill(key, err);
return;
}
if (!self._isCacheableValue(data)) {
callbackFiller.fill(key, null, data);
return;
}
if (options && typeof options.ttl === 'function') {
options.ttl = options.ttl(data);
}
self.store.set(key, data, options, function(err) {
if (err && (!self.ignoreCacheErrors)) {
callbackFiller.fill(key, err);
} else {
callbackFiller.fill(key, null, data);
}
});
});
}
});
};
function wrapMultiple(keys, work, options, cb) {
/**
* We create a unique key for the multiple keys
* by concatenating them
*/
var combinedKey = keys.reduce(function(acc, k) {
return acc + k;
}, '');
var hasKey = callbackFiller.has(combinedKey);
callbackFiller.add(combinedKey, {cb: cb});
if (hasKey) { return; }
keys.push(options);
keys.push(onResult);
self.store.mget.apply(self.store, keys);
function onResult(err, result) {
if (err && (!self.ignoreCacheErrors)) {
return callbackFiller.fill(combinedKey, err);
}
/**
* If all the values returned are cacheable we don't need
* to call our "work" method and the values returned by the cache
* are valid. If one or more of the values is not cacheable
* the cache result is not valid.
*/
var cacheOK = Array.isArray(result) && result.filter(function(_result) {
return self._isCacheableValue(_result);
}).length === result.length;
if (cacheOK) {
return callbackFiller.fill(combinedKey, null, result);
}
return work(function(err, data) {
if (err) {
return done(err);
}
var _args = [];
data.forEach(function(value, i) {
/**
* Add the {key, value} pair to the args
* array that we will send to mset()
*/
if (self._isCacheableValue(value)) {
_args.push(keys[i]);
_args.push(value);
}
});
// If no key|value, exit
if (_args.length === 0) {
return done(null);
}
if (options && typeof options.ttl === 'function') {
options.ttl = options.ttl(data);
}
_args.push(options);
_args.push(done);
self.store.mset.apply(self.store, _args);
function done(err) {
if (err && (!self.ignoreCacheErrors)) {
callbackFiller.fill(combinedKey, err);
} else {
callbackFiller.fill(combinedKey, null, data);
}
}
});
}
}
/**
* Binds to the underlying store's `get` function.
* @function
* @name get
*/
self.get = self.store.get.bind(self.store);
/**
* Get multiple keys at once.
* Binds to the underlying store's `mget` function.
* @function
* @name mget
*/
if (typeof self.store.mget === 'function') {
self.mget = self.store.mget.bind(self.store);
}
/**
* Binds to the underlying store's `set` function.
* @function
* @name set
*/
self.set = self.store.set.bind(self.store);
/**
* Set multiple keys at once.
* It accepts any number of {key, value} pairs.
* Binds to the underlying store's `mset` function.
* @function
* @name mset
*/
if (typeof self.store.mset === 'function') {
self.mset = self.store.mset.bind(self.store);
}
/**
* Binds to the underlying store's `del` function if it exists.
* @function
* @name del
*/
if (typeof self.store.del === 'function') {
self.del = self.store.del.bind(self.store);
}
/**
* Binds to the underlying store's `setex` function if it exists.
* @function
* @name setex
*/
if (typeof self.store.setex === 'function') {
self.setex = self.store.setex.bind(self.store);
}
/**
* Binds to the underlying store's `reset` function if it exists.
* @function
* @name reset
*/
if (typeof self.store.reset === 'function') {
self.reset = self.store.reset.bind(self.store);
}
/**
* Binds to the underlying store's `keys` function if it exists.
* @function
* @name keys
*/
if (typeof self.store.keys === 'function') {
self.keys = self.store.keys.bind(self.store);
}
/**
* Binds to the underlying store's `ttl` function if it exists.
* @function
* @name ttl
*/
if (typeof self.store.ttl === 'function') {
self.ttl = self.store.ttl.bind(self.store);
}
return self;
};
module.exports = caching;

28
node_modules/cache-manager/lib/callback_filler.js generated vendored Normal file

@@ -0,0 +1,28 @@
function CallbackFiller() {
this.queues = {};
}
CallbackFiller.prototype.fill = function(key, err, data) {
var waiting = this.queues[key];
delete this.queues[key];
if (waiting && waiting.length) {
waiting.forEach(function(task) {
(task.cb)(err, data);
});
}
};
CallbackFiller.prototype.has = function(key) {
return this.queues[key];
};
CallbackFiller.prototype.add = function(key, funcObj) {
if (this.queues[key]) {
this.queues[key].push(funcObj);
} else {
this.queues[key] = [funcObj];
}
};
module.exports = CallbackFiller;

7
node_modules/cache-manager/lib/index.js generated vendored Normal file

@@ -0,0 +1,7 @@
/** @namespace cacheManager */
var cacheManager = {
caching: require('./caching'),
multiCaching: require('./multi_caching')
};
module.exports = cacheManager;

625
node_modules/cache-manager/lib/multi_caching.js generated vendored Normal file

@@ -0,0 +1,625 @@
/** @module cacheManager/multiCaching */
var async = require('async');
var CallbackFiller = require('./callback_filler');
var utils = require('./utils');
var isObject = utils.isObject;
var parseWrapArguments = utils.parseWrapArguments;
/**
* Module that lets you specify a hierarchy of caches.
*
* @param {array} caches - Array of caching objects.
* @param {object} [options]
* @param {function} [options.isCacheableValue] - A callback function which is called
* with every value returned from cache or from a wrapped function. This lets you specify
* which values should and should not be cached. If the function returns true, it will be
* stored in cache. By default it caches everything except undefined.
*
* If an underlying cache specifies its own isCacheableValue function, that function will
* be used instead of the multiCaching's _isCacheableValue function.
*/
var multiCaching = function(caches, options) {
var self = {};
options = options || {};
var Promise = options.promiseDependency || global.Promise;
if (!Array.isArray(caches)) {
throw new Error('multiCaching requires an array of caches');
}
var callbackFiller = new CallbackFiller();
if (typeof options.isCacheableValue === 'function') {
self._isCacheableValue = options.isCacheableValue;
} else {
self._isCacheableValue = function(value) {
return value !== undefined;
};
}
/**
* If the underlying cache specifies its own isCacheableValue function (such
* as how node-cache-manager-redis does), use that function, otherwise use
* self._isCacheableValue function.
*/
function getIsCacheableValueFunction(cache) {
if (cache.store && typeof cache.store.isCacheableValue === 'function') {
return cache.store.isCacheableValue.bind(cache.store);
} else {
return self._isCacheableValue;
}
}
function getFromHighestPriorityCachePromise() {
var args = Array.prototype.slice.apply(arguments).filter(function(v) {
return typeof v !== 'undefined';
});
return new Promise(function(resolve, reject) {
var cb = function(err, result) {
if (err) {
return reject(err);
}
resolve(result);
};
args.push(cb);
getFromHighestPriorityCache.apply(null, args);
});
}
function getFromHighestPriorityCache() {
var args = Array.prototype.slice.apply(arguments).filter(function(v) {
return typeof v !== 'undefined';
});
var cb;
var options = {};
if (typeof args[args.length - 1] === 'function') {
cb = args.pop();
}
if (!cb) {
return getFromHighestPriorityCachePromise.apply(this, args);
}
if (isObject(args[args.length - 1])) {
options = args.pop();
}
/**
* Keep a copy of the keys to retrieve
*/
var keys = Array.prototype.slice.apply(args);
var multi = keys.length > 1;
/**
* Then put back the options in the args Array
*/
args.push(options);
if (multi) {
/**
* Keep track of the keys left to fetch across the caches
*/
var keysToFetch = Array.prototype.slice.apply(keys);
/**
* Hash to save our multi keys result
*/
var mapResult = {};
}
var i = 0;
async.eachSeries(caches, function(cache, next) {
var callback = function(err, result) {
if (err) {
return next(err);
}
var _isCacheableValue = getIsCacheableValueFunction(cache);
if (multi) {
addResultToMap(result, _isCacheableValue);
if (keysToFetch.length === 0 || i === caches.length - 1) {
// Return an Array with the values merged from all the caches
return cb(null, keys.map(function(k) {
return mapResult[k] || undefined;
}), i);
}
} else if (_isCacheableValue(result)) {
// break out of async loop.
return cb(err, result, i);
}
i += 1;
next();
};
if (multi) {
if (typeof cache.store.mget !== 'function') {
/**
* Silently fail for stores that don't support mget()
*/
return callback(null, []);
}
var _args = Array.prototype.slice.apply(keysToFetch);
_args.push(options);
_args.push(callback);
cache.store.mget.apply(cache.store, _args);
} else {
cache.store.get(args[0], options, callback);
}
}, function(err, result) {
return cb(err, result);
});
function addResultToMap(result, isCacheable) {
var key;
var diff = 0;
/**
* We loop through the result and if the value
* is cacheable we add it to the mapResult hash
* and remove the key to fetch from the "keysToFetch" array
*/
result.forEach(function(res, i) {
if (isCacheable(res)) {
key = keysToFetch[i - diff];
// Add the result to our map
mapResult[key] = res;
// delete key from our keysToFetch array
keysToFetch.splice(i - diff, 1);
diff += 1;
}
});
}
}
function setInMultipleCachesPromise() {
var args = Array.prototype.slice.apply(arguments);
return new Promise(function(resolve, reject) {
var cb = function(err, result) {
if (err) {
return reject(err);
}
resolve(result);
};
args.push(cb);
setInMultipleCaches.apply(null, args);
});
}
function setInMultipleCaches() {
var args = Array.prototype.slice.apply(arguments);
var _caches = Array.isArray(args[0]) ? args.shift() : caches;
var cb;
var options = {};
if (typeof args[args.length - 1] === 'function') {
cb = args.pop();
}
if (!cb) {
return setInMultipleCachesPromise.apply(this, args);
}
if (args.length % 2 > 0 && isObject(args[args.length - 1])) {
options = args.pop();
}
var length = args.length;
var multi = length > 2;
var i;
async.each(_caches, function(cache, next) {
var _isCacheableValue = getIsCacheableValueFunction(cache);
var keysValues = Array.prototype.slice.apply(args);
/**
* We filter out the keys *not* cacheable
*/
for (i = 0; i < length; i += 2) {
if (!_isCacheableValue(keysValues[i + 1])) {
keysValues.splice(i, 2);
}
}
if (keysValues.length === 0) {
return next();
}
var cacheOptions = options;
if (typeof options.ttl === 'function') {
/**
* Dynamically set the ttl by context depending on the store
*/
cacheOptions = {};
cacheOptions.ttl = options.ttl(keysValues, cache.store.name);
}
if (multi) {
if (typeof cache.store.mset !== 'function') {
/**
* Silently fail for stores that don't support mset()
*/
return next();
}
keysValues.push(cacheOptions);
keysValues.push(next);
cache.store.mset.apply(cache.store, keysValues);
} else {
cache.store.set(keysValues[0], keysValues[1], cacheOptions, next);
}
}, function(err, result) {
cb(err, result);
});
}
function getAndPassUpPromise(key) {
return new Promise(function(resolve, reject) {
self.getAndPassUp(key, function(err, result) {
if (err) {
return reject(err);
}
resolve(result);
});
});
}
/**
* Looks for an item in cache tiers.
* When a key is found in a lower cache, all higher levels are updated.
*
* @param {string} key
* @param {function} cb
*/
self.getAndPassUp = function(key, cb) {
if (!cb) {
return getAndPassUpPromise(key);
}
getFromHighestPriorityCache(key, function(err, result, index) {
if (err) {
return cb(err);
}
if (index) {
var cachesToUpdate = caches.slice(0, index);
async.each(cachesToUpdate, function(cache, next) {
var _isCacheableValue = getIsCacheableValueFunction(cache);
if (_isCacheableValue(result)) {
// We rely on the cache module's default TTL
cache.set(key, result, next);
}
});
}
return cb(err, result);
});
};
function wrapPromise(key, promise, options) {
return new Promise(function(resolve, reject) {
self.wrap(key, function(cb) {
Promise.resolve()
.then(promise)
.then(function(result) {
cb(null, result);
})
.catch(cb);
}, options, function(err, result) {
if (err) {
return reject(err);
}
resolve(result);
});
});
}
/**
* Wraps a function in one or more caches.
* Has same API as regular caching module.
*
* If a key doesn't exist in any cache, it gets set in all caches.
* If a key exists in a high-priority (e.g., first) cache, it gets returned immediately
* without getting set in other lower-priority caches.
* If a key doesn't exist in a higher-priority cache but exists in a lower-priority
* cache, it gets set in all higher-priority caches.
* You can pass any number of keys as long as the wrapped function returns
* an array with the same number of values and in the same order.
*
* @function
* @name wrap
*
* @param {string} key - The cache key to use in cache operations. Can be one or many.
* @param {function} work - The function to wrap
* @param {object} [options] - options passed to `set` function
* @param {function} cb
*/
self.wrap = function() {
var parsedArgs = parseWrapArguments(Array.prototype.slice.apply(arguments));
var keys = parsedArgs.keys;
var work = parsedArgs.work;
var options = parsedArgs.options;
var cb = parsedArgs.cb;
if (!cb) {
keys.push(work);
keys.push(options);
return wrapPromise.apply(this, keys);
}
if (keys.length > 1) {
/**
* Handle more than 1 key
*/
return wrapMultiple(keys, work, options, cb);
}
var key = keys[0];
var hasKey = callbackFiller.has(key);
callbackFiller.add(key, {cb: cb});
if (hasKey) { return; }
getFromHighestPriorityCache(key, function(err, result, index) {
if (err) {
return callbackFiller.fill(key, err);
} else if (self._isCacheableValue(result)) {
var cachesToUpdate = caches.slice(0, index);
var args = [cachesToUpdate, key, result, options, function(err) {
callbackFiller.fill(key, err, result);
}];
setInMultipleCaches.apply(null, args);
} else {
work(function(err, data) {
if (err) {
return callbackFiller.fill(key, err);
}
if (!self._isCacheableValue(data)) {
return callbackFiller.fill(key, err, data);
}
var args = [caches, key, data, options, function(err) {
callbackFiller.fill(key, err, data);
}];
setInMultipleCaches.apply(null, args);
});
}
});
};
function wrapMultiple(keys, work, options, cb) {
/**
* We create a unique key for the multiple keys
* by concatenating them
*/
var combinedKey = keys.reduce(function(acc, k) {
return acc + k;
}, '');
var hasKey = callbackFiller.has(combinedKey);
callbackFiller.add(combinedKey, {cb: cb});
if (hasKey) { return; }
keys.push(options);
keys.push(onResult);
/**
* Get from all the caches. If multiple keys have been passed,
* we'll go through all the caches and merge the result
*/
getFromHighestPriorityCache.apply(this, keys);
function onResult(err, result, index) {
if (err) {
return done(err);
}
/**
* If all the values returned are cacheable we don't need
* to call our "work" method and the values returned by the cache
* are valid. If one or more of the values is not cacheable
* the cache result is not valid.
*/
var cacheOK = result.filter(function(_result) {
return self._isCacheableValue(_result);
}).length === result.length;
if (!cacheOK) {
/**
* We need to fetch the data first
*/
return work(workCallback);
}
var cachesToUpdate = caches.slice(0, index);
/**
* Prepare arguments to set the values in
* higher priority caches
*/
var _args = [cachesToUpdate];
/**
* Add the {key, value} pair
*/
result.forEach(function(value, i) {
_args.push(keys[i]);
_args.push(value);
});
/**
* Add options and final callback
*/
_args.push(options);
_args.push(function(err) {
done(err, result);
});
return setInMultipleCaches.apply(null, _args);
/**
* Wrapped function callback
*/
function workCallback(err, data) {
if (err) {
return done(err);
}
/**
* Prepare arguments for "setInMultipleCaches"
*/
var _args;
_args = [];
data.forEach(function(value, i) {
/**
* Add the {key, value} pair to the args
* array that we will send to mset()
*/
if (self._isCacheableValue(value)) {
_args.push(keys[i]);
_args.push(value);
}
});
// If no key,value --> exit
if (_args.length === 0) {
return done(null);
}
/**
* Add options and final callback
*/
_args.push(options);
_args.push(function(err) {
done(err, data);
});
setInMultipleCaches.apply(null, _args);
}
/**
* Final callback
*/
function done(err, data) {
callbackFiller.fill(combinedKey, err, data);
}
}
}
/**
* Set value in all caches
*
* @function
* @name set
*
* @param {string} key
* @param {*} value
* @param {object} [options] to pass to underlying set function.
* @param {function} [cb]
*/
self.set = setInMultipleCaches;
/**
* Set multiple values in all caches
* Accepts an unlimited number of {key, value} pairs
*
* @function
* @name mset
*
* @param {string} key
* @param {*} value
* @param {string} [key2]
* @param {*} [value2]
* @param {object} [options] to pass to underlying set function.
* @param {function} [cb]
*/
self.mset = setInMultipleCaches;
/**
* Get value from highest level cache that has stored it.
*
* @function
* @name get
*
* @param {string} key
* @param {object} [options] to pass to underlying get function.
* @param {function} cb
*/
self.get = getFromHighestPriorityCache;
/**
* Get multiple values from the highest-level cache that has stored them.
* If some values are not found, the next highest cache is used
* until either all keys are found or all caches have been fetched.
* Accepts an unlimited number of keys.
*
* @function
* @name mget
*
* @param {string} key key to get (any number)
* @param {object} [options] to pass to underlying get function.
* @param {function} cb optional callback
*/
self.mget = getFromHighestPriorityCache;
/**
* Delete value from all caches.
*
* @function
* @name del
*
* @param {string} key
* @param {object} [options] to pass to underlying del function.
* @param {function} cb
*/
self.del = function() {
var args = Array.prototype.slice.apply(arguments);
var cb;
var options = {};
if (typeof args[args.length - 1] === 'function') {
cb = args.pop();
}
if (isObject(args[args.length - 1])) {
options = args.pop();
}
args.push(options);
async.each(caches, function(cache, next) {
var _args = Array.prototype.slice.apply(args);
_args.push(next);
cache.store.del.apply(cache.store, _args);
}, cb);
};
/**
* Reset all caches.
*
* @function
* @name reset
*
* @param {function} cb
*/
self.reset = function(cb) {
async.each(caches, function(cache, next) {
cache.store.reset(next);
}, cb);
};
return self;
};
module.exports = multiCaching;
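
The JSDoc blocks above describe the whole multi-cache surface (set/mset, get/mget, del, reset), and the code before them is the wrap machinery. The following is a minimal callback-style sketch of how those methods fit together; it assumes the package's usual `caching()` and `multiCaching()` factories from index.js (not part of this file), and `fetchUserFromDb` is a made-up data source.

```javascript
var cacheManager = require('cache-manager');

// Two tiers: a small, short-lived memory cache in front of a larger one.
var fastCache = cacheManager.caching({store: 'memory', max: 100, ttl: 10});
var slowCache = cacheManager.caching({store: 'memory', max: 1000, ttl: 60});
var multiCache = cacheManager.multiCaching([fastCache, slowCache]);

// wrap(): serve from the highest-priority cache that has the key and only
// call the work function on a miss; cacheable results are stored back
// in the caches (this is the code path implemented above).
multiCache.wrap('user:1', function(cb) {
    fetchUserFromDb(1, cb); // hypothetical data source
}, {ttl: 30}, function(err, user) {
    console.log(user);
});

// set() writes to every cache; mget() reads each key from the highest
// tier that has it; del() removes the key from every cache.
multiCache.set('user:2', {name: 'Ada'}, function(err) {
    multiCache.mget('user:1', 'user:2', function(err, results) {
        console.log(results);
        multiCache.del('user:2', function() {});
    });
});
```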

172
node_modules/cache-manager/lib/stores/memory.js generated vendored Normal file
View File

@ -0,0 +1,172 @@
var Lru = require("lru-cache");
var utils = require('../utils');
var isObject = utils.isObject;
var memoryStore = function(args) {
args = args || {};
var self = {};
self.name = 'memory';
var Promise = args.promiseDependency || global.Promise;
self.usePromises = (typeof Promise === 'undefined' || args.noPromises) ? false : true;
var ttl = args.ttl;
var lruOpts = {
max: args.max || 500,
maxAge: (ttl || ttl === 0) ? ttl * 1000 : null,
dispose: args.dispose,
length: args.length,
stale: args.stale
};
var lruCache = new Lru(lruOpts);
var setMultipleKeys = function setMultipleKeys(keysValues, maxAge) {
var length = keysValues.length;
var values = [];
for (var i = 0; i < length; i += 2) {
lruCache.set(keysValues[i], keysValues[i + 1], maxAge);
values.push(keysValues[i + 1]);
}
return values;
};
self.set = function(key, value, options, cb) {
if (typeof options === 'function') {
cb = options;
options = {};
}
options = options || {};
var maxAge = (options.ttl || options.ttl === 0) ? options.ttl * 1000 : lruOpts.maxAge;
lruCache.set(key, value, maxAge);
if (cb) {
process.nextTick(cb.bind(null, null));
} else if (self.usePromises) {
return Promise.resolve(value);
}
};
self.mset = function() {
var args = Array.prototype.slice.apply(arguments);
var cb;
var options = {};
if (typeof args[args.length - 1] === 'function') {
cb = args.pop();
}
if (args.length % 2 > 0 && isObject(args[args.length - 1])) {
options = args.pop();
}
var maxAge = (options.ttl || options.ttl === 0) ? options.ttl * 1000 : lruOpts.maxAge;
var values = setMultipleKeys(args, maxAge);
if (cb) {
process.nextTick(cb.bind(null, null));
} else if (self.usePromises) {
return Promise.resolve(values);
}
};
self.get = function(key, options, cb) {
if (typeof options === 'function') {
cb = options;
}
var value = lruCache.get(key);
if (cb) {
process.nextTick(cb.bind(null, null, value));
} else if (self.usePromises) {
return Promise.resolve(value);
} else {
return value;
}
};
self.mget = function() {
var args = Array.prototype.slice.apply(arguments);
var cb;
var options = {};
if (typeof args[args.length - 1] === 'function') {
cb = args.pop();
}
if (isObject(args[args.length - 1])) {
options = args.pop();
}
var values = args.map(function(key) {
return lruCache.get(key);
});
if (cb) {
process.nextTick(cb.bind(null, null, values));
} else if (self.usePromises) {
return Promise.resolve(values);
} else {
return values;
}
};
self.del = function() {
var args = Array.prototype.slice.apply(arguments);
var cb;
var options = {};
if (typeof args[args.length - 1] === 'function') {
cb = args.pop();
}
if (isObject(args[args.length - 1])) {
options = args.pop();
}
if (Array.isArray(args[0])) {
args = args[0];
}
args.forEach(function(key) {
lruCache.del(key);
});
if (cb) {
process.nextTick(cb.bind(null, null));
} else if (self.usePromises) {
return Promise.resolve();
}
};
self.reset = function(cb) {
lruCache.reset();
if (cb) {
process.nextTick(cb.bind(null, null));
} else if (self.usePromises) {
return Promise.resolve();
}
};
self.keys = function(cb) {
var keys = lruCache.keys();
if (cb) {
process.nextTick(cb.bind(null, null, keys));
} else if (self.usePromises) {
return Promise.resolve(keys);
} else {
return keys;
}
};
return self;
};
var methods = {
create: function(args) {
return memoryStore(args);
}
};
module.exports = methods;
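
A short sketch of the memory store's dual interface, assuming it is required straight from the package's lib/stores path the way the unit tests further down do: with a callback the result is delivered on process.nextTick(), and without one a Promise is returned (unless `noPromises` is set).

```javascript
var memoryStore = require('cache-manager/lib/stores/memory');

// ttl is given in seconds; it is multiplied by 1000 before being
// handed to lru-cache as maxAge.
var cache = memoryStore.create({max: 50, ttl: 10});

// Callback style.
cache.set('answer', 42, function(err) {
    cache.get('answer', function(err, value) {
        console.log(value); // 42
    });
});

// Promise style: omit the callback and a Promise is returned instead.
cache.mset('a', 1, 'b', 2)
    .then(function() { return cache.mget('a', 'b'); })
    .then(function(values) {
        console.log(values); // [1, 2]
    });
```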

142
node_modules/cache-manager/lib/stores/none.js generated vendored Normal file
View File

@ -0,0 +1,142 @@
var utils = require('../utils');
var isObject = utils.isObject;
/**
 * Store that does nothing.
 * Can be used in development environments.
*/
var noneStore = function(args) {
args = args || {};
var Promise = args.promiseDependency || global.Promise;
var self = {};
self.name = 'none';
self.usePromises = (typeof Promise === 'undefined' || args.noPromises) ? false : true;
self.set = function(key, value, options, cb) {
if (typeof options === 'function') {
cb = options;
options = {};
}
if (cb) {
process.nextTick(cb.bind(null, null));
} else if (self.usePromises) {
return Promise.resolve(value);
}
};
self.mset = function() {
var args = Array.prototype.slice.apply(arguments);
var cb;
var options = {};
if (typeof args[args.length - 1] === 'function') {
cb = args.pop();
}
if (args.length % 2 > 0 && isObject(args[args.length - 1])) {
options = args.pop();
}
if (cb) {
process.nextTick(cb.bind(null, null));
} else if (self.usePromises) {
var values = [];
for (var i = 0; i < args.length; i += 2) {
values.push(args[i + 1]);
}
return Promise.resolve(values);
}
};
self.get = function(key, options, cb) {
var value;
if (typeof options === 'function') {
cb = options;
}
if (cb) {
process.nextTick(cb.bind(null, null, value));
} else if (self.usePromises) {
return Promise.resolve(value);
} else {
return value;
}
};
self.mget = function() {
var args = Array.prototype.slice.apply(arguments);
var cb;
var options = {};
if (typeof args[args.length - 1] === 'function') {
cb = args.pop();
}
if (isObject(args[args.length - 1])) {
options = args.pop();
}
var values = args.map(function() {
return;
});
if (cb) {
process.nextTick(cb.bind(null, null, values));
} else if (self.usePromises) {
return Promise.resolve(values);
} else {
return values;
}
};
self.del = function() {
var args = Array.prototype.slice.apply(arguments);
var cb;
var options = {};
if (typeof args[args.length - 1] === 'function') {
cb = args.pop();
}
if (isObject(args[args.length - 1])) {
options = args.pop();
}
if (cb) {
process.nextTick(cb.bind(null, null));
} else if (self.usePromises) {
return Promise.resolve();
}
};
self.reset = function(cb) {
if (cb) {
process.nextTick(cb.bind(null, null));
} else if (self.usePromises) {
return Promise.resolve();
}
};
self.keys = function(cb) {
var keys = [];
if (cb) {
process.nextTick(cb.bind(null, null, keys));
} else if (self.usePromises) {
return Promise.resolve(keys);
} else {
return keys;
}
};
return self;
};
var methods = {
create: function(args) {
return noneStore(args);
}
};
module.exports = methods;
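
A matching sketch for the "none" store, again loading it directly from lib/stores as the unit tests below do: every operation succeeds, but nothing is ever kept, which is what makes it useful for switching caching off in development.

```javascript
var noneStore = require('cache-manager/lib/stores/none');

var cache = noneStore.create();

cache.set('key', 'value')
    .then(function() { return cache.get('key'); })
    .then(function(value) {
        console.log(value); // undefined -- the value was never stored
    });
```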

43
node_modules/cache-manager/lib/utils.js generated vendored Normal file
View File

@ -0,0 +1,43 @@
var isObject = function isObject(value) {
return value instanceof Object && value.constructor === Object;
};
var parseWrapArguments = function parseWrapArguments(args) {
var length = args.length;
var work;
var options = {};
var cb;
/**
* As we can receive an unlimited number of keys
* we find the index of the first function which is
* the "work" handler to fetch the keys.
*/
for (var i = 0; i < length; i += 1) {
if (typeof args[i] === 'function') {
if (typeof args[i + 2] === 'function') {
cb = args.pop();
} else if (typeof args[i + 1] === 'function') {
cb = args.pop();
}
if (isObject(args[i + 1])) {
options = args.pop();
}
work = args.pop();
break;
}
}
return {
keys: args,
work: work,
options: options,
cb: cb
};
};
module.exports = {
isObject: isObject,
parseWrapArguments: parseWrapArguments,
};
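
A hypothetical call showing what parseWrapArguments() extracts when wrap() is invoked with several keys, a work function, options and a final callback; the key names and handlers here are made up for illustration.

```javascript
var parseWrapArguments = require('cache-manager/lib/utils').parseWrapArguments;

function work(cb) { cb(null, 'value-1', 'value-2'); } // fetches the missing values
function done(err, v1, v2) { /* final callback */ }

// Keys come first, then the work function, then optional options and callback.
var parsed = parseWrapArguments(['key1', 'key2', work, {ttl: 5}, done]);

console.log(parsed.keys);          // ['key1', 'key2']
console.log(parsed.work === work); // true
console.log(parsed.options);       // {ttl: 5}
console.log(parsed.cb === done);   // true
```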

View File

@ -0,0 +1,4 @@
/node_modules
.nyc_output
nyc_output
coverage

View File

@ -0,0 +1,7 @@
sudo: false
language: node_js
node_js:
- '0.10'
- '0.12'
- '4'
- '5'

View File

@ -0,0 +1,14 @@
# Authors, sorted by whether or not they are me
Isaac Z. Schlueter <i@izs.me>
Brian Cottingham <spiffytech@gmail.com>
Carlos Brito Lage <carlos@carloslage.net>
Jesse Dailey <jesse.dailey@gmail.com>
Kevin O'Hara <kevinohara80@gmail.com>
Marco Rogers <marco.rogers@gmail.com>
Mark Cavage <mcavage@gmail.com>
Marko Mikulicic <marko.mikulicic@isti.cnr.it>
Nathan Rajlich <nathan@tootallnate.net>
Satheesh Natesan <snateshan@myspace-inc.com>
Trent Mick <trentm@gmail.com>
ashleybrener <ashley@starlogik.com>
n4kz <n4kz@n4kz.com>

View File

@ -0,0 +1,15 @@
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

View File

@ -0,0 +1,142 @@
# lru cache
A cache object that deletes the least-recently-used items.
[![Build Status](https://travis-ci.org/isaacs/node-lru-cache.svg?branch=master)](https://travis-ci.org/isaacs/node-lru-cache) [![Coverage Status](https://coveralls.io/repos/isaacs/node-lru-cache/badge.svg?service=github)](https://coveralls.io/github/isaacs/node-lru-cache)
## Usage:
```javascript
var LRU = require("lru-cache")
, options = { max: 500
, length: function (n, key) { return n * 2 + key.length }
, dispose: function (key, n) { n.close() }
, maxAge: 1000 * 60 * 60 }
, cache = LRU(options)
, otherCache = LRU(50) // sets just the max size
cache.set("key", "value")
cache.get("key") // "value"
// non-string keys ARE fully supported
var someObject = {}
cache.set(someObject, 'a value')
cache.set('[object Object]', 'a different value')
assert.equal(cache.get(someObject), 'a value')
cache.reset() // empty the cache
```
If you put more stuff in it, then items will fall out.
If you try to put an oversized thing in it, then it'll fall out right
away.
## Options
* `max` The maximum size of the cache, checked by applying the length
function to all values in the cache. Not setting this is kind of
silly, since that's the whole purpose of this lib, but it defaults
to `Infinity`.
* `maxAge` Maximum age in ms. Items are not pro-actively pruned out
as they age, but if you try to get an item that is too old, it'll
drop it and return undefined instead of giving it to you.
* `length` Function that is used to calculate the length of stored
items. If you're storing strings or buffers, then you probably want
to do something like `function(n, key){return n.length}`. The default is
`function(){return 1}`, which is fine if you want to store `max`
like-sized things. The item is passed as the first argument, and
the key is passed as the second argument.
* `dispose` Function that is called on items when they are dropped
from the cache. This can be handy if you want to close file
descriptors or do other cleanup tasks when items are no longer
accessible. Called with `key, value`. It's called *before*
actually removing the item from the internal cache, so if you want
to immediately put it back in, you'll have to do that in a
`nextTick` or `setTimeout` callback or it won't do anything.
* `stale` By default, if you set a `maxAge`, it'll only actually pull
stale items out of the cache when you `get(key)`. (That is, it's
not pre-emptively doing a `setTimeout` or anything.) If you set
`stale:true`, it'll return the stale value before deleting it. If
you don't set this, then it'll return `undefined` when you try to
get a stale entry, as if it had already been deleted. (See the sketch
after this list.)
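A short sketch of the `maxAge`/`stale` interaction described above:
```javascript
var LRU = require("lru-cache")
var cache = LRU({ max: 10, maxAge: 50, stale: true })

cache.set("foo", "bar")
setTimeout(function () {
  cache.has("foo") // false: the entry is older than maxAge
  cache.get("foo") // "bar": with stale:true the old value is returned once...
  cache.get("foo") // undefined: ...and the entry has now been dropped
}, 100)
```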
## API
* `set(key, value, maxAge)`
* `get(key) => value`
Both of these will update the "recently used"-ness of the key.
They do what you think. `maxAge` is optional and overrides the
cache `maxAge` option if provided.
If the key is not found, `get()` will return `undefined`.
The key and val can be any value.
* `peek(key)`
Returns the key value (or `undefined` if not found) without
updating the "recently used"-ness of the key.
(If you find yourself using this a lot, you *might* be using the
wrong sort of data structure, but there are some use cases where
it's handy.)
* `del(key)`
Deletes a key out of the cache.
* `reset()`
Clear the cache entirely, throwing away all values.
* `has(key)`
Check if a key is in the cache, without updating the recent-ness
or deleting it for being stale.
* `forEach(function(value,key,cache), [thisp])`
Just like `Array.prototype.forEach`. Iterates over all the keys
in the cache, in order of recent-ness. (Ie, more recently used
items are iterated over first.)
* `rforEach(function(value,key,cache), [thisp])`
The same as `cache.forEach(...)` but items are iterated over in
reverse order. (ie, less recently used items are iterated over
first.)
* `keys()`
Return an array of the keys in the cache.
* `values()`
Return an array of the values in the cache.
* `length()`
Return total length of objects in cache taking into account
`length` options function.
* `itemCount`
Return total quantity of objects currently in cache. Note that
`stale` (see options) items are returned as part of this item
count.
* `dump()`
Return an array of the cache entries ready for serialization and usage
with `destinationCache.load(arr)`.
* `load(cacheEntriesArray)`
Loads another cache entries array, obtained with `sourceCache.dump()`,
into the cache. The destination cache is reset before loading new entries
(see the sketch after this list).
* `prune()`
Manually iterates over the entire cache, proactively pruning old entries.
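A sketch of the `dump()`/`load()` round trip:
```javascript
var LRU = require("lru-cache")
var source = LRU({ max: 100, maxAge: 1000 * 60 })
source.set("a", "A")
source.set("b", "B")

// dump() returns plain {k, v, e} entries, most recently used first,
// so they can be serialized and replayed into another cache.
var entries = source.dump()

var copy = LRU({ max: 100 })
copy.load(entries) // copy is reset, then repopulated (expired entries are skipped)
copy.get("b")      // "B"
```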

View File

@ -0,0 +1,32 @@
var LRU = require('../')
var uuid = require("uuid")
var start = Date.now()
var lru = new LRU({
max: 64 * 1024 * 1024, // 64MB
length: function (v) {
return v.key.length + v.value.length
}
})
var total = 0
console.log("Iters\tAvg\tBytes")
for (var x = 0; x < 5000000; ++x) {
var d = new Date()
var key = uuid.v4()
lru.set(key, {
key: key,
value: uuid.v4()
})
total += new Date() - d
var length = lru.length
if (x % 10000 === 0) {
console.log(x + "\t" + (total / 10000) + "\t" + length)
total = 0
}
}

View File

@ -0,0 +1,468 @@
module.exports = LRUCache
// This will be a proper iterable 'Map' in engines that support it,
// or a fakey-fake PseudoMap in older versions.
var Map = require('pseudomap')
var util = require('util')
// A linked list to keep track of recently-used-ness
var Yallist = require('yallist')
// use symbols if possible, otherwise just _props
var symbols = {}
var hasSymbol = typeof Symbol === 'function'
var makeSymbol
if (hasSymbol) {
makeSymbol = function (key) {
return Symbol.for(key)
}
} else {
makeSymbol = function (key) {
return '_' + key
}
}
function priv (obj, key, val) {
var sym
if (symbols[key]) {
sym = symbols[key]
} else {
sym = makeSymbol(key)
symbols[key] = sym
}
if (arguments.length === 2) {
return obj[sym]
} else {
obj[sym] = val
return val
}
}
function naiveLength () { return 1 }
// lruList is a yallist where the head is the youngest
// item, and the tail is the oldest. the list contains the Hit
// objects as the entries.
// Each Hit object has a reference to its Yallist.Node. This
// never changes.
//
// cache is a Map (or PseudoMap) that matches the keys to
// the Yallist.Node object.
function LRUCache (options) {
if (!(this instanceof LRUCache)) {
return new LRUCache(options)
}
if (typeof options === 'number') {
options = { max: options }
}
if (!options) {
options = {}
}
var max = priv(this, 'max', options.max)
// Kind of weird to have a default max of Infinity, but oh well.
if (!max ||
!(typeof max === 'number') ||
max <= 0) {
priv(this, 'max', Infinity)
}
var lc = options.length || naiveLength
if (typeof lc !== 'function') {
lc = naiveLength
}
priv(this, 'lengthCalculator', lc)
priv(this, 'allowStale', options.stale || false)
priv(this, 'maxAge', options.maxAge || 0)
priv(this, 'dispose', options.dispose)
this.reset()
}
// resize the cache when the max changes.
Object.defineProperty(LRUCache.prototype, 'max', {
set: function (mL) {
if (!mL || !(typeof mL === 'number') || mL <= 0) {
mL = Infinity
}
priv(this, 'max', mL)
trim(this)
},
get: function () {
return priv(this, 'max')
},
enumerable: true
})
Object.defineProperty(LRUCache.prototype, 'allowStale', {
set: function (allowStale) {
priv(this, 'allowStale', !!allowStale)
},
get: function () {
return priv(this, 'allowStale')
},
enumerable: true
})
Object.defineProperty(LRUCache.prototype, 'maxAge', {
set: function (mA) {
if (!mA || !(typeof mA === 'number') || mA < 0) {
mA = 0
}
priv(this, 'maxAge', mA)
trim(this)
},
get: function () {
return priv(this, 'maxAge')
},
enumerable: true
})
// resize the cache when the lengthCalculator changes.
Object.defineProperty(LRUCache.prototype, 'lengthCalculator', {
set: function (lC) {
if (typeof lC !== 'function') {
lC = naiveLength
}
if (lC !== priv(this, 'lengthCalculator')) {
priv(this, 'lengthCalculator', lC)
priv(this, 'length', 0)
priv(this, 'lruList').forEach(function (hit) {
hit.length = priv(this, 'lengthCalculator').call(this, hit.value, hit.key)
priv(this, 'length', priv(this, 'length') + hit.length)
}, this)
}
trim(this)
},
get: function () { return priv(this, 'lengthCalculator') },
enumerable: true
})
Object.defineProperty(LRUCache.prototype, 'length', {
get: function () { return priv(this, 'length') },
enumerable: true
})
Object.defineProperty(LRUCache.prototype, 'itemCount', {
get: function () { return priv(this, 'lruList').length },
enumerable: true
})
LRUCache.prototype.rforEach = function (fn, thisp) {
thisp = thisp || this
for (var walker = priv(this, 'lruList').tail; walker !== null;) {
var prev = walker.prev
forEachStep(this, fn, walker, thisp)
walker = prev
}
}
function forEachStep (self, fn, node, thisp) {
var hit = node.value
if (isStale(self, hit)) {
del(self, node)
if (!priv(self, 'allowStale')) {
hit = undefined
}
}
if (hit) {
fn.call(thisp, hit.value, hit.key, self)
}
}
LRUCache.prototype.forEach = function (fn, thisp) {
thisp = thisp || this
for (var walker = priv(this, 'lruList').head; walker !== null;) {
var next = walker.next
forEachStep(this, fn, walker, thisp)
walker = next
}
}
LRUCache.prototype.keys = function () {
return priv(this, 'lruList').toArray().map(function (k) {
return k.key
}, this)
}
LRUCache.prototype.values = function () {
return priv(this, 'lruList').toArray().map(function (k) {
return k.value
}, this)
}
LRUCache.prototype.reset = function () {
if (priv(this, 'dispose') &&
priv(this, 'lruList') &&
priv(this, 'lruList').length) {
priv(this, 'lruList').forEach(function (hit) {
priv(this, 'dispose').call(this, hit.key, hit.value)
}, this)
}
priv(this, 'cache', new Map()) // hash of items by key
priv(this, 'lruList', new Yallist()) // list of items in order of use recency
priv(this, 'length', 0) // length of items in the list
}
LRUCache.prototype.dump = function () {
return priv(this, 'lruList').map(function (hit) {
if (!isStale(this, hit)) {
return {
k: hit.key,
v: hit.value,
e: hit.now + (hit.maxAge || 0)
}
}
}, this).toArray().filter(function (h) {
return h
})
}
LRUCache.prototype.dumpLru = function () {
return priv(this, 'lruList')
}
LRUCache.prototype.inspect = function (n, opts) {
var str = 'LRUCache {'
var extras = false
var as = priv(this, 'allowStale')
if (as) {
str += '\n allowStale: true'
extras = true
}
var max = priv(this, 'max')
if (max && max !== Infinity) {
if (extras) {
str += ','
}
str += '\n max: ' + util.inspect(max, opts)
extras = true
}
var maxAge = priv(this, 'maxAge')
if (maxAge) {
if (extras) {
str += ','
}
str += '\n maxAge: ' + util.inspect(maxAge, opts)
extras = true
}
var lc = priv(this, 'lengthCalculator')
if (lc && lc !== naiveLength) {
if (extras) {
str += ','
}
str += '\n length: ' + util.inspect(priv(this, 'length'), opts)
extras = true
}
var didFirst = false
priv(this, 'lruList').forEach(function (item) {
if (didFirst) {
str += ',\n '
} else {
if (extras) {
str += ',\n'
}
didFirst = true
str += '\n '
}
var key = util.inspect(item.key).split('\n').join('\n ')
var val = { value: item.value }
if (item.maxAge !== maxAge) {
val.maxAge = item.maxAge
}
if (lc !== naiveLength) {
val.length = item.length
}
if (isStale(this, item)) {
val.stale = true
}
val = util.inspect(val, opts).split('\n').join('\n ')
str += key + ' => ' + val
})
if (didFirst || extras) {
str += '\n'
}
str += '}'
return str
}
LRUCache.prototype.set = function (key, value, maxAge) {
maxAge = maxAge || priv(this, 'maxAge')
var now = maxAge ? Date.now() : 0
var len = priv(this, 'lengthCalculator').call(this, value, key)
if (priv(this, 'cache').has(key)) {
if (len > priv(this, 'max')) {
del(this, priv(this, 'cache').get(key))
return false
}
var node = priv(this, 'cache').get(key)
var item = node.value
// dispose of the old one before overwriting
if (priv(this, 'dispose')) {
priv(this, 'dispose').call(this, key, item.value)
}
item.now = now
item.maxAge = maxAge
item.value = value
priv(this, 'length', priv(this, 'length') + (len - item.length))
item.length = len
this.get(key)
trim(this)
return true
}
var hit = new Entry(key, value, len, now, maxAge)
// oversized objects fall out of cache automatically.
if (hit.length > priv(this, 'max')) {
if (priv(this, 'dispose')) {
priv(this, 'dispose').call(this, key, value)
}
return false
}
priv(this, 'length', priv(this, 'length') + hit.length)
priv(this, 'lruList').unshift(hit)
priv(this, 'cache').set(key, priv(this, 'lruList').head)
trim(this)
return true
}
LRUCache.prototype.has = function (key) {
if (!priv(this, 'cache').has(key)) return false
var hit = priv(this, 'cache').get(key).value
if (isStale(this, hit)) {
return false
}
return true
}
LRUCache.prototype.get = function (key) {
return get(this, key, true)
}
LRUCache.prototype.peek = function (key) {
return get(this, key, false)
}
LRUCache.prototype.pop = function () {
var node = priv(this, 'lruList').tail
if (!node) return null
del(this, node)
return node.value
}
LRUCache.prototype.del = function (key) {
del(this, priv(this, 'cache').get(key))
}
LRUCache.prototype.load = function (arr) {
// reset the cache
this.reset()
var now = Date.now()
// A previous serialized cache has the most recent items first
for (var l = arr.length - 1; l >= 0; l--) {
var hit = arr[l]
var expiresAt = hit.e || 0
if (expiresAt === 0) {
// the item was created without expiration in a non aged cache
this.set(hit.k, hit.v)
} else {
var maxAge = expiresAt - now
// don't add already expired items
if (maxAge > 0) {
this.set(hit.k, hit.v, maxAge)
}
}
}
}
LRUCache.prototype.prune = function () {
var self = this
priv(this, 'cache').forEach(function (value, key) {
get(self, key, false)
})
}
function get (self, key, doUse) {
var node = priv(self, 'cache').get(key)
if (node) {
var hit = node.value
if (isStale(self, hit)) {
del(self, node)
if (!priv(self, 'allowStale')) hit = undefined
} else {
if (doUse) {
priv(self, 'lruList').unshiftNode(node)
}
}
if (hit) hit = hit.value
}
return hit
}
function isStale (self, hit) {
if (!hit || (!hit.maxAge && !priv(self, 'maxAge'))) {
return false
}
var stale = false
var diff = Date.now() - hit.now
if (hit.maxAge) {
stale = diff > hit.maxAge
} else {
stale = priv(self, 'maxAge') && (diff > priv(self, 'maxAge'))
}
return stale
}
function trim (self) {
if (priv(self, 'length') > priv(self, 'max')) {
for (var walker = priv(self, 'lruList').tail;
priv(self, 'length') > priv(self, 'max') && walker !== null;) {
// We know that we're about to delete this one, and also
// what the next least recently used key will be, so just
// go ahead and set it now.
var prev = walker.prev
del(self, walker)
walker = prev
}
}
}
function del (self, node) {
if (node) {
var hit = node.value
if (priv(self, 'dispose')) {
priv(self, 'dispose').call(this, hit.key, hit.value)
}
priv(self, 'length', priv(self, 'length') - hit.length)
priv(self, 'cache').delete(hit.key)
priv(self, 'lruList').removeNode(node)
}
}
// classy, since V8 prefers predictable objects.
function Entry (key, value, length, now, maxAge) {
this.key = key
this.value = value
this.length = length
this.now = now
this.maxAge = maxAge || 0
}

View File

@ -0,0 +1,61 @@
{
"_from": "lru-cache@4.0.0",
"_id": "lru-cache@4.0.0",
"_inBundle": false,
"_integrity": "sha1-tcvwFVbBaWb+vlTO7A+03JDfbCg=",
"_location": "/cache-manager/lru-cache",
"_phantomChildren": {},
"_requested": {
"type": "version",
"registry": true,
"raw": "lru-cache@4.0.0",
"name": "lru-cache",
"escapedName": "lru-cache",
"rawSpec": "4.0.0",
"saveSpec": null,
"fetchSpec": "4.0.0"
},
"_requiredBy": [
"/cache-manager"
],
"_resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.0.0.tgz",
"_shasum": "b5cbf01556c16966febe54ceec0fb4dc90df6c28",
"_spec": "lru-cache@4.0.0",
"_where": "/Users/stefanfejes/Projects/30-seconds-of-python-code/node_modules/cache-manager",
"author": {
"name": "Isaac Z. Schlueter",
"email": "i@izs.me"
},
"bugs": {
"url": "https://github.com/isaacs/node-lru-cache/issues"
},
"bundleDependencies": false,
"dependencies": {
"pseudomap": "^1.0.1",
"yallist": "^2.0.0"
},
"deprecated": false,
"description": "A cache object that deletes the least-recently-used items.",
"devDependencies": {
"standard": "^5.4.1",
"tap": "^2.3.3"
},
"homepage": "https://github.com/isaacs/node-lru-cache#readme",
"keywords": [
"mru",
"lru",
"cache"
],
"license": "ISC",
"main": "lib/lru-cache.js",
"name": "lru-cache",
"repository": {
"type": "git",
"url": "git://github.com/isaacs/node-lru-cache.git"
},
"scripts": {
"posttest": "standard test/*.js lib/*.js",
"test": "tap test --cov"
},
"version": "4.0.0"
}

View File

@ -0,0 +1,520 @@
var test = require('tap').test
var LRU = require('../')
test('basic', function (t) {
var cache = new LRU({max: 10})
cache.set('key', 'value')
t.equal(cache.get('key'), 'value')
t.equal(cache.get('nada'), undefined)
t.equal(cache.length, 1)
t.equal(cache.max, 10)
t.end()
})
test('least recently set', function (t) {
var cache = new LRU(2)
cache.set('a', 'A')
cache.set('b', 'B')
cache.set('c', 'C')
t.equal(cache.get('c'), 'C')
t.equal(cache.get('b'), 'B')
t.equal(cache.get('a'), undefined)
t.end()
})
test('lru recently gotten', function (t) {
var cache = new LRU(2)
cache.set('a', 'A')
cache.set('b', 'B')
cache.get('a')
cache.set('c', 'C')
t.equal(cache.get('c'), 'C')
t.equal(cache.get('b'), undefined)
t.equal(cache.get('a'), 'A')
t.end()
})
test('del', function (t) {
var cache = new LRU(2)
cache.set('a', 'A')
cache.del('a')
t.equal(cache.get('a'), undefined)
t.end()
})
test('max', function (t) {
var cache = new LRU(3)
// test changing the max, verify that the LRU items get dropped.
cache.max = 100
var i
for (i = 0; i < 100; i++) cache.set(i, i)
t.equal(cache.length, 100)
for (i = 0; i < 100; i++) {
t.equal(cache.get(i), i)
}
cache.max = 3
t.equal(cache.length, 3)
for (i = 0; i < 97; i++) {
t.equal(cache.get(i), undefined)
}
for (i = 98; i < 100; i++) {
t.equal(cache.get(i), i)
}
// now remove the max restriction, and try again.
cache.max = 'hello'
for (i = 0; i < 100; i++) cache.set(i, i)
t.equal(cache.length, 100)
for (i = 0; i < 100; i++) {
t.equal(cache.get(i), i)
}
// should trigger an immediate resize
cache.max = 3
t.equal(cache.length, 3)
for (i = 0; i < 97; i++) {
t.equal(cache.get(i), undefined)
}
for (i = 98; i < 100; i++) {
t.equal(cache.get(i), i)
}
t.end()
})
test('reset', function (t) {
var cache = new LRU(10)
cache.set('a', 'A')
cache.set('b', 'B')
cache.reset()
t.equal(cache.length, 0)
t.equal(cache.max, 10)
t.equal(cache.get('a'), undefined)
t.equal(cache.get('b'), undefined)
t.end()
})
test('basic with weighed length', function (t) {
var cache = new LRU({
max: 100,
length: function (item, key) {
t.isa(key, 'string')
return item.size
}
})
cache.set('key', {val: 'value', size: 50})
t.equal(cache.get('key').val, 'value')
t.equal(cache.get('nada'), undefined)
t.equal(cache.lengthCalculator(cache.get('key'), 'key'), 50)
t.equal(cache.length, 50)
t.equal(cache.max, 100)
t.end()
})
test('weighed length item too large', function (t) {
var cache = new LRU({
max: 10,
length: function (item) { return item.size }
})
t.equal(cache.max, 10)
// should fall out immediately
cache.set('key', {val: 'value', size: 50})
t.equal(cache.length, 0)
t.equal(cache.get('key'), undefined)
t.end()
})
test('least recently set with weighed length', function (t) {
var cache = new LRU({
max: 8,
length: function (item) { return item.length }
})
cache.set('a', 'A')
cache.set('b', 'BB')
cache.set('c', 'CCC')
cache.set('d', 'DDDD')
t.equal(cache.get('d'), 'DDDD')
t.equal(cache.get('c'), 'CCC')
t.equal(cache.get('b'), undefined)
t.equal(cache.get('a'), undefined)
t.end()
})
test('lru recently gotten with weighed length', function (t) {
var cache = new LRU({
max: 8,
length: function (item) { return item.length }
})
cache.set('a', 'A')
cache.set('b', 'BB')
cache.set('c', 'CCC')
cache.get('a')
cache.get('b')
cache.set('d', 'DDDD')
t.equal(cache.get('c'), undefined)
t.equal(cache.get('d'), 'DDDD')
t.equal(cache.get('b'), 'BB')
t.equal(cache.get('a'), 'A')
t.end()
})
test('lru recently updated with weighed length', function (t) {
var cache = new LRU({
max: 8,
length: function (item) { return item.length }
})
cache.set('a', 'A')
cache.set('b', 'BB')
cache.set('c', 'CCC')
t.equal(cache.length, 6) // CCC BB A
cache.set('a', '+A')
t.equal(cache.length, 7) // +A CCC BB
cache.set('b', '++BB')
t.equal(cache.length, 6) // ++BB +A
t.equal(cache.get('c'), undefined)
cache.set('c', 'oversized')
t.equal(cache.length, 6) // ++BB +A
t.equal(cache.get('c'), undefined)
cache.set('a', 'oversized')
t.equal(cache.length, 4) // ++BB
t.equal(cache.get('a'), undefined)
t.equal(cache.get('b'), '++BB')
t.end()
})
test('set returns proper booleans', function (t) {
var cache = new LRU({
max: 5,
length: function (item) { return item.length }
})
t.equal(cache.set('a', 'A'), true)
// should return false for max exceeded
t.equal(cache.set('b', 'donuts'), false)
t.equal(cache.set('b', 'B'), true)
t.equal(cache.set('c', 'CCCC'), true)
t.end()
})
test('drop the old items', function (t) {
var cache = new LRU({
max: 5,
maxAge: 50
})
cache.set('a', 'A')
setTimeout(function () {
cache.set('b', 'b')
t.equal(cache.get('a'), 'A')
}, 25)
setTimeout(function () {
cache.set('c', 'C')
// timed out
t.notOk(cache.get('a'))
}, 60 + 25)
setTimeout(function () {
t.notOk(cache.get('b'))
t.equal(cache.get('c'), 'C')
}, 90)
setTimeout(function () {
t.notOk(cache.get('c'))
t.end()
}, 155)
})
test('manual pruning', function (t) {
var cache = new LRU({
max: 5,
maxAge: 50
})
cache.set('a', 'A')
cache.set('b', 'b')
cache.set('c', 'C')
setTimeout(function () {
cache.prune()
t.notOk(cache.get('a'))
t.notOk(cache.get('b'))
t.notOk(cache.get('c'))
t.end()
}, 100)
})
test('individual item can have its own maxAge', function (t) {
var cache = new LRU({
max: 5,
maxAge: 50
})
cache.set('a', 'A', 20)
setTimeout(function () {
t.notOk(cache.get('a'))
t.end()
}, 25)
})
test('individual item can have its own maxAge > cache', function (t) {
var cache = new LRU({
max: 5,
maxAge: 20
})
cache.set('a', 'A', 50)
setTimeout(function () {
t.equal(cache.get('a'), 'A')
t.end()
}, 25)
})
test('disposal function', function (t) {
var disposed = false
var cache = new LRU({
max: 1,
dispose: function (k, n) {
disposed = n
}
})
cache.set(1, 1)
cache.set(2, 2)
t.equal(disposed, 1)
cache.set(2, 10)
t.equal(disposed, 2)
cache.set(3, 3)
t.equal(disposed, 10)
cache.reset()
t.equal(disposed, 3)
t.end()
})
test('disposal function on too big of item', function (t) {
var disposed = false
var cache = new LRU({
max: 1,
length: function (k) {
return k.length
},
dispose: function (k, n) {
disposed = n
}
})
var obj = [ 1, 2 ]
t.equal(disposed, false)
cache.set('obj', obj)
t.equal(disposed, obj)
t.end()
})
test('has()', function (t) {
var cache = new LRU({
max: 1,
maxAge: 10
})
cache.set('foo', 'bar')
t.equal(cache.has('foo'), true)
cache.set('blu', 'baz')
t.equal(cache.has('foo'), false)
t.equal(cache.has('blu'), true)
setTimeout(function () {
t.equal(cache.has('blu'), false)
t.end()
}, 15)
})
test('stale', function (t) {
var cache = new LRU({
maxAge: 10,
stale: true
})
t.equal(cache.allowStale, true)
cache.set('foo', 'bar')
t.equal(cache.get('foo'), 'bar')
t.equal(cache.has('foo'), true)
setTimeout(function () {
t.equal(cache.has('foo'), false)
t.equal(cache.get('foo'), 'bar')
t.equal(cache.get('foo'), undefined)
t.end()
}, 15)
})
test('lru update via set', function (t) {
var cache = LRU({ max: 2 })
cache.set('foo', 1)
cache.set('bar', 2)
cache.del('bar')
cache.set('baz', 3)
cache.set('qux', 4)
t.equal(cache.get('foo'), undefined)
t.equal(cache.get('bar'), undefined)
t.equal(cache.get('baz'), 3)
t.equal(cache.get('qux'), 4)
t.end()
})
test('least recently set w/ peek', function (t) {
var cache = new LRU(2)
cache.set('a', 'A')
cache.set('b', 'B')
t.equal(cache.peek('a'), 'A')
cache.set('c', 'C')
t.equal(cache.get('c'), 'C')
t.equal(cache.get('b'), 'B')
t.equal(cache.get('a'), undefined)
t.end()
})
test('pop the least used item', function (t) {
var cache = new LRU(3)
var last
cache.set('a', 'A')
cache.set('b', 'B')
cache.set('c', 'C')
t.equal(cache.length, 3)
t.equal(cache.max, 3)
// Ensure we pop a, c, b
cache.get('b', 'B')
last = cache.pop()
t.equal(last.key, 'a')
t.equal(last.value, 'A')
t.equal(cache.length, 2)
t.equal(cache.max, 3)
last = cache.pop()
t.equal(last.key, 'c')
t.equal(last.value, 'C')
t.equal(cache.length, 1)
t.equal(cache.max, 3)
last = cache.pop()
t.equal(last.key, 'b')
t.equal(last.value, 'B')
t.equal(cache.length, 0)
t.equal(cache.max, 3)
last = cache.pop()
t.equal(last, null)
t.equal(cache.length, 0)
t.equal(cache.max, 3)
t.end()
})
test('get and set only accepts strings and numbers as keys', function (t) {
var cache = new LRU()
cache.set('key', 'value')
cache.set(123, 456)
t.equal(cache.get('key'), 'value')
t.equal(cache.get(123), 456)
t.end()
})
test('peek with weird keys', function (t) {
var cache = new LRU()
cache.set('key', 'value')
cache.set(123, 456)
t.equal(cache.peek('key'), 'value')
t.equal(cache.peek(123), 456)
t.equal(cache.peek({
toString: function () { return 'key' }
}), undefined)
t.end()
})
test('invalid length calc results in basic length', function (t) {
var l = new LRU({ length: true })
t.isa(l.lengthCalculator, 'function')
l.lengthCalculator = 'not a function'
t.isa(l.lengthCalculator, 'function')
t.end()
})
test('change length calculator recalculates', function (t) {
var l = new LRU({ max: 3 })
l.set(2, 2)
l.set(1, 1)
l.lengthCalculator = function (key, val) {
return key + val
}
t.equal(l.itemCount, 1)
t.equal(l.get(2), undefined)
t.equal(l.get(1), 1)
l.set(0, 1)
t.equal(l.itemCount, 2)
l.lengthCalculator = function (key, val) {
return key
}
t.equal(l.lengthCalculator(1, 10), 1)
t.equal(l.lengthCalculator(10, 1), 10)
l.lengthCalculator = { not: 'a function' }
t.equal(l.lengthCalculator(1, 10), 1)
t.equal(l.lengthCalculator(10, 1), 1)
t.end()
})
test('delete non-existent item has no effect', function (t) {
var l = new LRU({ max: 2 })
l.set('foo', 1)
l.set('bar', 2)
l.del('baz')
t.same(l.dumpLru().toArray().map(function (hit) {
return hit.key
}), [ 'bar', 'foo' ])
t.end()
})
test('maxAge on list, cleared in forEach', function (t) {
var l = new LRU({ stale: true })
l.set('foo', 1)
// hacky. make it seem older.
l.dumpLru().head.value.now = Date.now() - 100000
// setting maxAge to invalid values does nothing.
t.equal(l.maxAge, 0)
l.maxAge = -100
t.equal(l.maxAge, 0)
l.maxAge = {}
t.equal(l.maxAge, 0)
l.maxAge = 1
var saw = false
l.forEach(function (val, key) {
saw = true
t.equal(key, 'foo')
})
t.ok(saw)
t.equal(l.length, 0)
t.end()
})

View File

@ -0,0 +1,134 @@
var test = require('tap').test
var LRU = require('../')
test('forEach', function (t) {
var l = new LRU(5)
var i
for (i = 0; i < 10; i++) {
l.set(i, i.toString(2))
}
i = 9
l.forEach(function (val, key, cache) {
t.equal(cache, l)
t.equal(key, i)
t.equal(val, i.toString(2))
i -= 1
})
// get in order of most recently used
l.get(6)
l.get(8)
var order = [ 8, 6, 9, 7, 5 ]
i = 0
l.forEach(function (val, key, cache) {
var j = order[i++]
t.equal(cache, l)
t.equal(key, j)
t.equal(val, j.toString(2))
})
t.equal(i, order.length)
i = 0
order.reverse()
l.rforEach(function (val, key, cache) {
var j = order[i++]
t.equal(cache, l)
t.equal(key, j)
t.equal(val, j.toString(2))
})
t.equal(i, order.length)
t.end()
})
test('keys() and values()', function (t) {
var l = new LRU(5)
var i
for (i = 0; i < 10; i++) {
l.set(i, i.toString(2))
}
t.similar(l.keys(), [9, 8, 7, 6, 5])
t.similar(l.values(), ['1001', '1000', '111', '110', '101'])
// get in order of most recently used
l.get(6)
l.get(8)
t.similar(l.keys(), [8, 6, 9, 7, 5])
t.similar(l.values(), ['1000', '110', '1001', '111', '101'])
t.end()
})
test('all entries are iterated over', function (t) {
var l = new LRU(5)
var i
for (i = 0; i < 10; i++) {
l.set(i.toString(), i.toString(2))
}
i = 0
l.forEach(function (val, key, cache) {
if (i > 0) {
cache.del(key)
}
i += 1
})
t.equal(i, 5)
t.equal(l.keys().length, 1)
t.end()
})
test('all stale entries are removed', function (t) {
var l = new LRU({ max: 5, maxAge: -5, stale: true })
var i
for (i = 0; i < 10; i++) {
l.set(i.toString(), i.toString(2))
}
i = 0
l.forEach(function () {
i += 1
})
t.equal(i, 5)
t.equal(l.keys().length, 0)
t.end()
})
test('expires', function (t) {
var l = new LRU({
max: 10,
maxAge: 50
})
var i
for (i = 0; i < 10; i++) {
l.set(i.toString(), i.toString(2), ((i % 2) ? 25 : undefined))
}
i = 0
var order = [ 8, 6, 4, 2, 0 ]
setTimeout(function () {
l.forEach(function (val, key, cache) {
var j = order[i++]
t.equal(cache, l)
t.equal(key, j.toString())
t.equal(val, j.toString(2))
})
t.equal(i, order.length)
setTimeout(function () {
var count = 0
l.forEach(function (val, key, cache) { count++ })
t.equal(0, count)
t.end()
}, 25)
}, 26)
})

View File

@ -0,0 +1,54 @@
// vim: set nowrap:
var util = require('util')
var t = require('tap')
var LRU = require('../')
var l = LRU()
function inspect (str) {
t.equal(util.inspect(l), str)
t.equal(l.inspect(), str)
}
inspect('LRUCache {}')
l.max = 10
inspect('LRUCache {\n max: 10\n}')
l.maxAge = 50
inspect('LRUCache {\n max: 10,\n maxAge: 50\n}')
l.set({ foo: 'bar' }, 'baz')
inspect("LRUCache {\n max: 10,\n maxAge: 50,\n\n { foo: 'bar' } => { value: 'baz' }\n}")
l.maxAge = 0
l.set(1, {a: {b: {c: {d: {e: {f: {}}}}}}})
inspect("LRUCache {\n max: 10,\n\n 1 => { value: { a: { b: [Object] } } },\n { foo: 'bar' } => { value: 'baz', maxAge: 50 }\n}")
l.allowStale = true
inspect("LRUCache {\n allowStale: true,\n max: 10,\n\n 1 => { value: { a: { b: [Object] } } },\n { foo: 'bar' } => { value: 'baz', maxAge: 50 }\n}")
setTimeout(function () {
inspect("LRUCache {\n allowStale: true,\n max: 10,\n\n 1 => { value: { a: { b: [Object] } } },\n { foo: 'bar' } => { value: 'baz', maxAge: 50, stale: true }\n}")
// prune stale items
l.forEach(function () {})
inspect('LRUCache {\n allowStale: true,\n max: 10,\n\n 1 => { value: { a: { b: [Object] } } }\n}')
l.lengthCalculator = function () { return 5 }
inspect('LRUCache {\n allowStale: true,\n max: 10,\n length: 5,\n\n 1 => { value: { a: { b: [Object] } }, length: 5 }\n}')
l.max = 0
inspect('LRUCache {\n allowStale: true,\n length: 5,\n\n 1 => { value: { a: { b: [Object] } }, length: 5 }\n}')
l.maxAge = 100
inspect('LRUCache {\n allowStale: true,\n maxAge: 100,\n length: 5,\n\n 1 => { value: { a: { b: [Object] } }, maxAge: 0, length: 5 }\n}')
l.allowStale = false
inspect('LRUCache {\n maxAge: 100,\n length: 5,\n\n 1 => { value: { a: { b: [Object] } }, maxAge: 0, length: 5 }\n}')
l.maxAge = 0
inspect('LRUCache {\n length: 5,\n\n 1 => { value: { a: { b: [Object] } }, length: 5 }\n}')
l.lengthCalculator = null
inspect('LRUCache {\n 1 => { value: { a: { b: [Object] } } }\n}')
}, 100)

View File

@ -0,0 +1,3 @@
// hacky way to test that it still works when Symbol is not there
global.Symbol = false
require('./basic.js')

View File

@ -0,0 +1,227 @@
var test = require('tap').test
var LRU = require('../')
var Yallist = require('yallist')
test('dump', function (t) {
var cache = new LRU()
t.equal(cache.dump().length, 0, 'nothing in dump for empty cache')
cache.set('a', 'A')
cache.set('b', 'B')
t.deepEqual(cache.dump(), [
{ k: 'b', v: 'B', e: 0 },
{ k: 'a', v: 'A', e: 0 }
])
cache.set(123, 456)
t.deepEqual(cache.dump(), [
{ k: 123, v: 456, e: 0 },
{ k: 'b', v: 'B', e: 0 },
{ k: 'a', v: 'A', e: 0 }
])
cache.del(123)
cache.set('a', 'A')
t.deepEqual(cache.dump(), [
{ k: 'a', v: 'A', e: 0 },
{ k: 'b', v: 'B', e: 0 }
])
cache.get('b')
t.deepEqual(cache.dump(), [
{ k: 'b', v: 'B', e: 0 },
{ k: 'a', v: 'A', e: 0 }
])
cache.del('a')
t.deepEqual(cache.dump(), [
{ k: 'b', v: 'B', e: 0 }
])
t.end()
})
test('do not dump stale items', function (t) {
var cache = new LRU({
max: 5,
maxAge: 50
})
// expires at 50
cache.set('a', 'A')
setTimeout(function () {
// expires at 75
cache.set('b', 'B')
var s = cache.dump()
t.equal(s.length, 2)
t.equal(s[0].k, 'b')
t.equal(s[1].k, 'a')
}, 25)
setTimeout(function () {
// expires at 110
cache.set('c', 'C')
var s = cache.dump()
t.equal(s.length, 2)
t.equal(s[0].k, 'c')
t.equal(s[1].k, 'b')
}, 60)
setTimeout(function () {
// expires at 130
cache.set('d', 'D', 40)
var s = cache.dump()
t.equal(s.length, 2)
t.equal(s[0].k, 'd')
t.equal(s[1].k, 'c')
}, 90)
setTimeout(function () {
var s = cache.dump()
t.equal(s.length, 1)
t.equal(s[0].k, 'd')
}, 120)
setTimeout(function () {
var s = cache.dump()
t.deepEqual(s, [])
t.end()
}, 155)
})
test('load basic cache', function (t) {
var cache = new LRU()
var copy = new LRU()
cache.set('a', 'A')
cache.set('b', 'B')
cache.set(123, 456)
copy.load(cache.dump())
t.deepEquals(cache.dump(), copy.dump())
t.end()
})
test('load staled cache', function (t) {
var cache = new LRU({maxAge: 50})
var copy = new LRU({maxAge: 50})
var arr
// expires at 50
cache.set('a', 'A')
setTimeout(function () {
// expires at 80
cache.set('b', 'B')
arr = cache.dump()
t.equal(arr.length, 2)
}, 30)
setTimeout(function () {
copy.load(arr)
t.equal(copy.get('a'), undefined)
t.equal(copy.get('b'), 'B')
}, 60)
setTimeout(function () {
t.equal(copy.get('b'), undefined)
t.end()
}, 90)
})
test('load to other size cache', function (t) {
var cache = new LRU({max: 2})
var copy = new LRU({max: 1})
cache.set('a', 'A')
cache.set('b', 'B')
copy.load(cache.dump())
t.equal(copy.get('a'), undefined)
t.equal(copy.get('b'), 'B')
// update the last read from original cache
cache.get('a')
copy.load(cache.dump())
t.equal(copy.get('a'), 'A')
t.equal(copy.get('b'), undefined)
t.end()
})
test('load to other age cache', function (t) {
var cache = new LRU({maxAge: 250})
var aged = new LRU({maxAge: 500})
var simple = new LRU()
var arr
// created at 0
// a would be valid till 0 + 250
cache.set('a', 'A')
setTimeout(function () {
// created at 100
// b would be valid till 100 + 250
cache.set('b', 'B')
// c would be valid till 100 + 350
cache.set('c', 'C', 350)
arr = cache.dump()
t.equal(arr.length, 3)
}, 100)
setTimeout(function () {
t.equal(cache.get('a'), undefined)
t.equal(cache.get('b'), 'B')
t.equal(cache.get('c'), 'C')
aged.load(arr)
t.equal(aged.get('a'), undefined)
t.equal(aged.get('b'), 'B')
t.equal(aged.get('c'), 'C')
simple.load(arr)
t.equal(simple.get('a'), undefined)
t.equal(simple.get('b'), 'B')
t.equal(simple.get('c'), 'C')
}, 300)
setTimeout(function () {
t.equal(cache.get('a'), undefined)
t.equal(cache.get('b'), undefined)
t.equal(cache.get('c'), 'C')
aged.load(arr)
t.equal(aged.get('a'), undefined)
t.equal(aged.get('b'), undefined)
t.equal(aged.get('c'), 'C')
simple.load(arr)
t.equal(simple.get('a'), undefined)
t.equal(simple.get('b'), undefined)
t.equal(simple.get('c'), 'C')
}, 400)
setTimeout(function () {
t.equal(cache.get('a'), undefined)
t.equal(cache.get('b'), undefined)
t.equal(cache.get('c'), undefined)
aged.load(arr)
t.equal(aged.get('a'), undefined)
t.equal(aged.get('b'), undefined)
t.equal(aged.get('c'), undefined)
simple.load(arr)
t.equal(simple.get('a'), undefined)
t.equal(simple.get('b'), undefined)
t.equal(simple.get('c'), undefined)
t.end()
}, 500)
})
test('dumpLru', function (t) {
var l = LRU()
t.isa(l.dumpLru(), Yallist)
t.end()
})

71
node_modules/cache-manager/package.json generated vendored Normal file
View File

@ -0,0 +1,71 @@
{
"_from": "cache-manager@^2.9.0",
"_id": "cache-manager@2.10.0",
"_inBundle": false,
"_integrity": "sha512-IuPx05r5L0uZyBDYicB2Llld1o+/1WYjoHUnrC0TNQejMAnkoYxYS9Y8Uwr+lIBytDiyu7dwwmBCup2M9KugwQ==",
"_location": "/cache-manager",
"_phantomChildren": {
"pseudomap": "1.0.2",
"yallist": "2.1.2"
},
"_requested": {
"type": "range",
"registry": true,
"raw": "cache-manager@^2.9.0",
"name": "cache-manager",
"escapedName": "cache-manager",
"rawSpec": "^2.9.0",
"saveSpec": null,
"fetchSpec": "^2.9.0"
},
"_requiredBy": [
"/gatsby"
],
"_resolved": "https://registry.npmjs.org/cache-manager/-/cache-manager-2.10.0.tgz",
"_shasum": "278e9f8784e5d7e6617bfe350358c8ccd17387bf",
"_spec": "cache-manager@^2.9.0",
"_where": "/Users/stefanfejes/Projects/30-seconds-of-python-code/node_modules/gatsby",
"author": {
"name": "Bryan Donovan"
},
"bugs": {
"url": "https://github.com/BryanDonovan/node-cache-manager/issues"
},
"bundleDependencies": false,
"dependencies": {
"async": "1.5.2",
"lru-cache": "4.0.0"
},
"deprecated": false,
"description": "Cache module for Node.js",
"devDependencies": {
"coveralls": "^2.3.0",
"es6-promise": "^3.0.2",
"istanbul": "0.4.2",
"jscs": "2.11.0",
"jsdoc": "3.5.5",
"jshint": "2.9.1",
"mocha": "2.4.5",
"optimist": "0.6.1",
"sinon": "1.17.3"
},
"homepage": "https://github.com/BryanDonovan/node-cache-manager#readme",
"keywords": [
"cache",
"redis",
"lru-cache",
"memory cache",
"multiple cache"
],
"license": "MIT",
"main": "index.js",
"name": "cache-manager",
"repository": {
"type": "git",
"url": "git+https://github.com/BryanDonovan/node-cache-manager.git"
},
"scripts": {
"test": "make"
},
"version": "2.10.0"
}

1255
node_modules/cache-manager/test/caching.unit.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

5
node_modules/cache-manager/test/mocha.opts generated vendored Normal file
View File

@ -0,0 +1,5 @@
--reporter spec
--ui bdd
--globals state,newBlocks,params,type,__coverage__
--timeout 6500
--slow 200

1735
node_modules/cache-manager/test/multi_caching.unit.js generated vendored Normal file

File diff suppressed because it is too large Load Diff

73
node_modules/cache-manager/test/run.js generated vendored Executable file
View File

@ -0,0 +1,73 @@
#!/usr/bin/env node
process.env.NODE_ENV = 'test';
require('../index');
var Mocha = require('mocha');
var optimist = require('optimist');
var walkDir = require('./support').walkDir;
var argv = optimist
.usage("Usage: $0 -t [types] --reporter [reporter] --timeout [timeout]")['default'](
{types: 'unit,functional', reporter: 'spec', timeout: 6000})
.describe('types', 'The types of tests to run, separated by commas. E.g., unit,functional,acceptance')
.describe('reporter', 'The mocha test reporter to use.')
.describe('timeout', 'The mocha timeout to use per test (ms).')
.boolean('help')
.alias('types', 'T')
.alias('timeout', 't')
.alias('reporter', 'R')
.alias('help', 'h')
.argv;
var mocha = new Mocha({timeout: argv.timeout, reporter: argv.reporter, ui: 'bdd'});
var validTestTypes = ['unit', 'functional', 'acceptance', 'integration'];
var requestedTypes = argv.types.split(',');
var typesToUse = [];
validTestTypes.forEach(function(validTestType) {
if (requestedTypes.indexOf(validTestType) !== -1) {
typesToUse.push(validTestType);
}
});
if (argv.help || typesToUse.length === 0) {
console.log('\n' + optimist.help());
process.exit();
}
var isValidFile = function(file) {
if (file.match(/buster/)) {
return false;
}
for (var i = 0; i < typesToUse.length; i++) {
var testType = typesToUse[i];
var ext = testType + ".js";
if (file.indexOf(ext) !== -1) {
return true;
}
}
return false;
};
function run(cb) {
walkDir('test', isValidFile, function(err, files) {
if (err) { return cb(err); }
files.forEach(function(file) {
mocha.addFile(file);
});
cb();
});
}
run(function(err) {
if (err) { throw err; }
mocha.run(function(failures) {
process.exit(failures);
});
});

37
node_modules/cache-manager/test/stores/memory.unit.js generated vendored Normal file
View File

@ -0,0 +1,37 @@
var assert = require('assert');
var support = require('../support');
var memoryStore = require('../../lib/stores/memory');
describe("memory store", function() {
describe("instantiating", function() {
it("lets us pass in no args", function(done) {
var memoryCache = memoryStore.create();
support.testSetGetDel(memoryCache, done);
});
});
describe("set()", function() {
var memoryCache;
var origPromise;
beforeEach(function() {
origPromise = global.Promise;
delete global.Promise;
memoryCache = memoryStore.create({noPromises: true});
});
afterEach(function() {
global.Promise = origPromise;
});
// This test should pass in node v0.10.x:
it("does not require a callback or use of Promises", function(done) {
memoryCache.set('foo', 'bar');
setTimeout(function() {
assert.equal(memoryCache.get('foo'), 'bar');
done();
}, 10);
});
});
});

148
node_modules/cache-manager/test/stores/none.unit.js generated vendored Normal file
View File

@ -0,0 +1,148 @@
var assert = require('assert');
// var support = require('../support');
var noneStore = require('../../lib/stores/none');
describe('none store', function() {
var key = 'test-key';
var value = 'test-value';
var key2 = 'test-key-2';
var value2 = 'test-value-2';
describe('with callback', function() {
var origPromise;
var noneCache;
beforeEach(function() {
origPromise = global.Promise;
delete global.Promise;
noneCache = noneStore.create({noPromises: true});
});
afterEach(function() {
global.Promise = origPromise;
});
it('set()', function(done) {
assert.strictEqual(noneCache.set(key, value), undefined);
noneCache.set(key, value, function(err) {
assert.strictEqual(err, null);
noneCache.set(key, value, {}, done);
});
});
it('mset()', function(done) {
assert.strictEqual(noneCache.mset(key, value), undefined);
noneCache.mset(key, value, key2, value2, function(err) {
assert.strictEqual(err, null);
noneCache.mset(key, value, {}, done);
});
});
it('get()', function(done) {
assert.strictEqual(noneCache.get(key), undefined);
noneCache.get(key2, function(err, result) {
assert.strictEqual(err, null);
assert.strictEqual(result, undefined);
noneCache.get(key2, {}, done);
});
});
it('mget()', function(done) {
assert.deepEqual(noneCache.mget(key, key2), [undefined, undefined]);
noneCache.mget(key, key2, function(err, result) {
assert.strictEqual(err, null);
assert.deepEqual(result, [undefined, undefined]);
noneCache.mget(key, key2, key, {}, function(err, result) {
assert.strictEqual(err, null);
assert.deepEqual(result, [undefined, undefined, undefined]);
done();
});
});
});
it('del()', function(done) {
assert.strictEqual(noneCache.del(), undefined);
noneCache.del(function() {
noneCache.del({}, done);
});
});
it('reset()', function(done) {
assert.strictEqual(noneCache.reset(), undefined);
noneCache.reset(done);
});
it('keys()', function(done) {
assert.deepEqual(noneCache.keys(), []);
noneCache.keys(function(err, keys) {
assert.deepEqual(keys, []);
done();
});
});
});
describe('with promise', function() {
var noneCache;
beforeEach(function() {
noneCache = noneStore.create({promiseDependency: global.Promise});
});
it('get() global promise', function(done) {
noneStore.create()
.get(key)
.then(function(result) {
assert.strictEqual(result, undefined);
done();
});
});
it('set()', function(done) {
noneCache.set(key, value)
.then(function(result) {
assert.equal(result, value);
done();
});
});
it('mset()', function(done) {
noneCache.mset(key, value, key2, value2)
.then(function(result) {
assert.deepEqual(result, [value, value2]);
done();
});
});
it('get()', function(done) {
noneCache.get(key)
.then(function(result) {
assert.strictEqual(result, undefined);
done();
});
});
it('mget()', function(done) {
noneCache.mget(key, key2)
.then(function(result) {
assert.deepEqual(result, [undefined, undefined]);
done();
});
});
it('del()', function(done) {
noneCache.del(key).then(done);
});
it('reset()', function(done) {
noneCache.reset().then(done);
});
it('keys()', function(done) {
noneCache.keys()
.then(function(result) {
assert.deepEqual(result, []);
done();
});
});
});
});

115
node_modules/cache-manager/test/support.js generated vendored Normal file
View File

@ -0,0 +1,115 @@
var fs = require('fs');
var util = require('util');
var assert = require('assert');
var support = {
random: {
string: function(strLen) {
strLen = strLen || 8;
var chars = "abcdefghiklmnopqrstuvwxyz";
var randomStr = '';
for (var i = 0; i < strLen; i++) {
var rnum = Math.floor(Math.random() * chars.length);
randomStr += chars.substring(rnum, rnum + 1);
}
return randomStr;
},
number: function(max) {
max = max || 1000;
return Math.floor((Math.random() * max));
}
},
checkErr: function(err) {
if (err) {
var msg;
if (err instanceof Error) {
msg = err;
} else if (err.msg) {
msg = err.msg;
} else {
msg = util.inspect(err);
}
var error = new Error(msg);
throw error;
}
},
assertBetween: function(actual, lower, upper) {
assert.ok(actual >= lower, "Expected " + actual + " to be >= " + lower);
assert.ok(actual <= upper, "Expected " + actual + " to be <= " + upper);
},
assertWithin: function(actual, expected, delta) {
var lower = expected - delta;
var upper = expected + delta;
this.assertBetween(actual, lower, upper);
},
walkDir: function(dir, validationFunction, cb) {
if (arguments.length === 2) {
cb = validationFunction;
validationFunction = null;
}
var results = [];
fs.readdir(dir, function(err, list) {
if (err) { return cb(err); }
var pending = list.length;
if (!pending) { return cb(null, results); }
list.forEach(function(file) {
file = dir + '/' + file;
fs.stat(file, function(err, stat) {
if (stat && stat.isDirectory()) {
support.walkDir(file, validationFunction, function(err, res) {
results = results.concat(res);
if (!--pending) { cb(null, results); }
});
} else {
if (typeof validationFunction === 'function') {
if (validationFunction(file)) {
results.push(file);
}
} else {
results.push(file);
}
if (!--pending) { cb(null, results); }
}
});
});
});
},
testSetGetDel: function(cache, cb) {
var key = 'TEST' + support.random.string();
var val = support.random.string();
cache.set(key, val, function(err) {
if (err) { return cb(err); }
cache.get(key, function(err, result) {
if (err) { return cb(err); }
assert.equal(result, val);
cache.del(key, function(err) {
if (err) { return cb(err); }
cache.get(key, function(err, result) {
if (err) { return cb(err); }
assert.ok(!result);
cb();
});
});
});
});
}
};
module.exports = support;

25
node_modules/cache-manager/test/utils.unit.js generated vendored Normal file
View File

@ -0,0 +1,25 @@
var utils = require('../lib/utils');
var assert = require('assert');
var isObject = utils.isObject;
describe('utils', function() {
describe('isObject()', function() {
it('should return "true" when value passed is an object', function() {
var result1 = isObject({});
assert.ok(result1);
});
it('should return "false" when value passed is not an object', function() {
var result1 = isObject('string');
var result2 = isObject(123);
var result3 = isObject([]);
var result4 = isObject(function() {});
assert.ok(!result1);
assert.ok(!result2);
assert.ok(!result3);
assert.ok(!result4);
});
});
});