付智勇

no message

Too many changes to display.

For performance, only 38 of 38+ files are shown.

... ... @@ -3,6 +3,8 @@ var saitMd5 = require('../util/saltMD5')
var status = require('../util/resTemplate')
const userService = require('../services/userService')
const uuid = require('../util/UuidUtil')
const redis = require('../util/redis')
var userController =function (){
... ... @@ -19,14 +21,30 @@ var userController =function (){
userController.prototype.addUser = async(ctx, next) =>{
try{
var params = ctx.request.body;
const pw = saitMd5.md5AddSalt(params.password)
var pw = saitMd5.md5AddSalt(params.password)
if(!params.loginName){
return status.paramError('loginName');
}else if(!params.password){
return status.paramError('password','不能为空');
}else if(params.password.length < 6){
return status.paramError('password','不得小于6位');
}else if(!params.userEmail){
return status.paramError('userEmail');
}
if(params.userRole == 2 || params.userRole == 3){
if(!params.IDcard){
return status.paramError('IDcard','不能为空');
}else if(!params.userName){
return status.paramError('userName','不能为空');
}
pw = saitMd5.md5AddSalt('123456')
params.loginName = Math.random().toString(24).substr(9);
}else if(params.userRole == 4){
if(!params.userMobile){
return status.paramError('userMobile','不能为空');
}else if(!params.password){
return status.paramError('password','不能为空');
}else if(params.password.length < 6){
return status.paramError('password','不得小于6位');
}
}
var user = {
... ... @@ -34,6 +52,7 @@ userController.prototype.addUser = async(ctx, next) =>{
password:pw.md5Pass,
companyName:params.companyName,
userName:params.userName,
IDcard:params.IDcard,
salt:pw.salt,
userType:params.userType,
userRole:params.userRole,
... ... @@ -54,16 +73,19 @@ userController.prototype.addUser = async(ctx, next) =>{
* 用户登录
*/
userController.prototype.login = async(ctx, next) =>{
try{
const body = ctx.request.body;
if(!body.name){
return status.paramError('name');
await redis.setToken('qwe123','qwe123');
let redisCode = await redis.getToken('qwe123')
console.log(redisCode)
if(redisCode != body.code ){
}else if(!body.loginName&&!body.userEmail){
return status.paramError('userEmail loginName');
}else if(!body.password){
return status.paramError('password');
}
try{
let userData = await userService.login(body.name ,body.password);
let userData = await userService.login(body.loginName ,body.password,body.userEmail);
var userBack = {
id:userData.id,
loginName:userData.loginName,
... ... @@ -97,6 +119,7 @@ userController.prototype.getStu = async(ctx, next) =>{
throw new Error(error)
}
}
userController.prototype.updateUserByUserId = async(ctx, next) =>{
const userId = ctx.params.userId;
const updateData = ctx.request.body;
... ... @@ -147,4 +170,21 @@ userController.prototype.resetPasswordByUserId = async(ctx,next) =>{
}
}
userController.prototype.updatePwByTelphone = async(ctx, next) =>{
const code = ctx.request.body.code;
const telephone = ctx.request.body.telephone;
const password = ctx.request.body.password;
const pw = saitMd5.md5AddSalt(password);
let setCode = await redis.setToken(code,code)
let redisCode = await redis.getToken(code);
if(code != redisCode){
return {code:500,msg:'验证码错误'};
}else if(!password){
return {code:501,msg:'密码不能为空'};
}
let updatePW = await userService.updatePwByTelphone(telephone,code,pw)
return {code:redisCode}
}
module.exports = new userController();
\ No newline at end of file
... ...
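In the `updatePwByTelphone` handler above, the submitted code is written into Redis and immediately read back, so the check effectively always passes, and the hard-coded `qwe123` token in `login` is only a placeholder. One way the verification step could be ordered instead is sketched below, reusing only the helpers already required in this file (`redis.getToken`/`redis.setToken`, `saitMd5.md5AddSalt`, `userService.updatePwByTelphone`); the `sms:` key prefix, result codes, and messages are illustrative assumptions, not part of the commit:

```js
// Sketch only – assumes the userController.js context with the same requires as in the diff:
//   const redis = require('../util/redis')
//   const saitMd5 = require('../util/saltMD5')
//   const userService = require('../services/userService')
// The 'sms:' key prefix and the numeric result codes are illustrative, not from the commit.
userController.prototype.updatePwByTelphone = async (ctx, next) => {
  const { code, telephone, password } = ctx.request.body;
  if (!password || password.length < 6) {
    return { code: 501, msg: '密码不得小于6位' };
  }
  // The code should have been stored earlier by a "send SMS code" endpoint,
  // e.g. `await redis.setToken('sms:' + telephone, generatedCode)` – never inside this handler.
  const storedCode = await redis.getToken('sms:' + telephone);
  if (!storedCode || storedCode !== code) {
    return { code: 500, msg: '验证码错误' };
  }
  const pw = saitMd5.md5AddSalt(password);            // fresh salt + hash
  await userService.updatePwByTelphone(telephone, code, pw);
  return { code: 200, msg: '密码已更新' };
};
```

The same ordering applies to the code check in `login`: read the code previously stored under the user's key and compare it, rather than seeding Redis inside the handler being verified.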
... ... @@ -14,8 +14,10 @@
名称|类型|描述
:--|:--|:--
|loginName |string |必须, 登录名 |
|password |string |必须,密码 |
|type | int |用户类型:0管理员 1监课 2老师 4学生 |
|password |string |必须,密码 |
|userEmail |string |必须,邮箱 |
|userMobile |string |必须,用户手机号 |
请求示例:
... ... @@ -23,10 +25,15 @@
body
{
"loginName": "admin",
"telephone": 11112112,
"loginName": "尼安德特",
"password": "123456",
"type":1
"companyName":"bat",
"userName":"尼安德特人",
"userRole":0,
"userEmail":"kengni@buyouyu.com",
"userMobile":123124,
"content":"2017-09-01",
"groupId":1
}
返回参数:
... ...
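For reference, a client call matching the updated parameter table might look like the sketch below. The endpoint path and port are placeholders (the route definition is not among the visible changes), the field values simply mirror the example body above, and it assumes Node 18+ for the global `fetch`:

```js
// Hypothetical URL – substitute the project's real add-user route.
(async () => {
  const res = await fetch('http://localhost:3000/users', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({
      loginName: '尼安德特',
      password: '123456',
      companyName: 'bat',
      userName: '尼安德特人',
      userRole: 0,
      userEmail: 'kengni@buyouyu.com',
      userMobile: 123124,
      groupId: 1
    })
  });
  console.log(await res.json());
})();
```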
This file type cannot be previewed.
var sequelize = require('../config');
var Sequelize = require('sequelize');
const uuid = require('../util/UuidUtil')
var user = sequelize.define('3m_student_meeting', {
id: {
type: Sequelize.STRING(32),
defaultValue:uuid.db32(),
allowNull: false,
unique: true,
primaryKey: true,
field: "id"
},
studentId: {
allowNull: false,
type:Sequelize.STRING(32),
field: "student_id"
},
meetingId: {
allowNull: false,
type:Sequelize.STRING(32),
field: "meeting_id"
},
});
\ No newline at end of file
... ...
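A usage sketch for the new `3m_student_meeting` model above. Two caveats worth noting: the snippet does not show a `module.exports`, so the export below is assumed, and `defaultValue: uuid.db32()` is evaluated once when the model is defined, so every row created without an explicit id would receive the same default value; the sketch therefore generates the id per insert. The require path for the model is hypothetical.

```js
// Hypothetical paths; assumes the model file exports the definition
// (e.g. ends with `module.exports = user;`, which is not shown above).
const StudentMeeting = require('../models/studentMeeting');
const uuid = require('../util/UuidUtil');

async function linkStudentToMeeting(studentId, meetingId) {
  // Generate a fresh 32-char id per row instead of relying on the
  // one-time defaultValue computed at define() time.
  return StudentMeeting.create({
    id: uuid.db32(),
    studentId,
    meetingId
  });
}
```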
... ... @@ -17,6 +17,11 @@ var user = sequelize.define('3m_user', {
type:Sequelize.STRING(100),
field: "login_name"
},
IDcard: {
allowNull: false,
type:Sequelize.STRING(50),
field: "IDcard"
},
password: {
allowNull: false,
type:Sequelize.STRING(100),
... ...
node_modules/*
todo.txt
npm-debug.log
test/*
benchmark/*
browser/*
src/*
async
sync
mixed
bench.json
js/browser
js/browser/*
js/debug
js/debug/*
reader.js
read.txt
bench
.editorconfig
.jshintrc
ast_passes.js
mocharun.js
throwaway.js
throwaway.html
deque.sublime-workspace
deque.sublime-project
changelog.js
.travis.yml
sauce_connect.log
nodex64.exe
bump.js
... ...
"use strict";
Error.stackTraceLimit = 100;
var astPasses = require("./ast_passes.js");
module.exports = function( grunt ) {
var isCI = !!grunt.option("ci");
var license;
function getLicense() {
if( !license ) {
var fs = require("fs");
var text = fs.readFileSync("LICENSE", "utf8");
text = text.split("\n").map(function(line, index){
return " * " + line;
}).join("\n")
license = "/**\n" + text + "\n */\n";
}
return license
}
function writeFile( dest, content ) {
grunt.file.write( dest, content );
grunt.log.writeln('File "' + dest + '" created.');
}
var gruntConfig = {};
var getGlobals = function() {
var fs = require("fs");
var file = "./src/constants.js";
var contents = fs.readFileSync(file, "utf8");
var rconstantname = /CONSTANT\(\s*([^,]+)/g;
var m;
var globals = {
"console": false,
"require": false,
"module": false,
"define": false
};
while( ( m = rconstantname.exec( contents ) ) ) {
globals[m[1]] = false;
}
return globals;
}
gruntConfig.pkg = grunt.file.readJSON("package.json");
gruntConfig.jshint = {
all: {
options: {
globals: getGlobals(),
"bitwise": false,
"camelcase": true,
"curly": true,
"eqeqeq": true,
"es3": true,
"forin": true,
"immed": true,
"latedef": false,
"newcap": true,
"noarg": true,
"noempty": true,
"nonew": true,
"plusplus": false,
"quotmark": "double",
"undef": true,
"unused": true,
"strict": false,
"trailing": true,
"maxparams": 7,
"maxlen": 80,
"asi": false,
"boss": true,
"eqnull": true,
"evil": true,
"expr": false,
"funcscope": false,
"globalstrict": false,
"lastsemic": false,
"laxcomma": false,
"laxbreak": false,
"loopfunc": true,
"multistr": true,
"proto": false,
"scripturl": true,
"smarttabs": false,
"shadow": true,
"sub": true,
"supernew": false,
"validthis": true,
"browser": true,
"jquery": true,
"devel": true,
'-W014': true,
'-W116': true,
'-W106': true,
'-W064': true,
'-W097': true
},
files: {
src: [
"./src/deque.js"
]
}
}
};
if( !isCI ) {
gruntConfig.jshint.all.options.reporter = require("jshint-stylish");
}
gruntConfig.bump = {
options: {
files: ['package.json'],
updateConfigs: [],
commit: true,
commitMessage: 'Release v%VERSION%',
commitFiles: ['-a'],
createTag: true,
tagName: 'v%VERSION%',
tagMessage: 'Version %VERSION%',
false: true,
pushTo: 'master',
gitDescribeOptions: '--tags --always --abbrev=1 --dirty=-d' // options to use with '$ git describe'
}
};
grunt.initConfig(gruntConfig);
grunt.loadNpmTasks('grunt-contrib-jshint');
grunt.loadNpmTasks('grunt-bump');
grunt.registerTask( "build", function() {
var fs = require("fs");
var CONSTANTS_FILE = "./src/constants.js";
astPasses.readConstants(fs.readFileSync(CONSTANTS_FILE, "utf8"), CONSTANTS_FILE);
var fileNames = ["deque.js"];
fileNames.forEach(function(fileName){
var src = fs.readFileSync("./src/" + fileName, "utf8");
src = astPasses.removeComments(src, fileName);
src = astPasses.expandConstants(src, fileName);
src = getLicense() + src;
writeFile("./js/" + fileName, src);
});
});
grunt.registerTask( "testrun", function() {
var fs = require("fs");
var done = this.async();
var Mocha = require("mocha");
var mochaOpts = {
reporter: "spec",
timeout: 500,
slow: Infinity
};
var mocha = new Mocha(mochaOpts);
fs.readdirSync("./test").forEach(function(fileName) {
mocha.addFile("./test/" + fileName);
});
mocha.run(function(err){
if( err ) {
process.stderr.write(test.title + "\n" + err.stack + "\n");
done(err);
}
else {
done();
}
}).on( "fail", function( test, err ) {
process.stderr.write(test.title + "\n" + err.stack + "\n");
done(err);
});
});
grunt.registerTask( "test", ["jshint", "build", "testrun"] );
grunt.registerTask( "default", ["jshint", "build"] );
};
... ...
Copyright (c) 2013 Petka Antonov
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
\ No newline at end of file
... ...
#Introduction
Extremely fast [double-ended queue](http://en.wikipedia.org/wiki/Double-ended_queue) implementation. Double-ended queue can also be used as a:
- [Stack](http://en.wikipedia.org/wiki/Stack_\(abstract_data_type\))
- [Queue](http://en.wikipedia.org/wiki/Queue_\(data_structure\))
The implementation is GC and CPU cache friendly [circular buffer](http://en.wikipedia.org/wiki/Circular_buffer). [It will run circles around any "linked list" implementation](#performance).
Every queue operation is done in constant `O(1)` - including random access from `.get()`.
#Topics
- [Quick start](#quick-start)
- [Why not use an Array?](#why-not-use-an-array)
- [Using double-ended queue as a normal queue](#using-double-ended-queue-as-a-normal-queue)
- [API reference and examples](#api)
- [Performance](#performance)
#Quick start
npm install double-ended-queue
```js
var Deque = require("double-ended-queue");
var deque = new Deque([1,2,3,4]);
deque.shift(); //1
deque.pop(); //4
```
#Why not use an Array?
Arrays take linear `O(N)` time to do `shift` and `unshift` operations. That means in theory that an array with 1000 items is 1000x slower to do those operations than a deque with 1000 items. 10000x slower with 10000 items and so on.
V8 implements [a trick for small arrays](https://code.google.com/p/v8/issues/detail?id=3059) where these operations are done in constant time, however even with this trick deque is still 4x faster.
But arrays use "native" methods, they must be faster!
In V8, there is almost no advantage for a method to be a built-in. In fact many times built-ins are at a severe disadvantage of having to implement far more complex semantics than is actually needed in practice. For example, sparse array handling punishes almost every built-in array method even though nobody uses sparse arrays as is evidenced by the popularity of the underscore library which doesn't handle sparse arrays in the same way across different browsers.
#Using double-ended queue as a normal queue
Queue is a more commonly needed data structure; however, a separate implementation does not provide any advantage in terms of performance. Aliases are provided specifically for the queue use-case. You may use `.enqueue(items...)` to enqueue item(s) and `.dequeue()` to dequeue an item.
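For example, using the aliases just mentioned, a deque can be driven purely as a FIFO queue:

```js
var Deque = require("double-ended-queue");

var queue = new Deque();
queue.enqueue("a");        // alias of push – adds to the back
queue.enqueue("b", "c");
queue.dequeue();           // "a"  (alias of shift – removes from the front)
queue.dequeue();           // "b"
```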
#API
- [`new Deque()`](#new-deque---deque)
- [`new Deque(Array items)`](#new-dequearray-items---deque)
- [`new Deque(int capacity)`](#new-dequeint-capacity---deque)
- [`push(dynamic items...)`](#pushdynamic-items---int)
- [`unshift(dynamic items...)`](#unshiftdynamic-items---int)
- [`pop()`](#pop---dynamic)
- [`shift()`](#shift---dynamic)
- [`toArray()`](#toarray---array)
- [`peekBack()`](#peekback---dynamic)
- [`peekFront()`](#peekfront---dynamic)
- [`get(int index)`](#getint-index---dynamic)
- [`isEmpty()`](#isempty---boolean)
- [`clear()`](#clear---void)
#####`new Deque()` -> `Deque`
Creates an empty double-ended queue with initial capacity of 16. If you know the optimal size before-hand, use [`new Deque(int capacity)`](#new-dequeint-capacity---deque).
```js
var deque = new Deque();
deque.push(1, 2, 3);
deque.shift(); //1
deque.pop(); //3
```
<hr>
#####`new Deque(Array items)` -> `Deque`
Creates a double-ended queue from `items`.
```js
var deque = new Deque([1,2,3,4]);
deque.shift(); //1
deque.pop(); //4
```
<hr>
#####`new Deque(int capacity)` -> `Deque`
Creates an empty double-ended queue with the given `capacity`. `Capacity` should be the maximum amount of items the queue will hold at a given time.
The reason to give an initial capacity is to avoid potentially expensive resizing operations at runtime.
```js
var deque = new Deque(100);
deque.push(1, 2, 3);
deque.shift(); //1
deque.pop(); //3
```
<hr>
#####`push(dynamic items...)` -> `int`
Push items to the back of this queue. Returns the amount of items currently in the queue after the operation.
```js
var deque = new Deque();
deque.push(1);
deque.pop(); //1
deque.push(1, 2, 3);
deque.shift(); //1
deque.shift(); //2
deque.shift(); //3
```
**Aliases:** `enqueue`, `insertBack`
<hr>
#####`unshift(dynamic items...)` -> `int`
Unshift items to the front of this queue. Returns the amount of items currently in the queue after the operation.
```js
var deque = new Deque([2,3]);
deque.unshift(1);
deque.toString(); //"1,2,3"
deque.unshift(-2, -1, 0);
deque.toString(); //"-2,-1,0,1,2,3"
```
**Aliases:** `insertFront`
<hr>
#####`pop()` -> `dynamic`
Pop off the item at the back of this queue.
Note: The item will be removed from the queue. If you simply want to see what's at the back of the queue use [`peekBack()`](#peekback---dynamic) or [`.get(-1)`](#getint-index---dynamic).
If the queue is empty, `undefined` is returned. If you need to differentiate between `undefined` values in the queue and `pop()` return value -
check the queue `.length` before popping.
```js
var deque = new Deque([1,2,3]);
deque.pop(); //3
deque.pop(); //2
deque.pop(); //1
deque.pop(); //undefined
```
**Aliases:** `removeBack`
<hr>
#####`shift()` -> `dynamic`
Shifts off the item at the front of this queue.
Note: The item will be removed from the queue. If you simply want to see what's at the front of the queue use [`peekFront()`](#peekfront---dynamic) or [`.get(0)`](#getint-index---dynamic).
If the queue is empty, `undefined` is returned. If you need to differentiate between `undefined` values in the queue and `shift()` return value -
check the queue `.length` before shifting.
```js
var deque = new Deque([1,2,3]);
deque.shift(); //1
deque.shift(); //2
deque.shift(); //3
deque.shift(); //undefined
```
**Aliases:** `removeFront`, `dequeue`
<hr>
#####`toArray()` -> `Array`
Returns the items in the queue as an array, starting from the item at the front of the queue and ending with the item at the back of the queue.
```js
var deque = new Deque([1,2,3]);
deque.push(4);
deque.unshift(0);
deque.toArray(); //[0,1,2,3,4]
```
**Aliases:** `toJSON`
<hr>
#####`peekBack()` -> `dynamic`
Returns the item that is at the back of this queue without removing it.
If the queue is empty, `undefined` is returned.
```js
var deque = new Deque([1,2,3]);
deque.push(4);
deque.peekBack(); //4
```
<hr>
#####`peekFront()` -> `dynamic`
Returns the item that is at the front of this queue without removing it.
If the queue is empty, `undefined` is returned.
```js
var deque = new Deque([1,2,3]);
deque.push(4);
deque.peekFront(); //1
```
<hr>
#####`get(int index)` -> `dynamic`
Returns the item that is at the given `index` of this queue without removing it.
The index is zero-based, so `.get(0)` will return the item that is at the front, `.get(1)` will return
the item that comes after and so on.
The index can be negative to read items at the back of the queue. `.get(-1)` returns the item that is at the back of the queue,
`.get(-2)` will return the item that comes before and so on.
Returns `undefined` if `index` is not a valid index into the queue.
```js
var deque = new Deque([1,2,3]);
deque.get(0); //1
deque.get(1); //2
deque.get(2); //3
deque.get(-1); //3
deque.get(-2); //2
deque.get(-3); //1
```
**Note**: Even though indexed accessor (e.g. `queue[0]`) could *appear* to return a correct value *sometimes*, this is completely unreliable. The numeric slots
of the deque object are internally used as an optimization and have no meaningful order or meaning to the outside. Always use `.get()`.
**Note**: The implementation has O(1) random access using `.get()`.
<hr>
#####`isEmpty()` -> `boolean`
Return `true` if this queue is empty, `false` otherwise.
```js
var deque = new Deque();
deque.isEmpty(); //true
deque.push(1);
deque.isEmpty(); //false
```
<hr>
#####`clear()` -> `void`
Remove all items from this queue. Does not change the queue's capacity.
```js
var deque = new Deque([1,2,3]);
deque.toString(); //"1,2,3"
deque.clear();
deque.toString(); //""
```
<hr>
#Performance
Clone the repo and `npm install`. Then run the `bench` script.
##1000 items in the queue
double-ended-queue x 15,532,714 ops/sec ±0.19% (96 runs sampled)
built-in array x 6,501,398 ops/sec ±0.87% (95 runs sampled)
node-deque x 2,938,068 ops/sec ±3.50% (68 runs sampled)
##2 million items in the queue
double-ended-queue x 14,425,547 ops/sec ±0.17% (94 runs sampled)
node-deque x 2,815,628 ops/sec ±10.56% (76 runs sampled)
built-in array x 19.23 ops/sec ±0.35% (51 runs sampled)
Noteworthy is just how bad the degradation can be for built-in array when V8 cannot use the trick.
... ...
/**
* Copyright (c) 2013 Petka Antonov
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
 * furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
"use strict";
function Deque(capacity) {
this._capacity = getCapacity(capacity);
this._length = 0;
this._front = 0;
if (isArray(capacity)) {
var len = capacity.length;
for (var i = 0; i < len; ++i) {
this[i] = capacity[i];
}
this._length = len;
}
}
Deque.prototype.toArray = function Deque$toArray() {
var len = this._length;
var ret = new Array(len);
var front = this._front;
var capacity = this._capacity;
for (var j = 0; j < len; ++j) {
ret[j] = this[(front + j) & (capacity - 1)];
}
return ret;
};
Deque.prototype.push = function Deque$push(item) {
var argsLength = arguments.length;
var length = this._length;
if (argsLength > 1) {
var capacity = this._capacity;
if (length + argsLength > capacity) {
for (var i = 0; i < argsLength; ++i) {
this._checkCapacity(length + 1);
var j = (this._front + length) & (this._capacity - 1);
this[j] = arguments[i];
length++;
this._length = length;
}
return length;
}
else {
var j = this._front;
for (var i = 0; i < argsLength; ++i) {
this[(j + length) & (capacity - 1)] = arguments[i];
j++;
}
this._length = length + argsLength;
return length + argsLength;
}
}
if (argsLength === 0) return length;
this._checkCapacity(length + 1);
var i = (this._front + length) & (this._capacity - 1);
this[i] = item;
this._length = length + 1;
return length + 1;
};
Deque.prototype.pop = function Deque$pop() {
var length = this._length;
if (length === 0) {
return void 0;
}
var i = (this._front + length - 1) & (this._capacity - 1);
var ret = this[i];
this[i] = void 0;
this._length = length - 1;
return ret;
};
Deque.prototype.shift = function Deque$shift() {
var length = this._length;
if (length === 0) {
return void 0;
}
var front = this._front;
var ret = this[front];
this[front] = void 0;
this._front = (front + 1) & (this._capacity - 1);
this._length = length - 1;
return ret;
};
Deque.prototype.unshift = function Deque$unshift(item) {
var length = this._length;
var argsLength = arguments.length;
if (argsLength > 1) {
var capacity = this._capacity;
if (length + argsLength > capacity) {
for (var i = argsLength - 1; i >= 0; i--) {
this._checkCapacity(length + 1);
var capacity = this._capacity;
var j = (((( this._front - 1 ) &
( capacity - 1) ) ^ capacity ) - capacity );
this[j] = arguments[i];
length++;
this._length = length;
this._front = j;
}
return length;
}
else {
var front = this._front;
for (var i = argsLength - 1; i >= 0; i--) {
var j = (((( front - 1 ) &
( capacity - 1) ) ^ capacity ) - capacity );
this[j] = arguments[i];
front = j;
}
this._front = front;
this._length = length + argsLength;
return length + argsLength;
}
}
if (argsLength === 0) return length;
this._checkCapacity(length + 1);
var capacity = this._capacity;
var i = (((( this._front - 1 ) &
( capacity - 1) ) ^ capacity ) - capacity );
this[i] = item;
this._length = length + 1;
this._front = i;
return length + 1;
};
Deque.prototype.peekBack = function Deque$peekBack() {
var length = this._length;
if (length === 0) {
return void 0;
}
var index = (this._front + length - 1) & (this._capacity - 1);
return this[index];
};
Deque.prototype.peekFront = function Deque$peekFront() {
if (this._length === 0) {
return void 0;
}
return this[this._front];
};
Deque.prototype.get = function Deque$get(index) {
var i = index;
if ((i !== (i | 0))) {
return void 0;
}
var len = this._length;
if (i < 0) {
i = i + len;
}
if (i < 0 || i >= len) {
return void 0;
}
return this[(this._front + i) & (this._capacity - 1)];
};
Deque.prototype.isEmpty = function Deque$isEmpty() {
return this._length === 0;
};
Deque.prototype.clear = function Deque$clear() {
var len = this._length;
var front = this._front;
var capacity = this._capacity;
for (var j = 0; j < len; ++j) {
this[(front + j) & (capacity - 1)] = void 0;
}
this._length = 0;
this._front = 0;
};
Deque.prototype.toString = function Deque$toString() {
return this.toArray().toString();
};
Deque.prototype.valueOf = Deque.prototype.toString;
Deque.prototype.removeFront = Deque.prototype.shift;
Deque.prototype.removeBack = Deque.prototype.pop;
Deque.prototype.insertFront = Deque.prototype.unshift;
Deque.prototype.insertBack = Deque.prototype.push;
Deque.prototype.enqueue = Deque.prototype.push;
Deque.prototype.dequeue = Deque.prototype.shift;
Deque.prototype.toJSON = Deque.prototype.toArray;
Object.defineProperty(Deque.prototype, "length", {
get: function() {
return this._length;
},
set: function() {
throw new RangeError("");
}
});
Deque.prototype._checkCapacity = function Deque$_checkCapacity(size) {
if (this._capacity < size) {
this._resizeTo(getCapacity(this._capacity * 1.5 + 16));
}
};
Deque.prototype._resizeTo = function Deque$_resizeTo(capacity) {
var oldCapacity = this._capacity;
this._capacity = capacity;
var front = this._front;
var length = this._length;
if (front + length > oldCapacity) {
var moveItemsCount = (front + length) & (oldCapacity - 1);
arrayMove(this, 0, this, oldCapacity, moveItemsCount);
}
};
var isArray = Array.isArray;
function arrayMove(src, srcIndex, dst, dstIndex, len) {
for (var j = 0; j < len; ++j) {
dst[j + dstIndex] = src[j + srcIndex];
src[j + srcIndex] = void 0;
}
}
function pow2AtLeast(n) {
n = n >>> 0;
n = n - 1;
n = n | (n >> 1);
n = n | (n >> 2);
n = n | (n >> 4);
n = n | (n >> 8);
n = n | (n >> 16);
return n + 1;
}
function getCapacity(capacity) {
if (typeof capacity !== "number") {
if (isArray(capacity)) {
capacity = capacity.length;
}
else {
return 16;
}
}
return pow2AtLeast(
Math.min(
Math.max(16, capacity), 1073741824)
);
}
module.exports = Deque;
... ...
{
"_args": [
[
{
"raw": "double-ended-queue@^2.1.0-0",
"scope": null,
"escapedName": "double-ended-queue",
"name": "double-ended-queue",
"rawSpec": "^2.1.0-0",
"spec": ">=2.1.0-0 <3.0.0",
"type": "range"
},
"/Users/fzy/project/koa2_Sequelize_project/node_modules/redis"
]
],
"_from": "double-ended-queue@>=2.1.0-0 <3.0.0",
"_id": "double-ended-queue@2.1.0-0",
"_inCache": true,
"_location": "/double-ended-queue",
"_nodeVersion": "0.10.34",
"_npmUser": {
"name": "esailija",
"email": "petka_antonov@hotmail.com"
},
"_npmVersion": "2.1.12",
"_phantomChildren": {},
"_requested": {
"raw": "double-ended-queue@^2.1.0-0",
"scope": null,
"escapedName": "double-ended-queue",
"name": "double-ended-queue",
"rawSpec": "^2.1.0-0",
"spec": ">=2.1.0-0 <3.0.0",
"type": "range"
},
"_requiredBy": [
"/redis"
],
"_resolved": "https://registry.npmjs.org/double-ended-queue/-/double-ended-queue-2.1.0-0.tgz",
"_shasum": "103d3527fd31528f40188130c841efdd78264e5c",
"_shrinkwrap": null,
"_spec": "double-ended-queue@^2.1.0-0",
"_where": "/Users/fzy/project/koa2_Sequelize_project/node_modules/redis",
"author": {
"name": "Petka Antonov",
"email": "petka_antonov@hotmail.com",
"url": "http://github.com/petkaantonov/"
},
"bugs": {
"url": "http://github.com/petkaantonov/deque/issues"
},
"dependencies": {},
"description": "Extremely fast double-ended queue implementation",
"devDependencies": {
"acorn": "~0.3.1",
"benchmark": "~1.0.0",
"bluebird": "~0.11",
"deque": "0.0.4",
"grunt": "~0.4.1",
"grunt-cli": "~0.1.9",
"grunt-contrib-jshint": "~0.6.4",
"jshint-stylish": "latest",
"mocha": "~1.12.1",
"q": "~0.9.7",
"semver-utils": "~1.1.0"
},
"directories": {},
"dist": {
"shasum": "103d3527fd31528f40188130c841efdd78264e5c",
"tarball": "https://registry.npmjs.org/double-ended-queue/-/double-ended-queue-2.1.0-0.tgz"
},
"gitHead": "51eada75cea686f1eb0c8bb5be486ac630e9b7ee",
"homepage": "https://github.com/petkaantonov/deque",
"keywords": [
"data-structure",
"data-structures",
"queue",
"deque",
"double-ended-queue"
],
"license": "MIT",
"main": "./js/deque.js",
"maintainers": [
{
"name": "esailija",
"email": "petka_antonov@hotmail.com"
}
],
"name": "double-ended-queue",
"optionalDependencies": {},
"readme": "#Introduction\n\nExtremely fast [double-ended queue](http://en.wikipedia.org/wiki/Double-ended_queue) implementation. Double-ended queue can also be used as a:\n\n- [Stack](http://en.wikipedia.org/wiki/Stack_\\(abstract_data_type\\))\n- [Queue](http://en.wikipedia.org/wiki/Queue_\\(data_structure\\))\n\nThe implementation is GC and CPU cache friendly [circular buffer](http://en.wikipedia.org/wiki/Circular_buffer). [It will run circles around any \"linked list\" implementation](#performance).\n\nEvery queue operation is done in constant `O(1)` - including random access from `.get()`.\n\n#Topics\n\n- [Quick start](#quick-start)\n- [Why not use an Array?](#why-not-use-an-array)\n- [Using double-ended queue as a normal queue](#using-double-ended-queue-as-a-normal-queue)\n- [API reference and examples](#api)\n- [Performance](#performance)\n\n#Quick start\n\n npm install double-ended-queue\n\n```js\nvar Deque = require(\"double-ended-queue\");\n\nvar deque = new Deque([1,2,3,4]);\ndeque.shift(); //1\ndeque.pop(); //4\n```\n\n#Why not use an Array?\n\nArrays take linear `O(N)` time to do `shift` and `unshift` operations. That means in theory that an array with 1000 items is 1000x slower to do those operations than a deque with 1000 items. 10000x slower with 10000 items and so on.\n\nV8 implements [a trick for small arrays](https://code.google.com/p/v8/issues/detail?id=3059) where these operations are done in constant time, however even with this trick deque is still 4x faster.\n\nBut arrays use \"native\" methods, they must be faster!\n\nIn V8, there is almost no advantage for a method to be a built-in. In fact many times built-ins are at a severe disadvantage of having to implement far more complex semantics than is actually needed in practice. For example, sparse array handling punishes almost every built-in array method even though nobody uses sparse arrays as is evidenced by the popularity of the underscore library which doesn't handle sparse arrays in the same way across different browsers.\n\n#Using double-ended queue as a normal queue\n\nQueue is a more commonly needed data structure however a separate implementation does not provide any advantage in terms of performance. Aliases are provided specifically for the queue use-case. You may use `.enqueue(items...)` to enqueue item(s) and `.dequeue()` to dequeue an item.\n\n#API\n\n- [`new Deque()`](#new-deque---deque)\n- [`new Deque(Array items)`](#new-dequearray-items---deque)\n- [`new Deque(int capacity)`](#new-dequeint-capacity---deque)\n- [`push(dynamic items...)`](#pushdynamic-items---int)\n- [`unshift(dynamic items...)`](#unshiftdynamic-items---int)\n- [`pop()`](#pop---dynamic)\n- [`shift()`](#shift---dynamic)\n- [`toArray()`](#toarray---array)\n- [`peekBack()`](#peekback---dynamic)\n- [`peekFront()`](#peekfront---dynamic)\n- [`get(int index)`](#getint-index---dynamic)\n- [`isEmpty()`](#isempty---boolean)\n- [`clear()`](#clear---void)\n\n#####`new Deque()` -> `Deque`\n\nCreates an empty double-ended queue with initial capacity of 16. 
If you know the optimal size before-hand, use [`new Deque(int capacity)`](#new-dequeint-capacity---deque).\n\n```js\nvar deque = new Deque();\ndeque.push(1, 2, 3);\ndeque.shift(); //1\ndeque.pop(); //3\n```\n\n<hr>\n\n#####`new Deque(Array items)` -> `Deque`\n\nCreates a double-ended queue from `items`.\n\n```js\nvar deque = new Deque([1,2,3,4]);\ndeque.shift(); //1\ndeque.pop(); //4\n```\n\n<hr>\n\n#####`new Deque(int capacity)` -> `Deque`\n\nCreates an empty double-ended queue with the given `capacity`. `Capacity` should be the maximum amount of items the queue will hold at a given time.\n\nThe reason to give an initial capacity is to avoid potentially expensive resizing operations at runtime.\n\n```js\nvar deque = new Deque(100);\ndeque.push(1, 2, 3);\ndeque.shift(); //1\ndeque.pop(); //3\n```\n\n<hr>\n\n#####`push(dynamic items...)` -> `int`\n\nPush items to the back of this queue. Returns the amount of items currently in the queue after the operation.\n\n```js\nvar deque = new Deque();\ndeque.push(1);\ndeque.pop(); //1\ndeque.push(1, 2, 3);\ndeque.shift(); //1\ndeque.shift(); //2\ndeque.shift(); //3\n```\n\n**Aliases:** `enqueue`, `insertBack`\n\n<hr>\n\n#####`unshift(dynamic items...)` -> `int`\n\nUnshift items to the front of this queue. Returns the amount of items currently in the queue after the operation.\n\n```js\nvar deque = new Deque([2,3]);\ndeque.unshift(1);\ndeque.toString(); //\"1,2,3\"\ndeque.unshift(-2, -1, 0);\ndeque.toString(); //\"-2,-1,0,1,2,3\"\n```\n\n**Aliases:** `insertFront`\n\n<hr>\n\n\n#####`pop()` -> `dynamic`\n\nPop off the item at the back of this queue.\n\nNote: The item will be removed from the queue. If you simply want to see what's at the back of the queue use [`peekBack()`](#peekback---dynamic) or [`.get(-1)`](#getint-index---dynamic).\n\nIf the queue is empty, `undefined` is returned. If you need to differentiate between `undefined` values in the queue and `pop()` return value -\ncheck the queue `.length` before popping.\n\n```js\nvar deque = new Deque([1,2,3]);\ndeque.pop(); //3\ndeque.pop(); //2\ndeque.pop(); //1\ndeque.pop(); //undefined\n```\n\n**Aliases:** `removeBack`\n\n<hr>\n\n#####`shift()` -> `dynamic`\n\nShifts off the item at the front of this queue.\n\nNote: The item will be removed from the queue. If you simply want to see what's at the front of the queue use [`peekFront()`](#peekfront---dynamic) or [`.get(0)`](#getint-index---dynamic).\n\nIf the queue is empty, `undefined` is returned. If you need to differentiate between `undefined` values in the queue and `shift()` return value -\ncheck the queue `.length` before shifting.\n\n```js\nvar deque = new Deque([1,2,3]);\ndeque.shift(); //1\ndeque.shift(); //2\ndeque.shift(); //3\ndeque.shift(); //undefined\n```\n\n**Aliases:** `removeFront`, `dequeue`\n\n<hr>\n\n#####`toArray()` -> `Array`\n\nReturns the items in the queue as an array. 
Starting from the item in the front of the queue and ending to the item at the back of the queue.\n\n```js\nvar deque = new Deque([1,2,3]);\ndeque.push(4);\ndeque.unshift(0);\ndeque.toArray(); //[0,1,2,3,4]\n```\n\n**Aliases:** `toJSON`\n\n<hr>\n\n#####`peekBack()` -> `dynamic`\n\nReturns the item that is at the back of this queue without removing it.\n\nIf the queue is empty, `undefined` is returned.\n\n```js\nvar deque = new Deque([1,2,3]);\ndeque.push(4);\ndeque.peekBack(); //4\n```\n\n<hr>\n\n#####`peekFront()` -> `dynamic`\n\nReturns the item that is at the front of this queue without removing it.\n\nIf the queue is empty, `undefined` is returned.\n\n```js\nvar deque = new Deque([1,2,3]);\ndeque.push(4);\ndeque.peekFront(); //1\n```\n\n<hr>\n\n#####`get(int index)` -> `dynamic`\n\nReturns the item that is at the given `index` of this queue without removing it.\n\nThe index is zero-based, so `.get(0)` will return the item that is at the front, `.get(1)` will return\nthe item that comes after and so on.\n\nThe index can be negative to read items at the back of the queue. `.get(-1)` returns the item that is at the back of the queue,\n`.get(-2)` will return the item that comes before and so on.\n\nReturns `undefined` if `index` is not a valid index into the queue.\n\n```js\nvar deque = new Deque([1,2,3]);\ndeque.get(0); //1\ndeque.get(1); //2\ndeque.get(2); //3\n\ndeque.get(-1); //3\ndeque.get(-2); //2\ndeque.get(-3); //1\n```\n\n**Note**: Even though indexed accessor (e.g. `queue[0]`) could *appear* to return a correct value *sometimes*, this is completely unreliable. The numeric slots\nof the deque object are internally used as an optimization and have no meaningful order or meaning to outside. Always use `.get()`.\n\n**Note**: The implementation has O(1) random access using `.get()`.\n\n<hr>\n\n#####`isEmpty()` -> `boolean`\n\nReturn `true` if this queue is empty, `false` otherwise.\n\n```js\nvar deque = new Deque();\ndeque.isEmpty(); //true\ndeque.push(1);\ndeque.isEmpty(); //false\n```\n\n<hr>\n\n#####`clear()` -> `void`\n\nRemove all items from this queue. Does not change the queue's capacity.\n\n```js\nvar deque = new Deque([1,2,3]);\ndeque.toString(); //\"1,2,3\"\ndeque.clear();\ndeque.toString(); //\"\"\n```\n<hr>\n\n#Performance\n\nClone the repo and `npm install`. Then run the `bench` script.\n\n##1000 items in the queue\n\n double-ended-queue x 15,532,714 ops/sec ±0.19% (96 runs sampled)\n built-in array x 6,501,398 ops/sec ±0.87% (95 runs sampled)\n node-deque x 2,938,068 ops/sec ±3.50% (68 runs sampled)\n\n##2 million items in the queue\n\n double-ended-queue x 14,425,547 ops/sec ±0.17% (94 runs sampled)\n node-deque x 2,815,628 ops/sec ±10.56% (76 runs sampled)\n built-in array x 19.23 ops/sec ±0.35% (51 runs sampled)\n\nNoteworthy is just how bad the degradation can be for built-in array when V8 cannot use the trick.\n",
"readmeFilename": "README.md",
"repository": {
"type": "git",
"url": "git://github.com/petkaantonov/deque.git"
},
"scripts": {
"test": "grunt test"
},
"version": "2.1.0-0"
}
... ...
# Logs
logs
*.log
# Runtime data
pids
*.pid
*.seed
*.rdb
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directory
# Commenting this out is preferred by some people, see
# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git-
node_modules
# Users Environment Variables
.lock-wscript
... ...
language: node_js
sudo: false
node_js:
- "0.10"
- "0.12"
- "4"
- "5"
after_success:
- CODECLIMATE_REPO_TOKEN=b57723fafcf0516f275d6b380cd506fd082ea88d86507eb82c8abd489b9b9a09 node ./node_modules/.bin/codeclimate-test-reporter < coverage/lcov.info
... ...
The MIT License (MIT)
Copyright (c) 2015 NodeRedis
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
... ...
# Redis Commands
[![Build Status](https://travis-ci.org/NodeRedis/redis-commands.png?branch=master)](https://travis-ci.org/NodeRedis/redis-commands)
[![Code Climate](https://codeclimate.com/github/NodeRedis/redis-commands/badges/gpa.svg)](https://codeclimate.com/github/NodeRedis/redis-commands)
[![Test Coverage](https://codeclimate.com/github/NodeRedis/redis-commands/badges/coverage.svg)](https://codeclimate.com/github/NodeRedis/redis-commands/coverage)
This module exports all the commands that Redis supports.
## Install
```shell
$ npm install redis-commands
```
## Usage
```javascript
var commands = require('redis-commands');
```
`.list` is an array containing all the lowercased commands:
```javascript
commands.list.forEach(function (command) {
console.log(command);
});
```
`.exists()` is used to check if the command exists:
```javascript
commands.exists('set') // true
commands.exists('other-command') // false
```
`.hasFlag()` is used to check if the command has the flag:
```javascript
commands.hasFlag('set', 'readonly') // false
```
`.getKeyIndexes()` is used to get the indexes of keys in the command arguments:
```javascript
commands.getKeyIndexes('set', ['key', 'value']) // [0]
commands.getKeyIndexes('mget', ['key1', 'key2']) // [0, 1]
```
## Acknowledgment
Thanks to [@Yuan Chuan](https://github.com/yuanchuan) for the package name. The original redis-commands has been renamed to [@yuanchuan/redis-commands](https://www.npmjs.com/package/@yuanchuan/redis-commands).
... ...
## v.1.3.1 - 25 Jan, 2017
Bugfix
- Fix require for webpack
## v.1.3.0 - 20 Oct, 2016
Features
- Rebuild the commands with the newest Redis unstable release
## v.1.2.0 - 21 Apr, 2016
Features
- Added support for `MIGRATE [...] KEYS key1, key2` (Redis >= v.3.0.6)
- Added build sanity check for unhandled commands with moveable keys
- Rebuild the commands with the newest unstable release
- Improved performance of .getKeyIndexes()
Bugfix
- Fixed command command returning the wrong arity due to a Redis bug
- Fixed brpop command returning the wrong keystop due to a Redis bug
## v.1.1.0 - 09 Feb, 2016
Features
- Added .exists() to check for command existence
- Improved performance of .hasFlag()
... ...
{
"append": {
"arity": 3,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"asking": {
"arity": 1,
"flags": [
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"auth": {
"arity": 2,
"flags": [
"noscript",
"loading",
"stale",
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"bgrewriteaof": {
"arity": 1,
"flags": [
"admin"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"bgsave": {
"arity": -1,
"flags": [
"admin"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"bitcount": {
"arity": -2,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"bitfield": {
"arity": -2,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"bitop": {
"arity": -4,
"flags": [
"write",
"denyoom"
],
"keyStart": 2,
"keyStop": -1,
"step": 1
},
"bitpos": {
"arity": -3,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"blpop": {
"arity": -3,
"flags": [
"write",
"noscript"
],
"keyStart": 1,
"keyStop": -2,
"step": 1
},
"brpop": {
"arity": -3,
"flags": [
"write",
"noscript"
],
"keyStart": 1,
"keyStop": -2,
"step": 1
},
"brpoplpush": {
"arity": 4,
"flags": [
"write",
"denyoom",
"noscript"
],
"keyStart": 1,
"keyStop": 2,
"step": 1
},
"client": {
"arity": -2,
"flags": [
"admin",
"noscript"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"cluster": {
"arity": -2,
"flags": [
"admin"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"command": {
"arity": 1,
"flags": [
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"config": {
"arity": -2,
"flags": [
"admin",
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"dbsize": {
"arity": 1,
"flags": [
"readonly",
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"debug": {
"arity": -1,
"flags": [
"admin",
"noscript"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"decr": {
"arity": 2,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"decrby": {
"arity": 3,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"del": {
"arity": -2,
"flags": [
"write"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"discard": {
"arity": 1,
"flags": [
"noscript",
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"dump": {
"arity": 2,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"echo": {
"arity": 2,
"flags": [
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"eval": {
"arity": -3,
"flags": [
"noscript",
"movablekeys"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"evalsha": {
"arity": -3,
"flags": [
"noscript",
"movablekeys"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"exec": {
"arity": 1,
"flags": [
"noscript",
"skip_monitor"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"exists": {
"arity": -2,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"expire": {
"arity": 3,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"expireat": {
"arity": 3,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"flushall": {
"arity": -1,
"flags": [
"write"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"flushdb": {
"arity": -1,
"flags": [
"write"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"geoadd": {
"arity": -5,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"geodist": {
"arity": -4,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"geohash": {
"arity": -2,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"geopos": {
"arity": -2,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"georadius": {
"arity": -6,
"flags": [
"write"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"georadiusbymember": {
"arity": -5,
"flags": [
"write"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"get": {
"arity": 2,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"getbit": {
"arity": 3,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"getrange": {
"arity": 4,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"getset": {
"arity": 3,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hdel": {
"arity": -3,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hexists": {
"arity": 3,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hget": {
"arity": 3,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hgetall": {
"arity": 2,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hincrby": {
"arity": 4,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hincrbyfloat": {
"arity": 4,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hkeys": {
"arity": 2,
"flags": [
"readonly",
"sort_for_script"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hlen": {
"arity": 2,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hmget": {
"arity": -3,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hmset": {
"arity": -4,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"host:": {
"arity": -1,
"flags": [
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"hscan": {
"arity": -3,
"flags": [
"readonly",
"random"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hset": {
"arity": 4,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hsetnx": {
"arity": 4,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hstrlen": {
"arity": 3,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"hvals": {
"arity": 2,
"flags": [
"readonly",
"sort_for_script"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"incr": {
"arity": 2,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"incrby": {
"arity": 3,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"incrbyfloat": {
"arity": 3,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"info": {
"arity": -1,
"flags": [
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"keys": {
"arity": 2,
"flags": [
"readonly",
"sort_for_script"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"lastsave": {
"arity": 1,
"flags": [
"random",
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"latency": {
"arity": -2,
"flags": [
"admin",
"noscript",
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"lindex": {
"arity": 3,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"linsert": {
"arity": 5,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"llen": {
"arity": 2,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"lpop": {
"arity": 2,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"lpush": {
"arity": -3,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"lpushx": {
"arity": -3,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"lrange": {
"arity": 4,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"lrem": {
"arity": 4,
"flags": [
"write"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"lset": {
"arity": 4,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"ltrim": {
"arity": 4,
"flags": [
"write"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"memory": {
"arity": -2,
"flags": [
"readonly"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"mget": {
"arity": -2,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"migrate": {
"arity": -6,
"flags": [
"write",
"movablekeys"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"module": {
"arity": -2,
"flags": [
"admin",
"noscript"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"monitor": {
"arity": 1,
"flags": [
"admin",
"noscript"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"move": {
"arity": 3,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"mset": {
"arity": -3,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": -1,
"step": 2
},
"msetnx": {
"arity": -3,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": -1,
"step": 2
},
"multi": {
"arity": 1,
"flags": [
"noscript",
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"object": {
"arity": 3,
"flags": [
"readonly"
],
"keyStart": 2,
"keyStop": 2,
"step": 2
},
"persist": {
"arity": 2,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"pexpire": {
"arity": 3,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"pexpireat": {
"arity": 3,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"pfadd": {
"arity": -2,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"pfcount": {
"arity": -2,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"pfdebug": {
"arity": -3,
"flags": [
"write"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"pfmerge": {
"arity": -2,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"pfselftest": {
"arity": 1,
"flags": [
"admin"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"ping": {
"arity": -1,
"flags": [
"stale",
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"post": {
"arity": -1,
"flags": [
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"psetex": {
"arity": 4,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"psubscribe": {
"arity": -2,
"flags": [
"pubsub",
"noscript",
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"psync": {
"arity": 3,
"flags": [
"readonly",
"admin",
"noscript"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"pttl": {
"arity": 2,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"publish": {
"arity": 3,
"flags": [
"pubsub",
"loading",
"stale",
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"pubsub": {
"arity": -2,
"flags": [
"pubsub",
"random",
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"punsubscribe": {
"arity": -1,
"flags": [
"pubsub",
"noscript",
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"quit": {
"arity": 1,
"flags": [
"loading",
"stale",
"readonly"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"randomkey": {
"arity": 1,
"flags": [
"readonly",
"random"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"readonly": {
"arity": 1,
"flags": [
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"readwrite": {
"arity": 1,
"flags": [
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"rename": {
"arity": 3,
"flags": [
"write"
],
"keyStart": 1,
"keyStop": 2,
"step": 1
},
"renamenx": {
"arity": 3,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 2,
"step": 1
},
"replconf": {
"arity": -1,
"flags": [
"admin",
"noscript",
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"restore": {
"arity": -4,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"restore-asking": {
"arity": -4,
"flags": [
"write",
"denyoom",
"asking"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"role": {
"arity": 1,
"flags": [
"noscript",
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"rpop": {
"arity": 2,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"rpoplpush": {
"arity": 3,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 2,
"step": 1
},
"rpush": {
"arity": -3,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"rpushx": {
"arity": -3,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"sadd": {
"arity": -3,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"save": {
"arity": 1,
"flags": [
"admin",
"noscript"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"scan": {
"arity": -2,
"flags": [
"readonly",
"random"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"scard": {
"arity": 2,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"script": {
"arity": -2,
"flags": [
"noscript"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"sdiff": {
"arity": -2,
"flags": [
"readonly",
"sort_for_script"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"sdiffstore": {
"arity": -3,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"select": {
"arity": 2,
"flags": [
"loading",
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"set": {
"arity": -3,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"setbit": {
"arity": 4,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"setex": {
"arity": 4,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"setnx": {
"arity": 3,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"setrange": {
"arity": 4,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"shutdown": {
"arity": -1,
"flags": [
"admin",
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"sinter": {
"arity": -2,
"flags": [
"readonly",
"sort_for_script"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"sinterstore": {
"arity": -3,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"sismember": {
"arity": 3,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"slaveof": {
"arity": 3,
"flags": [
"admin",
"noscript",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"slowlog": {
"arity": -2,
"flags": [
"admin"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"smembers": {
"arity": 2,
"flags": [
"readonly",
"sort_for_script"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"smove": {
"arity": 4,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 2,
"step": 1
},
"sort": {
"arity": -2,
"flags": [
"write",
"denyoom",
"movablekeys"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"spop": {
"arity": -2,
"flags": [
"write",
"random",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"srandmember": {
"arity": -2,
"flags": [
"readonly",
"random"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"srem": {
"arity": -3,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"sscan": {
"arity": -3,
"flags": [
"readonly",
"random"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"strlen": {
"arity": 2,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"subscribe": {
"arity": -2,
"flags": [
"pubsub",
"noscript",
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"substr": {
"arity": 4,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"sunion": {
"arity": -2,
"flags": [
"readonly",
"sort_for_script"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"sunionstore": {
"arity": -3,
"flags": [
"write",
"denyoom"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"swapdb": {
"arity": 3,
"flags": [
"write",
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"sync": {
"arity": 1,
"flags": [
"readonly",
"admin",
"noscript"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"time": {
"arity": 1,
"flags": [
"random",
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"touch": {
"arity": -2,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"ttl": {
"arity": 2,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"type": {
"arity": 2,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"unlink": {
"arity": -2,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"unsubscribe": {
"arity": -1,
"flags": [
"pubsub",
"noscript",
"loading",
"stale"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"unwatch": {
"arity": 1,
"flags": [
"noscript",
"fast"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"wait": {
"arity": 3,
"flags": [
"noscript"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"watch": {
"arity": -2,
"flags": [
"noscript",
"fast"
],
"keyStart": 1,
"keyStop": -1,
"step": 1
},
"zadd": {
"arity": -4,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zcard": {
"arity": 2,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zcount": {
"arity": 4,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zincrby": {
"arity": 4,
"flags": [
"write",
"denyoom",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zinterstore": {
"arity": -4,
"flags": [
"write",
"denyoom",
"movablekeys"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
},
"zlexcount": {
"arity": 4,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zrange": {
"arity": -4,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zrangebylex": {
"arity": -4,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zrangebyscore": {
"arity": -4,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zrank": {
"arity": 3,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zrem": {
"arity": -3,
"flags": [
"write",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zremrangebylex": {
"arity": 4,
"flags": [
"write"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zremrangebyrank": {
"arity": 4,
"flags": [
"write"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zremrangebyscore": {
"arity": 4,
"flags": [
"write"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zrevrange": {
"arity": -4,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zrevrangebylex": {
"arity": -4,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zrevrangebyscore": {
"arity": -4,
"flags": [
"readonly"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zrevrank": {
"arity": 3,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zscan": {
"arity": -3,
"flags": [
"readonly",
"random"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zscore": {
"arity": 3,
"flags": [
"readonly",
"fast"
],
"keyStart": 1,
"keyStop": 1,
"step": 1
},
"zunionstore": {
"arity": -4,
"flags": [
"write",
"denyoom",
"movablekeys"
],
"keyStart": 0,
"keyStop": 0,
"step": 0
}
}
\ No newline at end of file
... ...
'use strict'
var commands = require('./commands.json')
/**
* Redis command list
*
* All commands are lowercased.
*
* @var {string[]}
* @public
*/
exports.list = Object.keys(commands)
var flags = {}
exports.list.forEach(function (commandName) {
flags[commandName] = commands[commandName].flags.reduce(function (flags, flag) {
flags[flag] = true
return flags
}, {})
})
/**
* Check if the command exists
*
* @param {string} commandName - the command name
* @return {boolean} result
* @public
*/
exports.exists = function (commandName) {
return Boolean(commands[commandName])
}
/**
* Check if the command has the flag
*
* Some of the possible flags: readonly, noscript, loading
* @param {string} commandName - the command name
* @param {string} flag - the flag to check
* @return {boolean} result
* @public
*/
exports.hasFlag = function (commandName, flag) {
if (!flags[commandName]) {
throw new Error('Unknown command ' + commandName)
}
return Boolean(flags[commandName][flag])
}
/**
* Get indexes of keys in the command arguments
*
* @param {string} commandName - the command name
* @param {string[]} args - the arguments of the command
* @param {object} [options] - options
* @param {boolean} [options.parseExternalKey] - parse external keys
* @return {number[]} - the list of key indexes
* @public
*
* @example
* ```javascript
* getKeyIndexes('set', ['key', 'value']) // [0]
* getKeyIndexes('mget', ['key1', 'key2']) // [0, 1]
* ```
*/
exports.getKeyIndexes = function (commandName, args, options) {
var command = commands[commandName]
if (!command) {
throw new Error('Unknown command ' + commandName)
}
if (!Array.isArray(args)) {
throw new Error('Expect args to be an array')
}
var keys = []
var i, keyStart, keyStop, parseExternalKey
switch (commandName) {
case 'zunionstore':
case 'zinterstore':
keys.push(0)
// fall through
case 'eval':
case 'evalsha':
keyStop = Number(args[1]) + 2
for (i = 2; i < keyStop; i++) {
keys.push(i)
}
break
case 'sort':
parseExternalKey = options && options.parseExternalKey
keys.push(0)
for (i = 1; i < args.length - 1; i++) {
if (typeof args[i] !== 'string') {
continue
}
var directive = args[i].toUpperCase()
if (directive === 'GET') {
i += 1
if (args[i] !== '#') {
if (parseExternalKey) {
keys.push([i, getExternalKeyNameLength(args[i])])
} else {
keys.push(i)
}
}
} else if (directive === 'BY') {
i += 1
if (parseExternalKey) {
keys.push([i, getExternalKeyNameLength(args[i])])
} else {
keys.push(i)
}
} else if (directive === 'STORE') {
i += 1
keys.push(i)
}
}
break
case 'migrate':
if (args[2] === '') {
for (i = 5; i < args.length - 1; i++) {
if (args[i].toUpperCase() === 'KEYS') {
for (var j = i + 1; j < args.length; j++) {
keys.push(j)
}
break
}
}
} else {
keys.push(2)
}
break
default:
// step has to be at least one in this case, otherwise the command does not contain a key
if (command.step > 0) {
keyStart = command.keyStart - 1
keyStop = command.keyStop > 0 ? command.keyStop : args.length + command.keyStop + 1
for (i = keyStart; i < keyStop; i += command.step) {
keys.push(i)
}
}
break
}
return keys
}
function getExternalKeyNameLength (key) {
if (typeof key !== 'string') {
key = String(key)
}
var hashPos = key.indexOf('->')
return hashPos === -1 ? key.length : hashPos
}
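A minimal usage sketch of the module dumped above (illustrative only, assuming it resolves as `redis-commands` from node_modules); the return values follow the commands.json excerpt and the test file further down:
```js
var commands = require('redis-commands')

commands.exists('set')                                   // true
commands.hasFlag('set', 'readonly')                      // false (SET is a write command)
commands.getKeyIndexes('set', ['key', 'value'])          // [0]
commands.getKeyIndexes('mset', ['k1', 'v1', 'k2', 'v2']) // [0, 2]
```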
... ...
{
"_args": [
[
{
"raw": "redis-commands@^1.2.0",
"scope": null,
"escapedName": "redis-commands",
"name": "redis-commands",
"rawSpec": "^1.2.0",
"spec": ">=1.2.0 <2.0.0",
"type": "range"
},
"/Users/fzy/project/koa2_Sequelize_project/node_modules/redis"
]
],
"_from": "redis-commands@>=1.2.0 <2.0.0",
"_id": "redis-commands@1.3.1",
"_inCache": true,
"_location": "/redis-commands",
"_nodeVersion": "7.4.0",
"_npmOperationalInternal": {
"host": "packages-12-west.internal.npmjs.com",
"tmp": "tmp/redis-commands-1.3.1.tgz_1485363355063_0.7909097610972822"
},
"_npmUser": {
"name": "bridgear",
"email": "ruben@bridgewater.de"
},
"_npmVersion": "4.0.5",
"_phantomChildren": {},
"_requested": {
"raw": "redis-commands@^1.2.0",
"scope": null,
"escapedName": "redis-commands",
"name": "redis-commands",
"rawSpec": "^1.2.0",
"spec": ">=1.2.0 <2.0.0",
"type": "range"
},
"_requiredBy": [
"/redis"
],
"_resolved": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.3.1.tgz",
"_shasum": "81d826f45fa9c8b2011f4cd7a0fe597d241d442b",
"_shrinkwrap": null,
"_spec": "redis-commands@^1.2.0",
"_where": "/Users/fzy/project/koa2_Sequelize_project/node_modules/redis",
"author": {
"name": "luin",
"email": "i@zihua.li",
"url": "http://zihua.li"
},
"bugs": {
"url": "https://github.com/NodeRedis/redis-commonds/issues"
},
"dependencies": {},
"description": "Redis commands",
"devDependencies": {
"chai": "^3.4.0",
"codeclimate-test-reporter": "^0.4.0",
"ioredis": "^2.0.0",
"istanbul": "^0.4.3",
"json-stable-stringify": "^1.0.0",
"mocha": "^3.0.0",
"snazzy": "^6.0.0",
"standard": "^8.0.0"
},
"directories": {},
"dist": {
"shasum": "81d826f45fa9c8b2011f4cd7a0fe597d241d442b",
"tarball": "https://registry.npmjs.org/redis-commands/-/redis-commands-1.3.1.tgz"
},
"gitHead": "3c87f94d94fcf66ca0c77101a6d36fad32134326",
"homepage": "https://github.com/NodeRedis/redis-commonds",
"keywords": [
"redis",
"commands",
"prefix"
],
"license": "MIT",
"main": "index.js",
"maintainers": [
{
"name": "bridgear",
"email": "ruben@bridgewater.de"
},
{
"name": "luin",
"email": "i@zihua.li"
}
],
"name": "redis-commands",
"optionalDependencies": {},
"readme": "# Redis Commands\n\n[![Build Status](https://travis-ci.org/NodeRedis/redis-commands.png?branch=master)](https://travis-ci.org/NodeRedis/redis-commands)\n[![Code Climate](https://codeclimate.com/github/NodeRedis/redis-commands/badges/gpa.svg)](https://codeclimate.com/github/NodeRedis/redis-commands)\n[![Test Coverage](https://codeclimate.com/github/NodeRedis/redis-commands/badges/coverage.svg)](https://codeclimate.com/github/NodeRedis/redis-commands/coverage)\n\nThis module exports all the commands that Redis supports.\n\n## Install\n\n```shell\n$ npm install redis-commands\n```\n\n## Usage\n\n```javascript\nvar commands = require('redis-commands');\n```\n\n`.list` is an array contains all the lowercased commands:\n\n```javascript\ncommands.list.forEach(function (command) {\n console.log(command);\n});\n```\n\n`.exists()` is used to check if the command exists:\n\n```javascript\ncommands.exists('set') // true\ncommands.exists('other-command') // false\n```\n\n`.hasFlag()` is used to check if the command has the flag:\n\n```javascript\ncommands.hasFlag('set', 'readonly') // false\n```\n\n`.getKeyIndexes()` is used to get the indexes of keys in the command arguments:\n\n```javascript\ncommands.getKeyIndexes('set', ['key', 'value']) // [0]\ncommands.getKeyIndexes('mget', ['key1', 'key2']) // [0, 1]\n```\n\n## Acknowledgment\n\nThank [@Yuan Chuan](https://github.com/yuanchuan) for the package name. The original redis-commands is renamed to [@yuanchuan/redis-commands](https://www.npmjs.com/package/@yuanchuan/redis-commands).\n",
"readmeFilename": "README.md",
"repository": {
"type": "git",
"url": "git+https://github.com/NodeRedis/redis-commands.git"
},
"scripts": {
"build": "node tools/build",
"coverage": "node ./node_modules/istanbul/lib/cli.js cover --preserve-comments ./node_modules/mocha/bin/_mocha -- -R spec",
"coverage:check": "node ./node_modules/istanbul/lib/cli.js check-coverage --branch 100 --statement 100",
"lint": "standard --fix --verbose | snazzy",
"posttest": "npm run coverage && npm run coverage:check",
"pretest": "npm run lint",
"test": "mocha"
},
"version": "1.3.1"
}
... ...
'use strict'
/* global describe, it */
var commands = require('..')
var expect = require('chai').expect
describe('redis-commands', function () {
describe('.list', function () {
it('should be an array', function () {
expect(commands.list).to.be.instanceof(Array)
})
it('should ensure every command is lowercase', function () {
commands.list.forEach(function (command) {
expect(command.toLowerCase()).to.eql(command)
})
})
it('should ensure quit command is added to the commands list', function () {
expect(commands.list.indexOf('quit')).not.to.eql(-1)
})
it('should not contain multi-word commands', function () {
commands.list.forEach(function (command) {
expect(command.indexOf(' ')).to.eql(-1)
})
})
})
describe('.exists()', function () {
it('should return true for existing commands', function () {
expect(commands.exists('set')).to.eql(true)
expect(commands.exists('get')).to.eql(true)
expect(commands.exists('cluster')).to.eql(true)
expect(commands.exists('quit')).to.eql(true)
expect(commands.exists('config')).to.eql(true)
})
it('should return false for non-existing commands', function () {
expect(commands.exists('SET')).to.eql(false)
expect(commands.exists('set get')).to.eql(false)
expect(commands.exists('other-command')).to.eql(false)
})
})
describe('.hasFlag()', function () {
it('should return true if the command has the flag', function () {
expect(commands.hasFlag('set', 'write')).to.eql(true)
expect(commands.hasFlag('set', 'denyoom')).to.eql(true)
expect(commands.hasFlag('select', 'fast')).to.eql(true)
})
it('should return false otherwise', function () {
expect(commands.hasFlag('set', 'fast')).to.eql(false)
expect(commands.hasFlag('set', 'readonly')).to.eql(false)
expect(commands.hasFlag('select', 'denyoom')).to.eql(false)
expect(commands.hasFlag('quit', 'denyoom')).to.eql(false)
})
it('should throw on unknown commands', function () {
expect(function () { commands.hasFlag('UNKNOWN') }).to.throw(Error)
})
})
describe('.getKeyIndexes()', function () {
var index = commands.getKeyIndexes
it('should throw on unknown commands', function () {
expect(function () { index('UNKNOWN') }).to.throw(Error)
})
it('should throw on faulty args', function () {
expect(function () { index('get', 'foo') }).to.throw(Error)
})
it('should return an empty array if no keys exist', function () {
expect(index('auth', [])).to.eql([])
})
it('should return key indexes', function () {
expect(index('set', ['foo', 'bar'])).to.eql([0])
expect(index('del', ['foo'])).to.eql([0])
expect(index('get', ['foo'])).to.eql([0])
expect(index('mget', ['foo', 'bar'])).to.eql([0, 1])
expect(index('mset', ['foo', 'v1', 'bar', 'v2'])).to.eql([0, 2])
expect(index('hmset', ['key', 'foo', 'v1', 'bar', 'v2'])).to.eql([0])
expect(index('blpop', ['key1', 'key2', '17'])).to.eql([0, 1])
expect(index('evalsha', ['23123', '2', 'foo', 'bar', 'zoo'])).to.eql([2, 3])
expect(index('sort', ['key'])).to.eql([0])
expect(index('zunionstore', ['out', '2', 'zset1', 'zset2', 'WEIGHTS', '2', '3'])).to.eql([0, 2, 3])
expect(index('migrate', ['127.0.0.1', 6379, 'foo', 0, 0, 'COPY'])).to.eql([2])
expect(index('migrate', ['127.0.0.1', 6379, '', 0, 0, 'REPLACE', 'KEYS', 'foo', 'bar'])).to.eql([7, 8])
expect(index('migrate', ['127.0.0.1', 6379, '', 0, 0, 'KEYS', 'foo', 'bar'])).to.eql([6, 7])
})
it('should support numeric argument', function () {
expect(index('evalsha', ['23123', 2, 'foo', 'bar', 'zoo'])).to.eql([2, 3])
expect(index('zinterstore', ['out', 2, 'zset1', 'zset2', 'WEIGHTS', 2, 3])).to.eql([0, 2, 3])
})
describe('disable parseExternalKey', function () {
it('should not parse external keys', function () {
expect(index('sort', ['key', 'BY', 'hash:*->field'])).to.eql([0, 2])
expect(index('sort', ['key', 'BY', 'hash:*->field', 'LIMIT', 2, 3, 'GET', 'gk', 'GET', '#', 'Get', 'gh->f*', 'DESC', 'ALPHA', 'STORE', 'store'])).to.eql([0, 2, 7, 11, 15])
})
})
describe('enable parseExternalKey', function () {
it('should parse external keys', function () {
expect(index('sort', ['key', 'BY', 'hash:*->field'], {
parseExternalKey: true
})).to.eql([0, [2, 6]])
expect(index('sort', ['key', 'BY', 'hash:*->field', 'LIMIT', 2, 3, 'GET', new Buffer('gk'), 'GET', '#', 'Get', 'gh->f*', 'DESC', 'ALPHA', 'STORE', 'store'], {
parseExternalKey: true
})).to.eql([0, [2, 6], [7, 2], [11, 2], 15])
})
})
})
})
... ...
var fs = require('fs')
var path = require('path')
var stringify = require('json-stable-stringify')
var commandPath = path.join(__dirname, '..', 'commands.json')
var redisCommands = require('../')
var Redis = require('ioredis')
var redis = new Redis(process.env.REDIS_URI)
redis.command().then(function (res) {
redis.disconnect()
// Find all specially handled cases
var movableKeys = String(redisCommands.getKeyIndexes).match(/case '[a-z-]+':/g).map(function (entry) {
return entry.replace(/^case '|':$/g, '')
})
var commands = res.reduce(function (prev, current) {
var currentCommandPos = movableKeys.indexOf(current[0])
if (currentCommandPos !== -1 && current[2].indexOf('movablekeys') !== -1) {
movableKeys.splice(currentCommandPos, 1)
}
// https://github.com/antirez/redis/issues/2598
if (current[0] === 'brpop' && current[4] === 1) {
current[4] = -2
}
prev[current[0]] = {
arity: current[1] || 1, // https://github.com/antirez/redis/pull/2986
flags: current[2],
keyStart: current[3],
keyStop: current[4],
step: current[5]
}
return prev
}, {})
// Future proof. Redis might implement this at some point
// https://github.com/antirez/redis/pull/2982
if (!commands.quit) {
commands.quit = {
arity: 1,
flags: [
'loading',
'stale',
'readonly'
],
keyStart: 0,
keyStop: 0,
step: 0
}
}
if (movableKeys.length !== 0) {
throw new Error('Not all commands (\'' + movableKeys.join('\', \'') + '\') with the "movablekeys" flag are handled in the code')
}
// Use json-stable-stringify instead of JSON.stringify
// for easier diffing
var content = stringify(commands, { space: ' ' })
fs.writeFile(commandPath, content, function (err) {
  if (err) throw err
})
})
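For reference, the build script above is wired up as the package's `build` npm script and reads the target server from `REDIS_URI` (ioredis falls back to 127.0.0.1:6379 when it is unset); every generated entry has the same shape as the commands.json excerpt at the top of this dump, sketched here with `strlen`:
```js
// Regenerate commands.json: REDIS_URI=redis://127.0.0.1:6379 npm run build
// One resulting entry (values taken verbatim from the excerpt above):
var strlen = { arity: 2, flags: ['readonly', 'fast'], keyStart: 1, keyStop: 1, step: 1 }
```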
... ...
# IntelliJ project files
.idea
*.iml
out
gen
# Irrelevant files and folders
benchmark
coverage
test
.travis.yml
.gitignore
*.log
.vscode
.codeclimate.yml
\ No newline at end of file
... ...
The MIT License (MIT)
Copyright (c) 2015 NodeRedis
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
... ...
[![Build Status](https://travis-ci.org/NodeRedis/node-redis-parser.png?branch=master)](https://travis-ci.org/NodeRedis/node-redis-parser)
[![Test Coverage](https://codeclimate.com/github/NodeRedis/node-redis-parser/badges/coverage.svg)](https://codeclimate.com/github/NodeRedis/node-redis-parser/coverage)
[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](http://standardjs.com/)
# redis-parser
A high performance javascript redis parser built for [node_redis](https://github.com/NodeRedis/node_redis) and [ioredis](https://github.com/luin/ioredis). Parses all [RESP](http://redis.io/topics/protocol) data.
## Install
Install with [NPM](https://npmjs.org/):
npm install redis-parser
## Usage
```js
var Parser = require('redis-parser');
var myParser = new Parser(options);
```
### Options
* `returnReply`: *function*; mandatory
* `returnError`: *function*; mandatory
* `returnFatalError`: *function*; optional, defaults to the returnError function
* `returnBuffers`: *boolean*; optional, defaults to false
* `stringNumbers`: *boolean*; optional, defaults to false
### Functions
* `reset()`: reset the parser to its initial state
* `setReturnBuffers(boolean)`: (JSParser only) set the returnBuffers option on/off without resetting the parser
* `setStringNumbers(boolean)`: (JSParser only) set the stringNumbers option on/off without resetting the parser
### Error classes
* `RedisError` sub class of Error
* `ReplyError` sub class of RedisError
* `ParserError` sub class of RedisError
All Redis errors will be returned as `ReplyErrors` while a parser error is returned as `ParserError`.
All error classes are exported by the parser.
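A small sketch (not from the original README) of telling the two apart inside an error callback; `handleError` is a hypothetical handler name:
```js
var Parser = require('redis-parser');

function handleError (err) {
  if (err instanceof Parser.ReplyError) {
    // Redis itself answered with an error reply (a command-level failure).
  } else if (err instanceof Parser.ParserError) {
    // The RESP stream was corrupted; see "Protocol errors" below.
  }
}
```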
### Example
```js
var Parser = require("redis-parser");
function Library () {}
Library.prototype.returnReply = function (reply) { ... }
Library.prototype.returnError = function (err) { ... }
Library.prototype.returnFatalError = function (err) { ... }
var lib = new Library();
var parser = new Parser({
returnReply: function(reply) {
lib.returnReply(reply);
},
returnError: function(err) {
lib.returnError(err);
},
returnFatalError: function (err) {
lib.returnFatalError(err);
}
});
Library.prototype.streamHandler = function () {
this.stream.on('data', function (buffer) {
// Here the data (e.g. `new Buffer('$5\r\nHello\r\n')`) is passed to the parser and the result is passed to either function depending on the provided data.
parser.execute(buffer);
});
};
```
You do not have to use the returnFatalError function. Fatal errors will be returned in the normal error function in that case.
If you want to return buffers instead of strings, you can do this by adding the `returnBuffers` option.
If you handle big numbers that are too large for JS (Number.MAX_SAFE_INTEGER === 2^53 - 1), please use the `stringNumbers` option. That way all numbers are returned as strings and you can handle them safely.
```js
// Same functions as in the first example
var parser = new Parser({
returnReply: function(reply) {
lib.returnReply(reply);
},
returnError: function(err) {
lib.returnError(err);
},
returnBuffers: true, // All strings are returned as Buffer e.g. <Buffer 48 65 6c 6c 6f>
stringNumbers: true // All numbers are returned as String
});
// The streamHandler as above
```
## Protocol errors
To handle protocol errors gracefully (this is very unlikely to happen), add the returnFatalError option, reject every command still in flight (they might have been processed properly, but the replies can no longer be trusted), destroy the socket and reconnect. Note that no new command may be sent while doing this, so buffer all new commands in the meantime; otherwise a chunk might still contain partial data of a following command that was already processed properly but answered in the same chunk as the command that caused the protocol error.
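A hedged sketch of such a recovery path; `pendingCommands`, `socket`, and `reconnect` are placeholders for whatever the calling library provides, not part of the parser API:
```js
var parser = new Parser({
  returnReply: function (reply) { lib.returnReply(reply); },
  returnError: function (err) { lib.returnError(err); },
  returnFatalError: function (err) {
    // The replies can no longer be trusted: fail everything still in flight,
    // then drop the connection and reconnect. Buffer new commands until then.
    pendingCommands.forEach(function (cmd) { cmd.reject(err); });
    pendingCommands = [];
    socket.destroy();
    reconnect();
  }
});
```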
## Contribute
The parser is highly optimized but there may still be further optimizations possible.
npm install
npm test
npm run benchmark
Currently the benchmark compares the performance against the hiredis parser:
HIREDIS: $ multiple chunks in a bulk string x 859,880 ops/sec ±1.22% (82 runs sampled)
HIREDIS BUF: $ multiple chunks in a bulk string x 608,869 ops/sec ±1.72% (85 runs sampled)
JS PARSER: $ multiple chunks in a bulk string x 910,590 ops/sec ±0.87% (89 runs sampled)
JS PARSER BUF: $ multiple chunks in a bulk string x 1,299,507 ops/sec ±2.18% (84 runs sampled)
HIREDIS: + multiple chunks in a string x 1,787,203 ops/sec ±0.58% (96 runs sampled)
HIREDIS BUF: + multiple chunks in a string x 943,584 ops/sec ±1.62% (87 runs sampled)
JS PARSER: + multiple chunks in a string x 2,008,264 ops/sec ±1.01% (91 runs sampled)
JS PARSER BUF: + multiple chunks in a string x 2,045,546 ops/sec ±0.78% (91 runs sampled)
HIREDIS: $ 4mb bulk string x 310 ops/sec ±1.58% (75 runs sampled)
HIREDIS BUF: $ 4mb bulk string x 471 ops/sec ±2.28% (78 runs sampled)
JS PARSER: $ 4mb bulk string x 747 ops/sec ±2.43% (85 runs sampled)
JS PARSER BUF: $ 4mb bulk string x 846 ops/sec ±5.52% (72 runs sampled)
HIREDIS: + simple string x 2,324,866 ops/sec ±1.61% (90 runs sampled)
HIREDIS BUF: + simple string x 1,085,823 ops/sec ±2.47% (82 runs sampled)
JS PARSER: + simple string x 4,567,358 ops/sec ±1.97% (81 runs sampled)
JS PARSER BUF: + simple string x 5,433,901 ops/sec ±0.66% (93 runs sampled)
HIREDIS: : integer x 2,332,946 ops/sec ±0.47% (93 runs sampled)
JS PARSER: : integer x 17,730,449 ops/sec ±0.73% (91 runs sampled)
JS PARSER STR: : integer x 12,942,037 ops/sec ±0.51% (92 runs sampled)
HIREDIS: : big integer x 2,012,572 ops/sec ±0.33% (93 runs sampled)
JS PARSER: : big integer x 10,210,923 ops/sec ±0.94% (94 runs sampled)
JS PARSER STR: : big integer x 4,453,320 ops/sec ±0.52% (94 runs sampled)
HIREDIS: * array x 44,479 ops/sec ±0.55% (94 runs sampled)
HIREDIS BUF: * array x 14,391 ops/sec ±1.04% (86 runs sampled)
JS PARSER: * array x 53,796 ops/sec ±2.08% (79 runs sampled)
JS PARSER BUF: * array x 72,428 ops/sec ±0.72% (93 runs sampled)
HIREDIS: * big nested array x 217 ops/sec ±0.97% (83 runs sampled)
HIREDIS BUF: * big nested array x 255 ops/sec ±2.28% (77 runs sampled)
JS PARSER: * big nested array x 242 ops/sec ±1.10% (85 runs sampled)
JS PARSER BUF: * big nested array x 375 ops/sec ±1.21% (88 runs sampled)
HIREDIS: - error x 78,821 ops/sec ±0.80% (93 runs sampled)
JS PARSER: - error x 143,382 ops/sec ±0.75% (92 runs sampled)
Platform info:
Ubuntu 16.10
Node.js 7.4.0
Intel(R) Core(TM) i7-5600U CPU
## License
[MIT](./LICENSE)
... ...
## v.2.6.0 - 03 Apr, 2017
Internals
- Use Buffer.allocUnsafe instead of new Buffer() with modern Node.js versions
## v.2.5.0 - 11 Mar, 2017
Features
- Added a `ParserError` class to differentiate parser errors from ReplyErrors. The class is also exported
Bugfixes
- All errors now show their error message again next to the error name in the stack trace
- ParserErrors now show the offset and buffer attributes while being logged
## v.2.4.1 - 05 Feb, 2017
Bugfixes
- Fixed minimal memory consumption overhead for chunked buffers
## v.2.4.0 - 25 Jan, 2017
Features
- Added `reset` function to reset the parser to its initial values
- Added `setReturnBuffers` function to reset the returnBuffers option (Only for the JSParser)
- Added `setStringNumbers` function to reset the stringNumbers option (Only for the JSParser)
- All errors are now subclasses of the new `RedisError` class, which is also exported.
- Improved bulk string chunked data handling performance
Bugfixes
- Parsing time for big nested arrays is now linear
## v.2.3.0 - 25 Nov, 2016
Features
- Parsing time for big arrays (e.g. 4mb+) is now linear and works well for arbitrary array sizes
This case is an order of magnitude faster than before
OLD STR: * big array x 1.09 ops/sec ±2.15% (7 runs sampled)
OLD BUF: * big array x 1.23 ops/sec ±2.67% (8 runs sampled)
NEW STR: * big array x 273 ops/sec ±2.09% (85 runs sampled)
NEW BUF: * big array x 259 ops/sec ±1.32% (85 runs sampled)
(~10mb array with 1000 entries)
## v.2.2.0 - 18 Nov, 2016
Features
- Improve `stringNumbers` parsing performance by up to 100%
Bugfixes
- Do not unref the interval anymore due to issues with NodeJS
## v.2.1.1 - 31 Oct, 2016
Bugfixes
- Remove erroneously added const to support Node.js 0.10
## v.2.1.0 - 30 Oct, 2016
Features
- Improve parser errors by adding more detailed information to them
- Accept manipulated Object.prototypes
- Unref the interval if used
## v.2.0.4 - 21 Jul, 2016
Bugfixes
- Fixed multi byte characters getting corrupted
## v.2.0.3 - 17 Jun, 2016
Bugfixes
- Fixed parser not working with huge buffers (e.g. 300 MB)
## v.2.0.2 - 08 Jun, 2016
Bugfixes
- Fixed parser with returnBuffers option returning corrupted data
## v.2.0.1 - 04 Jun, 2016
Bugfixes
- Fixed multiple parsers working concurrently resulting in faulty data in some cases
## v.2.0.0 - 29 May, 2016
The javascript parser got completely rewritten by [Michael Diarmid](https://github.com/Salakar) and [Ruben Bridgewater](https://github.com/BridgeAR) and is now a lot faster than the hiredis parser.
Therefore the hiredis parser was deprecated and should only be used for testing purposes and benchmarking comparison.
All errors returned by the parser are now of class ReplyError
Features
- Improved performance: up to 15x as fast as before
- Improved options validation
- Added ReplyError Class
- Added parser benchmark
- Switched default parser from hiredis to JS, no matter if hiredis is installed or not
Removed
- Deprecated hiredis support
## v.1.3.0 - 27 Mar, 2016
Features
- Added `auto` as parser name option to check what parser is available
- Requests for non-existing parsers now fall back to auto mode instead of always choosing the JS parser
## v.1.2.0 - 27 Mar, 2016
Features
- Added `stringNumbers` option to make sure all numbers are returned as strings instead of JS numbers, to preserve precision
- The parser now prints a warning if a parser that does not exist is explicitly requested, and gracefully falls back to the JS parser
## v.1.1.0 - 26 Jan, 2016
Features
- The parser now resets itself on protocol errors
... ...
'use strict'
module.exports = require('./lib/parser')
module.exports.ReplyError = require('./lib/replyError')
module.exports.RedisError = require('./lib/redisError')
module.exports.ParserError = require('./lib/parserError')
... ...
'use strict'
var hiredis = require('hiredis')
var ReplyError = require('../lib/replyError')
var ParserError = require('../lib/parserError')
/**
* Parse data
* @param parser
* @returns {*}
*/
function parseData (parser, data) {
try {
return parser.reader.get()
} catch (err) {
// Protocol errors land here
// Reset the parser. Otherwise new commands can't be processed properly
parser.reader = new hiredis.Reader(parser.options)
parser.returnFatalError(new ParserError(err.message, JSON.stringify(data), -1))
}
}
/**
* Hiredis Parser
* @param options
* @constructor
*/
function HiredisReplyParser (options) {
this.returnError = options.returnError
this.returnFatalError = options.returnFatalError || options.returnError
this.returnReply = options.returnReply
this.name = 'hiredis'
this.options = {
return_buffers: !!options.returnBuffers
}
this.reader = new hiredis.Reader(this.options)
}
HiredisReplyParser.prototype.execute = function (data) {
this.reader.feed(data)
var reply = parseData(this, data)
while (reply !== undefined) {
if (reply && reply.name === 'Error') {
this.returnError(new ReplyError(reply.message))
} else {
this.returnReply(reply)
}
reply = parseData(this, data)
}
}
/**
* Reset the parser values to the initial state
*
* @returns {undefined}
*/
HiredisReplyParser.prototype.reset = function () {
this.reader = new hiredis.Reader(this.options)
}
module.exports = HiredisReplyParser
... ...
'use strict'
var StringDecoder = require('string_decoder').StringDecoder
var decoder = new StringDecoder()
var ReplyError = require('./replyError')
var ParserError = require('./parserError')
var bufferPool = bufferAlloc(32 * 1024)
var bufferOffset = 0
var interval = null
var counter = 0
var notDecreased = 0
var isModern = typeof Buffer.allocUnsafe === 'function'
/**
* For backwards compatibility
* @param len
* @returns {Buffer}
*/
function bufferAlloc (len) {
return isModern ? Buffer.allocUnsafe(len) : new Buffer(len)
}
/**
* Used for lengths and numbers only, faster perf on arrays / bulks
* @param parser
* @returns {*}
*/
function parseSimpleNumbers (parser) {
var offset = parser.offset
var length = parser.buffer.length - 1
var number = 0
var sign = 1
if (parser.buffer[offset] === 45) {
sign = -1
offset++
}
while (offset < length) {
var c1 = parser.buffer[offset++]
if (c1 === 13) { // \r\n
parser.offset = offset + 1
return sign * number
}
number = (number * 10) + (c1 - 48)
}
}
/**
* Used for integer numbers in case of the stringNumbers option
*
* The maximum possible integer to use is: Math.floor(Number.MAX_SAFE_INTEGER / 10)
* Staying in an SMI, Math.floor((Math.pow(2, 32) / 10) - 1), is even more efficient though
*
* @param parser
* @returns {*}
*/
function parseStringNumbers (parser) {
var offset = parser.offset
var length = parser.buffer.length - 1
var number = 0
var res = ''
if (parser.buffer[offset] === 45) {
res += '-'
offset++
}
while (offset < length) {
var c1 = parser.buffer[offset++]
if (c1 === 13) { // \r\n
parser.offset = offset + 1
if (number !== 0) {
res += number
}
return res
} else if (number > 429496728) {
res += (number * 10) + (c1 - 48)
number = 0
} else if (c1 === 48 && number === 0) {
res += 0
} else {
number = (number * 10) + (c1 - 48)
}
}
}
/**
* Returns a string or buffer of the provided offset start and
* end ranges. Checks `optionReturnBuffers`.
*
* If returnBuffers is active, all return values are returned as buffers except numbers and errors
*
* @param parser
* @param start
* @param end
* @returns {*}
*/
function convertBufferRange (parser, start, end) {
parser.offset = end + 2
if (parser.optionReturnBuffers === true) {
return parser.buffer.slice(start, end)
}
return parser.buffer.toString('utf-8', start, end)
}
/**
* Parse a '+' redis simple string response but forward the offsets
* onto convertBufferRange to generate a string.
* @param parser
* @returns {*}
*/
function parseSimpleString (parser) {
var start = parser.offset
var offset = start
var buffer = parser.buffer
var length = buffer.length - 1
while (offset < length) {
if (buffer[offset++] === 13) { // \r\n
return convertBufferRange(parser, start, offset - 1)
}
}
}
/**
* Returns the string length via parseSimpleNumbers
* @param parser
* @returns {*}
*/
function parseLength (parser) {
var string = parseSimpleNumbers(parser)
if (string !== undefined) {
return string
}
}
/**
* Parse a ':' redis integer response
*
* If stringNumbers is activated the parser always returns numbers as string
* This is important for big numbers (number > Math.pow(2, 53)) as js numbers
* are 64bit floating point numbers with reduced precision
*
* @param parser
* @returns {*}
*/
function parseInteger (parser) {
if (parser.optionStringNumbers) {
return parseStringNumbers(parser)
}
return parseSimpleNumbers(parser)
}
/**
* Parse a '$' redis bulk string response
* @param parser
* @returns {*}
*/
function parseBulkString (parser) {
var length = parseLength(parser)
if (length === undefined) {
return
}
if (length === -1) {
return null
}
var offsetEnd = parser.offset + length
if (offsetEnd + 2 > parser.buffer.length) {
parser.bigStrSize = offsetEnd + 2
parser.bigOffset = parser.offset
parser.totalChunkSize = parser.buffer.length
parser.bufferCache.push(parser.buffer)
return
}
return convertBufferRange(parser, parser.offset, offsetEnd)
}
/**
* Parse a '-' redis error response
* @param parser
* @returns {Error}
*/
function parseError (parser) {
var string = parseSimpleString(parser)
if (string !== undefined) {
if (parser.optionReturnBuffers === true) {
string = string.toString()
}
return new ReplyError(string)
}
}
/**
* Parsing error handler, resets parser buffer
* @param parser
* @param error
*/
function handleError (parser, error) {
parser.buffer = null
parser.returnFatalError(error)
}
/**
* Parse a '*' redis array response
* @param parser
* @returns {*}
*/
function parseArray (parser) {
var length = parseLength(parser)
if (length === undefined) {
return
}
if (length === -1) {
return null
}
var responses = new Array(length)
return parseArrayElements(parser, responses, 0)
}
/**
* Push a partly parsed array to the stack
*
* @param parser
* @param elem
* @param i
* @returns {undefined}
*/
function pushArrayCache (parser, elem, pos) {
parser.arrayCache.push(elem)
parser.arrayPos.push(pos)
}
/**
* Parse chunked redis array response
* @param parser
* @returns {*}
*/
function parseArrayChunks (parser) {
var tmp = parser.arrayCache.pop()
var pos = parser.arrayPos.pop()
if (parser.arrayCache.length) {
var res = parseArrayChunks(parser)
if (!res) {
pushArrayCache(parser, tmp, pos)
return
}
tmp[pos++] = res
}
return parseArrayElements(parser, tmp, pos)
}
/**
* Parse redis array response elements
* @param parser
* @param responses
* @param i
* @returns {*}
*/
function parseArrayElements (parser, responses, i) {
var bufferLength = parser.buffer.length
while (i < responses.length) {
var offset = parser.offset
if (parser.offset >= bufferLength) {
pushArrayCache(parser, responses, i)
return
}
var response = parseType(parser, parser.buffer[parser.offset++])
if (response === undefined) {
if (!parser.arrayCache.length) {
parser.offset = offset
}
pushArrayCache(parser, responses, i)
return
}
responses[i] = response
i++
}
return responses
}
/**
* Calls the appropriate parser for the specified type.
* @param parser
* @param type
* @returns {*}
*/
function parseType (parser, type) {
switch (type) {
case 36: // $
return parseBulkString(parser)
case 58: // :
return parseInteger(parser)
case 43: // +
return parseSimpleString(parser)
case 42: // *
return parseArray(parser)
case 45: // -
return parseError(parser)
default:
return handleError(parser, new ParserError(
'Protocol error, got ' + JSON.stringify(String.fromCharCode(type)) + ' as reply type byte',
JSON.stringify(parser.buffer),
parser.offset
))
}
}
// All allowed options including their typeof value
var optionTypes = {
returnError: 'function',
returnFatalError: 'function',
returnReply: 'function',
returnBuffers: 'boolean',
stringNumbers: 'boolean',
name: 'string'
}
/**
* Javascript Redis Parser
* @param options
* @constructor
*/
function JavascriptRedisParser (options) {
if (!(this instanceof JavascriptRedisParser)) {
return new JavascriptRedisParser(options)
}
if (!options || !options.returnError || !options.returnReply) {
throw new TypeError('Please provide all return functions while initiating the parser')
}
for (var key in options) {
// eslint-disable-next-line valid-typeof
if (optionTypes.hasOwnProperty(key) && typeof options[key] !== optionTypes[key]) {
throw new TypeError('The options argument contains the property "' + key + '" that is either unknown or of a wrong type')
}
}
if (options.name === 'hiredis') {
/* istanbul ignore next: hiredis is only supported for legacy usage */
try {
var Hiredis = require('./hiredis')
console.error(new TypeError('Using hiredis is discouraged. Please use the faster JS parser by removing the name option.').stack.replace('Error', 'Warning'))
return new Hiredis(options)
} catch (e) {
console.error(new TypeError('Hiredis is not installed. Please remove the `name` option. The (faster) JS parser is used instead.').stack.replace('Error', 'Warning'))
}
}
this.optionReturnBuffers = !!options.returnBuffers
this.optionStringNumbers = !!options.stringNumbers
this.returnError = options.returnError
this.returnFatalError = options.returnFatalError || options.returnError
this.returnReply = options.returnReply
this.name = 'javascript'
this.reset()
}
/**
* Reset the parser values to the initial state
*
* @returns {undefined}
*/
JavascriptRedisParser.prototype.reset = function () {
this.offset = 0
this.buffer = null
this.bigStrSize = 0
this.bigOffset = 0
this.totalChunkSize = 0
this.bufferCache = []
this.arrayCache = []
this.arrayPos = []
}
/**
* Set the returnBuffers option
*
* @param returnBuffers
* @returns {undefined}
*/
JavascriptRedisParser.prototype.setReturnBuffers = function (returnBuffers) {
if (typeof returnBuffers !== 'boolean') {
throw new TypeError('The returnBuffers argument has to be a boolean')
}
this.optionReturnBuffers = returnBuffers
}
/**
* Set the stringNumbers option
*
* @param stringNumbers
* @returns {undefined}
*/
JavascriptRedisParser.prototype.setStringNumbers = function (stringNumbers) {
if (typeof stringNumbers !== 'boolean') {
throw new TypeError('The stringNumbers argument has to be a boolean')
}
this.optionStringNumbers = stringNumbers
}
/**
* Decrease the bufferPool size over time
* @returns {undefined}
*/
function decreaseBufferPool () {
if (bufferPool.length > 50 * 1024) {
// Balance between increasing and decreasing the bufferPool
if (counter === 1 || notDecreased > counter * 2) {
// Decrease the bufferPool by 10% by removing the first 10% of the current pool
var sliceLength = Math.floor(bufferPool.length / 10)
if (bufferOffset <= sliceLength) {
bufferOffset = 0
} else {
bufferOffset -= sliceLength
}
bufferPool = bufferPool.slice(sliceLength, bufferPool.length)
} else {
notDecreased++
counter--
}
} else {
clearInterval(interval)
counter = 0
notDecreased = 0
interval = null
}
}
/**
* Check if the requested size fits in the current bufferPool.
* If it does not, reset and increase the bufferPool accordingly.
*
* @param length
* @returns {undefined}
*/
function resizeBuffer (length) {
if (bufferPool.length < length + bufferOffset) {
var multiplier = length > 1024 * 1024 * 75 ? 2 : 3
if (bufferOffset > 1024 * 1024 * 111) {
bufferOffset = 1024 * 1024 * 50
}
bufferPool = bufferAlloc(length * multiplier + bufferOffset)
bufferOffset = 0
counter++
if (interval === null) {
interval = setInterval(decreaseBufferPool, 50)
}
}
}
/**
* Concat a bulk string containing multiple chunks
*
* Notes:
* 1) The first chunk might contain the whole bulk string including the \r
* 2) We are only safe to fully add up elements that are neither the first nor any of the last two elements
*
* @param parser
* @returns {String}
*/
function concatBulkString (parser) {
var list = parser.bufferCache
var chunks = list.length
var offset = parser.bigStrSize - parser.totalChunkSize
parser.offset = offset
if (offset <= 2) {
if (chunks === 2) {
return list[0].toString('utf8', parser.bigOffset, list[0].length + offset - 2)
}
chunks--
offset = list[list.length - 2].length + offset
}
var res = decoder.write(list[0].slice(parser.bigOffset))
for (var i = 1; i < chunks - 1; i++) {
res += decoder.write(list[i])
}
res += decoder.end(list[i].slice(0, offset - 2))
return res
}
/**
* Concat the collected chunks from parser.bufferCache.
*
* Increases the bufferPool size beforehand if necessary.
*
* @param parser
* @returns {Buffer}
*/
function concatBulkBuffer (parser) {
var list = parser.bufferCache
var chunks = list.length
var length = parser.bigStrSize - parser.bigOffset - 2
var offset = parser.bigStrSize - parser.totalChunkSize
parser.offset = offset
if (offset <= 2) {
if (chunks === 2) {
return list[0].slice(parser.bigOffset, list[0].length + offset - 2)
}
chunks--
offset = list[list.length - 2].length + offset
}
resizeBuffer(length)
var start = bufferOffset
list[0].copy(bufferPool, start, parser.bigOffset, list[0].length)
bufferOffset += list[0].length - parser.bigOffset
for (var i = 1; i < chunks - 1; i++) {
list[i].copy(bufferPool, bufferOffset)
bufferOffset += list[i].length
}
list[i].copy(bufferPool, bufferOffset, 0, offset - 2)
bufferOffset += offset - 2
return bufferPool.slice(start, bufferOffset)
}
/**
* Parse the redis buffer
* @param buffer
* @returns {undefined}
*/
JavascriptRedisParser.prototype.execute = function execute (buffer) {
if (this.buffer === null) {
this.buffer = buffer
this.offset = 0
} else if (this.bigStrSize === 0) {
var oldLength = this.buffer.length
var remainingLength = oldLength - this.offset
var newBuffer = bufferAlloc(remainingLength + buffer.length)
this.buffer.copy(newBuffer, 0, this.offset, oldLength)
buffer.copy(newBuffer, remainingLength, 0, buffer.length)
this.buffer = newBuffer
this.offset = 0
if (this.arrayCache.length) {
var arr = parseArrayChunks(this)
if (!arr) {
return
}
this.returnReply(arr)
}
} else if (this.totalChunkSize + buffer.length >= this.bigStrSize) {
this.bufferCache.push(buffer)
var tmp = this.optionReturnBuffers ? concatBulkBuffer(this) : concatBulkString(this)
this.bigStrSize = 0
this.bufferCache = []
this.buffer = buffer
if (this.arrayCache.length) {
this.arrayCache[0][this.arrayPos[0]++] = tmp
tmp = parseArrayChunks(this)
if (!tmp) {
return
}
}
this.returnReply(tmp)
} else {
this.bufferCache.push(buffer)
this.totalChunkSize += buffer.length
return
}
while (this.offset < this.buffer.length) {
var offset = this.offset
var type = this.buffer[this.offset++]
var response = parseType(this, type)
if (response === undefined) {
if (!this.arrayCache.length) {
this.offset = offset
}
return
}
if (type === 45) {
this.returnError(response)
} else {
this.returnReply(response)
}
}
this.buffer = null
}
module.exports = JavascriptRedisParser
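A minimal sketch (not part of the dumped file) of how `execute` handles a bulk string that arrives split across chunks; the reply callback only fires once the announced `$5` payload is complete:
```js
var Parser = require('redis-parser');

var parser = new Parser({
  returnReply: function (reply) { console.log('reply:', reply); },
  returnError: function (err) { console.error('error:', err); }
});

// "$5\r\nHello\r\n" arrives in two pieces; the first call only caches the chunk.
parser.execute(new Buffer('$5\r\nHel'));
parser.execute(new Buffer('lo\r\n')); // logs: reply: Hello
```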
... ...
'use strict'
var util = require('util')
var assert = require('assert')
var RedisError = require('./redisError')
var ADD_STACKTRACE = false
function ParserError (message, buffer, offset) {
assert(buffer)
assert.strictEqual(typeof offset, 'number')
RedisError.call(this, message, ADD_STACKTRACE)
this.offset = offset
this.buffer = buffer
Error.captureStackTrace(this, ParserError)
}
util.inherits(ParserError, RedisError)
Object.defineProperty(ParserError.prototype, 'name', {
value: 'ParserError',
configurable: true,
writable: true
})
module.exports = ParserError
... ...
'use strict'
var util = require('util')
function RedisError (message, stack) {
Object.defineProperty(this, 'message', {
value: message || '',
configurable: true,
writable: true
})
if (stack || stack === undefined) {
Error.captureStackTrace(this, RedisError)
}
}
util.inherits(RedisError, Error)
Object.defineProperty(RedisError.prototype, 'name', {
value: 'RedisError',
configurable: true,
writable: true
})
module.exports = RedisError
... ...
'use strict'
var util = require('util')
var RedisError = require('./redisError')
var ADD_STACKTRACE = false
function ReplyError (message) {
var tmp = Error.stackTraceLimit
Error.stackTraceLimit = 2
RedisError.call(this, message, ADD_STACKTRACE)
Error.captureStackTrace(this, ReplyError)
Error.stackTraceLimit = tmp
}
util.inherits(ReplyError, RedisError)
Object.defineProperty(ReplyError.prototype, 'name', {
value: 'ReplyError',
configurable: true,
writable: true
})
module.exports = ReplyError
... ...
{
"_args": [
[
{
"raw": "redis-parser@^2.6.0",
"scope": null,
"escapedName": "redis-parser",
"name": "redis-parser",
"rawSpec": "^2.6.0",
"spec": ">=2.6.0 <3.0.0",
"type": "range"
},
"/Users/fzy/project/koa2_Sequelize_project/node_modules/redis"
]
],
"_from": "redis-parser@>=2.6.0 <3.0.0",
"_id": "redis-parser@2.6.0",
"_inCache": true,
"_location": "/redis-parser",
"_nodeVersion": "7.7.3",
"_npmOperationalInternal": {
"host": "packages-12-west.internal.npmjs.com",
"tmp": "tmp/redis-parser-2.6.0.tgz_1491263124772_0.7531374620739371"
},
"_npmUser": {
"name": "bridgear",
"email": "ruben@bridgewater.de"
},
"_npmVersion": "4.1.2",
"_phantomChildren": {},
"_requested": {
"raw": "redis-parser@^2.6.0",
"scope": null,
"escapedName": "redis-parser",
"name": "redis-parser",
"rawSpec": "^2.6.0",
"spec": ">=2.6.0 <3.0.0",
"type": "range"
},
"_requiredBy": [
"/redis"
],
"_resolved": "https://registry.npmjs.org/redis-parser/-/redis-parser-2.6.0.tgz",
"_shasum": "52ed09dacac108f1a631c07e9b69941e7a19504b",
"_shrinkwrap": null,
"_spec": "redis-parser@^2.6.0",
"_where": "/Users/fzy/project/koa2_Sequelize_project/node_modules/redis",
"author": {
"name": "Ruben Bridgewater"
},
"bugs": {
"url": "https://github.com/NodeRedis/node-redis-parser/issues"
},
"dependencies": {},
"description": "Javascript Redis protocol (RESP) parser",
"devDependencies": {
"benchmark": "^2.1.0",
"codeclimate-test-reporter": "^0.4.0",
"hiredis": "^0.5.0",
"intercept-stdout": "^0.1.2",
"istanbul": "^0.4.0",
"mocha": "^3.1.2",
"standard": "^9.0.0"
},
"directories": {
"test": "test",
"lib": "lib"
},
"dist": {
"shasum": "52ed09dacac108f1a631c07e9b69941e7a19504b",
"tarball": "https://registry.npmjs.org/redis-parser/-/redis-parser-2.6.0.tgz"
},
"engines": {
"node": ">=0.10.0"
},
"gitHead": "eea04cad0c4f53fd1e9f7079b5f4ededf50f5945",
"homepage": "https://github.com/NodeRedis/node-redis-parser#readme",
"keywords": [
"redis",
"protocol",
"parser",
"database",
"javascript",
"node",
"nodejs",
"resp",
"hiredis"
],
"license": "MIT",
"main": "index.js",
"maintainers": [
{
"name": "bridgear",
"email": "ruben@bridgewater.de"
}
],
"name": "redis-parser",
"optionalDependencies": {},
"readme": "[![Build Status](https://travis-ci.org/NodeRedis/node-redis-parser.png?branch=master)](https://travis-ci.org/NodeRedis/node-redis-parser)\n[![Test Coverage](https://codeclimate.com/github/NodeRedis/node-redis-parser/badges/coverage.svg)](https://codeclimate.com/github/NodeRedis/node-redis-parser/coverage)\n[![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg)](http://standardjs.com/)\n\n# redis-parser\n\nA high performance javascript redis parser built for [node_redis](https://github.com/NodeRedis/node_redis) and [ioredis](https://github.com/luin/ioredis). Parses all [RESP](http://redis.io/topics/protocol) data.\n\n## Install\n\nInstall with [NPM](https://npmjs.org/):\n\n npm install redis-parser\n\n## Usage\n\n```js\nvar Parser = require('redis-parser');\n\nvar myParser = new Parser(options);\n```\n\n### Options\n\n* `returnReply`: *function*; mandatory\n* `returnError`: *function*; mandatory\n* `returnFatalError`: *function*; optional, defaults to the returnError function\n* `returnBuffers`: *boolean*; optional, defaults to false\n* `stringNumbers`: *boolean*; optional, defaults to false\n\n### Functions\n\n* `reset()`: reset the parser to it's initial state\n* `setReturnBuffers(boolean)`: (JSParser only) set the returnBuffers option on/off without resetting the parser\n* `setStringNumbers(boolean)`: (JSParser only) set the stringNumbers option on/off without resetting the parser\n\n### Error classes\n\n* `RedisError` sub class of Error\n* `ReplyError` sub class of RedisError\n* `ParserError` sub class of RedisError\n\nAll Redis errors will be returned as `ReplyErrors` while a parser error is returned as `ParserError`. \nAll error classes are exported by the parser.\n\n### Example\n\n```js\nvar Parser = require(\"redis-parser\");\n\nfunction Library () {}\n\nLibrary.prototype.returnReply = function (reply) { ... }\nLibrary.prototype.returnError = function (err) { ... }\nLibrary.prototype.returnFatalError = function (err) { ... }\n\nvar lib = new Library();\n\nvar parser = new Parser({\n returnReply: function(reply) {\n lib.returnReply(reply);\n },\n returnError: function(err) {\n lib.returnError(err);\n },\n returnFatalError: function (err) {\n lib.returnFatalError(err);\n }\n});\n\nLibrary.prototype.streamHandler = function () {\n this.stream.on('data', function (buffer) {\n // Here the data (e.g. `new Buffer('$5\\r\\nHello\\r\\n'`)) is passed to the parser and the result is passed to either function depending on the provided data.\n parser.execute(buffer);\n });\n};\n```\nYou do not have to use the returnFatalError function. Fatal errors will be returned in the normal error function in that case.\n\nAnd if you want to return buffers instead of strings, you can do this by adding the `returnBuffers` option.\n\nIf you handle with big numbers that are to large for JS (Number.MAX_SAFE_INTEGER === 2^53 - 16) please use the `stringNumbers` option. That way all numbers are going to be returned as String and you can handle them safely.\n\n```js\n// Same functions as in the first example\n\nvar parser = new Parser({\n returnReply: function(reply) {\n lib.returnReply(reply);\n },\n returnError: function(err) {\n lib.returnError(err);\n },\n returnBuffers: true, // All strings are returned as Buffer e.g. 
<Buffer 48 65 6c 6c 6f>\n stringNumbers: true // All numbers are returned as String\n});\n\n// The streamHandler as above\n```\n\n## Protocol errors\n\nTo handle protocol errors (this is very unlikely to happen) gracefully you should add the returnFatalError option, reject any still running command (they might have been processed properly but the reply is just wrong), destroy the socket and reconnect. Note that while doing this no new command may be added, so all new commands have to be buffered in the meantime, otherwise a chunk might still contain partial data of a following command that was already processed properly but answered in the same chunk as the command that resulted in the protocol error.\n\n## Contribute\n\nThe parser is highly optimized but there may still be further optimizations possible.\n\n npm install\n npm test\n npm run benchmark\n\nCurrently the benchmark compares the performance against the hiredis parser:\n\n HIREDIS: $ multiple chunks in a bulk string x 859,880 ops/sec ±1.22% (82 runs sampled)\n HIREDIS BUF: $ multiple chunks in a bulk string x 608,869 ops/sec ±1.72% (85 runs sampled)\n JS PARSER: $ multiple chunks in a bulk string x 910,590 ops/sec ±0.87% (89 runs sampled)\n JS PARSER BUF: $ multiple chunks in a bulk string x 1,299,507 ops/sec ±2.18% (84 runs sampled)\n\n HIREDIS: + multiple chunks in a string x 1,787,203 ops/sec ±0.58% (96 runs sampled)\n HIREDIS BUF: + multiple chunks in a string x 943,584 ops/sec ±1.62% (87 runs sampled)\n JS PARSER: + multiple chunks in a string x 2,008,264 ops/sec ±1.01% (91 runs sampled)\n JS PARSER BUF: + multiple chunks in a string x 2,045,546 ops/sec ±0.78% (91 runs sampled)\n\n HIREDIS: $ 4mb bulk string x 310 ops/sec ±1.58% (75 runs sampled)\n HIREDIS BUF: $ 4mb bulk string x 471 ops/sec ±2.28% (78 runs sampled)\n JS PARSER: $ 4mb bulk string x 747 ops/sec ±2.43% (85 runs sampled)\n JS PARSER BUF: $ 4mb bulk string x 846 ops/sec ±5.52% (72 runs sampled)\n\n HIREDIS: + simple string x 2,324,866 ops/sec ±1.61% (90 runs sampled)\n HIREDIS BUF: + simple string x 1,085,823 ops/sec ±2.47% (82 runs sampled)\n JS PARSER: + simple string x 4,567,358 ops/sec ±1.97% (81 runs sampled)\n JS PARSER BUF: + simple string x 5,433,901 ops/sec ±0.66% (93 runs sampled)\n\n HIREDIS: : integer x 2,332,946 ops/sec ±0.47% (93 runs sampled)\n JS PARSER: : integer x 17,730,449 ops/sec ±0.73% (91 runs sampled)\n JS PARSER STR: : integer x 12,942,037 ops/sec ±0.51% (92 runs sampled)\n\n HIREDIS: : big integer x 2,012,572 ops/sec ±0.33% (93 runs sampled)\n JS PARSER: : big integer x 10,210,923 ops/sec ±0.94% (94 runs sampled)\n JS PARSER STR: : big integer x 4,453,320 ops/sec ±0.52% (94 runs sampled)\n\n HIREDIS: * array x 44,479 ops/sec ±0.55% (94 runs sampled)\n HIREDIS BUF: * array x 14,391 ops/sec ±1.04% (86 runs sampled)\n JS PARSER: * array x 53,796 ops/sec ±2.08% (79 runs sampled)\n JS PARSER BUF: * array x 72,428 ops/sec ±0.72% (93 runs sampled)\n\n HIREDIS: * big nested array x 217 ops/sec ±0.97% (83 runs sampled)\n HIREDIS BUF: * big nested array x 255 ops/sec ±2.28% (77 runs sampled)\n JS PARSER: * big nested array x 242 ops/sec ±1.10% (85 runs sampled)\n JS PARSER BUF: * big nested array x 375 ops/sec ±1.21% (88 runs sampled)\n\n HIREDIS: - error x 78,821 ops/sec ±0.80% (93 runs sampled)\n JS PARSER: - error x 143,382 ops/sec ±0.75% (92 runs sampled)\n\n Platform info:\n Ubuntu 16.10\n Node.js 7.4.0\n Intel(R) Core(TM) i7-5600U CPU\n\n## License\n\n[MIT](./LICENSE)\n",
"readmeFilename": "README.md",
"repository": {
"type": "git",
"url": "git+https://github.com/NodeRedis/node-redis-parser.git"
},
"scripts": {
"benchmark": "node ./benchmark",
"coverage": "node ./node_modules/istanbul/lib/cli.js cover --preserve-comments ./node_modules/mocha/bin/_mocha -- -R spec",
"coverage:check": "node ./node_modules/istanbul/lib/cli.js check-coverage --branch 100 --statement 100",
"lint": "standard --fix",
"posttest": "npm run lint && npm run coverage:check",
"test": "npm run coverage"
},
"version": "2.6.0"
}
... ...
node_modules/**
coverage/**
**.md
**.log
... ...
env:
node: true
es6: false
rules:
# Possible Errors
# http://eslint.org/docs/rules/#possible-errors
comma-dangle: [2, "only-multiline"]
no-constant-condition: 2
no-control-regex: 2
no-debugger: 2
no-dupe-args: 2
no-dupe-keys: 2
no-duplicate-case: 2
no-empty: 2
no-empty-character-class: 2
no-ex-assign: 2
no-extra-boolean-cast : 2
no-extra-parens: [2, "functions"]
no-extra-semi: 2
no-func-assign: 2
no-invalid-regexp: 2
no-irregular-whitespace: 2
no-negated-in-lhs: 2
no-obj-calls: 2
no-regex-spaces: 2
no-sparse-arrays: 2
no-inner-declarations: 2
no-unexpected-multiline: 2
no-unreachable: 2
use-isnan: 2
valid-typeof: 2
# Best Practices
# http://eslint.org/docs/rules/#best-practices
array-callback-return: 2
block-scoped-var: 2
dot-notation: 2
eqeqeq: 2
no-else-return: 2
no-extend-native: 2
no-floating-decimal: 2
no-extra-bind: 2
no-fallthrough: 2
no-labels: 2
no-lone-blocks: 2
no-loop-func: 2
no-multi-spaces: 2
no-multi-str: 2
no-native-reassign: 2
no-new-wrappers: 2
no-octal: 2
no-proto: 2
no-redeclare: 2
no-return-assign: 2
no-self-assign: 2
no-self-compare: 2
no-sequences: 2
no-throw-literal: 2
no-useless-call: 2
no-useless-concat: 2
no-useless-escape: 2
no-void: 2
no-unmodified-loop-condition: 2
yoda: 2
# Strict Mode
# http://eslint.org/docs/rules/#strict-mode
strict: [2, "global"]
# Variables
# http://eslint.org/docs/rules/#variables
no-delete-var: 2
no-shadow-restricted-names: 2
no-undef: 2
no-unused-vars: [2, {"args": "none"}]
# http://eslint.org/docs/rules/#nodejs-and-commonjs
no-mixed-requires: 2
no-new-require: 2
no-path-concat: 2
# Stylistic Issues
# http://eslint.org/docs/rules/#stylistic-issues
comma-spacing: 2
eol-last: 2
indent: [2, 4, {SwitchCase: 2}]
keyword-spacing: 2
max-len: [2, 200, 2]
new-parens: 2
no-mixed-spaces-and-tabs: 2
no-multiple-empty-lines: [2, {max: 2}]
no-trailing-spaces: 2
quotes: [2, "single", "avoid-escape"]
semi: 2
space-before-blocks: [2, "always"]
space-before-function-paren: [2, "always"]
space-in-parens: [2, "never"]
space-infix-ops: 2
space-unary-ops: 2
globals:
it: true
describe: true
before: true
after: true
beforeEach: true
afterEach: true
... ...
_Thanks for wanting to report an issue you've found in node_redis. Please delete
this text and fill in the template below. Please note that the issue tracker is only
for bug reports or feature requests. If you have a question, please ask that on [gitter].
If unsure about something, just do as best as you're able._
_Note that it will be much easier to fix the issue if a test case that reproduces
the problem is provided. It is of course not always possible to reduce your code
to a small test case, but it's highly appreciated to have as much data as possible.
Thank you!_
* **Version**: What node_redis and what redis version is the issue happening on?
* **Platform**: What platform / version? (For example Node.js 0.10 or Node.js 5.7.0 on Windows 7 / Ubuntu 15.10 / Azure)
* **Description**: Description of your issue, stack traces from errors and code that reproduces the issue
[gitter]: https://gitter.im/NodeRedis/node_redis?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge
\ No newline at end of file
... ...
### Pull Request check-list
_Please make sure to review and check all of these items:_
- [ ] Does `npm test` pass with this change (including linting)?
- [ ] Is the new or changed code fully tested?
- [ ] Is a documentation update included (if this change modifies existing APIs, or introduces new ones)?
_NOTE: these things are not required to open a PR and can be done
afterwards / while the PR is open._
### Description of change
_Please provide a description of the change here._
\ No newline at end of file
... ...
examples/
benchmarks/
test/
.nyc_output/
coverage/
.tern-port
*.log
*.rdb
*.out
*.yml
... ...
LICENSE - "MIT License"
Copyright (c) 2016 by NodeRedis
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
\ No newline at end of file
... ...