From f769fe5cb41f9587490df6d97c4b5d9177646b48 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Tue, 15 May 2018 14:09:30 -0700 Subject: [PATCH 01/29] fix: correct the variable name --- src/tokens/done-in-proc/write.js | 2 +- src/tokens/done/write.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/tokens/done-in-proc/write.js b/src/tokens/done-in-proc/write.js index 6f16e38..eb47f2d 100644 --- a/src/tokens/done-in-proc/write.js +++ b/src/tokens/done-in-proc/write.js @@ -23,7 +23,7 @@ function writeDoneInProcToken(stream: Writer, token: Token) { function statusFromToken(token) { let status = 0; if (token.more) status |= 0x1; - if (token.error) status |= 0x2; + if (token.sqlError) status |= 0x2; if (token.isCountValid) status |= 0x10; if (token.serverError) status |= 0x100; return status; diff --git a/src/tokens/done/write.js b/src/tokens/done/write.js index 72a42ab..6d9a885 100644 --- a/src/tokens/done/write.js +++ b/src/tokens/done/write.js @@ -23,7 +23,7 @@ function writeDoneToken(stream: Writer, token: Token) { function statusFromToken(token) { let status = 0; if (token.more) status |= 0x1; - if (token.error) status |= 0x2; + if (token.sqlError) status |= 0x2; if (token.isCountValid) status |= 0x10; if (token.attention) status |= 0x20; if (token.serverError) status |= 0x100; From 93a1363ce760044fbb32536a174922091527f32e Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Tue, 15 May 2018 14:11:23 -0700 Subject: [PATCH 02/29] chore: update dependencies Updated dependencies to remove high level vulnerabilities listed by npm audit. Added chai-datetime plugin to test temporal types in mocha/chai --- flow-typed/chai-datetime.js | 3 +++ package.json | 11 ++++++----- 2 files changed, 9 insertions(+), 5 deletions(-) create mode 100644 flow-typed/chai-datetime.js diff --git a/flow-typed/chai-datetime.js b/flow-typed/chai-datetime.js new file mode 100644 index 0000000..2688529 --- /dev/null +++ b/flow-typed/chai-datetime.js @@ -0,0 +1,3 @@ +declare module 'chai-datetime' { + declare module.exports: any; + } \ No newline at end of file diff --git a/package.json b/package.json index 9377c71..35417a3 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,7 @@ "author": "Arthur Schreiber (schreiber.arthur@gmail.com)", "license": "ISC", "devDependencies": { - "babel-cli": "^6.24.0", + "babel-cli": "^6.26.0", "babel-eslint": "^8.2.2", "babel-plugin-transform-runtime": "^6.23.0", "babel-preset-env": "^1.2.2", @@ -21,14 +21,15 @@ "benchmark": "^2.1.3", "chai": "^4.0.2", "eslint": "^4.19.1", - "flow-bin": "^0.48.0", - "flow-copy-source": "^1.1.0", + "flow-bin": "^0.72.0", + "flow-copy-source": "^1.3.0", "mocha": "^5.0.5", "semantic-release": "^15.1.5", - "semantic-release-cli": "^3.0.3", + "semantic-release-cli": "^4.0.1", "@commitlint/cli": "^6.0.2", "@commitlint/config-conventional": "^6.0.2", - "@commitlint/travis-cli": "^6.0.2" + "@commitlint/travis-cli": "^6.0.2", + "chai-datetime": "^1.5.0" }, "dependencies": { "babel-runtime": "^6.26.0" From afdb8008c6eec97c9da48b30ed42abe3d51cb995 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Tue, 15 May 2018 14:18:37 -0700 Subject: [PATCH 03/29] feat: returnvalue token parser for fixed length types --- src/dataTypes.js | 84 +++++ src/reader.js | 23 +- src/tokens/returnvalue/index.js | 50 +++ src/tokens/returnvalue/read.js | 128 +++++++ src/value-parser.js | 169 ++++++++++ test/returnValue-test.js | 570 ++++++++++++++++++++++++++++++++ 6 files changed, 1023 insertions(+), 1 deletion(-) create mode 100644 src/dataTypes.js create 
mode 100644 src/tokens/returnvalue/index.js create mode 100644 src/tokens/returnvalue/read.js create mode 100644 src/value-parser.js create mode 100644 test/returnValue-test.js diff --git a/src/dataTypes.js b/src/dataTypes.js new file mode 100644 index 0000000..13ff253 --- /dev/null +++ b/src/dataTypes.js @@ -0,0 +1,84 @@ +/* @flow */ + +const type = { + // FIXEDLENTYPE + [0x1F]: { + id: 0x1F, + type: 'NULL', + name: 'Null' + }, + [0x30]: { + id: 0x30, + type: 'INT1', + name: 'TinyInt' + }, + [0x32]: { + id: 0x32, + type: 'BIT', + name: 'Bit' + }, + [0x34]: { + id: 0x34, + type: 'INT2', + name: 'SmallInt' + }, + [0x38]: { + id: 0x38, + type: 'INT4', + name: 'Int' + }, + [0x3A]: { + id: 0x3A, + type: 'DATETIM4', + name: 'SmallDateTime' + }, + [0x3B]: { + id: 0x3B, + type: 'FLT4', + name: 'Real' + }, + [0x3C]: { + id: 0x3C, + type: 'MONEY', + name: 'Money' + }, + [0x3D]: { + id: 0x3D, + type: 'DATETIME', + name: 'DateTime' + }, + [0x3E]: { + id: 0x3E, + type: 'FLT8', + name: 'Float' + }, + [0x7A]: { + id: 0x7A, + type: 'MONEY4', + name: 'SmallMoney' + }, + [0x7F]: { + id: 0x7F, + type: 'INT8', + name: 'BigInt' + }, + + //VARLENTYPE + [0x26]: { + id: 0x26, + name: 'IntN', + type: 'INTN', + LengthOfDataLength: 1 + } +}; + +const typeByName = {}; +var keys = Object.keys(type); +for (const key of keys) { + typeByName[type[key].name] = type[key]; +} + +module.exports.TYPE = type; + +// TODO: export typeByName as TYPES in index/tedious.js +// module.exports.TYPE = typeByName; diff --git a/src/reader.js b/src/reader.js index 826c8bf..a82222c 100644 --- a/src/reader.js +++ b/src/reader.js @@ -24,6 +24,7 @@ function nextToken(reader) { case 0xAD: return readLoginAckToken; case 0xA9: return readOrderToken; case 0x79: return readReturnStatus; + case 0xAC: return readReturnValueToken; default: console.log(reader.buffer.slice(reader.position - 1)); throw new Error('Unknown token type ' + type.toString(16)); @@ -37,15 +38,17 @@ const Reader = module.exports = class Reader extends Transform { position: number buffer: Buffer version: number + options: ?any // assign connection.options stash: Array - constructor(version: 0x07000000 | 0x07010000 | 0x71000001 | 0x72090002 | 0x730A0003 | 0x730B0003 | 0x74000004) { + constructor(version: 0x07000000 | 0x07010000 | 0x71000001 | 0x72090002 | 0x730A0003 | 0x730B0003 | 0x74000004, options: ?any) { super({ readableObjectMode: true }); this.buffer = Buffer.alloc(0); this.version = version; this.position = 0; + this.options = options; this.stash = []; @@ -76,6 +79,10 @@ const Reader = module.exports = class Reader extends Transform { return this.buffer.readUInt16LE(this.position + offset); } + readInt16LE(offset: number) : number { + return this.buffer.readInt16LE(this.position + offset); + } + readUInt32LE(offset: number) : number { return this.buffer.readUInt32LE(this.position + offset); } @@ -93,6 +100,19 @@ const Reader = module.exports = class Reader extends Transform { return 4294967296 * this.buffer.readUInt32LE(this.position + 4) + this.buffer.readUInt32LE(this.position); } + readInt64LE(offset: number) { + // TODO: This can overflow + return 4294967296 * this.buffer.readInt32LE(this.position + 4) + (this.buffer[this.position + 4] & (0x80 === 0x80 ? 
1 : -1)) * this.buffer.readUInt32LE(this.position); + } + + readFloatLE(offset: number) { + return this.buffer.readFloatLE(this.position + offset); + } + + readDoubleLE(offset: number) { + return this.buffer.readDoubleLE(this.position + offset); + } + _transform(chunk: Buffer | string, encoding: string | null, callback: (error: ?Error) => void) { if (!(chunk instanceof Buffer)) { return callback(new Error('Expected Buffer')); @@ -133,3 +153,4 @@ const readInfoErrorToken = require('./tokens/infoerror/read'); const readLoginAckToken = require('./tokens/loginack/read'); const readOrderToken = require('./tokens/order/read'); const readReturnStatus = require('./tokens/returnStatus/read'); +const readReturnValueToken = require('./tokens/returnvalue/read'); diff --git a/src/tokens/returnvalue/index.js b/src/tokens/returnvalue/index.js new file mode 100644 index 0000000..4d49092 --- /dev/null +++ b/src/tokens/returnvalue/index.js @@ -0,0 +1,50 @@ +/* @flow */ + +const Token = require('../../token'); +import type { TypeInfo } from '../../types'; + +class ReturnValueToken extends Token { + paramOrdinal: ?number + paramName: ?string + status: ?number + userType: ?number + // TODO: parser flag + flags: { + nullable: ?boolean, + caseSensitive: ?boolean, + updateable: ?boolean, + identity: ?boolean, + computed: ?boolean, + reservedODBC: ?boolean, + fixedLenCLRType: ?boolean, + encrypted: ?boolean + } + typeInfo: ?TypeInfo + valueLength: ?number + value: ?any + + constructor() { + super(0xAC); + + this.paramOrdinal = undefined; + this.paramName = undefined; + this.status = undefined; + this.userType = undefined; + this.flags = { + nullable: undefined, + caseSensitive: undefined, + updateable: undefined, + identity: undefined, + computed: undefined, + reservedODBC: undefined, + fixedLenCLRType: undefined, + encrypted: undefined + }; + this.typeInfo = undefined; + this.valueLength = undefined; + this.value = undefined; + } +} +module.exports = ReturnValueToken; + +ReturnValueToken.read = require('./read'); diff --git a/src/tokens/returnvalue/read.js b/src/tokens/returnvalue/read.js new file mode 100644 index 0000000..d876d0c --- /dev/null +++ b/src/tokens/returnvalue/read.js @@ -0,0 +1,128 @@ +/* @flow */ + +import type Reader from '../../reader'; +import type { TypeInfo } from '../../types'; + +function readReturnValueToken(reader: Reader) { + const token = new ReturnValueToken(); + + let offset = 0; + token.paramOrdinal = reader.readUInt16LE(offset); + offset += 2; + const paramLength = reader.readUInt8(offset) * 2; + offset += 1; + token.paramName = reader.readString('ucs2', offset, offset + paramLength); + offset += paramLength; + + token.status = reader.readUInt8(offset); + offset += 1; + reader.consumeBytes(offset); + + reader.stash.push(token); + return parseUserType; +} + +function parseUserType(reader: Reader) { + if (reader.version < 0x72090002) { + return parseUserType_7_0; + } else { + return parseUserType_7_2; + } +} + +function parseUserType_7_0(reader: Reader) { + if (!reader.bytesAvailable(2)) { + return; + } + + const userType = reader.readUInt16LE(0); + reader.consumeBytes(2); + + const token: ReturnValueToken = reader.stash[reader.stash.length - 1]; + token.userType = userType; + + return parseFlags; +} + +function parseUserType_7_2(reader: Reader) { + if (!reader.bytesAvailable(4)) { + return; + } + + const userType = reader.readUInt32LE(0); + reader.consumeBytes(4); + + const token: ReturnValueToken = reader.stash[reader.stash.length - 1]; + token.userType = userType; + + return 
parseFlags; +} + +function parseFlags(reader: Reader) { + if (reader.version < 0x72090002) { + return parseFlags_7_0; + } else if (reader.version < 0x74000004) { + return parseFlags_7_2; + } else { + return parseFlags_7_4; + } +} + + +function parseFlags_7_0(reader: Reader) { + if (!reader.bytesAvailable(2)) { + return; + } + + // TODO: Implement flag parsing + const flags = reader.readUInt16LE(0); // eslint-disable-line no-unused-vars + reader.consumeBytes(2); + + return parseTypeInfo; +} + +function parseFlags_7_2(reader: Reader) { + if (!reader.bytesAvailable(2)) { + return; + } + + // TODO: Implement flag parsing + const flags = reader.readUInt16LE(0); // eslint-disable-line no-unused-vars + reader.consumeBytes(2); + + return parseTypeInfo; +} + +function parseFlags_7_4(reader: Reader) { + if (!reader.bytesAvailable(2)) { + return; + } + + // TODO: Implement flag parsing + const flags = reader.readUInt16LE(0); // eslint-disable-line no-unused-vars + reader.consumeBytes(2); + + return parseTypeInfo; +} + +function parseTypeInfo(reader: Reader) { + return readTypeInfo(parseValue, reader); +} + +function parseValue(reader: Reader) { + const typeInfo: TypeInfo = reader.stash.pop(); + const token: ReturnValueToken = reader.stash[reader.stash.length - 1]; + token.typeInfo = typeInfo; + return valueParse(afterReadingValue, reader); +} + +function afterReadingValue(reader: Reader) { + const token: ReturnValueToken = reader.stash.pop(); + reader.push(token); + return reader.nextToken; +} + +module.exports = readReturnValueToken; +const ReturnValueToken = require('.'); +const { readTypeInfo } = require('../../types'); +const { valueParse } = require('../../value-parser'); diff --git a/src/value-parser.js b/src/value-parser.js new file mode 100644 index 0000000..ccabf1e --- /dev/null +++ b/src/value-parser.js @@ -0,0 +1,169 @@ +/* @flow */ + +type readStep = (reader: Reader) =>?readStep; +const Reader = require('./reader'); +const TYPE = require('./dataTypes').TYPE; +const MAX = (1 << 16) - 1; +const THREE_AND_A_THIRD = 3 + (1 / 3); +const MONEY_DIVISOR = 10000; + +function valueParse(next: readStep, reader: Reader) { + reader.stash.push(next); + //TODO : add readTextPointerNull + return readDataLength; +} + +function readDataLength(reader: Reader) { + const token = reader.stash[reader.stash.length - 2]; + // s2.2.4.2.1 + switch (token.typeInfo.id & 0x30) { + case 0x10: // xx01xxxx - s2.2.4.2.1.1 + // token.value = 0; + // reader.stash.push(0); + reader.stash.push(0); + //TODO: test this + return readValue; + + case 0x20: // xx10xxxx - s2.2.4.2.1.3 + // Variable length + if (token.typeInfo.dataLength !== MAX) { + switch (TYPE[token.typeInfo.id].LengthOfDataLength) { + case 1: + reader.stash.push(reader.readUInt8(0)); + reader.consumeBytes(1); + return readValue; + default: + console.log('Datalength parser not-implemented for ', TYPE[token.typeInfo.id].name); + } + } + else { + //TODO: add test? 
+ return reader.stash.pop(); + } + + case 0x30: // xx11xxxx - s2.2.4.2.1.2 + // Fixed length + const len = 1 << ((token.typeInfo.id & 0x0C) >> 2); + reader.stash.push(len); + return readValue; + } +} + +function readValue(reader: Reader) { + const dataLength = reader.stash.pop(); + const token = reader.stash[reader.stash.length - 2]; + + switch (TYPE[token.typeInfo.id].name) { + + // Fixed-Length Data Types + case 'Null': + token.value = null; + return reader.stash.pop(); + case 'TinyInt': + token.value = reader.readUInt8(0); + reader.consumeBytes(1); + return reader.stash.pop(); + case 'Bit': + token.value = !!reader.readUInt8(0); + reader.consumeBytes(1); + return reader.stash.pop(); + case 'SmallInt': + token.value = reader.readInt16LE(0); + reader.consumeBytes(2); + return reader.stash.pop(); + case 'Int': + token.value = reader.readInt32LE(0); + reader.consumeBytes(4); + return reader.stash.pop(); + case 'BigInt': + //TODO: replace with better alternative to avoid overflow and to read -ve value + token.value = reader.readUInt64LE(0); + reader.consumeBytes(8); + return reader.stash.pop(); + case 'SmallDateTime': + return readSmallDateTime; + case 'Real': + token.value = reader.readFloatLE(0); + reader.consumeBytes(4); + return reader.stash.pop(); + case 'Money': + return readMoney; + case 'DateTime': + return readDateTime; + case 'Float': + token.value = reader.readDoubleLE(0); + reader.consumeBytes(8); + return reader.stash.pop(); + case 'SmallMoney': + token.value = reader.readInt32LE(0) / MONEY_DIVISOR; + reader.consumeBytes(4); + return reader.stash.pop(); + + // Variable-Length Data Types + case 'IntN': + switch (dataLength) { + case 0: + token.value = null; + return reader.stash.pop(); + case 1: // TinyInt + token.value = reader.readUInt8(0); + reader.consumeBytes(1); + return reader.stash.pop(); + case 2: // SmallInt + token.value = reader.readInt16LE(0); + reader.consumeBytes(2); + return reader.stash.pop(); + case 4: // Int + token.value = reader.readInt32LE(0); + reader.consumeBytes(4); + return reader.stash.pop(); + case 8: // BigInt + // TODO: replace with better alternative to avoid overflow and to read -ve value + token.value = reader.readUInt64LE(0); + reader.consumeBytes(8); + return reader.stash.pop(); + default: + console.log('Unknown length'); + } + default: + console.log('readValue not implemented'); + } +} + +function readSmallDateTime(reader: Reader) { + const token = reader.stash[reader.stash.length - 2]; + const days = reader.readUInt16LE(0); + const minutes = reader.readUInt16LE(2); + if (reader.options.useUTC) { + token.value = new Date(Date.UTC(1900, 0, 1 + days, 0, minutes)); + } else { + token.value = new Date(1900, 0, 1 + days, 0, minutes); + } + reader.consumeBytes(4); + return reader.stash.pop(); +} + +function readDateTime(reader: Reader) { + const token = reader.stash[reader.stash.length - 2]; + const days = reader.readUInt32LE(0); + const threeHundredthsOfSecond = reader.readUInt32LE(4); + const milliseconds = Math.round(threeHundredthsOfSecond * THREE_AND_A_THIRD); + if (reader.options.useUTC) { + token.value = new Date(Date.UTC(1900, 0, 1 + days, 0, 0, 0, milliseconds)); + } else { + token.value = new Date(1900, 0, 1 + days, 0, 0, 0, milliseconds); + } + reader.consumeBytes(8); + return reader.stash.pop(); +} + +function readMoney(reader: Reader) { + const token = reader.stash[reader.stash.length - 2]; + const high = reader.readUInt32LE(0); + const low = reader.readUInt32LE(4); + token.value = (low + (0x100000000 * high)) / MONEY_DIVISOR; + 
reader.consumeBytes(8); + return reader.stash.pop(); +} + +module.exports.valueParse = valueParse; diff --git a/test/returnValue-test.js b/test/returnValue-test.js new file mode 100644 index 0000000..bb7c606 --- /dev/null +++ b/test/returnValue-test.js @@ -0,0 +1,570 @@ +/* @flow */ + +const chai = require('chai'); +const assert = chai.assert; +const chai_datetime = require('chai-datetime'); +chai.use(chai_datetime); + +const Reader = require('../src').Reader; +const ReturnValueToken = require('../src/tokens/returnvalue'); + +describe('Parsing a RETURNVALUE token', function() { + + const SHIFT_LEFT_32 = (1 << 16) * (1 << 16); + const SHIFT_RIGHT_32 = 1 / SHIFT_LEFT_32; + + describe('in TDS 7.0 mode', function() { + + let reader, data, paramOrdinal, paramName, status, userType, typeid, dataLength, value, offset, tempBuff; + + before(function() { + paramOrdinal = 1; + paramName = '@count'; + status = 1; + userType = 0; + typeid = 0x26; + value = 4; + offset = 0; + tempBuff = Buffer.alloc(21); + buildDataBuffer(); + }); + + beforeEach(function() { + reader = new Reader(0x07000000); + }); + + function addListners(done, token) { + reader.on('data', function(retValToken) { + assert.instanceOf(retValToken, ReturnValueToken); + token = retValToken; + }); + + reader.on('end', function() { + assert.strictEqual(token.paramOrdinal, paramOrdinal); + assert.strictEqual(token.paramName, paramName); + assert.strictEqual(token.status, status); + assert.strictEqual(token.userType, userType); + assert.strictEqual(token.typeInfo.id, typeid); + assert.strictEqual(token.value, value); + done(); + }); + } + + function buildDataBuffer() { + tempBuff.writeUInt8(0xAC, offset++); + tempBuff.writeUInt16LE(paramOrdinal, offset); + offset += 2; + tempBuff.writeUInt8(paramName.length, offset++); + tempBuff.write(paramName, offset, paramName.length * 2, 'ucs2'); + offset += paramName.length * 2; + tempBuff.writeUInt8(status, offset++); + tempBuff.writeUInt16LE(userType, offset); + offset += 2; + // Flag + tempBuff.writeUInt16LE(0, offset); + offset += 2; + } + + it('should parse the INTNTYPE(Int) token correctly', function(done) { + dataLength = 4; + + data = Buffer.alloc(28); + tempBuff.copy(data, 0, 0); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + data.writeUInt32LE(value, offset); + const token = {}; + + addListners(done, token); + reader.end(data); + }); + }); + + describe('in TDS 7.2 mode', function() { + + describe('test INTNTYPE', function() { + + let reader, data, paramOrdinal, paramName, status, userType, typeid, dataLength, value, offset, tempBuff, tempOffset; + + before(function() { + paramOrdinal = 1; + paramName = '@count'; + status = 1; + userType = 0; + typeid = 0x26; + value = 4; + tempOffset = 0; + tempBuff = Buffer.alloc(23); + buildDataBuffer(); + }); + + beforeEach(function() { + reader = new Reader(0x72090002); + }); + + function addListners(done, token) { + reader.on('data', function(retValToken) { + assert.instanceOf(retValToken, ReturnValueToken); + token = retValToken; + }); + + reader.on('end', function() { + assert.strictEqual(token.paramOrdinal, paramOrdinal); + assert.strictEqual(token.paramName, paramName); + assert.strictEqual(token.status, status); + assert.strictEqual(token.userType, userType); + assert.strictEqual(token.typeInfo.id, typeid); + assert.strictEqual(token.value, value); + done(); + }); + } + + function buildDataBuffer() { + tempBuff.writeUInt8(0xAC, tempOffset++); + 
tempBuff.writeUInt16LE(paramOrdinal, tempOffset); + tempOffset += 2; + tempBuff.writeUInt8(paramName.length, tempOffset++); + tempBuff.write(paramName, tempOffset, paramName.length * 2, 'ucs2'); + tempOffset += paramName.length * 2; + tempBuff.writeUInt8(status, tempOffset++); + tempBuff.writeUInt32LE(userType, tempOffset); + tempOffset += 4; + // Flag + tempBuff.writeUInt16LE(0, tempOffset); + tempOffset += 2; + } + + it('should parse the INTNTYPE(Tinyint) token correctly', function(done) { + dataLength = 1; + offset = tempOffset; + + data = Buffer.alloc(27); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + data.writeUInt8(value, offset); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the INTNTYPE(smallint) token correctly', function(done) { + dataLength = 2; + offset = tempOffset; + + data = Buffer.alloc(28); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + data.writeUInt16LE(value, offset); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the INTNTYPE(Int) token correctly', function(done) { + dataLength = 4; + offset = tempOffset; + + data = Buffer.alloc(30); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + data.writeUInt32LE(value, offset); + const token = {}; + + addListners(done, token); + reader.end(data); + }); + + it('should parse the INTNTYPE(Bigint) token correctly', function(done) { + dataLength = 8; + offset = tempOffset; + + data = Buffer.alloc(34); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + // writing data as 2 separate 32bits + data.writeUInt32LE(value, offset); + data.writeUInt32LE(0, offset + 4); + const token = {}; + + addListners(done, token); + reader.end(data); + }); + + it('should parse the INTNTYPE(null) token correctly', function(done) { + dataLength = 8; + value = null; + offset = tempOffset; + + data = Buffer.alloc(26); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE : zero value length for null type + data.writeUInt8(0, offset++); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + }); + + describe('test FIXEDLENTYPE', function() { + let reader, data, paramOrdinal, paramName, status, userType, typeid, value, offset, tempBuff, tempOffset; + + before(function() { + paramOrdinal = 1; + paramName = '@count'; + status = 1; + userType = 0; + tempOffset = 0; + tempBuff = Buffer.alloc(23); + buildDataBuffer(); + }); + + beforeEach(function() { + reader = new Reader(0x72090002); + + }); + + + function addListners(done, token) { + reader.on('data', function(retValToken) { + assert.instanceOf(retValToken, ReturnValueToken); + token = retValToken; + }); + + reader.on('end', function() { + assert.strictEqual(token.paramOrdinal, paramOrdinal); + assert.strictEqual(token.paramName, paramName); + assert.strictEqual(token.status, status); + assert.strictEqual(token.userType, userType); + assert.strictEqual(token.typeInfo.id, typeid); + + if (typeid == 
0x3A || typeid == 0x3D) { + // use chai-datetime package for temporal types + assert.equalDate(token.value, value); + assert.equalTime(token.value, value); + } + else { + assert.strictEqual(token.value, value); + } + + done(); + }); + } + + function buildDataBuffer() { + tempBuff.writeUInt8(0xAC, tempOffset++); + tempBuff.writeUInt16LE(paramOrdinal, tempOffset); + tempOffset += 2; + tempBuff.writeUInt8(paramName.length, tempOffset++); + tempBuff.write(paramName, tempOffset, paramName.length * 2, 'ucs2'); + tempOffset += paramName.length * 2; + tempBuff.writeUInt8(status, tempOffset++); + tempBuff.writeUInt32LE(userType, tempOffset); + tempOffset += 4; + // Flag + tempBuff.writeUInt16LE(0, tempOffset); + tempOffset += 2; + } + + it('should parse the NULLTYPE token correctly', function(done) { + typeid = 0x1F; + value = null; + offset = tempOffset; + + data = Buffer.alloc(24); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the INT1TYPE/TintInt token correctly', function(done) { + typeid = 0x30; + value = 255; + offset = tempOffset; + + data = Buffer.alloc(25); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + data.writeUInt8(value, offset); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the BITTYPE token correctly', function(done) { + typeid = 0x32; + value = false; + offset = tempOffset; + + data = Buffer.alloc(25); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + data.writeUInt8(value, offset); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the INT2TYPE/SmallInt token correctly', function(done) { + typeid = 0x34; + value = 32767; + offset = tempOffset; + + data = Buffer.alloc(26); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + data.writeUInt16LE(value, offset); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the INT4TYPE/Int token correctly', function(done) { + typeid = 0x38; + value = -2147483648; + offset = tempOffset; + + data = Buffer.alloc(28); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + data.writeInt32LE(value, offset); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the INT8TYPE/BigInt token correctly', function(done) { + typeid = 0x7F; + // value = -2147483648; + value = 147483648; + offset = tempOffset; + + data = Buffer.alloc(32); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + //TODO: better alternative to test bigInt value? 
+ data.writeInt32LE(value, offset); + data.writeInt32LE(0, offset + 4); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the DATETIM4TYPE/SmallDateTime token correctly : UTC', function(done) { + reader.options = {}; + reader.options.useUTC = true; + typeid = 0x3A; + const days = 43225; // days since 1900-01-01 + const minutes = 763; + value = new Date('2018-05-07T12:43:00.000Z'); + offset = tempOffset; + + data = Buffer.alloc(28); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + data.writeUInt16LE(days, offset); + data.writeUInt16LE(minutes, offset + 2); + + const token = {}; + addListners(done, token); + + reader.end(data); + + }); + + it('should parse the DATETIM4TYPE/SmallDateTime token correctly : local time', function(done) { + reader.options = {}; + reader.options.useUTC = false; + typeid = 0x3A; + const days = 43225; + const minutes = 763; + value = new Date('2018-05-07T12:43:00.000'); + offset = tempOffset; + + data = Buffer.alloc(28); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + data.writeUInt16LE(days, offset); + data.writeUInt16LE(minutes, offset + 2); + + const token = {}; + addListners(done, token); + + reader.end(data); + + }); + + it('should parse the FLT4TYPE/Real token correctly', function(done) { + typeid = 0x3B; + value = 9654.2529296875; + offset = tempOffset; + + data = Buffer.alloc(28); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + data.writeFloatLE(value, offset); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the FLT8TYPE/Float token correctly', function(done) { + typeid = 0x3E; + value = 9654.2546456567565767644; + offset = tempOffset; + + data = Buffer.alloc(32); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + data.writeDoubleLE(value, offset); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the MONEYTYPE/Money token correctly', function(done) { + typeid = 0x3C; + value = 922337203.5807; + offset = tempOffset; + + const TDS_value = value * 10000; + data = Buffer.alloc(32); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + data.writeInt32LE(Math.floor(TDS_value * SHIFT_RIGHT_32), offset); + data.writeInt32LE(TDS_value & -1, offset + 4); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the MONEY4TYPE/SmallMoney token correctly', function(done) { + typeid = 0x7A; + value = -214748.3647; + offset = tempOffset; + + const TDS_value = value * 10000; + data = Buffer.alloc(28); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + data.writeInt32LE(TDS_value, offset); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the DATETIMETYPE/DateTime token correctly', function(done) { + reader.options = {}; + reader.options.useUTC = true; + offset = tempOffset; + + typeid = 0x3D; + value = new Date('2004-05-23T14:25:10.487Z'); + + const datetime = Buffer.alloc(8, 'F09400009AA0ED00', 'hex'); //'2004-05-23T14:25:10.487Z' + data = Buffer.alloc(32); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + datetime.copy(data, offset); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + 
}); + }); + +}); From cdaa1a7bffc61944521e8ec4195f1175a0841234 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Mon, 28 May 2018 14:06:02 -0700 Subject: [PATCH 04/29] feat: add flag parsing and tests for returnvalue token --- src/tokens/returnvalue/index.js | 21 --------- src/tokens/returnvalue/read.js | 38 ++-------------- test/returnValue-test.js | 79 ++++++++++++++++++--------------- 3 files changed, 46 insertions(+), 92 deletions(-) diff --git a/src/tokens/returnvalue/index.js b/src/tokens/returnvalue/index.js index 4d49092..68f473c 100644 --- a/src/tokens/returnvalue/index.js +++ b/src/tokens/returnvalue/index.js @@ -8,17 +8,6 @@ class ReturnValueToken extends Token { paramName: ?string status: ?number userType: ?number - // TODO: parser flag - flags: { - nullable: ?boolean, - caseSensitive: ?boolean, - updateable: ?boolean, - identity: ?boolean, - computed: ?boolean, - reservedODBC: ?boolean, - fixedLenCLRType: ?boolean, - encrypted: ?boolean - } typeInfo: ?TypeInfo valueLength: ?number value: ?any @@ -30,16 +19,6 @@ class ReturnValueToken extends Token { this.paramName = undefined; this.status = undefined; this.userType = undefined; - this.flags = { - nullable: undefined, - caseSensitive: undefined, - updateable: undefined, - identity: undefined, - computed: undefined, - reservedODBC: undefined, - fixedLenCLRType: undefined, - encrypted: undefined - }; this.typeInfo = undefined; this.valueLength = undefined; this.value = undefined; diff --git a/src/tokens/returnvalue/read.js b/src/tokens/returnvalue/read.js index d876d0c..0e91349 100644 --- a/src/tokens/returnvalue/read.js +++ b/src/tokens/returnvalue/read.js @@ -59,48 +59,16 @@ function parseUserType_7_2(reader: Reader) { } function parseFlags(reader: Reader) { - if (reader.version < 0x72090002) { - return parseFlags_7_0; - } else if (reader.version < 0x74000004) { - return parseFlags_7_2; - } else { - return parseFlags_7_4; - } -} - - -function parseFlags_7_0(reader: Reader) { + // for RETURNVALUE_TOKEN all the flags should be zero (TDS 2.2.7.18) if (!reader.bytesAvailable(2)) { return; } - // TODO: Implement flag parsing const flags = reader.readUInt16LE(0); // eslint-disable-line no-unused-vars reader.consumeBytes(2); - return parseTypeInfo; -} - -function parseFlags_7_2(reader: Reader) { - if (!reader.bytesAvailable(2)) { - return; - } - - // TODO: Implement flag parsing - const flags = reader.readUInt16LE(0); // eslint-disable-line no-unused-vars - reader.consumeBytes(2); - - return parseTypeInfo; -} - -function parseFlags_7_4(reader: Reader) { - if (!reader.bytesAvailable(2)) { - return; - } - - // TODO: Implement flag parsing - const flags = reader.readUInt16LE(0); // eslint-disable-line no-unused-vars - reader.consumeBytes(2); + if (0 != flags) + throw new Error('Unknown flags in RETURNVALUE_TOKEN '); return parseTypeInfo; } diff --git a/test/returnValue-test.js b/test/returnValue-test.js index bb7c606..0551d77 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -15,22 +15,24 @@ describe('Parsing a RETURNVALUE token', function() { describe('in TDS 7.0 mode', function() { - let reader, data, paramOrdinal, paramName, status, userType, typeid, dataLength, value, offset, tempBuff; + let reader, data, paramOrdinal, paramName, status, userType, flag, typeid, dataLength, value, offset, tempOffset, tempBuff; before(function() { paramOrdinal = 1; paramName = '@count'; status = 1; userType = 0; + flag = 0; typeid = 0x26; value = 4; - offset = 0; + tempOffset = 0; tempBuff = Buffer.alloc(21); buildDataBuffer(); 
}); beforeEach(function() { reader = new Reader(0x07000000); + offset = tempOffset; }); function addListners(done, token) { @@ -51,18 +53,17 @@ describe('Parsing a RETURNVALUE token', function() { } function buildDataBuffer() { - tempBuff.writeUInt8(0xAC, offset++); - tempBuff.writeUInt16LE(paramOrdinal, offset); - offset += 2; - tempBuff.writeUInt8(paramName.length, offset++); - tempBuff.write(paramName, offset, paramName.length * 2, 'ucs2'); - offset += paramName.length * 2; - tempBuff.writeUInt8(status, offset++); - tempBuff.writeUInt16LE(userType, offset); - offset += 2; - // Flag - tempBuff.writeUInt16LE(0, offset); - offset += 2; + tempBuff.writeUInt8(0xAC, tempOffset++); + tempBuff.writeUInt16LE(paramOrdinal, tempOffset); + tempOffset += 2; + tempBuff.writeUInt8(paramName.length, tempOffset++); + tempBuff.write(paramName, tempOffset, paramName.length * 2, 'ucs2'); + tempOffset += paramName.length * 2; + tempBuff.writeUInt8(status, tempOffset++); + tempBuff.writeUInt16LE(userType, tempOffset); + tempOffset += 2; + tempBuff.writeUInt16LE(flag, tempOffset); + tempOffset += 2; } it('should parse the INTNTYPE(Int) token correctly', function(done) { @@ -82,19 +83,43 @@ describe('Parsing a RETURNVALUE token', function() { addListners(done, token); reader.end(data); }); + + it('should throw exception on receiving non-zero flag', function(done) { + dataLength = 4; + + data = Buffer.alloc(28); + tempBuff.copy(data, 0, 0, offset - 2); + + // write non-zero flag + data.writeUInt16LE(56, offset - 2); + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + data.writeUInt32LE(value, offset); + const token = {}; + + addListners(done, token); + assert.throw(() => reader.end(data), Error, 'Unknown flags in RETURNVALUE_TOKEN'); + done(); + }); }); describe('in TDS 7.2 mode', function() { describe('test INTNTYPE', function() { - let reader, data, paramOrdinal, paramName, status, userType, typeid, dataLength, value, offset, tempBuff, tempOffset; + let reader, data, paramOrdinal, paramName, status, userType, flag, typeid, dataLength, value, offset, tempBuff, tempOffset; before(function() { paramOrdinal = 1; paramName = '@count'; status = 1; userType = 0; + flag = 0; typeid = 0x26; value = 4; tempOffset = 0; @@ -104,6 +129,7 @@ describe('Parsing a RETURNVALUE token', function() { beforeEach(function() { reader = new Reader(0x72090002); + offset = tempOffset; }); function addListners(done, token) { @@ -133,14 +159,12 @@ describe('Parsing a RETURNVALUE token', function() { tempBuff.writeUInt8(status, tempOffset++); tempBuff.writeUInt32LE(userType, tempOffset); tempOffset += 4; - // Flag - tempBuff.writeUInt16LE(0, tempOffset); + tempBuff.writeUInt16LE(flag, tempOffset); tempOffset += 2; } it('should parse the INTNTYPE(Tinyint) token correctly', function(done) { dataLength = 1; - offset = tempOffset; data = Buffer.alloc(27); tempBuff.copy(data); @@ -159,7 +183,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the INTNTYPE(smallint) token correctly', function(done) { dataLength = 2; - offset = tempOffset; data = Buffer.alloc(28); tempBuff.copy(data); @@ -179,7 +202,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the INTNTYPE(Int) token correctly', function(done) { dataLength = 4; - offset = tempOffset; data = Buffer.alloc(30); tempBuff.copy(data); @@ -198,7 +220,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the INTNTYPE(Bigint) 
token correctly', function(done) { dataLength = 8; - offset = tempOffset; data = Buffer.alloc(34); tempBuff.copy(data); @@ -220,7 +241,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the INTNTYPE(null) token correctly', function(done) { dataLength = 8; value = null; - offset = tempOffset; data = Buffer.alloc(26); tempBuff.copy(data); @@ -253,7 +273,7 @@ describe('Parsing a RETURNVALUE token', function() { beforeEach(function() { reader = new Reader(0x72090002); - + offset = tempOffset; }); @@ -301,7 +321,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the NULLTYPE token correctly', function(done) { typeid = 0x1F; value = null; - offset = tempOffset; data = Buffer.alloc(24); tempBuff.copy(data); @@ -318,7 +337,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the INT1TYPE/TintInt token correctly', function(done) { typeid = 0x30; value = 255; - offset = tempOffset; data = Buffer.alloc(25); tempBuff.copy(data); @@ -337,7 +355,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the BITTYPE token correctly', function(done) { typeid = 0x32; value = false; - offset = tempOffset; data = Buffer.alloc(25); tempBuff.copy(data); @@ -356,7 +373,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the INT2TYPE/SmallInt token correctly', function(done) { typeid = 0x34; value = 32767; - offset = tempOffset; data = Buffer.alloc(26); tempBuff.copy(data); @@ -375,7 +391,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the INT4TYPE/Int token correctly', function(done) { typeid = 0x38; value = -2147483648; - offset = tempOffset; data = Buffer.alloc(28); tempBuff.copy(data); @@ -395,7 +410,6 @@ describe('Parsing a RETURNVALUE token', function() { typeid = 0x7F; // value = -2147483648; value = 147483648; - offset = tempOffset; data = Buffer.alloc(32); tempBuff.copy(data); @@ -420,7 +434,6 @@ describe('Parsing a RETURNVALUE token', function() { const days = 43225; // days since 1900-01-01 const minutes = 763; value = new Date('2018-05-07T12:43:00.000Z'); - offset = tempOffset; data = Buffer.alloc(28); tempBuff.copy(data); @@ -445,7 +458,6 @@ describe('Parsing a RETURNVALUE token', function() { const days = 43225; const minutes = 763; value = new Date('2018-05-07T12:43:00.000'); - offset = tempOffset; data = Buffer.alloc(28); tempBuff.copy(data); @@ -466,7 +478,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the FLT4TYPE/Real token correctly', function(done) { typeid = 0x3B; value = 9654.2529296875; - offset = tempOffset; data = Buffer.alloc(28); tempBuff.copy(data); @@ -485,7 +496,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the FLT8TYPE/Float token correctly', function(done) { typeid = 0x3E; value = 9654.2546456567565767644; - offset = tempOffset; data = Buffer.alloc(32); tempBuff.copy(data); @@ -504,7 +514,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the MONEYTYPE/Money token correctly', function(done) { typeid = 0x3C; value = 922337203.5807; - offset = tempOffset; const TDS_value = value * 10000; data = Buffer.alloc(32); @@ -525,7 +534,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the MONEY4TYPE/SmallMoney token correctly', function(done) { typeid = 0x7A; value = -214748.3647; - offset = tempOffset; const TDS_value = value * 10000; data = Buffer.alloc(28); @@ -545,7 +553,6 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the 
DATETIMETYPE/DateTime token correctly', function(done) { reader.options = {}; reader.options.useUTC = true; - offset = tempOffset; typeid = 0x3D; value = new Date('2004-05-23T14:25:10.487Z'); From 8a4c134c8e1575aa82d156a33f1304561646d1bf Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Tue, 5 Jun 2018 15:56:25 -0700 Subject: [PATCH 05/29] feat: support uuid datatype in returnvalue --- src/dataTypes.js | 10 ++++- src/guid-parser.js | 85 ++++++++++++++++++++++++++++++++++++++++ src/value-parser.js | 23 ++++++++++- test/returnValue-test.js | 61 ++++++++++++++++++++++++++-- 4 files changed, 173 insertions(+), 6 deletions(-) create mode 100644 src/guid-parser.js diff --git a/src/dataTypes.js b/src/dataTypes.js index 13ff253..1b2c117 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -64,12 +64,20 @@ const type = { }, //VARLENTYPE + [0x24]: { + id: 0x24, + type: 'GUIDN', + name: 'UniqueIdentifier', + LengthOfDataLength: 1 + }, + [0x26]: { id: 0x26, - name: 'IntN', type: 'INTN', + name: 'IntN', LengthOfDataLength: 1 } + }; const typeByName = {}; diff --git a/src/guid-parser.js b/src/guid-parser.js new file mode 100644 index 0000000..3ebedb6 --- /dev/null +++ b/src/guid-parser.js @@ -0,0 +1,85 @@ +/* @flow */ + +const MAP = [ + '00', '01', '02', '03', '04', '05', '06', '07', '08', '09', '0A', '0B', '0C', '0D', '0E', '0F', + '10', '11', '12', '13', '14', '15', '16', '17', '18', '19', '1A', '1B', '1C', '1D', '1E', '1F', + '20', '21', '22', '23', '24', '25', '26', '27', '28', '29', '2A', '2B', '2C', '2D', '2E', '2F', + '30', '31', '32', '33', '34', '35', '36', '37', '38', '39', '3A', '3B', '3C', '3D', '3E', '3F', + '40', '41', '42', '43', '44', '45', '46', '47', '48', '49', '4A', '4B', '4C', '4D', '4E', '4F', + '50', '51', '52', '53', '54', '55', '56', '57', '58', '59', '5A', '5B', '5C', '5D', '5E', '5F', + '60', '61', '62', '63', '64', '65', '66', '67', '68', '69', '6A', '6B', '6C', '6D', '6E', '6F', + '70', '71', '72', '73', '74', '75', '76', '77', '78', '79', '7A', '7B', '7C', '7D', '7E', '7F', + '80', '81', '82', '83', '84', '85', '86', '87', '88', '89', '8A', '8B', '8C', '8D', '8E', '8F', + '90', '91', '92', '93', '94', '95', '96', '97', '98', '99', '9A', '9B', '9C', '9D', '9E', '9F', + 'A0', 'A1', 'A2', 'A3', 'A4', 'A5', 'A6', 'A7', 'A8', 'A9', 'AA', 'AB', 'AC', 'AD', 'AE', 'AF', + 'B0', 'B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'B8', 'B9', 'BA', 'BB', 'BC', 'BD', 'BE', 'BF', + 'C0', 'C1', 'C2', 'C3', 'C4', 'C5', 'C6', 'C7', 'C8', 'C9', 'CA', 'CB', 'CC', 'CD', 'CE', 'CF', + 'D0', 'D1', 'D2', 'D3', 'D4', 'D5', 'D6', 'D7', 'D8', 'D9', 'DA', 'DB', 'DC', 'DD', 'DE', 'DF', + 'E0', 'E1', 'E2', 'E3', 'E4', 'E5', 'E6', 'E7', 'E8', 'E9', 'EA', 'EB', 'EC', 'ED', 'EE', 'EF', + 'F0', 'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8', 'F9', 'FA', 'FB', 'FC', 'FD', 'FE', 'FF' +]; + +module.exports.arrayToGuid = arrayToGuid; +function arrayToGuid(array: Buffer) { + return ( + MAP[array[3]] + + MAP[array[2]] + + MAP[array[1]] + + MAP[array[0]] + + '-' + + MAP[array[5]] + + MAP[array[4]] + + '-' + + MAP[array[7]] + + MAP[array[6]] + + '-' + + MAP[array[8]] + + MAP[array[9]] + + '-' + + MAP[array[10]] + + MAP[array[11]] + + MAP[array[12]] + + MAP[array[13]] + + MAP[array[14]] + + MAP[array[15]] + ); +} + +const CHARCODEMAP = {}; + +const hexDigits = [ + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', + 'a', 'b', 'c', 'd', 'e', 'f', + 'A', 'B', 'C', 'D', 'E', 'F' +].map((d) => d.charCodeAt(0)); + +for (let i = 0; i < hexDigits.length; i++) { + const map = CHARCODEMAP[hexDigits[i]] = {}; + for (let j = 0; j 
< hexDigits.length; j++) { + const hex = String.fromCharCode(hexDigits[i], hexDigits[j]); + const value = parseInt(hex, 16); + map[hexDigits[j]] = value; + } +} + +module.exports.guidToArray = guidToArray; +function guidToArray(guid: string) { + return [ + CHARCODEMAP[guid.charCodeAt(6)][guid.charCodeAt(7)], + CHARCODEMAP[guid.charCodeAt(4)][guid.charCodeAt(5)], + CHARCODEMAP[guid.charCodeAt(2)][guid.charCodeAt(3)], + CHARCODEMAP[guid.charCodeAt(0)][guid.charCodeAt(1)], + CHARCODEMAP[guid.charCodeAt(11)][guid.charCodeAt(12)], + CHARCODEMAP[guid.charCodeAt(9)][guid.charCodeAt(10)], + CHARCODEMAP[guid.charCodeAt(16)][guid.charCodeAt(17)], + CHARCODEMAP[guid.charCodeAt(14)][guid.charCodeAt(15)], + CHARCODEMAP[guid.charCodeAt(19)][guid.charCodeAt(20)], + CHARCODEMAP[guid.charCodeAt(21)][guid.charCodeAt(22)], + CHARCODEMAP[guid.charCodeAt(24)][guid.charCodeAt(25)], + CHARCODEMAP[guid.charCodeAt(26)][guid.charCodeAt(27)], + CHARCODEMAP[guid.charCodeAt(28)][guid.charCodeAt(29)], + CHARCODEMAP[guid.charCodeAt(30)][guid.charCodeAt(31)], + CHARCODEMAP[guid.charCodeAt(32)][guid.charCodeAt(33)], + CHARCODEMAP[guid.charCodeAt(34)][guid.charCodeAt(35)] + ]; +} diff --git a/src/value-parser.js b/src/value-parser.js index ccabf1e..0e1ea14 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -3,6 +3,7 @@ type readStep = (reader: Reader) =>?readStep; const Reader = require('./reader'); const TYPE = require('./dataTypes').TYPE; +const guidParser = require('./guid-parser'); const MAX = (1 << 16) - 1; const THREE_AND_A_THIRD = 3 + (1 / 3); const MONEY_DIVISOR = 10000; @@ -28,10 +29,18 @@ function readDataLength(reader: Reader) { // Variable length if (token.typeInfo.dataLength !== MAX) { switch (TYPE[token.typeInfo.id].LengthOfDataLength) { - case 1: + case 1: // BYTELEN reader.stash.push(reader.readUInt8(0)); reader.consumeBytes(1); return readValue; + case 2: // USHORTCHARBINLEN + reader.stash.push(reader.readUInt16LE(0)); + reader.consumeBytes(2); + return readValue; + case 4: // LONGLEN + reader.stash.push(reader.readUInt32LE(0)); + reader.consumeBytes(4); + return readValue; default: console.log('Datalength parser not-implemented for ', TYPE[token.typeInfo.id].name); } @@ -100,6 +109,18 @@ function readValue(reader: Reader) { return reader.stash.pop(); // Variable-Length Data Types + case 'UniqueIdentifier': + switch (dataLength) { + case 0: + token.value = null; + return reader.stash.pop(); + case 0x10: + token.value = guidParser.arrayToGuid(reader.readBuffer(0, dataLength)); + reader.consumeBytes(dataLength); + return reader.stash.pop(); + default: + console.log('Unknown UniqueIdentifier length'); + } case 'IntN': switch (dataLength) { case 0: diff --git a/test/returnValue-test.js b/test/returnValue-test.js index 0551d77..571ea6e 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -103,7 +103,7 @@ describe('Parsing a RETURNVALUE token', function() { const token = {}; addListners(done, token); - assert.throw(() => reader.end(data), Error, 'Unknown flags in RETURNVALUE_TOKEN'); + assert.throws(() => reader.end(data), Error, 'Unknown flags in RETURNVALUE_TOKEN'); done(); }); }); @@ -120,8 +120,6 @@ describe('Parsing a RETURNVALUE token', function() { status = 1; userType = 0; flag = 0; - typeid = 0x26; - value = 4; tempOffset = 0; tempBuff = Buffer.alloc(23); buildDataBuffer(); @@ -165,6 +163,8 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the INTNTYPE(Tinyint) token correctly', function(done) { dataLength = 1; + typeid = 0x26; + value = 4; data = 
Buffer.alloc(27); tempBuff.copy(data); @@ -183,6 +183,8 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the INTNTYPE(smallint) token correctly', function(done) { dataLength = 2; + typeid = 0x26; + value = 4; data = Buffer.alloc(28); tempBuff.copy(data); @@ -202,6 +204,8 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the INTNTYPE(Int) token correctly', function(done) { dataLength = 4; + typeid = 0x26; + value = 4; data = Buffer.alloc(30); tempBuff.copy(data); @@ -220,6 +224,8 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the INTNTYPE(Bigint) token correctly', function(done) { dataLength = 8; + typeid = 0x26; + value = 4; data = Buffer.alloc(34); tempBuff.copy(data); @@ -240,6 +246,7 @@ describe('Parsing a RETURNVALUE token', function() { it('should parse the INTNTYPE(null) token correctly', function(done) { dataLength = 8; + typeid = 0x26; value = null; data = Buffer.alloc(26); @@ -256,6 +263,52 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(data); }); + + it('should parse the GUIDTYPE() token correctly', function(done) { + data = Buffer.alloc(42); + typeid = 0x24; + dataLength = 16; + + value = '6DF72E68-AB06-4D75-AC95-16899948B81C'; + const valueAsBuffer = Buffer.from([0x68, 0x2E, 0xF7, 0x6D, 0x06, 0xAB, 0x75, 0x4D, 0xAC, 0x95, 0x16, 0x89, 0x99, 0x48, 0xB8, 0x1C]); + + tempBuff.copy(data); + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + + it('should parse the GUIDTYPE()-null token correctly', function(done) { + data = Buffer.alloc(26); + typeid = 0x24; + dataLength = 16; + + value = null; + + tempBuff.copy(data); + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(0, offset++); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); }); describe('test FIXEDLENTYPE', function() { @@ -362,7 +415,7 @@ describe('Parsing a RETURNVALUE token', function() { data.writeUInt8(typeid, offset++); // TYPE_VARBYTE - data.writeUInt8(value, offset); + data.writeUInt8(value ? 
1 : 0, offset); const token = {}; addListners(done, token); From 227279dac7a46e7a8dd20f4eceae6633c7570687 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Wed, 20 Jun 2018 09:21:44 -0700 Subject: [PATCH 06/29] feat: parser vaiable length numeric and bit type --- src/dataTypes.js | 17 +++++- src/reader.js | 21 ++++++++ src/types.js | 28 +++++++--- src/value-parser.js | 45 +++++++++++++++- test/returnValue-test.js | 112 ++++++++++++++++++++++++++++++++++++++- 5 files changed, 211 insertions(+), 12 deletions(-) diff --git a/src/dataTypes.js b/src/dataTypes.js index 1b2c117..d3e1c18 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -76,8 +76,23 @@ const type = { type: 'INTN', name: 'IntN', LengthOfDataLength: 1 - } + }, + + [0x68]: { + id: 0x68, + type: 'BITN', + name: 'BitN', + LengthOfDataLength: 1 + }, + [0x6C]: { + id: 0x6C, + type: 'NUMERICN', + name: 'NumericN', + LengthOfDataLength: 1, + hasPrecision: true, + hasScale: true + } }; const typeByName = {}; diff --git a/src/reader.js b/src/reader.js index a82222c..03b2769 100644 --- a/src/reader.js +++ b/src/reader.js @@ -113,6 +113,27 @@ const Reader = module.exports = class Reader extends Transform { return this.buffer.readDoubleLE(this.position + offset); } + readUNumeric64LE(offset: number) { + const low = this.buffer.readUInt32LE(this.position + offset); + const high = this.buffer.readUInt32LE(this.position + offset + 4); + return (0x100000000 * high) + low; + } + + readUNumeric96LE(offset: number) { + const dword1 = this.buffer.readUInt32LE(this.position + offset); + const dword2 = this.buffer.readUInt32LE(this.position + offset + 4); + const dword3 = this.buffer.readUInt32LE(this.position + offset + 8); + return (dword1 + (0x100000000 * dword2) + (0x100000000 * 0x100000000 * dword3)); + } + + readUNumeric128LE(offset: number) { + const dword1 = this.buffer.readUInt32LE(this.position + offset); + const dword2 = this.buffer.readUInt32LE(this.position + offset + 4); + const dword3 = this.buffer.readUInt32LE(this.position + offset + 8); + const dword4 = this.buffer.readUInt32LE(this.position + offset + 12); + return (dword1 + (0x100000000 * dword2) + (0x100000000 * 0x100000000 * dword3) + (0x100000000 * 0x100000000 * 0x100000000 * dword4)); + } + _transform(chunk: Buffer | string, encoding: string | null, callback: (error: ?Error) => void) { if (!(chunk instanceof Buffer)) { return callback(new Error('Expected Buffer')); diff --git a/src/types.js b/src/types.js index 135daa3..98fe23e 100644 --- a/src/types.js +++ b/src/types.js @@ -7,8 +7,9 @@ const Reader = require('./reader'); type typeInfoId = // FIXEDLENTYPE 0x1F | 0x30 | 0x32 | 0x34 | 0x38 | 0x3A | 0x3B | 0x3C | 0x3D | 0x3E | 0x7A | 0x7F | + // BYTELEN_TYPE - 0x24 | 0x26 | 0x37 | + 0x24 | 0x26 | 0x37 | 0x3F | 0x68 | 0X6A | 0X6C | // USHORTLEN_TYPE 0xE7 | @@ -148,12 +149,14 @@ function readTypeId(reader: Reader) { return readIntNType; case 0x37: // DECIMALTYPE - return readDecimalType; - case 0x3F: // NUMERICTYPE - case 0x68: // BITNTYPE case 0x6A: // DECIMALNTYPE case 0x6C: // NUMERICNTYPE + return readDecimalNumericType(id, reader); + + case 0x68: // BITNTYPE + return readBitNType; + case 0x6D: // FLTNTYPE case 0x6E: // MONEYNTYPE case 0x6F: // DATETIMNTYPE @@ -212,6 +215,18 @@ function readGuidType(reader: Reader) { return next; } +function readBitNType(reader: Reader) { + if (!reader.bytesAvailable(1)) { + return; + } + const dataLength = reader.readUInt8(0); + reader.consumeBytes(1); + + const next = reader.stash.pop(); + reader.stash.push(new TypeInfo(0x68, 
dataLength)); + return next; +} + function readIntNType(reader: Reader) { if (!reader.bytesAvailable(1)) { return; @@ -229,7 +244,7 @@ function readIntNType(reader: Reader) { return next; } -function readDecimalType(reader: Reader) { +function readDecimalNumericType(id, reader: Reader) { if (!reader.bytesAvailable(3)) { return; } @@ -237,11 +252,10 @@ function readDecimalType(reader: Reader) { const dataLength = reader.readUInt8(0); const precision = reader.readUInt8(1); const scale = reader.readUInt8(2); - reader.consumeBytes(3); const next = reader.stash.pop(); - reader.stash.push(new TypeInfo(0x37, dataLength, precision, scale)); + reader.stash.push(new TypeInfo(id, dataLength, precision, scale)); return next; } diff --git a/src/value-parser.js b/src/value-parser.js index 0e1ea14..4ceeebb 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -119,7 +119,7 @@ function readValue(reader: Reader) { reader.consumeBytes(dataLength); return reader.stash.pop(); default: - console.log('Unknown UniqueIdentifier length'); + throw new Error('Unknown UniqueIdentifier length'); } case 'IntN': switch (dataLength) { @@ -144,8 +144,49 @@ function readValue(reader: Reader) { reader.consumeBytes(8); return reader.stash.pop(); default: - console.log('Unknown length'); + throw new Error('Unknown length for integer datatype'); } + case 'BitN': + switch (dataLength) { + case 0: + token.value = null; + return reader.stash.pop(); + case 1: + token.value = !!reader.readUInt8(0); + reader.consumeBytes(1); + return reader.stash.pop(); + } + case 'NumericN': + if (dataLength === 0) { + token.value = null; + return reader.stash.pop(); + } + let sign = reader.readUInt8(0); + reader.consumeBytes(1); + sign = sign === 1 ? 1 : -1; + let value; + switch (dataLength - 1) { + case 4: + value = reader.readUInt32LE(0); + reader.consumeBytes(4); + break; + case 8: + value = reader.readUNumeric64LE(0); + reader.consumeBytes(8); + break; + case 12: + value = reader.readUNumeric96LE(0); + reader.consumeBytes(12); + break; + case 16: + value = reader.readUNumeric128LE(0); + reader.consumeBytes(16); + break; + default: + throw new Error(`Unsupported numeric size ${dataLength - 1}`); + } + token.value = (value * sign) / Math.pow(10, token.typeInfo.scale); + return reader.stash.pop(); default: console.log('readValue not implemented'); } diff --git a/test/returnValue-test.js b/test/returnValue-test.js index 571ea6e..0c398ee 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -110,7 +110,7 @@ describe('Parsing a RETURNVALUE token', function() { describe('in TDS 7.2 mode', function() { - describe('test INTNTYPE', function() { + describe('test VARLENTYPE-BYTELEN', function() { let reader, data, paramOrdinal, paramName, status, userType, flag, typeid, dataLength, value, offset, tempBuff, tempOffset; @@ -309,6 +309,115 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(data); }); + + it('should parse the NUMERIC token correctly : 1 <= precision <= 9', function(done) { + data = Buffer.alloc(33); + tempBuff.copy(data); + + typeid = 0x6C; + const lengthInMeta = 0x11; + const precision = 5; + const scale = 3; + dataLength = 5; + const valueAsBuffer = Buffer.from([0x00, 0xC5, 0xDB, 0x00, 0x00]); + value = -56.261; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(lengthInMeta, offset++); + data.writeUInt8(precision, offset++); + data.writeUInt8(scale, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += 
dataLength; + + const token = {}; + + addListners(done, token); + reader.end(data); + }); + + it('should parse the NUMERIC token correctly : 10 <= precision <= 19', function(done) { + data = Buffer.alloc(37); + tempBuff.copy(data); + + typeid = 0x6C; + const lengthInMeta = 0x11; + const precision = 15; + const scale = 3; + dataLength = 9; + const valueAsBuffer = Buffer.from([0x01, 0xAD, 0x2F, 0x1C, 0xBD, 0x11, 0x05, 0x02, 0x00]); + value = 568523698745.261; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(lengthInMeta, offset++); + data.writeUInt8(precision, offset++); + data.writeUInt8(scale, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + + addListners(done, token); + reader.end(data); + }); + + it('should parse the NUMERIC token correctly : 29 <= precision <= 38', function(done) { + data = Buffer.alloc(45); + tempBuff.copy(data); + // 1.235236987000989e+26 + typeid = 0x6C; + const lengthInMeta = 0x11; + const precision = 30; + const scale = 3; + dataLength = 17; + const valueAsBuffer = Buffer.from([0x01, 0x2D, 0x77, 0xCE, 0xC2, 0x9B, 0x0E, 0x61, 0x34, 0xA4, 0x68, 0x20, 0x8F, 0x01, 0x00, 0x00, 0x00]); + value = 1.235236987000989e+26; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(lengthInMeta, offset++); + data.writeUInt8(precision, offset++); + data.writeUInt8(scale, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + + addListners(done, token); + reader.end(data); + }); + + it('should parse the BITNTYPE token correctly', function(done) { + dataLength = 1; + typeid = 0x68; + const value_sent = 0; + value = false; + + data = Buffer.alloc(27); + tempBuff.copy(data); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + data.writeUInt8(value_sent, offset); + + const token = {}; + addListners(done, token); + + reader.end(data); + }); + }); describe('test FIXEDLENTYPE', function() { @@ -329,7 +438,6 @@ describe('Parsing a RETURNVALUE token', function() { offset = tempOffset; }); - function addListners(done, token) { reader.on('data', function(retValToken) { assert.instanceOf(retValToken, ReturnValueToken); From 625d667d394a0525d6e91e24a15d207e615d8baf Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Wed, 20 Jun 2018 09:23:15 -0700 Subject: [PATCH 07/29] chore: update chai and mocha flow-typed library --- flow-typed/npm/chai_v3.5.x.js | 198 ------------ flow-typed/npm/chai_v4.x.x.js | 297 ++++++++++++++++++ .../npm/{mocha_v3.1.x.js => mocha_v5.x.x.js} | 4 +- 3 files changed, 299 insertions(+), 200 deletions(-) delete mode 100644 flow-typed/npm/chai_v3.5.x.js create mode 100644 flow-typed/npm/chai_v4.x.x.js rename flow-typed/npm/{mocha_v3.1.x.js => mocha_v5.x.x.js} (98%) diff --git a/flow-typed/npm/chai_v3.5.x.js b/flow-typed/npm/chai_v3.5.x.js deleted file mode 100644 index 21a67df..0000000 --- a/flow-typed/npm/chai_v3.5.x.js +++ /dev/null @@ -1,198 +0,0 @@ -// flow-typed signature: 0bb5177083bf69296bdda98e783b4462 -// flow-typed version: 3a256f81e9/chai_v3.5.x/flow_>=v0.24.0 - -declare module "chai" { - - declare type ExpectChain = { - and: ExpectChain, - at: ExpectChain, - be: ExpectChain, - been: ExpectChain, - have: ExpectChain, - has: ExpectChain, - is: ExpectChain, - of: ExpectChain, - same: ExpectChain, - that: ExpectChain, - to: 
ExpectChain, - which: ExpectChain, - with: ExpectChain, - - not: ExpectChain, - deep: ExpectChain, - any: ExpectChain, - all: ExpectChain, - - a: ExpectChain & (type: string) => ExpectChain, - an: ExpectChain & (type: string) => ExpectChain, - - include: ExpectChain & (value: mixed) => ExpectChain, - includes: ExpectChain & (value: mixed) => ExpectChain, - contain: ExpectChain & (value: mixed) => ExpectChain, - contains: ExpectChain & (value: mixed) => ExpectChain, - - eql: (value: T) => ExpectChain, - equal: (value: T) => ExpectChain, - equals: (value: T) => ExpectChain, - - above: (value: T & number) => ExpectChain, - least: (value: T & number) => ExpectChain, - below: (value: T & number) => ExpectChain, - most: (value: T & number) => ExpectChain, - within: (start: T & number, finish: T & number) => ExpectChain, - - instanceof: (constructor: mixed) => ExpectChain, - property: ( -

(name: string, value?: P) => ExpectChain

- & (name: string) => ExpectChain - ), - - length: (value: number) => ExpectChain | ExpectChain, - lengthOf: (value: number) => ExpectChain, - - match: (regex: RegExp) => ExpectChain, - string: (string: string) => ExpectChain, - - key: (key: string) => ExpectChain, - keys: (key: string | Array, ...keys: Array) => ExpectChain, - - throw: (err: Class | Error | RegExp | string, msg?: RegExp | string) => ExpectChain, - - respondTo: (method: string) => ExpectChain, - itself: ExpectChain, - - satisfy: (method: (value: T) => bool) => ExpectChain, - - closeTo: (expected: T & number, delta: number) => ExpectChain, - - members: (set: mixed) => ExpectChain, - oneOf: (list: Array) => ExpectChain, - - change: (obj: mixed, key: string) => ExpectChain, - increase: (obj: mixed, key: string) => ExpectChain, - decrease: (obj: mixed, key: string) => ExpectChain, - - // dirty-chai - ok: () => ExpectChain, - true: () => ExpectChain, - false: () => ExpectChain, - null: () => ExpectChain, - undefined: () => ExpectChain, - exist: () => ExpectChain, - empty: () => ExpectChain, - - // chai-immutable - size: (n: number) => ExpectChain, - - // sinon-chai - called: () => ExpectChain, - callCount: (n: number) => ExpectChain, - calledOnce: () => ExpectChain, - calledBefore: (spy: mixed) => ExpectChain, - calledAfter: (spy: mixed) => ExpectChain, - calledWith: (...args: Array) => ExpectChain, - calledWithMatch: (...args: Array) => ExpectChain, - calledWithExactly: (...args: Array) => ExpectChain, - - // chai-as-promised - eventually: ExpectChain, - resolvedWith: (value: mixed) => Promise & ExpectChain, - resolved: () => Promise & ExpectChain, - rejectedWith: (value: mixed) => Promise & ExpectChain, - rejected: () => Promise & ExpectChain, - notify: (callback: () => mixed) => ExpectChain, - }; - - declare function expect(actual: T): ExpectChain; - - declare function use(plugin: (chai: Object, utils: Object) => void): void; - - declare class assert { - static(expression: mixed, message?: string): void; - static fail(actual: mixed, expected: mixed, message?: string, operator?: string): void; - - static isOk(object: mixed, message?: string): void; - static isNotOk(object: mixed, message?: string): void; - - static equal(actual: mixed, expected: mixed, message?: string): void; - static notEqual(actual: mixed, expected: mixed, message?: string): void; - - static strictEqual(act: mixed, exp: mixed, msg?: string): void; - static notStrictEqual(act: mixed, exp: mixed, msg?: string): void; - - static deepEqual(act: mixed, exp: mixed, msg?: string): void; - static notDeepEqual(act: mixed, exp: mixed, msg?: string): void; - - static isTrue(val: mixed, msg?: string): void; - static isNotTrue(val: mixed, msg?: string): void; - static isFalse(val: mixed, msg?: string): void; - static isNotFalse(val: mixed, msg?: string): void; - - static isNull(val: mixed, msg?: string): void; - static isNotNull(val: mixed, msg?: string): void; - - static isUndefined(val: mixed, msg?: string): void; - static isDefined(val: mixed, msg?: string): void; - - static isNaN(val: mixed, msg?: string): void; - static isNotNaN(val: mixed, msg?: string): void; - - static isAbove(val: number, abv: number, msg?: string): void; - static isBelow(val: number, blw: number, msg?: string): void; - - static isAtMost(val: number, atmst: number, msg?: string): void; - static isAtLeast(val: number, atlst: number, msg?: string): void; - - static isFunction(val: mixed, msg?: string): void; - static isNotFunction(val: mixed, msg?: string): void; - - static isObject(val: mixed, 
msg?: string): void; - static isNotObject(val: mixed, msg?: string): void; - - static isArray(val: mixed, msg?: string): void; - static isNotArray(val: mixed, msg?: string): void; - - static isString(val: mixed, msg?: string): void; - static isNotString(val: mixed, msg?: string): void; - - static isNumber(val: mixed, msg?: string): void; - static isNotNumber(val: mixed, msg?: string): void; - - static isBoolean(val: mixed, msg?: string): void; - static isNotBoolean(val: mixed, msg?: string): void; - - static typeOf(val: mixed, type: string, msg?: string): void; - static notTypeOf(val: mixed, type: string, msg?: string): void; - - static instanceOf(val: mixed, constructor: Function, msg?: string): void; - static notInstanceOf(val: mixed, constructor: Function, msg?: string): void; - - static include(exp: string, inc: mixed, msg?: string): void; - static include(exp: Array, inc: mixed, msg?: string): void; - - static notInclude(exp: string, inc: mixed, msg?: string): void; - static notInclude(exp: Array, inc: mixed, msg?: string): void; - - static match(exp: mixed, re: RegExp, msg?: string): void; - static notMatch(exp: mixed, re: RegExp, msg?: string): void; - - static property(obj: Object, prop: string, msg?: string): void; - static notProperty(obj: Object, prop: string, msg?: string): void; - static deepProperty(obj: Object, prop: string, msg?: string): void; - static notDeepProperty(obj: Object, prop: string, msg?: string): void; - - static propertyVal(obj: Object, prop: string, val: mixed, msg?: string): void; - static propertyNotVal(obj: Object, prop: string, val: mixed, msg?: string): void; - - static deepPropertyVal(obj: Object, prop: string, val: mixed, msg?: string): void; - static deepPropertyNotVal(obj: Object, prop: string, val: mixed, msg?: string): void; - - static lengthOf(exp: mixed, len: number, msg?: string): void; - } - - declare var config: { - includeStack: boolean, - showDiff: boolean, - truncateThreshold: number - }; -} diff --git a/flow-typed/npm/chai_v4.x.x.js b/flow-typed/npm/chai_v4.x.x.js new file mode 100644 index 0000000..23d7bd9 --- /dev/null +++ b/flow-typed/npm/chai_v4.x.x.js @@ -0,0 +1,297 @@ +// flow-typed signature: f506e02b4091df7bc8f1798f5857a644 +// flow-typed version: 120d43bb08/chai_v4.x.x/flow_>=v0.25.0 + +declare module "chai" { + declare type ExpectChain = { + and: ExpectChain, + at: ExpectChain, + be: ExpectChain, + been: ExpectChain, + have: ExpectChain, + has: ExpectChain, + is: ExpectChain, + of: ExpectChain, + same: ExpectChain, + that: ExpectChain, + to: ExpectChain, + which: ExpectChain, + with: ExpectChain, + + not: ExpectChain, + deep: ExpectChain, + any: ExpectChain, + all: ExpectChain, + + a: ExpectChain & ((type: string, message?: string) => ExpectChain), + an: ExpectChain & ((type: string, message?: string) => ExpectChain), + + include: ExpectChain & ((value: mixed, message?: string) => ExpectChain), + includes: ExpectChain & ((value: mixed, message?: string) => ExpectChain), + contain: ExpectChain & ((value: mixed, message?: string) => ExpectChain), + contains: ExpectChain & ((value: mixed, message?: string) => ExpectChain), + + eq: (value: T, message?: string) => ExpectChain, + eql: (value: T, message?: string) => ExpectChain, + equal: (value: T, message?: string) => ExpectChain, + equals: (value: T, message?: string) => ExpectChain, + + above: (value: T & number, message?: string) => ExpectChain, + gt: (value: T & number, message?: string) => ExpectChain, + greaterThan: (value: T & number, message?: string) => ExpectChain, + least: 
(value: T & number, message?: string) => ExpectChain, + below: (value: T & number, message?: string) => ExpectChain, + lessThan: (value: T & number, message?: string) => ExpectChain, + lt: (value: T & number, message?: string) => ExpectChain, + most: (value: T & number, message?: string) => ExpectChain, + within: (start: T & number, finish: T & number, message?: string) => ExpectChain, + + instanceof: (constructor: mixed, message?: string) => ExpectChain, + nested: ExpectChain, + property:

( + name: string, + value?: P, + message?: string + ) => ExpectChain

& ((name: string) => ExpectChain), + + length: (value: number, message?: string) => ExpectChain | ExpectChain, + lengthOf: (value: number, message?: string) => ExpectChain, + + match: (regex: RegExp, message?: string) => ExpectChain, + string: (string: string, message?: string) => ExpectChain, + + key: (key: string) => ExpectChain, + keys: ( + key: string | Array, + ...keys: Array + ) => ExpectChain, + + throw: ( + err?: Class | Error | RegExp | string, + errMsgMatcher?: RegExp | string, + msg?: string + ) => ExpectChain, + + respondTo: (method: string, message?: string) => ExpectChain, + itself: ExpectChain, + + satisfy: (method: (value: T) => boolean, message?: string) => ExpectChain, + + closeTo: (expected: T & number, delta: number, message?: string) => ExpectChain, + + members: (set: mixed, message?: string) => ExpectChain, + oneOf: (list: Array, message?: string) => ExpectChain, + + change: (obj: mixed, key: string, message?: string) => ExpectChain, + increase: (obj: mixed, key: string, message?: string) => ExpectChain, + decrease: (obj: mixed, key: string, message?: string) => ExpectChain, + + by: (delta: number, message?: string) => ExpectChain, + + // dirty-chai + ok: () => ExpectChain, + true: () => ExpectChain, + false: () => ExpectChain, + null: () => ExpectChain, + undefined: () => ExpectChain, + exist: () => ExpectChain, + empty: () => ExpectChain, + + extensible: () => ExpectChain, + sealed: () => ExpectChain, + frozen: () => ExpectChain, + NaN: () => ExpectChain, + + // chai-immutable + size: (n: number) => ExpectChain, + + // sinon-chai + called: () => ExpectChain, + callCount: (n: number) => ExpectChain, + calledOnce: () => ExpectChain, + calledTwice: () => ExpectChain, + calledThrice: () => ExpectChain, + calledBefore: (spy: mixed) => ExpectChain, + calledAfter: (spy: mixed) => ExpectChain, + calledWith: (...args: Array) => ExpectChain, + calledWithMatch: (...args: Array) => ExpectChain, + calledWithExactly: (...args: Array) => ExpectChain, + + // chai-as-promised + eventually: ExpectChain, + resolvedWith: (value: mixed) => Promise & ExpectChain, + resolved: () => Promise & ExpectChain, + rejectedWith: (value: mixed) => Promise & ExpectChain, + rejected: () => Promise & ExpectChain, + notify: (callback: () => mixed) => ExpectChain, + fulfilled: () => Promise & ExpectChain, + + // chai-subset + containSubset: (obj: Object | Object[]) => ExpectChain, + + // chai-redux-mock-store + dispatchedActions: ( + actions: Array any)> + ) => ExpectChain, + dispatchedTypes: (actions: Array) => ExpectChain, + + // chai-enzyme + attr: (key: string, val?: any) => ExpectChain, + data: (key: string, val?: any) => ExpectChain, + prop: (key: string, val?: any) => ExpectChain, + state: (key: string, val?: any) => ExpectChain, + value: (val: string) => ExpectChain, + className: (val: string) => ExpectChain, + text: (val: string) => ExpectChain, + + // chai-karma-snapshot + matchSnapshot: (lang?: any, update?: boolean, msg?: any) => ExpectChain + }; + + declare function expect(actual: T, message?: string): ExpectChain; + + declare function use(plugin: (chai: Object, utils: Object) => void): void; + + declare class assert { + static (expression: mixed, message?: string): void; + static fail( + actual: mixed, + expected: mixed, + message?: string, + operator?: string + ): void; + + static isOk(object: mixed, message?: string): void; + static isNotOk(object: mixed, message?: string): void; + + static equal(actual: mixed, expected: mixed, message?: string): void; + static notEqual(actual: mixed, 
expected: mixed, message?: string): void; + + static strictEqual(act: mixed, exp: mixed, msg?: string): void; + static notStrictEqual(act: mixed, exp: mixed, msg?: string): void; + + static deepEqual(act: mixed, exp: mixed, msg?: string): void; + static notDeepEqual(act: mixed, exp: mixed, msg?: string): void; + + static ok(val: mixed, msg?: string): void; + static isTrue(val: mixed, msg?: string): void; + static isNotTrue(val: mixed, msg?: string): void; + static isFalse(val: mixed, msg?: string): void; + static isNotFalse(val: mixed, msg?: string): void; + + static isNull(val: mixed, msg?: string): void; + static isNotNull(val: mixed, msg?: string): void; + + static isUndefined(val: mixed, msg?: string): void; + static isDefined(val: mixed, msg?: string): void; + + static isNaN(val: mixed, msg?: string): void; + static isNotNaN(val: mixed, msg?: string): void; + + static isAbove(val: number, abv: number, msg?: string): void; + static isBelow(val: number, blw: number, msg?: string): void; + + static isAtMost(val: number, atmst: number, msg?: string): void; + static isAtLeast(val: number, atlst: number, msg?: string): void; + + static isFunction(val: mixed, msg?: string): void; + static isNotFunction(val: mixed, msg?: string): void; + + static isObject(val: mixed, msg?: string): void; + static isNotObject(val: mixed, msg?: string): void; + + static isArray(val: mixed, msg?: string): void; + static isNotArray(val: mixed, msg?: string): void; + + static isString(val: mixed, msg?: string): void; + static isNotString(val: mixed, msg?: string): void; + + static isNumber(val: mixed, msg?: string): void; + static isNotNumber(val: mixed, msg?: string): void; + + static isBoolean(val: mixed, msg?: string): void; + static isNotBoolean(val: mixed, msg?: string): void; + + static typeOf(val: mixed, type: string, msg?: string): void; + static notTypeOf(val: mixed, type: string, msg?: string): void; + + static instanceOf(val: mixed, constructor: Function, msg?: string): void; + static notInstanceOf(val: mixed, constructor: Function, msg?: string): void; + + static include(exp: string, inc: mixed, msg?: string): void; + static include(exp: Array, inc: T, msg?: string): void; + + static notInclude(exp: string, inc: mixed, msg?: string): void; + static notInclude(exp: Array, inc: T, msg?: string): void; + + static match(exp: mixed, re: RegExp, msg?: string): void; + static notMatch(exp: mixed, re: RegExp, msg?: string): void; + + static property(obj: Object, prop: string, msg?: string): void; + static notProperty(obj: Object, prop: string, msg?: string): void; + static deepProperty(obj: Object, prop: string, msg?: string): void; + static notDeepProperty(obj: Object, prop: string, msg?: string): void; + + static propertyVal( + obj: Object, + prop: string, + val: mixed, + msg?: string + ): void; + static propertyNotVal( + obj: Object, + prop: string, + val: mixed, + msg?: string + ): void; + + static deepPropertyVal( + obj: Object, + prop: string, + val: mixed, + msg?: string + ): void; + static deepPropertyNotVal( + obj: Object, + prop: string, + val: mixed, + msg?: string + ): void; + + static lengthOf(exp: mixed, len: number, msg?: string): void; + + static throws( + func: () => any, + err?: Class | Error | RegExp | string, + errorMsgMatcher?: string | RegExp, + msg?: string + ): void; + static doesNotThrow( + func: () => any, + err?: Class | Error | RegExp | string, + errorMsgMatcher?: string | RegExp, + msg?: string + ): void; + + static closeTo( + actual: number, + expected: number, + delta: number, + 
msg?: string + ): void; + static approximately( + actual: number, + expected: number, + delta: number, + msg?: string + ): void; + + // chai-immutable + static sizeOf(val: mixed, length: number): void; + } + + declare var config: { + includeStack: boolean, + showDiff: boolean, + truncateThreshold: number + }; +} diff --git a/flow-typed/npm/mocha_v3.1.x.js b/flow-typed/npm/mocha_v5.x.x.js similarity index 98% rename from flow-typed/npm/mocha_v3.1.x.js rename to flow-typed/npm/mocha_v5.x.x.js index 1a35f6e..aec59b5 100644 --- a/flow-typed/npm/mocha_v3.1.x.js +++ b/flow-typed/npm/mocha_v5.x.x.js @@ -1,5 +1,5 @@ -// flow-typed signature: 58fb316c623a4f7918b0e2529256be8c -// flow-typed version: 0ef6a9a08b/mocha_v3.1.x/flow_>=v0.28.x +// flow-typed signature: 7599ac3bddd92797de6f6c8d58914f6c +// flow-typed version: 03669c2773/mocha_v5.x.x/flow_>=v0.28.x declare interface $npm$mocha$SetupOptions { slow?: number; From 8a203303343e1d899221a5cef2934791ba24cb4a Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Tue, 26 Jun 2018 12:51:08 -0700 Subject: [PATCH 08/29] feat: add varaible length type float to parser --- src/dataTypes.js | 7 ++++++ src/types.js | 18 ++++++++++++++- src/value-parser.js | 20 ++++++++++++++++ test/returnValue-test.js | 50 ++++++++++++++++++++++++++++++++++++++++ 4 files changed, 94 insertions(+), 1 deletion(-) diff --git a/src/dataTypes.js b/src/dataTypes.js index d3e1c18..fead580 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -92,6 +92,13 @@ const type = { LengthOfDataLength: 1, hasPrecision: true, hasScale: true + }, + + [0x6D]: { + id: 0x6D, + type: 'FLTN', + name: 'FloatN', + LengthOfDataLength: 1 } }; diff --git a/src/types.js b/src/types.js index 98fe23e..227c7b6 100644 --- a/src/types.js +++ b/src/types.js @@ -9,7 +9,7 @@ type typeInfoId = 0x1F | 0x30 | 0x32 | 0x34 | 0x38 | 0x3A | 0x3B | 0x3C | 0x3D | 0x3E | 0x7A | 0x7F | // BYTELEN_TYPE - 0x24 | 0x26 | 0x37 | 0x3F | 0x68 | 0X6A | 0X6C | + 0x24 | 0x26 | 0x37 | 0x3F | 0x68 | 0X6A | 0X6C | 0X6D | // USHORTLEN_TYPE 0xE7 | @@ -158,6 +158,8 @@ function readTypeId(reader: Reader) { return readBitNType; case 0x6D: // FLTNTYPE + return readFloatNType; + case 0x6E: // MONEYNTYPE case 0x6F: // DATETIMNTYPE case 0x28: // DATENTYPE @@ -259,6 +261,20 @@ function readDecimalNumericType(id, reader: Reader) { return next; } +function readFloatNType(reader: Reader) { + if (!reader.bytesAvailable(1)) { + return; + } + const dataLength = reader.readUInt8(0); + reader.consumeBytes(1); + if (dataLength !== 4 && dataLength !== 8) { + throw new Error('Invalid data length for FLTNTYPE'); + } + const next = reader.stash.pop(); + reader.stash.push(new TypeInfo(0x6D, dataLength)); + return next; +} + function readNVarCharType(reader: Reader) { if (!reader.bytesAvailable(7)) { return; diff --git a/src/value-parser.js b/src/value-parser.js index 4ceeebb..9bed868 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -187,6 +187,26 @@ function readValue(reader: Reader) { } token.value = (value * sign) / Math.pow(10, token.typeInfo.scale); return reader.stash.pop(); + + case 'FloatN': + if (dataLength === 0) { + token.value = null; + return reader.stash.pop(); + } + switch (dataLength) { + case 4: + token.value = reader.readFloatLE(0); + reader.consumeBytes(4); + break; + case 8: + token.value = reader.readDoubleLE(0); + reader.consumeBytes(8); + break; + default: + throw new Error('Unsupported dataLength ' + dataLength + ' for FloatN'); + } + return reader.stash.pop(); + default: console.log('readValue not implemented'); } diff 
--git a/test/returnValue-test.js b/test/returnValue-test.js index 0c398ee..9ac1934 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -418,6 +418,56 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(data); }); + it('should parse the FLTN(7) token correctly', function(done) { + data = Buffer.alloc(30); + tempBuff.copy(data); + + typeid = 0x6D; + dataLength = 4; + + const valueAsBuffer = Buffer.from([0x40, 0x88, 0x59, 0xC7]); + value = -55688.25; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + + addListners(done, token); + reader.end(data); + }); + + it('should parse the FLTN(15) token correctly', function(done) { + data = Buffer.alloc(34); + tempBuff.copy(data); + + typeid = 0x6D; + dataLength = 8; + + const valueAsBuffer = Buffer.from([0x00, 0x00, 0x00, 0x20, 0x08, 0x31, 0xEB, 0x40]); + value = 55688.25390625; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + + addListners(done, token); + reader.end(data); + }); + }); describe('test FIXEDLENTYPE', function() { From 3acbf3785dd470139082cae8f7da5fa01d938744 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Fri, 6 Jul 2018 17:05:45 -0700 Subject: [PATCH 09/29] feat: add varaible length type money to returnvalue parser --- src/dataTypes.js | 7 +++++ src/types.js | 68 +++++++++++++++++----------------------- src/value-parser.js | 18 ++++++++++- test/returnValue-test.js | 48 ++++++++++++++++++++++++++++ 4 files changed, 100 insertions(+), 41 deletions(-) diff --git a/src/dataTypes.js b/src/dataTypes.js index fead580..24eaecf 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -99,6 +99,13 @@ const type = { type: 'FLTN', name: 'FloatN', LengthOfDataLength: 1 + }, + + [0x6E]: { + id: 0x6E, + type: 'MONEYN', + name: 'MoneyN', + LengthOfDataLength: 1 } }; diff --git a/src/types.js b/src/types.js index 227c7b6..e09b390 100644 --- a/src/types.js +++ b/src/types.js @@ -9,7 +9,7 @@ type typeInfoId = 0x1F | 0x30 | 0x32 | 0x34 | 0x38 | 0x3A | 0x3B | 0x3C | 0x3D | 0x3E | 0x7A | 0x7F | // BYTELEN_TYPE - 0x24 | 0x26 | 0x37 | 0x3F | 0x68 | 0X6A | 0X6C | 0X6D | + 0x24 | 0x26 | 0x37 | 0x3F | 0x68 | 0X6A | 0X6C | 0X6D | 0x6E | // USHORTLEN_TYPE 0xE7 | @@ -145,8 +145,6 @@ function readTypeId(reader: Reader) { case 0x24: // GUIDTYPE return readGuidType; - case 0x26: // INTNTYPE - return readIntNType; case 0x37: // DECIMALTYPE case 0x3F: // NUMERICTYPE @@ -154,13 +152,12 @@ function readTypeId(reader: Reader) { case 0x6C: // NUMERICNTYPE return readDecimalNumericType(id, reader); + case 0x26: // INTNTYPE case 0x68: // BITNTYPE - return readBitNType; - case 0x6D: // FLTNTYPE - return readFloatNType; - case 0x6E: // MONEYNTYPE + return readByteLenType(id, reader); + case 0x6F: // DATETIMNTYPE case 0x28: // DATENTYPE case 0x29: // TIMENTYPE @@ -217,35 +214,6 @@ function readGuidType(reader: Reader) { return next; } -function readBitNType(reader: Reader) { - if (!reader.bytesAvailable(1)) { - return; - } - const dataLength = reader.readUInt8(0); - reader.consumeBytes(1); - - const next = reader.stash.pop(); - reader.stash.push(new TypeInfo(0x68, dataLength)); - return next; -} - -function readIntNType(reader: Reader) { - if (!reader.bytesAvailable(1)) 
{ - return; - } - - const dataLength = reader.readUInt8(0); - reader.consumeBytes(1); - - if (dataLength != 0x01 && dataLength != 0x02 && dataLength != 0x04 && dataLength != 0x08) { - throw new Error('Invalid data length for INTNTYPE'); - } - - const next = reader.stash.pop(); - reader.stash.push(new TypeInfo(0x26, dataLength)); - return next; -} - function readDecimalNumericType(id, reader: Reader) { if (!reader.bytesAvailable(3)) { return; @@ -261,17 +229,37 @@ function readDecimalNumericType(id, reader: Reader) { return next; } -function readFloatNType(reader: Reader) { +function readByteLenType(id, reader: Reader) { if (!reader.bytesAvailable(1)) { return; } const dataLength = reader.readUInt8(0); reader.consumeBytes(1); - if (dataLength !== 4 && dataLength !== 8) { - throw new Error('Invalid data length for FLTNTYPE'); + + switch (id) { + case 0x26: + if (dataLength != 0x01 && dataLength != 0x02 && dataLength != 0x04 && dataLength != 0x08) { + throw new Error('Invalid data length for INTNTYPE'); + } + break; + case 0x68: + if (dataLength != 0x00 && dataLength != 0x01) { + throw new Error('Invalid data length for BITNTYPE'); + } + break; + case 0x6D: + if (dataLength !== 4 && dataLength !== 8) { + throw new Error('Invalid data length for FLTNTYPE'); + } + break; + case 0x6E: + if (dataLength !== 4 && dataLength !== 8) { + throw new Error('Invalid data length for MONEYNTYPE'); + } + break; } const next = reader.stash.pop(); - reader.stash.push(new TypeInfo(0x6D, dataLength)); + reader.stash.push(new TypeInfo(id, dataLength)); return next; } diff --git a/src/value-parser.js b/src/value-parser.js index 9bed868..615f8d1 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -207,6 +207,22 @@ function readValue(reader: Reader) { } return reader.stash.pop(); + case 'MoneyN': + if (dataLength === 0) { + token.value = null; + return reader.stash.pop(); + } + switch (dataLength) { + case 4: + token.value = reader.readInt32LE(0) / MONEY_DIVISOR; + reader.consumeBytes(4); + return reader.stash.pop(); + case 8: + return readMoney; + default: + throw new Error('Unsupported dataLength ' + dataLength + ' for MoneyN'); + } + default: console.log('readValue not implemented'); } @@ -241,7 +257,7 @@ function readDateTime(reader: Reader) { function readMoney(reader: Reader) { const token = reader.stash[reader.stash.length - 2]; - const high = reader.readUInt32LE(0); + const high = reader.readInt32LE(0); const low = reader.readUInt32LE(4); token.value = (low + (0x100000000 * high)) / MONEY_DIVISOR; reader.consumeBytes(8); diff --git a/test/returnValue-test.js b/test/returnValue-test.js index 9ac1934..42e413f 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -468,6 +468,54 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(data); }); + it('should parse the MONEYNTYPE(smallmoney) token correctly', function(done) { + data = Buffer.alloc(30); + tempBuff.copy(data); + + typeid = 0x6E; + dataLength = 4; + + const valueAsBuffer = Buffer.from([0x00, 0x00, 0x00, 0x80]); + value = -214748.3648; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token); + reader.end(data); + }); + + it('should parse the MONEYNTYPE(money) token correctly', function(done) { + data = Buffer.alloc(34); + tempBuff.copy(data); + + typeid = 0x6E; + dataLength = 8; + + const valueAsBuffer = 
Buffer.from([0x26, 0x05, 0xF4, 0xFF, 0x01, 0x00, 0x1A, 0x7D]); + value = -337203685477.5807; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token); + reader.end(data); + }); + }); describe('test FIXEDLENTYPE', function() { From ee0848987b15cefb358f3856911bb38d2aae6127 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Wed, 11 Jul 2018 10:31:25 -0700 Subject: [PATCH 10/29] feat: date type support for returnValue parser --- src/dataTypes.js | 7 +++++ src/reader.js | 6 +++++ src/types.js | 7 +++-- src/value-parser.js | 38 +++++++++++++++++++++------ test/returnValue-test.js | 56 +++++++++++++++++++++++++++++++++++++++- 5 files changed, 103 insertions(+), 11 deletions(-) diff --git a/src/dataTypes.js b/src/dataTypes.js index 24eaecf..703673e 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -106,6 +106,13 @@ const type = { type: 'MONEYN', name: 'MoneyN', LengthOfDataLength: 1 + }, + + [0x28]: { + id: 0x28, + type: 'DATEN', + name: 'Date', + LengthOfDataLength: 1 } }; diff --git a/src/reader.js b/src/reader.js index 03b2769..65f02c8 100644 --- a/src/reader.js +++ b/src/reader.js @@ -83,6 +83,12 @@ const Reader = module.exports = class Reader extends Transform { return this.buffer.readInt16LE(this.position + offset); } + readUInt24LE(offset: number) : number { + const low = this.buffer.readUInt16LE(this.position + offset); + const high = this.buffer.readUInt8(this.position + offset + 2); + return low | (high << 16); + } + readUInt32LE(offset: number) : number { return this.buffer.readUInt32LE(this.position + offset); } diff --git a/src/types.js b/src/types.js index e09b390..69182f6 100644 --- a/src/types.js +++ b/src/types.js @@ -9,7 +9,7 @@ type typeInfoId = 0x1F | 0x30 | 0x32 | 0x34 | 0x38 | 0x3A | 0x3B | 0x3C | 0x3D | 0x3E | 0x7A | 0x7F | // BYTELEN_TYPE - 0x24 | 0x26 | 0x37 | 0x3F | 0x68 | 0X6A | 0X6C | 0X6D | 0x6E | + 0x24 | 0x26 | 0x37 | 0x3F | 0x68 | 0X6A | 0X6C | 0X6D | 0x6E | 0x28 | // USHORTLEN_TYPE 0xE7 | @@ -158,8 +158,11 @@ function readTypeId(reader: Reader) { case 0x6E: // MONEYNTYPE return readByteLenType(id, reader); - case 0x6F: // DATETIMNTYPE case 0x28: // DATENTYPE + // TODO: change the function name to something generic? 
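// A standalone sketch (not part of the patch) of the 8-byte MONEYNTYPE decode
// performed by readMoney above: the payload is a 64-bit signed integer sent with
// the more significant (signed) dword first, then the low unsigned dword, and
// the result is that integer divided by 10000.
function decodeMoney8(buf) {
  const high = buf.readInt32LE(0); // high half comes first and carries the sign
  const low = buf.readUInt32LE(4);
  return (high * 0x100000000 + low) / 10000;
}

// Money test vector from returnValue-test.js above: prints -337203685477.5807
console.log(decodeMoney8(Buffer.from([0x26, 0x05, 0xF4, 0xFF, 0x01, 0x00, 0x1A, 0x7D])));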
+ return readFixedLengthType(id, 0, reader); + + case 0x6F: // DATETIMNTYPE case 0x29: // TIMENTYPE case 0x2A: // DATETIME2NTYPE case 0x2B: // DATETIMEOFFSETNTYPE diff --git a/src/value-parser.js b/src/value-parser.js index 615f8d1..556c418 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -189,11 +189,10 @@ function readValue(reader: Reader) { return reader.stash.pop(); case 'FloatN': - if (dataLength === 0) { - token.value = null; - return reader.stash.pop(); - } switch (dataLength) { + case 0: + token.value = null; + return reader.stash.pop(); case 4: token.value = reader.readFloatLE(0); reader.consumeBytes(4); @@ -208,11 +207,10 @@ function readValue(reader: Reader) { return reader.stash.pop(); case 'MoneyN': - if (dataLength === 0) { - token.value = null; - return reader.stash.pop(); - } switch (dataLength) { + case 0: + token.value = null; + return reader.stash.pop(); case 4: token.value = reader.readInt32LE(0) / MONEY_DIVISOR; reader.consumeBytes(4); @@ -223,6 +221,17 @@ function readValue(reader: Reader) { throw new Error('Unsupported dataLength ' + dataLength + ' for MoneyN'); } + case 'Date': + switch (dataLength) { + case 0: + token.value = null; + return reader.stash.pop(); + case 3: + return readDate; + default: + throw new Error('Unsupported dataLength ' + dataLength + ' for Date'); + } + default: console.log('readValue not implemented'); } @@ -255,6 +264,19 @@ function readDateTime(reader: Reader) { return reader.stash.pop(); } + +function readDate(reader: Reader) { + const token = reader.stash[reader.stash.length - 2]; + const days = reader.readUInt24LE(0); + if (reader.options.useUTC) { + token.value = new Date(Date.UTC(2000, 0, days - 730118)); + } else { + token.value = new Date(2000, 0, days - 730118); + } + reader.consumeBytes(3); + return reader.stash.pop(); +} + function readMoney(reader: Reader) { const token = reader.stash[reader.stash.length - 2]; const high = reader.readInt32LE(0); diff --git a/test/returnValue-test.js b/test/returnValue-test.js index 42e413f..7e4b43e 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -142,7 +142,14 @@ describe('Parsing a RETURNVALUE token', function() { assert.strictEqual(token.status, status); assert.strictEqual(token.userType, userType); assert.strictEqual(token.typeInfo.id, typeid); - assert.strictEqual(token.value, value); + + if ((value !== null) && typeid == 0x28) { + assert.equalDate(token.value, value); + } + else { + assert.strictEqual(token.value, value); + } + done(); }); } @@ -516,6 +523,53 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(data); }); + it('should parse the DATENTYPE token correctly', function(done) { + reader.options = {}; + reader.options.useUTC = true; + + data = Buffer.alloc(28); + tempBuff.copy(data); + + typeid = 0x28; + dataLength = 3; + + const valueAsBuffer = Buffer.from([0x0A, 0x49, 0x0B]); + value = new Date('12-10-25Z'); + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token); + reader.end(data); + }); + + it('should parse the DATENTYPE(null) token correctly', function(done) { + reader.options = {}; + reader.options.useUTC = true; + + data = Buffer.alloc(25); + tempBuff.copy(data); + + typeid = 0x28; + dataLength = 0; + value = null; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + + const token = {}; + 
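// A standalone sketch (not part of the patch) of the DATENTYPE decode above:
// the payload is a 3-byte little-endian count of days since 0001-01-01, and
// 730118 is the day number of 2000-01-01, so the Date is anchored at year 2000
// to stay well inside the range JavaScript Dates handle comfortably.
function decodeDate(buf, useUTC) {
  const days = buf.readUIntLE(0, 3); // 24-bit little-endian read
  return useUTC
    ? new Date(Date.UTC(2000, 0, days - 730118))
    : new Date(2000, 0, days - 730118);
}

// DATENTYPE test vector from returnValue-test.js: prints 2025-12-10T00:00:00.000Z
console.log(decodeDate(Buffer.from([0x0A, 0x49, 0x0B]), true));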
addListners(done, token); + reader.end(data); + }); }); describe('test FIXEDLENTYPE', function() { From a188c44b6f68ad024f3773f5e63fe61f41958a1b Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Fri, 13 Jul 2018 16:09:45 -0700 Subject: [PATCH 11/29] refactor: modularize parsing datalength, precision, scale Modularizing the parser code. Removed the type typeInfoId, due to flow error when passing integer. --- src/types.js | 160 ++++++++++++++++++++++++++++++++++++++++++--------- 1 file changed, 134 insertions(+), 26 deletions(-) diff --git a/src/types.js b/src/types.js index 69182f6..3fcb6de 100644 --- a/src/types.js +++ b/src/types.js @@ -3,23 +3,7 @@ type readStep = (reader: Reader) => ?readStep; const Reader = require('./reader'); - -type typeInfoId = - // FIXEDLENTYPE - 0x1F | 0x30 | 0x32 | 0x34 | 0x38 | 0x3A | 0x3B | 0x3C | 0x3D | 0x3E | 0x7A | 0x7F | - - // BYTELEN_TYPE - 0x24 | 0x26 | 0x37 | 0x3F | 0x68 | 0X6A | 0X6C | 0X6D | 0x6E | 0x28 | - - // USHORTLEN_TYPE - 0xE7 | - - // LONGLEN_TYPE - 0xF1 | // XMLTYPE - 0x22 | // IMAGETYPE - 0x23 | // TEXTTYPE - 0x62 | // SSVARIANTTYPE - 0x63; // NTEXTTYPE +const TYPE = require('./dataTypes').TYPE; class Collation { localeId: number @@ -78,7 +62,7 @@ class Collation { } class TypeInfo { - id: typeInfoId + id: number dataLength: ?number precision: ?number scale: ?number @@ -89,11 +73,11 @@ class TypeInfo { xmlOwningSchema: ?string xmlSchemaCollection: ?string - constructor(id: typeInfoId, dataLength: ?number, precision: ?number, scale: ?number) { + constructor(id: number) { this.id = id; - this.dataLength = dataLength; - this.precision = precision; - this.scale = scale; + this.dataLength = undefined; + this.precision = undefined; + this.scale = undefined; this.collation = undefined; @@ -117,6 +101,13 @@ function readTypeId(reader: Reader) { const id = reader.readUInt8(0); reader.consumeBytes(1); + if (!TYPE[id]) { + throw new Error('Unknown Type! - 0x' + id.toString(16)); + } + + reader.stash.push(new TypeInfo(id)); + return readDataLength; + /* switch (id) { // FIXEDLENTYPE case 0x1F: // NULLTYPE @@ -162,8 +153,10 @@ function readTypeId(reader: Reader) { // TODO: change the function name to something generic? return readFixedLengthType(id, 0, reader); - case 0x6F: // DATETIMNTYPE case 0x29: // TIMENTYPE + // read scale?? + + case 0x6F: // DATETIMNTYPE case 0x2A: // DATETIME2NTYPE case 0x2B: // DATETIMEOFFSETNTYPE case 0x2F: // CHARTYPE @@ -190,13 +183,128 @@ function readTypeId(reader: Reader) { default: throw new Error('Unknown Type! 
- 0x' + id.toString(16)); } + */ +} + +function readDataLength(reader: Reader) { + const token: TypeInfo = reader.stash.pop(); + const type = TYPE[token.id]; + if ((token.id & 0x30) === 0x20) { // VARLEN_TYPE + if (type.dataLengthFromScale) { + const next = reader.stash.pop(); + reader.stash.push(token); + return next; + } else if (type.fixedDataLength) { + const next = reader.stash.pop(); + reader.stash.push(token); + return next; + } + else { + switch (type.LengthOfDataLength) { + case 0: + token.dataLength = undefined; + break; + case 1: + token.dataLength = reader.readUInt8(0); + reader.consumeBytes(1); + switch (token.dataLength) { + case 0x24: // GUIDTYPE + if (token.dataLength != 0x00 && token.dataLength != 0x10) { + throw new Error('Invalid data length for GUIDTYPE'); + } + break; + case 0x26: // INTNTYPE + if (token.dataLength != 0x01 && token.dataLength != 0x02 && + token.dataLength != 0x04 && token.dataLength != 0x08) { + throw new Error('Invalid data length for INTNTYPE'); + } + break; + case 0x68: // BITNTYPE + if (token.dataLength != 0x00 && token.dataLength != 0x01) { + throw new Error('Invalid data length for BITNTYPE'); + } + break; + case 0x6D: // FLTNTYPE + if (token.dataLength !== 4 && token.dataLength !== 8) { + throw new Error('Invalid data length for FLTNTYPE'); + } + break; + case 0x6E: // MONEYNTYPE + if (token.dataLength !== 4 && token.dataLength !== 8) { + throw new Error('Invalid data length for MONEYNTYPE'); + } + break; + } + break; + case 2: + token.dataLength = reader.readUInt16LE(0); + reader.consumeBytes(2); + break; + case 4: + token.dataLength = reader.readUInt32LE(0); + reader.consumeBytes(4); + break; + default: + throw new Error('Unsupported dataLengthLength ' + type.LengthOfDataLength + ' for data type ' + type.name); + } + reader.stash.push(token); + return readPrecision; + } + } else { + // token.dataLength is not needed for FIXEDLENTYPE type + const next = reader.stash.pop(); + reader.stash.push(token); + return next; + } } -function readFixedLengthType(id, dataLength: number, reader: Reader) { - const next = reader.stash.pop(); - reader.stash.push(new TypeInfo(id, dataLength)); +function readPrecision(reader: Reader) { + const token: TypeInfo = reader.stash[reader.stash.length - 1]; + const type = TYPE[token.id]; + if (type.hasPrecision) { + token.precision = reader.readUInt8(0); + reader.consumeBytes(1); + } + return readScale; +} + +function readScale(reader: Reader) { + const token: TypeInfo = reader.stash[reader.stash.length - 1]; + const type = TYPE[token.id]; + if (type.hasScale) { + token.scale = reader.readUInt8(0); + reader.consumeBytes(1); + } + return readCollation; +} + +function readCollation(reader: Reader) { + const token: TypeInfo = reader.stash[reader.stash.length - 1]; + const type = TYPE[token.id]; + if (type.hasCollation) { + console.log('readCollation not implemented'); + } + return readSchema; +} +function readSchema(reader: Reader) { + const token: TypeInfo = reader.stash[reader.stash.length - 1]; + const type = TYPE[token.id]; + if (type.hasSchema) { + console.log('readSchema not implemented'); + } + return readUDTInfo; +} + +function readUDTInfo(reader: Reader) { + const token: TypeInfo = reader.stash.pop(); + const type = TYPE[token.id]; + if (type.hasUDTInfo) { + console.log('readUDTInfo not implemented'); + } + const next = reader.stash.pop(); + reader.stash.push(token); return next; } From 0fae4a412a6b7463f7fb17664d57080c262f81e6 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Thu, 19 Jul 2018 14:35:49 -0700 
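// A small illustration (not part of the patch) of the dispatch rule that
// readDataLength relies on above: type ids whose 0x30 bits equal 0x20 belong to
// the VARLENTYPE family and carry an explicit data length, precision or scale on
// the wire, while fixed-length ids fall straight through to the next step.
function isVarLenType(id) {
  return (id & 0x30) === 0x20;
}

console.log(isVarLenType(0x26)); // true  - INTNTYPE reads a 1-byte data length
console.log(isVarLenType(0x38)); // false - INT4 has an implied length of 4 bytes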
Subject: [PATCH 12/29] feat: support for time datatype --- src/dataTypes.js | 12 +++- src/reader.js | 6 ++ src/types.js | 3 +- src/value-parser.js | 48 ++++++++++++++++ test/returnValue-test.js | 118 ++++++++++++++++++++++++++++++--------- 5 files changed, 158 insertions(+), 29 deletions(-) diff --git a/src/dataTypes.js b/src/dataTypes.js index 703673e..616e371 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -112,7 +112,17 @@ const type = { id: 0x28, type: 'DATEN', name: 'Date', - LengthOfDataLength: 1 + LengthOfDataLength: 1, + fixedDataLength: 3 + }, + + [0x29]: { + id: 0x29, + type: 'TIMEN', + name: 'Time', + hasScale: true, + LengthOfDataLength: 1, + dataLengthFromScale: true } }; diff --git a/src/reader.js b/src/reader.js index 65f02c8..09226af 100644 --- a/src/reader.js +++ b/src/reader.js @@ -89,6 +89,12 @@ const Reader = module.exports = class Reader extends Transform { return low | (high << 16); } + readUInt40LE(offset: number) : number { + const low = this.buffer.readUInt32LE(this.position + offset); + const high = this.buffer.readUInt8(this.position + offset + 4); + return (0x100000000 * high) + low; + } + readUInt32LE(offset: number) : number { return this.buffer.readUInt32LE(this.position + offset); } diff --git a/src/types.js b/src/types.js index 3fcb6de..a053531 100644 --- a/src/types.js +++ b/src/types.js @@ -191,9 +191,8 @@ function readDataLength(reader: Reader) { const type = TYPE[token.id]; if ((token.id & 0x30) === 0x20) { // VARLEN_TYPE if (type.dataLengthFromScale) { - const next = reader.stash.pop(); reader.stash.push(token); - return next; + return readScale; } else if (type.fixedDataLength) { const next = reader.stash.pop(); reader.stash.push(token); diff --git a/src/value-parser.js b/src/value-parser.js index 556c418..49e4b61 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -232,6 +232,13 @@ function readValue(reader: Reader) { throw new Error('Unsupported dataLength ' + dataLength + ' for Date'); } + case 'Time': + if (dataLength === 0) { + token.value = null; + return reader.stash.pop(); + } else { + return readTime(dataLength, reader); + } default: console.log('readValue not implemented'); } @@ -264,6 +271,47 @@ function readDateTime(reader: Reader) { return reader.stash.pop(); } +function readTime(dataLength: number, reader: Reader) { + const token = reader.stash[reader.stash.length - 2]; + let value; + switch (dataLength) { + case 3: + value = reader.readUInt24LE(0); + reader.consumeBytes(3); + break; + case 4: + value = reader.readUInt32LE(0); + reader.consumeBytes(4); + break; + case 5: + value = reader.readUInt40LE(0); + reader.consumeBytes(5); + break; + default: + throw new Error('Unknown length for temporal datatype'); + } + const scale = token.typeInfo.scale; + + if (scale < 7) { + for (let i = scale; i < 7; i++) { + value *= 10; + } + } + let date; + if (reader.options.useUTC) { + date = new Date(Date.UTC(1970, 0, 1, 0, 0, 0, value / 10000)); + } else { + date = new Date(1970, 0, 1, 0, 0, 0, value / 10000); + } + + // $FlowFixMe: suppressing this error until https://github.com/facebook/flow/issues/396 is fixed + Object.defineProperty(date, 'nanosecondsDelta', { + enumerable: false, + value: (value % 10000) / Math.pow(10, 7) + }); + token.value = date; + return reader.stash.pop(); +} function readDate(reader: Reader) { const token = reader.stash[reader.stash.length - 2]; diff --git a/test/returnValue-test.js b/test/returnValue-test.js index 7e4b43e..fe22241 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ 
-1,9 +1,9 @@ /* @flow */ const chai = require('chai'); -const assert = chai.assert; const chai_datetime = require('chai-datetime'); chai.use(chai_datetime); +const assert = chai.assert; const Reader = require('../src').Reader; const ReturnValueToken = require('../src/tokens/returnvalue'); @@ -130,7 +130,7 @@ describe('Parsing a RETURNVALUE token', function() { offset = tempOffset; }); - function addListners(done, token) { + function addListners(done, token, nanoSec) { reader.on('data', function(retValToken) { assert.instanceOf(retValToken, ReturnValueToken); token = retValToken; @@ -145,6 +145,10 @@ describe('Parsing a RETURNVALUE token', function() { if ((value !== null) && typeid == 0x28) { assert.equalDate(token.value, value); + if (nanoSec) + {assert.strictEqual(token.value.nanosecondsDelta, nanoSec);} + } else if ((value !== null) && typeid == 0x29) { + assert.equalTime(token.value, value); } else { assert.strictEqual(token.value, value); @@ -570,6 +574,92 @@ describe('Parsing a RETURNVALUE token', function() { addListners(done, token); reader.end(data); }); + + it('should parse the TIMETYPE(2) token correctly', function(done) { + reader.options = {}; + reader.options.useUTC = true; + + data = Buffer.alloc(29); + tempBuff.copy(data); + + typeid = 0x29; + dataLength = 3; + const scale = 2; + + const valueAsBuffer = Buffer.from([0x04, 0x1D, 0x45]); + value = new Date(Date.UTC(1970, 0, 1, 12, 34, 54, 120)); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(scale, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token); + reader.end(data); + }); + + it('should parse the TIMETYPE(3) token correctly', function(done) { + reader.options = {}; + reader.options.useUTC = true; + + data = Buffer.alloc(30); + tempBuff.copy(data); + + typeid = 0x29; + dataLength = 4; + const scale = 3; + + const valueAsBuffer = Buffer.from([0x2F, 0x22, 0xB3, 0x02]); + value = new Date(Date.UTC(1970, 0, 1, 12, 34, 54, 127)); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(scale, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token); + reader.end(data); + }); + + it('should parse the TIMETYPE(7) token correctly', function(done) { + reader.options = {}; + reader.options.useUTC = true; + + data = Buffer.alloc(31); + tempBuff.copy(data); + + typeid = 0x29; + dataLength = 5; + const scale = 7; + + // declare @tm time(7); set @tm = '12:34:54.1275523Z' + const valueAsBuffer = Buffer.from([0x83, 0x61, 0x67, 0x75, 0x69]); + + value = new Date(Date.UTC(1970, 0, 1, 12, 34, 54, 127)); + const nanoSec = 0.0005523; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(scale, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token, nanoSec); + reader.end(data); + }); + }); describe('test FIXEDLENTYPE', function() { @@ -764,30 +854,6 @@ describe('Parsing a RETURNVALUE token', function() { }); - it('should parse the DATETIM4TYPE/SmallDateTime token correctly : local time', function(done) { - reader.options = {}; - reader.options.useUTC = false; - typeid = 0x3A; - const days = 43225; - const minutes = 763; - value = new Date('2018-05-07T12:43:00.000'); - - data = Buffer.alloc(28); - tempBuff.copy(data); - 
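// A standalone sketch (not part of the patch, UTC only) of the TIMEN decode
// above: the payload is an unsigned little-endian count of 10^-scale second
// increments since midnight; it is widened to 7-digit (100 ns) precision, the
// millisecond part goes into the Date and the sub-millisecond remainder is
// exposed as the non-enumerable nanosecondsDelta property.
function decodeTime(buf, scale) {
  let value = buf.readUIntLE(0, buf.length); // 3, 4 or 5 bytes on the wire
  for (let i = scale; i < 7; i++) value *= 10;
  const date = new Date(Date.UTC(1970, 0, 1, 0, 0, 0, value / 10000));
  Object.defineProperty(date, 'nanosecondsDelta', {
    enumerable: false,
    value: (value % 10000) / Math.pow(10, 7)
  });
  return date;
}

// time(2) test vector from returnValue-test.js: prints 1970-01-01T12:34:54.120Z
console.log(decodeTime(Buffer.from([0x04, 0x1D, 0x45]), 2).toISOString());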
// TYPE_INFO - data.writeUInt8(typeid, offset++); - - // TYPE_VARBYTE - data.writeUInt16LE(days, offset); - data.writeUInt16LE(minutes, offset + 2); - - const token = {}; - addListners(done, token); - - reader.end(data); - - }); - it('should parse the FLT4TYPE/Real token correctly', function(done) { typeid = 0x3B; value = 9654.2529296875; From fd1e608ed4bfaf3f7f0bc42814efba1ff2f54f30 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Thu, 19 Jul 2018 14:37:56 -0700 Subject: [PATCH 13/29] test: update flow type for test framework and dependencies --- flow-typed/chai-datetime.js | 3 -- flow-typed/npm/chai-datetime_vx.x.x.js | 39 ++++++++++++++++++++++++++ flow-typed/npm/chai_v4.x.x.js | 9 ++++-- src/tokens/returnvalue/index.js | 2 +- 4 files changed, 47 insertions(+), 6 deletions(-) delete mode 100644 flow-typed/chai-datetime.js create mode 100644 flow-typed/npm/chai-datetime_vx.x.x.js diff --git a/flow-typed/chai-datetime.js b/flow-typed/chai-datetime.js deleted file mode 100644 index 2688529..0000000 --- a/flow-typed/chai-datetime.js +++ /dev/null @@ -1,3 +0,0 @@ -declare module 'chai-datetime' { - declare module.exports: any; - } \ No newline at end of file diff --git a/flow-typed/npm/chai-datetime_vx.x.x.js b/flow-typed/npm/chai-datetime_vx.x.x.js new file mode 100644 index 0000000..c3350fe --- /dev/null +++ b/flow-typed/npm/chai-datetime_vx.x.x.js @@ -0,0 +1,39 @@ +// flow-typed signature: f041f510a7750be7b298b3ce9e122452 +// flow-typed version: <>/chai-datetime_v^1.5.0/flow_v0.72.0 + +/** + * This is an autogenerated libdef stub for: + * + * 'chai-datetime' + * + * Fill this stub out by replacing all the `any` types. + * + * Once filled out, we encourage you to share your work with the + * community by sending a pull request to: + * https://github.com/flowtype/flow-typed + */ + +declare module 'chai-datetime' { + declare module.exports: any; +} + +/** + * We include stubs for each file inside this npm package in case you need to + * require those files directly. Feel free to delete any files that aren't + * needed. 
+ */ +declare module 'chai-datetime/chai-datetime' { + declare module.exports: any; +} + +declare module 'chai-datetime/test/test' { + declare module.exports: any; +} + +// Filename aliases +declare module 'chai-datetime/chai-datetime.js' { + declare module.exports: $Exports<'chai-datetime/chai-datetime'>; +} +declare module 'chai-datetime/test/test.js' { + declare module.exports: $Exports<'chai-datetime/test/test'>; +} diff --git a/flow-typed/npm/chai_v4.x.x.js b/flow-typed/npm/chai_v4.x.x.js index 23d7bd9..b98a9c1 100644 --- a/flow-typed/npm/chai_v4.x.x.js +++ b/flow-typed/npm/chai_v4.x.x.js @@ -1,5 +1,5 @@ -// flow-typed signature: f506e02b4091df7bc8f1798f5857a644 -// flow-typed version: 120d43bb08/chai_v4.x.x/flow_>=v0.25.0 +// flow-typed signature: 945a83fac32f55c964e2a5d3bcc55c94 +// flow-typed version: ae29f8b328/chai_v4.x.x/flow_>=v0.25.0 declare module "chai" { declare type ExpectChain = { @@ -163,6 +163,11 @@ declare module "chai" { static isOk(object: mixed, message?: string): void; static isNotOk(object: mixed, message?: string): void; + static empty(object: mixed, message?: string): void; + static isEmpty(object: mixed, message?: string): void; + static notEmpty(object: mixed, message?: string): void; + static isNotEmpty(object: mixed, message?: string): void; + static equal(actual: mixed, expected: mixed, message?: string): void; static notEqual(actual: mixed, expected: mixed, message?: string): void; diff --git a/src/tokens/returnvalue/index.js b/src/tokens/returnvalue/index.js index 68f473c..691bb12 100644 --- a/src/tokens/returnvalue/index.js +++ b/src/tokens/returnvalue/index.js @@ -9,7 +9,7 @@ class ReturnValueToken extends Token { status: ?number userType: ?number typeInfo: ?TypeInfo - valueLength: ?number + valueLength: ?number // TODO: is it used? value: ?any constructor() { From c15ce80a05e8e0ff513f262a98752f99fa489d41 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Thu, 19 Jul 2018 15:34:06 -0700 Subject: [PATCH 14/29] fix: apis of chai-datetime plugin are not recognized in flow Ignoring test folder from flow type check (temporarily?) 
--- .flowconfig | 1 + 1 file changed, 1 insertion(+) diff --git a/.flowconfig b/.flowconfig index 5ae3218..87a46e8 100644 --- a/.flowconfig +++ b/.flowconfig @@ -1,6 +1,7 @@ [ignore] /lib/.* /node_modules/.* +/test/.* [libs] flow-typed/ From 266d0367ae181db9bf9b3dfaac04940c24df64cd Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Mon, 30 Jul 2018 15:05:56 -0700 Subject: [PATCH 15/29] feat: add temporal types to returnvalue token parser --- src/dataTypes.js | 29 +++-- src/value-parser.js | 86 +++++++++++++-- test/returnValue-test.js | 233 ++++++++++++++++++++++++++++++++++++++- 3 files changed, 327 insertions(+), 21 deletions(-) diff --git a/src/dataTypes.js b/src/dataTypes.js index 616e371..0caadf6 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -70,21 +70,18 @@ const type = { name: 'UniqueIdentifier', LengthOfDataLength: 1 }, - [0x26]: { id: 0x26, type: 'INTN', name: 'IntN', LengthOfDataLength: 1 }, - [0x68]: { id: 0x68, type: 'BITN', name: 'BitN', LengthOfDataLength: 1 }, - [0x6C]: { id: 0x6C, type: 'NUMERICN', @@ -93,21 +90,18 @@ const type = { hasPrecision: true, hasScale: true }, - [0x6D]: { id: 0x6D, type: 'FLTN', name: 'FloatN', LengthOfDataLength: 1 }, - [0x6E]: { id: 0x6E, type: 'MONEYN', name: 'MoneyN', LengthOfDataLength: 1 }, - [0x28]: { id: 0x28, type: 'DATEN', @@ -115,7 +109,6 @@ const type = { LengthOfDataLength: 1, fixedDataLength: 3 }, - [0x29]: { id: 0x29, type: 'TIMEN', @@ -123,6 +116,28 @@ const type = { hasScale: true, LengthOfDataLength: 1, dataLengthFromScale: true + }, + [0x6F]: { + id: 0x6F, + type: 'DATETIMN', + name: 'DateTimeN', + LengthOfDataLength: 1 + }, + [0x2A]: { + id: 0x2A, + type: 'DATETIME2N', + name: 'DateTime2', + hasScale: true, + LengthOfDataLength: 1, + dataLengthFromScale: true + }, + [0x2B]: { + id: 0x2B, + type: 'DATETIMEOFFSETN', + name: 'DateTimeOffset', + hasScale: true, + LengthOfDataLength: 1, + dataLengthFromScale: true } }; diff --git a/src/value-parser.js b/src/value-parser.js index 49e4b61..0785841 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -227,7 +227,7 @@ function readValue(reader: Reader) { token.value = null; return reader.stash.pop(); case 3: - return readDate; + return readDateN; default: throw new Error('Unsupported dataLength ' + dataLength + ' for Date'); } @@ -237,8 +237,36 @@ function readValue(reader: Reader) { token.value = null; return reader.stash.pop(); } else { - return readTime(dataLength, reader); + return readTimeN(dataLength, reader); } + + case 'DateTimeN': + switch (dataLength) { + case 0: + token.value = null; + return reader.stash.pop(); + case 4: + return readSmallDateTime; + case 8: + return readDateTime; + } + + case 'DateTime2': + if (dataLength === 0) { + token.value = null; + return reader.stash.pop(); + } else { + return readDateTime2N(dataLength, reader); + } + + case 'DateTimeOffset': + if (dataLength === 0) { + token.value = null; + return reader.stash.pop(); + } else { + return readDateTimeOffset(dataLength, reader); + } + default: console.log('readValue not implemented'); } @@ -271,8 +299,13 @@ function readDateTime(reader: Reader) { return reader.stash.pop(); } -function readTime(dataLength: number, reader: Reader) { +function readTimeN(dataLength: number, reader: Reader) { const token = reader.stash[reader.stash.length - 2]; + token.value = readTime(dataLength, token.typeInfo.scale, reader); + return reader.stash.pop(); +} + +function readTime(dataLength: number, scale: number, reader: Reader) { let value; switch (dataLength) { case 3: @@ -288,9 +321,8 @@ function 
readTime(dataLength: number, reader: Reader) { reader.consumeBytes(5); break; default: - throw new Error('Unknown length for temporal datatype'); + throw new Error(`Unknown length ${dataLength} for temporal datatype`); } - const scale = token.typeInfo.scale; if (scale < 7) { for (let i = scale; i < 7; i++) { @@ -309,22 +341,54 @@ function readTime(dataLength: number, reader: Reader) { enumerable: false, value: (value % 10000) / Math.pow(10, 7) }); - token.value = date; - return reader.stash.pop(); + return date; } -function readDate(reader: Reader) { +function readDateN(reader: Reader) { const token = reader.stash[reader.stash.length - 2]; + token.value = readDate(undefined, reader.options.useUTC, reader); + return reader.stash.pop(); +} + +function readDate(time: ?Date, useUTC: boolean, reader: Reader) { + let value; const days = reader.readUInt24LE(0); - if (reader.options.useUTC) { - token.value = new Date(Date.UTC(2000, 0, days - 730118)); + if (useUTC) { + value = new Date(Date.UTC(2000, 0, days - 730118, 0, 0, 0, time ? +time : 0)); } else { - token.value = new Date(2000, 0, days - 730118); + value = new Date(2000, 0, days - 730118, time ? (time.getHours(), time.getMinutes(), time.getSeconds(), time.getMilliseconds()) : 0); } reader.consumeBytes(3); + return value; +} + +function readDateTime2N(dataLength: number, reader: Reader) { + const token = reader.stash[reader.stash.length - 2]; + token.value = readDateTime2(dataLength - 3, token.typeInfo.scale, reader.options.useUTC, reader); + return reader.stash.pop(); +} + +function readDateTimeOffset(dataLength: number, reader: Reader) { + const token = reader.stash[reader.stash.length - 2]; + const date = readDateTime2(dataLength - 5, token.typeInfo.scale, true, reader); + reader.readInt16LE(0); // TODO: add implementation to handle offset + reader.consumeBytes(2); + token.value = date; return reader.stash.pop(); } +function readDateTime2(dataLength: number, scale: number, useUTC: boolean, reader: Reader) { + const time = readTime(dataLength, scale, reader); + const date = readDate(time, useUTC, reader); + // $FlowFixMe + Object.defineProperty(date, 'nanosecondsDelta', { + enumerable: false, + // $FlowFixMe + value: time.nanosecondsDelta + }); + return date; +} + function readMoney(reader: Reader) { const token = reader.stash[reader.stash.length - 2]; const high = reader.readInt32LE(0); diff --git a/test/returnValue-test.js b/test/returnValue-test.js index fe22241..86d35ae 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -143,12 +143,15 @@ describe('Parsing a RETURNVALUE token', function() { assert.strictEqual(token.userType, userType); assert.strictEqual(token.typeInfo.id, typeid); - if ((value !== null) && typeid == 0x28) { + if ((value !== null) && (typeid == 0x28 || typeid == 0x6F)) { assert.equalDate(token.value, value); - if (nanoSec) - {assert.strictEqual(token.value.nanosecondsDelta, nanoSec);} + if (nanoSec) { assert.strictEqual(token.value.nanosecondsDelta, nanoSec); } } else if ((value !== null) && typeid == 0x29) { assert.equalTime(token.value, value); + } else if ((value !== null) && (typeid == 0x2A || typeid == 0x2B)) { + assert.equalDate(token.value, value); + assert.equalTime(token.value, value); + if (nanoSec) { assert.strictEqual(token.value.nanosecondsDelta, nanoSec); } } else { assert.strictEqual(token.value, value); @@ -660,6 +663,230 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(data); }); + it('should parse the DATETIMETYPE token correctly', function(done) { + 
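// A standalone sketch (not part of the patch, UTC only) of the DATETIME2N layout
// handled above: a TIMEN-style scaled time-of-day followed by a trailing 3-byte
// day count; DATETIMEOFFSETN appends a further 2-byte offset that the patch reads
// but does not yet apply. The sub-millisecond remainder (nanosecondsDelta in the
// patch) is omitted here for brevity.
function decodeDateTime2(buf, scale) {
  const timeBytes = buf.length - 3;
  let ticks = buf.readUIntLE(0, timeBytes);    // 10^-scale second increments
  for (let i = scale; i < 7; i++) ticks *= 10; // widen to 100 ns increments
  const days = buf.readUIntLE(timeBytes, 3);   // days since 0001-01-01
  return new Date(Date.UTC(2000, 0, days - 730118, 0, 0, 0, ticks / 10000));
}

// datetime2(7) test vector from returnValue-test.js below: prints 2007-05-08T12:35:29.123Z
console.log(decodeDateTime2(
  Buffer.from([0x07, 0x55, 0x43, 0x8A, 0x69, 0x83, 0x2E, 0x0B]), 7
).toISOString());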
reader.options = {}; + reader.options.useUTC = true; + + data = Buffer.alloc(34); + tempBuff.copy(data); + + typeid = 0x6F; + dataLength = 8; + + // declare @tm datetime; set @tm = '2007-05-08T12:35:29.123Z' + const valueAsBuffer = Buffer.from([0x28, 0x99, 0x00, 0x00, 0x11, 0x80, 0xCF, 0x00]); + + value = new Date(Date.UTC(2007, 4, 8, 12, 35, 29, 123)); + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token); + reader.end(data); + }); + + it('should parse the DATETIMETYPE(smalldatetime) token correctly', function(done) { + reader.options = {}; + reader.options.useUTC = true; + + data = Buffer.alloc(30); + tempBuff.copy(data); + + typeid = 0x6F; + dataLength = 4; + + // declare @tm smalldatetime; set @tm = '2007-05-08T12:35:29.123Z' + const valueAsBuffer = Buffer.from([0x28, 0x99, 0xF3, 0x02]); + + value = new Date(Date.UTC(2007, 4, 8, 12, 35)); + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(dataLength, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token); + reader.end(data); + }); + + it('should parse the DATETIME2NTYPE(7) token correctly', function(done) { + reader.options = {}; + reader.options.useUTC = true; + + data = Buffer.alloc(34); + tempBuff.copy(data); + + typeid = 0x2A; + dataLength = 8; + const scale = 7; + const nanoSec = 0.0004567; + + const valueAsBuffer = Buffer.from([0x07, 0x55, 0x43, 0x8A, 0x69, 0x83, 0x2E, 0x0B]); + value = new Date(Date.UTC(2007, 4, 8, 12, 35, 29, 123)); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(scale, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token, nanoSec); + reader.end(data); + }); + + it('should parse the DATETIME2NTYPE(4) token correctly', function(done) { + reader.options = {}; + reader.options.useUTC = true; + + data = Buffer.alloc(33); + tempBuff.copy(data); + + typeid = 0x2A; + dataLength = 7; + const scale = 4; + const nanoSec = 0.0006; + + const valueAsBuffer = Buffer.from([0xE4, 0xAC, 0x04, 0x1B, 0x83, 0x2E, 0x0B]); + value = new Date(Date.UTC(2007, 4, 8, 12, 35, 29, 123)); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(scale, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token, nanoSec); + reader.end(data); + }); + + it('should parse the DATETIME2NTYPE token correctly', function(done) { + reader.options = {}; + reader.options.useUTC = true; + + data = Buffer.alloc(32); + tempBuff.copy(data); + + typeid = 0x2A; + dataLength = 6; + const scale = 0; + const nanoSec = 0; + + const valueAsBuffer = Buffer.from([0x11, 0xB1, 0x00, 0x83, 0x2E, 0x0B]); + value = new Date(Date.UTC(2007, 4, 8, 12, 35, 29)); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(scale, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token, nanoSec); + reader.end(data); + }); + + it('should parse the DATETIMEOFFSETNTYPE token correctly', function(done) { + reader.options 
= {}; + reader.options.useUTC = true; + + data = Buffer.alloc(34); + tempBuff.copy(data); + + typeid = 0x2B; + dataLength = 8; + const scale = 0; + const nanoSec = 0; + + const valueAsBuffer = Buffer.from([0x3A, 0xA2, 0x00, 0x0A, 0x49, 0x0B, 0x3C, 0x00]); + // select @count = '12-10-25 12:32:10.000 +01:00' + value = new Date(Date.UTC(2025, 11, 10, 11, 32, 10)); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(scale, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token, nanoSec); + reader.end(data); + }); + + it('should parse the DATETIMEOFFSETNTYPE(7) token correctly', function(done) { + reader.options = {}; + reader.options.useUTC = true; + + data = Buffer.alloc(36); + tempBuff.copy(data); + + typeid = 0x2B; + dataLength = 10; + const scale = 7; + const nanoSec = 0.0008741; + + const valueAsBuffer = Buffer.from([0x35, 0x36, 0x00, 0xB2, 0x60, 0x0A, 0x49, 0x0B, 0x3C, 0x00]); + // select @count = '12-10-25 12:32:10.3218741 +01:00' + value = new Date(Date.UTC(2025, 11, 10, 11, 32, 10, 321)); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(scale, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token, nanoSec); + reader.end(data); + }); + + it('should parse the DATETIMEOFFSETNTYPE(null) token correctly', function(done) { + reader.options = {}; + reader.options.useUTC = true; + + data = Buffer.alloc(26); + tempBuff.copy(data); + + typeid = 0x2B; + dataLength = 0; + const scale = 0; + + value = null; + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt8(scale, offset++); + + // TYPE_VARBYTE + data.writeUInt8(dataLength, offset++); + offset += dataLength; + + const token = {}; + addListners(done, token); + reader.end(data); + }); }); describe('test FIXEDLENTYPE', function() { From 2def84848caa8d1698d257ca84c770d42add3656 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Mon, 13 Aug 2018 14:39:49 -0700 Subject: [PATCH 16/29] feat: add collation and bigchartype support --- flow-typed/npm/iconv-lite_vx.x.x.js | 116 +++++++++++++++++++ package.json | 11 +- src/collation.js | 171 ++++++++++++++++++++++++++++ src/dataTypes.js | 10 ++ src/types.js | 36 +++--- src/value-parser.js | 29 +++++ test/returnValue-test.js | 99 ++++++++++++++-- 7 files changed, 443 insertions(+), 29 deletions(-) create mode 100644 flow-typed/npm/iconv-lite_vx.x.x.js create mode 100644 src/collation.js diff --git a/flow-typed/npm/iconv-lite_vx.x.x.js b/flow-typed/npm/iconv-lite_vx.x.x.js new file mode 100644 index 0000000..c198906 --- /dev/null +++ b/flow-typed/npm/iconv-lite_vx.x.x.js @@ -0,0 +1,116 @@ +// flow-typed signature: 65e09a462d23dee69755e10dc072529c +// flow-typed version: <>/iconv-lite_v^0.4.23/flow_v0.72.0 + +/** + * This is an autogenerated libdef stub for: + * + * 'iconv-lite' + * + * Fill this stub out by replacing all the `any` types. + * + * Once filled out, we encourage you to share your work with the + * community by sending a pull request to: + * https://github.com/flowtype/flow-typed + */ + +declare module 'iconv-lite' { + declare module.exports: any; +} + +/** + * We include stubs for each file inside this npm package in case you need to + * require those files directly. Feel free to delete any files that aren't + * needed. 
+ */ +declare module 'iconv-lite/encodings/dbcs-codec' { + declare module.exports: any; +} + +declare module 'iconv-lite/encodings/dbcs-data' { + declare module.exports: any; +} + +declare module 'iconv-lite/encodings/index' { + declare module.exports: any; +} + +declare module 'iconv-lite/encodings/internal' { + declare module.exports: any; +} + +declare module 'iconv-lite/encodings/sbcs-codec' { + declare module.exports: any; +} + +declare module 'iconv-lite/encodings/sbcs-data-generated' { + declare module.exports: any; +} + +declare module 'iconv-lite/encodings/sbcs-data' { + declare module.exports: any; +} + +declare module 'iconv-lite/encodings/utf16' { + declare module.exports: any; +} + +declare module 'iconv-lite/encodings/utf7' { + declare module.exports: any; +} + +declare module 'iconv-lite/lib/bom-handling' { + declare module.exports: any; +} + +declare module 'iconv-lite/lib/extend-node' { + declare module.exports: any; +} + +declare module 'iconv-lite/lib/index' { + declare module.exports: any; +} + +declare module 'iconv-lite/lib/streams' { + declare module.exports: any; +} + +// Filename aliases +declare module 'iconv-lite/encodings/dbcs-codec.js' { + declare module.exports: $Exports<'iconv-lite/encodings/dbcs-codec'>; +} +declare module 'iconv-lite/encodings/dbcs-data.js' { + declare module.exports: $Exports<'iconv-lite/encodings/dbcs-data'>; +} +declare module 'iconv-lite/encodings/index.js' { + declare module.exports: $Exports<'iconv-lite/encodings/index'>; +} +declare module 'iconv-lite/encodings/internal.js' { + declare module.exports: $Exports<'iconv-lite/encodings/internal'>; +} +declare module 'iconv-lite/encodings/sbcs-codec.js' { + declare module.exports: $Exports<'iconv-lite/encodings/sbcs-codec'>; +} +declare module 'iconv-lite/encodings/sbcs-data-generated.js' { + declare module.exports: $Exports<'iconv-lite/encodings/sbcs-data-generated'>; +} +declare module 'iconv-lite/encodings/sbcs-data.js' { + declare module.exports: $Exports<'iconv-lite/encodings/sbcs-data'>; +} +declare module 'iconv-lite/encodings/utf16.js' { + declare module.exports: $Exports<'iconv-lite/encodings/utf16'>; +} +declare module 'iconv-lite/encodings/utf7.js' { + declare module.exports: $Exports<'iconv-lite/encodings/utf7'>; +} +declare module 'iconv-lite/lib/bom-handling.js' { + declare module.exports: $Exports<'iconv-lite/lib/bom-handling'>; +} +declare module 'iconv-lite/lib/extend-node.js' { + declare module.exports: $Exports<'iconv-lite/lib/extend-node'>; +} +declare module 'iconv-lite/lib/index.js' { + declare module.exports: $Exports<'iconv-lite/lib/index'>; +} +declare module 'iconv-lite/lib/streams.js' { + declare module.exports: $Exports<'iconv-lite/lib/streams'>; +} diff --git a/package.json b/package.json index 35417a3..6f1a756 100644 --- a/package.json +++ b/package.json @@ -12,6 +12,9 @@ "author": "Arthur Schreiber (schreiber.arthur@gmail.com)", "license": "ISC", "devDependencies": { + "@commitlint/cli": "^6.0.2", + "@commitlint/config-conventional": "^6.0.2", + "@commitlint/travis-cli": "^6.0.2", "babel-cli": "^6.26.0", "babel-eslint": "^8.2.2", "babel-plugin-transform-runtime": "^6.23.0", @@ -20,16 +23,14 @@ "babel-register": "^6.24.0", "benchmark": "^2.1.3", "chai": "^4.0.2", + "chai-datetime": "^1.5.0", "eslint": "^4.19.1", "flow-bin": "^0.72.0", "flow-copy-source": "^1.3.0", + "iconv-lite": "^0.4.23", "mocha": "^5.0.5", "semantic-release": "^15.1.5", - "semantic-release-cli": "^4.0.1", - "@commitlint/cli": "^6.0.2", - "@commitlint/config-conventional": "^6.0.2", - 
"@commitlint/travis-cli": "^6.0.2", - "chai-datetime": "^1.5.0" + "semantic-release-cli": "^4.0.7" }, "dependencies": { "babel-runtime": "^6.26.0" diff --git a/src/collation.js b/src/collation.js new file mode 100644 index 0000000..8a86e3a --- /dev/null +++ b/src/collation.js @@ -0,0 +1,171 @@ +module.exports.codepageByLcid = { + 0x00000000: 0, //NULL + 0x00000401: 'CP1256', //Arabic + 0x0000048c: 'CP1256', //Dari + 0x00000404: 'CP950', //Chinese_Taiwan_Stroke + 0x00000405: 'CP1250', //Czech + 0x00000406: 'CP1252', //Danish_Norwegian + 0x00000485: 'CP1251', //Yakut + 0x00000408: 'CP1253', //Greek + 0x00000409: 'CP1252', //Latin1_General + 0x0000040a: 'CP1252', //Traditional_Spanish + 0x0000040b: 'CP1252', //Finnish_Swedish + 0x0000040c: 'CP1252', //French + 0x0000040d: 'CP1255', //Hebrew + 0x0000040e: 'CP1250', //Hungarian + 0x0000040f: 'CP1252', //Icelandic + 0x00000483: 'CP1252', //Corsican + 0x00000411: 'CP932', //Japanese + 0x00000412: 'CP949', //Korean_Wansung + 0x00000481: 'UNICODE', //Maori // 0 as Unicode + 0x00000415: 'CP1250', //Polish + 0x00000418: 'CP1250', //Romanian + 0x00000419: 'CP1251', //Cyrillic_General + 0x0000041a: 'CP1250', //Croatian + 0x0000041b: 'CP1250', //Slovak + 0x0000041c: 'CP1250', //Albanian + 0x0000041e: 'CP874', //Thai + 0x0000041f: 'CP1254', //Turkish + 0x00000422: 'CP1251', //Ukrainian + 0x00000424: 'CP1250', //Slovenian + 0x00000425: 'CP1257', //Estonian + 0x00000426: 'CP1257', //Latvian + 0x00000427: 'CP1257', //Lithuanian + 0x0000042a: 'CP1258', //Vietnamese + 0x0000042f: 'CP1251', //Macedonian + 0x00000439: 0, //Hindi + 0x00000800: 'CP_ACP', // NULL - LOCALE_SYSTEM_DEFAULT; used at startup ??? + 0x00000804: 'CP936', //Chinese_PRC + 0x00000480: 'CP1256', //Uighur + 0x0000047e: 'CP1252', //Breton + 0x00000827: 'CP1257', //Lithuanian_Classic + 0x00000c0a: 'CP1252', //Modern_Spanish + 0x00010407: 'CP1252', //German_PhoneBook + 0x0001040e: 'CP1250', //Hungarian_Technical + 0x00010411: 'CP932', //Japanese_Unicode + 0x00010412: 'CP949', //Korean_Wansung_Unicode + 0x00010437: 'CP1252', //Georgian_Modern_Sort + 0x00020804: 'CP936', //Chinese_PRC_Stroke + 0x00030404: 'CP950', //Chinese_Taiwan_Bopomofo + 0x0000042c: 'CP1254', //Azeri_Latin + 0x0000043f: 'CP1251', //Kazakh + 0x00000443: 'CP1254', //Uzbek_Latin + 0x00000444: 'CP1251', //Tatar + 0x0000045a: 0, //Syriac + 0x00000465: 0, //Divehi + 0x0000082c: 'CP1251', //Azeri_Cyrillic + 0x00000c04: 'CP950', //Chinese_Hong_Kong_Stroke + 0x0000047c: 'CP1252', //Mohawk + 0x00001404: 'CP950', //Chinese_Traditional_Pinyin + 0x00021404: 'CP950', //Chinese_Traditional_Stroke_Order + 0x00040411: 'CP932', //Japanese_Bushu_Kakusu + 0x00000414: 'CP1252', //Norwegian + 0x00000417: 'CP1252', //Romansh + 0x0000081a: 'CP1250', //Serbian_Latin + 0x00000c1a: 'CP1251', //Serbian_Cyrillic + 0x0000141a: 'CP1250', //Bosnian_Latin + 0x0000201a: 'CP1251', //Bosnian_Cyrillic + 0x00000420: 'CP1256', //Urdu + 0x00000429: 'CP1256', //Persian + 0x0000047a: 'CP1252', //Mapudungan + 0x0000042e: 'CP1252', //Upper_Sorbian + 0x0000046d: 'CP1251', //Bashkir + 0x0000043a: 0, //Maltese + 0x0000043b: 'CP1252', //Sami_Norway + 0x0000083b: 'CP1252', //Sami_Sweden_Finland + 0x00000442: 'CP1250', //Turkmen + 0x00000445: 0, //Bengali + 0x0000044d: 0, //Assamese + 0x00000463: 0, //Pashto + 0x00000451: 0, //Tibetan + 0x00000452: 'CP1252', //Welsh + 0x00000453: 0, //Khmer + 0x00000454: 0, //Lao + 0x00000462: 'CP1252', //Frisian + 0x0000085f: 'CP1252', //Tamazight + 0x00000461: 0, //Nepali +}; + +module.exports.codepageBySortId = { + 30: 'CP437', // 
SQL_Latin1_General_CP437_BIN + 31: 'CP437', // SQL_Latin1_General_CP437_CS_AS + 32: 'CP437', // SQL_Latin1_General_CP437_CI_AS + 33: 'CP437', // SQL_Latin1_General_Pref_CP437_CI_AS + 34: 'CP437', // SQL_Latin1_General_CP437_CI_AI + 40: 'CP850', // SQL_Latin1_General_CP850_BIN + 41: 'CP850', // SQL_Latin1_General_CP850_CS_AS + 42: 'CP850', // SQL_Latin1_General_CP850_CI_AS + 43: 'CP850', // SQL_Latin1_General_Pref_CP850_CI_AS + 44: 'CP850', // SQL_Latin1_General_CP850_CI_AI + 49: 'CP850', // SQL_1xCompat_CP850_CI_AS + 51: 'CP1252', // SQL_Latin1_General_Cp1_CS_AS_KI_WI + 52: 'CP1252', // SQL_Latin1_General_Cp1_CI_AS_KI_WI + 53: 'CP1252', // SQL_Latin1_General_Pref_Cp1_CI_AS_KI_WI + 54: 'CP1252', // SQL_Latin1_General_Cp1_CI_AI_KI_WI + 55: 'CP850', // SQL_AltDiction_CP850_CS_AS + 56: 'CP850', // SQL_AltDiction_Pref_CP850_CI_AS + 57: 'CP850', // SQL_AltDiction_CP850_CI_AI + 58: 'CP850', // SQL_Scandinavian_Pref_CP850_CI_AS + 59: 'CP850', // SQL_Scandinavian_CP850_CS_AS + 60: 'CP850', // SQL_Scandinavian_CP850_CI_AS + 61: 'CP850', // SQL_AltDiction_CP850_CI_AS + 80: 'CP1250', // SQL_Latin1_General_1250_BIN + 81: 'CP1250', // SQL_Latin1_General_CP1250_CS_AS + 82: 'CP1250', // SQL_Latin1_General_Cp1250_CI_AS_KI_WI + 83: 'CP1250', // SQL_Czech_Cp1250_CS_AS_KI_WI + 84: 'CP1250', // SQL_Czech_Cp1250_CI_AS_KI_WI + 85: 'CP1250', // SQL_Hungarian_Cp1250_CS_AS_KI_WI + 86: 'CP1250', // SQL_Hungarian_Cp1250_CI_AS_KI_WI + 87: 'CP1250', // SQL_Polish_Cp1250_CS_AS_KI_WI + 88: 'CP1250', // SQL_Polish_Cp1250_CI_AS_KI_WI + 89: 'CP1250', // SQL_Romanian_Cp1250_CS_AS_KI_WI + 90: 'CP1250', // SQL_Romanian_Cp1250_CI_AS_KI_WI + 91: 'CP1250', // SQL_Croatian_Cp1250_CS_AS_KI_WI + 92: 'CP1250', // SQL_Croatian_Cp1250_CI_AS_KI_WI + 93: 'CP1250', // SQL_Slovak_Cp1250_CS_AS_KI_WI + 94: 'CP1250', // SQL_Slovak_Cp1250_CI_AS_KI_WI + 95: 'CP1250', // SQL_Slovenian_Cp1250_CS_AS_KI_WI + 96: 'CP1250', // SQL_Slovenian_Cp1250_CI_AS_KI_WI + 104: 'CP1251', // SQL_Latin1_General_1251_BIN + 105: 'CP1251', // SQL_Latin1_General_CP1251_CS_AS + 106: 'CP1251', // SQL_Latin1_General_CP1251_CI_AS + 107: 'CP1251', // SQL_Ukrainian_Cp1251_CS_AS_KI_WI + 108: 'CP1251', // SQL_Ukrainian_Cp1251_CI_AS_KI_WI + 112: 'CP1253', // SQL_Latin1_General_1253_BIN + 113: 'CP1253', // SQL_Latin1_General_CP1253_CS_AS + 114: 'CP1253', // SQL_Latin1_General_CP1253_CI_AS + 120: 'CP1253', // SQL_MixDiction_CP1253_CS_AS + 121: 'CP1253', // SQL_AltDiction_CP1253_CS_AS + 122: 'CP1253', // SQL_AltDiction2_CP1253_CS_AS + 124: 'CP1253', // SQL_Latin1_General_CP1253_CI_AI + 128: 'CP1254', // SQL_Latin1_General_1254_BIN + 129: 'CP1254', // SQL_Latin1_General_Cp1254_CS_AS_KI_WI + 130: 'CP1254', // SQL_Latin1_General_Cp1254_CI_AS_KI_WI + 136: 'CP1255', // SQL_Latin1_General_1255_BIN + 137: 'CP1255', // SQL_Latin1_General_CP1255_CS_AS + 138: 'CP1255', // SQL_Latin1_General_CP1255_CI_AS + 144: 'CP1256', // SQL_Latin1_General_1256_BIN + 145: 'CP1256', // SQL_Latin1_General_CP1256_CS_AS + 146: 'CP1256', // SQL_Latin1_General_CP1256_CI_AS + 152: 'CP1257', // SQL_Latin1_General_1257_BIN + 153: 'CP1257', // SQL_Latin1_General_CP1257_CS_AS + 154: 'CP1257', // SQL_Latin1_General_CP1257_CI_AS + 155: 'CP1257', // SQL_Estonian_Cp1257_CS_AS_KI_WI + 156: 'CP1257', // SQL_Estonian_Cp1257_CI_AS_KI_WI + 157: 'CP1257', // SQL_Latvian_Cp1257_CS_AS_KI_WI + 158: 'CP1257', // SQL_Latvian_Cp1257_CI_AS_KI_WI + 159: 'CP1257', // SQL_Lithuanian_Cp1257_CS_AS_KI_WI + 160: 'CP1257', // SQL_Lithuanian_Cp1257_CI_AS_KI_WI + 183: 'CP1252', // SQL_Danish_Pref_Cp1_CI_AS_KI_WI + 184: 'CP1252', // 
SQL_SwedishPhone_Pref_Cp1_CI_AS_KI_WI + 185: 'CP1252', // SQL_SwedishStd_Pref_Cp1_CI_AS_KI_WI + 186: 'CP1252', // SQL_Icelandic_Pref_Cp1_CI_AS_KI_WI + 210: 'CP1252', // SQL_EBCDIC037_CP1_CS_AS + 211: 'CP1252', // SQL_EBCDIC273_CP1_CS_AS + 212: 'CP1252', // SQL_EBCDIC277_CP1_CS_AS + 213: 'CP1252', // SQL_EBCDIC278_CP1_CS_AS + 214: 'CP1252', // SQL_EBCDIC280_CP1_CS_AS + 215: 'CP1252', // SQL_EBCDIC284_CP1_CS_AS + 216: 'CP1252', // SQL_EBCDIC285_CP1_CS_AS + 217: 'CP1252', // SQL_EBCDIC297_CP1_CS_AS +}; diff --git a/src/dataTypes.js b/src/dataTypes.js index 0caadf6..24f955e 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -138,6 +138,16 @@ const type = { hasScale: true, LengthOfDataLength: 1, dataLengthFromScale: true + }, + + //USHORTLEN_TYPE + [0xAF]: { + id: 0xAF, + type: 'BIGCHAR', + name: 'Char', + hasCollation: true, + LengthOfDataLength: 2, + maximumLength: 8000 } }; diff --git a/src/types.js b/src/types.js index a053531..348ff5f 100644 --- a/src/types.js +++ b/src/types.js @@ -1,9 +1,10 @@ /* @flow */ -type readStep = (reader: Reader) => ?readStep; +type readStep = (reader: Reader) =>?readStep; const Reader = require('./reader'); const TYPE = require('./dataTypes').TYPE; +const { codepageByLcid, codepageBySortId } = require('./collation'); class Collation { localeId: number @@ -36,26 +37,33 @@ class Collation { this.codepage = 'CP1252'; } - static fromBuffer(buffer: Buffer) : Collation { + static fromBuffer(buffer: Buffer): Collation { const collation = new Collation(); collation.localeId |= (buffer[2] & 0x0F) << 16; collation.localeId |= buffer[1] << 8; collation.localeId |= buffer[0]; - // This may not be extracting the correct nibbles in the correct order. - // collation.flags = buffer[3] >> 4; - // collation.flags |= buffer[2] & 0xF0; + let collationflag = (buffer[2] & 0xF0) >> 4; + collation.ignoreWidth = (collationflag & 8) ? true : false; + collation.ignoreKana = (collationflag & 4) ? true : false; + collation.ignoreAccent = (collationflag & 2) ? true : false; + collation.ignoreCase = (collationflag & 1) ? true : false; - // This may not be extracting the correct nibble. - collation.version = buffer[3] & 0x0F; + collationflag = buffer[3] & 0x0F; + collation.binary2 = (collationflag & 2) ? true : false; + collation.binary = (collationflag & 1) ? 
true : false; + + collation.version = (buffer[3] & 0xF0) >> 4; collation.sortId = buffer[4]; - const codepage = undefined; //codepageBySortId[collation.sortId] || codepageByLcid[collation.lcid]; - if (codepage) { - collation.codepage = codepage; + //TODO: Handle raw collation + let codepage = codepageBySortId[collation.sortId]; + if (!codepage) { + codepage = codepageByLcid[collation.localeId]; } + collation.codepage = codepage; return collation; } @@ -282,7 +290,8 @@ function readCollation(reader: Reader) { const token: TypeInfo = reader.stash[reader.stash.length - 1]; const type = TYPE[token.id]; if (type.hasCollation) { - console.log('readCollation not implemented'); + token.collation = Collation.fromBuffer(reader.readBuffer(0, 5)); + reader.consumeBytes(5); } return readSchema; } @@ -306,7 +315,7 @@ function readUDTInfo(reader: Reader) { reader.stash.push(token); return next; } - +/* function readGuidType(reader: Reader) { if (!reader.bytesAvailable(1)) { return; @@ -479,9 +488,10 @@ function readXmlTypeSchemaCollection(reader: Reader) { reader.stash.push(typeInfo); return next; } - + */ module.exports.readTypeInfo = readTypeInfo; module.exports.TypeInfo = TypeInfo; +module.exports.Collation = Collation; /* diff --git a/src/value-parser.js b/src/value-parser.js index 0785841..fda2857 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -1,12 +1,17 @@ /* @flow */ +const iconv = require('iconv-lite'); type readStep = (reader: Reader) =>?readStep; +import type { Collation } from './types'; + const Reader = require('./reader'); const TYPE = require('./dataTypes').TYPE; const guidParser = require('./guid-parser'); const MAX = (1 << 16) - 1; const THREE_AND_A_THIRD = 3 + (1 / 3); const MONEY_DIVISOR = 10000; +const NULL = (1 << 16) - 1; +const DEFAULT_ENCODING = 'utf8'; function valueParse(next: readStep, reader: Reader) { reader.stash.push(next); @@ -267,6 +272,15 @@ function readValue(reader: Reader) { return readDateTimeOffset(dataLength, reader); } + case 'Char': + const collation: Collation = token.typeInfo.collation; + const codepage = collation.codepage; + if (token.dataLength === MAX) { + // TODO: PLP support + } else { + return readChars(dataLength, codepage, NULL, reader); + } + default: console.log('readValue not implemented'); } @@ -398,4 +412,19 @@ function readMoney(reader: Reader) { return reader.stash.pop(); } +function readChars(dataLength: number, codepage: string, nullValue: ?any, reader: Reader) { + const token = reader.stash[reader.stash.length - 2]; + if (codepage == null) { + codepage = DEFAULT_ENCODING; + } + if (dataLength === nullValue) { + token.value = null; + } + else { + const data = reader.readBuffer(0, dataLength); + token.value = iconv.decode(data, codepage); + reader.consumeBytes(dataLength); + } + return reader.stash.pop(); +} module.exports.valueParse = valueParse; diff --git a/test/returnValue-test.js b/test/returnValue-test.js index 86d35ae..a050dfe 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -112,7 +112,7 @@ describe('Parsing a RETURNVALUE token', function() { describe('test VARLENTYPE-BYTELEN', function() { - let reader, data, paramOrdinal, paramName, status, userType, flag, typeid, dataLength, value, offset, tempBuff, tempOffset; + let reader, data, paramOrdinal, paramName, status, userType, flag, typeid, dataLength, value, offset, tempBuff, tempOffset, collation; before(function() { paramOrdinal = 1; @@ -130,7 +130,7 @@ describe('Parsing a RETURNVALUE token', function() { offset = tempOffset; }); - function 
addListners(done, token, nanoSec) { + function addListners(done, token, options) { reader.on('data', function(retValToken) { assert.instanceOf(retValToken, ReturnValueToken); token = retValToken; @@ -145,18 +145,23 @@ describe('Parsing a RETURNVALUE token', function() { if ((value !== null) && (typeid == 0x28 || typeid == 0x6F)) { assert.equalDate(token.value, value); - if (nanoSec) { assert.strictEqual(token.value.nanosecondsDelta, nanoSec); } + if (options && options.nanoSec) { assert.strictEqual(token.value.nanosecondsDelta, options.nanoSec); } } else if ((value !== null) && typeid == 0x29) { assert.equalTime(token.value, value); } else if ((value !== null) && (typeid == 0x2A || typeid == 0x2B)) { assert.equalDate(token.value, value); assert.equalTime(token.value, value); - if (nanoSec) { assert.strictEqual(token.value.nanosecondsDelta, nanoSec); } + if (options && options.nanoSec) { assert.strictEqual(token.value.nanosecondsDelta, options.nanoSec); } } else { assert.strictEqual(token.value, value); } + if ((value !== null) && (typeid == 0xAF && options && options.collation)) { + assert.strictEqual(token.typeInfo.collation.localeId, options.collation.LCID); + assert.strictEqual(token.typeInfo.collation.codepage, options.collation.codepage); + } + done(); }); } @@ -310,7 +315,6 @@ describe('Parsing a RETURNVALUE token', function() { value = null; tempBuff.copy(data); - // TYPE_INFO data.writeUInt8(typeid, offset++); data.writeUInt8(dataLength, offset++); @@ -659,7 +663,7 @@ describe('Parsing a RETURNVALUE token', function() { offset += dataLength; const token = {}; - addListners(done, token, nanoSec); + addListners(done, token, {nanoSec: nanoSec}); reader.end(data); }); @@ -745,7 +749,7 @@ describe('Parsing a RETURNVALUE token', function() { offset += dataLength; const token = {}; - addListners(done, token, nanoSec); + addListners(done, token, {nanoSec: nanoSec}); reader.end(data); }); @@ -773,7 +777,7 @@ describe('Parsing a RETURNVALUE token', function() { offset += dataLength; const token = {}; - addListners(done, token, nanoSec); + addListners(done, token, {nanoSec: nanoSec}); reader.end(data); }); @@ -801,7 +805,7 @@ describe('Parsing a RETURNVALUE token', function() { offset += dataLength; const token = {}; - addListners(done, token, nanoSec); + addListners(done, token, {nanoSec: nanoSec}); reader.end(data); }); @@ -830,7 +834,7 @@ describe('Parsing a RETURNVALUE token', function() { offset += dataLength; const token = {}; - addListners(done, token, nanoSec); + addListners(done, token, {nanoSec: nanoSec}); reader.end(data); }); @@ -859,7 +863,7 @@ describe('Parsing a RETURNVALUE token', function() { offset += dataLength; const token = {}; - addListners(done, token, nanoSec); + addListners(done, token, {nanoSec: nanoSec}); reader.end(data); }); @@ -887,6 +891,79 @@ describe('Parsing a RETURNVALUE token', function() { addListners(done, token); reader.end(data); }); + + it('should parse the BIGCHARTYPE(30) -collation(CI_AI_KS_WS) token correctly', function(done) { + data = Buffer.alloc(63); + tempBuff.copy(data); + + typeid = 0xAF; + dataLength = 30; + + const valueAsBuffer = Buffer.from([0x82, 0xCD, 0x82, 0xB6, 0x82, 0xDF, 0x82, 0xDC, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20]); + + value = 'はじめま '; + const codePage = Buffer.from([0x11, 0x04, 0x34, 0x30, 0x00]); //Japanese_Bushu_Kakusu_140_CI_AI_KS_WS_VSS + collation = { + LCID: 263185, + codepage: 'CP932' + }; + + // TYPE_INFO + 
data.writeUInt8(typeid, offset++); + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // COLLATION + MAXLEN + codePage.copy(data, offset); + offset += 5; + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token, {collation: collation}); + reader.end(data); + }); + + it('should parse the BIGCHARTYPE(30)-binary collation token correctly', function(done) { + data = Buffer.alloc(63); + tempBuff.copy(data); + + typeid = 0xAF; + dataLength = 30; + + const valueAsBuffer = Buffer.from([0x61, 0x62, 0x63, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20, 0x20]); + value = 'abc '; + const codePage = Buffer.from([0x09, 0x04, 0x00, 0x01, 0x1E]); //SQL_Latin1_General_Cp437_BIN + collation = { + LCID: 1033, + codepage: 'CP437' + }; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // COLLATION + MAXLEN + codePage.copy(data, offset); + offset += 5; + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + //TODO: add check for flags and LCID + addListners(done, token, collation); + reader.end(data); + }); + }); describe('test FIXEDLENTYPE', function() { From b4d588e5a6b43573b7f2b9cf2658eba8aab9ad69 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Fri, 17 Aug 2018 10:28:35 -0700 Subject: [PATCH 17/29] parser data in multiple tds packets --- src/reader.js | 38 +++++++++++++++++++++-- src/types.js | 13 ++++++-- src/value-parser.js | 65 ++++++++++++++++++++++++++++++++-------- test/returnValue-test.js | 42 ++++++++++++++++++++++++-- 4 files changed, 137 insertions(+), 21 deletions(-) diff --git a/src/reader.js b/src/reader.js index 09226af..8315f5a 100644 --- a/src/reader.js +++ b/src/reader.js @@ -49,7 +49,6 @@ const Reader = module.exports = class Reader extends Transform { this.version = version; this.position = 0; this.options = options; - this.stash = []; this.next = this.nextToken = nextToken; @@ -64,13 +63,46 @@ const Reader = module.exports = class Reader extends Transform { } readString(encoding: 'ucs2' | 'ascii' | 'utf8', start: number, end: number) { + if (this.shouldWait(end - start)) { + return; + } return this.buffer.toString(encoding, this.position + start, this.position + end); } + /** + * @param {Function} next: parser function to which read data should be passed + * @param {Function} cReader: readXXX function to call + * @param {Function} rest: parameters to be passed to ReadXXX + */ + readData(next: readStep, cReader: Function, ...rest: Array) { + this.stash.push(next, rest, cReader); + return this.invokeReadXXXMethod; + } + + invokeReadXXXMethod(reader: Reader) { + const func = reader.stash[reader.stash.length - 1 ]; + const param = reader.stash[reader.stash.length - 2 ]; + return func.apply(reader, param); + } + readBuffer(start: number, end: number) { - return this.buffer.slice(this.position + start, this.position + end); + if (this.shouldWait(end - start)) { + return; + } + const data = this.buffer.slice(this.position + start, this.position + end); + return this.doneReadXXX(data); } + doneReadXXX(data: ?any) { + const toCall = this.stash[this.stash.length - 3]; + this.stash.splice(this.stash.length - 3); + this.stash.push(data); + return toCall; + } + + shouldWait(length: number) 
{ + return !((this.position + length) <= this.buffer.length); + } readUInt8(offset: number) : number { return this.buffer.readUInt8(this.position + offset); } @@ -151,7 +183,7 @@ const Reader = module.exports = class Reader extends Transform { return callback(new Error('Expected Buffer')); } - this.buffer = Buffer.concat([ this.buffer, chunk ]); + this.buffer = Buffer.concat([ this.buffer.slice(this.position), chunk ]); this.position = 0; try { diff --git a/src/types.js b/src/types.js index 348ff5f..6c9d6df 100644 --- a/src/types.js +++ b/src/types.js @@ -97,7 +97,6 @@ class TypeInfo { function readTypeInfo(next: readStep, reader: Reader) { reader.stash.push(next); - return readTypeId; } @@ -290,9 +289,17 @@ function readCollation(reader: Reader) { const token: TypeInfo = reader.stash[reader.stash.length - 1]; const type = TYPE[token.id]; if (type.hasCollation) { - token.collation = Collation.fromBuffer(reader.readBuffer(0, 5)); - reader.consumeBytes(5); + return reader.readData(parseCollation, reader.readBuffer, 0, 5); } + else + return readSchema; +} + +function parseCollation(reader: Reader) { + const data = reader.stash.pop(); + const token: TypeInfo = reader.stash[reader.stash.length - 1]; + token.collation = Collation.fromBuffer(data); + reader.consumeBytes(5); return readSchema; } diff --git a/src/value-parser.js b/src/value-parser.js index fda2857..f012642 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -11,6 +11,7 @@ const MAX = (1 << 16) - 1; const THREE_AND_A_THIRD = 3 + (1 / 3); const MONEY_DIVISOR = 10000; const NULL = (1 << 16) - 1; +const PLP_NULL = new Buffer([0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]); const DEFAULT_ENCODING = 'utf8'; function valueParse(next: readStep, reader: Reader) { @@ -120,9 +121,8 @@ function readValue(reader: Reader) { token.value = null; return reader.stash.pop(); case 0x10: - token.value = guidParser.arrayToGuid(reader.readBuffer(0, dataLength)); - reader.consumeBytes(dataLength); - return reader.stash.pop(); + reader.stash.push(dataLength); + return readGUID; default: throw new Error('Unknown UniqueIdentifier length'); } @@ -273,12 +273,11 @@ function readValue(reader: Reader) { } case 'Char': - const collation: Collation = token.typeInfo.collation; - const codepage = collation.codepage; if (token.dataLength === MAX) { // TODO: PLP support } else { - return readChars(dataLength, codepage, NULL, reader); + reader.stash.push(dataLength); + return readChars; } default: @@ -412,19 +411,61 @@ function readMoney(reader: Reader) { return reader.stash.pop(); } -function readChars(dataLength: number, codepage: string, nullValue: ?any, reader: Reader) { +function readGUID(reader: Reader) { + const dataLength = reader.stash[reader.stash.length - 1]; + return reader.readData(parserGUID, reader.readBuffer, 0, dataLength); +} + +function parserGUID(reader: Reader) { + const data = reader.stash.pop(); + const dataLength = reader.stash.pop(); const token = reader.stash[reader.stash.length - 2]; - if (codepage == null) { - codepage = DEFAULT_ENCODING; + + token.value = guidParser.arrayToGuid(data); + reader.consumeBytes(dataLength); + return reader.stash.pop(); +} + +function readChars(reader: Reader) { + const dataLength = reader.stash[reader.stash.length - 1]; + const token = reader.stash[reader.stash.length - 3]; + + let nullValue; + switch (TYPE[token.typeInfo.id].name) { + case 'VarChar': + case 'Char': + nullValue = NULL; + break; + case 'Text': + nullValue = PLP_NULL; + break; + default: + console.log('Invalid type'); } + if 
(dataLength === nullValue) { token.value = null; + return reader.stash.pop(); } else { - const data = reader.readBuffer(0, dataLength); - token.value = iconv.decode(data, codepage); - reader.consumeBytes(dataLength); + return reader.readData(parseChar, reader.readBuffer, 0, dataLength); } +} + +function parseChar(reader: Reader) { + const data = reader.stash.pop(); + const dataLength = reader.stash.pop(); + const token = reader.stash[reader.stash.length - 2]; + const collation: Collation = token.typeInfo.collation; + let codepage = collation.codepage; + + if (codepage == null) { + codepage = DEFAULT_ENCODING; + } + + token.value = iconv.decode(data, codepage); + reader.consumeBytes(dataLength); return reader.stash.pop(); } + module.exports.valueParse = valueParse; diff --git a/test/returnValue-test.js b/test/returnValue-test.js index a050dfe..bffb303 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -142,7 +142,6 @@ describe('Parsing a RETURNVALUE token', function() { assert.strictEqual(token.status, status); assert.strictEqual(token.userType, userType); assert.strictEqual(token.typeInfo.id, typeid); - if ((value !== null) && (typeid == 0x28 || typeid == 0x6F)) { assert.equalDate(token.value, value); if (options && options.nanoSec) { assert.strictEqual(token.value.nanosecondsDelta, options.nanoSec); } @@ -152,8 +151,7 @@ describe('Parsing a RETURNVALUE token', function() { assert.equalDate(token.value, value); assert.equalTime(token.value, value); if (options && options.nanoSec) { assert.strictEqual(token.value.nanosecondsDelta, options.nanoSec); } - } - else { + } else { assert.strictEqual(token.value, value); } @@ -964,6 +962,44 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(data); }); + it('should parse the BIGCHARTYPE(5000)- collation token correctly', function(done) { + data = Buffer.alloc(4049); + tempBuff.copy(data); + typeid = 0xAF; + dataLength = 5000; + + let tempB = Buffer.alloc(4016, 0x20); + let valueAsBuffer = Buffer.from([0x73, 0x73]); + valueAsBuffer.copy(tempB, 0, 0, 2); + valueAsBuffer = tempB; + + value = 'ss' + ' '.repeat(4998); + const codePage = Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); // Latin1_General + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // COLLATION + MAXLEN + codePage.copy(data, offset); + offset += 5; + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + //TODO: add check for flags and LCID + addListners(done, token); + tempB = Buffer.alloc(984, 0x20); + reader.write(data); + reader.write(tempB); + reader.end(); + }); + }); describe('test FIXEDLENTYPE', function() { From 93da9d0d16d90be7f57bbc4cf11bd14ff5e91221 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Thu, 13 Sep 2018 15:44:30 -0700 Subject: [PATCH 18/29] fix: parsing multiple tds packets --- src/reader.js | 52 ++------ src/tokens/error/read.js | 30 ++--- src/tokens/infoerror/read.js | 5 +- src/tokens/loginack/read.js | 4 +- src/tokens/order/read.js | 3 + src/tokens/returnStatus/read.js | 4 + src/tokens/returnvalue/read.js | 27 +++- src/types.js | 37 ++++-- src/value-parser.js | 216 +++++++++++++++++++++++++------- 9 files changed, 250 insertions(+), 128 deletions(-) diff --git a/src/reader.js b/src/reader.js index 8315f5a..8005026 100644 --- a/src/reader.js +++ b/src/reader.js @@ -4,7 +4,7 @@ const Transform = require('stream').Transform; import type Token from './token'; 
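// Note on the readStep contract declared just below: each parser routine receives the Reader
// and returns the next step to run, or undefined when the buffer does not yet hold enough
// bytes, so parsing can resume from the same step once the next TDS packet is appended.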
-export type readStep = (reader: Reader) => ?readStep; +export type readStep = (reader: Reader) =>?readStep; function nextToken(reader) { if (!reader.bytesAvailable(1)) { @@ -63,79 +63,49 @@ const Reader = module.exports = class Reader extends Transform { } readString(encoding: 'ucs2' | 'ascii' | 'utf8', start: number, end: number) { - if (this.shouldWait(end - start)) { - return; - } return this.buffer.toString(encoding, this.position + start, this.position + end); } - /** - * @param {Function} next: parser function to which read data should be passed - * @param {Function} cReader: readXXX function to call - * @param {Function} rest: parameters to be passed to ReadXXX - */ - readData(next: readStep, cReader: Function, ...rest: Array) { - this.stash.push(next, rest, cReader); - return this.invokeReadXXXMethod; - } - - invokeReadXXXMethod(reader: Reader) { - const func = reader.stash[reader.stash.length - 1 ]; - const param = reader.stash[reader.stash.length - 2 ]; - return func.apply(reader, param); - } - readBuffer(start: number, end: number) { - if (this.shouldWait(end - start)) { - return; - } - const data = this.buffer.slice(this.position + start, this.position + end); - return this.doneReadXXX(data); - } - - doneReadXXX(data: ?any) { - const toCall = this.stash[this.stash.length - 3]; - this.stash.splice(this.stash.length - 3); - this.stash.push(data); - return toCall; + return this.buffer.slice(this.position + start, this.position + end); } shouldWait(length: number) { return !((this.position + length) <= this.buffer.length); } - readUInt8(offset: number) : number { + readUInt8(offset: number): number { return this.buffer.readUInt8(this.position + offset); } - readUInt16LE(offset: number) : number { + readUInt16LE(offset: number): number { return this.buffer.readUInt16LE(this.position + offset); } - readInt16LE(offset: number) : number { + readInt16LE(offset: number): number { return this.buffer.readInt16LE(this.position + offset); } - readUInt24LE(offset: number) : number { + readUInt24LE(offset: number): number { const low = this.buffer.readUInt16LE(this.position + offset); const high = this.buffer.readUInt8(this.position + offset + 2); return low | (high << 16); } - readUInt40LE(offset: number) : number { + readUInt40LE(offset: number): number { const low = this.buffer.readUInt32LE(this.position + offset); const high = this.buffer.readUInt8(this.position + offset + 4); return (0x100000000 * high) + low; } - readUInt32LE(offset: number) : number { + readUInt32LE(offset: number): number { return this.buffer.readUInt32LE(this.position + offset); } - readInt32LE(offset: number) : number { + readInt32LE(offset: number): number { return this.buffer.readInt32LE(this.position + offset); } - readUInt32BE(offset: number) : number { + readUInt32BE(offset: number): number { return this.buffer.readUInt32BE(this.position + offset); } @@ -183,7 +153,7 @@ const Reader = module.exports = class Reader extends Transform { return callback(new Error('Expected Buffer')); } - this.buffer = Buffer.concat([ this.buffer.slice(this.position), chunk ]); + this.buffer = Buffer.concat([this.buffer.slice(this.position), chunk]); this.position = 0; try { diff --git a/src/tokens/error/read.js b/src/tokens/error/read.js index 6af8eb0..7db8460 100644 --- a/src/tokens/error/read.js +++ b/src/tokens/error/read.js @@ -12,30 +12,26 @@ function readErrorToken(reader: Reader) { if (!reader.bytesAvailable(length)) { return; } - - const tokenData = reader.readBuffer(2, length); - reader.consumeBytes(length); + 
reader.consumeBytes(2); const token = new ErrorToken(); - token.number = tokenData.readUInt32LE(0); - token.state = tokenData.readUInt8(4); - token.class = tokenData.readUInt8(5); - + token.number = reader.readUInt32LE(0); + token.state = reader.readUInt8(4); + token.class = reader.readUInt8(5); let offset = 6; - const messageByteLength = tokenData.readUInt16LE(offset) * 2; - token.message = tokenData.toString('ucs2', offset += 2, offset += messageByteLength); - - const serverNameByteLength = tokenData.readUInt8(offset) * 2; - token.serverName = tokenData.toString('ucs2', offset += 1, offset += serverNameByteLength); - - const procNameByteLength = tokenData.readUInt8(offset) * 2; - token.procName = tokenData.toString('ucs2', offset += 1, offset += procNameByteLength); + const messageByteLength = reader.readUInt16LE(offset) * 2; + token.message = reader.readBuffer(offset += 2, offset += messageByteLength).toString('ucs2'); + const serverNameByteLength = reader.readUInt8(offset) * 2; + token.serverName = reader.readBuffer(offset += 1, offset += serverNameByteLength).toString('ucs2'); + const procNameByteLength = reader.readUInt8(offset) * 2; + token.procName = reader.readBuffer(offset += 1, offset += procNameByteLength).toString('ucs2'); if (reader.version >= 0x72090002) { - token.lineNumber = tokenData.readUInt32LE(offset); + token.lineNumber = reader.readUInt32LE(offset); } else { - token.lineNumber = tokenData.readUInt16LE(offset); + token.lineNumber = reader.readUInt16LE(offset); } + reader.consumeBytes(length); reader.push(token); diff --git a/src/tokens/infoerror/read.js b/src/tokens/infoerror/read.js index 7c267ee..9c359ef 100644 --- a/src/tokens/infoerror/read.js +++ b/src/tokens/infoerror/read.js @@ -8,11 +8,10 @@ function readInfoErrorToken(reader: Reader) { } const length = reader.readUInt16LE(0); - - reader.consumeBytes(2); - if (!reader.bytesAvailable(length)) { + if (!reader.bytesAvailable(2 + length)) { return; } + reader.consumeBytes(2); let offset = 0; const token = new InfoErrorToken(); diff --git a/src/tokens/loginack/read.js b/src/tokens/loginack/read.js index f8f3878..6e2e810 100644 --- a/src/tokens/loginack/read.js +++ b/src/tokens/loginack/read.js @@ -8,10 +8,10 @@ function readLoginAckToken(reader: Reader) { } const length = reader.readUInt16LE(0); - reader.consumeBytes(2); - if (!reader.bytesAvailable(length)) { + if (!reader.bytesAvailable(2 + length)) { return; } + reader.consumeBytes(2); const token = new LoginAckToken(); let offset = 0; diff --git a/src/tokens/order/read.js b/src/tokens/order/read.js index 83c410b..9ffa550 100644 --- a/src/tokens/order/read.js +++ b/src/tokens/order/read.js @@ -22,6 +22,9 @@ function parseColumnOrder(reader: Reader) { return reader.nextToken; } + if (!reader.bytesAvailable(2)) { + return; + } const colNumber = reader.readUInt16LE(0); reader.consumeBytes(2); token.orderColumns.push(colNumber); diff --git a/src/tokens/returnStatus/read.js b/src/tokens/returnStatus/read.js index 9287ad7..3e12566 100644 --- a/src/tokens/returnStatus/read.js +++ b/src/tokens/returnStatus/read.js @@ -3,6 +3,10 @@ import type Reader from '../../reader'; function readReturnStatus(reader: Reader) { + if (!reader.bytesAvailable(4)) { + return; + } + const value = reader.readInt32LE(0); reader.consumeBytes(4); diff --git a/src/tokens/returnvalue/read.js b/src/tokens/returnvalue/read.js index 0e91349..0231b13 100644 --- a/src/tokens/returnvalue/read.js +++ b/src/tokens/returnvalue/read.js @@ -4,13 +4,29 @@ import type Reader from '../../reader'; import type 
{ TypeInfo } from '../../types'; function readReturnValueToken(reader: Reader) { + if (!reader.bytesAvailable(2)) { + return; + } + const token = new ReturnValueToken(); + token.paramOrdinal = reader.readUInt16LE(0); + reader.consumeBytes(2); + reader.stash.push(token); + return readReturnValueParamName; +} - let offset = 0; - token.paramOrdinal = reader.readUInt16LE(offset); - offset += 2; - const paramLength = reader.readUInt8(offset) * 2; - offset += 1; +function readReturnValueParamName(reader: Reader) { + if (!reader.bytesAvailable(1)) { + return; + } + + const paramLength = reader.readUInt8(0) * 2; + if (!reader.bytesAvailable(1 /*paramLength*/ + paramLength + 1 /*status*/)) { + return; + } + let offset = 1; + + const token: ReturnValueToken = reader.stash[reader.stash.length - 1]; token.paramName = reader.readString('ucs2', offset, offset + paramLength); offset += paramLength; @@ -20,6 +36,7 @@ function readReturnValueToken(reader: Reader) { reader.stash.push(token); return parseUserType; + } function parseUserType(reader: Reader) { diff --git a/src/types.js b/src/types.js index 6c9d6df..4981657 100644 --- a/src/types.js +++ b/src/types.js @@ -194,13 +194,13 @@ function readTypeId(reader: Reader) { } function readDataLength(reader: Reader) { - const token: TypeInfo = reader.stash.pop(); + const token: TypeInfo = reader.stash[reader.stash.length - 1]; const type = TYPE[token.id]; if ((token.id & 0x30) === 0x20) { // VARLEN_TYPE if (type.dataLengthFromScale) { - reader.stash.push(token); return readScale; } else if (type.fixedDataLength) { + reader.stash.pop(); // pop the token const next = reader.stash.pop(); reader.stash.push(token); return next; @@ -211,6 +211,9 @@ function readDataLength(reader: Reader) { token.dataLength = undefined; break; case 1: + if (!reader.bytesAvailable(1)) { + return; + } token.dataLength = reader.readUInt8(0); reader.consumeBytes(1); switch (token.dataLength) { @@ -243,20 +246,26 @@ function readDataLength(reader: Reader) { } break; case 2: + if (!reader.bytesAvailable(2)) { + return; + } token.dataLength = reader.readUInt16LE(0); reader.consumeBytes(2); break; case 4: + if (!reader.bytesAvailable(4)) { + return; + } token.dataLength = reader.readUInt32LE(0); reader.consumeBytes(4); break; default: throw new Error('Unsupported dataLengthLength ' + type.LengthOfDataLength + ' for data type ' + type.name); } - reader.stash.push(token); return readPrecision; } } else { + reader.stash.pop(); // remove the token // token.dataLength is not needed for FIXEDLENTYPE type const next = reader.stash.pop(); reader.stash.push(token); @@ -269,6 +278,9 @@ function readPrecision(reader: Reader) { const token: TypeInfo = reader.stash[reader.stash.length - 1]; const type = TYPE[token.id]; if (type.hasPrecision) { + if (!reader.bytesAvailable(1)) { + return; + } token.precision = reader.readUInt8(0); reader.consumeBytes(1); } @@ -279,6 +291,9 @@ function readScale(reader: Reader) { const token: TypeInfo = reader.stash[reader.stash.length - 1]; const type = TYPE[token.id]; if (type.hasScale) { + if (!reader.bytesAvailable(1)) { + return; + } token.scale = reader.readUInt8(0); reader.consumeBytes(1); } @@ -289,17 +304,13 @@ function readCollation(reader: Reader) { const token: TypeInfo = reader.stash[reader.stash.length - 1]; const type = TYPE[token.id]; if (type.hasCollation) { - return reader.readData(parseCollation, reader.readBuffer, 0, 5); + if (!reader.bytesAvailable(5)) { + return; + } + const data = reader.readBuffer(0, 5); + token.collation = 
Collation.fromBuffer(data); + reader.consumeBytes(5); } - else - return readSchema; -} - -function parseCollation(reader: Reader) { - const data = reader.stash.pop(); - const token: TypeInfo = reader.stash[reader.stash.length - 1]; - token.collation = Collation.fromBuffer(data); - reader.consumeBytes(5); return readSchema; } diff --git a/src/value-parser.js b/src/value-parser.js index f012642..cb62c61 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -26,9 +26,8 @@ function readDataLength(reader: Reader) { switch (token.typeInfo.id & 0x30) { case 0x10: // xx01xxxx - s2.2.4.2.1.1 // token.value = 0; - // reader.stash.push(0); reader.stash.push(0); - //TODO: test this + //TODO: add test for this case return readValue; case 0x20: // xx10xxxx - s2.2.4.2.1.3 @@ -36,14 +35,24 @@ function readDataLength(reader: Reader) { if (token.typeInfo.dataLength !== MAX) { switch (TYPE[token.typeInfo.id].LengthOfDataLength) { case 1: // BYTELEN + // for RETURNVALUE_TOKEN all the flags should be zero (TDS 2.2.7.18) + if (!reader.bytesAvailable(1)) { + return; + } reader.stash.push(reader.readUInt8(0)); reader.consumeBytes(1); return readValue; case 2: // USHORTCHARBINLEN + if (!reader.bytesAvailable(2)) { + return; + } reader.stash.push(reader.readUInt16LE(0)); reader.consumeBytes(2); return readValue; case 4: // LONGLEN + if (!reader.bytesAvailable(4)) { + return; + } reader.stash.push(reader.readUInt32LE(0)); reader.consumeBytes(4); return readValue; @@ -65,53 +74,100 @@ function readDataLength(reader: Reader) { } function readValue(reader: Reader) { - const dataLength = reader.stash.pop(); - const token = reader.stash[reader.stash.length - 2]; + const dataLength = reader.stash[reader.stash.length - 1]; + const token = reader.stash[reader.stash.length - 3]; switch (TYPE[token.typeInfo.id].name) { // Fixed-Length Data Types case 'Null': token.value = null; + reader.stash.pop(); // remove dataLength return reader.stash.pop(); + case 'TinyInt': + if (!reader.bytesAvailable(1)) { + return; + } token.value = reader.readUInt8(0); reader.consumeBytes(1); + reader.stash.pop(); // remove dataLength return reader.stash.pop(); + case 'Bit': + if (!reader.bytesAvailable(1)) { + return; + } token.value = !!reader.readUInt8(0); reader.consumeBytes(1); + reader.stash.pop(); // remove dataLength return reader.stash.pop(); + case 'SmallInt': + if (!reader.bytesAvailable(2)) { + return; + } token.value = reader.readInt16LE(0); reader.consumeBytes(2); + reader.stash.pop(); // remove dataLength return reader.stash.pop(); + case 'Int': + if (!reader.bytesAvailable(4)) { + return; + } token.value = reader.readInt32LE(0); reader.consumeBytes(4); + reader.stash.pop(); // remove dataLength return reader.stash.pop(); + case 'BigInt': + if (!reader.bytesAvailable(8)) { + return; + } //TODO: replace with better alternative to avoid overflow and to read -ve value token.value = reader.readUInt64LE(0); reader.consumeBytes(8); + reader.stash.pop(); // remove dataLength return reader.stash.pop(); + case 'SmallDateTime': return readSmallDateTime; + case 'Real': + if (!reader.bytesAvailable(4)) { + return; + } token.value = reader.readFloatLE(0); reader.consumeBytes(4); + reader.stash.pop(); // remove dataLength return reader.stash.pop(); + case 'Money': + if (!reader.bytesAvailable(8)) { + return; + } return readMoney; + case 'DateTime': return readDateTime; + case 'Float': + if (!reader.bytesAvailable(8)) { + return; + } token.value = reader.readDoubleLE(0); reader.consumeBytes(8); + reader.stash.pop(); // remove dataLength 
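  // At this point the stash holds [..., token, next-step, dataLength]: the pop above discards
  // dataLength, and the pop below returns the stored next-step so parsing continues there.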
return reader.stash.pop(); + case 'SmallMoney': + if (!reader.bytesAvailable(4)) { + return; + } token.value = reader.readInt32LE(0) / MONEY_DIVISOR; reader.consumeBytes(4); + reader.stash.pop(); // remove dataLength return reader.stash.pop(); // Variable-Length Data Types @@ -119,9 +175,9 @@ function readValue(reader: Reader) { switch (dataLength) { case 0: token.value = null; + reader.stash.pop(); // remove dataLength return reader.stash.pop(); case 0x10: - reader.stash.push(dataLength); return readGUID; default: throw new Error('Unknown UniqueIdentifier length'); @@ -130,46 +186,70 @@ function readValue(reader: Reader) { switch (dataLength) { case 0: token.value = null; - return reader.stash.pop(); + break; case 1: // TinyInt + if (!reader.bytesAvailable(1)) { + return; + } token.value = reader.readUInt8(0); reader.consumeBytes(1); - return reader.stash.pop(); + break; case 2: // SmallInt + if (!reader.bytesAvailable(2)) { + return; + } token.value = reader.readInt16LE(0); reader.consumeBytes(2); - return reader.stash.pop(); + break; case 4: // Int + if (!reader.bytesAvailable(4)) { + return; + } token.value = reader.readInt32LE(0); reader.consumeBytes(4); - return reader.stash.pop(); + break; case 8: // BigInt + if (!reader.bytesAvailable(8)) { + return; + } // TODO: replace with better alternative to avoid overflow and to read -ve value token.value = reader.readUInt64LE(0); reader.consumeBytes(8); - return reader.stash.pop(); + break; default: throw new Error('Unknown length for integer datatype'); } + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); case 'BitN': switch (dataLength) { case 0: token.value = null; - return reader.stash.pop(); + break; case 1: + if (!reader.bytesAvailable(1)) { + return; + } token.value = !!reader.readUInt8(0); reader.consumeBytes(1); - return reader.stash.pop(); + break; } + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); case 'NumericN': if (dataLength === 0) { token.value = null; + reader.stash.pop(); // remove dataLength return reader.stash.pop(); } + if (!reader.bytesAvailable(dataLength)) { + return; + } let sign = reader.readUInt8(0); reader.consumeBytes(1); sign = sign === 1 ? 
1 : -1; let value; + switch (dataLength - 1) { case 4: value = reader.readUInt32LE(0); @@ -191,13 +271,17 @@ function readValue(reader: Reader) { throw new Error(`Unsupported numeric size ${dataLength - 1}`); } token.value = (value * sign) / Math.pow(10, token.typeInfo.scale); + reader.stash.pop(); // remove dataLength return reader.stash.pop(); case 'FloatN': + if (!reader.bytesAvailable(dataLength)) { + return; + } switch (dataLength) { case 0: token.value = null; - return reader.stash.pop(); + break; case 4: token.value = reader.readFloatLE(0); reader.consumeBytes(4); @@ -209,16 +293,22 @@ function readValue(reader: Reader) { default: throw new Error('Unsupported dataLength ' + dataLength + ' for FloatN'); } + reader.stash.pop(); // remove dataLength return reader.stash.pop(); case 'MoneyN': + if (!reader.bytesAvailable(dataLength)) { + return; + } switch (dataLength) { case 0: token.value = null; + reader.stash.pop(); // remove dataLength return reader.stash.pop(); case 4: token.value = reader.readInt32LE(0) / MONEY_DIVISOR; reader.consumeBytes(4); + reader.stash.pop(); // remove dataLength return reader.stash.pop(); case 8: return readMoney; @@ -230,6 +320,7 @@ function readValue(reader: Reader) { switch (dataLength) { case 0: token.value = null; + reader.stash.pop(); // remove dataLength return reader.stash.pop(); case 3: return readDateN; @@ -240,15 +331,17 @@ function readValue(reader: Reader) { case 'Time': if (dataLength === 0) { token.value = null; + reader.stash.pop(); // remove dataLength return reader.stash.pop(); } else { - return readTimeN(dataLength, reader); + return readTimeN(reader); } case 'DateTimeN': switch (dataLength) { case 0: token.value = null; + reader.stash.pop(); // remove dataLength return reader.stash.pop(); case 4: return readSmallDateTime; @@ -259,24 +352,25 @@ function readValue(reader: Reader) { case 'DateTime2': if (dataLength === 0) { token.value = null; + reader.stash.pop(); // remove dataLength return reader.stash.pop(); } else { - return readDateTime2N(dataLength, reader); + return readDateTime2N(reader); } case 'DateTimeOffset': if (dataLength === 0) { token.value = null; + reader.stash.pop(); // remove dataLength return reader.stash.pop(); } else { - return readDateTimeOffset(dataLength, reader); + return readDateTimeOffset(reader); } case 'Char': if (token.dataLength === MAX) { // TODO: PLP support } else { - reader.stash.push(dataLength); return readChars; } @@ -286,6 +380,10 @@ function readValue(reader: Reader) { } function readSmallDateTime(reader: Reader) { + if (!reader.bytesAvailable(4)) { + return; + } + reader.stash.pop(); // datalength const token = reader.stash[reader.stash.length - 2]; const days = reader.readUInt16LE(0); const minutes = reader.readUInt16LE(2); @@ -299,6 +397,10 @@ function readSmallDateTime(reader: Reader) { } function readDateTime(reader: Reader) { + if (!reader.bytesAvailable(8)) { + return; + } + reader.stash.pop(); // remove dataLength const token = reader.stash[reader.stash.length - 2]; const days = reader.readUInt32LE(0); const threeHundredthsOfSecond = reader.readUInt32LE(4); @@ -312,9 +414,15 @@ function readDateTime(reader: Reader) { return reader.stash.pop(); } -function readTimeN(dataLength: number, reader: Reader) { - const token = reader.stash[reader.stash.length - 2]; +function readTimeN(reader: Reader) { + const token = reader.stash[reader.stash.length - 3]; + const dataLength = reader.stash[reader.stash.length - 1]; + if (!reader.bytesAvailable(dataLength)) { + return; + } token.value = 
readTime(dataLength, token.typeInfo.scale, reader); + reader.consumeBytes(dataLength); + reader.stash.pop(); // remove dataLength return reader.stash.pop(); } @@ -323,15 +431,12 @@ function readTime(dataLength: number, scale: number, reader: Reader) { switch (dataLength) { case 3: value = reader.readUInt24LE(0); - reader.consumeBytes(3); break; case 4: value = reader.readUInt32LE(0); - reader.consumeBytes(4); break; case 5: value = reader.readUInt40LE(0); - reader.consumeBytes(5); break; default: throw new Error(`Unknown length ${dataLength} for temporal datatype`); @@ -358,8 +463,13 @@ function readTime(dataLength: number, scale: number, reader: Reader) { } function readDateN(reader: Reader) { - const token = reader.stash[reader.stash.length - 2]; + if (!reader.bytesAvailable(3)) { + return; + } + const token = reader.stash[reader.stash.length - 3]; token.value = readDate(undefined, reader.options.useUTC, reader); + reader.consumeBytes(3); + reader.stash.pop(); // remove dataLength return reader.stash.pop(); } @@ -371,17 +481,27 @@ function readDate(time: ?Date, useUTC: boolean, reader: Reader) { } else { value = new Date(2000, 0, days - 730118, time ? (time.getHours(), time.getMinutes(), time.getSeconds(), time.getMilliseconds()) : 0); } - reader.consumeBytes(3); return value; } -function readDateTime2N(dataLength: number, reader: Reader) { +function readDateTime2N(reader: Reader) { + const dataLength = reader.stash[reader.stash.length - 1]; + if (!reader.bytesAvailable(dataLength)) { + return; + } + reader.stash.pop(); // remove dataLength const token = reader.stash[reader.stash.length - 2]; token.value = readDateTime2(dataLength - 3, token.typeInfo.scale, reader.options.useUTC, reader); return reader.stash.pop(); } -function readDateTimeOffset(dataLength: number, reader: Reader) { +function readDateTimeOffset(reader: Reader) { + const dataLength = reader.stash[reader.stash.length - 1]; + if (!reader.bytesAvailable(dataLength)) { + return; + } + reader.stash.pop(); // remove dataLength + const token = reader.stash[reader.stash.length - 2]; const date = readDateTime2(dataLength - 5, token.typeInfo.scale, true, reader); reader.readInt16LE(0); // TODO: add implementation to handle offset @@ -392,7 +512,9 @@ function readDateTimeOffset(dataLength: number, reader: Reader) { function readDateTime2(dataLength: number, scale: number, useUTC: boolean, reader: Reader) { const time = readTime(dataLength, scale, reader); + reader.consumeBytes(dataLength); const date = readDate(time, useUTC, reader); + reader.consumeBytes(3); // $FlowFixMe Object.defineProperty(date, 'nanosecondsDelta', { enumerable: false, @@ -403,6 +525,7 @@ function readDateTime2(dataLength: number, scale: number, useUTC: boolean, reade } function readMoney(reader: Reader) { + reader.stash.pop(); // remove dataLength const token = reader.stash[reader.stash.length - 2]; const high = reader.readInt32LE(0); const low = reader.readUInt32LE(4); @@ -412,15 +535,15 @@ function readMoney(reader: Reader) { } function readGUID(reader: Reader) { + const dataLength = reader.stash[reader.stash.length - 1]; - return reader.readData(parserGUID, reader.readBuffer, 0, dataLength); -} + if (!reader.bytesAvailable(dataLength)) { + return; + } + reader.stash.pop(); // remove dataLength + const data = reader.readBuffer(0, dataLength); -function parserGUID(reader: Reader) { - const data = reader.stash.pop(); - const dataLength = reader.stash.pop(); const token = reader.stash[reader.stash.length - 2]; - token.value = guidParser.arrayToGuid(data); 
reader.consumeBytes(dataLength); return reader.stash.pop(); @@ -428,6 +551,9 @@ function parserGUID(reader: Reader) { function readChars(reader: Reader) { const dataLength = reader.stash[reader.stash.length - 1]; + if (!reader.bytesAvailable(dataLength)) { + return; + } const token = reader.stash[reader.stash.length - 3]; let nullValue; @@ -445,27 +571,23 @@ function readChars(reader: Reader) { if (dataLength === nullValue) { token.value = null; + reader.stash.pop(); // remove dataLength return reader.stash.pop(); } else { - return reader.readData(parseChar, reader.readBuffer, 0, dataLength); - } -} + const data = reader.readBuffer(0, dataLength); + const collation: Collation = token.typeInfo.collation; + let codepage = collation.codepage; -function parseChar(reader: Reader) { - const data = reader.stash.pop(); - const dataLength = reader.stash.pop(); - const token = reader.stash[reader.stash.length - 2]; - const collation: Collation = token.typeInfo.collation; - let codepage = collation.codepage; + if (codepage == null) { + codepage = DEFAULT_ENCODING; + } - if (codepage == null) { - codepage = DEFAULT_ENCODING; + token.value = iconv.decode(data, codepage); + reader.consumeBytes(dataLength); + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); } - - token.value = iconv.decode(data, codepage); - reader.consumeBytes(dataLength); - return reader.stash.pop(); } module.exports.valueParse = valueParse; From 5920bda6dc98c2f4fecf42f4f512bdac87495768 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Thu, 13 Sep 2018 15:46:09 -0700 Subject: [PATCH 19/29] fix: error token parser Error token is sent by Server, so removing writer. Also, fixing failing unit test --- src/tokens/error/index.js | 1 - src/tokens/error/write.js | 65 ----------------------- src/writer.js | 5 -- test/tokens/error-test.js | 107 +++++++++++++++----------------------- 4 files changed, 43 insertions(+), 135 deletions(-) delete mode 100644 src/tokens/error/write.js diff --git a/src/tokens/error/index.js b/src/tokens/error/index.js index 228e22b..ebebe78 100644 --- a/src/tokens/error/index.js +++ b/src/tokens/error/index.js @@ -29,4 +29,3 @@ class ErrorToken extends Token { module.exports = ErrorToken; ErrorToken.read = require('./read'); -ErrorToken.write = require('./write'); diff --git a/src/tokens/error/write.js b/src/tokens/error/write.js deleted file mode 100644 index 2a7fe8a..0000000 --- a/src/tokens/error/write.js +++ /dev/null @@ -1,65 +0,0 @@ -/* @flow */ - -import type Writer from '../../writer'; -import type Token from '../../token'; - -function writeErrorToken(stream: Writer, token: Token) { - if (!(token instanceof ErrorToken)) { - throw new Error('Expected instance of ErrorToken'); - } - - const messageBuffer = Buffer.from(token.message, 'ucs2'); - const serverNameBuffer = Buffer.from(token.serverName, 'ucs2'); - const procNameBuffer = Buffer.from(token.procName, 'ucs2'); - - const chunkLength = (stream.version < 0x72090002 ? 
15 : 17) + messageBuffer.length + serverNameBuffer.length + procNameBuffer.length; - const chunk = Buffer.alloc(chunkLength); - - // TokenType - chunk.writeUInt8(0xAA, 0); - - // Length - chunk.writeUInt16LE(chunkLength - 1, 1); - - // Number - chunk.writeUInt32LE(token.number, 3); - - // State - chunk.writeUInt8(token.state, 7); - - // Class - chunk.writeUInt8(token.class, 8); - - let offset = 9; - - // MsgText - chunk.writeUInt16LE(messageBuffer.length / 2, offset); - offset += 2; - messageBuffer.copy(chunk, offset); - offset += messageBuffer.length; - - // ServerName - chunk.writeUInt8(serverNameBuffer.length / 2, offset); - offset += 1; - serverNameBuffer.copy(chunk, offset); - offset += serverNameBuffer.length; - - // ProcName - chunk.writeUInt8(procNameBuffer.length / 2, offset); - offset += 1; - procNameBuffer.copy(chunk, offset); - offset += procNameBuffer.length; - - // LineNumber - if (stream.version < 0x72090002) { - chunk.writeUInt16LE(token.lineNumber, offset); - } else { - chunk.writeUInt32LE(token.lineNumber, offset); - } - - stream.push(chunk); -} - -module.exports = writeErrorToken; - -const ErrorToken = require('.'); diff --git a/src/writer.js b/src/writer.js index f31435f..6e2e1fc 100644 --- a/src/writer.js +++ b/src/writer.js @@ -25,10 +25,6 @@ class Writer extends Transform { case 0xFD: writeDoneToken(this, token); break; - - case 0xAA: - writeErrorToken(this, token); - break; } } catch (e) { return callback(e); @@ -41,4 +37,3 @@ class Writer extends Transform { module.exports = Writer; const writeDoneToken = require('./tokens/done/write'); -const writeErrorToken = require('./tokens/error/write'); diff --git a/test/tokens/error-test.js b/test/tokens/error-test.js index 7597b1e..3194947 100644 --- a/test/tokens/error-test.js +++ b/test/tokens/error-test.js @@ -1,82 +1,61 @@ /* @flow */ const assert = require('chai').assert; -const Writer = require('../../src').Writer; const Reader = require('../../src').Reader; const ErrorToken = require('../../src/tokens/error'); -describe('Writing an `ERROR` token', function() { - let writer; +describe('Reading an `ERROR` token', function() { + let reader; beforeEach(function() { - writer = new Writer(0x07000000); + reader = new Reader(0x74000004); }); - it('should convert the token correctly', function(done) { - const token = new ErrorToken(); - token.number = 5701; - token.state = 2; - token.message = "Changed database context to 'master'."; - - const chunks = []; - - writer.on('error', done); - - writer.on('data', function(chunk) { - chunks.push(chunk); + it('should parse ERROR token correctly', function(done) { + const buffer = Buffer.alloc(88); + const num = 102; + const state = 1; + const clazz = 15; + const message = 'Incorrect syntax near \'*\'.'; + const server = 'localhost'; + let offset = 0; + + buffer.writeUInt8(0xAA, offset++); + buffer.writeUInt16LE(85, offset); // length + offset += 2; + buffer.writeUInt32LE(num, offset); // number + offset += 4; + buffer.writeUInt8(state, offset++); // state + buffer.writeUInt8(clazz, offset++); // class + buffer.writeUInt16LE(message.length, offset); + offset += 2; + buffer.write(message, offset, message.length * 2, 'ucs2'); // message + offset += message.length * 2; + buffer.writeUInt8(server.length, offset++); + buffer.write(server, offset, server.length * 2, 'ucs2'); // serverName + offset += server.length * 2; + + buffer.writeUInt8(0, offset++); + buffer.writeUInt32LE(1, offset); + offset += 2; + + let token; + reader.on('data', function(retValToken) { + 
assert.instanceOf(retValToken, ErrorToken); + token = retValToken; }); - - writer.on('end', function() { - const result = Buffer.concat(chunks); - - assert.lengthOf(result, 89); - assert.deepEqual(result, Buffer.from([ - 0xaa, - // Length - 0x58, 0x00, - - // Number - 0x45, 0x16, 0x00, 0x00, - - // State - 0x02, - - // Class - 0x00, - - // MsgText - 0x25, 0x00, - 0x43, 0x00, 0x68, 0x00, 0x61, 0x00, 0x6E, 0x00, 0x67, 0x00, 0x65, 0x00, 0x64, 0x00, 0x20, 0x00, - 0x64, 0x00, 0x61, 0x00, 0x74, 0x00, 0x61, 0x00, 0x62, 0x00, 0x61, 0x00, 0x73, 0x00, 0x65, 0x00, - 0x20, 0x00, 0x63, 0x00, 0x6F, 0x00, 0x6E, 0x00, 0x74, 0x00, 0x65, 0x00, 0x78, 0x00, 0x74, 0x00, - 0x20, 0x00, 0x74, 0x00, 0x6F, 0x00, 0x20, 0x00, 0x27, 0x00, 0x6D, 0x00, 0x61, 0x00, 0x73, 0x00, - 0x74, 0x00, 0x65, 0x00, 0x72, 0x00, 0x27, 0x00, 0x2E, 0x00, - - // ServerName - 0x00, - - // ProcName - 0x00, - - // LineNumber - 0x00, 0x00 - ])); - + reader.on('end', function() { + assert.strictEqual(num, token.number); + assert.strictEqual(state, token.state); + assert.strictEqual(clazz, token.class); + assert.strictEqual(message, token.message); + assert.strictEqual(server, token.serverName); done(); }); - - writer.end(token); - }); -}); - -describe('Reading an `ERROR` token', function() { - let reader; - - beforeEach(function() { - reader = new Reader(0x07000000); + reader.end(buffer); }); - it('should parse the token correctly', function(done) { + it.skip('should parse the token correctly', function(done) { const buffer = Buffer.from([ 0xaa, // Length From 50d72995d314b335cf8d679abfd274a4cfae3efc Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Wed, 3 Oct 2018 14:29:54 -0700 Subject: [PATCH 20/29] feat: add parsing for nchar --- .gitattributes | 1 + src/dataTypes.js | 9 ++++++ src/types.js | 2 +- src/value-parser.js | 40 +++++++++++++++++++++++ test/returnValue-test.js | 70 ++++++++++++++++++++++++++++++++++++++++ 5 files changed, 121 insertions(+), 1 deletion(-) create mode 100644 .gitattributes diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..07764a7 --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +* text eol=lf \ No newline at end of file diff --git a/src/dataTypes.js b/src/dataTypes.js index 24f955e..1ffdb49 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -148,6 +148,15 @@ const type = { hasCollation: true, LengthOfDataLength: 2, maximumLength: 8000 + }, + + [0xEF]: { + id: 0xEF, + type: 'NCHAR', + name: 'NChar', + hasCollation: true, + LengthOfDataLength: 2, + maximumLength: 4000 } }; diff --git a/src/types.js b/src/types.js index 4981657..d2dc841 100644 --- a/src/types.js +++ b/src/types.js @@ -216,7 +216,7 @@ function readDataLength(reader: Reader) { } token.dataLength = reader.readUInt8(0); reader.consumeBytes(1); - switch (token.dataLength) { + switch (token.id) { case 0x24: // GUIDTYPE if (token.dataLength != 0x00 && token.dataLength != 0x10) { throw new Error('Invalid data length for GUIDTYPE'); diff --git a/src/value-parser.js b/src/value-parser.js index cb62c61..d89d3fa 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -374,6 +374,13 @@ function readValue(reader: Reader) { return readChars; } + + case 'NChar': + if (token.dataLength === MAX) { + // TODO: PLP support + } else { + return readNChars; + } default: console.log('readValue not implemented'); } @@ -590,4 +597,37 @@ function readChars(reader: Reader) { } } +function readNChars(reader: Reader) { + const dataLength = reader.stash[reader.stash.length - 1]; + if (!reader.bytesAvailable(dataLength)) { + return; + } + 
const token = reader.stash[reader.stash.length - 3]; + + let nullValue; + switch (TYPE[token.typeInfo.id].name) { + case 'NVarChar': + case 'NChar': + nullValue = NULL; + break; + case 'NText': + nullValue = PLP_NULL; + break; + default: + console.log('Invalid type'); + } + + if (dataLength === nullValue) { + token.value = null; + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); + } + else { + const data = reader.readBuffer(0, dataLength); + token.value = data.toString('ucs2'); + reader.consumeBytes(dataLength); + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); + } +} module.exports.valueParse = valueParse; diff --git a/test/returnValue-test.js b/test/returnValue-test.js index bffb303..5549d0a 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -1000,6 +1000,76 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(); }); + it('should parse the NCHARTYPE(30)- token correctly', function(done) { + data = Buffer.alloc(94); + tempBuff.copy(data); + + typeid = 0xEF; + dataLength = 60; + + const valueAsBuffer = Buffer.from([0x4B, 0x00, 0xF8, 0x00, 0x62, 0x00, 0x65, 0x00, 0x6E, 0x00, 0x68, 0x00, 0x61, 0x00, 0x76, 0x00, 0x6E, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0xFE]); + value = 'København '; + const codePage = Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); + collation = { + LCID: 1033, + codepage: 'CP1252' + }; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // COLLATION + MAXLEN + codePage.copy(data, offset); + offset += 5; + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token, collation); + reader.end(data); + }); + + it('should parse the NCHARTYPE(30)-(Japanese) token correctly', function(done) { + data = Buffer.alloc(94); + tempBuff.copy(data); + + typeid = 0xEF; + dataLength = 60; + + const valueAsBuffer = Buffer.from([0x6F, 0x30, 0x58, 0x30, 0x81, 0x30, 0x7E, 0x30, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0x20, 0x00, 0xFE]); + value = 'はじめま '; + const codePage = Buffer.from([0x11, 0x04, 0x34, 0x30, 0x00]); + collation = { + LCID: 263185, + codepage: 'CP932' + }; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // COLLATION + MAXLEN + codePage.copy(data, offset); + offset += 5; + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token, collation); + reader.end(data); + }); + }); describe('test FIXEDLENTYPE', function() { From 2808190c20811d1aacea8dcfa91ed55d10fab0fe Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Wed, 3 Oct 2018 15:29:25 -0700 Subject: [PATCH 21/29] feat: add parsing for binary type --- package.json | 2 +- src/dataTypes.js | 8 ++++++++ src/value-parser.js | 43 +++++++++++++++++++++++++++++++++++++++- test/returnValue-test.js | 30 
+++++++++++++++++++++++++++- 4 files changed, 80 insertions(+), 3 deletions(-) diff --git a/package.json b/package.json index 6f1a756..d3dd15f 100644 --- a/package.json +++ b/package.json @@ -30,7 +30,7 @@ "iconv-lite": "^0.4.23", "mocha": "^5.0.5", "semantic-release": "^15.1.5", - "semantic-release-cli": "^4.0.7" + "semantic-release-cli": "^4.0.8" }, "dependencies": { "babel-runtime": "^6.26.0" diff --git a/src/dataTypes.js b/src/dataTypes.js index 1ffdb49..2d43213 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -157,6 +157,14 @@ const type = { hasCollation: true, LengthOfDataLength: 2, maximumLength: 4000 + }, + + [0xAD]: { + id: 0xAD, + type: 'BIGBINARY', + name: 'Binary', + LengthOfDataLength: 2, + maximumLength: 8000 } }; diff --git a/src/value-parser.js b/src/value-parser.js index d89d3fa..3095344 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -374,13 +374,20 @@ function readValue(reader: Reader) { return readChars; } - case 'NChar': if (token.dataLength === MAX) { // TODO: PLP support } else { return readNChars; } + + case 'Binary': + if (token.dataLength === MAX) { + // TODO: PLP support + } else { + return readBinary; + } + default: console.log('readValue not implemented'); } @@ -630,4 +637,38 @@ function readNChars(reader: Reader) { return reader.stash.pop(); } } + +function readBinary(reader: Reader) { + const dataLength = reader.stash[reader.stash.length - 1]; + if (!reader.bytesAvailable(dataLength)) { + return; + } + const token = reader.stash[reader.stash.length - 3]; + + let nullValue; + switch (TYPE[token.typeInfo.id].name) { + case 'VarBinary': + case 'Binary': + nullValue = NULL; + break; + case 'Image': + nullValue = PLP_NULL; + break; + default: + console.log('Invalid type'); + } + + if (dataLength === nullValue) { + token.value = null; + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); + } + else { + token.value = reader.readBuffer(0, dataLength); + reader.consumeBytes(dataLength); + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); + } +} + module.exports.valueParse = valueParse; diff --git a/test/returnValue-test.js b/test/returnValue-test.js index 5549d0a..f2fca24 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -152,7 +152,7 @@ describe('Parsing a RETURNVALUE token', function() { assert.equalTime(token.value, value); if (options && options.nanoSec) { assert.strictEqual(token.value.nanosecondsDelta, options.nanoSec); } } else { - assert.strictEqual(token.value, value); + assert.deepEqual(token.value, value); } if ((value !== null) && (typeid == 0xAF && options && options.collation)) { @@ -1070,6 +1070,34 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(data); }); + it('should parse the BIGBINARYTYPE(10)- token correctly', function(done) { + data = Buffer.alloc(39); + tempBuff.copy(data); + + typeid = 0xAD; + dataLength = 10; + + const valueAsBuffer = Buffer.from([0x56, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFE]); + value = Buffer.from([0x56, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]); + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // MAXLEN + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token); + reader.end(data); + }); + }); describe('test FIXEDLENTYPE', function() { From 9a9b843b01a363a7ac29655d0376eed6fad01810 Mon Sep 17 00:00:00 2001 
From: Suraiya Hameed Date: Wed, 3 Oct 2018 16:56:41 -0700 Subject: [PATCH 22/29] feat: support varbinary and null values --- src/dataTypes.js | 8 +++ src/value-parser.js | 129 +++++++++++++-------------------------- test/returnValue-test.js | 50 +++++++++++++++ 3 files changed, 101 insertions(+), 86 deletions(-) diff --git a/src/dataTypes.js b/src/dataTypes.js index 2d43213..dd911ce 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -165,6 +165,14 @@ const type = { name: 'Binary', LengthOfDataLength: 2, maximumLength: 8000 + }, + + [0xA5]: { + id: 0xA5, + type: 'BIGVARBIN', + name: 'VarBinary', + LengthOfDataLength: 2, + maximumLength: 8000 } }; diff --git a/src/value-parser.js b/src/value-parser.js index 3095344..e71c741 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -367,27 +367,45 @@ function readValue(reader: Reader) { return readDateTimeOffset(reader); } + case 'VarChar': case 'Char': - if (token.dataLength === MAX) { - // TODO: PLP support - } else { + if (dataLength === NULL) { + token.value = null; + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); + } + else { return readChars; } + case 'NVarChar': case 'NChar': - if (token.dataLength === MAX) { - // TODO: PLP support + if (dataLength === NULL) { + token.value = null; + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); } else { return readNChars; } + case 'VarBinary': case 'Binary': - if (token.dataLength === MAX) { - // TODO: PLP support + if (dataLength === NULL) { + token.value = null; + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); } else { return readBinary; } + case 'Image': + if (dataLength === PLP_NULL) { + token.value = null; + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); + } else { + //TODO: implement + } default: console.log('readValue not implemented'); } @@ -570,38 +588,18 @@ function readChars(reader: Reader) { } const token = reader.stash[reader.stash.length - 3]; - let nullValue; - switch (TYPE[token.typeInfo.id].name) { - case 'VarChar': - case 'Char': - nullValue = NULL; - break; - case 'Text': - nullValue = PLP_NULL; - break; - default: - console.log('Invalid type'); - } + const data = reader.readBuffer(0, dataLength); + const collation: Collation = token.typeInfo.collation; + let codepage = collation.codepage; - if (dataLength === nullValue) { - token.value = null; - reader.stash.pop(); // remove dataLength - return reader.stash.pop(); + if (codepage == null) { + codepage = DEFAULT_ENCODING; } - else { - const data = reader.readBuffer(0, dataLength); - const collation: Collation = token.typeInfo.collation; - let codepage = collation.codepage; - - if (codepage == null) { - codepage = DEFAULT_ENCODING; - } - token.value = iconv.decode(data, codepage); - reader.consumeBytes(dataLength); - reader.stash.pop(); // remove dataLength - return reader.stash.pop(); - } + token.value = iconv.decode(data, codepage); + reader.consumeBytes(dataLength); + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); } function readNChars(reader: Reader) { @@ -610,32 +608,11 @@ function readNChars(reader: Reader) { return; } const token = reader.stash[reader.stash.length - 3]; - - let nullValue; - switch (TYPE[token.typeInfo.id].name) { - case 'NVarChar': - case 'NChar': - nullValue = NULL; - break; - case 'NText': - nullValue = PLP_NULL; - break; - default: - console.log('Invalid type'); - } - - if (dataLength === nullValue) { - token.value = null; - reader.stash.pop(); // remove dataLength - return 
reader.stash.pop(); - } - else { - const data = reader.readBuffer(0, dataLength); - token.value = data.toString('ucs2'); - reader.consumeBytes(dataLength); - reader.stash.pop(); // remove dataLength - return reader.stash.pop(); - } + const data = reader.readBuffer(0, dataLength); + token.value = data.toString('ucs2'); + reader.consumeBytes(dataLength); + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); } function readBinary(reader: Reader) { @@ -645,30 +622,10 @@ function readBinary(reader: Reader) { } const token = reader.stash[reader.stash.length - 3]; - let nullValue; - switch (TYPE[token.typeInfo.id].name) { - case 'VarBinary': - case 'Binary': - nullValue = NULL; - break; - case 'Image': - nullValue = PLP_NULL; - break; - default: - console.log('Invalid type'); - } - - if (dataLength === nullValue) { - token.value = null; - reader.stash.pop(); // remove dataLength - return reader.stash.pop(); - } - else { - token.value = reader.readBuffer(0, dataLength); - reader.consumeBytes(dataLength); - reader.stash.pop(); // remove dataLength - return reader.stash.pop(); - } + token.value = reader.readBuffer(0, dataLength); + reader.consumeBytes(dataLength); + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); } module.exports.valueParse = valueParse; diff --git a/test/returnValue-test.js b/test/returnValue-test.js index f2fca24..bf084dc 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -1098,6 +1098,56 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(data); }); + it('should parse the BIGVARBINARYTYPE(10)- token correctly', function(done) { + data = Buffer.alloc(30); + tempBuff.copy(data); + + typeid = 0xA5; + dataLength = 1; + + const valueAsBuffer = Buffer.from([0x56, 0xFE]); + value = Buffer.from([0x56]); + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + // MAXLEN + data.writeUInt16LE(8000, offset); + offset += 2; + // data length + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset); + offset += dataLength; + + const token = {}; + addListners(done, token); + reader.end(data); + }); + + it('should parse the BIGVARBINARYTYPE(10)- token correctly, null value', function(done) { + data = Buffer.alloc(28); + tempBuff.copy(data); + + typeid = 0xA5; + dataLength = (1 << 16) - 1; + value = null; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + // MAXLEN + data.writeUInt16LE(8000, offset); + offset += 2; + + // TYPE_VARBYTE + data.writeUInt16LE(dataLength, offset); + + const token = {}; + addListners(done, token); + reader.end(data); + }); + }); describe('test FIXEDLENTYPE', function() { From f87c553b7785264bc4361f288692f88e3dee917b Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Thu, 11 Oct 2018 15:53:24 -0700 Subject: [PATCH 23/29] feat: parser varbinary(max) known length --- src/value-parser.js | 95 +++++++++++++++++++++++++++++++++++++--- test/returnValue-test.js | 81 +++++++++++++++++++++++++++++++++- 2 files changed, 167 insertions(+), 9 deletions(-) diff --git a/src/value-parser.js b/src/value-parser.js index e71c741..c9ff491 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -12,6 +12,7 @@ const THREE_AND_A_THIRD = 3 + (1 / 3); const MONEY_DIVISOR = 10000; const NULL = (1 << 16) - 1; const PLP_NULL = new Buffer([0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]); +const UNKNOWN_PLP_LEN = new Buffer([0xFE, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF]); const DEFAULT_ENCODING = 'utf8'; function valueParse(next: readStep, reader: Reader) { 
@@ -61,8 +62,8 @@ function readDataLength(reader: Reader) { } } else { - //TODO: add test? - return reader.stash.pop(); + reader.stash.push(undefined); + return readValue; } case 0x30: // xx11xxxx - s2.2.4.2.1.2 @@ -390,11 +391,11 @@ function readValue(reader: Reader) { case 'VarBinary': case 'Binary': - if (dataLength === NULL) { - token.value = null; + if (token.typeInfo.dataLength === MAX) { reader.stash.pop(); // remove dataLength - return reader.stash.pop(); - } else { + return readMaxBinary; + } + else { return readBinary; } @@ -615,12 +616,92 @@ function readNChars(reader: Reader) { return reader.stash.pop(); } + +function readMaxBinary(reader: Reader) { + return readMax; +} + +function readMax(reader: Reader) { + if (!reader.bytesAvailable(8)) { + return; + } + const token = reader.stash[reader.stash.length - 2]; + const type = reader.readBuffer(0, 8); + reader.consumeBytes(8); + if (type.equals(PLP_NULL)) { + token.value = null; + return reader.stash.pop(); + } else if (type.equals(UNKNOWN_PLP_LEN)) { + // TODO: implement unknown length + console.log('UNKNOWN_PLP_LEN not implemented'); + } else { + const low = type.readUInt32LE(0); + const high = type.readUInt32LE(4); + if (high >= (2 << (53 - 32))) { + console.warn('Read UInt64LE > 53 bits : high=' + high + ', low=' + low); + } + + const expectedLength = low + (0x100000000 * high); + reader.stash.push(expectedLength); + return readMaxKnownLength; + } +} + +function readMaxKnownLength(reader: Reader) { + const totalLength = reader.stash[reader.stash.length - 1]; + const token = reader.stash[reader.stash.length - 3]; + token.value = new Buffer(totalLength).fill(0); + const offset = 0; + reader.stash.push(offset); + return readChunk; +} + +function readChunk(reader: Reader) { + let offset = reader.stash[reader.stash.length - 1]; + const token = reader.stash[reader.stash.length - 4]; + if (!reader.bytesAvailable(4)) { + return; + } + const chunkLength = reader.readUInt32LE(0); + if (!reader.bytesAvailable(chunkLength)) { + return; + } + reader.consumeBytes(4); + if (!chunkLength) { + return doneReadingChunk; + } + const chunk = reader.readBuffer(0, chunkLength); + chunk.copy(token.value, offset); + offset += chunkLength; + reader.stash[reader.stash.length - 1] = offset; + reader.consumeBytes(chunkLength); + return readChunk; +} + +function doneReadingChunk(reader: Reader) { + const offset = reader.stash[reader.stash.length - 1]; + const totalLength = reader.stash[reader.stash.length - 2]; + if (offset !== totalLength) { + throw new Error('Partially Length-prefixed Bytes unmatched lengths : expected ' + totalLength + ', but got ' + offset + ' bytes'); + } + reader.stash.pop();reader.stash.pop(); + const next = reader.stash.pop(); + return next; +} + function readBinary(reader: Reader) { const dataLength = reader.stash[reader.stash.length - 1]; + const token = reader.stash[reader.stash.length - 3]; + + if (dataLength === NULL) { + token.value = null; + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); + } + if (!reader.bytesAvailable(dataLength)) { return; } - const token = reader.stash[reader.stash.length - 3]; token.value = reader.readBuffer(0, dataLength); reader.consumeBytes(dataLength); diff --git a/test/returnValue-test.js b/test/returnValue-test.js index bf084dc..239f672 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -1103,6 +1103,7 @@ describe('Parsing a RETURNVALUE token', function() { tempBuff.copy(data); typeid = 0xA5; + const maxDataLength = 8000; dataLength = 1; const 
valueAsBuffer = Buffer.from([0x56, 0xFE]); @@ -1111,7 +1112,7 @@ describe('Parsing a RETURNVALUE token', function() { // TYPE_INFO data.writeUInt8(typeid, offset++); // MAXLEN - data.writeUInt16LE(8000, offset); + data.writeUInt16LE(maxDataLength, offset); offset += 2; // data length data.writeUInt16LE(dataLength, offset); @@ -1131,13 +1132,14 @@ describe('Parsing a RETURNVALUE token', function() { tempBuff.copy(data); typeid = 0xA5; + const maxDataLength = 8000; dataLength = (1 << 16) - 1; value = null; // TYPE_INFO data.writeUInt8(typeid, offset++); // MAXLEN - data.writeUInt16LE(8000, offset); + data.writeUInt16LE(maxDataLength, offset); offset += 2; // TYPE_VARBYTE @@ -1148,6 +1150,81 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(data); }); + it('should parse the BIGVARBINARYTYPE(max)- token correctly, known length value', function(done) { + data = Buffer.alloc(44); + tempBuff.copy(data); + + typeid = 0xA5; + const maxDataLength = (1 << 16) - 1; + dataLength = Buffer.from([0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]); + const chukLen = 1; + value = Buffer.from([0x55]); + const bufferValue = 0x55; + // TYPE_INFO + data.writeUInt8(typeid, offset++); + // MAXLEN + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // TYPE_VARBYTE + dataLength.copy(data, offset); + offset += 8; + + data.writeUInt32LE(chukLen, offset); + offset += 4; + data.writeUInt8(bufferValue, offset++); + // PLP_TERMINATOR + data.writeUInt32LE(0, offset); + offset += 4; + data.writeUInt8(0xFE, offset++); + const token = {}; + addListners(done, token); + reader.end(data); + }); + + it('should parse the BIGVARBINARYTYPE(max)- token correctly, known length value_2 ', function(done) { + data = Buffer.alloc(4070); + tempBuff.copy(data); + + const token = {}; + addListners(done, token); + + typeid = 0xA5; + const maxDataLength = (1 << 16) - 1; + dataLength = Buffer.from([0x88, 0x13, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]); + let bufferValue = Buffer.alloc(4032, 0x55); + value = Buffer.alloc(5000, 0x55); + // TYPE_INFO + data.writeUInt8(typeid, offset++); + // MAXLEN + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // TYPE_VARBYTE + dataLength.copy(data, offset); + offset += 8; + + data.writeUInt32LE(4032, offset); + offset += 4; + bufferValue.copy(data, offset); + reader.write(data); + + // chunk 2 + data = Buffer.alloc(977); + bufferValue = Buffer.alloc(968, 0x55); + offset = 0; + data.writeUInt32LE(968, offset); + offset += 4; + bufferValue.copy(data, offset); + offset += bufferValue.length; + // PLP_TERMINATOR + data.writeUInt32LE(0, offset); + offset += 4; + data.writeUInt8(0xFE, offset++); + reader.write(data); + reader.end(); + }); + }); describe('test FIXEDLENTYPE', function() { From e5fafe2ea181f24d3fdb92bf14b2d8aab30074c8 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Thu, 25 Oct 2018 17:42:13 -0700 Subject: [PATCH 24/29] feat: parse unknown length varbinary --- src/value-parser.js | 62 +++++++++++++++++++++++++++++----------- test/returnValue-test.js | 40 ++++++++++++++++++++++++++ 2 files changed, 86 insertions(+), 16 deletions(-) diff --git a/src/value-parser.js b/src/value-parser.js index c9ff491..e317991 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -632,8 +632,7 @@ function readMax(reader: Reader) { token.value = null; return reader.stash.pop(); } else if (type.equals(UNKNOWN_PLP_LEN)) { - // TODO: implement unknown length - console.log('UNKNOWN_PLP_LEN not implemented'); + return readMaxUnknownLength; } else { const low = 
type.readUInt32LE(0); const high = type.readUInt32LE(4); @@ -647,16 +646,50 @@ function readMax(reader: Reader) { } } +function readMaxUnknownLength(reader: Reader) { + const chunks = []; + const length = 0; + const token = reader.stash[reader.stash.length - 2]; + token.value = chunks; //Buffer.concat(chunks, length); + reader.stash.push(length); + return readMaxUnKnownLengthChunk; +} + +function readMaxUnKnownLengthChunk(reader: Reader) { + let length = reader.stash[reader.stash.length - 1]; + const token = reader.stash[reader.stash.length - 3]; + if (!reader.bytesAvailable(4)) { + return; + } + const chunkLength = reader.readUInt32LE(0); + if (!reader.bytesAvailable(chunkLength)) { + return; + } + reader.consumeBytes(4); + if (!chunkLength) { + token.value = Buffer.concat(token.value, length); + reader.stash.pop(); + const next = reader.stash.pop(); + return next; + } + const chunk = reader.readBuffer(0, chunkLength); + token.value.push(chunk); + length += chunkLength; + reader.stash[reader.stash.length - 1] = length; + reader.consumeBytes(chunkLength); + return readMaxUnKnownLengthChunk; +} + function readMaxKnownLength(reader: Reader) { const totalLength = reader.stash[reader.stash.length - 1]; const token = reader.stash[reader.stash.length - 3]; token.value = new Buffer(totalLength).fill(0); const offset = 0; reader.stash.push(offset); - return readChunk; + return readMaxKnownLengthChunk; } -function readChunk(reader: Reader) { +function readMaxKnownLengthChunk(reader: Reader) { let offset = reader.stash[reader.stash.length - 1]; const token = reader.stash[reader.stash.length - 4]; if (!reader.bytesAvailable(4)) { @@ -668,26 +701,23 @@ function readChunk(reader: Reader) { } reader.consumeBytes(4); if (!chunkLength) { - return doneReadingChunk; + const totalLength = reader.stash[reader.stash.length - 2]; + if (offset !== totalLength) { + throw new Error('Partially Length-prefixed Bytes unmatched lengths : expected ' + totalLength + ', but got ' + offset + ' bytes'); + } + reader.stash.pop(); + reader.stash.pop(); + const next = reader.stash.pop(); + return next; } const chunk = reader.readBuffer(0, chunkLength); chunk.copy(token.value, offset); offset += chunkLength; reader.stash[reader.stash.length - 1] = offset; reader.consumeBytes(chunkLength); - return readChunk; + return readMaxKnownLengthChunk; } -function doneReadingChunk(reader: Reader) { - const offset = reader.stash[reader.stash.length - 1]; - const totalLength = reader.stash[reader.stash.length - 2]; - if (offset !== totalLength) { - throw new Error('Partially Length-prefixed Bytes unmatched lengths : expected ' + totalLength + ', but got ' + offset + ' bytes'); - } - reader.stash.pop();reader.stash.pop(); - const next = reader.stash.pop(); - return next; -} function readBinary(reader: Reader) { const dataLength = reader.stash[reader.stash.length - 1]; diff --git a/test/returnValue-test.js b/test/returnValue-test.js index 239f672..2ed8411 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -1225,6 +1225,46 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(); }); + it('should parse the BIGVARBINARYTYPE(max)- token correctly, UNKNOWN length value ', function(done) { + data = Buffer.alloc(40); + tempBuff.copy(data); + + const token = {}; + addListners(done, token); + + typeid = 0xA5; + const maxDataLength = (1 << 16) - 1; + dataLength = Buffer.from([0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]); + value = Buffer.from([0x12, 0x34, 0x56, 0x78]); + // TYPE_INFO + data.writeUInt8(typeid, 
offset++); + // MAXLEN + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + // TYPE_VARBYTE + dataLength.copy(data, offset); + offset += 8; + + data.writeUInt32LE(2, offset); + offset += 4; + value.copy(data, offset, 0, 2); + reader.write(data); + + // chunk 2 + data = Buffer.alloc(11); + offset = 0; + data.writeUInt32LE(2, offset); + offset += 4; + value.copy(data, offset, 2, 4); + offset += 2; + // PLP_TERMINATOR + data.writeUInt32LE(0, offset); + offset += 4; + data.writeUInt8(0xFE, offset++); + reader.write(data); + reader.end(); + }); + }); describe('test FIXEDLENTYPE', function() { From dc5243ea74b4c031a54f92b6dc8d8d3770eb8013 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Fri, 26 Oct 2018 13:13:07 -0700 Subject: [PATCH 25/29] test: add varbinay max test for null value --- test/returnValue-test.js | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/test/returnValue-test.js b/test/returnValue-test.js index 2ed8411..5bc5ea7 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -1225,6 +1225,30 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(); }); + it('should parse the BIGVARBINARYTYPE(max)-NULL token correctly, null value', function(done) { + data = Buffer.alloc(35); + tempBuff.copy(data); + + typeid = 0xA5; + const maxDataLength = (1 << 16) - 1; + dataLength = Buffer.from([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]); + value = null; + // TYPE_INFO + data.writeUInt8(typeid, offset++); + // MAXLEN + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // TYPE_VARBYTE + dataLength.copy(data, offset); + offset += 8; + // PLP_TERMINATOR + data.writeUInt8(0xFE, offset++); + const token = {}; + addListners(done, token); + reader.end(data); + }); + it('should parse the BIGVARBINARYTYPE(max)- token correctly, UNKNOWN length value ', function(done) { data = Buffer.alloc(40); tempBuff.copy(data); From 67fe99c8a60ad969e40a8c53ce46d09773c1c8a0 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Mon, 29 Oct 2018 15:16:11 -0700 Subject: [PATCH 26/29] feat: nvarchar support for returnvalue parser --- src/dataTypes.js | 9 +++ src/value-parser.js | 24 ++++-- test/returnValue-test.js | 162 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 190 insertions(+), 5 deletions(-) diff --git a/src/dataTypes.js b/src/dataTypes.js index dd911ce..85dffc2 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -173,6 +173,15 @@ const type = { name: 'VarBinary', LengthOfDataLength: 2, maximumLength: 8000 + }, + + [0xE7]: { + id: 0xE7, + type: 'NVARCHAR', + name: 'NVarChar', + hasCollation: true, + LengthOfDataLength: 2, + maximumLength: 4000, } }; diff --git a/src/value-parser.js b/src/value-parser.js index e317991..b57d408 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -381,10 +381,9 @@ function readValue(reader: Reader) { case 'NVarChar': case 'NChar': - if (dataLength === NULL) { - token.value = null; + if (token.typeInfo.dataLength === MAX) { reader.stash.pop(); // remove dataLength - return reader.stash.pop(); + return readMaxNChars; } else { return readNChars; } @@ -605,10 +604,17 @@ function readChars(reader: Reader) { function readNChars(reader: Reader) { const dataLength = reader.stash[reader.stash.length - 1]; + const token = reader.stash[reader.stash.length - 3]; + + if (dataLength === NULL) { + token.value = null; + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); + } + if (!reader.bytesAvailable(dataLength)) { return; } - const token = reader.stash[reader.stash.length - 
3]; const data = reader.readBuffer(0, dataLength); token.value = data.toString('ucs2'); reader.consumeBytes(dataLength); @@ -616,6 +622,9 @@ function readNChars(reader: Reader) { return reader.stash.pop(); } +function readMaxNChars(reader: Reader) { + return readMax; +} function readMaxBinary(reader: Reader) { return readMax; @@ -668,6 +677,9 @@ function readMaxUnKnownLengthChunk(reader: Reader) { reader.consumeBytes(4); if (!chunkLength) { token.value = Buffer.concat(token.value, length); + if (token.typeInfo.id === 0xE7) { + token.value = token.value.toString('ucs2'); + } reader.stash.pop(); const next = reader.stash.pop(); return next; @@ -705,6 +717,9 @@ function readMaxKnownLengthChunk(reader: Reader) { if (offset !== totalLength) { throw new Error('Partially Length-prefixed Bytes unmatched lengths : expected ' + totalLength + ', but got ' + offset + ' bytes'); } + if (token.typeInfo.id === 0xE7) { + token.value = token.value.toString('ucs2'); + } reader.stash.pop(); reader.stash.pop(); const next = reader.stash.pop(); @@ -718,7 +733,6 @@ function readMaxKnownLengthChunk(reader: Reader) { return readMaxKnownLengthChunk; } - function readBinary(reader: Reader) { const dataLength = reader.stash[reader.stash.length - 1]; const token = reader.stash[reader.stash.length - 3]; diff --git a/test/returnValue-test.js b/test/returnValue-test.js index 5bc5ea7..a0f3c5f 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -1070,6 +1070,168 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(data); }); + it('should parse the NCHARTYPE(30)- token correctly, null value', function(done) { + data = Buffer.alloc(34); + tempBuff.copy(data); + + typeid = 0xEF; + dataLength = 60; + const maxDataLength = (1 << 16) - 1; + value = null; + const codePage = Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); + collation = { + LCID: 1033, + codepage: 'CP1252' + }; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // COLLATION + MAXLEN + codePage.copy(data, offset); + offset += 5; + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // TYPE_VARBYTE + data.writeUInt8(0xFE, offset++); + const token = {}; + addListners(done, token, collation); + reader.end(data); + }); + + it('should parse the NVARCHARTYPE(max)- token correctly, null value', function(done) { + data = Buffer.alloc(40); + tempBuff.copy(data); + + typeid = 0xE7; + const maxDataLength = (1 << 16) - 1; + dataLength = Buffer.from([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]); + value = null; + const codePage = Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); + collation = { + LCID: 1033, + codepage: 'CP1252' + }; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + // MAXLEN + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // COLLATION + codePage.copy(data, offset); + offset += 5; + + // TYPE_VARBYTE + dataLength.copy(data, offset); + offset += 8; + // PLP_TERMINATOR + data.writeUInt8(0xFE, offset++); + const token = {}; + addListners(done, token); + reader.end(data); + }); + + it('should parse the NVARCHARTYPE(max)- token correctly - known length', function(done) { + data = Buffer.alloc(66); + tempBuff.copy(data); + + typeid = 0xE7; + const maxDataLength = (1 << 16) - 1; + dataLength = Buffer.from([0x12, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]); + const valueAsBuffer = Buffer.from([0x4B, 0x00, 0xF8, 0x00, 0x62, 0x00, 0x65, 0x00, 0x6E, 0x00, 0x68, 0x00, 0x61, 0x00, 0x76, 0x00, 0x6E, 0x00]); + value = 'København'; + const codePage = 
Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); + collation = { + LCID: 1033, + codepage: 'CP1252' + }; + const chukLen = 18; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + // MAXLEN + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // COLLATION + codePage.copy(data, offset); + offset += 5; + dataLength.copy(data, offset); + offset += 8; + + data.writeUInt32LE(chukLen, offset); + offset += 4; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset); + offset += valueAsBuffer.length; + + // PLP_TERMINATOR + data.writeUInt32LE(0, offset); + offset += 4; + data.writeUInt8(0xFE, offset++); + const token = {}; + addListners(done, token); + reader.end(data); + }); + + it('should parse the NVARCHARTYPE(max)- token correctly - unknown length', function(done) { + data = Buffer.alloc(53); + tempBuff.copy(data); + const token = {}; + + typeid = 0xE7; + const maxDataLength = (1 << 16) - 1; + dataLength = Buffer.from([0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]); + const valueAsBuffer = Buffer.from([0x4B, 0x00, 0xF8, 0x00, 0x62, 0x00, 0x65, 0x00, 0x6E, 0x00, 0x68, 0x00, 0x61, 0x00, 0x76, 0x00, 0x6E, 0x00]); + value = 'København'; + const codePage = Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); + collation = { + LCID: 1033, + codepage: 'CP1252' + }; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + // MAXLEN + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // COLLATION + codePage.copy(data, offset); + offset += 5; + dataLength.copy(data, offset); + offset += 8; + + data.writeUInt32LE(10, offset); + offset += 4; + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset, 0, 10); + offset += 10; + reader.write(data); + + // chunk 2 + data = Buffer.alloc(17); + offset = 0; + data.writeUInt32LE(8, offset); + offset += 4; + valueAsBuffer.copy(data, offset, 10, 18); + // data.write(value.slice(3, 6), offset, 3); + offset += 8; + // PLP_TERMINATOR + data.writeUInt32LE(0, offset); + offset += 4; + data.writeUInt8(0xFE, offset++); + addListners(done, token); + reader.write(data); + reader.end(); + }); + it('should parse the BIGBINARYTYPE(10)- token correctly', function(done) { data = Buffer.alloc(39); tempBuff.copy(data); From 8c6e615f7502d2148382a6555e5dc45676346801 Mon Sep 17 00:00:00 2001 From: Suraiya Hameed Date: Wed, 31 Oct 2018 15:45:09 -0700 Subject: [PATCH 27/29] feat: support varchar parsing --- src/dataTypes.js | 9 ++ src/value-parser.js | 34 ++++- test/returnValue-test.js | 287 +++++++++++++++++++++++++++++++++++++++ 3 files changed, 325 insertions(+), 5 deletions(-) diff --git a/src/dataTypes.js b/src/dataTypes.js index 85dffc2..dbdf796 100644 --- a/src/dataTypes.js +++ b/src/dataTypes.js @@ -175,6 +175,15 @@ const type = { maximumLength: 8000 }, + [0xA7]: { + id: 0xA7, + type: 'BIGVARCHR', + name: 'VarChar', + hasCollation: true, + LengthOfDataLength: 2, + maximumLength: 8000, + }, + [0xE7]: { id: 0xE7, type: 'NVARCHAR', diff --git a/src/value-parser.js b/src/value-parser.js index b57d408..f4b6321 100644 --- a/src/value-parser.js +++ b/src/value-parser.js @@ -370,10 +370,9 @@ function readValue(reader: Reader) { case 'VarChar': case 'Char': - if (dataLength === NULL) { - token.value = null; - reader.stash.pop(); // remove dataLength - return reader.stash.pop(); + if (token.typeInfo.dataLength === MAX) { + reader.stash.pop(); + return readMaxChars; } else { return readChars; @@ -583,10 +582,17 @@ function readGUID(reader: Reader) { function readChars(reader: Reader) { const dataLength = reader.stash[reader.stash.length - 1]; + const token = 
reader.stash[reader.stash.length - 3]; + + if (dataLength === NULL) { + token.value = null; + reader.stash.pop(); // remove dataLength + return reader.stash.pop(); + } + if (!reader.bytesAvailable(dataLength)) { return; } - const token = reader.stash[reader.stash.length - 3]; const data = reader.readBuffer(0, dataLength); const collation: Collation = token.typeInfo.collation; @@ -622,6 +628,10 @@ function readNChars(reader: Reader) { return reader.stash.pop(); } +function readMaxChars(reader: Reader) { + return readMax; +} + function readMaxNChars(reader: Reader) { return readMax; } @@ -679,6 +689,13 @@ function readMaxUnKnownLengthChunk(reader: Reader) { token.value = Buffer.concat(token.value, length); if (token.typeInfo.id === 0xE7) { token.value = token.value.toString('ucs2'); + } else if (token.typeInfo.id === 0xA7) { + const collation: Collation = token.typeInfo.collation; + let codepage = collation.codepage; + if (codepage == null) { + codepage = DEFAULT_ENCODING; + } + token.value = iconv.decode(token.value, codepage); } reader.stash.pop(); const next = reader.stash.pop(); @@ -719,6 +736,13 @@ function readMaxKnownLengthChunk(reader: Reader) { } if (token.typeInfo.id === 0xE7) { token.value = token.value.toString('ucs2'); + } else if (token.typeInfo.id === 0xA7) { + const collation: Collation = token.typeInfo.collation; + let codepage = collation.codepage; + if (codepage == null) { + codepage = DEFAULT_ENCODING; + } + token.value = iconv.decode(token.value, codepage); } reader.stash.pop(); reader.stash.pop(); diff --git a/test/returnValue-test.js b/test/returnValue-test.js index a0f3c5f..485b552 100644 --- a/test/returnValue-test.js +++ b/test/returnValue-test.js @@ -1000,6 +1000,254 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(); }); + it('should parse the BIGVARCHRTYPE(30)- token correctly', function(done) { + data = Buffer.alloc(37); + tempBuff.copy(data); + + typeid = 0xA7; + const maxDataLength = 8000; + dataLength = 3; + + const valueAsBuffer = Buffer.from([0x61, 0x62, 0x63]); + value = 'abc'; + const codePage = Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); //SQL_Latin1_General_Cp437_BIN + collation = { + LCID: 1033, + codepage: 'CP1252' + }; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // COLLATION + MAXLEN + codePage.copy(data, offset); + offset += 5; + data.writeUInt16LE(dataLength, offset); + offset += 2; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset); + offset += dataLength; + + data.writeUInt8(0xFE, offset++); + const token = {}; + //TODO: add check for flags and LCID + addListners(done, token, collation); + reader.end(data); + }); + + it('should parse the BIGVARCHRTYPE(30)- null value - token correctly', function(done) { + data = Buffer.alloc(34); + tempBuff.copy(data); + + typeid = 0xA7; + const maxDataLength = 8000; + dataLength = (1 << 16) - 1; + + value = null; + const codePage = Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); //SQL_Latin1_General_Cp437_BIN + collation = { + LCID: 1033, + codepage: 'CP1252' + }; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // COLLATION + MAXLEN + codePage.copy(data, offset); + offset += 5; + data.writeUInt16LE(dataLength, offset); + offset += 2; + + data.writeUInt8(0xFE, offset++); + const token = {}; + addListners(done, token, collation); + reader.end(data); + }); + + it('should parse the BIGVARCHRTYPE(max)- null value - token correctly', function(done) { + data 
= Buffer.alloc(40); + tempBuff.copy(data); + + typeid = 0xA7; + const maxDataLength = (1 << 16) - 1; + dataLength = Buffer.from([0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]); + value = null; + const codePage = Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); //SQL_Latin1_General_Cp437_BIN + collation = { + LCID: 1033, + codepage: 'CP1252' + }; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // COLLATION + MAXLEN + codePage.copy(data, offset); + offset += 5; + // TYPE_VARBYTE + dataLength.copy(data, offset); + offset += 8; + + data.writeUInt8(0xFE, offset++); + const token = {}; + addListners(done, token, collation); + reader.end(data); + }); + + it('should parse the BIGVARCHRTYPE(max)- knownlength - token correctly', function(done) { + data = Buffer.alloc(51); + tempBuff.copy(data); + + typeid = 0xA7; + const maxDataLength = (1 << 16) - 1; + dataLength = Buffer.from([0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]); + const valueAsBuffer = Buffer.from([0x61, 0x62, 0x63]); + value = 'abc'; + const codePage = Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); //SQL_Latin1_General_Cp437_BIN + collation = { + LCID: 1033, + codepage: 'CP1252' + }; + const chukLen = 3; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // COLLATION + MAXLEN + codePage.copy(data, offset); + offset += 5; + dataLength.copy(data, offset); + offset += 8; + + data.writeUInt32LE(chukLen, offset); + offset += 4; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset); + offset += valueAsBuffer.length; + + // PLP_TERMINATOR + data.writeUInt32LE(0, offset); + offset += 4; + data.writeUInt8(0xFE, offset++); + const token = {}; + addListners(done, token, collation); + reader.end(data); + }); + + it('should parse the BIGVARCHRTYPE(max)- knownlength - multi packet - token correctly', function(done) { + data = Buffer.alloc(44); + tempBuff.copy(data); + + typeid = 0xA7; + const maxDataLength = (1 << 16) - 1; + dataLength = Buffer.from([0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]); + const valueAsBuffer = Buffer.from([0x61, 0x62, 0x63]); + value = 'abc'; + const codePage = Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); //SQL_Latin1_General_Cp437_BIN + collation = { + LCID: 1033, + codepage: 'CP1252' + }; + let chukLen = 1; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // COLLATION + MAXLEN + codePage.copy(data, offset); + offset += 5; + dataLength.copy(data, offset); + offset += 8; + + data.writeUInt32LE(chukLen, offset); + offset += 4; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset, 0, 1); + reader.write(data); + + data = Buffer.alloc(11); + offset = 0; + chukLen = 2; + data.writeUInt32LE(chukLen, offset); + offset += 4; + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset, 1); + offset += chukLen; + // PLP_TERMINATOR + data.writeUInt32LE(0, offset); + offset += 4; + data.writeUInt8(0xFE, offset++); + const token = {}; + addListners(done, token, collation); + reader.end(data); + }); + + it('should parse the BIGVARCHRTYPE(max)- UnKnownlength - token correctly', function(done) { + data = Buffer.alloc(46); + tempBuff.copy(data); + + typeid = 0xA7; + const maxDataLength = (1 << 16) - 1; + dataLength = Buffer.from([0xfe, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff]); + const valueAsBuffer = Buffer.from([0x61, 0x62, 0x63, 0x64, 0x65, 0x66]); + value = 'abcdef'; + const codePage = Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); 
//SQL_Latin1_General_Cp437_BIN + collation = { + LCID: 1033, + codepage: 'CP1252' + }; + let chukLen = 3; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + data.writeUInt16LE(maxDataLength, offset); + offset += 2; + + // COLLATION + MAXLEN + codePage.copy(data, offset); + offset += 5; + dataLength.copy(data, offset); + offset += 8; + + data.writeUInt32LE(chukLen, offset); + offset += 4; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset, 0, chukLen); + offset += chukLen; + reader.write(data); + + data = Buffer.alloc(12); + offset = 0; + chukLen = 3; + data.writeUInt32LE(chukLen, offset); + offset += 4; + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset, chukLen); + offset += chukLen; + // PLP_TERMINATOR + data.writeUInt32LE(0, offset); + offset += 4; + data.writeUInt8(0xFE, offset++); + const token = {}; + addListners(done, token, collation); + reader.end(data); + }); + it('should parse the NCHARTYPE(30)- token correctly', function(done) { data = Buffer.alloc(94); tempBuff.copy(data); @@ -1102,6 +1350,45 @@ describe('Parsing a RETURNVALUE token', function() { reader.end(data); }); + it('should parse the NVARCHARTYPE(30)- token correctly - known length', function(done) { + data = Buffer.alloc(52); + tempBuff.copy(data); + + typeid = 0xE7; + const maxDataLength = 30; + dataLength = 18; + const valueAsBuffer = Buffer.from([0x4B, 0x00, 0xF8, 0x00, 0x62, 0x00, 0x65, 0x00, 0x6E, 0x00, 0x68, 0x00, 0x61, 0x00, 0x76, 0x00, 0x6E, 0x00]); + value = 'København'; + const codePage = Buffer.from([0x09, 0x04, 0xD0, 0x00, 0x34]); + collation = { + LCID: 1033, + codepage: 'CP1252' + }; + const chukLen = 18; + + // TYPE_INFO + data.writeUInt8(typeid, offset++); + // MAXLEN + data.writeUInt16LE(maxDataLength * 2, offset); + offset += 2; + + // COLLATION + codePage.copy(data, offset); + offset += 5; + + data.writeUInt16LE(chukLen, offset); + offset += 2; + + // TYPE_VARBYTE + valueAsBuffer.copy(data, offset); + offset += valueAsBuffer.length; + + data.writeUInt8(0xFE, offset++); + const token = {}; + addListners(done, token); + reader.end(data); + }); + it('should parse the NVARCHARTYPE(max)- token correctly, null value', function(done) { data = Buffer.alloc(40); tempBuff.copy(data); From 5652861aada21f57de8584cbc7dfc6101b53dd25 Mon Sep 17 00:00:00 2001 From: Hadis-Fard Date: Thu, 1 Nov 2018 14:19:09 -0700 Subject: [PATCH 28/29] feat: sspi token parser --- src/tokens/sspi/index.js | 13 +++++++++++++ src/tokens/sspi/read.js | 25 +++++++++++++++++++++++++ test/sspi-test.js | 38 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 76 insertions(+) create mode 100644 src/tokens/sspi/index.js create mode 100644 src/tokens/sspi/read.js create mode 100644 test/sspi-test.js diff --git a/src/tokens/sspi/index.js b/src/tokens/sspi/index.js new file mode 100644 index 0000000..2007541 --- /dev/null +++ b/src/tokens/sspi/index.js @@ -0,0 +1,13 @@ +/*@flow*/ + +const Token = require('../../token.js'); + +class SSPIToken extends Token { + SSPIBuffer : ?(string | Buffer); + constructor() { + super(0xED); + this.SSPIBuffer = Buffer.alloc(0); + } +} +module.exports = SSPIToken; +SSPIToken.read = require('./read'); diff --git a/src/tokens/sspi/read.js b/src/tokens/sspi/read.js new file mode 100644 index 0000000..41da524 --- /dev/null +++ b/src/tokens/sspi/read.js @@ -0,0 +1,25 @@ +/* @flow */ + +import type Reader from '../../reader'; + +function readSSPIToken(reader: Reader) { + if (!reader.bytesAvailable(2)) { + return; + } + const length = reader.readUInt16LE(0); + if (!reader.bytesAvailable(length)) { + return; 
+ } + reader.consumeBytes(2); + + const token = new SSPIToken(); + token.SSPIBuffer = reader.readBuffer(0, length); + reader.consumeBytes(length); + + reader.push(token); + return reader.nextToken; +} + +module.exports = readSSPIToken; + +const SSPIToken = require('.'); diff --git a/test/sspi-test.js b/test/sspi-test.js new file mode 100644 index 0000000..18d0a33 --- /dev/null +++ b/test/sspi-test.js @@ -0,0 +1,38 @@ +/* @flow */ + +const assert = require('chai').assert; +const Reader = require('../src').Reader; +const SSPIToken = require('../src/tokens/sspi'); + +describe('sspi-token', function() { + let reader; + beforeEach(function() { + reader = new Reader(0x74000004); + }); + + it('should parse the token correctly', function(done) { + + const sspiData = 'sspiToken'; + const buffer = Buffer.alloc(sspiData.length + 3); + let offset = 0; + buffer.writeUInt8(0xED, offset++); + buffer.writeInt16LE(sspiData.length, offset); + offset += 2; + buffer.write(sspiData, offset); + + let token; + reader.on('data', function(data) { + assert.instanceOf(data, SSPIToken); + token = data; + }); + + reader.on('error', done); + + reader.on('end', function() { + assert.strictEqual(token.SSPIBuffer.toString(), sspiData); + done(); + }); + + reader.end(buffer); + }); +}); From 9b3a8d1a16ae36b65454c7dc4690988b3c86d8be Mon Sep 17 00:00:00 2001 From: Hadis-Fard Date: Thu, 1 Nov 2018 14:22:02 -0700 Subject: [PATCH 29/29] fix: add sspi token to reader --- src/reader.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/reader.js b/src/reader.js index 8005026..fec383b 100644 --- a/src/reader.js +++ b/src/reader.js @@ -25,6 +25,7 @@ function nextToken(reader) { case 0xA9: return readOrderToken; case 0x79: return readReturnStatus; case 0xAC: return readReturnValueToken; + case 0xED: return readSSPIToken; default: console.log(reader.buffer.slice(reader.position - 1)); throw new Error('Unknown token type ' + type.toString(16)); @@ -189,3 +190,4 @@ const readLoginAckToken = require('./tokens/loginack/read'); const readOrderToken = require('./tokens/order/read'); const readReturnStatus = require('./tokens/returnStatus/read'); const readReturnValueToken = require('./tokens/returnvalue/read'); +const readSSPIToken = require('./tokens/sspi/read');
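Taken together, the last two patches (the SSPI token parser and its registration in src/reader.js) let an SSPI token flow end to end through the streaming Reader. Below is a minimal usage sketch, not part of the patch series: it assumes module paths resolved from the repository root and relies only on the API already exercised in test/sspi-test.js (the Reader constructor's TDS version constant 0x74000004, the object-mode 'data'/'end' events, and the SSPIToken.SSPIBuffer field); everything else is illustrative.

    // Minimal sketch of driving the streaming token Reader, assuming repo-root paths.
    const { Reader } = require('./src');
    const SSPIToken = require('./src/tokens/sspi');

    // Build the same SSPI token buffer the test constructs:
    // token type 0xED, a UInt16LE payload length, then the raw SSPI bytes.
    const sspiData = 'sspiToken';
    const buffer = Buffer.alloc(sspiData.length + 3);
    let offset = 0;
    buffer.writeUInt8(0xED, offset++);
    buffer.writeUInt16LE(sspiData.length, offset);
    offset += 2;
    buffer.write(sspiData, offset);

    // The Reader is a Transform stream in object mode: raw TDS token bytes go in,
    // parsed token objects come out as 'data' events.
    const reader = new Reader(0x74000004);
    reader.on('data', (token) => {
      if (token instanceof SSPIToken) {
        console.log('SSPI payload:', token.SSPIBuffer.toString());
      }
    });
    reader.on('error', (err) => console.error(err));
    reader.on('end', () => console.log('token stream fully parsed'));
    reader.end(buffer);

The same pattern applies to the RETURNVALUE parsing added earlier in the series: the reader can be fed a token buffer in several write() calls (as the multi-chunk varbinary(max) and nvarchar(max) tests do), and the stash-based parser resumes wherever it left off once enough bytes are available.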