diff --git a/node_modules/.bin/mkdirp b/node_modules/.bin/mkdirp new file mode 120000 index 0000000..017896c --- /dev/null +++ b/node_modules/.bin/mkdirp @@ -0,0 +1 @@ +../mkdirp/bin/cmd.js \ No newline at end of file diff --git a/node_modules/append-field/.npmignore b/node_modules/append-field/.npmignore new file mode 100644 index 0000000..c2658d7 --- /dev/null +++ b/node_modules/append-field/.npmignore @@ -0,0 +1 @@ +node_modules/ diff --git a/node_modules/append-field/LICENSE b/node_modules/append-field/LICENSE new file mode 100644 index 0000000..14b1f89 --- /dev/null +++ b/node_modules/append-field/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Linus Unnebäck + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/append-field/README.md b/node_modules/append-field/README.md new file mode 100644 index 0000000..62b901b --- /dev/null +++ b/node_modules/append-field/README.md @@ -0,0 +1,44 @@ +# `append-field` + +A [W3C HTML JSON forms spec](http://www.w3.org/TR/html-json-forms/) compliant +field appender (for lack of a better name). Useful for people implementing +`application/x-www-form-urlencoded` and `multipart/form-data` parsers. + +It works best on objects created with `Object.create(null)`. Otherwise it might +conflict with variables from the prototype (e.g. `hasOwnProperty`). + +## Installation + +```sh +npm install --save append-field +``` + +## Usage + +```javascript +var appendField = require('append-field') +var obj = Object.create(null) + +appendField(obj, 'pets[0][species]', 'Dahut') +appendField(obj, 'pets[0][name]', 'Hypatia') +appendField(obj, 'pets[1][species]', 'Felis Stultus') +appendField(obj, 'pets[1][name]', 'Billie') + +console.log(obj) +``` + +```text +{ pets: + [ { species: 'Dahut', name: 'Hypatia' }, + { species: 'Felis Stultus', name: 'Billie' } ] } +``` + +## API + +### `appendField(store, key, value)` + +Adds the field named `key` with the value `value` to the object `store`. 
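As a supplementary illustration (an editor's sketch, not part of the upstream README), the snippet below shows how repeated keys and the explicit `[]` append syntax behave, based on the `lib/set-value.js` logic included further down in this diff; the key names are illustrative only.

```javascript
var appendField = require('append-field')
var store = Object.create(null)

// Repeating a plain key collects the values into an array
appendField(store, 'tag', 'js')
appendField(store, 'tag', 'node')
// store.tag => [ 'js', 'node' ]

// A trailing [] forces an array even for a single value
appendField(store, 'ids[]', '42')
// store.ids => [ '42' ]
```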
+ +## License + +MIT diff --git a/node_modules/append-field/index.js b/node_modules/append-field/index.js new file mode 100644 index 0000000..fc5acc8 --- /dev/null +++ b/node_modules/append-field/index.js @@ -0,0 +1,12 @@ +var parsePath = require('./lib/parse-path') +var setValue = require('./lib/set-value') + +function appendField (store, key, value) { + var steps = parsePath(key) + + steps.reduce(function (context, step) { + return setValue(context, step, context[step.key], value) + }, store) +} + +module.exports = appendField diff --git a/node_modules/append-field/lib/parse-path.js b/node_modules/append-field/lib/parse-path.js new file mode 100644 index 0000000..31d6179 --- /dev/null +++ b/node_modules/append-field/lib/parse-path.js @@ -0,0 +1,53 @@ +var reFirstKey = /^[^\[]*/ +var reDigitPath = /^\[(\d+)\]/ +var reNormalPath = /^\[([^\]]+)\]/ + +function parsePath (key) { + function failure () { + return [{ type: 'object', key: key, last: true }] + } + + var firstKey = reFirstKey.exec(key)[0] + if (!firstKey) return failure() + + var len = key.length + var pos = firstKey.length + var tail = { type: 'object', key: firstKey } + var steps = [tail] + + while (pos < len) { + var m + + if (key[pos] === '[' && key[pos + 1] === ']') { + pos += 2 + tail.append = true + if (pos !== len) return failure() + continue + } + + m = reDigitPath.exec(key.substring(pos)) + if (m !== null) { + pos += m[0].length + tail.nextType = 'array' + tail = { type: 'array', key: parseInt(m[1], 10) } + steps.push(tail) + continue + } + + m = reNormalPath.exec(key.substring(pos)) + if (m !== null) { + pos += m[0].length + tail.nextType = 'object' + tail = { type: 'object', key: m[1] } + steps.push(tail) + continue + } + + return failure() + } + + tail.last = true + return steps +} + +module.exports = parsePath diff --git a/node_modules/append-field/lib/set-value.js b/node_modules/append-field/lib/set-value.js new file mode 100644 index 0000000..c15e873 --- /dev/null +++ b/node_modules/append-field/lib/set-value.js @@ -0,0 +1,64 @@ +function valueType (value) { + if (value === undefined) return 'undefined' + if (Array.isArray(value)) return 'array' + if (typeof value === 'object') return 'object' + return 'scalar' +} + +function setLastValue (context, step, currentValue, entryValue) { + switch (valueType(currentValue)) { + case 'undefined': + if (step.append) { + context[step.key] = [entryValue] + } else { + context[step.key] = entryValue + } + break + case 'array': + context[step.key].push(entryValue) + break + case 'object': + return setLastValue(currentValue, { type: 'object', key: '', last: true }, currentValue[''], entryValue) + case 'scalar': + context[step.key] = [context[step.key], entryValue] + break + } + + return context +} + +function setValue (context, step, currentValue, entryValue) { + if (step.last) return setLastValue(context, step, currentValue, entryValue) + + var obj + switch (valueType(currentValue)) { + case 'undefined': + if (step.nextType === 'array') { + context[step.key] = [] + } else { + context[step.key] = Object.create(null) + } + return context[step.key] + case 'object': + return context[step.key] + case 'array': + if (step.nextType === 'array') { + return currentValue + } + + obj = Object.create(null) + context[step.key] = obj + currentValue.forEach(function (item, i) { + if (item !== undefined) obj['' + i] = item + }) + + return obj + case 'scalar': + obj = Object.create(null) + obj[''] = currentValue + context[step.key] = obj + return obj + } +} + +module.exports = setValue diff --git 
a/node_modules/append-field/package.json b/node_modules/append-field/package.json new file mode 100644 index 0000000..8d6e716 --- /dev/null +++ b/node_modules/append-field/package.json @@ -0,0 +1,19 @@ +{ + "name": "append-field", + "version": "1.0.0", + "license": "MIT", + "author": "Linus Unnebäck ", + "main": "index.js", + "devDependencies": { + "mocha": "^2.2.4", + "standard": "^6.0.5", + "testdata-w3c-json-form": "^0.2.0" + }, + "scripts": { + "test": "standard && mocha" + }, + "repository": { + "type": "git", + "url": "http://github.com/LinusU/node-append-field.git" + } +} diff --git a/node_modules/append-field/test/forms.js b/node_modules/append-field/test/forms.js new file mode 100644 index 0000000..dd6fbc9 --- /dev/null +++ b/node_modules/append-field/test/forms.js @@ -0,0 +1,19 @@ +/* eslint-env mocha */ + +var assert = require('assert') +var appendField = require('../') +var testData = require('testdata-w3c-json-form') + +describe('Append Field', function () { + for (var test of testData) { + it('handles ' + test.name, function () { + var store = Object.create(null) + + for (var field of test.fields) { + appendField(store, field.key, field.value) + } + + assert.deepEqual(store, test.expected) + }) + } +}) diff --git a/node_modules/buffer-from/LICENSE b/node_modules/buffer-from/LICENSE new file mode 100644 index 0000000..e4bf1d6 --- /dev/null +++ b/node_modules/buffer-from/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2016, 2018 Linus Unnebäck + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/node_modules/buffer-from/index.js b/node_modules/buffer-from/index.js new file mode 100644 index 0000000..e1a58b5 --- /dev/null +++ b/node_modules/buffer-from/index.js @@ -0,0 +1,72 @@ +/* eslint-disable node/no-deprecated-api */ + +var toString = Object.prototype.toString + +var isModern = ( + typeof Buffer !== 'undefined' && + typeof Buffer.alloc === 'function' && + typeof Buffer.allocUnsafe === 'function' && + typeof Buffer.from === 'function' +) + +function isArrayBuffer (input) { + return toString.call(input).slice(8, -1) === 'ArrayBuffer' +} + +function fromArrayBuffer (obj, byteOffset, length) { + byteOffset >>>= 0 + + var maxLength = obj.byteLength - byteOffset + + if (maxLength < 0) { + throw new RangeError("'offset' is out of bounds") + } + + if (length === undefined) { + length = maxLength + } else { + length >>>= 0 + + if (length > maxLength) { + throw new RangeError("'length' is out of bounds") + } + } + + return isModern + ? 
Buffer.from(obj.slice(byteOffset, byteOffset + length)) + : new Buffer(new Uint8Array(obj.slice(byteOffset, byteOffset + length))) +} + +function fromString (string, encoding) { + if (typeof encoding !== 'string' || encoding === '') { + encoding = 'utf8' + } + + if (!Buffer.isEncoding(encoding)) { + throw new TypeError('"encoding" must be a valid string encoding') + } + + return isModern + ? Buffer.from(string, encoding) + : new Buffer(string, encoding) +} + +function bufferFrom (value, encodingOrOffset, length) { + if (typeof value === 'number') { + throw new TypeError('"value" argument must not be a number') + } + + if (isArrayBuffer(value)) { + return fromArrayBuffer(value, encodingOrOffset, length) + } + + if (typeof value === 'string') { + return fromString(value, encodingOrOffset) + } + + return isModern + ? Buffer.from(value) + : new Buffer(value) +} + +module.exports = bufferFrom diff --git a/node_modules/buffer-from/package.json b/node_modules/buffer-from/package.json new file mode 100644 index 0000000..6ac5327 --- /dev/null +++ b/node_modules/buffer-from/package.json @@ -0,0 +1,19 @@ +{ + "name": "buffer-from", + "version": "1.1.2", + "license": "MIT", + "repository": "LinusU/buffer-from", + "files": [ + "index.js" + ], + "scripts": { + "test": "standard && node test" + }, + "devDependencies": { + "standard": "^12.0.1" + }, + "keywords": [ + "buffer", + "buffer from" + ] +} diff --git a/node_modules/buffer-from/readme.md b/node_modules/buffer-from/readme.md new file mode 100644 index 0000000..9880a55 --- /dev/null +++ b/node_modules/buffer-from/readme.md @@ -0,0 +1,69 @@ +# Buffer From + +A [ponyfill](https://ponyfill.com) for `Buffer.from`, uses native implementation if available. + +## Installation + +```sh +npm install --save buffer-from +``` + +## Usage + +```js +const bufferFrom = require('buffer-from') + +console.log(bufferFrom([1, 2, 3, 4])) +//=> + +const arr = new Uint8Array([1, 2, 3, 4]) +console.log(bufferFrom(arr.buffer, 1, 2)) +//=> + +console.log(bufferFrom('test', 'utf8')) +//=> + +const buf = bufferFrom('test') +console.log(bufferFrom(buf)) +//=> +``` + +## API + +### bufferFrom(array) + +- `array` <Array> + +Allocates a new `Buffer` using an `array` of octets. + +### bufferFrom(arrayBuffer[, byteOffset[, length]]) + +- `arrayBuffer` <ArrayBuffer> The `.buffer` property of a TypedArray or ArrayBuffer +- `byteOffset` <Integer> Where to start copying from `arrayBuffer`. **Default:** `0` +- `length` <Integer> How many bytes to copy from `arrayBuffer`. **Default:** `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a TypedArray instance, the +newly created `Buffer` will share the same allocated memory as the TypedArray. + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +### bufferFrom(buffer) + +- `buffer` <Buffer> An existing `Buffer` to copy data from + +Copies the passed `buffer` data onto a new `Buffer` instance. + +### bufferFrom(string[, encoding]) + +- `string` <String> A string to encode. +- `encoding` <String> The encoding of `string`. **Default:** `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `string`. If +provided, the `encoding` parameter identifies the character encoding of +`string`. 
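As a brief usage sketch (an editor's addition, not upstream documentation): the ponyfill is typically used as a drop-in replacement for the deprecated `new Buffer(...)` constructor on older Node versions; the `toBase64` helper below is purely illustrative.

```js
const bufferFrom = require('buffer-from')

// Illustrative helper (not part of this package): encode a UTF-8 string as base64
// without calling the deprecated `new Buffer(string)` constructor.
function toBase64 (str) {
  return bufferFrom(str, 'utf8').toString('base64')
}

console.log(toBase64('test')) //=> 'dGVzdA=='
```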
+ +## See also + +- [buffer-alloc](https://github.com/LinusU/buffer-alloc) A ponyfill for `Buffer.alloc` +- [buffer-alloc-unsafe](https://github.com/LinusU/buffer-alloc-unsafe) A ponyfill for `Buffer.allocUnsafe` diff --git a/node_modules/busboy/.eslintrc.js b/node_modules/busboy/.eslintrc.js new file mode 100644 index 0000000..be9311d --- /dev/null +++ b/node_modules/busboy/.eslintrc.js @@ -0,0 +1,5 @@ +'use strict'; + +module.exports = { + extends: '@mscdex/eslint-config', +}; diff --git a/node_modules/busboy/.github/workflows/ci.yml b/node_modules/busboy/.github/workflows/ci.yml new file mode 100644 index 0000000..799bae0 --- /dev/null +++ b/node_modules/busboy/.github/workflows/ci.yml @@ -0,0 +1,24 @@ +name: CI + +on: + pull_request: + push: + branches: [ master ] + +jobs: + tests-linux: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + node-version: [10.16.0, 10.x, 12.x, 14.x, 16.x] + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - name: Install module + run: npm install + - name: Run tests + run: npm test diff --git a/node_modules/busboy/.github/workflows/lint.yml b/node_modules/busboy/.github/workflows/lint.yml new file mode 100644 index 0000000..9f9e1f5 --- /dev/null +++ b/node_modules/busboy/.github/workflows/lint.yml @@ -0,0 +1,23 @@ +name: lint + +on: + pull_request: + push: + branches: [ master ] + +env: + NODE_VERSION: 16.x + +jobs: + lint-js: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ env.NODE_VERSION }} + uses: actions/setup-node@v1 + with: + node-version: ${{ env.NODE_VERSION }} + - name: Install ESLint + ESLint configs/plugins + run: npm install --only=dev + - name: Lint files + run: npm run lint diff --git a/node_modules/busboy/LICENSE b/node_modules/busboy/LICENSE new file mode 100644 index 0000000..290762e --- /dev/null +++ b/node_modules/busboy/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. \ No newline at end of file diff --git a/node_modules/busboy/README.md b/node_modules/busboy/README.md new file mode 100644 index 0000000..654af30 --- /dev/null +++ b/node_modules/busboy/README.md @@ -0,0 +1,191 @@ +# Description + +A node.js module for parsing incoming HTML form data. + +Changes (breaking or otherwise) in v1.0.0 can be found [here](https://github.com/mscdex/busboy/issues/266). 
+ +# Requirements + +* [node.js](http://nodejs.org/) -- v10.16.0 or newer + + +# Install + + npm install busboy + + +# Examples + +* Parsing (multipart) with default options: + +```js +const http = require('http'); + +const busboy = require('busboy'); + +http.createServer((req, res) => { + if (req.method === 'POST') { + console.log('POST request'); + const bb = busboy({ headers: req.headers }); + bb.on('file', (name, file, info) => { + const { filename, encoding, mimeType } = info; + console.log( + `File [${name}]: filename: %j, encoding: %j, mimeType: %j`, + filename, + encoding, + mimeType + ); + file.on('data', (data) => { + console.log(`File [${name}] got ${data.length} bytes`); + }).on('close', () => { + console.log(`File [${name}] done`); + }); + }); + bb.on('field', (name, val, info) => { + console.log(`Field [${name}]: value: %j`, val); + }); + bb.on('close', () => { + console.log('Done parsing form!'); + res.writeHead(303, { Connection: 'close', Location: '/' }); + res.end(); + }); + req.pipe(bb); + } else if (req.method === 'GET') { + res.writeHead(200, { Connection: 'close' }); + res.end(` + + + +
+      <html>
+        <head></head>
+        <body>
+          <form method="POST" enctype="multipart/form-data">
+            <input type="file" name="filefield"><br />
+            <input type="text" name="textfield"><br />
+            <input type="submit">
+          </form>
+        </body>
+      </html>
+ + + `); + } +}).listen(8000, () => { + console.log('Listening for requests'); +}); + +// Example output: +// +// Listening for requests +// < ... form submitted ... > +// POST request +// File [filefield]: filename: "logo.jpg", encoding: "binary", mime: "image/jpeg" +// File [filefield] got 11912 bytes +// Field [textfield]: value: "testing! :-)" +// File [filefield] done +// Done parsing form! +``` + +* Save all incoming files to disk: + +```js +const { randomFillSync } = require('crypto'); +const fs = require('fs'); +const http = require('http'); +const os = require('os'); +const path = require('path'); + +const busboy = require('busboy'); + +const random = (() => { + const buf = Buffer.alloc(16); + return () => randomFillSync(buf).toString('hex'); +})(); + +http.createServer((req, res) => { + if (req.method === 'POST') { + const bb = busboy({ headers: req.headers }); + bb.on('file', (name, file, info) => { + const saveTo = path.join(os.tmpdir(), `busboy-upload-${random()}`); + file.pipe(fs.createWriteStream(saveTo)); + }); + bb.on('close', () => { + res.writeHead(200, { 'Connection': 'close' }); + res.end(`That's all folks!`); + }); + req.pipe(bb); + return; + } + res.writeHead(404); + res.end(); +}).listen(8000, () => { + console.log('Listening for requests'); +}); +``` + + +# API + +## Exports + +`busboy` exports a single function: + +**( _function_ )**(< _object_ >config) - Creates and returns a new _Writable_ form parser stream. + +* Valid `config` properties: + + * **headers** - _object_ - These are the HTTP headers of the incoming request, which are used by individual parsers. + + * **highWaterMark** - _integer_ - highWaterMark to use for the parser stream. **Default:** node's _stream.Writable_ default. + + * **fileHwm** - _integer_ - highWaterMark to use for individual file streams. **Default:** node's _stream.Readable_ default. + + * **defCharset** - _string_ - Default character set to use when one isn't defined. **Default:** `'utf8'`. + + * **defParamCharset** - _string_ - For multipart forms, the default character set to use for values of part header parameters (e.g. filename) that are not extended parameters (that contain an explicit charset). **Default:** `'latin1'`. + + * **preservePath** - _boolean_ - If paths in filenames from file parts in a `'multipart/form-data'` request shall be preserved. **Default:** `false`. + + * **limits** - _object_ - Various limits on incoming data. Valid properties are: + + * **fieldNameSize** - _integer_ - Max field name size (in bytes). **Default:** `100`. + + * **fieldSize** - _integer_ - Max field value size (in bytes). **Default:** `1048576` (1MB). + + * **fields** - _integer_ - Max number of non-file fields. **Default:** `Infinity`. + + * **fileSize** - _integer_ - For multipart forms, the max file size (in bytes). **Default:** `Infinity`. + + * **files** - _integer_ - For multipart forms, the max number of file fields. **Default:** `Infinity`. + + * **parts** - _integer_ - For multipart forms, the max number of parts (fields + files). **Default:** `Infinity`. + + * **headerPairs** - _integer_ - For multipart forms, the max number of header key-value pairs to parse. **Default:** `2000` (same as node's http module). + +This function can throw exceptions if there is something wrong with the values in `config`. For example, if the Content-Type in `headers` is missing entirely, is not a supported type, or is missing the boundary for `'multipart/form-data'` requests. 
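To make the `limits` options above concrete, here is a brief editor's sketch (not from the upstream README); the specific limit values and the `req` object (assumed to be an `http.IncomingMessage`) are illustrative assumptions.

```js
const busboy = require('busboy');

// `req` is assumed to be an incoming HTTP request (http.IncomingMessage).
const bb = busboy({
  headers: req.headers,
  limits: {
    fileSize: 10 * 1024 * 1024, // example: cap each uploaded file at 10 MB
    files: 5,                   // example: at most 5 file fields
    fields: 20,                 // example: at most 20 non-file fields
  },
});
bb.on('file', (name, file, info) => {
  // 'limit' fires on the file stream when the fileSize limit is reached
  file.on('limit', () => console.log(`File [${name}] hit the fileSize limit`));
  file.resume(); // always drain the stream
});
req.pipe(bb);
```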
+ +## (Special) Parser stream events + +* **file**(< _string_ >name, < _Readable_ >stream, < _object_ >info) - Emitted for each new file found. `name` contains the form field name. `stream` is a _Readable_ stream containing the file's data. No transformations/conversions (e.g. base64 to raw binary) are done on the file's data. `info` contains the following properties: + + * `filename` - _string_ - If supplied, this contains the file's filename. **WARNING:** You should almost _never_ use this value as-is (especially if you are using `preservePath: true` in your `config`) as it could contain malicious input. You are better off generating your own (safe) filenames, or at the very least using a hash of the filename. + + * `encoding` - _string_ - The file's `'Content-Transfer-Encoding'` value. + + * `mimeType` - _string_ - The file's `'Content-Type'` value. + + **Note:** If you listen for this event, you should always consume the `stream` whether you care about its contents or not (you can simply do `stream.resume();` if you want to discard/skip the contents), otherwise the `'finish'`/`'close'` event will never fire on the busboy parser stream. + However, if you aren't accepting files, you can either simply not listen for the `'file'` event at all or set `limits.files` to `0`, and any/all files will be automatically skipped (these skipped files will still count towards any configured `limits.files` and `limits.parts` limits though). + + **Note:** If a configured `limits.fileSize` limit was reached for a file, `stream` will both have a boolean property `truncated` set to `true` (best checked at the end of the stream) and emit a `'limit'` event to notify you when this happens. + +* **field**(< _string_ >name, < _string_ >value, < _object_ >info) - Emitted for each new non-file field found. `name` contains the form field name. `value` contains the string value of the field. `info` contains the following properties: + + * `nameTruncated` - _boolean_ - Whether `name` was truncated or not (due to a configured `limits.fieldNameSize` limit) + + * `valueTruncated` - _boolean_ - Whether `value` was truncated or not (due to a configured `limits.fieldSize` limit) + + * `encoding` - _string_ - The field's `'Content-Transfer-Encoding'` value. + + * `mimeType` - _string_ - The field's `'Content-Type'` value. + +* **partsLimit**() - Emitted when the configured `limits.parts` limit has been reached. No more `'file'` or `'field'` events will be emitted. + +* **filesLimit**() - Emitted when the configured `limits.files` limit has been reached. No more `'file'` events will be emitted. + +* **fieldsLimit**() - Emitted when the configured `limits.fields` limit has been reached. No more `'field'` events will be emitted. 
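A short editor's sketch (not upstream documentation) of the stream-consumption rule described in the notes above; the field name `'avatar'`, the `bb` parser instance, and the destination path are illustrative assumptions.

```js
const fs = require('fs');

bb.on('file', (name, file, info) => {
  if (name !== 'avatar') {
    // Not a file we care about: still drain it so 'close'/'finish' can fire.
    file.resume();
    return;
  }
  file.pipe(fs.createWriteStream('/tmp/avatar.upload'));
});
```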
diff --git a/node_modules/busboy/bench/bench-multipart-fields-100mb-big.js b/node_modules/busboy/bench/bench-multipart-fields-100mb-big.js new file mode 100644 index 0000000..ef15729 --- /dev/null +++ b/node_modules/busboy/bench/bench-multipart-fields-100mb-big.js @@ -0,0 +1,149 @@ +'use strict'; + +function createMultipartBuffers(boundary, sizes) { + const bufs = []; + for (let i = 0; i < sizes.length; ++i) { + const mb = sizes[i] * 1024 * 1024; + bufs.push(Buffer.from([ + `--${boundary}`, + `content-disposition: form-data; name="field${i + 1}"`, + '', + '0'.repeat(mb), + '', + ].join('\r\n'))); + } + bufs.push(Buffer.from([ + `--${boundary}--`, + '', + ].join('\r\n'))); + return bufs; +} + +const boundary = '-----------------------------168072824752491622650073'; +const buffers = createMultipartBuffers(boundary, [ + 10, + 10, + 10, + 20, + 50, +]); +const calls = { + partBegin: 0, + headerField: 0, + headerValue: 0, + headerEnd: 0, + headersEnd: 0, + partData: 0, + partEnd: 0, + end: 0, +}; + +const moduleName = process.argv[2]; +switch (moduleName) { + case 'busboy': { + const busboy = require('busboy'); + + const parser = busboy({ + limits: { + fieldSizeLimit: Infinity, + }, + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }, + }); + parser.on('field', (name, val, info) => { + ++calls.partBegin; + ++calls.partData; + ++calls.partEnd; + }).on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + break; + } + + case 'formidable': { + const { MultipartParser } = require('formidable'); + + const parser = new MultipartParser(); + parser.initWithBoundary(boundary); + parser.on('data', ({ name }) => { + ++calls[name]; + if (name === 'end') + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + + break; + } + + case 'multiparty': { + const { Readable } = require('stream'); + + const { Form } = require('multiparty'); + + const form = new Form({ + maxFieldsSize: Infinity, + maxFields: Infinity, + maxFilesSize: Infinity, + autoFields: false, + autoFiles: false, + }); + + const req = new Readable({ read: () => {} }); + req.headers = { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }; + + function hijack(name, fn) { + const oldFn = form[name]; + form[name] = function() { + fn(); + return oldFn.apply(this, arguments); + }; + } + + hijack('onParseHeaderField', () => { + ++calls.headerField; + }); + hijack('onParseHeaderValue', () => { + ++calls.headerValue; + }); + hijack('onParsePartBegin', () => { + ++calls.partBegin; + }); + hijack('onParsePartData', () => { + ++calls.partData; + }); + hijack('onParsePartEnd', () => { + ++calls.partEnd; + }); + + form.on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }).on('part', (p) => p.resume()); + + console.time(moduleName); + form.parse(req); + for (const buf of buffers) + req.push(buf); + req.push(null); + + break; + } + + default: + if (moduleName === undefined) + console.error('Missing parser module name'); + else + console.error(`Invalid parser module name: ${moduleName}`); + process.exit(1); +} diff --git a/node_modules/busboy/bench/bench-multipart-fields-100mb-small.js b/node_modules/busboy/bench/bench-multipart-fields-100mb-small.js new file mode 100644 index 0000000..f32d421 --- /dev/null +++ b/node_modules/busboy/bench/bench-multipart-fields-100mb-small.js @@ -0,0 +1,143 @@ +'use strict'; + +function 
createMultipartBuffers(boundary, sizes) { + const bufs = []; + for (let i = 0; i < sizes.length; ++i) { + const mb = sizes[i] * 1024 * 1024; + bufs.push(Buffer.from([ + `--${boundary}`, + `content-disposition: form-data; name="field${i + 1}"`, + '', + '0'.repeat(mb), + '', + ].join('\r\n'))); + } + bufs.push(Buffer.from([ + `--${boundary}--`, + '', + ].join('\r\n'))); + return bufs; +} + +const boundary = '-----------------------------168072824752491622650073'; +const buffers = createMultipartBuffers(boundary, (new Array(100)).fill(1)); +const calls = { + partBegin: 0, + headerField: 0, + headerValue: 0, + headerEnd: 0, + headersEnd: 0, + partData: 0, + partEnd: 0, + end: 0, +}; + +const moduleName = process.argv[2]; +switch (moduleName) { + case 'busboy': { + const busboy = require('busboy'); + + const parser = busboy({ + limits: { + fieldSizeLimit: Infinity, + }, + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }, + }); + parser.on('field', (name, val, info) => { + ++calls.partBegin; + ++calls.partData; + ++calls.partEnd; + }).on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + break; + } + + case 'formidable': { + const { MultipartParser } = require('formidable'); + + const parser = new MultipartParser(); + parser.initWithBoundary(boundary); + parser.on('data', ({ name }) => { + ++calls[name]; + if (name === 'end') + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + + break; + } + + case 'multiparty': { + const { Readable } = require('stream'); + + const { Form } = require('multiparty'); + + const form = new Form({ + maxFieldsSize: Infinity, + maxFields: Infinity, + maxFilesSize: Infinity, + autoFields: false, + autoFiles: false, + }); + + const req = new Readable({ read: () => {} }); + req.headers = { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }; + + function hijack(name, fn) { + const oldFn = form[name]; + form[name] = function() { + fn(); + return oldFn.apply(this, arguments); + }; + } + + hijack('onParseHeaderField', () => { + ++calls.headerField; + }); + hijack('onParseHeaderValue', () => { + ++calls.headerValue; + }); + hijack('onParsePartBegin', () => { + ++calls.partBegin; + }); + hijack('onParsePartData', () => { + ++calls.partData; + }); + hijack('onParsePartEnd', () => { + ++calls.partEnd; + }); + + form.on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }).on('part', (p) => p.resume()); + + console.time(moduleName); + form.parse(req); + for (const buf of buffers) + req.push(buf); + req.push(null); + + break; + } + + default: + if (moduleName === undefined) + console.error('Missing parser module name'); + else + console.error(`Invalid parser module name: ${moduleName}`); + process.exit(1); +} diff --git a/node_modules/busboy/bench/bench-multipart-files-100mb-big.js b/node_modules/busboy/bench/bench-multipart-files-100mb-big.js new file mode 100644 index 0000000..b46bdee --- /dev/null +++ b/node_modules/busboy/bench/bench-multipart-files-100mb-big.js @@ -0,0 +1,154 @@ +'use strict'; + +function createMultipartBuffers(boundary, sizes) { + const bufs = []; + for (let i = 0; i < sizes.length; ++i) { + const mb = sizes[i] * 1024 * 1024; + bufs.push(Buffer.from([ + `--${boundary}`, + `content-disposition: form-data; name="file${i + 1}"; ` + + `filename="random${i + 1}.bin"`, + 'content-type: application/octet-stream', + '', + '0'.repeat(mb), + '', + 
].join('\r\n'))); + } + bufs.push(Buffer.from([ + `--${boundary}--`, + '', + ].join('\r\n'))); + return bufs; +} + +const boundary = '-----------------------------168072824752491622650073'; +const buffers = createMultipartBuffers(boundary, [ + 10, + 10, + 10, + 20, + 50, +]); +const calls = { + partBegin: 0, + headerField: 0, + headerValue: 0, + headerEnd: 0, + headersEnd: 0, + partData: 0, + partEnd: 0, + end: 0, +}; + +const moduleName = process.argv[2]; +switch (moduleName) { + case 'busboy': { + const busboy = require('busboy'); + + const parser = busboy({ + limits: { + fieldSizeLimit: Infinity, + }, + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }, + }); + parser.on('file', (name, stream, info) => { + ++calls.partBegin; + stream.on('data', (chunk) => { + ++calls.partData; + }).on('end', () => { + ++calls.partEnd; + }); + }).on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + break; + } + + case 'formidable': { + const { MultipartParser } = require('formidable'); + + const parser = new MultipartParser(); + parser.initWithBoundary(boundary); + parser.on('data', ({ name }) => { + ++calls[name]; + if (name === 'end') + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + + break; + } + + case 'multiparty': { + const { Readable } = require('stream'); + + const { Form } = require('multiparty'); + + const form = new Form({ + maxFieldsSize: Infinity, + maxFields: Infinity, + maxFilesSize: Infinity, + autoFields: false, + autoFiles: false, + }); + + const req = new Readable({ read: () => {} }); + req.headers = { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }; + + function hijack(name, fn) { + const oldFn = form[name]; + form[name] = function() { + fn(); + return oldFn.apply(this, arguments); + }; + } + + hijack('onParseHeaderField', () => { + ++calls.headerField; + }); + hijack('onParseHeaderValue', () => { + ++calls.headerValue; + }); + hijack('onParsePartBegin', () => { + ++calls.partBegin; + }); + hijack('onParsePartData', () => { + ++calls.partData; + }); + hijack('onParsePartEnd', () => { + ++calls.partEnd; + }); + + form.on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }).on('part', (p) => p.resume()); + + console.time(moduleName); + form.parse(req); + for (const buf of buffers) + req.push(buf); + req.push(null); + + break; + } + + default: + if (moduleName === undefined) + console.error('Missing parser module name'); + else + console.error(`Invalid parser module name: ${moduleName}`); + process.exit(1); +} diff --git a/node_modules/busboy/bench/bench-multipart-files-100mb-small.js b/node_modules/busboy/bench/bench-multipart-files-100mb-small.js new file mode 100644 index 0000000..46b5dff --- /dev/null +++ b/node_modules/busboy/bench/bench-multipart-files-100mb-small.js @@ -0,0 +1,148 @@ +'use strict'; + +function createMultipartBuffers(boundary, sizes) { + const bufs = []; + for (let i = 0; i < sizes.length; ++i) { + const mb = sizes[i] * 1024 * 1024; + bufs.push(Buffer.from([ + `--${boundary}`, + `content-disposition: form-data; name="file${i + 1}"; ` + + `filename="random${i + 1}.bin"`, + 'content-type: application/octet-stream', + '', + '0'.repeat(mb), + '', + ].join('\r\n'))); + } + bufs.push(Buffer.from([ + `--${boundary}--`, + '', + ].join('\r\n'))); + return bufs; +} + +const boundary = '-----------------------------168072824752491622650073'; 
+const buffers = createMultipartBuffers(boundary, (new Array(100)).fill(1)); +const calls = { + partBegin: 0, + headerField: 0, + headerValue: 0, + headerEnd: 0, + headersEnd: 0, + partData: 0, + partEnd: 0, + end: 0, +}; + +const moduleName = process.argv[2]; +switch (moduleName) { + case 'busboy': { + const busboy = require('busboy'); + + const parser = busboy({ + limits: { + fieldSizeLimit: Infinity, + }, + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }, + }); + parser.on('file', (name, stream, info) => { + ++calls.partBegin; + stream.on('data', (chunk) => { + ++calls.partData; + }).on('end', () => { + ++calls.partEnd; + }); + }).on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + break; + } + + case 'formidable': { + const { MultipartParser } = require('formidable'); + + const parser = new MultipartParser(); + parser.initWithBoundary(boundary); + parser.on('data', ({ name }) => { + ++calls[name]; + if (name === 'end') + console.timeEnd(moduleName); + }); + + console.time(moduleName); + for (const buf of buffers) + parser.write(buf); + + break; + } + + case 'multiparty': { + const { Readable } = require('stream'); + + const { Form } = require('multiparty'); + + const form = new Form({ + maxFieldsSize: Infinity, + maxFields: Infinity, + maxFilesSize: Infinity, + autoFields: false, + autoFiles: false, + }); + + const req = new Readable({ read: () => {} }); + req.headers = { + 'content-type': `multipart/form-data; boundary=${boundary}`, + }; + + function hijack(name, fn) { + const oldFn = form[name]; + form[name] = function() { + fn(); + return oldFn.apply(this, arguments); + }; + } + + hijack('onParseHeaderField', () => { + ++calls.headerField; + }); + hijack('onParseHeaderValue', () => { + ++calls.headerValue; + }); + hijack('onParsePartBegin', () => { + ++calls.partBegin; + }); + hijack('onParsePartData', () => { + ++calls.partData; + }); + hijack('onParsePartEnd', () => { + ++calls.partEnd; + }); + + form.on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }).on('part', (p) => p.resume()); + + console.time(moduleName); + form.parse(req); + for (const buf of buffers) + req.push(buf); + req.push(null); + + break; + } + + default: + if (moduleName === undefined) + console.error('Missing parser module name'); + else + console.error(`Invalid parser module name: ${moduleName}`); + process.exit(1); +} diff --git a/node_modules/busboy/bench/bench-urlencoded-fields-100pairs-small.js b/node_modules/busboy/bench/bench-urlencoded-fields-100pairs-small.js new file mode 100644 index 0000000..5c337df --- /dev/null +++ b/node_modules/busboy/bench/bench-urlencoded-fields-100pairs-small.js @@ -0,0 +1,101 @@ +'use strict'; + +const buffers = [ + Buffer.from( + (new Array(100)).fill('').map((_, i) => `key${i}=value${i}`).join('&') + ), +]; +const calls = { + field: 0, + end: 0, +}; + +let n = 3e3; + +const moduleName = process.argv[2]; +switch (moduleName) { + case 'busboy': { + const busboy = require('busboy'); + + console.time(moduleName); + (function next() { + const parser = busboy({ + limits: { + fieldSizeLimit: Infinity, + }, + headers: { + 'content-type': 'application/x-www-form-urlencoded; charset=utf-8', + }, + }); + parser.on('field', (name, val, info) => { + ++calls.field; + }).on('close', () => { + ++calls.end; + if (--n === 0) + console.timeEnd(moduleName); + else + process.nextTick(next); + }); + + for (const buf of buffers) + parser.write(buf); 
+ parser.end(); + })(); + break; + } + + case 'formidable': { + const QuerystringParser = + require('formidable/src/parsers/Querystring.js'); + + console.time(moduleName); + (function next() { + const parser = new QuerystringParser(); + parser.on('data', (obj) => { + ++calls.field; + }).on('end', () => { + ++calls.end; + if (--n === 0) + console.timeEnd(moduleName); + else + process.nextTick(next); + }); + + for (const buf of buffers) + parser.write(buf); + parser.end(); + })(); + break; + } + + case 'formidable-streaming': { + const QuerystringParser = + require('formidable/src/parsers/StreamingQuerystring.js'); + + console.time(moduleName); + (function next() { + const parser = new QuerystringParser(); + parser.on('data', (obj) => { + ++calls.field; + }).on('end', () => { + ++calls.end; + if (--n === 0) + console.timeEnd(moduleName); + else + process.nextTick(next); + }); + + for (const buf of buffers) + parser.write(buf); + parser.end(); + })(); + break; + } + + default: + if (moduleName === undefined) + console.error('Missing parser module name'); + else + console.error(`Invalid parser module name: ${moduleName}`); + process.exit(1); +} diff --git a/node_modules/busboy/bench/bench-urlencoded-fields-900pairs-small-alt.js b/node_modules/busboy/bench/bench-urlencoded-fields-900pairs-small-alt.js new file mode 100644 index 0000000..1f5645c --- /dev/null +++ b/node_modules/busboy/bench/bench-urlencoded-fields-900pairs-small-alt.js @@ -0,0 +1,84 @@ +'use strict'; + +const buffers = [ + Buffer.from( + (new Array(900)).fill('').map((_, i) => `key${i}=value${i}`).join('&') + ), +]; +const calls = { + field: 0, + end: 0, +}; + +const moduleName = process.argv[2]; +switch (moduleName) { + case 'busboy': { + const busboy = require('busboy'); + + console.time(moduleName); + const parser = busboy({ + limits: { + fieldSizeLimit: Infinity, + }, + headers: { + 'content-type': 'application/x-www-form-urlencoded; charset=utf-8', + }, + }); + parser.on('field', (name, val, info) => { + ++calls.field; + }).on('close', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + for (const buf of buffers) + parser.write(buf); + parser.end(); + break; + } + + case 'formidable': { + const QuerystringParser = + require('formidable/src/parsers/Querystring.js'); + + console.time(moduleName); + const parser = new QuerystringParser(); + parser.on('data', (obj) => { + ++calls.field; + }).on('end', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + for (const buf of buffers) + parser.write(buf); + parser.end(); + break; + } + + case 'formidable-streaming': { + const QuerystringParser = + require('formidable/src/parsers/StreamingQuerystring.js'); + + console.time(moduleName); + const parser = new QuerystringParser(); + parser.on('data', (obj) => { + ++calls.field; + }).on('end', () => { + ++calls.end; + console.timeEnd(moduleName); + }); + + for (const buf of buffers) + parser.write(buf); + parser.end(); + break; + } + + default: + if (moduleName === undefined) + console.error('Missing parser module name'); + else + console.error(`Invalid parser module name: ${moduleName}`); + process.exit(1); +} diff --git a/node_modules/busboy/lib/index.js b/node_modules/busboy/lib/index.js new file mode 100644 index 0000000..873272d --- /dev/null +++ b/node_modules/busboy/lib/index.js @@ -0,0 +1,57 @@ +'use strict'; + +const { parseContentType } = require('./utils.js'); + +function getInstance(cfg) { + const headers = cfg.headers; + const conType = parseContentType(headers['content-type']); + if (!conType) + throw 
new Error('Malformed content type'); + + for (const type of TYPES) { + const matched = type.detect(conType); + if (!matched) + continue; + + const instanceCfg = { + limits: cfg.limits, + headers, + conType, + highWaterMark: undefined, + fileHwm: undefined, + defCharset: undefined, + defParamCharset: undefined, + preservePath: false, + }; + if (cfg.highWaterMark) + instanceCfg.highWaterMark = cfg.highWaterMark; + if (cfg.fileHwm) + instanceCfg.fileHwm = cfg.fileHwm; + instanceCfg.defCharset = cfg.defCharset; + instanceCfg.defParamCharset = cfg.defParamCharset; + instanceCfg.preservePath = cfg.preservePath; + return new type(instanceCfg); + } + + throw new Error(`Unsupported content type: ${headers['content-type']}`); +} + +// Note: types are explicitly listed here for easier bundling +// See: https://github.com/mscdex/busboy/issues/121 +const TYPES = [ + require('./types/multipart'), + require('./types/urlencoded'), +].filter(function(typemod) { return typeof typemod.detect === 'function'; }); + +module.exports = (cfg) => { + if (typeof cfg !== 'object' || cfg === null) + cfg = {}; + + if (typeof cfg.headers !== 'object' + || cfg.headers === null + || typeof cfg.headers['content-type'] !== 'string') { + throw new Error('Missing Content-Type'); + } + + return getInstance(cfg); +}; diff --git a/node_modules/busboy/lib/types/multipart.js b/node_modules/busboy/lib/types/multipart.js new file mode 100644 index 0000000..cc0d7bb --- /dev/null +++ b/node_modules/busboy/lib/types/multipart.js @@ -0,0 +1,653 @@ +'use strict'; + +const { Readable, Writable } = require('stream'); + +const StreamSearch = require('streamsearch'); + +const { + basename, + convertToUTF8, + getDecoder, + parseContentType, + parseDisposition, +} = require('../utils.js'); + +const BUF_CRLF = Buffer.from('\r\n'); +const BUF_CR = Buffer.from('\r'); +const BUF_DASH = Buffer.from('-'); + +function noop() {} + +const MAX_HEADER_PAIRS = 2000; // From node +const MAX_HEADER_SIZE = 16 * 1024; // From node (its default value) + +const HPARSER_NAME = 0; +const HPARSER_PRE_OWS = 1; +const HPARSER_VALUE = 2; +class HeaderParser { + constructor(cb) { + this.header = Object.create(null); + this.pairCount = 0; + this.byteCount = 0; + this.state = HPARSER_NAME; + this.name = ''; + this.value = ''; + this.crlf = 0; + this.cb = cb; + } + + reset() { + this.header = Object.create(null); + this.pairCount = 0; + this.byteCount = 0; + this.state = HPARSER_NAME; + this.name = ''; + this.value = ''; + this.crlf = 0; + } + + push(chunk, pos, end) { + let start = pos; + while (pos < end) { + switch (this.state) { + case HPARSER_NAME: { + let done = false; + for (; pos < end; ++pos) { + if (this.byteCount === MAX_HEADER_SIZE) + return -1; + ++this.byteCount; + const code = chunk[pos]; + if (TOKEN[code] !== 1) { + if (code !== 58/* ':' */) + return -1; + this.name += chunk.latin1Slice(start, pos); + if (this.name.length === 0) + return -1; + ++pos; + done = true; + this.state = HPARSER_PRE_OWS; + break; + } + } + if (!done) { + this.name += chunk.latin1Slice(start, pos); + break; + } + // FALLTHROUGH + } + case HPARSER_PRE_OWS: { + // Skip optional whitespace + let done = false; + for (; pos < end; ++pos) { + if (this.byteCount === MAX_HEADER_SIZE) + return -1; + ++this.byteCount; + const code = chunk[pos]; + if (code !== 32/* ' ' */ && code !== 9/* '\t' */) { + start = pos; + done = true; + this.state = HPARSER_VALUE; + break; + } + } + if (!done) + break; + // FALLTHROUGH + } + case HPARSER_VALUE: + switch (this.crlf) { + case 0: // Nothing yet + for 
(; pos < end; ++pos) { + if (this.byteCount === MAX_HEADER_SIZE) + return -1; + ++this.byteCount; + const code = chunk[pos]; + if (FIELD_VCHAR[code] !== 1) { + if (code !== 13/* '\r' */) + return -1; + ++this.crlf; + break; + } + } + this.value += chunk.latin1Slice(start, pos++); + break; + case 1: // Received CR + if (this.byteCount === MAX_HEADER_SIZE) + return -1; + ++this.byteCount; + if (chunk[pos++] !== 10/* '\n' */) + return -1; + ++this.crlf; + break; + case 2: { // Received CR LF + if (this.byteCount === MAX_HEADER_SIZE) + return -1; + ++this.byteCount; + const code = chunk[pos]; + if (code === 32/* ' ' */ || code === 9/* '\t' */) { + // Folded value + start = pos; + this.crlf = 0; + } else { + if (++this.pairCount < MAX_HEADER_PAIRS) { + this.name = this.name.toLowerCase(); + if (this.header[this.name] === undefined) + this.header[this.name] = [this.value]; + else + this.header[this.name].push(this.value); + } + if (code === 13/* '\r' */) { + ++this.crlf; + ++pos; + } else { + // Assume start of next header field name + start = pos; + this.crlf = 0; + this.state = HPARSER_NAME; + this.name = ''; + this.value = ''; + } + } + break; + } + case 3: { // Received CR LF CR + if (this.byteCount === MAX_HEADER_SIZE) + return -1; + ++this.byteCount; + if (chunk[pos++] !== 10/* '\n' */) + return -1; + // End of header + const header = this.header; + this.reset(); + this.cb(header); + return pos; + } + } + break; + } + } + + return pos; + } +} + +class FileStream extends Readable { + constructor(opts, owner) { + super(opts); + this.truncated = false; + this._readcb = null; + this.once('end', () => { + // We need to make sure that we call any outstanding _writecb() that is + // associated with this file so that processing of the rest of the form + // can continue. This may not happen if the file stream ends right after + // backpressure kicks in, so we force it here. + this._read(); + if (--owner._fileEndsLeft === 0 && owner._finalcb) { + const cb = owner._finalcb; + owner._finalcb = null; + // Make sure other 'end' event handlers get a chance to be executed + // before busboy's 'finish' event is emitted + process.nextTick(cb); + } + }); + } + _read(n) { + const cb = this._readcb; + if (cb) { + this._readcb = null; + cb(); + } + } +} + +const ignoreData = { + push: (chunk, pos) => {}, + destroy: () => {}, +}; + +function callAndUnsetCb(self, err) { + const cb = self._writecb; + self._writecb = null; + if (err) + self.destroy(err); + else if (cb) + cb(); +} + +function nullDecoder(val, hint) { + return val; +} + +class Multipart extends Writable { + constructor(cfg) { + const streamOpts = { + autoDestroy: true, + emitClose: true, + highWaterMark: (typeof cfg.highWaterMark === 'number' + ? cfg.highWaterMark + : undefined), + }; + super(streamOpts); + + if (!cfg.conType.params || typeof cfg.conType.params.boundary !== 'string') + throw new Error('Multipart: Boundary not found'); + + const boundary = cfg.conType.params.boundary; + const paramDecoder = (typeof cfg.defParamCharset === 'string' + && cfg.defParamCharset + ? getDecoder(cfg.defParamCharset) + : nullDecoder); + const defCharset = (cfg.defCharset || 'utf8'); + const preservePath = cfg.preservePath; + const fileOpts = { + autoDestroy: true, + emitClose: true, + highWaterMark: (typeof cfg.fileHwm === 'number' + ? cfg.fileHwm + : undefined), + }; + + const limits = cfg.limits; + const fieldSizeLimit = (limits && typeof limits.fieldSize === 'number' + ? 
limits.fieldSize + : 1 * 1024 * 1024); + const fileSizeLimit = (limits && typeof limits.fileSize === 'number' + ? limits.fileSize + : Infinity); + const filesLimit = (limits && typeof limits.files === 'number' + ? limits.files + : Infinity); + const fieldsLimit = (limits && typeof limits.fields === 'number' + ? limits.fields + : Infinity); + const partsLimit = (limits && typeof limits.parts === 'number' + ? limits.parts + : Infinity); + + let parts = -1; // Account for initial boundary + let fields = 0; + let files = 0; + let skipPart = false; + + this._fileEndsLeft = 0; + this._fileStream = undefined; + this._complete = false; + let fileSize = 0; + + let field; + let fieldSize = 0; + let partCharset; + let partEncoding; + let partType; + let partName; + let partTruncated = false; + + let hitFilesLimit = false; + let hitFieldsLimit = false; + + this._hparser = null; + const hparser = new HeaderParser((header) => { + this._hparser = null; + skipPart = false; + + partType = 'text/plain'; + partCharset = defCharset; + partEncoding = '7bit'; + partName = undefined; + partTruncated = false; + + let filename; + if (!header['content-disposition']) { + skipPart = true; + return; + } + + const disp = parseDisposition(header['content-disposition'][0], + paramDecoder); + if (!disp || disp.type !== 'form-data') { + skipPart = true; + return; + } + + if (disp.params) { + if (disp.params.name) + partName = disp.params.name; + + if (disp.params['filename*']) + filename = disp.params['filename*']; + else if (disp.params.filename) + filename = disp.params.filename; + + if (filename !== undefined && !preservePath) + filename = basename(filename); + } + + if (header['content-type']) { + const conType = parseContentType(header['content-type'][0]); + if (conType) { + partType = `${conType.type}/${conType.subtype}`; + if (conType.params && typeof conType.params.charset === 'string') + partCharset = conType.params.charset.toLowerCase(); + } + } + + if (header['content-transfer-encoding']) + partEncoding = header['content-transfer-encoding'][0].toLowerCase(); + + if (partType === 'application/octet-stream' || filename !== undefined) { + // File + + if (files === filesLimit) { + if (!hitFilesLimit) { + hitFilesLimit = true; + this.emit('filesLimit'); + } + skipPart = true; + return; + } + ++files; + + if (this.listenerCount('file') === 0) { + skipPart = true; + return; + } + + fileSize = 0; + this._fileStream = new FileStream(fileOpts, this); + ++this._fileEndsLeft; + this.emit( + 'file', + partName, + this._fileStream, + { filename, + encoding: partEncoding, + mimeType: partType } + ); + } else { + // Non-file + + if (fields === fieldsLimit) { + if (!hitFieldsLimit) { + hitFieldsLimit = true; + this.emit('fieldsLimit'); + } + skipPart = true; + return; + } + ++fields; + + if (this.listenerCount('field') === 0) { + skipPart = true; + return; + } + + field = []; + fieldSize = 0; + } + }); + + let matchPostBoundary = 0; + const ssCb = (isMatch, data, start, end, isDataSafe) => { +retrydata: + while (data) { + if (this._hparser !== null) { + const ret = this._hparser.push(data, start, end); + if (ret === -1) { + this._hparser = null; + hparser.reset(); + this.emit('error', new Error('Malformed part header')); + break; + } + start = ret; + } + + if (start === end) + break; + + if (matchPostBoundary !== 0) { + if (matchPostBoundary === 1) { + switch (data[start]) { + case 45: // '-' + // Try matching '--' after boundary + matchPostBoundary = 2; + ++start; + break; + case 13: // '\r' + // Try matching CR LF before 
header + matchPostBoundary = 3; + ++start; + break; + default: + matchPostBoundary = 0; + } + if (start === end) + return; + } + + if (matchPostBoundary === 2) { + matchPostBoundary = 0; + if (data[start] === 45/* '-' */) { + // End of multipart data + this._complete = true; + this._bparser = ignoreData; + return; + } + // We saw something other than '-', so put the dash we consumed + // "back" + const writecb = this._writecb; + this._writecb = noop; + ssCb(false, BUF_DASH, 0, 1, false); + this._writecb = writecb; + } else if (matchPostBoundary === 3) { + matchPostBoundary = 0; + if (data[start] === 10/* '\n' */) { + ++start; + if (parts >= partsLimit) + break; + // Prepare the header parser + this._hparser = hparser; + if (start === end) + break; + // Process the remaining data as a header + continue retrydata; + } else { + // We saw something other than LF, so put the CR we consumed + // "back" + const writecb = this._writecb; + this._writecb = noop; + ssCb(false, BUF_CR, 0, 1, false); + this._writecb = writecb; + } + } + } + + if (!skipPart) { + if (this._fileStream) { + let chunk; + const actualLen = Math.min(end - start, fileSizeLimit - fileSize); + if (!isDataSafe) { + chunk = Buffer.allocUnsafe(actualLen); + data.copy(chunk, 0, start, start + actualLen); + } else { + chunk = data.slice(start, start + actualLen); + } + + fileSize += chunk.length; + if (fileSize === fileSizeLimit) { + if (chunk.length > 0) + this._fileStream.push(chunk); + this._fileStream.emit('limit'); + this._fileStream.truncated = true; + skipPart = true; + } else if (!this._fileStream.push(chunk)) { + if (this._writecb) + this._fileStream._readcb = this._writecb; + this._writecb = null; + } + } else if (field !== undefined) { + let chunk; + const actualLen = Math.min( + end - start, + fieldSizeLimit - fieldSize + ); + if (!isDataSafe) { + chunk = Buffer.allocUnsafe(actualLen); + data.copy(chunk, 0, start, start + actualLen); + } else { + chunk = data.slice(start, start + actualLen); + } + + fieldSize += actualLen; + field.push(chunk); + if (fieldSize === fieldSizeLimit) { + skipPart = true; + partTruncated = true; + } + } + } + + break; + } + + if (isMatch) { + matchPostBoundary = 1; + + if (this._fileStream) { + // End the active file stream if the previous part was a file + this._fileStream.push(null); + this._fileStream = null; + } else if (field !== undefined) { + let data; + switch (field.length) { + case 0: + data = ''; + break; + case 1: + data = convertToUTF8(field[0], partCharset, 0); + break; + default: + data = convertToUTF8( + Buffer.concat(field, fieldSize), + partCharset, + 0 + ); + } + field = undefined; + fieldSize = 0; + this.emit( + 'field', + partName, + data, + { nameTruncated: false, + valueTruncated: partTruncated, + encoding: partEncoding, + mimeType: partType } + ); + } + + if (++parts === partsLimit) + this.emit('partsLimit'); + } + }; + this._bparser = new StreamSearch(`\r\n--${boundary}`, ssCb); + + this._writecb = null; + this._finalcb = null; + + // Just in case there is no preamble + this.write(BUF_CRLF); + } + + static detect(conType) { + return (conType.type === 'multipart' && conType.subtype === 'form-data'); + } + + _write(chunk, enc, cb) { + this._writecb = cb; + this._bparser.push(chunk, 0); + if (this._writecb) + callAndUnsetCb(this); + } + + _destroy(err, cb) { + this._hparser = null; + this._bparser = ignoreData; + if (!err) + err = checkEndState(this); + const fileStream = this._fileStream; + if (fileStream) { + this._fileStream = null; + fileStream.destroy(err); + } + 
cb(err); + } + + _final(cb) { + this._bparser.destroy(); + if (!this._complete) + return cb(new Error('Unexpected end of form')); + if (this._fileEndsLeft) + this._finalcb = finalcb.bind(null, this, cb); + else + finalcb(this, cb); + } +} + +function finalcb(self, cb, err) { + if (err) + return cb(err); + err = checkEndState(self); + cb(err); +} + +function checkEndState(self) { + if (self._hparser) + return new Error('Malformed part header'); + const fileStream = self._fileStream; + if (fileStream) { + self._fileStream = null; + fileStream.destroy(new Error('Unexpected end of file')); + } + if (!self._complete) + return new Error('Unexpected end of form'); +} + +const TOKEN = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +]; + +const FIELD_VCHAR = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, +]; + +module.exports = Multipart; diff --git a/node_modules/busboy/lib/types/urlencoded.js b/node_modules/busboy/lib/types/urlencoded.js new file mode 100644 index 0000000..5c463a2 --- /dev/null +++ b/node_modules/busboy/lib/types/urlencoded.js @@ -0,0 +1,350 @@ +'use strict'; + +const { Writable } = require('stream'); + +const { getDecoder } = require('../utils.js'); + +class URLEncoded extends Writable { + constructor(cfg) { + const streamOpts = { + autoDestroy: true, + emitClose: true, + highWaterMark: (typeof cfg.highWaterMark === 'number' + ? cfg.highWaterMark + : undefined), + }; + super(streamOpts); + + let charset = (cfg.defCharset || 'utf8'); + if (cfg.conType.params && typeof cfg.conType.params.charset === 'string') + charset = cfg.conType.params.charset; + + this.charset = charset; + + const limits = cfg.limits; + this.fieldSizeLimit = (limits && typeof limits.fieldSize === 'number' + ? limits.fieldSize + : 1 * 1024 * 1024); + this.fieldsLimit = (limits && typeof limits.fields === 'number' + ? limits.fields + : Infinity); + this.fieldNameSizeLimit = ( + limits && typeof limits.fieldNameSize === 'number' + ? 
limits.fieldNameSize + : 100 + ); + + this._inKey = true; + this._keyTrunc = false; + this._valTrunc = false; + this._bytesKey = 0; + this._bytesVal = 0; + this._fields = 0; + this._key = ''; + this._val = ''; + this._byte = -2; + this._lastPos = 0; + this._encode = 0; + this._decoder = getDecoder(charset); + } + + static detect(conType) { + return (conType.type === 'application' + && conType.subtype === 'x-www-form-urlencoded'); + } + + _write(chunk, enc, cb) { + if (this._fields >= this.fieldsLimit) + return cb(); + + let i = 0; + const len = chunk.length; + this._lastPos = 0; + + // Check if we last ended mid-percent-encoded byte + if (this._byte !== -2) { + i = readPctEnc(this, chunk, i, len); + if (i === -1) + return cb(new Error('Malformed urlencoded form')); + if (i >= len) + return cb(); + if (this._inKey) + ++this._bytesKey; + else + ++this._bytesVal; + } + +main: + while (i < len) { + if (this._inKey) { + // Parsing key + + i = skipKeyBytes(this, chunk, i, len); + + while (i < len) { + switch (chunk[i]) { + case 61: // '=' + if (this._lastPos < i) + this._key += chunk.latin1Slice(this._lastPos, i); + this._lastPos = ++i; + this._key = this._decoder(this._key, this._encode); + this._encode = 0; + this._inKey = false; + continue main; + case 38: // '&' + if (this._lastPos < i) + this._key += chunk.latin1Slice(this._lastPos, i); + this._lastPos = ++i; + this._key = this._decoder(this._key, this._encode); + this._encode = 0; + if (this._bytesKey > 0) { + this.emit( + 'field', + this._key, + '', + { nameTruncated: this._keyTrunc, + valueTruncated: false, + encoding: this.charset, + mimeType: 'text/plain' } + ); + } + this._key = ''; + this._val = ''; + this._keyTrunc = false; + this._valTrunc = false; + this._bytesKey = 0; + this._bytesVal = 0; + if (++this._fields >= this.fieldsLimit) { + this.emit('fieldsLimit'); + return cb(); + } + continue; + case 43: // '+' + if (this._lastPos < i) + this._key += chunk.latin1Slice(this._lastPos, i); + this._key += ' '; + this._lastPos = i + 1; + break; + case 37: // '%' + if (this._encode === 0) + this._encode = 1; + if (this._lastPos < i) + this._key += chunk.latin1Slice(this._lastPos, i); + this._lastPos = i + 1; + this._byte = -1; + i = readPctEnc(this, chunk, i + 1, len); + if (i === -1) + return cb(new Error('Malformed urlencoded form')); + if (i >= len) + return cb(); + ++this._bytesKey; + i = skipKeyBytes(this, chunk, i, len); + continue; + } + ++i; + ++this._bytesKey; + i = skipKeyBytes(this, chunk, i, len); + } + if (this._lastPos < i) + this._key += chunk.latin1Slice(this._lastPos, i); + } else { + // Parsing value + + i = skipValBytes(this, chunk, i, len); + + while (i < len) { + switch (chunk[i]) { + case 38: // '&' + if (this._lastPos < i) + this._val += chunk.latin1Slice(this._lastPos, i); + this._lastPos = ++i; + this._inKey = true; + this._val = this._decoder(this._val, this._encode); + this._encode = 0; + if (this._bytesKey > 0 || this._bytesVal > 0) { + this.emit( + 'field', + this._key, + this._val, + { nameTruncated: this._keyTrunc, + valueTruncated: this._valTrunc, + encoding: this.charset, + mimeType: 'text/plain' } + ); + } + this._key = ''; + this._val = ''; + this._keyTrunc = false; + this._valTrunc = false; + this._bytesKey = 0; + this._bytesVal = 0; + if (++this._fields >= this.fieldsLimit) { + this.emit('fieldsLimit'); + return cb(); + } + continue main; + case 43: // '+' + if (this._lastPos < i) + this._val += chunk.latin1Slice(this._lastPos, i); + this._val += ' '; + this._lastPos = i + 1; + break; + case 37: // '%' 
+ if (this._encode === 0) + this._encode = 1; + if (this._lastPos < i) + this._val += chunk.latin1Slice(this._lastPos, i); + this._lastPos = i + 1; + this._byte = -1; + i = readPctEnc(this, chunk, i + 1, len); + if (i === -1) + return cb(new Error('Malformed urlencoded form')); + if (i >= len) + return cb(); + ++this._bytesVal; + i = skipValBytes(this, chunk, i, len); + continue; + } + ++i; + ++this._bytesVal; + i = skipValBytes(this, chunk, i, len); + } + if (this._lastPos < i) + this._val += chunk.latin1Slice(this._lastPos, i); + } + } + + cb(); + } + + _final(cb) { + if (this._byte !== -2) + return cb(new Error('Malformed urlencoded form')); + if (!this._inKey || this._bytesKey > 0 || this._bytesVal > 0) { + if (this._inKey) + this._key = this._decoder(this._key, this._encode); + else + this._val = this._decoder(this._val, this._encode); + this.emit( + 'field', + this._key, + this._val, + { nameTruncated: this._keyTrunc, + valueTruncated: this._valTrunc, + encoding: this.charset, + mimeType: 'text/plain' } + ); + } + cb(); + } +} + +function readPctEnc(self, chunk, pos, len) { + if (pos >= len) + return len; + + if (self._byte === -1) { + // We saw a '%' but no hex characters yet + const hexUpper = HEX_VALUES[chunk[pos++]]; + if (hexUpper === -1) + return -1; + + if (hexUpper >= 8) + self._encode = 2; // Indicate high bits detected + + if (pos < len) { + // Both hex characters are in this chunk + const hexLower = HEX_VALUES[chunk[pos++]]; + if (hexLower === -1) + return -1; + + if (self._inKey) + self._key += String.fromCharCode((hexUpper << 4) + hexLower); + else + self._val += String.fromCharCode((hexUpper << 4) + hexLower); + + self._byte = -2; + self._lastPos = pos; + } else { + // Only one hex character was available in this chunk + self._byte = hexUpper; + } + } else { + // We saw only one hex character so far + const hexLower = HEX_VALUES[chunk[pos++]]; + if (hexLower === -1) + return -1; + + if (self._inKey) + self._key += String.fromCharCode((self._byte << 4) + hexLower); + else + self._val += String.fromCharCode((self._byte << 4) + hexLower); + + self._byte = -2; + self._lastPos = pos; + } + + return pos; +} + +function skipKeyBytes(self, chunk, pos, len) { + // Skip bytes if we've truncated + if (self._bytesKey > self.fieldNameSizeLimit) { + if (!self._keyTrunc) { + if (self._lastPos < pos) + self._key += chunk.latin1Slice(self._lastPos, pos - 1); + } + self._keyTrunc = true; + for (; pos < len; ++pos) { + const code = chunk[pos]; + if (code === 61/* '=' */ || code === 38/* '&' */) + break; + ++self._bytesKey; + } + self._lastPos = pos; + } + + return pos; +} + +function skipValBytes(self, chunk, pos, len) { + // Skip bytes if we've truncated + if (self._bytesVal > self.fieldSizeLimit) { + if (!self._valTrunc) { + if (self._lastPos < pos) + self._val += chunk.latin1Slice(self._lastPos, pos - 1); + } + self._valTrunc = true; + for (; pos < len; ++pos) { + if (chunk[pos] === 38/* '&' */) + break; + ++self._bytesVal; + } + self._lastPos = pos; + } + + return pos; +} + +/* eslint-disable no-multi-spaces */ +const HEX_VALUES = [ + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, + -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 
-1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, +]; +/* eslint-enable no-multi-spaces */ + +module.exports = URLEncoded; diff --git a/node_modules/busboy/lib/utils.js b/node_modules/busboy/lib/utils.js new file mode 100644 index 0000000..8274f6c --- /dev/null +++ b/node_modules/busboy/lib/utils.js @@ -0,0 +1,596 @@ +'use strict'; + +function parseContentType(str) { + if (str.length === 0) + return; + + const params = Object.create(null); + let i = 0; + + // Parse type + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + if (code !== 47/* '/' */ || i === 0) + return; + break; + } + } + // Check for type without subtype + if (i === str.length) + return; + + const type = str.slice(0, i).toLowerCase(); + + // Parse subtype + const subtypeStart = ++i; + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + // Make sure we have a subtype + if (i === subtypeStart) + return; + + if (parseContentTypeParams(str, i, params) === undefined) + return; + break; + } + } + // Make sure we have a subtype + if (i === subtypeStart) + return; + + const subtype = str.slice(subtypeStart, i).toLowerCase(); + + return { type, subtype, params }; +} + +function parseContentTypeParams(str, i, params) { + while (i < str.length) { + // Consume whitespace + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code !== 32/* ' ' */ && code !== 9/* '\t' */) + break; + } + + // Ended on whitespace + if (i === str.length) + break; + + // Check for malformed parameter + if (str.charCodeAt(i++) !== 59/* ';' */) + return; + + // Consume whitespace + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code !== 32/* ' ' */ && code !== 9/* '\t' */) + break; + } + + // Ended on whitespace (malformed) + if (i === str.length) + return; + + let name; + const nameStart = i; + // Parse parameter name + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + if (code !== 61/* '=' */) + return; + break; + } + } + + // No value (malformed) + if (i === str.length) + return; + + name = str.slice(nameStart, i); + ++i; // Skip over '=' + + // No value (malformed) + if (i === str.length) + return; + + let value = ''; + let valueStart; + if (str.charCodeAt(i) === 34/* '"' */) { + valueStart = ++i; + let escaping = false; + // Parse quoted value + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code === 92/* '\\' */) { + if (escaping) { + valueStart = i; + escaping = false; + } else { + value += str.slice(valueStart, i); + escaping = true; + } + continue; + } + if (code === 34/* '"' */) { + if (escaping) { + valueStart = i; + escaping = false; + continue; + } + value += str.slice(valueStart, i); + break; + } + if (escaping) { + valueStart = i - 1; + escaping = false; + } + // Invalid unescaped quoted character (malformed) + if (QDTEXT[code] !== 1) + return; + } + + // No end quote (malformed) + if (i === str.length) + return; + + ++i; // Skip 
over double quote + } else { + valueStart = i; + // Parse unquoted value + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + // No value (malformed) + if (i === valueStart) + return; + break; + } + } + value = str.slice(valueStart, i); + } + + name = name.toLowerCase(); + if (params[name] === undefined) + params[name] = value; + } + + return params; +} + +function parseDisposition(str, defDecoder) { + if (str.length === 0) + return; + + const params = Object.create(null); + let i = 0; + + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + if (parseDispositionParams(str, i, params, defDecoder) === undefined) + return; + break; + } + } + + const type = str.slice(0, i).toLowerCase(); + + return { type, params }; +} + +function parseDispositionParams(str, i, params, defDecoder) { + while (i < str.length) { + // Consume whitespace + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code !== 32/* ' ' */ && code !== 9/* '\t' */) + break; + } + + // Ended on whitespace + if (i === str.length) + break; + + // Check for malformed parameter + if (str.charCodeAt(i++) !== 59/* ';' */) + return; + + // Consume whitespace + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code !== 32/* ' ' */ && code !== 9/* '\t' */) + break; + } + + // Ended on whitespace (malformed) + if (i === str.length) + return; + + let name; + const nameStart = i; + // Parse parameter name + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + if (code === 61/* '=' */) + break; + return; + } + } + + // No value (malformed) + if (i === str.length) + return; + + let value = ''; + let valueStart; + let charset; + //~ let lang; + name = str.slice(nameStart, i); + if (name.charCodeAt(name.length - 1) === 42/* '*' */) { + // Extended value + + const charsetStart = ++i; + // Parse charset name + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (CHARSET[code] !== 1) { + if (code !== 39/* '\'' */) + return; + break; + } + } + + // Incomplete charset (malformed) + if (i === str.length) + return; + + charset = str.slice(charsetStart, i); + ++i; // Skip over the '\'' + + //~ const langStart = ++i; + // Parse language name + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code === 39/* '\'' */) + break; + } + + // Incomplete language (malformed) + if (i === str.length) + return; + + //~ lang = str.slice(langStart, i); + ++i; // Skip over the '\'' + + // No value (malformed) + if (i === str.length) + return; + + valueStart = i; + + let encode = 0; + // Parse value + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (EXTENDED_VALUE[code] !== 1) { + if (code === 37/* '%' */) { + let hexUpper; + let hexLower; + if (i + 2 < str.length + && (hexUpper = HEX_VALUES[str.charCodeAt(i + 1)]) !== -1 + && (hexLower = HEX_VALUES[str.charCodeAt(i + 2)]) !== -1) { + const byteVal = (hexUpper << 4) + hexLower; + value += str.slice(valueStart, i); + value += String.fromCharCode(byteVal); + i += 2; + valueStart = i + 1; + if (byteVal >= 128) + encode = 2; + else if (encode === 0) + encode = 1; + continue; + } + // '%' disallowed in non-percent encoded contexts (malformed) + return; + } + break; + } + } + + value += str.slice(valueStart, i); + value = convertToUTF8(value, charset, encode); + if (value === undefined) + return; + } else { + // Non-extended value + + ++i; // Skip over '=' + + // No value (malformed) + if (i === str.length) + 
return; + + if (str.charCodeAt(i) === 34/* '"' */) { + valueStart = ++i; + let escaping = false; + // Parse quoted value + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (code === 92/* '\\' */) { + if (escaping) { + valueStart = i; + escaping = false; + } else { + value += str.slice(valueStart, i); + escaping = true; + } + continue; + } + if (code === 34/* '"' */) { + if (escaping) { + valueStart = i; + escaping = false; + continue; + } + value += str.slice(valueStart, i); + break; + } + if (escaping) { + valueStart = i - 1; + escaping = false; + } + // Invalid unescaped quoted character (malformed) + if (QDTEXT[code] !== 1) + return; + } + + // No end quote (malformed) + if (i === str.length) + return; + + ++i; // Skip over double quote + } else { + valueStart = i; + // Parse unquoted value + for (; i < str.length; ++i) { + const code = str.charCodeAt(i); + if (TOKEN[code] !== 1) { + // No value (malformed) + if (i === valueStart) + return; + break; + } + } + value = str.slice(valueStart, i); + } + + value = defDecoder(value, 2); + if (value === undefined) + return; + } + + name = name.toLowerCase(); + if (params[name] === undefined) + params[name] = value; + } + + return params; +} + +function getDecoder(charset) { + let lc; + while (true) { + switch (charset) { + case 'utf-8': + case 'utf8': + return decoders.utf8; + case 'latin1': + case 'ascii': // TODO: Make these a separate, strict decoder? + case 'us-ascii': + case 'iso-8859-1': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'windows-1252': + case 'iso_8859-1:1987': + case 'cp1252': + case 'x-cp1252': + return decoders.latin1; + case 'utf16le': + case 'utf-16le': + case 'ucs2': + case 'ucs-2': + return decoders.utf16le; + case 'base64': + return decoders.base64; + default: + if (lc === undefined) { + lc = true; + charset = charset.toLowerCase(); + continue; + } + return decoders.other.bind(charset); + } + } +} + +const decoders = { + utf8: (data, hint) => { + if (data.length === 0) + return ''; + if (typeof data === 'string') { + // If `data` never had any percent-encoded bytes or never had any that + // were outside of the ASCII range, then we can safely just return the + // input since UTF-8 is ASCII compatible + if (hint < 2) + return data; + + data = Buffer.from(data, 'latin1'); + } + return data.utf8Slice(0, data.length); + }, + + latin1: (data, hint) => { + if (data.length === 0) + return ''; + if (typeof data === 'string') + return data; + return data.latin1Slice(0, data.length); + }, + + utf16le: (data, hint) => { + if (data.length === 0) + return ''; + if (typeof data === 'string') + data = Buffer.from(data, 'latin1'); + return data.ucs2Slice(0, data.length); + }, + + base64: (data, hint) => { + if (data.length === 0) + return ''; + if (typeof data === 'string') + data = Buffer.from(data, 'latin1'); + return data.base64Slice(0, data.length); + }, + + other: (data, hint) => { + if (data.length === 0) + return ''; + if (typeof data === 'string') + data = Buffer.from(data, 'latin1'); + try { + const decoder = new TextDecoder(this); + return decoder.decode(data); + } catch {} + }, +}; + +function convertToUTF8(data, charset, hint) { + const decode = getDecoder(charset); + if (decode) + return decode(data, hint); +} + +function basename(path) { + if (typeof path !== 'string') + return ''; + for (let i = path.length - 1; i >= 0; --i) { + switch (path.charCodeAt(i)) { + case 0x2F: // '/' + case 0x5C: // '\' + path = path.slice(i + 1); + return (path === '..' || path === '.' ? 
'' : path); + } + } + return (path === '..' || path === '.' ? '' : path); +} + +const TOKEN = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 0, 1, 1, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +]; + +const QDTEXT = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, +]; + +const CHARSET = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 0, 1, 1, 1, 1, 0, 0, 0, 0, 1, 0, 1, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 1, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +]; + +const EXTENDED_VALUE = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 0, 1, 1, 0, 1, 0, 0, 0, 0, 1, 0, 1, 1, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 1, 0, 1, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, +]; + +/* eslint-disable no-multi-spaces */ +const HEX_VALUES = [ + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, 
-1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, -1, -1, -1, -1, -1, -1, + -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, 10, 11, 12, 13, 14, 15, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, + -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, -1, +]; +/* eslint-enable no-multi-spaces */ + +module.exports = { + basename, + convertToUTF8, + getDecoder, + parseContentType, + parseDisposition, +}; diff --git a/node_modules/busboy/package.json b/node_modules/busboy/package.json new file mode 100644 index 0000000..ac2577f --- /dev/null +++ b/node_modules/busboy/package.json @@ -0,0 +1,22 @@ +{ "name": "busboy", + "version": "1.6.0", + "author": "Brian White ", + "description": "A streaming parser for HTML form data for node.js", + "main": "./lib/index.js", + "dependencies": { + "streamsearch": "^1.1.0" + }, + "devDependencies": { + "@mscdex/eslint-config": "^1.1.0", + "eslint": "^7.32.0" + }, + "scripts": { + "test": "node test/test.js", + "lint": "eslint --cache --report-unused-disable-directives --ext=.js .eslintrc.js lib test bench", + "lint:fix": "npm run lint -- --fix" + }, + "engines": { "node": ">=10.16.0" }, + "keywords": [ "uploads", "forms", "multipart", "form-data" ], + "licenses": [ { "type": "MIT", "url": "http://github.com/mscdex/busboy/raw/master/LICENSE" } ], + "repository": { "type": "git", "url": "http://github.com/mscdex/busboy.git" } +} diff --git a/node_modules/busboy/test/common.js b/node_modules/busboy/test/common.js new file mode 100644 index 0000000..fb82ad8 --- /dev/null +++ b/node_modules/busboy/test/common.js @@ -0,0 +1,109 @@ +'use strict'; + +const assert = require('assert'); +const { inspect } = require('util'); + +const mustCallChecks = []; + +function noop() {} + +function runCallChecks(exitCode) { + if (exitCode !== 0) return; + + const failed = mustCallChecks.filter((context) => { + if ('minimum' in context) { + context.messageSegment = `at least ${context.minimum}`; + return context.actual < context.minimum; + } + context.messageSegment = `exactly ${context.exact}`; + return context.actual !== context.exact; + }); + + failed.forEach((context) => { + console.error('Mismatched %s function calls. 
Expected %s, actual %d.', + context.name, + context.messageSegment, + context.actual); + console.error(context.stack.split('\n').slice(2).join('\n')); + }); + + if (failed.length) + process.exit(1); +} + +function mustCall(fn, exact) { + return _mustCallInner(fn, exact, 'exact'); +} + +function mustCallAtLeast(fn, minimum) { + return _mustCallInner(fn, minimum, 'minimum'); +} + +function _mustCallInner(fn, criteria = 1, field) { + if (process._exiting) + throw new Error('Cannot use common.mustCall*() in process exit handler'); + + if (typeof fn === 'number') { + criteria = fn; + fn = noop; + } else if (fn === undefined) { + fn = noop; + } + + if (typeof criteria !== 'number') + throw new TypeError(`Invalid ${field} value: ${criteria}`); + + const context = { + [field]: criteria, + actual: 0, + stack: inspect(new Error()), + name: fn.name || '' + }; + + // Add the exit listener only once to avoid listener leak warnings + if (mustCallChecks.length === 0) + process.on('exit', runCallChecks); + + mustCallChecks.push(context); + + function wrapped(...args) { + ++context.actual; + return fn.call(this, ...args); + } + // TODO: remove origFn? + wrapped.origFn = fn; + + return wrapped; +} + +function getCallSite(top) { + const originalStackFormatter = Error.prepareStackTrace; + Error.prepareStackTrace = (err, stack) => + `${stack[0].getFileName()}:${stack[0].getLineNumber()}`; + const err = new Error(); + Error.captureStackTrace(err, top); + // With the V8 Error API, the stack is not formatted until it is accessed + // eslint-disable-next-line no-unused-expressions + err.stack; + Error.prepareStackTrace = originalStackFormatter; + return err.stack; +} + +function mustNotCall(msg) { + const callSite = getCallSite(mustNotCall); + return function mustNotCall(...args) { + args = args.map(inspect).join(', '); + const argsInfo = (args.length > 0 + ? 
`\ncalled with arguments: ${args}` + : ''); + assert.fail( + `${msg || 'function should not have been called'} at ${callSite}` + + argsInfo); + }; +} + +module.exports = { + mustCall, + mustCallAtLeast, + mustNotCall, +}; diff --git a/node_modules/busboy/test/test-types-multipart-charsets.js b/node_modules/busboy/test/test-types-multipart-charsets.js new file mode 100644 index 0000000..ed9c38a --- /dev/null +++ b/node_modules/busboy/test/test-types-multipart-charsets.js @@ -0,0 +1,94 @@ +'use strict'; + +const assert = require('assert'); +const { inspect } = require('util'); + +const { mustCall } = require(`${__dirname}/common.js`); + +const busboy = require('..'); + +const input = Buffer.from([ + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="テスト.dat"', + 'Content-Type: application/octet-stream', + '', + 'A'.repeat(1023), + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' +].join('\r\n')); +const boundary = '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k'; +const expected = [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('A'.repeat(1023)), + info: { + filename: 'テスト.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, +]; +const bb = busboy({ + defParamCharset: 'utf8', + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + } +}); +const results = []; + +bb.on('field', (name, val, info) => { + results.push({ type: 'field', name, val, info }); +}); + +bb.on('file', (name, stream, info) => { + const data = []; + let nb = 0; + const file = { + type: 'file', + name, + data: null, + info, + limited: false, + }; + results.push(file); + stream.on('data', (d) => { + data.push(d); + nb += d.length; + }).on('limit', () => { + file.limited = true; + }).on('close', () => { + file.data = Buffer.concat(data, nb); + assert.strictEqual(stream.truncated, file.limited); + }).once('error', (err) => { + file.err = err.message; + }); +}); + +bb.on('error', (err) => { + results.push({ error: err.message }); +}); + +bb.on('partsLimit', () => { + results.push('partsLimit'); +}); + +bb.on('filesLimit', () => { + results.push('filesLimit'); +}); + +bb.on('fieldsLimit', () => { + results.push('fieldsLimit'); +}); + +bb.on('close', mustCall(() => { + assert.deepStrictEqual( + results, + expected, + 'Results mismatch.\n' + + `Parsed: ${inspect(results)}\n` + + `Expected: ${inspect(expected)}` + ); +})); + +bb.end(input); diff --git a/node_modules/busboy/test/test-types-multipart-stream-pause.js b/node_modules/busboy/test/test-types-multipart-stream-pause.js new file mode 100644 index 0000000..df7268a --- /dev/null +++ b/node_modules/busboy/test/test-types-multipart-stream-pause.js @@ -0,0 +1,102 @@ +'use strict'; + +const assert = require('assert'); +const { randomFillSync } = require('crypto'); +const { inspect } = require('util'); + +const busboy = require('..'); + +const { mustCall } = require('./common.js'); + +const BOUNDARY = 'u2KxIV5yF1y+xUspOQCCZopaVgeV6Jxihv35XQJmuTx8X3sh'; + +function formDataSection(key, value) { + return Buffer.from( + `\r\n--${BOUNDARY}` + + `\r\nContent-Disposition: form-data; name="${key}"` + + `\r\n\r\n${value}` + ); +} + +function formDataFile(key, filename, contentType) { + const buf = Buffer.allocUnsafe(100000); + return Buffer.concat([ + Buffer.from(`\r\n--${BOUNDARY}\r\n`), + Buffer.from(`Content-Disposition: form-data; name="${key}"` + + `; filename="${filename}"\r\n`), + 
Buffer.from(`Content-Type: ${contentType}\r\n\r\n`), + randomFillSync(buf) + ]); +} + +const reqChunks = [ + Buffer.concat([ + formDataFile('file', 'file.bin', 'application/octet-stream'), + formDataSection('foo', 'foo value'), + ]), + formDataSection('bar', 'bar value'), + Buffer.from(`\r\n--${BOUNDARY}--\r\n`) +]; +const bb = busboy({ + headers: { + 'content-type': `multipart/form-data; boundary=${BOUNDARY}` + } +}); +const expected = [ + { type: 'file', + name: 'file', + info: { + filename: 'file.bin', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + }, + { type: 'field', + name: 'foo', + val: 'foo value', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'field', + name: 'bar', + val: 'bar value', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, +]; +const results = []; + +bb.on('field', (name, val, info) => { + results.push({ type: 'field', name, val, info }); +}); + +bb.on('file', (name, stream, info) => { + results.push({ type: 'file', name, info }); + // Simulate a pipe where the destination is pausing (perhaps due to waiting + // for file system write to finish) + setTimeout(() => { + stream.resume(); + }, 10); +}); + +bb.on('close', mustCall(() => { + assert.deepStrictEqual( + results, + expected, + 'Results mismatch.\n' + + `Parsed: ${inspect(results)}\n` + + `Expected: ${inspect(expected)}` + ); +})); + +for (const chunk of reqChunks) + bb.write(chunk); +bb.end(); diff --git a/node_modules/busboy/test/test-types-multipart.js b/node_modules/busboy/test/test-types-multipart.js new file mode 100644 index 0000000..9755642 --- /dev/null +++ b/node_modules/busboy/test/test-types-multipart.js @@ -0,0 +1,1053 @@ +'use strict'; + +const assert = require('assert'); +const { inspect } = require('util'); + +const busboy = require('..'); + +const active = new Map(); + +const tests = [ + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_1"', + '', + 'super beta file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'A'.repeat(1023), + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_1"; filename="1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'B'.repeat(1023), + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'field', + name: 'file_name_0', + val: 'super alpha file', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'field', + name: 'file_name_1', + val: 'super beta file', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('A'.repeat(1023)), + info: { + filename: '1k_a.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_1', + data: Buffer.from('B'.repeat(1023)), + info: { 
+ filename: '1k_b.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + ], + what: 'Fields and files' + }, + { source: [ + ['------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name="cont"', + '', + 'some random content', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name="pass"', + '', + 'some random pass', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name=bit', + '', + '2', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--' + ].join('\r\n') + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [ + { type: 'field', + name: 'cont', + val: 'some random content', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'field', + name: 'pass', + val: 'some random pass', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'field', + name: 'bit', + val: '2', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + ], + what: 'Fields only' + }, + { source: [ + '' + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [ + { error: 'Unexpected end of form' }, + ], + what: 'No fields and no files' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + fileSize: 13, + fieldSize: 5 + }, + expected: [ + { type: 'field', + name: 'file_name_0', + val: 'super', + info: { + nameTruncated: false, + valueTruncated: true, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('ABCDEFGHIJKLM'), + info: { + filename: '1k_a.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: true, + }, + ], + what: 'Fields and files (limits)' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + files: 0 + }, + expected: [ + { type: 'field', + name: 'file_name_0', + val: 'super alpha file', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + 'filesLimit', + ], + what: 'Fields and files (limits: 0 files)' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'super alpha file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_1"', + '', + 'super 
beta file', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'A'.repeat(1023), + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_1"; filename="1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'B'.repeat(1023), + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'field', + name: 'file_name_0', + val: 'super alpha file', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + { type: 'field', + name: 'file_name_1', + val: 'super beta file', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + ], + events: ['field'], + what: 'Fields and (ignored) files' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="/tmp/1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_1"; filename="C:\\files\\1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_2"; filename="relative/1k_c.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: '1k_a.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_1', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: '1k_b.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_2', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: '1k_c.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + ], + what: 'Files with filenames containing paths' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="/absolute/1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_1"; filename="C:\\absolute\\1k_b.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_2"; filename="relative/1k_c.dat"', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: 
'---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + preservePath: true, + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: '/absolute/1k_a.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_1', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: 'C:\\absolute\\1k_b.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_2', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: 'relative/1k_c.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + ], + what: 'Paths to be preserved through the preservePath option' + }, + { source: [ + ['------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name="cont"', + 'Content-Type: ', + '', + 'some random content', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: ', + '', + 'some random pass', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--' + ].join('\r\n') + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [ + { type: 'field', + name: 'cont', + val: 'some random content', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + ], + what: 'Empty content-type and empty content-disposition' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="file"; filename*=utf-8\'\'n%C3%A4me.txt', + 'Content-Type: application/octet-stream', + '', + 'ABCDEFGHIJKLMNOPQRSTUVWXYZ', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--' + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'file', + data: Buffer.from('ABCDEFGHIJKLMNOPQRSTUVWXYZ'), + info: { + filename: 'näme.txt', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + }, + ], + what: 'Unicode filenames' + }, + { source: [ + ['--asdasdasdasd\r\n', + 'Content-Type: text/plain\r\n', + 'Content-Disposition: form-data; name="foo"\r\n', + '\r\n', + 'asd\r\n', + '--asdasdasdasd--' + ].join(':)') + ], + boundary: 'asdasdasdasd', + expected: [ + { error: 'Malformed part header' }, + { error: 'Unexpected end of form' }, + ], + what: 'Stopped mid-header' + }, + { source: [ + ['------WebKitFormBoundaryTB2MiQ36fnSJlrhY', + 'Content-Disposition: form-data; name="cont"', + 'Content-Type: application/json', + '', + '{}', + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--', + ].join('\r\n') + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [ + { type: 'field', + name: 'cont', + val: '{}', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'application/json', + }, + }, + ], + what: 'content-type for fields' + }, + { source: [ + '------WebKitFormBoundaryTB2MiQ36fnSJlrhY--', + ], + boundary: '----WebKitFormBoundaryTB2MiQ36fnSJlrhY', + expected: [], + what: 'empty form' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name=upload_file_0; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + 'Content-Transfer-Encoding: binary', + '', + '', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { 
type: 'file', + name: 'upload_file_0', + data: Buffer.alloc(0), + info: { + filename: '1k_a.dat', + encoding: 'binary', + mimeType: 'application/octet-stream', + }, + limited: false, + err: 'Unexpected end of form', + }, + { error: 'Unexpected end of form' }, + ], + what: 'Stopped mid-file #1' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name=upload_file_0; filename="1k_a.dat"', + 'Content-Type: application/octet-stream', + '', + 'a', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('a'), + info: { + filename: '1k_a.dat', + encoding: '7bit', + mimeType: 'application/octet-stream', + }, + limited: false, + err: 'Unexpected end of form', + }, + { error: 'Unexpected end of form' }, + ], + what: 'Stopped mid-file #2' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="notes.txt"', + 'Content-Type: text/plain; charset=utf8', + '', + 'a', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('a'), + info: { + filename: 'notes.txt', + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + ], + what: 'Text file with charset' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="notes.txt"', + 'Content-Type: ', + ' text/plain; charset=utf8', + '', + 'a', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('a'), + info: { + filename: 'notes.txt', + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + ], + what: 'Folded header value' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Type: text/plain; charset=utf8', + '', + 'a', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [], + what: 'No Content-Disposition' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'a'.repeat(64 * 1024), + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="notes.txt"', + 'Content-Type: ', + ' text/plain; charset=utf8', + '', + 'bc', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + fieldSize: Infinity, + }, + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('bc'), + info: { + filename: 'notes.txt', + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + ], + events: [ 'file' ], + what: 'Skip field parts if no listener' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'a', + 
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="notes.txt"', + 'Content-Type: ', + ' text/plain; charset=utf8', + '', + 'bc', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + parts: 1, + }, + expected: [ + { type: 'field', + name: 'file_name_0', + val: 'a', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + 'partsLimit', + ], + what: 'Parts limit' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_0"', + '', + 'a', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; name="file_name_1"', + '', + 'b', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + fields: 1, + }, + expected: [ + { type: 'field', + name: 'file_name_0', + val: 'a', + info: { + nameTruncated: false, + valueTruncated: false, + encoding: '7bit', + mimeType: 'text/plain', + }, + }, + 'fieldsLimit', + ], + what: 'Fields limit' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="notes.txt"', + 'Content-Type: text/plain; charset=utf8', + '', + 'ab', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_1"; filename="notes2.txt"', + 'Content-Type: text/plain; charset=utf8', + '', + 'cd', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + limits: { + files: 1, + }, + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('ab'), + info: { + filename: 'notes.txt', + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + 'filesLimit', + ], + what: 'Files limit' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + `name="upload_file_0"; filename="${'a'.repeat(64 * 1024)}.txt"`, + 'Content-Type: text/plain; charset=utf8', + '', + 'ab', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_1"; filename="notes2.txt"', + 'Content-Type: text/plain; charset=utf8', + '', + 'cd', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { error: 'Malformed part header' }, + { type: 'file', + name: 'upload_file_1', + data: Buffer.from('cd'), + info: { + filename: 'notes2.txt', + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + ], + what: 'Oversized part header' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + 'name="upload_file_0"; filename="notes.txt"', + 'Content-Type: text/plain; charset=utf8', + '', + 'a'.repeat(31) + '\r', + ].join('\r\n'), + 'b'.repeat(40), + '\r\n-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + fileHwm: 32, + expected: [ + { type: 'file', + name: 
'upload_file_0', + data: Buffer.from('a'.repeat(31) + '\r' + 'b'.repeat(40)), + info: { + filename: 'notes.txt', + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + ], + what: 'Lookbehind data should not stall file streams' + }, + { source: [ + ['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + `name="upload_file_0"; filename="${'a'.repeat(8 * 1024)}.txt"`, + 'Content-Type: text/plain; charset=utf8', + '', + 'ab', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + `name="upload_file_1"; filename="${'b'.repeat(8 * 1024)}.txt"`, + 'Content-Type: text/plain; charset=utf8', + '', + 'cd', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + 'Content-Disposition: form-data; ' + + `name="upload_file_2"; filename="${'c'.repeat(8 * 1024)}.txt"`, + 'Content-Type: text/plain; charset=utf8', + '', + 'ef', + '-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--', + ].join('\r\n') + ], + boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k', + expected: [ + { type: 'file', + name: 'upload_file_0', + data: Buffer.from('ab'), + info: { + filename: `${'a'.repeat(8 * 1024)}.txt`, + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_1', + data: Buffer.from('cd'), + info: { + filename: `${'b'.repeat(8 * 1024)}.txt`, + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + { type: 'file', + name: 'upload_file_2', + data: Buffer.from('ef'), + info: { + filename: `${'c'.repeat(8 * 1024)}.txt`, + encoding: '7bit', + mimeType: 'text/plain', + }, + limited: false, + }, + ], + what: 'Header size limit should be per part' + }, + { source: [ + '\r\n--d1bf46b3-aa33-4061-b28d-6c5ced8b08ee\r\n', + 'Content-Type: application/gzip\r\n' + + 'Content-Encoding: gzip\r\n' + + 'Content-Disposition: form-data; name=batch-1; filename=batch-1' + + '\r\n\r\n', + '\r\n--d1bf46b3-aa33-4061-b28d-6c5ced8b08ee--', + ], + boundary: 'd1bf46b3-aa33-4061-b28d-6c5ced8b08ee', + expected: [ + { type: 'file', + name: 'batch-1', + data: Buffer.alloc(0), + info: { + filename: 'batch-1', + encoding: '7bit', + mimeType: 'application/gzip', + }, + limited: false, + }, + ], + what: 'Empty part' + }, +]; + +for (const test of tests) { + active.set(test, 1); + + const { what, boundary, events, limits, preservePath, fileHwm } = test; + const bb = busboy({ + fileHwm, + limits, + preservePath, + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + } + }); + const results = []; + + if (events === undefined || events.includes('field')) { + bb.on('field', (name, val, info) => { + results.push({ type: 'field', name, val, info }); + }); + } + + if (events === undefined || events.includes('file')) { + bb.on('file', (name, stream, info) => { + const data = []; + let nb = 0; + const file = { + type: 'file', + name, + data: null, + info, + limited: false, + }; + results.push(file); + stream.on('data', (d) => { + data.push(d); + nb += d.length; + }).on('limit', () => { + file.limited = true; + }).on('close', () => { + file.data = Buffer.concat(data, nb); + assert.strictEqual(stream.truncated, file.limited); + }).once('error', (err) => { + file.err = err.message; + }); + }); + } + + bb.on('error', (err) => { + results.push({ error: err.message }); + }); + + bb.on('partsLimit', () => { + results.push('partsLimit'); + }); + + bb.on('filesLimit', () => { + results.push('filesLimit'); + }); + + 
bb.on('fieldsLimit', () => { + results.push('fieldsLimit'); + }); + + bb.on('close', () => { + active.delete(test); + + assert.deepStrictEqual( + results, + test.expected, + `[${what}] Results mismatch.\n` + + `Parsed: ${inspect(results)}\n` + + `Expected: ${inspect(test.expected)}` + ); + }); + + for (const src of test.source) { + const buf = (typeof src === 'string' ? Buffer.from(src, 'utf8') : src); + bb.write(buf); + } + bb.end(); +} + +// Byte-by-byte versions +for (let test of tests) { + test = { ...test }; + test.what += ' (byte-by-byte)'; + active.set(test, 1); + + const { what, boundary, events, limits, preservePath, fileHwm } = test; + const bb = busboy({ + fileHwm, + limits, + preservePath, + headers: { + 'content-type': `multipart/form-data; boundary=${boundary}`, + } + }); + const results = []; + + if (events === undefined || events.includes('field')) { + bb.on('field', (name, val, info) => { + results.push({ type: 'field', name, val, info }); + }); + } + + if (events === undefined || events.includes('file')) { + bb.on('file', (name, stream, info) => { + const data = []; + let nb = 0; + const file = { + type: 'file', + name, + data: null, + info, + limited: false, + }; + results.push(file); + stream.on('data', (d) => { + data.push(d); + nb += d.length; + }).on('limit', () => { + file.limited = true; + }).on('close', () => { + file.data = Buffer.concat(data, nb); + assert.strictEqual(stream.truncated, file.limited); + }).once('error', (err) => { + file.err = err.message; + }); + }); + } + + bb.on('error', (err) => { + results.push({ error: err.message }); + }); + + bb.on('partsLimit', () => { + results.push('partsLimit'); + }); + + bb.on('filesLimit', () => { + results.push('filesLimit'); + }); + + bb.on('fieldsLimit', () => { + results.push('fieldsLimit'); + }); + + bb.on('close', () => { + active.delete(test); + + assert.deepStrictEqual( + results, + test.expected, + `[${what}] Results mismatch.\n` + + `Parsed: ${inspect(results)}\n` + + `Expected: ${inspect(test.expected)}` + ); + }); + + for (const src of test.source) { + const buf = (typeof src === 'string' ? 
Buffer.from(src, 'utf8') : src); + for (let i = 0; i < buf.length; ++i) + bb.write(buf.slice(i, i + 1)); + } + bb.end(); +} + +{ + let exception = false; + process.once('uncaughtException', (ex) => { + exception = true; + throw ex; + }); + process.on('exit', () => { + if (exception || active.size === 0) + return; + process.exitCode = 1; + console.error('=========================='); + console.error(`${active.size} test(s) did not finish:`); + console.error('=========================='); + console.error(Array.from(active.keys()).map((v) => v.what).join('\n')); + }); +} diff --git a/node_modules/busboy/test/test-types-urlencoded.js b/node_modules/busboy/test/test-types-urlencoded.js new file mode 100644 index 0000000..c35962b --- /dev/null +++ b/node_modules/busboy/test/test-types-urlencoded.js @@ -0,0 +1,488 @@ +'use strict'; + +const assert = require('assert'); +const { transcode } = require('buffer'); +const { inspect } = require('util'); + +const busboy = require('..'); + +const active = new Map(); + +const tests = [ + { source: ['foo'], + expected: [ + ['foo', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Unassigned value' + }, + { source: ['foo=bar'], + expected: [ + ['foo', + 'bar', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Assigned value' + }, + { source: ['foo&bar=baz'], + expected: [ + ['foo', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['bar', + 'baz', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Unassigned and assigned value' + }, + { source: ['foo=bar&baz'], + expected: [ + ['foo', + 'bar', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['baz', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Assigned and unassigned value' + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['foo', + 'bar', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['baz', + 'bla', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Two assigned values' + }, + { source: ['foo&bar'], + expected: [ + ['foo', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['bar', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Two unassigned values' + }, + { source: ['foo&bar&'], + expected: [ + ['foo', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['bar', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Two unassigned values and ampersand' + }, + { source: ['foo+1=bar+baz%2Bquux'], + expected: [ + ['foo 1', + 'bar baz+quux', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Assigned key and value with (plus) space' + }, + { source: ['foo=bar%20baz%21'], + expected: [ + ['foo', + 'bar baz!', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Assigned value 
with encoded bytes' + }, + { source: ['foo%20bar=baz%20bla%21'], + expected: [ + ['foo bar', + 'baz bla!', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Assigned value with encoded bytes #2' + }, + { source: ['foo=bar%20baz%21&num=1000'], + expected: [ + ['foo', + 'bar baz!', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['num', + '1000', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Two assigned values, one with encoded bytes' + }, + { source: [ + Array.from(transcode(Buffer.from('foo'), 'utf8', 'utf16le')).map( + (n) => `%${n.toString(16).padStart(2, '0')}` + ).join(''), + '=', + Array.from(transcode(Buffer.from('😀!'), 'utf8', 'utf16le')).map( + (n) => `%${n.toString(16).padStart(2, '0')}` + ).join(''), + ], + expected: [ + ['foo', + '😀!', + { nameTruncated: false, + valueTruncated: false, + encoding: 'UTF-16LE', + mimeType: 'text/plain' }, + ], + ], + charset: 'UTF-16LE', + what: 'Encoded value with multi-byte charset' + }, + { source: [ + 'foo=<', + Array.from(transcode(Buffer.from('©:^þ'), 'utf8', 'latin1')).map( + (n) => `%${n.toString(16).padStart(2, '0')}` + ).join(''), + ], + expected: [ + ['foo', + '<©:^þ', + { nameTruncated: false, + valueTruncated: false, + encoding: 'ISO-8859-1', + mimeType: 'text/plain' }, + ], + ], + charset: 'ISO-8859-1', + what: 'Encoded value with single-byte, ASCII-compatible, non-UTF8 charset' + }, + { source: ['foo=bar&baz=bla'], + expected: [], + what: 'Limits: zero fields', + limits: { fields: 0 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['foo', + 'bar', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: one field', + limits: { fields: 1 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['foo', + 'bar', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['baz', + 'bla', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: field part lengths match limits', + limits: { fieldNameSize: 3, fieldSize: 3 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['fo', + 'bar', + { nameTruncated: true, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['ba', + 'bla', + { nameTruncated: true, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: truncated field name', + limits: { fieldNameSize: 2 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['foo', + 'ba', + { nameTruncated: false, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['baz', + 'bl', + { nameTruncated: false, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: truncated field value', + limits: { fieldSize: 2 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['fo', + 'ba', + { nameTruncated: true, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['ba', + 'bl', + { nameTruncated: true, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: truncated field name and value', + limits: { fieldNameSize: 2, fieldSize: 2 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['fo', + '', + { nameTruncated: true, + valueTruncated: true, + 
encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['ba', + '', + { nameTruncated: true, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: truncated field name and zero value limit', + limits: { fieldNameSize: 2, fieldSize: 0 } + }, + { source: ['foo=bar&baz=bla'], + expected: [ + ['', + '', + { nameTruncated: true, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ['', + '', + { nameTruncated: true, + valueTruncated: true, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Limits: truncated zero field name and zero value limit', + limits: { fieldNameSize: 0, fieldSize: 0 } + }, + { source: ['&'], + expected: [], + what: 'Ampersand' + }, + { source: ['&&&&&'], + expected: [], + what: 'Many ampersands' + }, + { source: ['='], + expected: [ + ['', + '', + { nameTruncated: false, + valueTruncated: false, + encoding: 'utf-8', + mimeType: 'text/plain' }, + ], + ], + what: 'Assigned value, empty name and value' + }, + { source: [''], + expected: [], + what: 'Nothing' + }, +]; + +for (const test of tests) { + active.set(test, 1); + + const { what } = test; + const charset = test.charset || 'utf-8'; + const bb = busboy({ + limits: test.limits, + headers: { + 'content-type': `application/x-www-form-urlencoded; charset=${charset}`, + }, + }); + const results = []; + + bb.on('field', (key, val, info) => { + results.push([key, val, info]); + }); + + bb.on('file', () => { + throw new Error(`[${what}] Unexpected file`); + }); + + bb.on('close', () => { + active.delete(test); + + assert.deepStrictEqual( + results, + test.expected, + `[${what}] Results mismatch.\n` + + `Parsed: ${inspect(results)}\n` + + `Expected: ${inspect(test.expected)}` + ); + }); + + for (const src of test.source) { + const buf = (typeof src === 'string' ? Buffer.from(src, 'utf8') : src); + bb.write(buf); + } + bb.end(); +} + +// Byte-by-byte versions +for (let test of tests) { + test = { ...test }; + test.what += ' (byte-by-byte)'; + active.set(test, 1); + + const { what } = test; + const charset = test.charset || 'utf-8'; + const bb = busboy({ + limits: test.limits, + headers: { + 'content-type': `application/x-www-form-urlencoded; charset="${charset}"`, + }, + }); + const results = []; + + bb.on('field', (key, val, info) => { + results.push([key, val, info]); + }); + + bb.on('file', () => { + throw new Error(`[${what}] Unexpected file`); + }); + + bb.on('close', () => { + active.delete(test); + + assert.deepStrictEqual( + results, + test.expected, + `[${what}] Results mismatch.\n` + + `Parsed: ${inspect(results)}\n` + + `Expected: ${inspect(test.expected)}` + ); + }); + + for (const src of test.source) { + const buf = (typeof src === 'string' ? 
Buffer.from(src, 'utf8') : src); + for (let i = 0; i < buf.length; ++i) + bb.write(buf.slice(i, i + 1)); + } + bb.end(); +} + +{ + let exception = false; + process.once('uncaughtException', (ex) => { + exception = true; + throw ex; + }); + process.on('exit', () => { + if (exception || active.size === 0) + return; + process.exitCode = 1; + console.error('=========================='); + console.error(`${active.size} test(s) did not finish:`); + console.error('=========================='); + console.error(Array.from(active.keys()).map((v) => v.what).join('\n')); + }); +} diff --git a/node_modules/busboy/test/test.js b/node_modules/busboy/test/test.js new file mode 100644 index 0000000..d0380f2 --- /dev/null +++ b/node_modules/busboy/test/test.js @@ -0,0 +1,20 @@ +'use strict'; + +const { spawnSync } = require('child_process'); +const { readdirSync } = require('fs'); +const { join } = require('path'); + +const files = readdirSync(__dirname).sort(); +for (const filename of files) { + if (filename.startsWith('test-')) { + const path = join(__dirname, filename); + console.log(`> Running ${filename} ...`); + const result = spawnSync(`${process.argv0} ${path}`, { + shell: true, + stdio: 'inherit', + windowsHide: true + }); + if (result.status !== 0) + process.exitCode = 1; + } +} diff --git a/node_modules/concat-stream/LICENSE b/node_modules/concat-stream/LICENSE new file mode 100644 index 0000000..99c130e --- /dev/null +++ b/node_modules/concat-stream/LICENSE @@ -0,0 +1,24 @@ +The MIT License + +Copyright (c) 2013 Max Ogden + +Permission is hereby granted, free of charge, +to any person obtaining a copy of this software and +associated documentation files (the "Software"), to +deal in the Software without restriction, including +without limitation the rights to use, copy, modify, +merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom +the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice +shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR +ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/concat-stream/index.js b/node_modules/concat-stream/index.js new file mode 100644 index 0000000..dd672a7 --- /dev/null +++ b/node_modules/concat-stream/index.js @@ -0,0 +1,144 @@ +var Writable = require('readable-stream').Writable +var inherits = require('inherits') +var bufferFrom = require('buffer-from') + +if (typeof Uint8Array === 'undefined') { + var U8 = require('typedarray').Uint8Array +} else { + var U8 = Uint8Array +} + +function ConcatStream(opts, cb) { + if (!(this instanceof ConcatStream)) return new ConcatStream(opts, cb) + + if (typeof opts === 'function') { + cb = opts + opts = {} + } + if (!opts) opts = {} + + var encoding = opts.encoding + var shouldInferEncoding = false + + if (!encoding) { + shouldInferEncoding = true + } else { + encoding = String(encoding).toLowerCase() + if (encoding === 'u8' || encoding === 'uint8') { + encoding = 'uint8array' + } + } + + Writable.call(this, { objectMode: true }) + + this.encoding = encoding + this.shouldInferEncoding = shouldInferEncoding + + if (cb) this.on('finish', function () { cb(this.getBody()) }) + this.body = [] +} + +module.exports = ConcatStream +inherits(ConcatStream, Writable) + +ConcatStream.prototype._write = function(chunk, enc, next) { + this.body.push(chunk) + next() +} + +ConcatStream.prototype.inferEncoding = function (buff) { + var firstBuffer = buff === undefined ? this.body[0] : buff; + if (Buffer.isBuffer(firstBuffer)) return 'buffer' + if (typeof Uint8Array !== 'undefined' && firstBuffer instanceof Uint8Array) return 'uint8array' + if (Array.isArray(firstBuffer)) return 'array' + if (typeof firstBuffer === 'string') return 'string' + if (Object.prototype.toString.call(firstBuffer) === "[object Object]") return 'object' + return 'buffer' +} + +ConcatStream.prototype.getBody = function () { + if (!this.encoding && this.body.length === 0) return [] + if (this.shouldInferEncoding) this.encoding = this.inferEncoding() + if (this.encoding === 'array') return arrayConcat(this.body) + if (this.encoding === 'string') return stringConcat(this.body) + if (this.encoding === 'buffer') return bufferConcat(this.body) + if (this.encoding === 'uint8array') return u8Concat(this.body) + return this.body +} + +var isArray = Array.isArray || function (arr) { + return Object.prototype.toString.call(arr) == '[object Array]' +} + +function isArrayish (arr) { + return /Array\]$/.test(Object.prototype.toString.call(arr)) +} + +function isBufferish (p) { + return typeof p === 'string' || isArrayish(p) || (p && typeof p.subarray === 'function') +} + +function stringConcat (parts) { + var strings = [] + var needsToString = false + for (var i = 0; i < parts.length; i++) { + var p = parts[i] + if (typeof p === 'string') { + strings.push(p) + } else if (Buffer.isBuffer(p)) { + strings.push(p) + } else if (isBufferish(p)) { + strings.push(bufferFrom(p)) + } else { + strings.push(bufferFrom(String(p))) + } + } + if (Buffer.isBuffer(parts[0])) { + strings = Buffer.concat(strings) + strings = strings.toString('utf8') + } else { + strings = strings.join('') + } + return strings +} + +function bufferConcat (parts) { + var bufs = [] + for (var i = 0; i < parts.length; i++) { + var p = parts[i] + if (Buffer.isBuffer(p)) { + bufs.push(p) + } else if (isBufferish(p)) { + bufs.push(bufferFrom(p)) + } else { + bufs.push(bufferFrom(String(p))) + } + } + return Buffer.concat(bufs) +} + +function arrayConcat (parts) { + var res = [] + for (var i = 0; i < parts.length; i++) { + res.push.apply(res, 
parts[i]) + } + return res +} + +function u8Concat (parts) { + var len = 0 + for (var i = 0; i < parts.length; i++) { + if (typeof parts[i] === 'string') { + parts[i] = bufferFrom(parts[i]) + } + len += parts[i].length + } + var u8 = new U8(len) + for (var i = 0, offset = 0; i < parts.length; i++) { + var part = parts[i] + for (var j = 0; j < part.length; j++) { + u8[offset++] = part[j] + } + } + return u8 +} diff --git a/node_modules/concat-stream/package.json b/node_modules/concat-stream/package.json new file mode 100644 index 0000000..f709022 --- /dev/null +++ b/node_modules/concat-stream/package.json @@ -0,0 +1,55 @@ +{ + "name": "concat-stream", + "version": "1.6.2", + "description": "writable stream that concatenates strings or binary data and calls a callback with the result", + "tags": [ + "stream", + "simple", + "util", + "utility" + ], + "author": "Max Ogden ", + "repository": { + "type": "git", + "url": "http://github.com/maxogden/concat-stream.git" + }, + "bugs": { + "url": "http://github.com/maxogden/concat-stream/issues" + }, + "engines": [ + "node >= 0.8" + ], + "main": "index.js", + "files": [ + "index.js" + ], + "scripts": { + "test": "tape test/*.js test/server/*.js" + }, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "inherits": "^2.0.3", + "readable-stream": "^2.2.2", + "typedarray": "^0.0.6" + }, + "devDependencies": { + "tape": "^4.6.3" + }, + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/8..latest", + "firefox/17..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/concat-stream/readme.md b/node_modules/concat-stream/readme.md new file mode 100644 index 0000000..7aa19c4 --- /dev/null +++ b/node_modules/concat-stream/readme.md @@ -0,0 +1,102 @@ +# concat-stream + +Writable stream that concatenates all the data from a stream and calls a callback with the result. Use this when you want to collect all the data from a stream into a single buffer. + +[![Build Status](https://travis-ci.org/maxogden/concat-stream.svg?branch=master)](https://travis-ci.org/maxogden/concat-stream) + +[![NPM](https://nodei.co/npm/concat-stream.png)](https://nodei.co/npm/concat-stream/) + +### description + +Streams emit many buffers. If you want to collect all of the buffers, and when the stream ends concatenate all of the buffers together and receive a single buffer then this is the module for you. + +Only use this if you know you can fit all of the output of your stream into a single Buffer (e.g. in RAM). + +There are also `objectMode` streams that emit things other than Buffers, and you can concatenate these too. See below for details. + +## Related + +`concat-stream` is part of the [mississippi stream utility collection](https://github.com/maxogden/mississippi) which includes more useful stream modules similar to this one. 
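A minimal, hypothetical sketch of the most common use, buffering an entire HTTP request body into one callback, may help orient readers before the package's own examples below (the port and response text are invented for illustration; only `concat(cb)` from the API documented in this README is used):

```js
var http = require('http')
var concat = require('concat-stream')

// Hypothetical server: collect the whole request body, then report its size.
http.createServer(function (req, res) {
  req.pipe(concat(function (body) {
    // body is a single Buffer containing every chunk the request emitted
    res.end('received ' + body.length + ' bytes\n')
  }))
  req.on('error', function (err) {
    // concat-stream does not handle errors; handle them on the source stream
    res.statusCode = 500
    res.end(err.message)
  })
}).listen(8000)
```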
+ +### examples + +#### Buffers + +```js +var fs = require('fs') +var concat = require('concat-stream') + +var readStream = fs.createReadStream('cat.png') +var concatStream = concat(gotPicture) + +readStream.on('error', handleError) +readStream.pipe(concatStream) + +function gotPicture(imageBuffer) { + // imageBuffer is all of `cat.png` as a node.js Buffer +} + +function handleError(err) { + // handle your error appropriately here, e.g.: + console.error(err) // print the error to STDERR + process.exit(1) // exit program with non-zero exit code +} + +``` + +#### Arrays + +```js +var write = concat(function(data) {}) +write.write([1,2,3]) +write.write([4,5,6]) +write.end() +// data will be [1,2,3,4,5,6] in the above callback +``` + +#### Uint8Arrays + +```js +var write = concat(function(data) {}) +var a = new Uint8Array(3) +a[0] = 97; a[1] = 98; a[2] = 99 +write.write(a) +write.write('!') +write.end(Buffer.from('!!1')) +``` + +See `test/` for more examples + +# methods + +```js +var concat = require('concat-stream') +``` + +## var writable = concat(opts={}, cb) + +Return a `writable` stream that will fire `cb(data)` with all of the data that +was written to the stream. Data can be written to `writable` as strings, +Buffers, arrays of byte integers, and Uint8Arrays. + +By default `concat-stream` will give you back the same data type as the type of the first buffer written to the stream. Use `opts.encoding` to set what format `data` should be returned as, e.g. if you don't want to rely on the built-in type checking or for some other reason. + +* `string` - get a string +* `buffer` - get back a Buffer +* `array` - get an array of byte integers +* `uint8array`, `u8`, `uint8` - get back a Uint8Array +* `object` - get back an array of Objects + +If you don't specify an encoding, and the types can't be inferred (e.g. you write things that aren't in the list above), it will try to concatenate them into a `Buffer`. + +If nothing is written to `writable` then `data` will be an empty array `[]`. + +# error handling + +`concat-stream` does not handle errors for you, so you must handle errors on whatever streams you pipe into `concat-stream`. This is a general rule when programming with node.js streams: always handle errors on each and every stream. Since `concat-stream` is not itself a stream it does not emit errors. + +We recommend using [`end-of-stream`](https://npmjs.org/end-of-stream) or [`pump`](https://npmjs.org/pump) for writing error tolerant stream code. + +# license + +MIT LICENSE diff --git a/node_modules/core-util-is/LICENSE b/node_modules/core-util-is/LICENSE new file mode 100644 index 0000000..d8d7f94 --- /dev/null +++ b/node_modules/core-util-is/LICENSE @@ -0,0 +1,19 @@ +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software.
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. diff --git a/node_modules/core-util-is/README.md b/node_modules/core-util-is/README.md new file mode 100644 index 0000000..5a76b41 --- /dev/null +++ b/node_modules/core-util-is/README.md @@ -0,0 +1,3 @@ +# core-util-is + +The `util.is*` functions introduced in Node v0.12. diff --git a/node_modules/core-util-is/lib/util.js b/node_modules/core-util-is/lib/util.js new file mode 100644 index 0000000..6e5a20d --- /dev/null +++ b/node_modules/core-util-is/lib/util.js @@ -0,0 +1,107 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// NOTE: These type checking functions intentionally don't use `instanceof` +// because it is fragile and can be easily faked with `Object.create()`. 
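// Illustrative note (not from the upstream source): the fragility mentioned in
// the comment above is easy to demonstrate, since an object built with
// Object.create() passes an `instanceof` check without carrying any of the
// real internal state, e.g.
//
//   var fake = Object.create(Date.prototype);
//   fake instanceof Date;                 // true, yet it is not a usable Date
//   Object.prototype.toString.call(fake); // '[object Object]'
//
// which is why the helpers below rely on typeof checks and
// Object.prototype.toString tags rather than prototype chains.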
+ +function isArray(arg) { + if (Array.isArray) { + return Array.isArray(arg); + } + return objectToString(arg) === '[object Array]'; +} +exports.isArray = isArray; + +function isBoolean(arg) { + return typeof arg === 'boolean'; +} +exports.isBoolean = isBoolean; + +function isNull(arg) { + return arg === null; +} +exports.isNull = isNull; + +function isNullOrUndefined(arg) { + return arg == null; +} +exports.isNullOrUndefined = isNullOrUndefined; + +function isNumber(arg) { + return typeof arg === 'number'; +} +exports.isNumber = isNumber; + +function isString(arg) { + return typeof arg === 'string'; +} +exports.isString = isString; + +function isSymbol(arg) { + return typeof arg === 'symbol'; +} +exports.isSymbol = isSymbol; + +function isUndefined(arg) { + return arg === void 0; +} +exports.isUndefined = isUndefined; + +function isRegExp(re) { + return objectToString(re) === '[object RegExp]'; +} +exports.isRegExp = isRegExp; + +function isObject(arg) { + return typeof arg === 'object' && arg !== null; +} +exports.isObject = isObject; + +function isDate(d) { + return objectToString(d) === '[object Date]'; +} +exports.isDate = isDate; + +function isError(e) { + return (objectToString(e) === '[object Error]' || e instanceof Error); +} +exports.isError = isError; + +function isFunction(arg) { + return typeof arg === 'function'; +} +exports.isFunction = isFunction; + +function isPrimitive(arg) { + return arg === null || + typeof arg === 'boolean' || + typeof arg === 'number' || + typeof arg === 'string' || + typeof arg === 'symbol' || // ES6 symbol + typeof arg === 'undefined'; +} +exports.isPrimitive = isPrimitive; + +exports.isBuffer = require('buffer').Buffer.isBuffer; + +function objectToString(o) { + return Object.prototype.toString.call(o); +} diff --git a/node_modules/core-util-is/package.json b/node_modules/core-util-is/package.json new file mode 100644 index 0000000..b0c51f5 --- /dev/null +++ b/node_modules/core-util-is/package.json @@ -0,0 +1,38 @@ +{ + "name": "core-util-is", + "version": "1.0.3", + "description": "The `util.is*` functions introduced in Node v0.12.", + "main": "lib/util.js", + "files": [ + "lib" + ], + "repository": { + "type": "git", + "url": "git://github.com/isaacs/core-util-is" + }, + "keywords": [ + "util", + "isBuffer", + "isArray", + "isNumber", + "isString", + "isRegExp", + "isThis", + "isThat", + "polyfill" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/isaacs/core-util-is/issues" + }, + "scripts": { + "test": "tap test.js", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags" + }, + "devDependencies": { + "tap": "^15.0.9" + } +} diff --git a/node_modules/isarray/.npmignore b/node_modules/isarray/.npmignore new file mode 100644 index 0000000..3c3629e --- /dev/null +++ b/node_modules/isarray/.npmignore @@ -0,0 +1 @@ +node_modules diff --git a/node_modules/isarray/.travis.yml b/node_modules/isarray/.travis.yml new file mode 100644 index 0000000..cc4dba2 --- /dev/null +++ b/node_modules/isarray/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/node_modules/isarray/Makefile b/node_modules/isarray/Makefile new file mode 100644 index 0000000..787d56e --- /dev/null +++ b/node_modules/isarray/Makefile @@ -0,0 +1,6 @@ + +test: + @node_modules/.bin/tape test.js + +.PHONY: test + diff --git a/node_modules/isarray/README.md b/node_modules/isarray/README.md new file mode 100644 index 0000000..16d2c59 --- /dev/null +++ b/node_modules/isarray/README.md @@ -0,0 +1,60 @@ + +# isarray + +`Array#isArray` for older browsers. + +[![build status](https://secure.travis-ci.org/juliangruber/isarray.svg)](http://travis-ci.org/juliangruber/isarray) +[![downloads](https://img.shields.io/npm/dm/isarray.svg)](https://www.npmjs.org/package/isarray) + +[![browser support](https://ci.testling.com/juliangruber/isarray.png) +](https://ci.testling.com/juliangruber/isarray) + +## Usage + +```js +var isArray = require('isarray'); + +console.log(isArray([])); // => true +console.log(isArray({})); // => false +``` + +## Installation + +With [npm](http://npmjs.org) do + +```bash +$ npm install isarray +``` + +Then bundle for the browser with +[browserify](https://github.com/substack/browserify). + +With [component](http://component.io) do + +```bash +$ component install juliangruber/isarray +``` + +## License + +(MIT) + +Copyright (c) 2013 Julian Gruber <julian@juliangruber.com> + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
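For readers skimming the diff: the fallback that `index.js` ships (a couple of hunks below) is the classic `Object.prototype.toString` check. A minimal sketch of the same idea, with made-up sample values:

```js
// Sketch of the toString-based fallback used when Array.isArray is missing.
var toString = {}.toString;

function isArrayFallback(arr) {
  return toString.call(arr) === '[object Array]';
}

console.log(isArrayFallback([1, 2, 3]));     // true
console.log(isArrayFallback({ length: 3 })); // false
console.log(isArrayFallback('abc'));         // false
```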
diff --git a/node_modules/isarray/component.json b/node_modules/isarray/component.json new file mode 100644 index 0000000..9e31b68 --- /dev/null +++ b/node_modules/isarray/component.json @@ -0,0 +1,19 @@ +{ + "name" : "isarray", + "description" : "Array#isArray for older browsers", + "version" : "0.0.1", + "repository" : "juliangruber/isarray", + "homepage": "https://github.com/juliangruber/isarray", + "main" : "index.js", + "scripts" : [ + "index.js" + ], + "dependencies" : {}, + "keywords": ["browser","isarray","array"], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT" +} diff --git a/node_modules/isarray/index.js b/node_modules/isarray/index.js new file mode 100644 index 0000000..a57f634 --- /dev/null +++ b/node_modules/isarray/index.js @@ -0,0 +1,5 @@ +var toString = {}.toString; + +module.exports = Array.isArray || function (arr) { + return toString.call(arr) == '[object Array]'; +}; diff --git a/node_modules/isarray/package.json b/node_modules/isarray/package.json new file mode 100644 index 0000000..1a4317a --- /dev/null +++ b/node_modules/isarray/package.json @@ -0,0 +1,45 @@ +{ + "name": "isarray", + "description": "Array#isArray for older browsers", + "version": "1.0.0", + "repository": { + "type": "git", + "url": "git://github.com/juliangruber/isarray.git" + }, + "homepage": "https://github.com/juliangruber/isarray", + "main": "index.js", + "dependencies": {}, + "devDependencies": { + "tape": "~2.13.4" + }, + "keywords": [ + "browser", + "isarray", + "array" + ], + "author": { + "name": "Julian Gruber", + "email": "mail@juliangruber.com", + "url": "http://juliangruber.com" + }, + "license": "MIT", + "testling": { + "files": "test.js", + "browsers": [ + "ie/8..latest", + "firefox/17..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + }, + "scripts": { + "test": "tape test.js" + } +} diff --git a/node_modules/isarray/test.js b/node_modules/isarray/test.js new file mode 100644 index 0000000..e0c3444 --- /dev/null +++ b/node_modules/isarray/test.js @@ -0,0 +1,20 @@ +var isArray = require('./'); +var test = require('tape'); + +test('is array', function(t){ + t.ok(isArray([])); + t.notOk(isArray({})); + t.notOk(isArray(null)); + t.notOk(isArray(false)); + + var obj = {}; + obj[0] = true; + t.notOk(isArray(obj)); + + var arr = []; + arr.foo = 'bar'; + t.ok(isArray(arr)); + + t.end(); +}); + diff --git a/node_modules/minimist/.eslintrc b/node_modules/minimist/.eslintrc new file mode 100644 index 0000000..bd1a5e0 --- /dev/null +++ b/node_modules/minimist/.eslintrc @@ -0,0 +1,29 @@ +{ + "root": true, + + "extends": "@ljharb/eslint-config/node/0.4", + + "rules": { + "array-element-newline": 0, + "complexity": 0, + "func-style": [2, "declaration"], + "max-lines-per-function": 0, + "max-nested-callbacks": 1, + "max-statements-per-line": 1, + "max-statements": 0, + "multiline-comment-style": 0, + "no-continue": 1, + "no-param-reassign": 1, + "no-restricted-syntax": 1, + "object-curly-newline": 0, + }, + + "overrides": [ + { + "files": "test/**", + "rules": { + "camelcase": 0, + }, + }, + ] +} diff --git a/node_modules/minimist/.github/FUNDING.yml b/node_modules/minimist/.github/FUNDING.yml new file mode 100644 index 0000000..a936622 --- /dev/null +++ b/node_modules/minimist/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding 
model platforms + +github: [ljharb] +patreon: # Replace with a single Patreon username +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: npm/minimist +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2'] diff --git a/node_modules/minimist/.nycrc b/node_modules/minimist/.nycrc new file mode 100644 index 0000000..55c3d29 --- /dev/null +++ b/node_modules/minimist/.nycrc @@ -0,0 +1,14 @@ +{ + "all": true, + "check-coverage": false, + "reporter": ["text-summary", "text", "html", "json"], + "lines": 86, + "statements": 85.93, + "functions": 82.43, + "branches": 76.06, + "exclude": [ + "coverage", + "example", + "test" + ] +} diff --git a/node_modules/minimist/CHANGELOG.md b/node_modules/minimist/CHANGELOG.md new file mode 100644 index 0000000..c9a1e15 --- /dev/null +++ b/node_modules/minimist/CHANGELOG.md @@ -0,0 +1,298 @@ +# Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [v1.2.8](https://github.com/minimistjs/minimist/compare/v1.2.7...v1.2.8) - 2023-02-09 + +### Merged + +- [Fix] Fix long option followed by single dash [`#17`](https://github.com/minimistjs/minimist/pull/17) +- [Tests] Remove duplicate test [`#12`](https://github.com/minimistjs/minimist/pull/12) +- [Fix] opt.string works with multiple aliases [`#10`](https://github.com/minimistjs/minimist/pull/10) + +### Fixed + +- [Fix] Fix long option followed by single dash (#17) [`#15`](https://github.com/minimistjs/minimist/issues/15) +- [Tests] Remove duplicate test (#12) [`#8`](https://github.com/minimistjs/minimist/issues/8) +- [Fix] Fix long option followed by single dash [`#15`](https://github.com/minimistjs/minimist/issues/15) +- [Fix] opt.string works with multiple aliases (#10) [`#9`](https://github.com/minimistjs/minimist/issues/9) +- [Fix] Fix handling of short option with non-trivial equals [`#5`](https://github.com/minimistjs/minimist/issues/5) +- [Tests] Remove duplicate test [`#8`](https://github.com/minimistjs/minimist/issues/8) +- [Fix] opt.string works with multiple aliases [`#9`](https://github.com/minimistjs/minimist/issues/9) + +### Commits + +- Merge tag 'v0.2.3' [`a026794`](https://github.com/minimistjs/minimist/commit/a0267947c7870fc5847cf2d437fbe33f392767da) +- [eslint] fix indentation and whitespace [`5368ca4`](https://github.com/minimistjs/minimist/commit/5368ca4147e974138a54cc0dc4cea8f756546b70) +- [eslint] fix indentation and whitespace [`e5f5067`](https://github.com/minimistjs/minimist/commit/e5f5067259ceeaf0b098d14bec910f87e58708c7) +- [eslint] more cleanup [`62fde7d`](https://github.com/minimistjs/minimist/commit/62fde7d935f83417fb046741531a9e2346a36976) +- [eslint] more cleanup [`36ac5d0`](https://github.com/minimistjs/minimist/commit/36ac5d0d95e4947d074e5737d94814034ca335d1) +- [meta] add `auto-changelog` [`73923d2`](https://github.com/minimistjs/minimist/commit/73923d223553fca08b1ba77e3fbc2a492862ae4c) +- [actions] add reusable workflows [`d80727d`](https://github.com/minimistjs/minimist/commit/d80727df77bfa9e631044d7f16368d8f09242c91) +- [eslint] 
add eslint; rules to enable later are warnings [`48bc06a`](https://github.com/minimistjs/minimist/commit/48bc06a1b41f00e9cdf183db34f7a51ba70e98d4) +- [eslint] fix indentation [`34b0f1c`](https://github.com/minimistjs/minimist/commit/34b0f1ccaa45183c3c4f06a91f9b405180a6f982) +- [readme] rename and add badges [`5df0fe4`](https://github.com/minimistjs/minimist/commit/5df0fe49211bd09a3636f8686a7cb3012c3e98f0) +- [Dev Deps] switch from `covert` to `nyc` [`a48b128`](https://github.com/minimistjs/minimist/commit/a48b128fdb8d427dfb20a15273f83e38d97bef07) +- [Dev Deps] update `covert`, `tape`; remove unnecessary `tap` [`f0fb958`](https://github.com/minimistjs/minimist/commit/f0fb958e9a1fe980cdffc436a211b0bda58f621b) +- [meta] create FUNDING.yml; add `funding` in package.json [`3639e0c`](https://github.com/minimistjs/minimist/commit/3639e0c819359a366387e425ab6eabf4c78d3caa) +- [meta] use `npmignore` to autogenerate an npmignore file [`be2e038`](https://github.com/minimistjs/minimist/commit/be2e038c342d8333b32f0fde67a0026b79c8150e) +- Only apps should have lockfiles [`282b570`](https://github.com/minimistjs/minimist/commit/282b570e7489d01b03f2d6d3dabf79cd3e5f84cf) +- isConstructorOrProto adapted from PR [`ef9153f`](https://github.com/minimistjs/minimist/commit/ef9153fc52b6cea0744b2239921c5dcae4697f11) +- [Dev Deps] update `@ljharb/eslint-config`, `aud` [`098873c`](https://github.com/minimistjs/minimist/commit/098873c213cdb7c92e55ae1ef5aa1af3a8192a79) +- [Dev Deps] update `@ljharb/eslint-config`, `aud` [`3124ed3`](https://github.com/minimistjs/minimist/commit/3124ed3e46306301ebb3c834874ce0241555c2c4) +- [meta] add `safe-publish-latest` [`4b927de`](https://github.com/minimistjs/minimist/commit/4b927de696d561c636b4f43bf49d4597cb36d6d6) +- [Tests] add `aud` in `posttest` [`b32d9bd`](https://github.com/minimistjs/minimist/commit/b32d9bd0ab340f4e9f8c3a97ff2a4424f25fab8c) +- [meta] update repo URLs [`f9fdfc0`](https://github.com/minimistjs/minimist/commit/f9fdfc032c54884d9a9996a390c63cd0719bbe1a) +- [actions] Avoid 0.6 tests due to build failures [`ba92fe6`](https://github.com/minimistjs/minimist/commit/ba92fe6ebbdc0431cca9a2ea8f27beb492f5e4ec) +- [Dev Deps] update `tape` [`950eaa7`](https://github.com/minimistjs/minimist/commit/950eaa74f112e04d23e9c606c67472c46739b473) +- [Dev Deps] add missing `npmignore` dev dep [`3226afa`](https://github.com/minimistjs/minimist/commit/3226afaf09e9d127ca369742437fe6e88f752d6b) +- Merge tag 'v0.2.2' [`980d7ac`](https://github.com/minimistjs/minimist/commit/980d7ac61a0b4bd552711251ac107d506b23e41f) + +## [v1.2.7](https://github.com/minimistjs/minimist/compare/v1.2.6...v1.2.7) - 2022-10-10 + +### Commits + +- [meta] add `auto-changelog` [`0ebf4eb`](https://github.com/minimistjs/minimist/commit/0ebf4ebcd5f7787a5524d31a849ef41316b83c3c) +- [actions] add reusable workflows [`e115b63`](https://github.com/minimistjs/minimist/commit/e115b63fa9d3909f33b00a2db647ff79068388de) +- [eslint] add eslint; rules to enable later are warnings [`f58745b`](https://github.com/minimistjs/minimist/commit/f58745b9bb84348e1be72af7dbba5840c7c13013) +- [Dev Deps] switch from `covert` to `nyc` [`ab03356`](https://github.com/minimistjs/minimist/commit/ab033567b9c8b31117cb026dc7f1e592ce455c65) +- [readme] rename and add badges [`236f4a0`](https://github.com/minimistjs/minimist/commit/236f4a07e4ebe5ee44f1496ec6974991ab293ffd) +- [meta] create FUNDING.yml; add `funding` in package.json [`783a49b`](https://github.com/minimistjs/minimist/commit/783a49bfd47e8335d3098a8cac75662cf71eb32a) +- [meta] use 
`npmignore` to autogenerate an npmignore file [`f81ece6`](https://github.com/minimistjs/minimist/commit/f81ece6aaec2fa14e69ff4f1e0407a8c4e2635a2) +- Only apps should have lockfiles [`56cad44`](https://github.com/minimistjs/minimist/commit/56cad44c7f879b9bb5ec18fcc349308024a89bfc) +- [Dev Deps] update `covert`, `tape`; remove unnecessary `tap` [`49c5f9f`](https://github.com/minimistjs/minimist/commit/49c5f9fb7e6a92db9eb340cc679de92fb3aacded) +- [Tests] add `aud` in `posttest` [`228ae93`](https://github.com/minimistjs/minimist/commit/228ae938f3cd9db9dfd8bd7458b076a7b2aef280) +- [meta] add `safe-publish-latest` [`01fc23f`](https://github.com/minimistjs/minimist/commit/01fc23f5104f85c75059972e01dd33796ab529ff) +- [meta] update repo URLs [`6b164c7`](https://github.com/minimistjs/minimist/commit/6b164c7d68e0b6bf32f894699effdfb7c63041dd) + +## [v1.2.6](https://github.com/minimistjs/minimist/compare/v1.2.5...v1.2.6) - 2022-03-21 + +### Commits + +- test from prototype pollution PR [`bc8ecee`](https://github.com/minimistjs/minimist/commit/bc8ecee43875261f4f17eb20b1243d3ed15e70eb) +- isConstructorOrProto adapted from PR [`c2b9819`](https://github.com/minimistjs/minimist/commit/c2b981977fa834b223b408cfb860f933c9811e4d) +- security notice for additional prototype pollution issue [`ef88b93`](https://github.com/minimistjs/minimist/commit/ef88b9325f77b5ee643ccfc97e2ebda577e4c4e2) + +## [v1.2.5](https://github.com/minimistjs/minimist/compare/v1.2.4...v1.2.5) - 2020-03-12 + +## [v1.2.4](https://github.com/minimistjs/minimist/compare/v1.2.3...v1.2.4) - 2020-03-11 + +### Commits + +- security notice [`4cf1354`](https://github.com/minimistjs/minimist/commit/4cf1354839cb972e38496d35e12f806eea92c11f) +- additional test for constructor prototype pollution [`1043d21`](https://github.com/minimistjs/minimist/commit/1043d212c3caaf871966e710f52cfdf02f9eea4b) + +## [v1.2.3](https://github.com/minimistjs/minimist/compare/v1.2.2...v1.2.3) - 2020-03-10 + +### Commits + +- more failing proto pollution tests [`13c01a5`](https://github.com/minimistjs/minimist/commit/13c01a5327736903704984b7f65616b8476850cc) +- even more aggressive checks for protocol pollution [`38a4d1c`](https://github.com/minimistjs/minimist/commit/38a4d1caead72ef99e824bb420a2528eec03d9ab) + +## [v1.2.2](https://github.com/minimistjs/minimist/compare/v1.2.1...v1.2.2) - 2020-03-10 + +### Commits + +- failing test for protocol pollution [`0efed03`](https://github.com/minimistjs/minimist/commit/0efed0340ec8433638758f7ca0c77cb20a0bfbab) +- cleanup [`67d3722`](https://github.com/minimistjs/minimist/commit/67d3722413448d00a62963d2d30c34656a92d7e2) +- console.dir -> console.log [`47acf72`](https://github.com/minimistjs/minimist/commit/47acf72c715a630bf9ea013867f47f1dd69dfc54) +- don't assign onto __proto__ [`63e7ed0`](https://github.com/minimistjs/minimist/commit/63e7ed05aa4b1889ec2f3b196426db4500cbda94) + +## [v1.2.1](https://github.com/minimistjs/minimist/compare/v1.2.0...v1.2.1) - 2020-03-10 + +### Merged + +- move the `opts['--']` example back where it belongs [`#63`](https://github.com/minimistjs/minimist/pull/63) + +### Commits + +- add test [`6be5dae`](https://github.com/minimistjs/minimist/commit/6be5dae35a32a987bcf4137fcd6c19c5200ee909) +- fix bad boolean regexp [`ac3fc79`](https://github.com/minimistjs/minimist/commit/ac3fc796e63b95128fdbdf67ea7fad71bd59aa76) + +## [v1.2.0](https://github.com/minimistjs/minimist/compare/v1.1.3...v1.2.0) - 2015-08-24 + +### Commits + +- failing -k=v short test 
[`63416b8`](https://github.com/minimistjs/minimist/commit/63416b8cd1d0d70e4714564cce465a36e4dd26d7) +- kv short fix [`6bbe145`](https://github.com/minimistjs/minimist/commit/6bbe14529166245e86424f220a2321442fe88dc3) +- failing kv short test [`f72ab7f`](https://github.com/minimistjs/minimist/commit/f72ab7f4572adc52902c9b6873cc969192f01b10) +- fixed kv test [`f5a48c3`](https://github.com/minimistjs/minimist/commit/f5a48c3e50e40ca54f00c8e84de4b4d6e9897fa8) +- enforce space between arg key and value [`86b321a`](https://github.com/minimistjs/minimist/commit/86b321affe648a8e016c095a4f0efa9d9074f502) + +## [v1.1.3](https://github.com/minimistjs/minimist/compare/v1.1.2...v1.1.3) - 2015-08-06 + +### Commits + +- add failing test - boolean alias array [`0fa3c5b`](https://github.com/minimistjs/minimist/commit/0fa3c5b3dd98551ddecf5392831b4c21211743fc) +- fix boolean values with multiple aliases [`9c0a6e7`](https://github.com/minimistjs/minimist/commit/9c0a6e7de25a273b11bbf9a7464f0bd833779795) + +## [v1.1.2](https://github.com/minimistjs/minimist/compare/v1.1.1...v1.1.2) - 2015-07-22 + +### Commits + +- Convert boolean arguments to boolean values [`8f3dc27`](https://github.com/minimistjs/minimist/commit/8f3dc27cf833f1d54671b6d0bcb55c2fe19672a9) +- use non-ancient npm, node 0.12 and iojs [`61ed1d0`](https://github.com/minimistjs/minimist/commit/61ed1d034b9ec7282764ce76f3992b1a0b4906ae) +- an older npm for 0.8 [`25cf778`](https://github.com/minimistjs/minimist/commit/25cf778b1220e7838a526832ad6972f75244054f) + +## [v1.1.1](https://github.com/minimistjs/minimist/compare/v1.1.0...v1.1.1) - 2015-03-10 + +### Commits + +- check that they type of a value is a boolean, not just that it is currently set to a boolean [`6863198`](https://github.com/minimistjs/minimist/commit/6863198e36139830ff1f20ffdceaddd93f2c1db9) +- upgrade tape, fix type issues from old tape version [`806712d`](https://github.com/minimistjs/minimist/commit/806712df91604ed02b8e39aa372b84aea659ee34) +- test for setting a boolean to a null default [`8c444fe`](https://github.com/minimistjs/minimist/commit/8c444fe89384ded7d441c120915ea60620b01dd3) +- if the previous value was a boolean, without an default (or with an alias) don't make an array either [`e5f419a`](https://github.com/minimistjs/minimist/commit/e5f419a3b5b3bc3f9e5ac71b7040621af70ed2dd) + +## [v1.1.0](https://github.com/minimistjs/minimist/compare/v1.0.0...v1.1.0) - 2014-08-10 + +### Commits + +- add support for handling "unknown" options not registered with the parser. 
[`6f3cc5d`](https://github.com/minimistjs/minimist/commit/6f3cc5d4e84524932a6ef2ce3592acc67cdd4383) +- reformat package.json [`02ed371`](https://github.com/minimistjs/minimist/commit/02ed37115194d3697ff358e8e25e5e66bab1d9f8) +- coverage script [`e5531ba`](https://github.com/minimistjs/minimist/commit/e5531ba0479da3b8138d3d8cac545d84ccb1c8df) +- extra fn to get 100% coverage again [`a6972da`](https://github.com/minimistjs/minimist/commit/a6972da89e56bf77642f8ec05a13b6558db93498) + +## [v1.0.0](https://github.com/minimistjs/minimist/compare/v0.2.3...v1.0.0) - 2014-08-10 + +### Commits + +- added stopEarly option [`471c7e4`](https://github.com/minimistjs/minimist/commit/471c7e4a7e910fc7ad8f9df850a186daf32c64e9) +- fix list [`fef6ae7`](https://github.com/minimistjs/minimist/commit/fef6ae79c38b9dc1c49569abb7cd04eb965eac5e) + +## [v0.2.3](https://github.com/minimistjs/minimist/compare/v0.2.2...v0.2.3) - 2023-02-09 + +### Merged + +- [Fix] Fix long option followed by single dash [`#17`](https://github.com/minimistjs/minimist/pull/17) +- [Tests] Remove duplicate test [`#12`](https://github.com/minimistjs/minimist/pull/12) +- [Fix] opt.string works with multiple aliases [`#10`](https://github.com/minimistjs/minimist/pull/10) + +### Fixed + +- [Fix] Fix long option followed by single dash (#17) [`#15`](https://github.com/minimistjs/minimist/issues/15) +- [Tests] Remove duplicate test (#12) [`#8`](https://github.com/minimistjs/minimist/issues/8) +- [Fix] opt.string works with multiple aliases (#10) [`#9`](https://github.com/minimistjs/minimist/issues/9) + +### Commits + +- [eslint] fix indentation and whitespace [`e5f5067`](https://github.com/minimistjs/minimist/commit/e5f5067259ceeaf0b098d14bec910f87e58708c7) +- [eslint] more cleanup [`36ac5d0`](https://github.com/minimistjs/minimist/commit/36ac5d0d95e4947d074e5737d94814034ca335d1) +- [eslint] fix indentation [`34b0f1c`](https://github.com/minimistjs/minimist/commit/34b0f1ccaa45183c3c4f06a91f9b405180a6f982) +- isConstructorOrProto adapted from PR [`ef9153f`](https://github.com/minimistjs/minimist/commit/ef9153fc52b6cea0744b2239921c5dcae4697f11) +- [Dev Deps] update `@ljharb/eslint-config`, `aud` [`098873c`](https://github.com/minimistjs/minimist/commit/098873c213cdb7c92e55ae1ef5aa1af3a8192a79) +- [Dev Deps] add missing `npmignore` dev dep [`3226afa`](https://github.com/minimistjs/minimist/commit/3226afaf09e9d127ca369742437fe6e88f752d6b) + +## [v0.2.2](https://github.com/minimistjs/minimist/compare/v0.2.1...v0.2.2) - 2022-10-10 + +### Commits + +- [meta] add `auto-changelog` [`73923d2`](https://github.com/minimistjs/minimist/commit/73923d223553fca08b1ba77e3fbc2a492862ae4c) +- [actions] add reusable workflows [`d80727d`](https://github.com/minimistjs/minimist/commit/d80727df77bfa9e631044d7f16368d8f09242c91) +- [eslint] add eslint; rules to enable later are warnings [`48bc06a`](https://github.com/minimistjs/minimist/commit/48bc06a1b41f00e9cdf183db34f7a51ba70e98d4) +- [readme] rename and add badges [`5df0fe4`](https://github.com/minimistjs/minimist/commit/5df0fe49211bd09a3636f8686a7cb3012c3e98f0) +- [Dev Deps] switch from `covert` to `nyc` [`a48b128`](https://github.com/minimistjs/minimist/commit/a48b128fdb8d427dfb20a15273f83e38d97bef07) +- [Dev Deps] update `covert`, `tape`; remove unnecessary `tap` [`f0fb958`](https://github.com/minimistjs/minimist/commit/f0fb958e9a1fe980cdffc436a211b0bda58f621b) +- [meta] create FUNDING.yml; add `funding` in package.json 
[`3639e0c`](https://github.com/minimistjs/minimist/commit/3639e0c819359a366387e425ab6eabf4c78d3caa) +- [meta] use `npmignore` to autogenerate an npmignore file [`be2e038`](https://github.com/minimistjs/minimist/commit/be2e038c342d8333b32f0fde67a0026b79c8150e) +- Only apps should have lockfiles [`282b570`](https://github.com/minimistjs/minimist/commit/282b570e7489d01b03f2d6d3dabf79cd3e5f84cf) +- [meta] add `safe-publish-latest` [`4b927de`](https://github.com/minimistjs/minimist/commit/4b927de696d561c636b4f43bf49d4597cb36d6d6) +- [Tests] add `aud` in `posttest` [`b32d9bd`](https://github.com/minimistjs/minimist/commit/b32d9bd0ab340f4e9f8c3a97ff2a4424f25fab8c) +- [meta] update repo URLs [`f9fdfc0`](https://github.com/minimistjs/minimist/commit/f9fdfc032c54884d9a9996a390c63cd0719bbe1a) + +## [v0.2.1](https://github.com/minimistjs/minimist/compare/v0.2.0...v0.2.1) - 2020-03-12 + +## [v0.2.0](https://github.com/minimistjs/minimist/compare/v0.1.0...v0.2.0) - 2014-06-19 + +### Commits + +- support all-boolean mode [`450a97f`](https://github.com/minimistjs/minimist/commit/450a97f6e2bc85c7a4a13185c19a818d9a5ebe69) + +## [v0.1.0](https://github.com/minimistjs/minimist/compare/v0.0.10...v0.1.0) - 2014-05-12 + +### Commits + +- Provide a mechanism to segregate -- arguments [`ce4a1e6`](https://github.com/minimistjs/minimist/commit/ce4a1e63a7e8d5ab88d2a3768adefa6af98a445a) +- documented argv['--'] [`14db0e6`](https://github.com/minimistjs/minimist/commit/14db0e6dbc6d2b9e472adaa54dad7004b364634f) +- Adding a test-case for notFlags segregation [`715c1e3`](https://github.com/minimistjs/minimist/commit/715c1e3714be223f998f6c537af6b505f0236c16) + +## [v0.0.10](https://github.com/minimistjs/minimist/compare/v0.0.9...v0.0.10) - 2014-05-11 + +### Commits + +- dedicated boolean test [`46e448f`](https://github.com/minimistjs/minimist/commit/46e448f9f513cfeb2bcc8b688b9b47ba1e515c2b) +- dedicated num test [`9bf2d36`](https://github.com/minimistjs/minimist/commit/9bf2d36f1d3b8795be90b8f7de0a937f098aa394) +- aliased values treated as strings [`1ab743b`](https://github.com/minimistjs/minimist/commit/1ab743bad4484d69f1259bed42f9531de01119de) +- cover the case of already numbers, at 100% coverage [`b2bb044`](https://github.com/minimistjs/minimist/commit/b2bb04436599d77a2ce029e8e555e25b3aa55d13) +- another test for higher coverage [`3662624`](https://github.com/minimistjs/minimist/commit/3662624be976d5489d486a856849c048d13be903) + +## [v0.0.9](https://github.com/minimistjs/minimist/compare/v0.0.8...v0.0.9) - 2014-05-08 + +### Commits + +- Eliminate `longest` fn. 
[`824f642`](https://github.com/minimistjs/minimist/commit/824f642038d1b02ede68b6261d1d65163390929a) + +## [v0.0.8](https://github.com/minimistjs/minimist/compare/v0.0.7...v0.0.8) - 2014-02-20 + +### Commits + +- return '' if flag is string and empty [`fa63ed4`](https://github.com/minimistjs/minimist/commit/fa63ed4651a4ef4eefddce34188e0d98d745a263) +- handle joined single letters [`66c248f`](https://github.com/minimistjs/minimist/commit/66c248f0241d4d421d193b022e9e365f11178534) + +## [v0.0.7](https://github.com/minimistjs/minimist/compare/v0.0.6...v0.0.7) - 2014-02-08 + +### Commits + +- another swap of .test for .match [`d1da408`](https://github.com/minimistjs/minimist/commit/d1da40819acbe846d89a5c02721211e3c1260dde) + +## [v0.0.6](https://github.com/minimistjs/minimist/compare/v0.0.5...v0.0.6) - 2014-02-08 + +### Commits + +- use .test() instead of .match() to not crash on non-string values in the arguments array [`7e0d1ad`](https://github.com/minimistjs/minimist/commit/7e0d1add8c9e5b9b20a4d3d0f9a94d824c578da1) + +## [v0.0.5](https://github.com/minimistjs/minimist/compare/v0.0.4...v0.0.5) - 2013-09-18 + +### Commits + +- Improve '--' handling. [`b11822c`](https://github.com/minimistjs/minimist/commit/b11822c09cc9d2460f30384d12afc0b953c037a4) + +## [v0.0.4](https://github.com/minimistjs/minimist/compare/v0.0.3...v0.0.4) - 2013-09-17 + +## [v0.0.3](https://github.com/minimistjs/minimist/compare/v0.0.2...v0.0.3) - 2013-09-12 + +### Commits + +- failing test for single dash preceeding a double dash [`b465514`](https://github.com/minimistjs/minimist/commit/b465514b82c9ae28972d714facd951deb2ad762b) +- fix for the dot test [`6a095f1`](https://github.com/minimistjs/minimist/commit/6a095f1d364c8fab2d6753d2291a0649315d297a) + +## [v0.0.2](https://github.com/minimistjs/minimist/compare/v0.0.1...v0.0.2) - 2013-08-28 + +### Commits + +- allow dotted aliases & defaults [`321c33e`](https://github.com/minimistjs/minimist/commit/321c33e755485faaeb44eeb1c05d33b2e0a5a7c4) +- use a better version of ff [`e40f611`](https://github.com/minimistjs/minimist/commit/e40f61114cf7be6f7947f7b3eed345853a67dbbb) + +## [v0.0.1](https://github.com/minimistjs/minimist/compare/v0.0.0...v0.0.1) - 2013-06-25 + +### Commits + +- remove trailing commas [`6ff0fa0`](https://github.com/minimistjs/minimist/commit/6ff0fa055064f15dbe06d50b89d5173a6796e1db) + +## v0.0.0 - 2013-06-25 + +### Commits + +- half of the parse test ported [`3079326`](https://github.com/minimistjs/minimist/commit/307932601325087de6cf94188eb798ffc4f3088a) +- stripped down code and a passing test from optimist [`7cced88`](https://github.com/minimistjs/minimist/commit/7cced88d82e399d1a03ed23eb667f04d3f320d10) +- ported parse tests completely over [`9448754`](https://github.com/minimistjs/minimist/commit/944875452e0820df6830b1408c26a0f7d3e1db04) +- docs, package.json [`a5bf46a`](https://github.com/minimistjs/minimist/commit/a5bf46ac9bb3bd114a9c340276c62c1091e538d5) +- move more short tests into short.js [`503edb5`](https://github.com/minimistjs/minimist/commit/503edb5c41d89c0d40831ee517154fc13b0f18b9) +- default bool test was wrong, not the code [`1b9f5db`](https://github.com/minimistjs/minimist/commit/1b9f5db4741b49962846081b68518de824992097) +- passing long tests ripped out of parse.js [`7972c4a`](https://github.com/minimistjs/minimist/commit/7972c4aff1f4803079e1668006658e2a761a0428) +- badges [`84c0370`](https://github.com/minimistjs/minimist/commit/84c037063664d42878aace715fe6572ce01b6f3b) +- all the tests now ported, some failures 
[`64239ed`](https://github.com/minimistjs/minimist/commit/64239edfe92c711c4eb0da254fcdfad2a5fdb605) +- failing short test [`f8a5341`](https://github.com/minimistjs/minimist/commit/f8a534112dd1138d2fad722def56a848480c446f) +- fixed the numeric test [`6b034f3`](https://github.com/minimistjs/minimist/commit/6b034f37c79342c60083ed97fd222e16928aac51) diff --git a/node_modules/minimist/LICENSE b/node_modules/minimist/LICENSE new file mode 100644 index 0000000..ee27ba4 --- /dev/null +++ b/node_modules/minimist/LICENSE @@ -0,0 +1,18 @@ +This software is released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/minimist/README.md b/node_modules/minimist/README.md new file mode 100644 index 0000000..74da323 --- /dev/null +++ b/node_modules/minimist/README.md @@ -0,0 +1,121 @@ +# minimist [![Version Badge][npm-version-svg]][package-url] + +[![github actions][actions-image]][actions-url] +[![coverage][codecov-image]][codecov-url] +[![License][license-image]][license-url] +[![Downloads][downloads-image]][downloads-url] + +[![npm badge][npm-badge-png]][package-url] + +parse argument options + +This module is the guts of optimist's argument parser without all the +fanciful decoration. + +# example + +``` js +var argv = require('minimist')(process.argv.slice(2)); +console.log(argv); +``` + +``` +$ node example/parse.js -a beep -b boop +{ _: [], a: 'beep', b: 'boop' } +``` + +``` +$ node example/parse.js -x 3 -y 4 -n5 -abc --beep=boop foo bar baz +{ + _: ['foo', 'bar', 'baz'], + x: 3, + y: 4, + n: 5, + a: true, + b: true, + c: true, + beep: 'boop' +} +``` + +# security + +Previous versions had a prototype pollution bug that could cause privilege +escalation in some circumstances when handling untrusted user input. + +Please use version 1.2.6 or later: + +* https://security.snyk.io/vuln/SNYK-JS-MINIMIST-2429795 (version <=1.2.5) +* https://snyk.io/vuln/SNYK-JS-MINIMIST-559764 (version <=1.2.3) + +# methods + +``` js +var parseArgs = require('minimist') +``` + +## var argv = parseArgs(args, opts={}) + +Return an argument object `argv` populated with the array arguments from `args`. + +`argv._` contains all the arguments that didn't have an option associated with +them. + +Numeric-looking arguments will be returned as numbers unless `opts.string` or +`opts.boolean` is set for that argument name. + +Any arguments after `'--'` will not be parsed and will end up in `argv._`. 
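For example, a small sketch of that default behaviour (the argument values are invented; only the call documented above is used):

``` js
var parseArgs = require('minimist');

// everything after '--' lands in argv._ unparsed
var argv = parseArgs(['-a', '1', '--', '-b', '2']);
console.log(argv);
// => { _: [ '-b', '2' ], a: 1 }
```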
+ +options can be: + +* `opts.string` - a string or array of strings argument names to always treat as +strings +* `opts.boolean` - a boolean, string or array of strings to always treat as +booleans. if `true` will treat all double hyphenated arguments without equal signs +as boolean (e.g. affects `--foo`, not `-f` or `--foo=bar`) +* `opts.alias` - an object mapping string names to strings or arrays of string +argument names to use as aliases +* `opts.default` - an object mapping string argument names to default values +* `opts.stopEarly` - when true, populate `argv._` with everything after the +first non-option +* `opts['--']` - when true, populate `argv._` with everything before the `--` +and `argv['--']` with everything after the `--`. Here's an example: + + ``` + > require('./')('one two three -- four five --six'.split(' '), { '--': true }) + { + _: ['one', 'two', 'three'], + '--': ['four', 'five', '--six'] + } + ``` + + Note that with `opts['--']` set, parsing for arguments still stops after the + `--`. + +* `opts.unknown` - a function which is invoked with a command line parameter not +defined in the `opts` configuration object. If the function returns `false`, the +unknown option is not added to `argv`. + +# install + +With [npm](https://npmjs.org) do: + +``` +npm install minimist +``` + +# license + +MIT + +[package-url]: https://npmjs.org/package/minimist +[npm-version-svg]: https://versionbadg.es/minimistjs/minimist.svg +[npm-badge-png]: https://nodei.co/npm/minimist.png?downloads=true&stars=true +[license-image]: https://img.shields.io/npm/l/minimist.svg +[license-url]: LICENSE +[downloads-image]: https://img.shields.io/npm/dm/minimist.svg +[downloads-url]: https://npm-stat.com/charts.html?package=minimist +[codecov-image]: https://codecov.io/gh/minimistjs/minimist/branch/main/graphs/badge.svg +[codecov-url]: https://app.codecov.io/gh/minimistjs/minimist/ +[actions-image]: https://img.shields.io/endpoint?url=https://github-actions-badge-u3jn4tfpocch.runkit.sh/minimistjs/minimist +[actions-url]: https://github.com/minimistjs/minimist/actions diff --git a/node_modules/minimist/example/parse.js b/node_modules/minimist/example/parse.js new file mode 100644 index 0000000..9d90ffb --- /dev/null +++ b/node_modules/minimist/example/parse.js @@ -0,0 +1,4 @@ +'use strict'; + +var argv = require('../')(process.argv.slice(2)); +console.log(argv); diff --git a/node_modules/minimist/index.js b/node_modules/minimist/index.js new file mode 100644 index 0000000..f020f39 --- /dev/null +++ b/node_modules/minimist/index.js @@ -0,0 +1,263 @@ +'use strict'; + +function hasKey(obj, keys) { + var o = obj; + keys.slice(0, -1).forEach(function (key) { + o = o[key] || {}; + }); + + var key = keys[keys.length - 1]; + return key in o; +} + +function isNumber(x) { + if (typeof x === 'number') { return true; } + if ((/^0x[0-9a-f]+$/i).test(x)) { return true; } + return (/^[-+]?(?:\d+(?:\.\d*)?|\.\d+)(e[-+]?\d+)?$/).test(x); +} + +function isConstructorOrProto(obj, key) { + return (key === 'constructor' && typeof obj[key] === 'function') || key === '__proto__'; +} + +module.exports = function (args, opts) { + if (!opts) { opts = {}; } + + var flags = { + bools: {}, + strings: {}, + unknownFn: null, + }; + + if (typeof opts.unknown === 'function') { + flags.unknownFn = opts.unknown; + } + + if (typeof opts.boolean === 'boolean' && opts.boolean) { + flags.allBools = true; + } else { + [].concat(opts.boolean).filter(Boolean).forEach(function (key) { + flags.bools[key] = true; + }); + } + + var aliases = {}; + + 
function aliasIsBoolean(key) { + return aliases[key].some(function (x) { + return flags.bools[x]; + }); + } + + Object.keys(opts.alias || {}).forEach(function (key) { + aliases[key] = [].concat(opts.alias[key]); + aliases[key].forEach(function (x) { + aliases[x] = [key].concat(aliases[key].filter(function (y) { + return x !== y; + })); + }); + }); + + [].concat(opts.string).filter(Boolean).forEach(function (key) { + flags.strings[key] = true; + if (aliases[key]) { + [].concat(aliases[key]).forEach(function (k) { + flags.strings[k] = true; + }); + } + }); + + var defaults = opts.default || {}; + + var argv = { _: [] }; + + function argDefined(key, arg) { + return (flags.allBools && (/^--[^=]+$/).test(arg)) + || flags.strings[key] + || flags.bools[key] + || aliases[key]; + } + + function setKey(obj, keys, value) { + var o = obj; + for (var i = 0; i < keys.length - 1; i++) { + var key = keys[i]; + if (isConstructorOrProto(o, key)) { return; } + if (o[key] === undefined) { o[key] = {}; } + if ( + o[key] === Object.prototype + || o[key] === Number.prototype + || o[key] === String.prototype + ) { + o[key] = {}; + } + if (o[key] === Array.prototype) { o[key] = []; } + o = o[key]; + } + + var lastKey = keys[keys.length - 1]; + if (isConstructorOrProto(o, lastKey)) { return; } + if ( + o === Object.prototype + || o === Number.prototype + || o === String.prototype + ) { + o = {}; + } + if (o === Array.prototype) { o = []; } + if (o[lastKey] === undefined || flags.bools[lastKey] || typeof o[lastKey] === 'boolean') { + o[lastKey] = value; + } else if (Array.isArray(o[lastKey])) { + o[lastKey].push(value); + } else { + o[lastKey] = [o[lastKey], value]; + } + } + + function setArg(key, val, arg) { + if (arg && flags.unknownFn && !argDefined(key, arg)) { + if (flags.unknownFn(arg) === false) { return; } + } + + var value = !flags.strings[key] && isNumber(val) + ? Number(val) + : val; + setKey(argv, key.split('.'), value); + + (aliases[key] || []).forEach(function (x) { + setKey(argv, x.split('.'), value); + }); + } + + Object.keys(flags.bools).forEach(function (key) { + setArg(key, defaults[key] === undefined ? false : defaults[key]); + }); + + var notFlags = []; + + if (args.indexOf('--') !== -1) { + notFlags = args.slice(args.indexOf('--') + 1); + args = args.slice(0, args.indexOf('--')); + } + + for (var i = 0; i < args.length; i++) { + var arg = args[i]; + var key; + var next; + + if ((/^--.+=/).test(arg)) { + // Using [\s\S] instead of . because js doesn't support the + // 'dotall' regex modifier. See: + // http://stackoverflow.com/a/1068308/13216 + var m = arg.match(/^--([^=]+)=([\s\S]*)$/); + key = m[1]; + var value = m[2]; + if (flags.bools[key]) { + value = value !== 'false'; + } + setArg(key, value, arg); + } else if ((/^--no-.+/).test(arg)) { + key = arg.match(/^--no-(.+)/)[1]; + setArg(key, false, arg); + } else if ((/^--.+/).test(arg)) { + key = arg.match(/^--(.+)/)[1]; + next = args[i + 1]; + if ( + next !== undefined + && !(/^(-|--)[^-]/).test(next) + && !flags.bools[key] + && !flags.allBools + && (aliases[key] ? !aliasIsBoolean(key) : true) + ) { + setArg(key, next, arg); + i += 1; + } else if ((/^(true|false)$/).test(next)) { + setArg(key, next === 'true', arg); + i += 1; + } else { + setArg(key, flags.strings[key] ? 
'' : true, arg); + } + } else if ((/^-[^-]+/).test(arg)) { + var letters = arg.slice(1, -1).split(''); + + var broken = false; + for (var j = 0; j < letters.length; j++) { + next = arg.slice(j + 2); + + if (next === '-') { + setArg(letters[j], next, arg); + continue; + } + + if ((/[A-Za-z]/).test(letters[j]) && next[0] === '=') { + setArg(letters[j], next.slice(1), arg); + broken = true; + break; + } + + if ( + (/[A-Za-z]/).test(letters[j]) + && (/-?\d+(\.\d*)?(e-?\d+)?$/).test(next) + ) { + setArg(letters[j], next, arg); + broken = true; + break; + } + + if (letters[j + 1] && letters[j + 1].match(/\W/)) { + setArg(letters[j], arg.slice(j + 2), arg); + broken = true; + break; + } else { + setArg(letters[j], flags.strings[letters[j]] ? '' : true, arg); + } + } + + key = arg.slice(-1)[0]; + if (!broken && key !== '-') { + if ( + args[i + 1] + && !(/^(-|--)[^-]/).test(args[i + 1]) + && !flags.bools[key] + && (aliases[key] ? !aliasIsBoolean(key) : true) + ) { + setArg(key, args[i + 1], arg); + i += 1; + } else if (args[i + 1] && (/^(true|false)$/).test(args[i + 1])) { + setArg(key, args[i + 1] === 'true', arg); + i += 1; + } else { + setArg(key, flags.strings[key] ? '' : true, arg); + } + } + } else { + if (!flags.unknownFn || flags.unknownFn(arg) !== false) { + argv._.push(flags.strings._ || !isNumber(arg) ? arg : Number(arg)); + } + if (opts.stopEarly) { + argv._.push.apply(argv._, args.slice(i + 1)); + break; + } + } + } + + Object.keys(defaults).forEach(function (k) { + if (!hasKey(argv, k.split('.'))) { + setKey(argv, k.split('.'), defaults[k]); + + (aliases[k] || []).forEach(function (x) { + setKey(argv, x.split('.'), defaults[k]); + }); + } + }); + + if (opts['--']) { + argv['--'] = notFlags.slice(); + } else { + notFlags.forEach(function (k) { + argv._.push(k); + }); + } + + return argv; +}; diff --git a/node_modules/minimist/package.json b/node_modules/minimist/package.json new file mode 100644 index 0000000..c10a334 --- /dev/null +++ b/node_modules/minimist/package.json @@ -0,0 +1,75 @@ +{ + "name": "minimist", + "version": "1.2.8", + "description": "parse argument options", + "main": "index.js", + "devDependencies": { + "@ljharb/eslint-config": "^21.0.1", + "aud": "^2.0.2", + "auto-changelog": "^2.4.0", + "eslint": "=8.8.0", + "in-publish": "^2.0.1", + "npmignore": "^0.3.0", + "nyc": "^10.3.2", + "safe-publish-latest": "^2.0.0", + "tape": "^5.6.3" + }, + "scripts": { + "prepack": "npmignore --auto --commentLines=auto", + "prepublishOnly": "safe-publish-latest", + "prepublish": "not-in-publish || npm run prepublishOnly", + "lint": "eslint --ext=js,mjs .", + "pretest": "npm run lint", + "tests-only": "nyc tape 'test/**/*.js'", + "test": "npm run tests-only", + "posttest": "aud --production", + "version": "auto-changelog && git add CHANGELOG.md", + "postversion": "auto-changelog && git add CHANGELOG.md && git commit --no-edit --amend && git tag -f \"v$(node -e \"console.log(require('./package.json').version)\")\"" + }, + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/6..latest", + "ff/5", + "firefox/latest", + "chrome/10", + "chrome/latest", + "safari/5.1", + "safari/latest", + "opera/12" + ] + }, + "repository": { + "type": "git", + "url": "git://github.com/minimistjs/minimist.git" + }, + "homepage": "https://github.com/minimistjs/minimist", + "keywords": [ + "argv", + "getopt", + "parser", + "optimist" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + }, + "license": "MIT", + "auto-changelog": { + "output": "CHANGELOG.md", + "template": "keepachangelog", + "unreleased": false, + "commitLimit": false, + "backfillLimit": false, + "hideCredit": true + }, + "publishConfig": { + "ignore": [ + ".github/workflows" + ] + } +} diff --git a/node_modules/minimist/test/all_bool.js b/node_modules/minimist/test/all_bool.js new file mode 100644 index 0000000..befa0c9 --- /dev/null +++ b/node_modules/minimist/test/all_bool.js @@ -0,0 +1,34 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('flag boolean true (default all --args to boolean)', function (t) { + var argv = parse(['moo', '--honk', 'cow'], { + boolean: true, + }); + + t.deepEqual(argv, { + honk: true, + _: ['moo', 'cow'], + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); + +test('flag boolean true only affects double hyphen arguments without equals signs', function (t) { + var argv = parse(['moo', '--honk', 'cow', '-p', '55', '--tacos=good'], { + boolean: true, + }); + + t.deepEqual(argv, { + honk: true, + tacos: 'good', + p: 55, + _: ['moo', 'cow'], + }); + + t.deepEqual(typeof argv.honk, 'boolean'); + t.end(); +}); diff --git a/node_modules/minimist/test/bool.js b/node_modules/minimist/test/bool.js new file mode 100644 index 0000000..e58d47e --- /dev/null +++ b/node_modules/minimist/test/bool.js @@ -0,0 +1,177 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('flag boolean default false', function (t) { + var argv = parse(['moo'], { + boolean: ['t', 'verbose'], + default: { verbose: false, t: false }, + }); + + t.deepEqual(argv, { + verbose: false, + t: false, + _: ['moo'], + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); + +}); + +test('boolean groups', function (t) { + var argv = parse(['-x', '-z', 'one', 'two', 'three'], { + boolean: ['x', 'y', 'z'], + }); + + t.deepEqual(argv, { + x: true, + y: false, + z: true, + _: ['one', 'two', 'three'], + }); + + t.deepEqual(typeof argv.x, 'boolean'); + t.deepEqual(typeof argv.y, 'boolean'); + t.deepEqual(typeof argv.z, 'boolean'); + t.end(); +}); +test('boolean and alias with chainable api', function (t) { + var aliased = ['-h', 'derp']; + var regular = ['--herp', 'derp']; + var aliasedArgv = parse(aliased, { + boolean: 'herp', + alias: { h: 'herp' }, + }); + var propertyArgv = parse(regular, { + boolean: 'herp', + alias: { h: 'herp' }, + }); + var expected = { + herp: true, + h: true, + _: ['derp'], + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias with options hash', function (t) { + var aliased = ['-h', 'derp']; + var regular = ['--herp', 'derp']; + var opts = { + alias: { h: 'herp' }, + boolean: 'herp', + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + _: ['derp'], + }; + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +test('boolean and alias array with options hash', function (t) { + var aliased = ['-h', 'derp']; + var regular = ['--herp', 'derp']; + var alt = ['--harp', 'derp']; + var opts = { + alias: { h: ['herp', 'harp'] }, + boolean: 'h', + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var altPropertyArgv = parse(alt, opts); + var expected = { + harp: true, + herp: true, + h: true, + _: ['derp'], + }; + t.same(aliasedArgv, 
expected); + t.same(propertyArgv, expected); + t.same(altPropertyArgv, expected); + t.end(); +}); + +test('boolean and alias using explicit true', function (t) { + var aliased = ['-h', 'true']; + var regular = ['--herp', 'true']; + var opts = { + alias: { h: 'herp' }, + boolean: 'h', + }; + var aliasedArgv = parse(aliased, opts); + var propertyArgv = parse(regular, opts); + var expected = { + herp: true, + h: true, + _: [], + }; + + t.same(aliasedArgv, expected); + t.same(propertyArgv, expected); + t.end(); +}); + +// regression, see https://github.com/substack/node-optimist/issues/71 +test('boolean and --x=true', function (t) { + var parsed = parse(['--boool', '--other=true'], { + boolean: 'boool', + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'true'); + + parsed = parse(['--boool', '--other=false'], { + boolean: 'boool', + }); + + t.same(parsed.boool, true); + t.same(parsed.other, 'false'); + t.end(); +}); + +test('boolean --boool=true', function (t) { + var parsed = parse(['--boool=true'], { + default: { + boool: false, + }, + boolean: ['boool'], + }); + + t.same(parsed.boool, true); + t.end(); +}); + +test('boolean --boool=false', function (t) { + var parsed = parse(['--boool=false'], { + default: { + boool: true, + }, + boolean: ['boool'], + }); + + t.same(parsed.boool, false); + t.end(); +}); + +test('boolean using something similar to true', function (t) { + var opts = { boolean: 'h' }; + var result = parse(['-h', 'true.txt'], opts); + var expected = { + h: true, + _: ['true.txt'], + }; + + t.same(result, expected); + t.end(); +}); diff --git a/node_modules/minimist/test/dash.js b/node_modules/minimist/test/dash.js new file mode 100644 index 0000000..7078817 --- /dev/null +++ b/node_modules/minimist/test/dash.js @@ -0,0 +1,43 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('-', function (t) { + t.plan(6); + t.deepEqual(parse(['-n', '-']), { n: '-', _: [] }); + t.deepEqual(parse(['--nnn', '-']), { nnn: '-', _: [] }); + t.deepEqual(parse(['-']), { _: ['-'] }); + t.deepEqual(parse(['-f-']), { f: '-', _: [] }); + t.deepEqual( + parse(['-b', '-'], { boolean: 'b' }), + { b: true, _: ['-'] } + ); + t.deepEqual( + parse(['-s', '-'], { string: 's' }), + { s: '-', _: [] } + ); +}); + +test('-a -- b', function (t) { + t.plan(2); + t.deepEqual(parse(['-a', '--', 'b']), { a: true, _: ['b'] }); + t.deepEqual(parse(['--a', '--', 'b']), { a: true, _: ['b'] }); +}); + +test('move arguments after the -- into their own `--` array', function (t) { + t.plan(1); + t.deepEqual( + parse(['--name', 'John', 'before', '--', 'after'], { '--': true }), + { name: 'John', _: ['before'], '--': ['after'] } + ); +}); + +test('--- option value', function (t) { + // A multi-dash value is largely an edge case, but check the behaviour is as expected, + // and in particular the same for short option and long option (as made consistent in Jan 2023). 
+ t.plan(2); + t.deepEqual(parse(['-n', '---']), { n: '---', _: [] }); + t.deepEqual(parse(['--nnn', '---']), { nnn: '---', _: [] }); +}); + diff --git a/node_modules/minimist/test/default_bool.js b/node_modules/minimist/test/default_bool.js new file mode 100644 index 0000000..4e9f625 --- /dev/null +++ b/node_modules/minimist/test/default_bool.js @@ -0,0 +1,37 @@ +'use strict'; + +var test = require('tape'); +var parse = require('../'); + +test('boolean default true', function (t) { + var argv = parse([], { + boolean: 'sometrue', + default: { sometrue: true }, + }); + t.equal(argv.sometrue, true); + t.end(); +}); + +test('boolean default false', function (t) { + var argv = parse([], { + boolean: 'somefalse', + default: { somefalse: false }, + }); + t.equal(argv.somefalse, false); + t.end(); +}); + +test('boolean default to null', function (t) { + var argv = parse([], { + boolean: 'maybe', + default: { maybe: null }, + }); + t.equal(argv.maybe, null); + + var argvLong = parse(['--maybe'], { + boolean: 'maybe', + default: { maybe: null }, + }); + t.equal(argvLong.maybe, true); + t.end(); +}); diff --git a/node_modules/minimist/test/dotted.js b/node_modules/minimist/test/dotted.js new file mode 100644 index 0000000..126ff03 --- /dev/null +++ b/node_modules/minimist/test/dotted.js @@ -0,0 +1,24 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('dotted alias', function (t) { + var argv = parse(['--a.b', '22'], { default: { 'a.b': 11 }, alias: { 'a.b': 'aa.bb' } }); + t.equal(argv.a.b, 22); + t.equal(argv.aa.bb, 22); + t.end(); +}); + +test('dotted default', function (t) { + var argv = parse('', { default: { 'a.b': 11 }, alias: { 'a.b': 'aa.bb' } }); + t.equal(argv.a.b, 11); + t.equal(argv.aa.bb, 11); + t.end(); +}); + +test('dotted default with no alias', function (t) { + var argv = parse('', { default: { 'a.b': 11 } }); + t.equal(argv.a.b, 11); + t.end(); +}); diff --git a/node_modules/minimist/test/kv_short.js b/node_modules/minimist/test/kv_short.js new file mode 100644 index 0000000..6d1b53a --- /dev/null +++ b/node_modules/minimist/test/kv_short.js @@ -0,0 +1,32 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('short -k=v', function (t) { + t.plan(1); + + var argv = parse(['-b=123']); + t.deepEqual(argv, { b: 123, _: [] }); +}); + +test('multi short -k=v', function (t) { + t.plan(1); + + var argv = parse(['-a=whatever', '-b=robots']); + t.deepEqual(argv, { a: 'whatever', b: 'robots', _: [] }); +}); + +test('short with embedded equals -k=a=b', function (t) { + t.plan(1); + + var argv = parse(['-k=a=b']); + t.deepEqual(argv, { k: 'a=b', _: [] }); +}); + +test('short with later equals like -ab=c', function (t) { + t.plan(1); + + var argv = parse(['-ab=c']); + t.deepEqual(argv, { a: true, b: 'c', _: [] }); +}); diff --git a/node_modules/minimist/test/long.js b/node_modules/minimist/test/long.js new file mode 100644 index 0000000..9fef51f --- /dev/null +++ b/node_modules/minimist/test/long.js @@ -0,0 +1,33 @@ +'use strict'; + +var test = require('tape'); +var parse = require('../'); + +test('long opts', function (t) { + t.deepEqual( + parse(['--bool']), + { bool: true, _: [] }, + 'long boolean' + ); + t.deepEqual( + parse(['--pow', 'xixxle']), + { pow: 'xixxle', _: [] }, + 'long capture sp' + ); + t.deepEqual( + parse(['--pow=xixxle']), + { pow: 'xixxle', _: [] }, + 'long capture eq' + ); + t.deepEqual( + parse(['--host', 'localhost', '--port', '555']), + { host: 'localhost', port: 555, _: [] }, + 'long captures sp' + ); + 
t.deepEqual( + parse(['--host=localhost', '--port=555']), + { host: 'localhost', port: 555, _: [] }, + 'long captures eq' + ); + t.end(); +}); diff --git a/node_modules/minimist/test/num.js b/node_modules/minimist/test/num.js new file mode 100644 index 0000000..074393e --- /dev/null +++ b/node_modules/minimist/test/num.js @@ -0,0 +1,38 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('nums', function (t) { + var argv = parse([ + '-x', '1234', + '-y', '5.67', + '-z', '1e7', + '-w', '10f', + '--hex', '0xdeadbeef', + '789', + ]); + t.deepEqual(argv, { + x: 1234, + y: 5.67, + z: 1e7, + w: '10f', + hex: 0xdeadbeef, + _: [789], + }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv.y, 'number'); + t.deepEqual(typeof argv.z, 'number'); + t.deepEqual(typeof argv.w, 'string'); + t.deepEqual(typeof argv.hex, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); + +test('already a number', function (t) { + var argv = parse(['-x', 1234, 789]); + t.deepEqual(argv, { x: 1234, _: [789] }); + t.deepEqual(typeof argv.x, 'number'); + t.deepEqual(typeof argv._[0], 'number'); + t.end(); +}); diff --git a/node_modules/minimist/test/parse.js b/node_modules/minimist/test/parse.js new file mode 100644 index 0000000..65d9d90 --- /dev/null +++ b/node_modules/minimist/test/parse.js @@ -0,0 +1,209 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('parse args', function (t) { + t.deepEqual( + parse(['--no-moo']), + { moo: false, _: [] }, + 'no' + ); + t.deepEqual( + parse(['-v', 'a', '-v', 'b', '-v', 'c']), + { v: ['a', 'b', 'c'], _: [] }, + 'multi' + ); + t.end(); +}); + +test('comprehensive', function (t) { + t.deepEqual( + parse([ + '--name=meowmers', 'bare', '-cats', 'woo', + '-h', 'awesome', '--multi=quux', + '--key', 'value', + '-b', '--bool', '--no-meep', '--multi=baz', + '--', '--not-a-flag', 'eek', + ]), + { + c: true, + a: true, + t: true, + s: 'woo', + h: 'awesome', + b: true, + bool: true, + key: 'value', + multi: ['quux', 'baz'], + meep: false, + name: 'meowmers', + _: ['bare', '--not-a-flag', 'eek'], + } + ); + t.end(); +}); + +test('flag boolean', function (t) { + var argv = parse(['-t', 'moo'], { boolean: 't' }); + t.deepEqual(argv, { t: true, _: ['moo'] }); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('flag boolean value', function (t) { + var argv = parse(['--verbose', 'false', 'moo', '-t', 'true'], { + boolean: ['t', 'verbose'], + default: { verbose: true }, + }); + + t.deepEqual(argv, { + verbose: false, + t: true, + _: ['moo'], + }); + + t.deepEqual(typeof argv.verbose, 'boolean'); + t.deepEqual(typeof argv.t, 'boolean'); + t.end(); +}); + +test('newlines in params', function (t) { + var args = parse(['-s', 'X\nX']); + t.deepEqual(args, { _: [], s: 'X\nX' }); + + // reproduce in bash: + // VALUE="new + // line" + // node program.js --s="$VALUE" + args = parse(['--s=X\nX']); + t.deepEqual(args, { _: [], s: 'X\nX' }); + t.end(); +}); + +test('strings', function (t) { + var s = parse(['-s', '0001234'], { string: 's' }).s; + t.equal(s, '0001234'); + t.equal(typeof s, 'string'); + + var x = parse(['-x', '56'], { string: 'x' }).x; + t.equal(x, '56'); + t.equal(typeof x, 'string'); + t.end(); +}); + +test('stringArgs', function (t) { + var s = parse([' ', ' '], { string: '_' })._; + t.same(s.length, 2); + t.same(typeof s[0], 'string'); + t.same(s[0], ' '); + t.same(typeof s[1], 'string'); + t.same(s[1], ' '); + t.end(); +}); + +test('empty strings', function (t) { + var s = 
parse(['-s'], { string: 's' }).s; + t.equal(s, ''); + t.equal(typeof s, 'string'); + + var str = parse(['--str'], { string: 'str' }).str; + t.equal(str, ''); + t.equal(typeof str, 'string'); + + var letters = parse(['-art'], { + string: ['a', 't'], + }); + + t.equal(letters.a, ''); + t.equal(letters.r, true); + t.equal(letters.t, ''); + + t.end(); +}); + +test('string and alias', function (t) { + var x = parse(['--str', '000123'], { + string: 's', + alias: { s: 'str' }, + }); + + t.equal(x.str, '000123'); + t.equal(typeof x.str, 'string'); + t.equal(x.s, '000123'); + t.equal(typeof x.s, 'string'); + + var y = parse(['-s', '000123'], { + string: 'str', + alias: { str: 's' }, + }); + + t.equal(y.str, '000123'); + t.equal(typeof y.str, 'string'); + t.equal(y.s, '000123'); + t.equal(typeof y.s, 'string'); + + var z = parse(['-s123'], { + alias: { str: ['s', 'S'] }, + string: ['str'], + }); + + t.deepEqual( + z, + { _: [], s: '123', S: '123', str: '123' }, + 'opt.string works with multiple aliases' + ); + t.end(); +}); + +test('slashBreak', function (t) { + t.same( + parse(['-I/foo/bar/baz']), + { I: '/foo/bar/baz', _: [] } + ); + t.same( + parse(['-xyz/foo/bar/baz']), + { x: true, y: true, z: '/foo/bar/baz', _: [] } + ); + t.end(); +}); + +test('alias', function (t) { + var argv = parse(['-f', '11', '--zoom', '55'], { + alias: { z: 'zoom' }, + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.f, 11); + t.end(); +}); + +test('multiAlias', function (t) { + var argv = parse(['-f', '11', '--zoom', '55'], { + alias: { z: ['zm', 'zoom'] }, + }); + t.equal(argv.zoom, 55); + t.equal(argv.z, argv.zoom); + t.equal(argv.z, argv.zm); + t.equal(argv.f, 11); + t.end(); +}); + +test('nested dotted objects', function (t) { + var argv = parse([ + '--foo.bar', '3', '--foo.baz', '4', + '--foo.quux.quibble', '5', '--foo.quux.o_O', + '--beep.boop', + ]); + + t.same(argv.foo, { + bar: 3, + baz: 4, + quux: { + quibble: 5, + o_O: true, + }, + }); + t.same(argv.beep, { boop: true }); + t.end(); +}); diff --git a/node_modules/minimist/test/parse_modified.js b/node_modules/minimist/test/parse_modified.js new file mode 100644 index 0000000..32965d1 --- /dev/null +++ b/node_modules/minimist/test/parse_modified.js @@ -0,0 +1,11 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('parse with modifier functions', function (t) { + t.plan(1); + + var argv = parse(['-b', '123'], { boolean: 'b' }); + t.deepEqual(argv, { b: true, _: [123] }); +}); diff --git a/node_modules/minimist/test/proto.js b/node_modules/minimist/test/proto.js new file mode 100644 index 0000000..6e629dd --- /dev/null +++ b/node_modules/minimist/test/proto.js @@ -0,0 +1,64 @@ +'use strict'; + +/* eslint no-proto: 0 */ + +var parse = require('../'); +var test = require('tape'); + +test('proto pollution', function (t) { + var argv = parse(['--__proto__.x', '123']); + t.equal({}.x, undefined); + t.equal(argv.__proto__.x, undefined); + t.equal(argv.x, undefined); + t.end(); +}); + +test('proto pollution (array)', function (t) { + var argv = parse(['--x', '4', '--x', '5', '--x.__proto__.z', '789']); + t.equal({}.z, undefined); + t.deepEqual(argv.x, [4, 5]); + t.equal(argv.x.z, undefined); + t.equal(argv.x.__proto__.z, undefined); + t.end(); +}); + +test('proto pollution (number)', function (t) { + var argv = parse(['--x', '5', '--x.__proto__.z', '100']); + t.equal({}.z, undefined); + t.equal((4).z, undefined); + t.equal(argv.x, 5); + t.equal(argv.x.z, undefined); + t.end(); +}); + +test('proto pollution 
(string)', function (t) { + var argv = parse(['--x', 'abc', '--x.__proto__.z', 'def']); + t.equal({}.z, undefined); + t.equal('...'.z, undefined); + t.equal(argv.x, 'abc'); + t.equal(argv.x.z, undefined); + t.end(); +}); + +test('proto pollution (constructor)', function (t) { + var argv = parse(['--constructor.prototype.y', '123']); + t.equal({}.y, undefined); + t.equal(argv.y, undefined); + t.end(); +}); + +test('proto pollution (constructor function)', function (t) { + var argv = parse(['--_.concat.constructor.prototype.y', '123']); + function fnToBeTested() {} + t.equal(fnToBeTested.y, undefined); + t.equal(argv.y, undefined); + t.end(); +}); + +// powered by snyk - https://github.com/backstage/backstage/issues/10343 +test('proto pollution (constructor function) snyk', function (t) { + var argv = parse('--_.constructor.constructor.prototype.foo bar'.split(' ')); + t.equal(function () {}.foo, undefined); + t.equal(argv.y, undefined); + t.end(); +}); diff --git a/node_modules/minimist/test/short.js b/node_modules/minimist/test/short.js new file mode 100644 index 0000000..4a7b843 --- /dev/null +++ b/node_modules/minimist/test/short.js @@ -0,0 +1,69 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('numeric short args', function (t) { + t.plan(2); + t.deepEqual(parse(['-n123']), { n: 123, _: [] }); + t.deepEqual( + parse(['-123', '456']), + { 1: true, 2: true, 3: 456, _: [] } + ); +}); + +test('short', function (t) { + t.deepEqual( + parse(['-b']), + { b: true, _: [] }, + 'short boolean' + ); + t.deepEqual( + parse(['foo', 'bar', 'baz']), + { _: ['foo', 'bar', 'baz'] }, + 'bare' + ); + t.deepEqual( + parse(['-cats']), + { c: true, a: true, t: true, s: true, _: [] }, + 'group' + ); + t.deepEqual( + parse(['-cats', 'meow']), + { c: true, a: true, t: true, s: 'meow', _: [] }, + 'short group next' + ); + t.deepEqual( + parse(['-h', 'localhost']), + { h: 'localhost', _: [] }, + 'short capture' + ); + t.deepEqual( + parse(['-h', 'localhost', '-p', '555']), + { h: 'localhost', p: 555, _: [] }, + 'short captures' + ); + t.end(); +}); + +test('mixed short bool and capture', function (t) { + t.same( + parse(['-h', 'localhost', '-fp', '555', 'script.js']), + { + f: true, p: 555, h: 'localhost', + _: ['script.js'], + } + ); + t.end(); +}); + +test('short and long', function (t) { + t.deepEqual( + parse(['-h', 'localhost', '-fp', '555', 'script.js']), + { + f: true, p: 555, h: 'localhost', + _: ['script.js'], + } + ); + t.end(); +}); diff --git a/node_modules/minimist/test/stop_early.js b/node_modules/minimist/test/stop_early.js new file mode 100644 index 0000000..52a6a91 --- /dev/null +++ b/node_modules/minimist/test/stop_early.js @@ -0,0 +1,17 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('stops parsing on the first non-option when stopEarly is set', function (t) { + var argv = parse(['--aaa', 'bbb', 'ccc', '--ddd'], { + stopEarly: true, + }); + + t.deepEqual(argv, { + aaa: 'bbb', + _: ['ccc', '--ddd'], + }); + + t.end(); +}); diff --git a/node_modules/minimist/test/unknown.js b/node_modules/minimist/test/unknown.js new file mode 100644 index 0000000..4f2e0ca --- /dev/null +++ b/node_modules/minimist/test/unknown.js @@ -0,0 +1,104 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('boolean and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = ['-h', 'true', '--derp', 'true']; + var regular = 
['--herp', 'true', '-d', 'true']; + var opts = { + alias: { h: 'herp' }, + boolean: 'h', + unknown: unknownFn, + }; + parse(aliased, opts); + parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('flag boolean true any double hyphen argument is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var argv = parse(['--honk', '--tacos=good', 'cow', '-p', '55'], { + boolean: true, + unknown: unknownFn, + }); + t.same(unknown, ['--tacos=good', 'cow', '-p']); + t.same(argv, { + honk: true, + _: [], + }); + t.end(); +}); + +test('string and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = ['-h', 'hello', '--derp', 'goodbye']; + var regular = ['--herp', 'hello', '-d', 'moon']; + var opts = { + alias: { h: 'herp' }, + string: 'h', + unknown: unknownFn, + }; + parse(aliased, opts); + parse(regular, opts); + + t.same(unknown, ['--derp', '-d']); + t.end(); +}); + +test('default and alias is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = ['-h', 'hello']; + var regular = ['--herp', 'hello']; + var opts = { + default: { h: 'bar' }, + alias: { h: 'herp' }, + unknown: unknownFn, + }; + parse(aliased, opts); + parse(regular, opts); + + t.same(unknown, []); + t.end(); + unknownFn(); // exercise fn for 100% coverage +}); + +test('value following -- is not unknown', function (t) { + var unknown = []; + function unknownFn(arg) { + unknown.push(arg); + return false; + } + var aliased = ['--bad', '--', 'good', 'arg']; + var opts = { + '--': true, + unknown: unknownFn, + }; + var argv = parse(aliased, opts); + + t.same(unknown, ['--bad']); + t.same(argv, { + '--': ['good', 'arg'], + _: [], + }); + t.end(); +}); diff --git a/node_modules/minimist/test/whitespace.js b/node_modules/minimist/test/whitespace.js new file mode 100644 index 0000000..4fdaf1d --- /dev/null +++ b/node_modules/minimist/test/whitespace.js @@ -0,0 +1,10 @@ +'use strict'; + +var parse = require('../'); +var test = require('tape'); + +test('whitespace should be whitespace', function (t) { + t.plan(1); + var x = parse(['-x', '\t']).x; + t.equal(x, '\t'); +}); diff --git a/node_modules/mkdirp/LICENSE b/node_modules/mkdirp/LICENSE new file mode 100644 index 0000000..432d1ae --- /dev/null +++ b/node_modules/mkdirp/LICENSE @@ -0,0 +1,21 @@ +Copyright 2010 James Halliday (mail@substack.net) + +This project is free software released under the MIT/X11 license: + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/mkdirp/bin/cmd.js b/node_modules/mkdirp/bin/cmd.js new file mode 100755 index 0000000..d95de15 --- /dev/null +++ b/node_modules/mkdirp/bin/cmd.js @@ -0,0 +1,33 @@ +#!/usr/bin/env node + +var mkdirp = require('../'); +var minimist = require('minimist'); +var fs = require('fs'); + +var argv = minimist(process.argv.slice(2), { + alias: { m: 'mode', h: 'help' }, + string: [ 'mode' ] +}); +if (argv.help) { + fs.createReadStream(__dirname + '/usage.txt').pipe(process.stdout); + return; +} + +var paths = argv._.slice(); +var mode = argv.mode ? parseInt(argv.mode, 8) : undefined; + +(function next () { + if (paths.length === 0) return; + var p = paths.shift(); + + if (mode === undefined) mkdirp(p, cb) + else mkdirp(p, mode, cb) + + function cb (err) { + if (err) { + console.error(err.message); + process.exit(1); + } + else next(); + } +})(); diff --git a/node_modules/mkdirp/bin/usage.txt b/node_modules/mkdirp/bin/usage.txt new file mode 100644 index 0000000..f952aa2 --- /dev/null +++ b/node_modules/mkdirp/bin/usage.txt @@ -0,0 +1,12 @@ +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories that + don't yet exist. + + If the directory already exists, do nothing. + +OPTIONS are: + + -m, --mode If a directory needs to be created, set the mode as an octal + permission string. + diff --git a/node_modules/mkdirp/index.js b/node_modules/mkdirp/index.js new file mode 100644 index 0000000..0890ac3 --- /dev/null +++ b/node_modules/mkdirp/index.js @@ -0,0 +1,102 @@ +var path = require('path'); +var fs = require('fs'); +var _0777 = parseInt('0777', 8); + +module.exports = mkdirP.mkdirp = mkdirP.mkdirP = mkdirP; + +function mkdirP (p, opts, f, made) { + if (typeof opts === 'function') { + f = opts; + opts = {}; + } + else if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 + } + if (!made) made = null; + + var cb = f || /* istanbul ignore next */ function () {}; + p = path.resolve(p); + + xfs.mkdir(p, mode, function (er) { + if (!er) { + made = made || p; + return cb(null, made); + } + switch (er.code) { + case 'ENOENT': + /* istanbul ignore if */ + if (path.dirname(p) === p) return cb(er); + mkdirP(path.dirname(p), opts, function (er, made) { + /* istanbul ignore if */ + if (er) cb(er, made); + else mkdirP(p, opts, cb, made); + }); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + xfs.stat(p, function (er2, stat) { + // if the stat fails, then that's super weird. + // let the original error be the failure reason. 
+ if (er2 || !stat.isDirectory()) cb(er, made) + else cb(null, made); + }); + break; + } + }); +} + +mkdirP.sync = function sync (p, opts, made) { + if (!opts || typeof opts !== 'object') { + opts = { mode: opts }; + } + + var mode = opts.mode; + var xfs = opts.fs || fs; + + if (mode === undefined) { + mode = _0777 + } + if (!made) made = null; + + p = path.resolve(p); + + try { + xfs.mkdirSync(p, mode); + made = made || p; + } + catch (err0) { + switch (err0.code) { + case 'ENOENT' : + made = sync(path.dirname(p), opts, made); + sync(p, opts, made); + break; + + // In the case of any other error, just see if there's a dir + // there already. If so, then hooray! If not, then something + // is borked. + default: + var stat; + try { + stat = xfs.statSync(p); + } + catch (err1) /* istanbul ignore next */ { + throw err0; + } + /* istanbul ignore if */ + if (!stat.isDirectory()) throw err0; + break; + } + } + + return made; +}; diff --git a/node_modules/mkdirp/package.json b/node_modules/mkdirp/package.json new file mode 100644 index 0000000..951e58d --- /dev/null +++ b/node_modules/mkdirp/package.json @@ -0,0 +1,33 @@ +{ + "name": "mkdirp", + "description": "Recursively mkdir, like `mkdir -p`", + "version": "0.5.6", + "publishConfig": { + "tag": "legacy" + }, + "author": "James Halliday (http://substack.net)", + "main": "index.js", + "keywords": [ + "mkdir", + "directory" + ], + "repository": { + "type": "git", + "url": "https://github.com/substack/node-mkdirp.git" + }, + "scripts": { + "test": "tap test/*.js" + }, + "dependencies": { + "minimist": "^1.2.6" + }, + "devDependencies": { + "tap": "^16.0.1" + }, + "bin": "bin/cmd.js", + "license": "MIT", + "files": [ + "bin", + "index.js" + ] +} diff --git a/node_modules/mkdirp/readme.markdown b/node_modules/mkdirp/readme.markdown new file mode 100644 index 0000000..fc314bf --- /dev/null +++ b/node_modules/mkdirp/readme.markdown @@ -0,0 +1,100 @@ +# mkdirp + +Like `mkdir -p`, but in node.js! + +[![build status](https://secure.travis-ci.org/substack/node-mkdirp.png)](http://travis-ci.org/substack/node-mkdirp) + +# example + +## pow.js + +```js +var mkdirp = require('mkdirp'); + +mkdirp('/tmp/foo/bar/baz', function (err) { + if (err) console.error(err) + else console.log('pow!') +}); +``` + +Output + +``` +pow! +``` + +And now /tmp/foo/bar/baz exists, huzzah! + +# methods + +```js +var mkdirp = require('mkdirp'); +``` + +## mkdirp(dir, opts, cb) + +Create a new directory and any necessary subdirectories at `dir` with octal +permission string `opts.mode`. If `opts` is a non-object, it will be treated as +the `opts.mode`. + +If `opts.mode` isn't specified, it defaults to `0777`. + +`cb(err, made)` fires with the error or the first directory `made` +that had to be created, if any. + +You can optionally pass in an alternate `fs` implementation by passing in +`opts.fs`. Your implementation should have `opts.fs.mkdir(path, mode, cb)` and +`opts.fs.stat(path, cb)`. + +## mkdirp.sync(dir, opts) + +Synchronously create a new directory and any necessary subdirectories at `dir` +with octal permission string `opts.mode`. If `opts` is a non-object, it will be +treated as the `opts.mode`. + +If `opts.mode` isn't specified, it defaults to `0777`. + +Returns the first directory that had to be created, if any. + +You can optionally pass in an alternate `fs` implementation by passing in +`opts.fs`. Your implementation should have `opts.fs.mkdirSync(path, mode)` and +`opts.fs.statSync(path)`. + +# usage + +This package also ships with a `mkdirp` command. 
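For example (assuming a global install; the path and mode are illustrative):

```
mkdirp -m 0755 ./tmp/foo/bar/baz
```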
+ +``` +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories that + don't yet exist. + + If the directory already exists, do nothing. + +OPTIONS are: + + -m, --mode If a directory needs to be created, set the mode as an octal + permission string. + +``` + +# install + +With [npm](http://npmjs.org) do: + +``` +npm install mkdirp +``` + +to get the library, or + +``` +npm install -g mkdirp +``` + +to get the command. + +# license + +MIT diff --git a/node_modules/multer/LICENSE b/node_modules/multer/LICENSE new file mode 100644 index 0000000..6c011b1 --- /dev/null +++ b/node_modules/multer/LICENSE @@ -0,0 +1,17 @@ +Copyright (c) 2014 Hage Yaapa <[http://www.hacksparrow.com](http://www.hacksparrow.com)> + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/multer/README.md b/node_modules/multer/README.md new file mode 100644 index 0000000..7f5d080 --- /dev/null +++ b/node_modules/multer/README.md @@ -0,0 +1,333 @@ +# Multer [![Build Status](https://travis-ci.org/expressjs/multer.svg?branch=master)](https://travis-ci.org/expressjs/multer) [![NPM version](https://badge.fury.io/js/multer.svg)](https://badge.fury.io/js/multer) [![js-standard-style](https://img.shields.io/badge/code%20style-standard-brightgreen.svg?style=flat)](https://github.com/feross/standard) + +Multer is a node.js middleware for handling `multipart/form-data`, which is primarily used for uploading files. It is written +on top of [busboy](https://github.com/mscdex/busboy) for maximum efficiency. + +**NOTE**: Multer will not process any form which is not multipart (`multipart/form-data`). + +## Translations + +This README is also available in other languages: + +- [Español](https://github.com/expressjs/multer/blob/master/doc/README-es.md) (Spanish) +- [简体中文](https://github.com/expressjs/multer/blob/master/doc/README-zh-cn.md) (Chinese) +- [한국어](https://github.com/expressjs/multer/blob/master/doc/README-ko.md) (Korean) +- [Русский язык](https://github.com/expressjs/multer/blob/master/doc/README-ru.md) (Russian) +- [Việt Nam](https://github.com/expressjs/multer/blob/master/doc/README-vi.md) (Vietnam) +- [Português](https://github.com/expressjs/multer/blob/master/doc/README-pt-br.md) (Portuguese Brazil) + +## Installation + +```sh +$ npm install --save multer +``` + +## Usage + +Multer adds a `body` object and a `file` or `files` object to the `request` object. 
The `body` object contains the values of the text fields of the form, while the `file` or `files` object contains the files uploaded via the form. + +Basic usage example: + +Don't forget the `enctype="multipart/form-data"` in your form. + +```html +<form action="/profile" method="post" enctype="multipart/form-data"> +  <input type="file" name="avatar" /> +</form>
+``` + +```javascript +const express = require('express') +const multer = require('multer') +const upload = multer({ dest: 'uploads/' }) + +const app = express() + +app.post('/profile', upload.single('avatar'), function (req, res, next) { + // req.file is the `avatar` file + // req.body will hold the text fields, if there were any +}) + +app.post('/photos/upload', upload.array('photos', 12), function (req, res, next) { + // req.files is array of `photos` files + // req.body will contain the text fields, if there were any +}) + +const cpUpload = upload.fields([{ name: 'avatar', maxCount: 1 }, { name: 'gallery', maxCount: 8 }]) +app.post('/cool-profile', cpUpload, function (req, res, next) { + // req.files is an object (String -> Array) where fieldname is the key, and the value is array of files + // + // e.g. + // req.files['avatar'][0] -> File + // req.files['gallery'] -> Array + // + // req.body will contain the text fields, if there were any +}) +``` + +In case you need to handle a text-only multipart form, you should use the `.none()` method: + +```javascript +const express = require('express') +const app = express() +const multer = require('multer') +const upload = multer() + +app.post('/profile', upload.none(), function (req, res, next) { + // req.body contains the text fields +}) +``` + +Here's an example on how multer is used an HTML form. Take special note of the `enctype="multipart/form-data"` and `name="uploaded_file"` fields: + +```html +
+<form action="/stats" enctype="multipart/form-data" method="post"> +  <input type="file" name="uploaded_file"> +  <input type="submit" value="Upload"> +</form>
+``` + +Then in your javascript file you would add these lines to access both the file and the body. It is important that you use the `name` field value from the form in your upload function. This tells multer which field on the request it should look for the files in. If these fields aren't the same in the HTML form and on your server, your upload will fail: + +```javascript +const multer = require('multer') +const upload = multer({ dest: './public/data/uploads/' }) +app.post('/stats', upload.single('uploaded_file'), function (req, res) { + // req.file is the name of your file in the form above, here 'uploaded_file' + // req.body will hold the text fields, if there were any + console.log(req.file, req.body) +}); +``` + + + +## API + +### File information + +Each file contains the following information: + +Key | Description | Note +--- | --- | --- +`fieldname` | Field name specified in the form | +`originalname` | Name of the file on the user's computer | +`encoding` | Encoding type of the file | +`mimetype` | Mime type of the file | +`size` | Size of the file in bytes | +`destination` | The folder to which the file has been saved | `DiskStorage` +`filename` | The name of the file within the `destination` | `DiskStorage` +`path` | The full path to the uploaded file | `DiskStorage` +`buffer` | A `Buffer` of the entire file | `MemoryStorage` + +### `multer(opts)` + +Multer accepts an options object, the most basic of which is the `dest` +property, which tells Multer where to upload the files. In case you omit the +options object, the files will be kept in memory and never written to disk. + +By default, Multer will rename the files so as to avoid naming conflicts. The +renaming function can be customized according to your needs. + +The following are the options that can be passed to Multer. + +Key | Description +--- | --- +`dest` or `storage` | Where to store the files +`fileFilter` | Function to control which files are accepted +`limits` | Limits of the uploaded data +`preservePath` | Keep the full path of files instead of just the base name + +In an average web app, only `dest` might be required, and configured as shown in +the following example. + +```javascript +const upload = multer({ dest: 'uploads/' }) +``` + +If you want more control over your uploads, you'll want to use the `storage` +option instead of `dest`. Multer ships with storage engines `DiskStorage` +and `MemoryStorage`; More engines are available from third parties. + +#### `.single(fieldname)` + +Accept a single file with the name `fieldname`. The single file will be stored +in `req.file`. + +#### `.array(fieldname[, maxCount])` + +Accept an array of files, all with the name `fieldname`. Optionally error out if +more than `maxCount` files are uploaded. The array of files will be stored in +`req.files`. + +#### `.fields(fields)` + +Accept a mix of files, specified by `fields`. An object with arrays of files +will be stored in `req.files`. + +`fields` should be an array of objects with `name` and optionally a `maxCount`. +Example: + +```javascript +[ + { name: 'avatar', maxCount: 1 }, + { name: 'gallery', maxCount: 8 } +] +``` + +#### `.none()` + +Accept only text fields. If any file upload is made, error with code +"LIMIT\_UNEXPECTED\_FILE" will be issued. + +#### `.any()` + +Accepts all files that comes over the wire. An array of files will be stored in +`req.files`. + +**WARNING:** Make sure that you always handle the files that a user uploads. 
+Never add multer as a global middleware since a malicious user could upload +files to a route that you didn't anticipate. Only use this function on routes +where you are handling the uploaded files. + +### `storage` + +#### `DiskStorage` + +The disk storage engine gives you full control on storing files to disk. + +```javascript +const storage = multer.diskStorage({ + destination: function (req, file, cb) { + cb(null, '/tmp/my-uploads') + }, + filename: function (req, file, cb) { + const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1E9) + cb(null, file.fieldname + '-' + uniqueSuffix) + } +}) + +const upload = multer({ storage: storage }) +``` + +There are two options available, `destination` and `filename`. They are both +functions that determine where the file should be stored. + +`destination` is used to determine within which folder the uploaded files should +be stored. This can also be given as a `string` (e.g. `'/tmp/uploads'`). If no +`destination` is given, the operating system's default directory for temporary +files is used. + +**Note:** You are responsible for creating the directory when providing +`destination` as a function. When passing a string, multer will make sure that +the directory is created for you. + +`filename` is used to determine what the file should be named inside the folder. +If no `filename` is given, each file will be given a random name that doesn't +include any file extension. + +**Note:** Multer will not append any file extension for you, your function +should return a filename complete with an file extension. + +Each function gets passed both the request (`req`) and some information about +the file (`file`) to aid with the decision. + +Note that `req.body` might not have been fully populated yet. It depends on the +order that the client transmits fields and files to the server. + +For understanding the calling convention used in the callback (needing to pass +null as the first param), refer to +[Node.js error handling](https://www.joyent.com/node-js/production/design/errors) + +#### `MemoryStorage` + +The memory storage engine stores the files in memory as `Buffer` objects. It +doesn't have any options. + +```javascript +const storage = multer.memoryStorage() +const upload = multer({ storage: storage }) +``` + +When using memory storage, the file info will contain a field called +`buffer` that contains the entire file. + +**WARNING**: Uploading very large files, or relatively small files in large +numbers very quickly, can cause your application to run out of memory when +memory storage is used. + +### `limits` + +An object specifying the size limits of the following optional properties. Multer passes this object into busboy directly, and the details of the properties can be found on [busboy's page](https://github.com/mscdex/busboy#busboy-methods). + +The following integer values are available: + +Key | Description | Default +--- | --- | --- +`fieldNameSize` | Max field name size | 100 bytes +`fieldSize` | Max field value size (in bytes) | 1MB +`fields` | Max number of non-file fields | Infinity +`fileSize` | For multipart forms, the max file size (in bytes) | Infinity +`files` | For multipart forms, the max number of file fields | Infinity +`parts` | For multipart forms, the max number of parts (fields + files) | Infinity +`headerPairs` | For multipart forms, the max number of header key=>value pairs to parse | 2000 + +Specifying the limits can help protect your site against denial of service (DoS) attacks. 
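For instance, building on the table above, an instance that caps upload size and file count might be configured like this sketch (the destination and the numeric values are arbitrary placeholders):

```javascript
const multer = require('multer')

// Limit each uploaded file to 5 MB and each request to at most 3 file fields.
const upload = multer({
  dest: 'uploads/',
  limits: {
    fileSize: 5 * 1024 * 1024, // bytes
    files: 3
  }
})
```

Requests that exceed these limits are rejected with the corresponding Multer error (for example `LIMIT_FILE_SIZE`).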
+ +### `fileFilter` + +Set this to a function to control which files should be uploaded and which +should be skipped. The function should look like this: + +```javascript +function fileFilter (req, file, cb) { + + // The function should call `cb` with a boolean + // to indicate if the file should be accepted + + // To reject this file pass `false`, like so: + cb(null, false) + + // To accept the file pass `true`, like so: + cb(null, true) + + // You can always pass an error if something goes wrong: + cb(new Error('I don\'t have a clue!')) + +} +``` + +## Error handling + +When encountering an error, Multer will delegate the error to Express. You can +display a nice error page using [the standard express way](http://expressjs.com/guide/error-handling.html). + +If you want to catch errors specifically from Multer, you can call the +middleware function by yourself. Also, if you want to catch only [the Multer errors](https://github.com/expressjs/multer/blob/master/lib/multer-error.js), you can use the `MulterError` class that is attached to the `multer` object itself (e.g. `err instanceof multer.MulterError`). + +```javascript +const multer = require('multer') +const upload = multer().single('avatar') + +app.post('/profile', function (req, res) { + upload(req, res, function (err) { + if (err instanceof multer.MulterError) { + // A Multer error occurred when uploading. + } else if (err) { + // An unknown error occurred when uploading. + } + + // Everything went fine. + }) +}) +``` + +## Custom storage engine + +For information on how to build your own storage engine, see [Multer Storage Engine](https://github.com/expressjs/multer/blob/master/StorageEngine.md). + +## License + +[MIT](LICENSE) diff --git a/node_modules/multer/index.js b/node_modules/multer/index.js new file mode 100644 index 0000000..d5b67eb --- /dev/null +++ b/node_modules/multer/index.js @@ -0,0 +1,104 @@ +var makeMiddleware = require('./lib/make-middleware') + +var diskStorage = require('./storage/disk') +var memoryStorage = require('./storage/memory') +var MulterError = require('./lib/multer-error') + +function allowAll (req, file, cb) { + cb(null, true) +} + +function Multer (options) { + if (options.storage) { + this.storage = options.storage + } else if (options.dest) { + this.storage = diskStorage({ destination: options.dest }) + } else { + this.storage = memoryStorage() + } + + this.limits = options.limits + this.preservePath = options.preservePath + this.fileFilter = options.fileFilter || allowAll +} + +Multer.prototype._makeMiddleware = function (fields, fileStrategy) { + function setup () { + var fileFilter = this.fileFilter + var filesLeft = Object.create(null) + + fields.forEach(function (field) { + if (typeof field.maxCount === 'number') { + filesLeft[field.name] = field.maxCount + } else { + filesLeft[field.name] = Infinity + } + }) + + function wrappedFileFilter (req, file, cb) { + if ((filesLeft[file.fieldname] || 0) <= 0) { + return cb(new MulterError('LIMIT_UNEXPECTED_FILE', file.fieldname)) + } + + filesLeft[file.fieldname] -= 1 + fileFilter(req, file, cb) + } + + return { + limits: this.limits, + preservePath: this.preservePath, + storage: this.storage, + fileFilter: wrappedFileFilter, + fileStrategy: fileStrategy + } + } + + return makeMiddleware(setup.bind(this)) +} + +Multer.prototype.single = function (name) { + return this._makeMiddleware([{ name: name, maxCount: 1 }], 'VALUE') +} + +Multer.prototype.array = function (name, maxCount) { + return this._makeMiddleware([{ name: name, maxCount: maxCount }], 
'ARRAY') +} + +Multer.prototype.fields = function (fields) { + return this._makeMiddleware(fields, 'OBJECT') +} + +Multer.prototype.none = function () { + return this._makeMiddleware([], 'NONE') +} + +Multer.prototype.any = function () { + function setup () { + return { + limits: this.limits, + preservePath: this.preservePath, + storage: this.storage, + fileFilter: this.fileFilter, + fileStrategy: 'ARRAY' + } + } + + return makeMiddleware(setup.bind(this)) +} + +function multer (options) { + if (options === undefined) { + return new Multer({}) + } + + if (typeof options === 'object' && options !== null) { + return new Multer(options) + } + + throw new TypeError('Expected object for argument options') +} + +module.exports = multer +module.exports.diskStorage = diskStorage +module.exports.memoryStorage = memoryStorage +module.exports.MulterError = MulterError diff --git a/node_modules/multer/lib/counter.js b/node_modules/multer/lib/counter.js new file mode 100644 index 0000000..29c410c --- /dev/null +++ b/node_modules/multer/lib/counter.js @@ -0,0 +1,28 @@ +var EventEmitter = require('events').EventEmitter + +function Counter () { + EventEmitter.call(this) + this.value = 0 +} + +Counter.prototype = Object.create(EventEmitter.prototype) + +Counter.prototype.increment = function increment () { + this.value++ +} + +Counter.prototype.decrement = function decrement () { + if (--this.value === 0) this.emit('zero') +} + +Counter.prototype.isZero = function isZero () { + return (this.value === 0) +} + +Counter.prototype.onceZero = function onceZero (fn) { + if (this.isZero()) return fn() + + this.once('zero', fn) +} + +module.exports = Counter diff --git a/node_modules/multer/lib/file-appender.js b/node_modules/multer/lib/file-appender.js new file mode 100644 index 0000000..1a2c5e7 --- /dev/null +++ b/node_modules/multer/lib/file-appender.js @@ -0,0 +1,67 @@ +var objectAssign = require('object-assign') + +function arrayRemove (arr, item) { + var idx = arr.indexOf(item) + if (~idx) arr.splice(idx, 1) +} + +function FileAppender (strategy, req) { + this.strategy = strategy + this.req = req + + switch (strategy) { + case 'NONE': break + case 'VALUE': break + case 'ARRAY': req.files = []; break + case 'OBJECT': req.files = Object.create(null); break + default: throw new Error('Unknown file strategy: ' + strategy) + } +} + +FileAppender.prototype.insertPlaceholder = function (file) { + var placeholder = { + fieldname: file.fieldname + } + + switch (this.strategy) { + case 'NONE': break + case 'VALUE': break + case 'ARRAY': this.req.files.push(placeholder); break + case 'OBJECT': + if (this.req.files[file.fieldname]) { + this.req.files[file.fieldname].push(placeholder) + } else { + this.req.files[file.fieldname] = [placeholder] + } + break + } + + return placeholder +} + +FileAppender.prototype.removePlaceholder = function (placeholder) { + switch (this.strategy) { + case 'NONE': break + case 'VALUE': break + case 'ARRAY': arrayRemove(this.req.files, placeholder); break + case 'OBJECT': + if (this.req.files[placeholder.fieldname].length === 1) { + delete this.req.files[placeholder.fieldname] + } else { + arrayRemove(this.req.files[placeholder.fieldname], placeholder) + } + break + } +} + +FileAppender.prototype.replacePlaceholder = function (placeholder, file) { + if (this.strategy === 'VALUE') { + this.req.file = file + return + } + + delete placeholder.fieldname + objectAssign(placeholder, file) +} + +module.exports = FileAppender diff --git a/node_modules/multer/lib/make-middleware.js 
b/node_modules/multer/lib/make-middleware.js new file mode 100644 index 0000000..6627cf4 --- /dev/null +++ b/node_modules/multer/lib/make-middleware.js @@ -0,0 +1,173 @@ +var is = require('type-is') +var Busboy = require('busboy') +var extend = require('xtend') +var appendField = require('append-field') + +var Counter = require('./counter') +var MulterError = require('./multer-error') +var FileAppender = require('./file-appender') +var removeUploadedFiles = require('./remove-uploaded-files') + +function makeMiddleware (setup) { + return function multerMiddleware (req, res, next) { + if (!is(req, ['multipart'])) return next() + + var options = setup() + + var limits = options.limits + var storage = options.storage + var fileFilter = options.fileFilter + var fileStrategy = options.fileStrategy + var preservePath = options.preservePath + + req.body = Object.create(null) + + var busboy + + try { + busboy = Busboy({ headers: req.headers, limits: limits, preservePath: preservePath }) + } catch (err) { + return next(err) + } + + var appender = new FileAppender(fileStrategy, req) + var isDone = false + var readFinished = false + var errorOccured = false + var pendingWrites = new Counter() + var uploadedFiles = [] + + function done (err) { + if (isDone) return + isDone = true + req.unpipe(busboy) + busboy.removeAllListeners() + next(err) + } + + function indicateDone () { + if (readFinished && pendingWrites.isZero() && !errorOccured) done() + } + + function abortWithError (uploadError) { + if (errorOccured) return + errorOccured = true + + pendingWrites.onceZero(function () { + function remove (file, cb) { + storage._removeFile(req, file, cb) + } + + removeUploadedFiles(uploadedFiles, remove, function (err, storageErrors) { + if (err) return done(err) + + uploadError.storageErrors = storageErrors + done(uploadError) + }) + }) + } + + function abortWithCode (code, optionalField) { + abortWithError(new MulterError(code, optionalField)) + } + + // handle text field data + busboy.on('field', function (fieldname, value, { nameTruncated, valueTruncated }) { + if (fieldname == null) return abortWithCode('MISSING_FIELD_NAME') + if (nameTruncated) return abortWithCode('LIMIT_FIELD_KEY') + if (valueTruncated) return abortWithCode('LIMIT_FIELD_VALUE', fieldname) + + // Work around bug in Busboy (https://github.com/mscdex/busboy/issues/6) + if (limits && Object.prototype.hasOwnProperty.call(limits, 'fieldNameSize')) { + if (fieldname.length > limits.fieldNameSize) return abortWithCode('LIMIT_FIELD_KEY') + } + + appendField(req.body, fieldname, value) + }) + + // handle files + busboy.on('file', function (fieldname, fileStream, { filename, encoding, mimeType }) { + // don't attach to the files object, if there is no file + if (!filename) return fileStream.resume() + + // Work around bug in Busboy (https://github.com/mscdex/busboy/issues/6) + if (limits && Object.prototype.hasOwnProperty.call(limits, 'fieldNameSize')) { + if (fieldname.length > limits.fieldNameSize) return abortWithCode('LIMIT_FIELD_KEY') + } + + var file = { + fieldname: fieldname, + originalname: filename, + encoding: encoding, + mimetype: mimeType + } + + var placeholder = appender.insertPlaceholder(file) + + fileFilter(req, file, function (err, includeFile) { + if (err) { + appender.removePlaceholder(placeholder) + return abortWithError(err) + } + + if (!includeFile) { + appender.removePlaceholder(placeholder) + return fileStream.resume() + } + + var aborting = false + pendingWrites.increment() + + Object.defineProperty(file, 'stream', { + 
configurable: true, + enumerable: false, + value: fileStream + }) + + fileStream.on('error', function (err) { + pendingWrites.decrement() + abortWithError(err) + }) + + fileStream.on('limit', function () { + aborting = true + abortWithCode('LIMIT_FILE_SIZE', fieldname) + }) + + storage._handleFile(req, file, function (err, info) { + if (aborting) { + appender.removePlaceholder(placeholder) + uploadedFiles.push(extend(file, info)) + return pendingWrites.decrement() + } + + if (err) { + appender.removePlaceholder(placeholder) + pendingWrites.decrement() + return abortWithError(err) + } + + var fileInfo = extend(file, info) + + appender.replacePlaceholder(placeholder, fileInfo) + uploadedFiles.push(fileInfo) + pendingWrites.decrement() + indicateDone() + }) + }) + }) + + busboy.on('error', function (err) { abortWithError(err) }) + busboy.on('partsLimit', function () { abortWithCode('LIMIT_PART_COUNT') }) + busboy.on('filesLimit', function () { abortWithCode('LIMIT_FILE_COUNT') }) + busboy.on('fieldsLimit', function () { abortWithCode('LIMIT_FIELD_COUNT') }) + busboy.on('close', function () { + readFinished = true + indicateDone() + }) + + req.pipe(busboy) + } +} + +module.exports = makeMiddleware diff --git a/node_modules/multer/lib/multer-error.js b/node_modules/multer/lib/multer-error.js new file mode 100644 index 0000000..d56b00e --- /dev/null +++ b/node_modules/multer/lib/multer-error.js @@ -0,0 +1,24 @@ +var util = require('util') + +var errorMessages = { + LIMIT_PART_COUNT: 'Too many parts', + LIMIT_FILE_SIZE: 'File too large', + LIMIT_FILE_COUNT: 'Too many files', + LIMIT_FIELD_KEY: 'Field name too long', + LIMIT_FIELD_VALUE: 'Field value too long', + LIMIT_FIELD_COUNT: 'Too many fields', + LIMIT_UNEXPECTED_FILE: 'Unexpected field', + MISSING_FIELD_NAME: 'Field name missing' +} + +function MulterError (code, field) { + Error.captureStackTrace(this, this.constructor) + this.name = this.constructor.name + this.message = errorMessages[code] + this.code = code + if (field) this.field = field +} + +util.inherits(MulterError, Error) + +module.exports = MulterError diff --git a/node_modules/multer/lib/remove-uploaded-files.js b/node_modules/multer/lib/remove-uploaded-files.js new file mode 100644 index 0000000..f0b16ea --- /dev/null +++ b/node_modules/multer/lib/remove-uploaded-files.js @@ -0,0 +1,28 @@ +function removeUploadedFiles (uploadedFiles, remove, cb) { + var length = uploadedFiles.length + var errors = [] + + if (length === 0) return cb(null, errors) + + function handleFile (idx) { + var file = uploadedFiles[idx] + + remove(file, function (err) { + if (err) { + err.file = file + err.field = file.fieldname + errors.push(err) + } + + if (idx < length - 1) { + handleFile(idx + 1) + } else { + cb(null, errors) + } + }) + } + + handleFile(0) +} + +module.exports = removeUploadedFiles diff --git a/node_modules/multer/package.json b/node_modules/multer/package.json new file mode 100644 index 0000000..8545a73 --- /dev/null +++ b/node_modules/multer/package.json @@ -0,0 +1,52 @@ +{ + "name": "multer", + "description": "Middleware for handling `multipart/form-data`.", + "version": "1.4.5-lts.1", + "contributors": [ + "Hage Yaapa (http://www.hacksparrow.com)", + "Jaret Pfluger ", + "Linus Unnebäck " + ], + "license": "MIT", + "repository": "expressjs/multer", + "keywords": [ + "form", + "post", + "multipart", + "form-data", + "formdata", + "express", + "middleware" + ], + "dependencies": { + "append-field": "^1.0.0", + "busboy": "^1.0.0", + "concat-stream": "^1.5.2", + "mkdirp": "^0.5.4", + 
"object-assign": "^4.1.1", + "type-is": "^1.6.4", + "xtend": "^4.0.0" + }, + "devDependencies": { + "deep-equal": "^2.0.3", + "express": "^4.13.1", + "form-data": "^1.0.0-rc1", + "fs-temp": "^1.1.2", + "mocha": "^3.5.3", + "rimraf": "^2.4.1", + "standard": "^14.3.3", + "testdata-w3c-json-form": "^1.0.0" + }, + "engines": { + "node": ">= 6.0.0" + }, + "files": [ + "LICENSE", + "index.js", + "storage/", + "lib/" + ], + "scripts": { + "test": "standard && mocha" + } +} diff --git a/node_modules/multer/storage/disk.js b/node_modules/multer/storage/disk.js new file mode 100644 index 0000000..2f77c9f --- /dev/null +++ b/node_modules/multer/storage/disk.js @@ -0,0 +1,66 @@ +var fs = require('fs') +var os = require('os') +var path = require('path') +var crypto = require('crypto') +var mkdirp = require('mkdirp') + +function getFilename (req, file, cb) { + crypto.randomBytes(16, function (err, raw) { + cb(err, err ? undefined : raw.toString('hex')) + }) +} + +function getDestination (req, file, cb) { + cb(null, os.tmpdir()) +} + +function DiskStorage (opts) { + this.getFilename = (opts.filename || getFilename) + + if (typeof opts.destination === 'string') { + mkdirp.sync(opts.destination) + this.getDestination = function ($0, $1, cb) { cb(null, opts.destination) } + } else { + this.getDestination = (opts.destination || getDestination) + } +} + +DiskStorage.prototype._handleFile = function _handleFile (req, file, cb) { + var that = this + + that.getDestination(req, file, function (err, destination) { + if (err) return cb(err) + + that.getFilename(req, file, function (err, filename) { + if (err) return cb(err) + + var finalPath = path.join(destination, filename) + var outStream = fs.createWriteStream(finalPath) + + file.stream.pipe(outStream) + outStream.on('error', cb) + outStream.on('finish', function () { + cb(null, { + destination: destination, + filename: filename, + path: finalPath, + size: outStream.bytesWritten + }) + }) + }) + }) +} + +DiskStorage.prototype._removeFile = function _removeFile (req, file, cb) { + var path = file.path + + delete file.destination + delete file.filename + delete file.path + + fs.unlink(path, cb) +} + +module.exports = function (opts) { + return new DiskStorage(opts) +} diff --git a/node_modules/multer/storage/memory.js b/node_modules/multer/storage/memory.js new file mode 100644 index 0000000..f953ded --- /dev/null +++ b/node_modules/multer/storage/memory.js @@ -0,0 +1,21 @@ +var concat = require('concat-stream') + +function MemoryStorage (opts) {} + +MemoryStorage.prototype._handleFile = function _handleFile (req, file, cb) { + file.stream.pipe(concat({ encoding: 'buffer' }, function (data) { + cb(null, { + buffer: data, + size: data.length + }) + })) +} + +MemoryStorage.prototype._removeFile = function _removeFile (req, file, cb) { + delete file.buffer + cb(null) +} + +module.exports = function (opts) { + return new MemoryStorage(opts) +} diff --git a/node_modules/object-assign/index.js b/node_modules/object-assign/index.js new file mode 100644 index 0000000..0930cf8 --- /dev/null +++ b/node_modules/object-assign/index.js @@ -0,0 +1,90 @@ +/* +object-assign +(c) Sindre Sorhus +@license MIT +*/ + +'use strict'; +/* eslint-disable no-unused-vars */ +var getOwnPropertySymbols = Object.getOwnPropertySymbols; +var hasOwnProperty = Object.prototype.hasOwnProperty; +var propIsEnumerable = Object.prototype.propertyIsEnumerable; + +function toObject(val) { + if (val === null || val === undefined) { + throw new TypeError('Object.assign cannot be called with null or 
undefined'); + } + + return Object(val); +} + +function shouldUseNative() { + try { + if (!Object.assign) { + return false; + } + + // Detect buggy property enumeration order in older V8 versions. + + // https://bugs.chromium.org/p/v8/issues/detail?id=4118 + var test1 = new String('abc'); // eslint-disable-line no-new-wrappers + test1[5] = 'de'; + if (Object.getOwnPropertyNames(test1)[0] === '5') { + return false; + } + + // https://bugs.chromium.org/p/v8/issues/detail?id=3056 + var test2 = {}; + for (var i = 0; i < 10; i++) { + test2['_' + String.fromCharCode(i)] = i; + } + var order2 = Object.getOwnPropertyNames(test2).map(function (n) { + return test2[n]; + }); + if (order2.join('') !== '0123456789') { + return false; + } + + // https://bugs.chromium.org/p/v8/issues/detail?id=3056 + var test3 = {}; + 'abcdefghijklmnopqrst'.split('').forEach(function (letter) { + test3[letter] = letter; + }); + if (Object.keys(Object.assign({}, test3)).join('') !== + 'abcdefghijklmnopqrst') { + return false; + } + + return true; + } catch (err) { + // We don't expect any of the above to throw, but better to be safe. + return false; + } +} + +module.exports = shouldUseNative() ? Object.assign : function (target, source) { + var from; + var to = toObject(target); + var symbols; + + for (var s = 1; s < arguments.length; s++) { + from = Object(arguments[s]); + + for (var key in from) { + if (hasOwnProperty.call(from, key)) { + to[key] = from[key]; + } + } + + if (getOwnPropertySymbols) { + symbols = getOwnPropertySymbols(from); + for (var i = 0; i < symbols.length; i++) { + if (propIsEnumerable.call(from, symbols[i])) { + to[symbols[i]] = from[symbols[i]]; + } + } + } + } + + return to; +}; diff --git a/node_modules/object-assign/license b/node_modules/object-assign/license new file mode 100644 index 0000000..654d0bf --- /dev/null +++ b/node_modules/object-assign/license @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Sindre Sorhus (sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/object-assign/package.json b/node_modules/object-assign/package.json new file mode 100644 index 0000000..503eb1e --- /dev/null +++ b/node_modules/object-assign/package.json @@ -0,0 +1,42 @@ +{ + "name": "object-assign", + "version": "4.1.1", + "description": "ES2015 `Object.assign()` ponyfill", + "license": "MIT", + "repository": "sindresorhus/object-assign", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "sindresorhus.com" + }, + "engines": { + "node": ">=0.10.0" + }, + "scripts": { + "test": "xo && ava", + "bench": "matcha bench.js" + }, + "files": [ + "index.js" + ], + "keywords": [ + "object", + "assign", + "extend", + "properties", + "es2015", + "ecmascript", + "harmony", + "ponyfill", + "prollyfill", + "polyfill", + "shim", + "browser" + ], + "devDependencies": { + "ava": "^0.16.0", + "lodash": "^4.16.4", + "matcha": "^0.7.0", + "xo": "^0.16.0" + } +} diff --git a/node_modules/object-assign/readme.md b/node_modules/object-assign/readme.md new file mode 100644 index 0000000..1be09d3 --- /dev/null +++ b/node_modules/object-assign/readme.md @@ -0,0 +1,61 @@ +# object-assign [![Build Status](https://travis-ci.org/sindresorhus/object-assign.svg?branch=master)](https://travis-ci.org/sindresorhus/object-assign) + +> ES2015 [`Object.assign()`](http://www.2ality.com/2014/01/object-assign.html) [ponyfill](https://ponyfill.com) + + +## Use the built-in + +Node.js 4 and up, as well as every evergreen browser (Chrome, Edge, Firefox, Opera, Safari), +support `Object.assign()` :tada:. If you target only those environments, then by all +means, use `Object.assign()` instead of this package. + + +## Install + +``` +$ npm install --save object-assign +``` + + +## Usage + +```js +const objectAssign = require('object-assign'); + +objectAssign({foo: 0}, {bar: 1}); +//=> {foo: 0, bar: 1} + +// multiple sources +objectAssign({foo: 0}, {bar: 1}, {baz: 2}); +//=> {foo: 0, bar: 1, baz: 2} + +// overwrites equal keys +objectAssign({foo: 0}, {foo: 1}, {foo: 2}); +//=> {foo: 2} + +// ignores null and undefined sources +objectAssign({foo: 0}, null, {bar: 1}, undefined); +//=> {foo: 0, bar: 1} +``` + + +## API + +### objectAssign(target, [source, ...]) + +Assigns enumerable own properties of `source` objects to the `target` object and returns the `target` object. Additional `source` objects will overwrite previous ones. 
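One behaviour worth calling out, since the fallback in `index.js` handles it explicitly: enumerable own **symbol** keys are copied as well. A minimal sketch (names and values below are purely illustrative, and it assumes an environment with `Symbol` support):

```js
const objectAssign = require('object-assign');

const sym = Symbol('unit');
const source = {};
source[sym] = 'kg';

// Own enumerable symbol keys are copied too, matching the
// getOwnPropertySymbols branch in index.js (and native Object.assign).
const target = objectAssign({value: 3}, source);

console.log(target[sym]);
//=> 'kg'
```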
+ + +## Resources + +- [ES2015 spec - Object.assign](https://people.mozilla.org/~jorendorff/es6-draft.html#sec-object.assign) + + +## Related + +- [deep-assign](https://github.com/sindresorhus/deep-assign) - Recursive `Object.assign()` + + +## License + +MIT © [Sindre Sorhus](https://sindresorhus.com) diff --git a/node_modules/process-nextick-args/index.js b/node_modules/process-nextick-args/index.js new file mode 100644 index 0000000..3eecf11 --- /dev/null +++ b/node_modules/process-nextick-args/index.js @@ -0,0 +1,45 @@ +'use strict'; + +if (typeof process === 'undefined' || + !process.version || + process.version.indexOf('v0.') === 0 || + process.version.indexOf('v1.') === 0 && process.version.indexOf('v1.8.') !== 0) { + module.exports = { nextTick: nextTick }; +} else { + module.exports = process +} + +function nextTick(fn, arg1, arg2, arg3) { + if (typeof fn !== 'function') { + throw new TypeError('"callback" argument must be a function'); + } + var len = arguments.length; + var args, i; + switch (len) { + case 0: + case 1: + return process.nextTick(fn); + case 2: + return process.nextTick(function afterTickOne() { + fn.call(null, arg1); + }); + case 3: + return process.nextTick(function afterTickTwo() { + fn.call(null, arg1, arg2); + }); + case 4: + return process.nextTick(function afterTickThree() { + fn.call(null, arg1, arg2, arg3); + }); + default: + args = new Array(len - 1); + i = 0; + while (i < args.length) { + args[i++] = arguments[i]; + } + return process.nextTick(function afterTick() { + fn.apply(null, args); + }); + } +} + diff --git a/node_modules/process-nextick-args/license.md b/node_modules/process-nextick-args/license.md new file mode 100644 index 0000000..c67e353 --- /dev/null +++ b/node_modules/process-nextick-args/license.md @@ -0,0 +1,19 @@ +# Copyright (c) 2015 Calvin Metcalf + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +**THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.** diff --git a/node_modules/process-nextick-args/package.json b/node_modules/process-nextick-args/package.json new file mode 100644 index 0000000..6070b72 --- /dev/null +++ b/node_modules/process-nextick-args/package.json @@ -0,0 +1,25 @@ +{ + "name": "process-nextick-args", + "version": "2.0.1", + "description": "process.nextTick but always with args", + "main": "index.js", + "files": [ + "index.js" + ], + "scripts": { + "test": "node test.js" + }, + "repository": { + "type": "git", + "url": "https://github.com/calvinmetcalf/process-nextick-args.git" + }, + "author": "", + "license": "MIT", + "bugs": { + "url": "https://github.com/calvinmetcalf/process-nextick-args/issues" + }, + "homepage": "https://github.com/calvinmetcalf/process-nextick-args", + "devDependencies": { + "tap": "~0.2.6" + } +} diff --git a/node_modules/process-nextick-args/readme.md b/node_modules/process-nextick-args/readme.md new file mode 100644 index 0000000..ecb432c --- /dev/null +++ b/node_modules/process-nextick-args/readme.md @@ -0,0 +1,18 @@ +process-nextick-args +===== + +[![Build Status](https://travis-ci.org/calvinmetcalf/process-nextick-args.svg?branch=master)](https://travis-ci.org/calvinmetcalf/process-nextick-args) + +```bash +npm install --save process-nextick-args +``` + +Always be able to pass arguments to process.nextTick, no matter the platform + +```js +var pna = require('process-nextick-args'); + +pna.nextTick(function (a, b, c) { + console.log(a, b, c); +}, 'step', 3, 'profit'); +``` diff --git a/node_modules/readable-stream/.travis.yml b/node_modules/readable-stream/.travis.yml new file mode 100644 index 0000000..f62cdac --- /dev/null +++ b/node_modules/readable-stream/.travis.yml @@ -0,0 +1,34 @@ +sudo: false +language: node_js +before_install: + - (test $NPM_LEGACY && npm install -g npm@2 && npm install -g npm@3) || true +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: NPM_LEGACY=true + - node_js: '0.10' + env: NPM_LEGACY=true + - node_js: '0.11' + env: NPM_LEGACY=true + - node_js: '0.12' + env: NPM_LEGACY=true + - node_js: 1 + env: NPM_LEGACY=true + - node_js: 2 + env: NPM_LEGACY=true + - node_js: 3 + env: NPM_LEGACY=true + - node_js: 4 + - node_js: 5 + - node_js: 6 + - node_js: 7 + - node_js: 8 + - node_js: 9 +script: "npm run test" +env: + global: + - secure: rE2Vvo7vnjabYNULNyLFxOyt98BoJexDqsiOnfiD6kLYYsiQGfr/sbZkPMOFm9qfQG7pjqx+zZWZjGSswhTt+626C0t/njXqug7Yps4c3dFblzGfreQHp7wNX5TFsvrxd6dAowVasMp61sJcRnB2w8cUzoe3RAYUDHyiHktwqMc= + - secure: g9YINaKAdMatsJ28G9jCGbSaguXCyxSTy+pBO6Ch0Cf57ZLOTka3HqDj8p3nV28LUIHZ3ut5WO43CeYKwt4AUtLpBS3a0dndHdY6D83uY6b2qh5hXlrcbeQTq2cvw2y95F7hm4D1kwrgZ7ViqaKggRcEupAL69YbJnxeUDKWEdI= diff --git a/node_modules/readable-stream/CONTRIBUTING.md b/node_modules/readable-stream/CONTRIBUTING.md new file mode 100644 index 0000000..f478d58 --- /dev/null +++ b/node_modules/readable-stream/CONTRIBUTING.md @@ -0,0 +1,38 @@ +# Developer's Certificate of Origin 1.1 + +By making a contribution to this project, I certify that: + +* (a) The contribution was created in whole or in part by me and I + have the right to submit it under the open source license + indicated in the file; or + +* (b) The contribution is based upon previous work that, to the best + of my knowledge, is 
covered under an appropriate open source + license and I have the right under that license to submit that + work with modifications, whether created in whole or in part + by me, under the same open source license (unless I am + permitted to submit under a different license), as indicated + in the file; or + +* (c) The contribution was provided directly to me by some other + person who certified (a), (b) or (c) and I have not modified + it. + +* (d) I understand and agree that this project and the contribution + are public and that a record of the contribution (including all + personal information I submit with it, including my sign-off) is + maintained indefinitely and may be redistributed consistent with + this project or the open source license(s) involved. + +## Moderation Policy + +The [Node.js Moderation Policy] applies to this WG. + +## Code of Conduct + +The [Node.js Code of Conduct][] applies to this WG. + +[Node.js Code of Conduct]: +https://github.com/nodejs/node/blob/master/CODE_OF_CONDUCT.md +[Node.js Moderation Policy]: +https://github.com/nodejs/TSC/blob/master/Moderation-Policy.md diff --git a/node_modules/readable-stream/GOVERNANCE.md b/node_modules/readable-stream/GOVERNANCE.md new file mode 100644 index 0000000..16ffb93 --- /dev/null +++ b/node_modules/readable-stream/GOVERNANCE.md @@ -0,0 +1,136 @@ +### Streams Working Group + +The Node.js Streams is jointly governed by a Working Group +(WG) +that is responsible for high-level guidance of the project. + +The WG has final authority over this project including: + +* Technical direction +* Project governance and process (including this policy) +* Contribution policy +* GitHub repository hosting +* Conduct guidelines +* Maintaining the list of additional Collaborators + +For the current list of WG members, see the project +[README.md](./README.md#current-project-team-members). + +### Collaborators + +The readable-stream GitHub repository is +maintained by the WG and additional Collaborators who are added by the +WG on an ongoing basis. + +Individuals making significant and valuable contributions are made +Collaborators and given commit-access to the project. These +individuals are identified by the WG and their addition as +Collaborators is discussed during the WG meeting. + +_Note:_ If you make a significant contribution and are not considered +for commit-access log an issue or contact a WG member directly and it +will be brought up in the next WG meeting. + +Modifications of the contents of the readable-stream repository are +made on +a collaborative basis. Anybody with a GitHub account may propose a +modification via pull request and it will be considered by the project +Collaborators. All pull requests must be reviewed and accepted by a +Collaborator with sufficient expertise who is able to take full +responsibility for the change. In the case of pull requests proposed +by an existing Collaborator, an additional Collaborator is required +for sign-off. Consensus should be sought if additional Collaborators +participate and there is disagreement around a particular +modification. See _Consensus Seeking Process_ below for further detail +on the consensus model used for governance. + +Collaborators may opt to elevate significant or controversial +modifications, or modifications that have not found consensus to the +WG for discussion by assigning the ***WG-agenda*** tag to a pull +request or issue. The WG should serve as the final arbiter where +required. 
+ +For the current list of Collaborators, see the project +[README.md](./README.md#members). + +### WG Membership + +WG seats are not time-limited. There is no fixed size of the WG. +However, the expected target is between 6 and 12, to ensure adequate +coverage of important areas of expertise, balanced with the ability to +make decisions efficiently. + +There is no specific set of requirements or qualifications for WG +membership beyond these rules. + +The WG may add additional members to the WG by unanimous consensus. + +A WG member may be removed from the WG by voluntary resignation, or by +unanimous consensus of all other WG members. + +Changes to WG membership should be posted in the agenda, and may be +suggested as any other agenda item (see "WG Meetings" below). + +If an addition or removal is proposed during a meeting, and the full +WG is not in attendance to participate, then the addition or removal +is added to the agenda for the subsequent meeting. This is to ensure +that all members are given the opportunity to participate in all +membership decisions. If a WG member is unable to attend a meeting +where a planned membership decision is being made, then their consent +is assumed. + +No more than 1/3 of the WG members may be affiliated with the same +employer. If removal or resignation of a WG member, or a change of +employment by a WG member, creates a situation where more than 1/3 of +the WG membership shares an employer, then the situation must be +immediately remedied by the resignation or removal of one or more WG +members affiliated with the over-represented employer(s). + +### WG Meetings + +The WG meets occasionally on a Google Hangout On Air. A designated moderator +approved by the WG runs the meeting. Each meeting should be +published to YouTube. + +Items are added to the WG agenda that are considered contentious or +are modifications of governance, contribution policy, WG membership, +or release process. + +The intention of the agenda is not to approve or review all patches; +that should happen continuously on GitHub and be handled by the larger +group of Collaborators. + +Any community member or contributor can ask that something be added to +the next meeting's agenda by logging a GitHub Issue. Any Collaborator, +WG member or the moderator can add the item to the agenda by adding +the ***WG-agenda*** tag to the issue. + +Prior to each WG meeting the moderator will share the Agenda with +members of the WG. WG members can add any items they like to the +agenda at the beginning of each meeting. The moderator and the WG +cannot veto or remove items. + +The WG may invite persons or representatives from certain projects to +participate in a non-voting capacity. + +The moderator is responsible for summarizing the discussion of each +agenda item and sends it as a pull request after the meeting. + +### Consensus Seeking Process + +The WG follows a +[Consensus +Seeking](http://en.wikipedia.org/wiki/Consensus-seeking_decision-making) +decision-making model. + +When an agenda item has appeared to reach a consensus the moderator +will ask "Does anyone object?" as a final call for dissent from the +consensus. + +If an agenda item cannot reach a consensus a WG member can call for +either a closing vote or a vote to table the issue to the next +meeting. The call for a vote must be seconded by a majority of the WG +or else the discussion will continue. Simple majority wins. + +Note that changes to WG membership require a majority consensus. See +"WG Membership" above. 
diff --git a/node_modules/readable-stream/LICENSE b/node_modules/readable-stream/LICENSE new file mode 100644 index 0000000..2873b3b --- /dev/null +++ b/node_modules/readable-stream/LICENSE @@ -0,0 +1,47 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" diff --git a/node_modules/readable-stream/README.md b/node_modules/readable-stream/README.md new file mode 100644 index 0000000..f1c5a93 --- /dev/null +++ b/node_modules/readable-stream/README.md @@ -0,0 +1,58 @@ +# readable-stream + +***Node-core v8.17.0 streams for userland*** [![Build Status](https://travis-ci.org/nodejs/readable-stream.svg?branch=master)](https://travis-ci.org/nodejs/readable-stream) + + +[![NPM](https://nodei.co/npm/readable-stream.png?downloads=true&downloadRank=true)](https://nodei.co/npm/readable-stream/) +[![NPM](https://nodei.co/npm-dl/readable-stream.png?&months=6&height=3)](https://nodei.co/npm/readable-stream/) + + +[![Sauce Test Status](https://saucelabs.com/browser-matrix/readable-stream.svg)](https://saucelabs.com/u/readable-stream) + +```bash +npm install --save readable-stream +``` + +***Node-core streams for userland*** + +This package is a mirror of the Streams2 and Streams3 implementations in +Node-core. 
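In practice it is a drop-in replacement for the core module: require the constructors from `readable-stream` instead of `stream` and the rest of the code stays the same. A minimal sketch (the stream contents below are purely illustrative):

```js
var Readable = require('readable-stream').Readable;

// Same constructor options as core streams, but pinned to the stream
// implementation shipped by this package rather than the running Node version.
var source = new Readable({
  read: function () {
    this.push('hello ');
    this.push('world\n');
    this.push(null); // signal end-of-stream
  }
});

source.pipe(process.stdout);
```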
+ +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.17.0/docs/api/stream.html). + +If you want to guarantee a stable streams base, regardless of what version of +Node you, or the users of your libraries are using, use **readable-stream** *only* and avoid the *"stream"* module in Node-core, for background see [this blogpost](http://r.va.gg/2014/06/why-i-dont-use-nodes-core-stream-module.html). + +As of version 2.0.0 **readable-stream** uses semantic versioning. + +# Streams Working Group + +`readable-stream` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + + +## Team Members + +* **Chris Dickinson** ([@chrisdickinson](https://github.com/chrisdickinson)) <christopher.s.dickinson@gmail.com> + - Release GPG key: 9554F04D7259F04124DE6B476D5A82AC7E37093B +* **Calvin Metcalf** ([@calvinmetcalf](https://github.com/calvinmetcalf)) <calvin.metcalf@gmail.com> + - Release GPG key: F3EF5F62A87FC27A22E643F714CE4FF5015AA242 +* **Rod Vagg** ([@rvagg](https://github.com/rvagg)) <rod@vagg.org> + - Release GPG key: DD8F2338BAE7501E3DD5AC78C273792F7D83545D +* **Sam Newman** ([@sonewman](https://github.com/sonewman)) <newmansam@outlook.com> +* **Mathias Buus** ([@mafintosh](https://github.com/mafintosh)) <mathiasbuus@gmail.com> +* **Domenic Denicola** ([@domenic](https://github.com/domenic)) <d@domenic.me> +* **Matteo Collina** ([@mcollina](https://github.com/mcollina)) <matteo.collina@gmail.com> + - Release GPG key: 3ABC01543F22DD2239285CDD818674489FBC127E +* **Irina Shestak** ([@lrlna](https://github.com/lrlna)) <shestak.irina@gmail.com> diff --git a/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md b/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md new file mode 100644 index 0000000..83275f1 --- /dev/null +++ b/node_modules/readable-stream/doc/wg-meetings/2015-01-30.md @@ -0,0 +1,60 @@ +# streams WG Meeting 2015-01-30 + +## Links + +* **Google Hangouts Video**: http://www.youtube.com/watch?v=I9nDOSGfwZg +* **GitHub Issue**: https://github.com/iojs/readable-stream/issues/106 +* **Original Minutes Google Doc**: https://docs.google.com/document/d/17aTgLnjMXIrfjgNaTUnHQO7m3xgzHR2VXBTmi03Qii4/ + +## Agenda + +Extracted from https://github.com/iojs/readable-stream/labels/wg-agenda prior to meeting. + +* adopt a charter [#105](https://github.com/iojs/readable-stream/issues/105) +* release and versioning strategy [#101](https://github.com/iojs/readable-stream/issues/101) +* simpler stream creation [#102](https://github.com/iojs/readable-stream/issues/102) +* proposal: deprecate implicit flowing of streams [#99](https://github.com/iojs/readable-stream/issues/99) + +## Minutes + +### adopt a charter + +* group: +1's all around + +### What versioning scheme should be adopted? 
+* group: +1’s 3.0.0 +* domenic+group: pulling in patches from other sources where appropriate +* mikeal: version independently, suggesting versions for io.js +* mikeal+domenic: work with TC to notify in advance of changes +simpler stream creation + +### streamline creation of streams +* sam: streamline creation of streams +* domenic: nice simple solution posted + but, we lose the opportunity to change the model + may not be backwards incompatible (double check keys) + + **action item:** domenic will check + +### remove implicit flowing of streams on(‘data’) +* add isFlowing / isPaused +* mikeal: worrying that we’re documenting polyfill methods – confuses users +* domenic: more reflective API is probably good, with warning labels for users +* new section for mad scientists (reflective stream access) +* calvin: name the “third state” +* mikeal: maybe borrow the name from whatwg? +* domenic: we’re missing the “third state” +* consensus: kind of difficult to name the third state +* mikeal: figure out differences in states / compat +* mathias: always flow on data – eliminates third state + * explore what it breaks + +**action items:** +* ask isaac for ability to list packages by what public io.js APIs they use (esp. Stream) +* ask rod/build for infrastructure +* **chris**: explore the “flow on data” approach +* add isPaused/isFlowing +* add new docs section +* move isPaused to that section + + diff --git a/node_modules/readable-stream/duplex-browser.js b/node_modules/readable-stream/duplex-browser.js new file mode 100644 index 0000000..f8b2db8 --- /dev/null +++ b/node_modules/readable-stream/duplex-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_duplex.js'); diff --git a/node_modules/readable-stream/duplex.js b/node_modules/readable-stream/duplex.js new file mode 100644 index 0000000..46924cb --- /dev/null +++ b/node_modules/readable-stream/duplex.js @@ -0,0 +1 @@ +module.exports = require('./readable').Duplex diff --git a/node_modules/readable-stream/lib/_stream_duplex.js b/node_modules/readable-stream/lib/_stream_duplex.js new file mode 100644 index 0000000..57003c3 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_duplex.js @@ -0,0 +1,131 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a duplex stream is just a stream that is both readable and writable. 
+// Since JS doesn't have multiple prototypal inheritance, this class +// prototypally inherits from Readable, and then parasitically from +// Writable. + +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +/**/ +var objectKeys = Object.keys || function (obj) { + var keys = []; + for (var key in obj) { + keys.push(key); + }return keys; +}; +/**/ + +module.exports = Duplex; + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +var Readable = require('./_stream_readable'); +var Writable = require('./_stream_writable'); + +util.inherits(Duplex, Readable); + +{ + // avoid scope creep, the keys array can then be collected + var keys = objectKeys(Writable.prototype); + for (var v = 0; v < keys.length; v++) { + var method = keys[v]; + if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; + } +} + +function Duplex(options) { + if (!(this instanceof Duplex)) return new Duplex(options); + + Readable.call(this, options); + Writable.call(this, options); + + if (options && options.readable === false) this.readable = false; + + if (options && options.writable === false) this.writable = false; + + this.allowHalfOpen = true; + if (options && options.allowHalfOpen === false) this.allowHalfOpen = false; + + this.once('end', onend); +} + +Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// the no-half-open enforcer +function onend() { + // if we allow half-open state, or if the writable side ended, + // then we're ok. + if (this.allowHalfOpen || this._writableState.ended) return; + + // no more data can be written. + // But allow more writes to happen in this tick. + pna.nextTick(onEndNT, this); +} + +function onEndNT(self) { + self.end(); +} + +Object.defineProperty(Duplex.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined || this._writableState === undefined) { + return false; + } + return this._readableState.destroyed && this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (this._readableState === undefined || this._writableState === undefined) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + this._writableState.destroyed = value; + } +}); + +Duplex.prototype._destroy = function (err, cb) { + this.push(null); + this.end(); + + pna.nextTick(cb, err); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_passthrough.js b/node_modules/readable-stream/lib/_stream_passthrough.js new file mode 100644 index 0000000..612edb4 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_passthrough.js @@ -0,0 +1,47 @@ +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a passthrough stream. +// basically just the most minimal sort of Transform stream. +// Every written chunk gets output as-is. + +'use strict'; + +module.exports = PassThrough; + +var Transform = require('./_stream_transform'); + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +util.inherits(PassThrough, Transform); + +function PassThrough(options) { + if (!(this instanceof PassThrough)) return new PassThrough(options); + + Transform.call(this, options); +} + +PassThrough.prototype._transform = function (chunk, encoding, cb) { + cb(null, chunk); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_readable.js b/node_modules/readable-stream/lib/_stream_readable.js new file mode 100644 index 0000000..3af95cb --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_readable.js @@ -0,0 +1,1019 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Readable; + +/**/ +var isArray = require('isarray'); +/**/ + +/**/ +var Duplex; +/**/ + +Readable.ReadableState = ReadableState; + +/**/ +var EE = require('events').EventEmitter; + +var EElistenerCount = function (emitter, type) { + return emitter.listeners(type).length; +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +/**/ +var debugUtil = require('util'); +var debug = void 0; +if (debugUtil && debugUtil.debuglog) { + debug = debugUtil.debuglog('stream'); +} else { + debug = function () {}; +} +/**/ + +var BufferList = require('./internal/streams/BufferList'); +var destroyImpl = require('./internal/streams/destroy'); +var StringDecoder; + +util.inherits(Readable, Stream); + +var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; + +function prependListener(emitter, event, fn) { + // Sadly this is not cacheable as some libraries bundle their own + // event emitter implementation with them. + if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); + + // This is a hack to make sure that our error handler is attached before any + // userland ones. NEVER DO THIS. This is here only because this code needs + // to continue to work with older versions of Node.js that do not include + // the prependListener() method. The goal is to eventually remove this hack. + if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; +} + +function ReadableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag. Used to make read(n) ignore n and to + // make all the buffer merging and length checks go away + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; + + // the point at which it stops calling _read() to fill the buffer + // Note: 0 is a valid value, means "don't call _read preemptively ever" + var hwm = options.highWaterMark; + var readableHwm = options.readableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (readableHwm || readableHwm === 0)) this.highWaterMark = readableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. 
+ this.highWaterMark = Math.floor(this.highWaterMark); + + // A linked list is used to store data chunks instead of an array because the + // linked list can remove elements from the beginning faster than + // array.shift() + this.buffer = new BufferList(); + this.length = 0; + this.pipes = null; + this.pipesCount = 0; + this.flowing = null; + this.ended = false; + this.endEmitted = false; + this.reading = false; + + // a flag to be able to tell if the event 'readable'/'data' is emitted + // immediately, or on a later tick. We set this to true at first, because + // any actions that shouldn't happen until "later" should generally also + // not happen before the first read call. + this.sync = true; + + // whenever we return null, then we set a flag to say + // that we're awaiting a 'readable' event emission. + this.needReadable = false; + this.emittedReadable = false; + this.readableListening = false; + this.resumeScheduled = false; + + // has it been destroyed + this.destroyed = false; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. + // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // the number of writers that are awaiting a drain event in .pipe()s + this.awaitDrain = 0; + + // if true, a maybeReadMore has been scheduled + this.readingMore = false; + + this.decoder = null; + this.encoding = null; + if (options.encoding) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this.decoder = new StringDecoder(options.encoding); + this.encoding = options.encoding; + } +} + +function Readable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + if (!(this instanceof Readable)) return new Readable(options); + + this._readableState = new ReadableState(options, this); + + // legacy + this.readable = true; + + if (options) { + if (typeof options.read === 'function') this._read = options.read; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + } + + Stream.call(this); +} + +Object.defineProperty(Readable.prototype, 'destroyed', { + get: function () { + if (this._readableState === undefined) { + return false; + } + return this._readableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._readableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._readableState.destroyed = value; + } +}); + +Readable.prototype.destroy = destroyImpl.destroy; +Readable.prototype._undestroy = destroyImpl.undestroy; +Readable.prototype._destroy = function (err, cb) { + this.push(null); + cb(err); +}; + +// Manually shove something into the read() buffer. +// This returns true if the highWaterMark has not been hit yet, +// similar to how Writable.write() returns true if you should +// write() some more. 
+Readable.prototype.push = function (chunk, encoding) { + var state = this._readableState; + var skipChunkCheck; + + if (!state.objectMode) { + if (typeof chunk === 'string') { + encoding = encoding || state.defaultEncoding; + if (encoding !== state.encoding) { + chunk = Buffer.from(chunk, encoding); + encoding = ''; + } + skipChunkCheck = true; + } + } else { + skipChunkCheck = true; + } + + return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); +}; + +// Unshift should *always* be something directly out of read() +Readable.prototype.unshift = function (chunk) { + return readableAddChunk(this, chunk, null, true, false); +}; + +function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { + var state = stream._readableState; + if (chunk === null) { + state.reading = false; + onEofChunk(stream, state); + } else { + var er; + if (!skipChunkCheck) er = chunkInvalid(state, chunk); + if (er) { + stream.emit('error', er); + } else if (state.objectMode || chunk && chunk.length > 0) { + if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (addToFront) { + if (state.endEmitted) stream.emit('error', new Error('stream.unshift() after end event'));else addChunk(stream, state, chunk, true); + } else if (state.ended) { + stream.emit('error', new Error('stream.push() after EOF')); + } else { + state.reading = false; + if (state.decoder && !encoding) { + chunk = state.decoder.write(chunk); + if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); + } else { + addChunk(stream, state, chunk, false); + } + } + } else if (!addToFront) { + state.reading = false; + } + } + + return needMoreData(state); +} + +function addChunk(stream, state, chunk, addToFront) { + if (state.flowing && state.length === 0 && !state.sync) { + stream.emit('data', chunk); + stream.read(0); + } else { + // update the buffer info. + state.length += state.objectMode ? 1 : chunk.length; + if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); + + if (state.needReadable) emitReadable(stream); + } + maybeReadMore(stream, state); +} + +function chunkInvalid(state, chunk) { + var er; + if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + return er; +} + +// if it's past the high water mark, we can push in some more. +// Also, if we have no data yet, we can stand some +// more bytes. This is to work around cases where hwm=0, +// such as the repl. Also, if the push() triggered a +// readable event, and the user called read(largeNumber) such that +// needReadable was set, then we ought to push more, so that another +// 'readable' event will be triggered. +function needMoreData(state) { + return !state.ended && (state.needReadable || state.length < state.highWaterMark || state.length === 0); +} + +Readable.prototype.isPaused = function () { + return this._readableState.flowing === false; +}; + +// backwards compatibility. 
+Readable.prototype.setEncoding = function (enc) { + if (!StringDecoder) StringDecoder = require('string_decoder/').StringDecoder; + this._readableState.decoder = new StringDecoder(enc); + this._readableState.encoding = enc; + return this; +}; + +// Don't raise the hwm > 8MB +var MAX_HWM = 0x800000; +function computeNewHighWaterMark(n) { + if (n >= MAX_HWM) { + n = MAX_HWM; + } else { + // Get the next highest power of 2 to prevent increasing hwm excessively in + // tiny amounts + n--; + n |= n >>> 1; + n |= n >>> 2; + n |= n >>> 4; + n |= n >>> 8; + n |= n >>> 16; + n++; + } + return n; +} + +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function howMuchToRead(n, state) { + if (n <= 0 || state.length === 0 && state.ended) return 0; + if (state.objectMode) return 1; + if (n !== n) { + // Only flow one buffer at a time + if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; + } + // If we're asking for more than the current hwm, then raise the hwm. + if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); + if (n <= state.length) return n; + // Don't have enough + if (!state.ended) { + state.needReadable = true; + return 0; + } + return state.length; +} + +// you can override either this method, or the async _read(n) below. +Readable.prototype.read = function (n) { + debug('read', n); + n = parseInt(n, 10); + var state = this._readableState; + var nOrig = n; + + if (n !== 0) state.emittedReadable = false; + + // if we're doing read(0) to trigger a readable event, but we + // already have a bunch of data in the buffer, then just trigger + // the 'readable' event and move on. + if (n === 0 && state.needReadable && (state.length >= state.highWaterMark || state.ended)) { + debug('read: emitReadable', state.length, state.ended); + if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); + return null; + } + + n = howMuchToRead(n, state); + + // if we've ended, and we're now clear, then finish it up. + if (n === 0 && state.ended) { + if (state.length === 0) endReadable(this); + return null; + } + + // All the actual chunk generation logic needs to be + // *below* the call to _read. The reason is that in certain + // synthetic stream cases, such as passthrough streams, _read + // may be a completely synchronous operation which may change + // the state of the read buffer, providing enough data when + // before there was *not* enough. + // + // So, the steps are: + // 1. Figure out what the state of things will be after we do + // a read from the buffer. + // + // 2. If that resulting state will trigger a _read, then call _read. + // Note that this may be asynchronous, or synchronous. Yes, it is + // deeply ugly to write APIs this way, but that still doesn't mean + // that the Readable class should behave improperly, as streams are + // designed to be sync/async agnostic. + // Take note if the _read call is sync or async (ie, if the read call + // has returned yet), so that we know whether or not it's safe to emit + // 'readable' etc. + // + // 3. Actually pull the requested chunks out of the buffer and return. + + // if we need a readable event, then we need to do some reading. 
+ var doRead = state.needReadable; + debug('need readable', doRead); + + // if we currently have less than the highWaterMark, then also read some + if (state.length === 0 || state.length - n < state.highWaterMark) { + doRead = true; + debug('length less than watermark', doRead); + } + + // however, if we've ended, then there's no point, and if we're already + // reading, then it's unnecessary. + if (state.ended || state.reading) { + doRead = false; + debug('reading or ended', doRead); + } else if (doRead) { + debug('do read'); + state.reading = true; + state.sync = true; + // if the length is currently zero, then we *need* a readable event. + if (state.length === 0) state.needReadable = true; + // call internal read method + this._read(state.highWaterMark); + state.sync = false; + // If _read pushed data synchronously, then `reading` will be false, + // and we need to re-evaluate how much data we can return to the user. + if (!state.reading) n = howMuchToRead(nOrig, state); + } + + var ret; + if (n > 0) ret = fromList(n, state);else ret = null; + + if (ret === null) { + state.needReadable = true; + n = 0; + } else { + state.length -= n; + } + + if (state.length === 0) { + // If we have nothing in the buffer, then we want to know + // as soon as we *do* get something into the buffer. + if (!state.ended) state.needReadable = true; + + // If we tried to read() past the EOF, then emit end on the next tick. + if (nOrig !== n && state.ended) endReadable(this); + } + + if (ret !== null) this.emit('data', ret); + + return ret; +}; + +function onEofChunk(stream, state) { + if (state.ended) return; + if (state.decoder) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) { + state.buffer.push(chunk); + state.length += state.objectMode ? 1 : chunk.length; + } + } + state.ended = true; + + // emit 'readable' now to make sure it gets picked up. + emitReadable(stream); +} + +// Don't emit readable right away in sync mode, because this can trigger +// another read() call => stack overflow. This way, it might trigger +// a nextTick recursion warning, but that's not so bad. +function emitReadable(stream) { + var state = stream._readableState; + state.needReadable = false; + if (!state.emittedReadable) { + debug('emitReadable', state.flowing); + state.emittedReadable = true; + if (state.sync) pna.nextTick(emitReadable_, stream);else emitReadable_(stream); + } +} + +function emitReadable_(stream) { + debug('emit readable'); + stream.emit('readable'); + flow(stream); +} + +// at this point, the user has presumably seen the 'readable' event, +// and called read() to consume some data. that may have triggered +// in turn another _read(n) call, in which case reading = true if +// it's in progress. +// However, if we're not ended, or reading, and the length < hwm, +// then go ahead and try to read some more preemptively. +function maybeReadMore(stream, state) { + if (!state.readingMore) { + state.readingMore = true; + pna.nextTick(maybeReadMore_, stream, state); + } +} + +function maybeReadMore_(stream, state) { + var len = state.length; + while (!state.reading && !state.flowing && !state.ended && state.length < state.highWaterMark) { + debug('maybeReadMore read 0'); + stream.read(0); + if (len === state.length) + // didn't get any data, stop spinning. + break;else len = state.length; + } + state.readingMore = false; +} + +// abstract method. to be overridden in specific implementation classes. +// call cb(er, data) where data is <= n in length. 
+// for virtual (non-string, non-buffer) streams, "length" is somewhat +// arbitrary, and perhaps not very meaningful. +Readable.prototype._read = function (n) { + this.emit('error', new Error('_read() is not implemented')); +}; + +Readable.prototype.pipe = function (dest, pipeOpts) { + var src = this; + var state = this._readableState; + + switch (state.pipesCount) { + case 0: + state.pipes = dest; + break; + case 1: + state.pipes = [state.pipes, dest]; + break; + default: + state.pipes.push(dest); + break; + } + state.pipesCount += 1; + debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); + + var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; + + var endFn = doEnd ? onend : unpipe; + if (state.endEmitted) pna.nextTick(endFn);else src.once('end', endFn); + + dest.on('unpipe', onunpipe); + function onunpipe(readable, unpipeInfo) { + debug('onunpipe'); + if (readable === src) { + if (unpipeInfo && unpipeInfo.hasUnpiped === false) { + unpipeInfo.hasUnpiped = true; + cleanup(); + } + } + } + + function onend() { + debug('onend'); + dest.end(); + } + + // when the dest drains, it reduces the awaitDrain counter + // on the source. This would be more elegant with a .once() + // handler in flow(), but adding and removing repeatedly is + // too slow. + var ondrain = pipeOnDrain(src); + dest.on('drain', ondrain); + + var cleanedUp = false; + function cleanup() { + debug('cleanup'); + // cleanup event handlers once the pipe is broken + dest.removeListener('close', onclose); + dest.removeListener('finish', onfinish); + dest.removeListener('drain', ondrain); + dest.removeListener('error', onerror); + dest.removeListener('unpipe', onunpipe); + src.removeListener('end', onend); + src.removeListener('end', unpipe); + src.removeListener('data', ondata); + + cleanedUp = true; + + // if the reader is waiting for a drain event from this + // specific writer, then it would cause it to never start + // flowing again. + // So, if this is awaiting a drain, then we just call it now. + // If we don't know, then assume that we are waiting for one. + if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); + } + + // If the user pushes more data while we're writing to dest then we'll end up + // in ondata again. However, we only want to increase awaitDrain once because + // dest will only emit one 'drain' event for the multiple writes. + // => Introduce a guard on increasing awaitDrain. + var increasedAwaitDrain = false; + src.on('data', ondata); + function ondata(chunk) { + debug('ondata'); + increasedAwaitDrain = false; + var ret = dest.write(chunk); + if (false === ret && !increasedAwaitDrain) { + // If the user unpiped during `dest.write()`, it is possible + // to get stuck in a permanently paused state if that write + // also returned false. + // => Check whether `dest` is still a piping destination. + if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { + debug('false write response, pause', state.awaitDrain); + state.awaitDrain++; + increasedAwaitDrain = true; + } + src.pause(); + } + } + + // if the dest has an error, then stop piping into it. + // however, don't suppress the throwing behavior for this. 
+ function onerror(er) { + debug('onerror', er); + unpipe(); + dest.removeListener('error', onerror); + if (EElistenerCount(dest, 'error') === 0) dest.emit('error', er); + } + + // Make sure our error handler is attached before userland ones. + prependListener(dest, 'error', onerror); + + // Both close and finish should trigger unpipe, but only once. + function onclose() { + dest.removeListener('finish', onfinish); + unpipe(); + } + dest.once('close', onclose); + function onfinish() { + debug('onfinish'); + dest.removeListener('close', onclose); + unpipe(); + } + dest.once('finish', onfinish); + + function unpipe() { + debug('unpipe'); + src.unpipe(dest); + } + + // tell the dest that it's being piped to + dest.emit('pipe', src); + + // start the flow if it hasn't been started already. + if (!state.flowing) { + debug('pipe resume'); + src.resume(); + } + + return dest; +}; + +function pipeOnDrain(src) { + return function () { + var state = src._readableState; + debug('pipeOnDrain', state.awaitDrain); + if (state.awaitDrain) state.awaitDrain--; + if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { + state.flowing = true; + flow(src); + } + }; +} + +Readable.prototype.unpipe = function (dest) { + var state = this._readableState; + var unpipeInfo = { hasUnpiped: false }; + + // if we're not piping anywhere, then do nothing. + if (state.pipesCount === 0) return this; + + // just one destination. most common case. + if (state.pipesCount === 1) { + // passed in one, but it's not the right one. + if (dest && dest !== state.pipes) return this; + + if (!dest) dest = state.pipes; + + // got a match. + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + if (dest) dest.emit('unpipe', this, unpipeInfo); + return this; + } + + // slow case. multiple pipe destinations. + + if (!dest) { + // remove all. + var dests = state.pipes; + var len = state.pipesCount; + state.pipes = null; + state.pipesCount = 0; + state.flowing = false; + + for (var i = 0; i < len; i++) { + dests[i].emit('unpipe', this, { hasUnpiped: false }); + }return this; + } + + // try to find the right one. + var index = indexOf(state.pipes, dest); + if (index === -1) return this; + + state.pipes.splice(index, 1); + state.pipesCount -= 1; + if (state.pipesCount === 1) state.pipes = state.pipes[0]; + + dest.emit('unpipe', this, unpipeInfo); + + return this; +}; + +// set up data events if they are asked for +// Ensure readable listeners eventually get something +Readable.prototype.on = function (ev, fn) { + var res = Stream.prototype.on.call(this, ev, fn); + + if (ev === 'data') { + // Start flowing on next tick if stream isn't explicitly paused + if (this._readableState.flowing !== false) this.resume(); + } else if (ev === 'readable') { + var state = this._readableState; + if (!state.endEmitted && !state.readableListening) { + state.readableListening = state.needReadable = true; + state.emittedReadable = false; + if (!state.reading) { + pna.nextTick(nReadingNextTick, this); + } else if (state.length) { + emitReadable(this); + } + } + } + + return res; +}; +Readable.prototype.addListener = Readable.prototype.on; + +function nReadingNextTick(self) { + debug('readable nexttick read 0'); + self.read(0); +} + +// pause() and resume() are remnants of the legacy readable stream API +// If the user uses them, then switch into old mode. 
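The comment above refers to the legacy flow-control API that the `resume()` and `pause()` methods below implement. As a consumer-side illustration only (not part of the vendored file; the file name and timeout are arbitrary assumptions), throttling a flowing stream looks roughly like this:

```js
// Illustrative consumer-side sketch only (not part of the vendored file):
// pausing and resuming a flowing Readable, the legacy-style flow control the
// comment above refers to.
var fs = require('fs')

var src = fs.createReadStream(__filename)

src.on('data', function (chunk) {
  // attaching a 'data' listener puts the stream into flowing mode
  console.log('read %d bytes', chunk.length)
  src.pause() // stop 'data' events while we do asynchronous work
  setTimeout(function () {
    src.resume() // pick up where we left off
  }, 10)
})

src.on('end', function () {
  console.log('done')
})
```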
+Readable.prototype.resume = function () { + var state = this._readableState; + if (!state.flowing) { + debug('resume'); + state.flowing = true; + resume(this, state); + } + return this; +}; + +function resume(stream, state) { + if (!state.resumeScheduled) { + state.resumeScheduled = true; + pna.nextTick(resume_, stream, state); + } +} + +function resume_(stream, state) { + if (!state.reading) { + debug('resume read 0'); + stream.read(0); + } + + state.resumeScheduled = false; + state.awaitDrain = 0; + stream.emit('resume'); + flow(stream); + if (state.flowing && !state.reading) stream.read(0); +} + +Readable.prototype.pause = function () { + debug('call pause flowing=%j', this._readableState.flowing); + if (false !== this._readableState.flowing) { + debug('pause'); + this._readableState.flowing = false; + this.emit('pause'); + } + return this; +}; + +function flow(stream) { + var state = stream._readableState; + debug('flow', state.flowing); + while (state.flowing && stream.read() !== null) {} +} + +// wrap an old-style stream as the async data source. +// This is *not* part of the readable stream interface. +// It is an ugly unfortunate mess of history. +Readable.prototype.wrap = function (stream) { + var _this = this; + + var state = this._readableState; + var paused = false; + + stream.on('end', function () { + debug('wrapped end'); + if (state.decoder && !state.ended) { + var chunk = state.decoder.end(); + if (chunk && chunk.length) _this.push(chunk); + } + + _this.push(null); + }); + + stream.on('data', function (chunk) { + debug('wrapped data'); + if (state.decoder) chunk = state.decoder.write(chunk); + + // don't skip over falsy values in objectMode + if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; + + var ret = _this.push(chunk); + if (!ret) { + paused = true; + stream.pause(); + } + }); + + // proxy all the other methods. + // important when wrapping filters and duplexes. + for (var i in stream) { + if (this[i] === undefined && typeof stream[i] === 'function') { + this[i] = function (method) { + return function () { + return stream[method].apply(stream, arguments); + }; + }(i); + } + } + + // proxy certain important events. + for (var n = 0; n < kProxyEvents.length; n++) { + stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); + } + + // when we try to consume some more bytes, simply unpause the + // underlying stream. + this._read = function (n) { + debug('wrapped _read', n); + if (paused) { + paused = false; + stream.resume(); + } + }; + + return this; +}; + +Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._readableState.highWaterMark; + } +}); + +// exposed for testing purposes only. +Readable._fromList = fromList; + +// Pluck off n bytes from an array of buffers. +// Length is the combined lengths of all the buffers in the list. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. 
+function fromList(n, state) { + // nothing buffered + if (state.length === 0) return null; + + var ret; + if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { + // read it all, truncate the list + if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.head.data;else ret = state.buffer.concat(state.length); + state.buffer.clear(); + } else { + // read part of list + ret = fromListPartial(n, state.buffer, state.decoder); + } + + return ret; +} + +// Extracts only enough buffered data to satisfy the amount requested. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function fromListPartial(n, list, hasStrings) { + var ret; + if (n < list.head.data.length) { + // slice is the same for buffers and strings + ret = list.head.data.slice(0, n); + list.head.data = list.head.data.slice(n); + } else if (n === list.head.data.length) { + // first chunk is a perfect match + ret = list.shift(); + } else { + // result spans more than one buffer + ret = hasStrings ? copyFromBufferString(n, list) : copyFromBuffer(n, list); + } + return ret; +} + +// Copies a specified amount of characters from the list of buffered data +// chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBufferString(n, list) { + var p = list.head; + var c = 1; + var ret = p.data; + n -= ret.length; + while (p = p.next) { + var str = p.data; + var nb = n > str.length ? str.length : n; + if (nb === str.length) ret += str;else ret += str.slice(0, n); + n -= nb; + if (n === 0) { + if (nb === str.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = str.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +// Copies a specified amount of bytes from the list of buffered data chunks. +// This function is designed to be inlinable, so please take care when making +// changes to the function body. +function copyFromBuffer(n, list) { + var ret = Buffer.allocUnsafe(n); + var p = list.head; + var c = 1; + p.data.copy(ret); + n -= p.data.length; + while (p = p.next) { + var buf = p.data; + var nb = n > buf.length ? buf.length : n; + buf.copy(ret, ret.length - n, 0, nb); + n -= nb; + if (n === 0) { + if (nb === buf.length) { + ++c; + if (p.next) list.head = p.next;else list.head = list.tail = null; + } else { + list.head = p; + p.data = buf.slice(nb); + } + break; + } + ++c; + } + list.length -= c; + return ret; +} + +function endReadable(stream) { + var state = stream._readableState; + + // If we get here before consuming all the bytes, then that is a + // bug in node. Should never happen. + if (state.length > 0) throw new Error('"endReadable()" called on non-empty stream'); + + if (!state.endEmitted) { + state.ended = true; + pna.nextTick(endReadableNT, state, stream); + } +} + +function endReadableNT(state, stream) { + // Check that we didn't get one last unshift. 
+ if (!state.endEmitted && state.length === 0) { + state.endEmitted = true; + stream.readable = false; + stream.emit('end'); + } +} + +function indexOf(xs, x) { + for (var i = 0, l = xs.length; i < l; i++) { + if (xs[i] === x) return i; + } + return -1; +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_transform.js b/node_modules/readable-stream/lib/_stream_transform.js new file mode 100644 index 0000000..fcfc105 --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_transform.js @@ -0,0 +1,214 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// a transform stream is a readable/writable stream where you do +// something with the data. Sometimes it's called a "filter", +// but that's not a great name for it, since that implies a thing where +// some bits pass through, and others are simply ignored. (That would +// be a valid example of a transform, of course.) +// +// While the output is causally related to the input, it's not a +// necessarily symmetric or synchronous transformation. For example, +// a zlib stream might take multiple plain-text writes(), and then +// emit a single compressed chunk some time in the future. +// +// Here's how this works: +// +// The Transform stream has all the aspects of the readable and writable +// stream classes. When you write(chunk), that calls _write(chunk,cb) +// internally, and returns false if there's a lot of pending writes +// buffered up. When you call read(), that calls _read(n) until +// there's enough pending readable data buffered up. +// +// In a transform stream, the written data is placed in a buffer. When +// _read(n) is called, it transforms the queued up data, calling the +// buffered _write cb's as it consumes chunks. If consuming a single +// written chunk would result in multiple output chunks, then the first +// outputted bit calls the readcb, and subsequent chunks just go into +// the read buffer, and will cause it to emit 'readable' if necessary. +// +// This way, back-pressure is actually determined by the reading side, +// since _read has to be called to start processing a new chunk. However, +// a pathological inflate type of transform can cause excessive buffering +// here. For example, imagine a stream where every byte of input is +// interpreted as an integer from 0-255, and then results in that many +// bytes of output. 
Writing the 4 bytes {ff,ff,ff,ff} would result in +// 1kb of data being output. In this case, you could write a very small +// amount of input, and end up with a very large amount of output. In +// such a pathological inflating mechanism, there'd be no way to tell +// the system to stop doing the transform. A single 4MB write could +// cause the system to run out of memory. +// +// However, even in such a pathological case, only a single written chunk +// would be consumed, and then the rest would wait (un-transformed) until +// the results of the previous transformed chunk were consumed. + +'use strict'; + +module.exports = Transform; + +var Duplex = require('./_stream_duplex'); + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +util.inherits(Transform, Duplex); + +function afterTransform(er, data) { + var ts = this._transformState; + ts.transforming = false; + + var cb = ts.writecb; + + if (!cb) { + return this.emit('error', new Error('write callback called multiple times')); + } + + ts.writechunk = null; + ts.writecb = null; + + if (data != null) // single equals check for both `null` and `undefined` + this.push(data); + + cb(er); + + var rs = this._readableState; + rs.reading = false; + if (rs.needReadable || rs.length < rs.highWaterMark) { + this._read(rs.highWaterMark); + } +} + +function Transform(options) { + if (!(this instanceof Transform)) return new Transform(options); + + Duplex.call(this, options); + + this._transformState = { + afterTransform: afterTransform.bind(this), + needTransform: false, + transforming: false, + writecb: null, + writechunk: null, + writeencoding: null + }; + + // start out asking for a readable event once data is transformed. + this._readableState.needReadable = true; + + // we have implemented the _read method, and done the other things + // that Readable wants before the first _read call, so unset the + // sync guard flag. + this._readableState.sync = false; + + if (options) { + if (typeof options.transform === 'function') this._transform = options.transform; + + if (typeof options.flush === 'function') this._flush = options.flush; + } + + // When the writable side finishes, then flush out anything remaining. + this.on('prefinish', prefinish); +} + +function prefinish() { + var _this = this; + + if (typeof this._flush === 'function') { + this._flush(function (er, data) { + done(_this, er, data); + }); + } else { + done(this, null, null); + } +} + +Transform.prototype.push = function (chunk, encoding) { + this._transformState.needTransform = false; + return Duplex.prototype.push.call(this, chunk, encoding); +}; + +// This is the part where you do stuff! +// override this function in implementation classes. +// 'chunk' is an input chunk. +// +// Call `push(newChunk)` to pass along transformed output +// to the readable side. You may call 'push' zero or more times. +// +// Call `cb(err)` when you are done with this chunk. If you pass +// an error, then that'll put the hurt on the whole operation. If you +// never call cb(), then you'll never get another chunk. 
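The contract described in the comment above (push zero or more chunks, then call `cb`) can be made concrete with a minimal sketch. This is not part of the vendored file; the `upper` example and the use of the `transform` constructor option (which this file assigns to `_transform`) are illustrative assumptions.

```js
// Illustrative sketch only (not part of the vendored file): a Transform that
// upper-cases each incoming chunk via the `transform` constructor option.
var Transform = require('stream').Transform

var upper = new Transform({
  transform: function (chunk, encoding, cb) {
    // push() may be called zero or more times per written chunk
    this.push(chunk.toString().toUpperCase())
    // calling cb() tells the writable side this chunk is fully consumed
    cb()
  }
})

upper.on('data', function (out) {
  console.log(out.toString()) // "HELLO"
})

upper.write('hello')
upper.end()
```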
+Transform.prototype._transform = function (chunk, encoding, cb) { + throw new Error('_transform() is not implemented'); +}; + +Transform.prototype._write = function (chunk, encoding, cb) { + var ts = this._transformState; + ts.writecb = cb; + ts.writechunk = chunk; + ts.writeencoding = encoding; + if (!ts.transforming) { + var rs = this._readableState; + if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); + } +}; + +// Doesn't matter what the args are here. +// _transform does all the work. +// That we got here means that the readable side wants more data. +Transform.prototype._read = function (n) { + var ts = this._transformState; + + if (ts.writechunk !== null && ts.writecb && !ts.transforming) { + ts.transforming = true; + this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); + } else { + // mark that we need a transform, so that any data that comes in + // will get processed, now that we've asked for it. + ts.needTransform = true; + } +}; + +Transform.prototype._destroy = function (err, cb) { + var _this2 = this; + + Duplex.prototype._destroy.call(this, err, function (err2) { + cb(err2); + _this2.emit('close'); + }); +}; + +function done(stream, er, data) { + if (er) return stream.emit('error', er); + + if (data != null) // single equals check for both `null` and `undefined` + stream.push(data); + + // if there's nothing in the write buffer, then that means + // that nothing more will ever be provided + if (stream._writableState.length) throw new Error('Calling transform done when ws.length != 0'); + + if (stream._transformState.transforming) throw new Error('Calling transform done when still transforming'); + + return stream.push(null); +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/_stream_writable.js b/node_modules/readable-stream/lib/_stream_writable.js new file mode 100644 index 0000000..e1e897f --- /dev/null +++ b/node_modules/readable-stream/lib/_stream_writable.js @@ -0,0 +1,685 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. + +// A bit simpler than readable streams. +// Implement an async ._write(chunk, encoding, cb), and it'll handle all +// the drain event emission and buffering. 
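To make the header comment above concrete ("implement an async `._write(chunk, encoding, cb)` and the drain emission and buffering are handled for you"), here is a minimal consumer-side sketch. It is not part of the vendored file; the in-memory `chunks` sink is an illustrative assumption, and it relies on the `write` constructor option that the `Writable` constructor further down in this file assigns to `_write`.

```js
// Illustrative sketch only (not part of the vendored file): a Writable that
// buffers chunks in memory; drain emission and buffering are handled by the
// Writable machinery, as the header comment above says.
var Writable = require('stream').Writable

var chunks = []
var sink = new Writable({
  write: function (chunk, encoding, cb) {
    chunks.push(chunk)
    cb() // call cb(err) once the chunk has been handled
  }
})

sink.on('finish', function () {
  console.log('received:', Buffer.concat(chunks).toString())
})

sink.write('hello, ')
sink.end('world')
```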
+ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +module.exports = Writable; + +/* */ +function WriteReq(chunk, encoding, cb) { + this.chunk = chunk; + this.encoding = encoding; + this.callback = cb; + this.next = null; +} + +// It seems a linked list but it is not +// there will be only 2 of these for each stream +function CorkedRequest(state) { + var _this = this; + + this.next = null; + this.entry = null; + this.finish = function () { + onCorkedFinish(_this, state); + }; +} +/* */ + +/**/ +var asyncWrite = !process.browser && ['v0.10', 'v0.9.'].indexOf(process.version.slice(0, 5)) > -1 ? setImmediate : pna.nextTick; +/**/ + +/**/ +var Duplex; +/**/ + +Writable.WritableState = WritableState; + +/**/ +var util = Object.create(require('core-util-is')); +util.inherits = require('inherits'); +/**/ + +/**/ +var internalUtil = { + deprecate: require('util-deprecate') +}; +/**/ + +/**/ +var Stream = require('./internal/streams/stream'); +/**/ + +/**/ + +var Buffer = require('safe-buffer').Buffer; +var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? self : {}).Uint8Array || function () {}; +function _uint8ArrayToBuffer(chunk) { + return Buffer.from(chunk); +} +function _isUint8Array(obj) { + return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; +} + +/**/ + +var destroyImpl = require('./internal/streams/destroy'); + +util.inherits(Writable, Stream); + +function nop() {} + +function WritableState(options, stream) { + Duplex = Duplex || require('./_stream_duplex'); + + options = options || {}; + + // Duplex streams are both readable and writable, but share + // the same options object. + // However, some cases require setting options to different + // values for the readable and the writable sides of the duplex stream. + // These options can be provided separately as readableXXX and writableXXX. + var isDuplex = stream instanceof Duplex; + + // object stream flag to indicate whether or not this stream + // contains buffers or objects. + this.objectMode = !!options.objectMode; + + if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; + + // the point at which write() starts returning false + // Note: 0 is a valid value, means that we always return false if + // the entire buffer is not flushed immediately on write() + var hwm = options.highWaterMark; + var writableHwm = options.writableHighWaterMark; + var defaultHwm = this.objectMode ? 16 : 16 * 1024; + + if (hwm || hwm === 0) this.highWaterMark = hwm;else if (isDuplex && (writableHwm || writableHwm === 0)) this.highWaterMark = writableHwm;else this.highWaterMark = defaultHwm; + + // cast to ints. + this.highWaterMark = Math.floor(this.highWaterMark); + + // if _final has been called + this.finalCalled = false; + + // drain event flag. + this.needDrain = false; + // at the start of calling end() + this.ending = false; + // when end() has been called, and returned + this.ended = false; + // when 'finish' is emitted + this.finished = false; + + // has it been destroyed + this.destroyed = false; + + // should we decode strings into buffers before passing to _write? + // this is here so that some node-core streams can optimize string + // handling at a lower level. + var noDecode = options.decodeStrings === false; + this.decodeStrings = !noDecode; + + // Crypto is kind of old and crusty. Historically, its default string + // encoding is 'binary' so we have to make this configurable. 
+ // Everything else in the universe uses 'utf8', though. + this.defaultEncoding = options.defaultEncoding || 'utf8'; + + // not an actual buffer we keep track of, but a measurement + // of how much we're waiting to get pushed to some underlying + // socket or file. + this.length = 0; + + // a flag to see when we're in the middle of a write. + this.writing = false; + + // when true all writes will be buffered until .uncork() call + this.corked = 0; + + // a flag to be able to tell if the onwrite cb is called immediately, + // or on a later tick. We set this to true at first, because any + // actions that shouldn't happen until "later" should generally also + // not happen before the first write call. + this.sync = true; + + // a flag to know if we're processing previously buffered items, which + // may call the _write() callback in the same tick, so that we don't + // end up in an overlapped onwrite situation. + this.bufferProcessing = false; + + // the callback that's passed to _write(chunk,cb) + this.onwrite = function (er) { + onwrite(stream, er); + }; + + // the callback that the user supplies to write(chunk,encoding,cb) + this.writecb = null; + + // the amount that is being written when _write is called. + this.writelen = 0; + + this.bufferedRequest = null; + this.lastBufferedRequest = null; + + // number of pending user-supplied write callbacks + // this must be 0 before 'finish' can be emitted + this.pendingcb = 0; + + // emit prefinish if the only thing we're waiting for is _write cbs + // This is relevant for synchronous Transform streams + this.prefinished = false; + + // True if the error was already emitted and should not be thrown again + this.errorEmitted = false; + + // count buffered requests + this.bufferedRequestCount = 0; + + // allocate the first CorkedRequest, there is always + // one allocated and free to use, and we maintain at most two + this.corkedRequestsFree = new CorkedRequest(this); +} + +WritableState.prototype.getBuffer = function getBuffer() { + var current = this.bufferedRequest; + var out = []; + while (current) { + out.push(current); + current = current.next; + } + return out; +}; + +(function () { + try { + Object.defineProperty(WritableState.prototype, 'buffer', { + get: internalUtil.deprecate(function () { + return this.getBuffer(); + }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') + }); + } catch (_) {} +})(); + +// Test _writableState for inheritance to account for Duplex streams, +// whose prototype chain only points to Readable. +var realHasInstance; +if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { + realHasInstance = Function.prototype[Symbol.hasInstance]; + Object.defineProperty(Writable, Symbol.hasInstance, { + value: function (object) { + if (realHasInstance.call(this, object)) return true; + if (this !== Writable) return false; + + return object && object._writableState instanceof WritableState; + } + }); +} else { + realHasInstance = function (object) { + return object instanceof this; + }; +} + +function Writable(options) { + Duplex = Duplex || require('./_stream_duplex'); + + // Writable ctor is applied to Duplexes, too. + // `realHasInstance` is necessary because using plain `instanceof` + // would return false, as no `_writableState` property is attached. 
+ + // Trying to use the custom `instanceof` for Writable here will also break the + // Node.js LazyTransform implementation, which has a non-trivial getter for + // `_writableState` that would lead to infinite recursion. + if (!realHasInstance.call(Writable, this) && !(this instanceof Duplex)) { + return new Writable(options); + } + + this._writableState = new WritableState(options, this); + + // legacy. + this.writable = true; + + if (options) { + if (typeof options.write === 'function') this._write = options.write; + + if (typeof options.writev === 'function') this._writev = options.writev; + + if (typeof options.destroy === 'function') this._destroy = options.destroy; + + if (typeof options.final === 'function') this._final = options.final; + } + + Stream.call(this); +} + +// Otherwise people can pipe Writable streams, which is just wrong. +Writable.prototype.pipe = function () { + this.emit('error', new Error('Cannot pipe, not readable')); +}; + +function writeAfterEnd(stream, cb) { + var er = new Error('write after end'); + // TODO: defer error events consistently everywhere, not just the cb + stream.emit('error', er); + pna.nextTick(cb, er); +} + +// Checks that a user-supplied chunk is valid, especially for the particular +// mode the stream is in. Currently this means that `null` is never accepted +// and undefined/non-string values are only allowed in object mode. +function validChunk(stream, state, chunk, cb) { + var valid = true; + var er = false; + + if (chunk === null) { + er = new TypeError('May not write null values to stream'); + } else if (typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { + er = new TypeError('Invalid non-string/buffer chunk'); + } + if (er) { + stream.emit('error', er); + pna.nextTick(cb, er); + valid = false; + } + return valid; +} + +Writable.prototype.write = function (chunk, encoding, cb) { + var state = this._writableState; + var ret = false; + var isBuf = !state.objectMode && _isUint8Array(chunk); + + if (isBuf && !Buffer.isBuffer(chunk)) { + chunk = _uint8ArrayToBuffer(chunk); + } + + if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; + + if (typeof cb !== 'function') cb = nop; + + if (state.ended) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { + state.pendingcb++; + ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); + } + + return ret; +}; + +Writable.prototype.cork = function () { + var state = this._writableState; + + state.corked++; +}; + +Writable.prototype.uncork = function () { + var state = this._writableState; + + if (state.corked) { + state.corked--; + + if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); + } +}; + +Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { + // node::ParseEncoding() requires lower case. 
+ if (typeof encoding === 'string') encoding = encoding.toLowerCase(); + if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new TypeError('Unknown encoding: ' + encoding); + this._writableState.defaultEncoding = encoding; + return this; +}; + +function decodeChunk(state, chunk, encoding) { + if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + return chunk; +} + +Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { + // making it explicit this property is not enumerable + // because otherwise some prototype manipulation in + // userland will fail + enumerable: false, + get: function () { + return this._writableState.highWaterMark; + } +}); + +// if we're already writing something, then just put this +// in the queue, and wait our turn. Otherwise, call _write +// If we return false, then we need a drain event, so set that flag. +function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { + if (!isBuf) { + var newChunk = decodeChunk(state, chunk, encoding); + if (chunk !== newChunk) { + isBuf = true; + encoding = 'buffer'; + chunk = newChunk; + } + } + var len = state.objectMode ? 1 : chunk.length; + + state.length += len; + + var ret = state.length < state.highWaterMark; + // we must ensure that previous needDrain will not be reset to false. + if (!ret) state.needDrain = true; + + if (state.writing || state.corked) { + var last = state.lastBufferedRequest; + state.lastBufferedRequest = { + chunk: chunk, + encoding: encoding, + isBuf: isBuf, + callback: cb, + next: null + }; + if (last) { + last.next = state.lastBufferedRequest; + } else { + state.bufferedRequest = state.lastBufferedRequest; + } + state.bufferedRequestCount += 1; + } else { + doWrite(stream, state, false, len, chunk, encoding, cb); + } + + return ret; +} + +function doWrite(stream, state, writev, len, chunk, encoding, cb) { + state.writelen = len; + state.writecb = cb; + state.writing = true; + state.sync = true; + if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); + state.sync = false; +} + +function onwriteError(stream, state, sync, er, cb) { + --state.pendingcb; + + if (sync) { + // defer the callback if we are being called synchronously + // to avoid piling up things on the stack + pna.nextTick(cb, er); + // this can emit finish, and it will always happen + // after error + pna.nextTick(finishMaybe, stream, state); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + } else { + // the caller expect this to happen before if + // it is async + cb(er); + stream._writableState.errorEmitted = true; + stream.emit('error', er); + // this can emit finish, but finish must + // always follow error + finishMaybe(stream, state); + } +} + +function onwriteStateUpdate(state) { + state.writing = false; + state.writecb = null; + state.length -= state.writelen; + state.writelen = 0; +} + +function onwrite(stream, er) { + var state = stream._writableState; + var sync = state.sync; + var cb = state.writecb; + + onwriteStateUpdate(state); + + if (er) onwriteError(stream, state, sync, er, cb);else { + // Check if we're actually ready to finish, but don't emit yet + var finished = needFinish(state); + + if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { + clearBuffer(stream, state); + } + + if (sync) { + /**/ + 
asyncWrite(afterWrite, stream, state, finished, cb); + /**/ + } else { + afterWrite(stream, state, finished, cb); + } + } +} + +function afterWrite(stream, state, finished, cb) { + if (!finished) onwriteDrain(stream, state); + state.pendingcb--; + cb(); + finishMaybe(stream, state); +} + +// Must force callback to be called on nextTick, so that we don't +// emit 'drain' before the write() consumer gets the 'false' return +// value, and has a chance to attach a 'drain' listener. +function onwriteDrain(stream, state) { + if (state.length === 0 && state.needDrain) { + state.needDrain = false; + stream.emit('drain'); + } +} + +// if there's something in the buffer waiting, then process it +function clearBuffer(stream, state) { + state.bufferProcessing = true; + var entry = state.bufferedRequest; + + if (stream._writev && entry && entry.next) { + // Fast case, write everything using _writev() + var l = state.bufferedRequestCount; + var buffer = new Array(l); + var holder = state.corkedRequestsFree; + holder.entry = entry; + + var count = 0; + var allBuffers = true; + while (entry) { + buffer[count] = entry; + if (!entry.isBuf) allBuffers = false; + entry = entry.next; + count += 1; + } + buffer.allBuffers = allBuffers; + + doWrite(stream, state, true, state.length, buffer, '', holder.finish); + + // doWrite is almost always async, defer these to save a bit of time + // as the hot path ends with doWrite + state.pendingcb++; + state.lastBufferedRequest = null; + if (holder.next) { + state.corkedRequestsFree = holder.next; + holder.next = null; + } else { + state.corkedRequestsFree = new CorkedRequest(state); + } + state.bufferedRequestCount = 0; + } else { + // Slow case, write chunks one-by-one + while (entry) { + var chunk = entry.chunk; + var encoding = entry.encoding; + var cb = entry.callback; + var len = state.objectMode ? 1 : chunk.length; + + doWrite(stream, state, false, len, chunk, encoding, cb); + entry = entry.next; + state.bufferedRequestCount--; + // if we didn't call the onwrite immediately, then + // it means that we need to wait until it does. + // also, that means that the chunk and cb are currently + // being processed, so move the buffer counter past them. + if (state.writing) { + break; + } + } + + if (entry === null) state.lastBufferedRequest = null; + } + + state.bufferedRequest = entry; + state.bufferProcessing = false; +} + +Writable.prototype._write = function (chunk, encoding, cb) { + cb(new Error('_write() is not implemented')); +}; + +Writable.prototype._writev = null; + +Writable.prototype.end = function (chunk, encoding, cb) { + var state = this._writableState; + + if (typeof chunk === 'function') { + cb = chunk; + chunk = null; + encoding = null; + } else if (typeof encoding === 'function') { + cb = encoding; + encoding = null; + } + + if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); + + // .end() fully uncorks + if (state.corked) { + state.corked = 1; + this.uncork(); + } + + // ignore unnecessary end() calls. 
+ if (!state.ending) endWritable(this, state, cb); +}; + +function needFinish(state) { + return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; +} +function callFinal(stream, state) { + stream._final(function (err) { + state.pendingcb--; + if (err) { + stream.emit('error', err); + } + state.prefinished = true; + stream.emit('prefinish'); + finishMaybe(stream, state); + }); +} +function prefinish(stream, state) { + if (!state.prefinished && !state.finalCalled) { + if (typeof stream._final === 'function') { + state.pendingcb++; + state.finalCalled = true; + pna.nextTick(callFinal, stream, state); + } else { + state.prefinished = true; + stream.emit('prefinish'); + } + } +} + +function finishMaybe(stream, state) { + var need = needFinish(state); + if (need) { + prefinish(stream, state); + if (state.pendingcb === 0) { + state.finished = true; + stream.emit('finish'); + } + } + return need; +} + +function endWritable(stream, state, cb) { + state.ending = true; + finishMaybe(stream, state); + if (cb) { + if (state.finished) pna.nextTick(cb);else stream.once('finish', cb); + } + state.ended = true; + stream.writable = false; +} + +function onCorkedFinish(corkReq, state, err) { + var entry = corkReq.entry; + corkReq.entry = null; + while (entry) { + var cb = entry.callback; + state.pendingcb--; + cb(err); + entry = entry.next; + } + + // reuse the free corkReq. + state.corkedRequestsFree.next = corkReq; +} + +Object.defineProperty(Writable.prototype, 'destroyed', { + get: function () { + if (this._writableState === undefined) { + return false; + } + return this._writableState.destroyed; + }, + set: function (value) { + // we ignore the value if the stream + // has not been initialized yet + if (!this._writableState) { + return; + } + + // backward compatibility, the user is explicitly + // managing destroyed + this._writableState.destroyed = value; + } +}); + +Writable.prototype.destroy = destroyImpl.destroy; +Writable.prototype._undestroy = destroyImpl.undestroy; +Writable.prototype._destroy = function (err, cb) { + this.end(); + cb(err); +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/BufferList.js b/node_modules/readable-stream/lib/internal/streams/BufferList.js new file mode 100644 index 0000000..5e08097 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/BufferList.js @@ -0,0 +1,78 @@ +'use strict'; + +function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } + +var Buffer = require('safe-buffer').Buffer; +var util = require('util'); + +function copyBuffer(src, target, offset) { + src.copy(target, offset); +} + +module.exports = function () { + function BufferList() { + _classCallCheck(this, BufferList); + + this.head = null; + this.tail = null; + this.length = 0; + } + + BufferList.prototype.push = function push(v) { + var entry = { data: v, next: null }; + if (this.length > 0) this.tail.next = entry;else this.head = entry; + this.tail = entry; + ++this.length; + }; + + BufferList.prototype.unshift = function unshift(v) { + var entry = { data: v, next: this.head }; + if (this.length === 0) this.tail = entry; + this.head = entry; + ++this.length; + }; + + BufferList.prototype.shift = function shift() { + if (this.length === 0) return; + var ret = this.head.data; + if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; + --this.length; + return 
ret; + }; + + BufferList.prototype.clear = function clear() { + this.head = this.tail = null; + this.length = 0; + }; + + BufferList.prototype.join = function join(s) { + if (this.length === 0) return ''; + var p = this.head; + var ret = '' + p.data; + while (p = p.next) { + ret += s + p.data; + }return ret; + }; + + BufferList.prototype.concat = function concat(n) { + if (this.length === 0) return Buffer.alloc(0); + var ret = Buffer.allocUnsafe(n >>> 0); + var p = this.head; + var i = 0; + while (p) { + copyBuffer(p.data, ret, i); + i += p.data.length; + p = p.next; + } + return ret; + }; + + return BufferList; +}(); + +if (util && util.inspect && util.inspect.custom) { + module.exports.prototype[util.inspect.custom] = function () { + var obj = util.inspect({ length: this.length }); + return this.constructor.name + ' ' + obj; + }; +} \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/destroy.js b/node_modules/readable-stream/lib/internal/streams/destroy.js new file mode 100644 index 0000000..85a8214 --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/destroy.js @@ -0,0 +1,84 @@ +'use strict'; + +/**/ + +var pna = require('process-nextick-args'); +/**/ + +// undocumented cb() API, needed for core, not for public API +function destroy(err, cb) { + var _this = this; + + var readableDestroyed = this._readableState && this._readableState.destroyed; + var writableDestroyed = this._writableState && this._writableState.destroyed; + + if (readableDestroyed || writableDestroyed) { + if (cb) { + cb(err); + } else if (err) { + if (!this._writableState) { + pna.nextTick(emitErrorNT, this, err); + } else if (!this._writableState.errorEmitted) { + this._writableState.errorEmitted = true; + pna.nextTick(emitErrorNT, this, err); + } + } + + return this; + } + + // we set destroyed to true before firing error callbacks in order + // to make it re-entrance safe in case destroy() is called within callbacks + + if (this._readableState) { + this._readableState.destroyed = true; + } + + // if this is a duplex stream mark the writable part as destroyed as well + if (this._writableState) { + this._writableState.destroyed = true; + } + + this._destroy(err || null, function (err) { + if (!cb && err) { + if (!_this._writableState) { + pna.nextTick(emitErrorNT, _this, err); + } else if (!_this._writableState.errorEmitted) { + _this._writableState.errorEmitted = true; + pna.nextTick(emitErrorNT, _this, err); + } + } else if (cb) { + cb(err); + } + }); + + return this; +} + +function undestroy() { + if (this._readableState) { + this._readableState.destroyed = false; + this._readableState.reading = false; + this._readableState.ended = false; + this._readableState.endEmitted = false; + } + + if (this._writableState) { + this._writableState.destroyed = false; + this._writableState.ended = false; + this._writableState.ending = false; + this._writableState.finalCalled = false; + this._writableState.prefinished = false; + this._writableState.finished = false; + this._writableState.errorEmitted = false; + } +} + +function emitErrorNT(self, err) { + self.emit('error', err); +} + +module.exports = { + destroy: destroy, + undestroy: undestroy +}; \ No newline at end of file diff --git a/node_modules/readable-stream/lib/internal/streams/stream-browser.js b/node_modules/readable-stream/lib/internal/streams/stream-browser.js new file mode 100644 index 0000000..9332a3f --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/stream-browser.js @@ -0,0 +1 @@ 
+module.exports = require('events').EventEmitter; diff --git a/node_modules/readable-stream/lib/internal/streams/stream.js b/node_modules/readable-stream/lib/internal/streams/stream.js new file mode 100644 index 0000000..ce2ad5b --- /dev/null +++ b/node_modules/readable-stream/lib/internal/streams/stream.js @@ -0,0 +1 @@ +module.exports = require('stream'); diff --git a/node_modules/readable-stream/node_modules/safe-buffer/LICENSE b/node_modules/readable-stream/node_modules/safe-buffer/LICENSE new file mode 100644 index 0000000..0c068ce --- /dev/null +++ b/node_modules/readable-stream/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/readable-stream/node_modules/safe-buffer/README.md b/node_modules/readable-stream/node_modules/safe-buffer/README.md new file mode 100644 index 0000000..e9a81af --- /dev/null +++ b/node_modules/readable-stream/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. 
You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. + +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. 
+ +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. + +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. 
Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. + +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) 
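One straightforward defence for the `toHex` example above is to validate the argument type before any `Buffer` is allocated, so a `Number` can never reach the constructor. The following is an illustrative sketch, not part of the original README:

```js
// Illustrative sketch (not part of the original README): reject non-string
// input before it can ever reach a Buffer allocation, so a Number argument
// can never hand back uninitialized memory.
function toHex (str) {
  if (typeof str !== 'string') throw new TypeError('toHex expects a string')
  return Buffer.from(str, 'utf8').toString('hex')
}
```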
+ +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. 
Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. 
+ +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/readable-stream/node_modules/safe-buffer/index.d.ts b/node_modules/readable-stream/node_modules/safe-buffer/index.d.ts new file mode 100644 index 0000000..e9fed80 --- /dev/null +++ b/node_modules/readable-stream/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: 
boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. 
+ * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. + * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. 
+ * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/readable-stream/node_modules/safe-buffer/index.js b/node_modules/readable-stream/node_modules/safe-buffer/index.js new file mode 100644 index 0000000..22438da --- /dev/null +++ b/node_modules/readable-stream/node_modules/safe-buffer/index.js @@ -0,0 +1,62 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/readable-stream/node_modules/safe-buffer/package.json b/node_modules/readable-stream/node_modules/safe-buffer/package.json new file mode 100644 index 0000000..623fbc3 --- /dev/null +++ b/node_modules/readable-stream/node_modules/safe-buffer/package.json @@ -0,0 +1,37 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.1.2", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/readable-stream/package.json b/node_modules/readable-stream/package.json new file mode 100644 index 0000000..514c178 --- /dev/null +++ b/node_modules/readable-stream/package.json @@ -0,0 +1,52 @@ +{ + "name": "readable-stream", + "version": "2.3.8", + "description": "Streams3, a user-land copy of the stream library from Node.js", + "main": "readable.js", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + 
"string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + }, + "devDependencies": { + "assert": "^1.4.0", + "babel-polyfill": "^6.9.1", + "buffer": "^4.9.0", + "lolex": "^2.3.2", + "nyc": "^6.4.0", + "tap": "^0.7.0", + "tape": "^4.8.0" + }, + "scripts": { + "test": "tap test/parallel/*.js test/ours/*.js && node test/verify-dependencies.js", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js", + "cover": "nyc npm test", + "report": "nyc report --reporter=lcov" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/readable-stream" + }, + "keywords": [ + "readable", + "stream", + "pipe" + ], + "browser": { + "util": false, + "./readable.js": "./readable-browser.js", + "./writable.js": "./writable-browser.js", + "./duplex.js": "./duplex-browser.js", + "./lib/internal/streams/stream.js": "./lib/internal/streams/stream-browser.js" + }, + "nyc": { + "include": [ + "lib/**.js" + ] + }, + "license": "MIT" +} diff --git a/node_modules/readable-stream/passthrough.js b/node_modules/readable-stream/passthrough.js new file mode 100644 index 0000000..ffd791d --- /dev/null +++ b/node_modules/readable-stream/passthrough.js @@ -0,0 +1 @@ +module.exports = require('./readable').PassThrough diff --git a/node_modules/readable-stream/readable-browser.js b/node_modules/readable-stream/readable-browser.js new file mode 100644 index 0000000..e503725 --- /dev/null +++ b/node_modules/readable-stream/readable-browser.js @@ -0,0 +1,7 @@ +exports = module.exports = require('./lib/_stream_readable.js'); +exports.Stream = exports; +exports.Readable = exports; +exports.Writable = require('./lib/_stream_writable.js'); +exports.Duplex = require('./lib/_stream_duplex.js'); +exports.Transform = require('./lib/_stream_transform.js'); +exports.PassThrough = require('./lib/_stream_passthrough.js'); diff --git a/node_modules/readable-stream/readable.js b/node_modules/readable-stream/readable.js new file mode 100644 index 0000000..ec89ec5 --- /dev/null +++ b/node_modules/readable-stream/readable.js @@ -0,0 +1,19 @@ +var Stream = require('stream'); +if (process.env.READABLE_STREAM === 'disable' && Stream) { + module.exports = Stream; + exports = module.exports = Stream.Readable; + exports.Readable = Stream.Readable; + exports.Writable = Stream.Writable; + exports.Duplex = Stream.Duplex; + exports.Transform = Stream.Transform; + exports.PassThrough = Stream.PassThrough; + exports.Stream = Stream; +} else { + exports = module.exports = require('./lib/_stream_readable.js'); + exports.Stream = Stream || exports; + exports.Readable = exports; + exports.Writable = require('./lib/_stream_writable.js'); + exports.Duplex = require('./lib/_stream_duplex.js'); + exports.Transform = require('./lib/_stream_transform.js'); + exports.PassThrough = require('./lib/_stream_passthrough.js'); +} diff --git a/node_modules/readable-stream/transform.js b/node_modules/readable-stream/transform.js new file mode 100644 index 0000000..b1baba2 --- /dev/null +++ b/node_modules/readable-stream/transform.js @@ -0,0 +1 @@ +module.exports = require('./readable').Transform diff --git a/node_modules/readable-stream/writable-browser.js b/node_modules/readable-stream/writable-browser.js new file mode 100644 index 0000000..ebdde6a --- /dev/null +++ b/node_modules/readable-stream/writable-browser.js @@ -0,0 +1 @@ +module.exports = require('./lib/_stream_writable.js'); diff --git a/node_modules/readable-stream/writable.js b/node_modules/readable-stream/writable.js new file mode 100644 index 
0000000..3211a6f --- /dev/null +++ b/node_modules/readable-stream/writable.js @@ -0,0 +1,8 @@ +var Stream = require("stream") +var Writable = require("./lib/_stream_writable.js") + +if (process.env.READABLE_STREAM === 'disable') { + module.exports = Stream && Stream.Writable || Writable +} else { + module.exports = Writable +} diff --git a/node_modules/streamsearch/.eslintrc.js b/node_modules/streamsearch/.eslintrc.js new file mode 100644 index 0000000..be9311d --- /dev/null +++ b/node_modules/streamsearch/.eslintrc.js @@ -0,0 +1,5 @@ +'use strict'; + +module.exports = { + extends: '@mscdex/eslint-config', +}; diff --git a/node_modules/streamsearch/.github/workflows/ci.yml b/node_modules/streamsearch/.github/workflows/ci.yml new file mode 100644 index 0000000..29d5178 --- /dev/null +++ b/node_modules/streamsearch/.github/workflows/ci.yml @@ -0,0 +1,24 @@ +name: CI + +on: + pull_request: + push: + branches: [ master ] + +jobs: + tests-linux: + runs-on: ubuntu-latest + strategy: + fail-fast: false + matrix: + node-version: [10.x, 12.x, 14.x, 16.x] + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ matrix.node-version }} + uses: actions/setup-node@v1 + with: + node-version: ${{ matrix.node-version }} + - name: Install module + run: npm install + - name: Run tests + run: npm test diff --git a/node_modules/streamsearch/.github/workflows/lint.yml b/node_modules/streamsearch/.github/workflows/lint.yml new file mode 100644 index 0000000..9f9e1f5 --- /dev/null +++ b/node_modules/streamsearch/.github/workflows/lint.yml @@ -0,0 +1,23 @@ +name: lint + +on: + pull_request: + push: + branches: [ master ] + +env: + NODE_VERSION: 16.x + +jobs: + lint-js: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + - name: Use Node.js ${{ env.NODE_VERSION }} + uses: actions/setup-node@v1 + with: + node-version: ${{ env.NODE_VERSION }} + - name: Install ESLint + ESLint configs/plugins + run: npm install --only=dev + - name: Lint files + run: npm run lint diff --git a/node_modules/streamsearch/LICENSE b/node_modules/streamsearch/LICENSE new file mode 100644 index 0000000..290762e --- /dev/null +++ b/node_modules/streamsearch/LICENSE @@ -0,0 +1,19 @@ +Copyright Brian White. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. 
\ No newline at end of file diff --git a/node_modules/streamsearch/README.md b/node_modules/streamsearch/README.md new file mode 100644 index 0000000..c3934d1 --- /dev/null +++ b/node_modules/streamsearch/README.md @@ -0,0 +1,95 @@ +Description +=========== + +streamsearch is a module for [node.js](http://nodejs.org/) that allows searching a stream using the Boyer-Moore-Horspool algorithm. + +This module is based heavily on the Streaming Boyer-Moore-Horspool C++ implementation by Hongli Lai [here](https://github.com/FooBarWidget/boyer-moore-horspool). + + +Requirements +============ + +* [node.js](http://nodejs.org/) -- v10.0.0 or newer + + +Installation +============ + + npm install streamsearch + +Example +======= + +```js + const { inspect } = require('util'); + + const StreamSearch = require('streamsearch'); + + const needle = Buffer.from('\r\n'); + const ss = new StreamSearch(needle, (isMatch, data, start, end) => { + if (data) + console.log('data: ' + inspect(data.toString('latin1', start, end))); + if (isMatch) + console.log('match!'); + }); + + const chunks = [ + 'foo', + ' bar', + '\r', + '\n', + 'baz, hello\r', + '\n world.', + '\r\n Node.JS rules!!\r\n\r\n', + ]; + for (const chunk of chunks) + ss.push(Buffer.from(chunk)); + + // output: + // + // data: 'foo' + // data: ' bar' + // match! + // data: 'baz, hello' + // match! + // data: ' world.' + // match! + // data: ' Node.JS rules!!' + // match! + // data: '' + // match! +``` + + +API +=== + +Properties +---------- + +* **maxMatches** - < _integer_ > - The maximum number of matches. Defaults to `Infinity`. + +* **matches** - < _integer_ > - The current match count. + + +Functions +--------- + +* **(constructor)**(< _mixed_ >needle, < _function_ >callback) - Creates and returns a new instance for searching for a _Buffer_ or _string_ `needle`. `callback` is called any time there is non-matching data and/or there is a needle match. `callback` will be called with the following arguments: + + 1. `isMatch` - _boolean_ - Indicates whether a match has been found + + 2. `data` - _mixed_ - If set, this contains data that did not match the needle. + + 3. `start` - _integer_ - The index in `data` where the non-matching data begins (inclusive). + + 4. `end` - _integer_ - The index in `data` where the non-matching data ends (exclusive). + + 5. `isSafeData` - _boolean_ - Indicates if it is safe to store a reference to `data` (e.g. as-is or via `data.slice()`) or not, as in some cases `data` may point to a Buffer whose contents change over time. + +* **destroy**() - _(void)_ - Emits any last remaining unmatched data that may still be buffered and then resets internal state. + +* **push**(< _Buffer_ >chunk) - _integer_ - Processes `chunk`, searching for a match. The return value is the last processed index in `chunk` + 1. + +* **reset**() - _(void)_ - Resets internal state. Useful for when you wish to start searching a new/different stream for example. 
+ diff --git a/node_modules/streamsearch/lib/sbmh.js b/node_modules/streamsearch/lib/sbmh.js new file mode 100644 index 0000000..510cae2 --- /dev/null +++ b/node_modules/streamsearch/lib/sbmh.js @@ -0,0 +1,267 @@ +'use strict'; +/* + Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool +*/ +function memcmp(buf1, pos1, buf2, pos2, num) { + for (let i = 0; i < num; ++i) { + if (buf1[pos1 + i] !== buf2[pos2 + i]) + return false; + } + return true; +} + +class SBMH { + constructor(needle, cb) { + if (typeof cb !== 'function') + throw new Error('Missing match callback'); + + if (typeof needle === 'string') + needle = Buffer.from(needle); + else if (!Buffer.isBuffer(needle)) + throw new Error(`Expected Buffer for needle, got ${typeof needle}`); + + const needleLen = needle.length; + + this.maxMatches = Infinity; + this.matches = 0; + + this._cb = cb; + this._lookbehindSize = 0; + this._needle = needle; + this._bufPos = 0; + + this._lookbehind = Buffer.allocUnsafe(needleLen); + + // Initialize occurrence table. + this._occ = [ + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, 
needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen, needleLen, needleLen, + needleLen, needleLen, needleLen, needleLen + ]; + + // Populate occurrence table with analysis of the needle, ignoring the last + // letter. + if (needleLen > 1) { + for (let i = 0; i < needleLen - 1; ++i) + this._occ[needle[i]] = needleLen - 1 - i; + } + } + + reset() { + this.matches = 0; + this._lookbehindSize = 0; + this._bufPos = 0; + } + + push(chunk, pos) { + let result; + if (!Buffer.isBuffer(chunk)) + chunk = Buffer.from(chunk, 'latin1'); + const chunkLen = chunk.length; + this._bufPos = pos || 0; + while (result !== chunkLen && this.matches < this.maxMatches) + result = feed(this, chunk); + return result; + } + + destroy() { + const lbSize = this._lookbehindSize; + if (lbSize) + this._cb(false, this._lookbehind, 0, lbSize, false); + this.reset(); + } +} + +function feed(self, data) { + const len = data.length; + const needle = self._needle; + const needleLen = needle.length; + + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehindSize - 2] + let pos = -self._lookbehindSize; + const lastNeedleCharPos = needleLen - 1; + const lastNeedleChar = needle[lastNeedleCharPos]; + const end = len - needleLen; + const occ = self._occ; + const lookbehind = self._lookbehind; + + if (pos < 0) { + // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= end) { + const nextPos = pos + lastNeedleCharPos; + const ch = (nextPos < 0 + ? lookbehind[self._lookbehindSize + nextPos] + : data[nextPos]); + + if (ch === lastNeedleChar + && matchNeedle(self, data, pos, lastNeedleCharPos)) { + self._lookbehindSize = 0; + ++self.matches; + if (pos > -self._lookbehindSize) + self._cb(true, lookbehind, 0, self._lookbehindSize + pos, false); + else + self._cb(true, undefined, 0, 0, true); + + return (self._bufPos = pos + needleLen); + } + + pos += occ[ch]; + } + + // No match. + + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !matchNeedle(self, data, pos, len - pos)) + ++pos; + + if (pos < 0) { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = self._lookbehindSize + pos; + + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. 
+ self._cb(false, lookbehind, 0, bytesToCutOff, false); + } + + self._lookbehindSize -= bytesToCutOff; + lookbehind.copy(lookbehind, 0, bytesToCutOff, self._lookbehindSize); + lookbehind.set(data, self._lookbehindSize); + self._lookbehindSize += len; + + self._bufPos = len; + return len; + } + + // Discard lookbehind buffer. + self._cb(false, lookbehind, 0, self._lookbehindSize, false); + self._lookbehindSize = 0; + } + + pos += self._bufPos; + + const firstNeedleChar = needle[0]; + + // Lookbehind buffer is now empty. Perform Boyer-Moore-Horspool + // search with optimized character lookup code that only considers + // the current round's haystack data. + while (pos <= end) { + const ch = data[pos + lastNeedleCharPos]; + + if (ch === lastNeedleChar + && data[pos] === firstNeedleChar + && memcmp(needle, 0, data, pos, lastNeedleCharPos)) { + ++self.matches; + if (pos > 0) + self._cb(true, data, self._bufPos, pos, true); + else + self._cb(true, undefined, 0, 0, true); + + return (self._bufPos = pos + needleLen); + } + + pos += occ[ch]; + } + + // There was no match. If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while (pos < len) { + if (data[pos] !== firstNeedleChar + || !memcmp(data, pos, needle, 0, len - pos)) { + ++pos; + continue; + } + data.copy(lookbehind, 0, pos, len); + self._lookbehindSize = len - pos; + break; + } + + // Everything until `pos` is guaranteed not to contain needle data. + if (pos > 0) + self._cb(false, data, self._bufPos, pos < len ? pos : len, true); + + self._bufPos = len; + return len; +} + +function matchNeedle(self, data, pos, len) { + const lb = self._lookbehind; + const lbSize = self._lookbehindSize; + const needle = self._needle; + + for (let i = 0; i < len; ++i, ++pos) { + const ch = (pos < 0 ? 
lb[lbSize + pos] : data[pos]); + if (ch !== needle[i]) + return false; + } + return true; +} + +module.exports = SBMH; diff --git a/node_modules/streamsearch/package.json b/node_modules/streamsearch/package.json new file mode 100644 index 0000000..51df8f9 --- /dev/null +++ b/node_modules/streamsearch/package.json @@ -0,0 +1,34 @@ +{ + "name": "streamsearch", + "version": "1.1.0", + "author": "Brian White ", + "description": "Streaming Boyer-Moore-Horspool searching for node.js", + "main": "./lib/sbmh.js", + "engines": { + "node": ">=10.0.0" + }, + "devDependencies": { + "@mscdex/eslint-config": "^1.1.0", + "eslint": "^7.32.0" + }, + "scripts": { + "test": "node test/test.js", + "lint": "eslint --cache --report-unused-disable-directives --ext=.js .eslintrc.js lib test", + "lint:fix": "npm run lint -- --fix" + }, + "keywords": [ + "stream", + "horspool", + "boyer-moore-horspool", + "boyer-moore", + "search" + ], + "licenses": [{ + "type": "MIT", + "url": "http://github.com/mscdex/streamsearch/raw/master/LICENSE" + }], + "repository": { + "type": "git", + "url": "http://github.com/mscdex/streamsearch.git" + } +} diff --git a/node_modules/streamsearch/test/test.js b/node_modules/streamsearch/test/test.js new file mode 100644 index 0000000..39a04d7 --- /dev/null +++ b/node_modules/streamsearch/test/test.js @@ -0,0 +1,70 @@ +'use strict'; + +const assert = require('assert'); + +const StreamSearch = require('../lib/sbmh.js'); + +[ + { + needle: '\r\n', + chunks: [ + 'foo', + ' bar', + '\r', + '\n', + 'baz, hello\r', + '\n world.', + '\r\n Node.JS rules!!\r\n\r\n', + ], + expect: [ + [false, 'foo'], + [false, ' bar'], + [ true, null], + [false, 'baz, hello'], + [ true, null], + [false, ' world.'], + [ true, null], + [ true, ' Node.JS rules!!'], + [ true, ''], + ], + }, + { + needle: '---foobarbaz', + chunks: [ + '---foobarbaz', + 'asdf', + '\r\n', + '---foobarba', + '---foobar', + 'ba', + '\r\n---foobarbaz--\r\n', + ], + expect: [ + [ true, null], + [false, 'asdf'], + [false, '\r\n'], + [false, '---foobarba'], + [false, '---foobarba'], + [ true, '\r\n'], + [false, '--\r\n'], + ], + }, +].forEach((test, i) => { + console.log(`Running test #${i + 1}`); + const { needle, chunks, expect } = test; + + const results = []; + const ss = new StreamSearch(Buffer.from(needle), + (isMatch, data, start, end) => { + if (data) + data = data.toString('latin1', start, end); + else + data = null; + results.push([isMatch, data]); + }); + + for (const chunk of chunks) + ss.push(Buffer.from(chunk)); + + assert.deepStrictEqual(results, expect); +}); diff --git a/node_modules/string_decoder/.travis.yml b/node_modules/string_decoder/.travis.yml new file mode 100644 index 0000000..3347a72 --- /dev/null +++ b/node_modules/string_decoder/.travis.yml @@ -0,0 +1,50 @@ +sudo: false +language: node_js +before_install: + - npm install -g npm@2 + - test $NPM_LEGACY && npm install -g npm@latest-3 || npm install npm -g +notifications: + email: false +matrix: + fast_finish: true + include: + - node_js: '0.8' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.10' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.11' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: '0.12' + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 1 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 2 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 3 + env: + - TASK=test + - NPM_LEGACY=true + - node_js: 4 + env: TASK=test + - node_js: 5 + env: TASK=test + - node_js: 6 + env: TASK=test + - node_js: 7 + env: TASK=test + - node_js: 8 
+ env: TASK=test + - node_js: 9 + env: TASK=test diff --git a/node_modules/string_decoder/LICENSE b/node_modules/string_decoder/LICENSE new file mode 100644 index 0000000..778edb2 --- /dev/null +++ b/node_modules/string_decoder/LICENSE @@ -0,0 +1,48 @@ +Node.js is licensed for use as follows: + +""" +Copyright Node.js contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + +This license applies to parts of Node.js originating from the +https://github.com/joyent/node repository: + +""" +Copyright Joyent, Inc. and other Node contributors. All rights reserved. +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to +deal in the Software without restriction, including without limitation the +rights to use, copy, modify, merge, publish, distribute, sublicense, and/or +sell copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS +IN THE SOFTWARE. +""" + diff --git a/node_modules/string_decoder/README.md b/node_modules/string_decoder/README.md new file mode 100644 index 0000000..5fd5831 --- /dev/null +++ b/node_modules/string_decoder/README.md @@ -0,0 +1,47 @@ +# string_decoder + +***Node-core v8.9.4 string_decoder for userland*** + + +[![NPM](https://nodei.co/npm/string_decoder.png?downloads=true&downloadRank=true)](https://nodei.co/npm/string_decoder/) +[![NPM](https://nodei.co/npm-dl/string_decoder.png?&months=6&height=3)](https://nodei.co/npm/string_decoder/) + + +```bash +npm install --save string_decoder +``` + +***Node-core string_decoder for userland*** + +This package is a mirror of the string_decoder implementation in Node-core. + +Full documentation may be found on the [Node.js website](https://nodejs.org/dist/v8.9.4/docs/api/). + +As of version 1.0.0 **string_decoder** uses semantic versioning. 
+ +## Previous versions + +Previous version numbers match the versions found in Node core, e.g. 0.10.24 matches Node 0.10.24, likewise 0.11.10 matches Node 0.11.10. + +## Update + +The *build/* directory contains a build script that will scrape the source from the [nodejs/node](https://github.com/nodejs/node) repo given a specific Node version. + +## Streams Working Group + +`string_decoder` is maintained by the Streams Working Group, which +oversees the development and maintenance of the Streams API within +Node.js. The responsibilities of the Streams Working Group include: + +* Addressing stream issues on the Node.js issue tracker. +* Authoring and editing stream documentation within the Node.js project. +* Reviewing changes to stream subclasses within the Node.js project. +* Redirecting changes to streams from the Node.js project to this + project. +* Assisting in the implementation of stream providers within Node.js. +* Recommending versions of `readable-stream` to be included in Node.js. +* Messaging about the future of streams to give the community advance + notice of changes. + +See [readable-stream](https://github.com/nodejs/readable-stream) for +more details. diff --git a/node_modules/string_decoder/lib/string_decoder.js b/node_modules/string_decoder/lib/string_decoder.js new file mode 100644 index 0000000..2e89e63 --- /dev/null +++ b/node_modules/string_decoder/lib/string_decoder.js @@ -0,0 +1,296 @@ +// Copyright Joyent, Inc. and other Node contributors. +// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. 
+ +'use strict'; + +/**/ + +var Buffer = require('safe-buffer').Buffer; +/**/ + +var isEncoding = Buffer.isEncoding || function (encoding) { + encoding = '' + encoding; + switch (encoding && encoding.toLowerCase()) { + case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': + return true; + default: + return false; + } +}; + +function _normalizeEncoding(enc) { + if (!enc) return 'utf8'; + var retried; + while (true) { + switch (enc) { + case 'utf8': + case 'utf-8': + return 'utf8'; + case 'ucs2': + case 'ucs-2': + case 'utf16le': + case 'utf-16le': + return 'utf16le'; + case 'latin1': + case 'binary': + return 'latin1'; + case 'base64': + case 'ascii': + case 'hex': + return enc; + default: + if (retried) return; // undefined + enc = ('' + enc).toLowerCase(); + retried = true; + } + } +}; + +// Do not cache `Buffer.isEncoding` when checking encoding names as some +// modules monkey-patch it to support additional encodings +function normalizeEncoding(enc) { + var nenc = _normalizeEncoding(enc); + if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); + return nenc || enc; +} + +// StringDecoder provides an interface for efficiently splitting a series of +// buffers into a series of JS strings without breaking apart multi-byte +// characters. +exports.StringDecoder = StringDecoder; +function StringDecoder(encoding) { + this.encoding = normalizeEncoding(encoding); + var nb; + switch (this.encoding) { + case 'utf16le': + this.text = utf16Text; + this.end = utf16End; + nb = 4; + break; + case 'utf8': + this.fillLast = utf8FillLast; + nb = 4; + break; + case 'base64': + this.text = base64Text; + this.end = base64End; + nb = 3; + break; + default: + this.write = simpleWrite; + this.end = simpleEnd; + return; + } + this.lastNeed = 0; + this.lastTotal = 0; + this.lastChar = Buffer.allocUnsafe(nb); +} + +StringDecoder.prototype.write = function (buf) { + if (buf.length === 0) return ''; + var r; + var i; + if (this.lastNeed) { + r = this.fillLast(buf); + if (r === undefined) return ''; + i = this.lastNeed; + this.lastNeed = 0; + } else { + i = 0; + } + if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); + return r || ''; +}; + +StringDecoder.prototype.end = utf8End; + +// Returns only complete characters in a Buffer +StringDecoder.prototype.text = utf8Text; + +// Attempts to complete a partial non-UTF-8 character using bytes from a Buffer +StringDecoder.prototype.fillLast = function (buf) { + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); + this.lastNeed -= buf.length; +}; + +// Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a +// continuation byte. If an invalid byte is detected, -2 is returned. +function utf8CheckByte(byte) { + if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; + return byte >> 6 === 0x02 ? -1 : -2; +} + +// Checks at most 3 bytes at the end of a Buffer in order to detect an +// incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) +// needed to complete the UTF-8 character (if applicable) are returned. 
+function utf8CheckIncomplete(self, buf, i) { + var j = buf.length - 1; + if (j < i) return 0; + var nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 1; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) self.lastNeed = nb - 2; + return nb; + } + if (--j < i || nb === -2) return 0; + nb = utf8CheckByte(buf[j]); + if (nb >= 0) { + if (nb > 0) { + if (nb === 2) nb = 0;else self.lastNeed = nb - 3; + } + return nb; + } + return 0; +} + +// Validates as many continuation bytes for a multi-byte UTF-8 character as +// needed or are available. If we see a non-continuation byte where we expect +// one, we "replace" the validated continuation bytes we've seen so far with +// a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding +// behavior. The continuation byte check is included three times in the case +// where all of the continuation bytes for a character exist in the same buffer. +// It is also done this way as a slight performance increase instead of using a +// loop. +function utf8CheckExtraBytes(self, buf, p) { + if ((buf[0] & 0xC0) !== 0x80) { + self.lastNeed = 0; + return '\ufffd'; + } + if (self.lastNeed > 1 && buf.length > 1) { + if ((buf[1] & 0xC0) !== 0x80) { + self.lastNeed = 1; + return '\ufffd'; + } + if (self.lastNeed > 2 && buf.length > 2) { + if ((buf[2] & 0xC0) !== 0x80) { + self.lastNeed = 2; + return '\ufffd'; + } + } + } +} + +// Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. +function utf8FillLast(buf) { + var p = this.lastTotal - this.lastNeed; + var r = utf8CheckExtraBytes(this, buf, p); + if (r !== undefined) return r; + if (this.lastNeed <= buf.length) { + buf.copy(this.lastChar, p, 0, this.lastNeed); + return this.lastChar.toString(this.encoding, 0, this.lastTotal); + } + buf.copy(this.lastChar, p, 0, buf.length); + this.lastNeed -= buf.length; +} + +// Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a +// partial character, the character's bytes are buffered until the required +// number of bytes are available. +function utf8Text(buf, i) { + var total = utf8CheckIncomplete(this, buf, i); + if (!this.lastNeed) return buf.toString('utf8', i); + this.lastTotal = total; + var end = buf.length - (total - this.lastNeed); + buf.copy(this.lastChar, 0, end); + return buf.toString('utf8', i, end); +} + +// For UTF-8, a replacement character is added when ending on a partial +// character. +function utf8End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + '\ufffd'; + return r; +} + +// UTF-16LE typically needs two bytes per character, but even if we have an even +// number of bytes available, we need to check if we end on a leading/high +// surrogate. In that case, we need to wait for the next two bytes in order to +// decode the last character properly. 
+function utf16Text(buf, i) { + if ((buf.length - i) % 2 === 0) { + var r = buf.toString('utf16le', i); + if (r) { + var c = r.charCodeAt(r.length - 1); + if (c >= 0xD800 && c <= 0xDBFF) { + this.lastNeed = 2; + this.lastTotal = 4; + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + return r.slice(0, -1); + } + } + return r; + } + this.lastNeed = 1; + this.lastTotal = 2; + this.lastChar[0] = buf[buf.length - 1]; + return buf.toString('utf16le', i, buf.length - 1); +} + +// For UTF-16LE we do not explicitly append special replacement characters if we +// end on a partial character, we simply let v8 handle that. +function utf16End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) { + var end = this.lastTotal - this.lastNeed; + return r + this.lastChar.toString('utf16le', 0, end); + } + return r; +} + +function base64Text(buf, i) { + var n = (buf.length - i) % 3; + if (n === 0) return buf.toString('base64', i); + this.lastNeed = 3 - n; + this.lastTotal = 3; + if (n === 1) { + this.lastChar[0] = buf[buf.length - 1]; + } else { + this.lastChar[0] = buf[buf.length - 2]; + this.lastChar[1] = buf[buf.length - 1]; + } + return buf.toString('base64', i, buf.length - n); +} + +function base64End(buf) { + var r = buf && buf.length ? this.write(buf) : ''; + if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); + return r; +} + +// Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) +function simpleWrite(buf) { + return buf.toString(this.encoding); +} + +function simpleEnd(buf) { + return buf && buf.length ? this.write(buf) : ''; +} \ No newline at end of file diff --git a/node_modules/string_decoder/node_modules/safe-buffer/LICENSE b/node_modules/string_decoder/node_modules/safe-buffer/LICENSE new file mode 100644 index 0000000..0c068ce --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) Feross Aboukhadijeh + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/node_modules/string_decoder/node_modules/safe-buffer/README.md b/node_modules/string_decoder/node_modules/safe-buffer/README.md new file mode 100644 index 0000000..e9a81af --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/README.md @@ -0,0 +1,584 @@ +# safe-buffer [![travis][travis-image]][travis-url] [![npm][npm-image]][npm-url] [![downloads][downloads-image]][downloads-url] [![javascript style guide][standard-image]][standard-url] + +[travis-image]: https://img.shields.io/travis/feross/safe-buffer/master.svg +[travis-url]: https://travis-ci.org/feross/safe-buffer +[npm-image]: https://img.shields.io/npm/v/safe-buffer.svg +[npm-url]: https://npmjs.org/package/safe-buffer +[downloads-image]: https://img.shields.io/npm/dm/safe-buffer.svg +[downloads-url]: https://npmjs.org/package/safe-buffer +[standard-image]: https://img.shields.io/badge/code_style-standard-brightgreen.svg +[standard-url]: https://standardjs.com + +#### Safer Node.js Buffer API + +**Use the new Node.js Buffer APIs (`Buffer.from`, `Buffer.alloc`, +`Buffer.allocUnsafe`, `Buffer.allocUnsafeSlow`) in all versions of Node.js.** + +**Uses the built-in implementation when available.** + +## install + +``` +npm install safe-buffer +``` + +## usage + +The goal of this package is to provide a safe replacement for the node.js `Buffer`. + +It's a drop-in replacement for `Buffer`. You can use it by adding one `require` line to +the top of your node.js modules: + +```js +var Buffer = require('safe-buffer').Buffer + +// Existing buffer code will continue to work without issues: + +new Buffer('hey', 'utf8') +new Buffer([1, 2, 3], 'utf8') +new Buffer(obj) +new Buffer(16) // create an uninitialized buffer (potentially unsafe) + +// But you can use these new explicit APIs to make clear what you want: + +Buffer.from('hey', 'utf8') // convert from many types to a Buffer +Buffer.alloc(16) // create a zero-filled buffer (safe) +Buffer.allocUnsafe(16) // create an uninitialized buffer (potentially unsafe) +``` + +## api + +### Class Method: Buffer.from(array) + + +* `array` {Array} + +Allocates a new `Buffer` using an `array` of octets. + +```js +const buf = Buffer.from([0x62,0x75,0x66,0x66,0x65,0x72]); + // creates a new Buffer containing ASCII bytes + // ['b','u','f','f','e','r'] +``` + +A `TypeError` will be thrown if `array` is not an `Array`. + +### Class Method: Buffer.from(arrayBuffer[, byteOffset[, length]]) + + +* `arrayBuffer` {ArrayBuffer} The `.buffer` property of a `TypedArray` or + a `new ArrayBuffer()` +* `byteOffset` {Number} Default: `0` +* `length` {Number} Default: `arrayBuffer.length - byteOffset` + +When passed a reference to the `.buffer` property of a `TypedArray` instance, +the newly created `Buffer` will share the same allocated memory as the +TypedArray. + +```js +const arr = new Uint16Array(2); +arr[0] = 5000; +arr[1] = 4000; + +const buf = Buffer.from(arr.buffer); // shares the memory with arr; + +console.log(buf); + // Prints: + +// changing the TypedArray changes the Buffer also +arr[1] = 6000; + +console.log(buf); + // Prints: +``` + +The optional `byteOffset` and `length` arguments specify a memory range within +the `arrayBuffer` that will be shared by the `Buffer`. + +```js +const ab = new ArrayBuffer(10); +const buf = Buffer.from(ab, 0, 2); +console.log(buf.length); + // Prints: 2 +``` + +A `TypeError` will be thrown if `arrayBuffer` is not an `ArrayBuffer`. 
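One behavior worth noting between these `from()` overloads: the shim in `index.js` further down in this diff rejects plain numbers outright, which is the package's core safety guarantee. A small sketch of that guard (illustrative, not part of the README):

```js
var Buffer = require('safe-buffer').Buffer

Buffer.from('abc')   // fine: encodes the string as utf8
Buffer.alloc(8)      // fine: zero-filled 8-byte Buffer

try {
  Buffer.from(8)     // rejected, unlike the legacy `new Buffer(8)`
} catch (err) {
  // true – the shim throws 'Argument must not be a number';
  // modern Node's built-in Buffer.from throws its own TypeError
  console.log(err instanceof TypeError)
}
```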
+ +### Class Method: Buffer.from(buffer) + + +* `buffer` {Buffer} + +Copies the passed `buffer` data onto a new `Buffer` instance. + +```js +const buf1 = Buffer.from('buffer'); +const buf2 = Buffer.from(buf1); + +buf1[0] = 0x61; +console.log(buf1.toString()); + // 'auffer' +console.log(buf2.toString()); + // 'buffer' (copy is not changed) +``` + +A `TypeError` will be thrown if `buffer` is not a `Buffer`. + +### Class Method: Buffer.from(str[, encoding]) + + +* `str` {String} String to encode. +* `encoding` {String} Encoding to use, Default: `'utf8'` + +Creates a new `Buffer` containing the given JavaScript string `str`. If +provided, the `encoding` parameter identifies the character encoding. +If not provided, `encoding` defaults to `'utf8'`. + +```js +const buf1 = Buffer.from('this is a tést'); +console.log(buf1.toString()); + // prints: this is a tést +console.log(buf1.toString('ascii')); + // prints: this is a tC)st + +const buf2 = Buffer.from('7468697320697320612074c3a97374', 'hex'); +console.log(buf2.toString()); + // prints: this is a tést +``` + +A `TypeError` will be thrown if `str` is not a string. + +### Class Method: Buffer.alloc(size[, fill[, encoding]]) + + +* `size` {Number} +* `fill` {Value} Default: `undefined` +* `encoding` {String} Default: `utf8` + +Allocates a new `Buffer` of `size` bytes. If `fill` is `undefined`, the +`Buffer` will be *zero-filled*. + +```js +const buf = Buffer.alloc(5); +console.log(buf); + // +``` + +The `size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +If `fill` is specified, the allocated `Buffer` will be initialized by calling +`buf.fill(fill)`. See [`buf.fill()`][] for more information. + +```js +const buf = Buffer.alloc(5, 'a'); +console.log(buf); + // +``` + +If both `fill` and `encoding` are specified, the allocated `Buffer` will be +initialized by calling `buf.fill(fill, encoding)`. For example: + +```js +const buf = Buffer.alloc(11, 'aGVsbG8gd29ybGQ=', 'base64'); +console.log(buf); + // +``` + +Calling `Buffer.alloc(size)` can be significantly slower than the alternative +`Buffer.allocUnsafe(size)` but ensures that the newly created `Buffer` instance +contents will *never contain sensitive data*. + +A `TypeError` will be thrown if `size` is not a number. + +### Class Method: Buffer.allocUnsafe(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* `Buffer` of `size` bytes. The `size` must +be less than or equal to the value of `require('buffer').kMaxLength` (on 64-bit +architectures, `kMaxLength` is `(2^31)-1`). Otherwise, a [`RangeError`][] is +thrown. A zero-length Buffer will be created if a `size` less than or equal to +0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +```js +const buf = Buffer.allocUnsafe(5); +console.log(buf); + // + // (octets will be different, every time) +buf.fill(0); +console.log(buf); + // +``` + +A `TypeError` will be thrown if `size` is not a number. 
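The pooling note that follows is easier to picture with the two call shapes side by side. A sketch (not from the README), assuming the default `Buffer.poolSize` of 8192:

```js
// Both lines yield a 64-byte Buffer filled with 0x61 ('a'), but they are allocated differently:
var viaAlloc  = Buffer.alloc(64, 'a')            // never drawn from the shared internal pool
var viaUnsafe = Buffer.allocUnsafe(64).fill('a') // 64 is well under Buffer.poolSize >> 1, so it may be sliced from the pool
```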
+ +Note that the `Buffer` module pre-allocates an internal `Buffer` instance of +size `Buffer.poolSize` that is used as a pool for the fast allocation of new +`Buffer` instances created using `Buffer.allocUnsafe(size)` (and the deprecated +`new Buffer(size)` constructor) only when `size` is less than or equal to +`Buffer.poolSize >> 1` (floor of `Buffer.poolSize` divided by two). The default +value of `Buffer.poolSize` is `8192` but can be modified. + +Use of this pre-allocated internal memory pool is a key difference between +calling `Buffer.alloc(size, fill)` vs. `Buffer.allocUnsafe(size).fill(fill)`. +Specifically, `Buffer.alloc(size, fill)` will *never* use the internal Buffer +pool, while `Buffer.allocUnsafe(size).fill(fill)` *will* use the internal +Buffer pool if `size` is less than or equal to half `Buffer.poolSize`. The +difference is subtle but can be important when an application requires the +additional performance that `Buffer.allocUnsafe(size)` provides. + +### Class Method: Buffer.allocUnsafeSlow(size) + + +* `size` {Number} + +Allocates a new *non-zero-filled* and non-pooled `Buffer` of `size` bytes. The +`size` must be less than or equal to the value of +`require('buffer').kMaxLength` (on 64-bit architectures, `kMaxLength` is +`(2^31)-1`). Otherwise, a [`RangeError`][] is thrown. A zero-length Buffer will +be created if a `size` less than or equal to 0 is specified. + +The underlying memory for `Buffer` instances created in this way is *not +initialized*. The contents of the newly created `Buffer` are unknown and +*may contain sensitive data*. Use [`buf.fill(0)`][] to initialize such +`Buffer` instances to zeroes. + +When using `Buffer.allocUnsafe()` to allocate new `Buffer` instances, +allocations under 4KB are, by default, sliced from a single pre-allocated +`Buffer`. This allows applications to avoid the garbage collection overhead of +creating many individually allocated Buffers. This approach improves both +performance and memory usage by eliminating the need to track and cleanup as +many `Persistent` objects. + +However, in the case where a developer may need to retain a small chunk of +memory from a pool for an indeterminate amount of time, it may be appropriate +to create an un-pooled Buffer instance using `Buffer.allocUnsafeSlow()` then +copy out the relevant bits. + +```js +// need to keep around a few small chunks of memory +const store = []; + +socket.on('readable', () => { + const data = socket.read(); + // allocate for retained data + const sb = Buffer.allocUnsafeSlow(10); + // copy the data into the new allocation + data.copy(sb, 0, 0, 10); + store.push(sb); +}); +``` + +Use of `Buffer.allocUnsafeSlow()` should be used only as a last resort *after* +a developer has observed undue memory retention in their applications. + +A `TypeError` will be thrown if `size` is not a number. + +### All the Rest + +The rest of the `Buffer` API is exactly the same as in node.js. +[See the docs](https://nodejs.org/api/buffer.html). + + +## Related links + +- [Node.js issue: Buffer(number) is unsafe](https://github.com/nodejs/node/issues/4660) +- [Node.js Enhancement Proposal: Buffer.from/Buffer.alloc/Buffer.zalloc/Buffer() soft-deprecate](https://github.com/nodejs/node-eps/pull/4) + +## Why is `Buffer` unsafe? + +Today, the node.js `Buffer` constructor is overloaded to handle many different argument +types like `String`, `Array`, `Object`, `TypedArrayView` (`Uint8Array`, etc.), +`ArrayBuffer`, and also `Number`. 
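Concretely, every one of those argument types funnels through the same overloaded constructor (`otherBuffer` and `arrayBuffer` below are placeholder variables, shown only to illustrate the overloads just listed):

```js
new Buffer('hello')        // String       -> encodes text (utf8 by default)
new Buffer([0x68, 0x69])   // Array        -> copies the listed octets
new Buffer(otherBuffer)    // Buffer       -> copies another Buffer
new Buffer(arrayBuffer)    // ArrayBuffer  -> wraps the ArrayBuffer's bytes
new Buffer(32)             // Number       -> 32 *uninitialized* bytes, the dangerous case
```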
+ +The API is optimized for convenience: you can throw any type at it, and it will try to do +what you want. + +Because the Buffer constructor is so powerful, you often see code like this: + +```js +// Convert UTF-8 strings to hex +function toHex (str) { + return new Buffer(str).toString('hex') +} +``` + +***But what happens if `toHex` is called with a `Number` argument?*** + +### Remote Memory Disclosure + +If an attacker can make your program call the `Buffer` constructor with a `Number` +argument, then they can make it allocate uninitialized memory from the node.js process. +This could potentially disclose TLS private keys, user data, or database passwords. + +When the `Buffer` constructor is passed a `Number` argument, it returns an +**UNINITIALIZED** block of memory of the specified `size`. When you create a `Buffer` like +this, you **MUST** overwrite the contents before returning it to the user. + +From the [node.js docs](https://nodejs.org/api/buffer.html#buffer_new_buffer_size): + +> `new Buffer(size)` +> +> - `size` Number +> +> The underlying memory for `Buffer` instances created in this way is not initialized. +> **The contents of a newly created `Buffer` are unknown and could contain sensitive +> data.** Use `buf.fill(0)` to initialize a Buffer to zeroes. + +(Emphasis our own.) + +Whenever the programmer intended to create an uninitialized `Buffer` you often see code +like this: + +```js +var buf = new Buffer(16) + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### Would this ever be a problem in real code? + +Yes. It's surprisingly common to forget to check the type of your variables in a +dynamically-typed language like JavaScript. + +Usually the consequences of assuming the wrong type is that your program crashes with an +uncaught exception. But the failure mode for forgetting to check the type of arguments to +the `Buffer` constructor is more catastrophic. + +Here's an example of a vulnerable service that takes a JSON payload and converts it to +hex: + +```js +// Take a JSON payload {str: "some string"} and convert it to hex +var server = http.createServer(function (req, res) { + var data = '' + req.setEncoding('utf8') + req.on('data', function (chunk) { + data += chunk + }) + req.on('end', function () { + var body = JSON.parse(data) + res.end(new Buffer(body.str).toString('hex')) + }) +}) + +server.listen(8080) +``` + +In this example, an http client just has to send: + +```json +{ + "str": 1000 +} +``` + +and it will get back 1,000 bytes of uninitialized memory from the server. + +This is a very serious bug. It's similar in severity to the +[the Heartbleed bug](http://heartbleed.com/) that allowed disclosure of OpenSSL process +memory by remote attackers. + + +### Which real-world packages were vulnerable? + +#### [`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht) + +[Mathias Buus](https://github.com/mafintosh) and I +([Feross Aboukhadijeh](http://feross.org/)) found this issue in one of our own packages, +[`bittorrent-dht`](https://www.npmjs.com/package/bittorrent-dht). The bug would allow +anyone on the internet to send a series of messages to a user of `bittorrent-dht` and get +them to reveal 20 bytes at a time of uninitialized memory from the node.js process. + +Here's +[the commit](https://github.com/feross/bittorrent-dht/commit/6c7da04025d5633699800a99ec3fbadf70ad35b8) +that fixed it. 
We released a new fixed version, created a +[Node Security Project disclosure](https://nodesecurity.io/advisories/68), and deprecated all +vulnerable versions on npm so users will get a warning to upgrade to a newer version. + +#### [`ws`](https://www.npmjs.com/package/ws) + +That got us wondering if there were other vulnerable packages. Sure enough, within a short +period of time, we found the same issue in [`ws`](https://www.npmjs.com/package/ws), the +most popular WebSocket implementation in node.js. + +If certain APIs were called with `Number` parameters instead of `String` or `Buffer` as +expected, then uninitialized server memory would be disclosed to the remote peer. + +These were the vulnerable methods: + +```js +socket.send(number) +socket.ping(number) +socket.pong(number) +``` + +Here's a vulnerable socket server with some echo functionality: + +```js +server.on('connection', function (socket) { + socket.on('message', function (message) { + message = JSON.parse(message) + if (message.type === 'echo') { + socket.send(message.data) // send back the user's message + } + }) +}) +``` + +`socket.send(number)` called on the server, will disclose server memory. + +Here's [the release](https://github.com/websockets/ws/releases/tag/1.0.1) where the issue +was fixed, with a more detailed explanation. Props to +[Arnout Kazemier](https://github.com/3rd-Eden) for the quick fix. Here's the +[Node Security Project disclosure](https://nodesecurity.io/advisories/67). + + +### What's the solution? + +It's important that node.js offers a fast way to get memory otherwise performance-critical +applications would needlessly get a lot slower. + +But we need a better way to *signal our intent* as programmers. **When we want +uninitialized memory, we should request it explicitly.** + +Sensitive functionality should not be packed into a developer-friendly API that loosely +accepts many different types. This type of API encourages the lazy practice of passing +variables in without checking the type very carefully. + +#### A new API: `Buffer.allocUnsafe(number)` + +The functionality of creating buffers with uninitialized memory should be part of another +API. We propose `Buffer.allocUnsafe(number)`. This way, it's not part of an API that +frequently gets user input of all sorts of different types passed into it. + +```js +var buf = Buffer.allocUnsafe(16) // careful, uninitialized memory! + +// Immediately overwrite the uninitialized buffer with data from another buffer +for (var i = 0; i < buf.length; i++) { + buf[i] = otherBuf[i] +} +``` + + +### How do we fix node.js core? + +We sent [a PR to node.js core](https://github.com/nodejs/node/pull/4514) (merged as +`semver-major`) which defends against one case: + +```js +var str = 16 +new Buffer(str, 'utf8') +``` + +In this situation, it's implied that the programmer intended the first argument to be a +string, since they passed an encoding as a second argument. Today, node.js will allocate +uninitialized memory in the case of `new Buffer(number, encoding)`, which is probably not +what the programmer intended. + +But this is only a partial solution, since if the programmer does `new Buffer(variable)` +(without an `encoding` parameter) there's no way to know what they intended. If `variable` +is sometimes a number, then uninitialized memory will sometimes be returned. + +### What's the real long-term fix? + +We could deprecate and remove `new Buffer(number)` and use `Buffer.allocUnsafe(number)` when +we need uninitialized memory. 
But that would break 1000s of packages. + +~~We believe the best solution is to:~~ + +~~1. Change `new Buffer(number)` to return safe, zeroed-out memory~~ + +~~2. Create a new API for creating uninitialized Buffers. We propose: `Buffer.allocUnsafe(number)`~~ + +#### Update + +We now support adding three new APIs: + +- `Buffer.from(value)` - convert from any type to a buffer +- `Buffer.alloc(size)` - create a zero-filled buffer +- `Buffer.allocUnsafe(size)` - create an uninitialized buffer with given size + +This solves the core problem that affected `ws` and `bittorrent-dht` which is +`Buffer(variable)` getting tricked into taking a number argument. + +This way, existing code continues working and the impact on the npm ecosystem will be +minimal. Over time, npm maintainers can migrate performance-critical code to use +`Buffer.allocUnsafe(number)` instead of `new Buffer(number)`. + + +### Conclusion + +We think there's a serious design issue with the `Buffer` API as it exists today. It +promotes insecure software by putting high-risk functionality into a convenient API +with friendly "developer ergonomics". + +This wasn't merely a theoretical exercise because we found the issue in some of the +most popular npm packages. + +Fortunately, there's an easy fix that can be applied today. Use `safe-buffer` in place of +`buffer`. + +```js +var Buffer = require('safe-buffer').Buffer +``` + +Eventually, we hope that node.js core can switch to this new, safer behavior. We believe +the impact on the ecosystem would be minimal since it's not a breaking change. +Well-maintained, popular packages would be updated to use `Buffer.alloc` quickly, while +older, insecure packages would magically become safe from this attack vector. + + +## links + +- [Node.js PR: buffer: throw if both length and enc are passed](https://github.com/nodejs/node/pull/4514) +- [Node Security Project disclosure for `ws`](https://nodesecurity.io/advisories/67) +- [Node Security Project disclosure for`bittorrent-dht`](https://nodesecurity.io/advisories/68) + + +## credit + +The original issues in `bittorrent-dht` +([disclosure](https://nodesecurity.io/advisories/68)) and +`ws` ([disclosure](https://nodesecurity.io/advisories/67)) were discovered by +[Mathias Buus](https://github.com/mafintosh) and +[Feross Aboukhadijeh](http://feross.org/). + +Thanks to [Adam Baldwin](https://github.com/evilpacket) for helping disclose these issues +and for his work running the [Node Security Project](https://nodesecurity.io/). + +Thanks to [John Hiesey](https://github.com/jhiesey) for proofreading this README and +auditing the code. + + +## license + +MIT. 
Copyright (C) [Feross Aboukhadijeh](http://feross.org) diff --git a/node_modules/string_decoder/node_modules/safe-buffer/index.d.ts b/node_modules/string_decoder/node_modules/safe-buffer/index.d.ts new file mode 100644 index 0000000..e9fed80 --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/index.d.ts @@ -0,0 +1,187 @@ +declare module "safe-buffer" { + export class Buffer { + length: number + write(string: string, offset?: number, length?: number, encoding?: string): number; + toString(encoding?: string, start?: number, end?: number): string; + toJSON(): { type: 'Buffer', data: any[] }; + equals(otherBuffer: Buffer): boolean; + compare(otherBuffer: Buffer, targetStart?: number, targetEnd?: number, sourceStart?: number, sourceEnd?: number): number; + copy(targetBuffer: Buffer, targetStart?: number, sourceStart?: number, sourceEnd?: number): number; + slice(start?: number, end?: number): Buffer; + writeUIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeUIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntLE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + writeIntBE(value: number, offset: number, byteLength: number, noAssert?: boolean): number; + readUIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readUIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntLE(offset: number, byteLength: number, noAssert?: boolean): number; + readIntBE(offset: number, byteLength: number, noAssert?: boolean): number; + readUInt8(offset: number, noAssert?: boolean): number; + readUInt16LE(offset: number, noAssert?: boolean): number; + readUInt16BE(offset: number, noAssert?: boolean): number; + readUInt32LE(offset: number, noAssert?: boolean): number; + readUInt32BE(offset: number, noAssert?: boolean): number; + readInt8(offset: number, noAssert?: boolean): number; + readInt16LE(offset: number, noAssert?: boolean): number; + readInt16BE(offset: number, noAssert?: boolean): number; + readInt32LE(offset: number, noAssert?: boolean): number; + readInt32BE(offset: number, noAssert?: boolean): number; + readFloatLE(offset: number, noAssert?: boolean): number; + readFloatBE(offset: number, noAssert?: boolean): number; + readDoubleLE(offset: number, noAssert?: boolean): number; + readDoubleBE(offset: number, noAssert?: boolean): number; + swap16(): Buffer; + swap32(): Buffer; + swap64(): Buffer; + writeUInt8(value: number, offset: number, noAssert?: boolean): number; + writeUInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeUInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeInt8(value: number, offset: number, noAssert?: boolean): number; + writeInt16LE(value: number, offset: number, noAssert?: boolean): number; + writeInt16BE(value: number, offset: number, noAssert?: boolean): number; + writeInt32LE(value: number, offset: number, noAssert?: boolean): number; + writeInt32BE(value: number, offset: number, noAssert?: boolean): number; + writeFloatLE(value: number, offset: number, noAssert?: boolean): number; + writeFloatBE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleLE(value: number, offset: number, noAssert?: boolean): number; + writeDoubleBE(value: number, offset: number, noAssert?: boolean): number; + 
fill(value: any, offset?: number, end?: number): this; + indexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + lastIndexOf(value: string | number | Buffer, byteOffset?: number, encoding?: string): number; + includes(value: string | number | Buffer, byteOffset?: number, encoding?: string): boolean; + + /** + * Allocates a new buffer containing the given {str}. + * + * @param str String to store in buffer. + * @param encoding encoding to use, optional. Default is 'utf8' + */ + constructor (str: string, encoding?: string); + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + */ + constructor (size: number); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: Uint8Array); + /** + * Produces a Buffer backed by the same allocated memory as + * the given {ArrayBuffer}. + * + * + * @param arrayBuffer The ArrayBuffer with which to share memory. + */ + constructor (arrayBuffer: ArrayBuffer); + /** + * Allocates a new buffer containing the given {array} of octets. + * + * @param array The octets to store. + */ + constructor (array: any[]); + /** + * Copies the passed {buffer} data onto a new {Buffer} instance. + * + * @param buffer The buffer to copy. + */ + constructor (buffer: Buffer); + prototype: Buffer; + /** + * Allocates a new Buffer using an {array} of octets. + * + * @param array + */ + static from(array: any[]): Buffer; + /** + * When passed a reference to the .buffer property of a TypedArray instance, + * the newly created Buffer will share the same allocated memory as the TypedArray. + * The optional {byteOffset} and {length} arguments specify a memory range + * within the {arrayBuffer} that will be shared by the Buffer. + * + * @param arrayBuffer The .buffer property of a TypedArray or a new ArrayBuffer() + * @param byteOffset + * @param length + */ + static from(arrayBuffer: ArrayBuffer, byteOffset?: number, length?: number): Buffer; + /** + * Copies the passed {buffer} data onto a new Buffer instance. + * + * @param buffer + */ + static from(buffer: Buffer): Buffer; + /** + * Creates a new Buffer containing the given JavaScript string {str}. + * If provided, the {encoding} parameter identifies the character encoding. + * If not provided, {encoding} defaults to 'utf8'. + * + * @param str + */ + static from(str: string, encoding?: string): Buffer; + /** + * Returns true if {obj} is a Buffer + * + * @param obj object to test. + */ + static isBuffer(obj: any): obj is Buffer; + /** + * Returns true if {encoding} is a valid encoding argument. + * Valid string encodings in Node 0.12: 'ascii'|'utf8'|'utf16le'|'ucs2'(alias of 'utf16le')|'base64'|'binary'(deprecated)|'hex' + * + * @param encoding string to test. + */ + static isEncoding(encoding: string): boolean; + /** + * Gives the actual byte length of a string. encoding defaults to 'utf8'. + * This is not the same as String.prototype.length since that returns the number of characters in a string. + * + * @param string string to test. + * @param encoding encoding used to evaluate (defaults to 'utf8') + */ + static byteLength(string: string, encoding?: string): number; + /** + * Returns a buffer which is the result of concatenating all the buffers in the list together. + * + * If the list has no items, or if the totalLength is 0, then it returns a zero-length buffer. + * If the list has exactly one item, then the first item of the list is returned. 
+ * If the list has more than one item, then a new Buffer is created. + * + * @param list An array of Buffer objects to concatenate + * @param totalLength Total length of the buffers when concatenated. + * If totalLength is not provided, it is read from the buffers in the list. However, this adds an additional loop to the function, so it is faster to provide the length explicitly. + */ + static concat(list: Buffer[], totalLength?: number): Buffer; + /** + * The same as buf1.compare(buf2). + */ + static compare(buf1: Buffer, buf2: Buffer): number; + /** + * Allocates a new buffer of {size} octets. + * + * @param size count of octets to allocate. + * @param fill if specified, buffer will be initialized by calling buf.fill(fill). + * If parameter is omitted, buffer will be filled with zeros. + * @param encoding encoding used for call to buf.fill while initalizing + */ + static alloc(size: number, fill?: string | Buffer | number, encoding?: string): Buffer; + /** + * Allocates a new buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafe(size: number): Buffer; + /** + * Allocates a new non-pooled buffer of {size} octets, leaving memory not initialized, so the contents + * of the newly created Buffer are unknown and may contain sensitive data. + * + * @param size count of octets to allocate + */ + static allocUnsafeSlow(size: number): Buffer; + } +} \ No newline at end of file diff --git a/node_modules/string_decoder/node_modules/safe-buffer/index.js b/node_modules/string_decoder/node_modules/safe-buffer/index.js new file mode 100644 index 0000000..22438da --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/index.js @@ -0,0 +1,62 @@ +/* eslint-disable node/no-deprecated-api */ +var buffer = require('buffer') +var Buffer = buffer.Buffer + +// alternative to using Object.keys for old browsers +function copyProps (src, dst) { + for (var key in src) { + dst[key] = src[key] + } +} +if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { + module.exports = buffer +} else { + // Copy properties from require('buffer') + copyProps(buffer, exports) + exports.Buffer = SafeBuffer +} + +function SafeBuffer (arg, encodingOrOffset, length) { + return Buffer(arg, encodingOrOffset, length) +} + +// Copy static methods from Buffer +copyProps(Buffer, SafeBuffer) + +SafeBuffer.from = function (arg, encodingOrOffset, length) { + if (typeof arg === 'number') { + throw new TypeError('Argument must not be a number') + } + return Buffer(arg, encodingOrOffset, length) +} + +SafeBuffer.alloc = function (size, fill, encoding) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + var buf = Buffer(size) + if (fill !== undefined) { + if (typeof encoding === 'string') { + buf.fill(fill, encoding) + } else { + buf.fill(fill) + } + } else { + buf.fill(0) + } + return buf +} + +SafeBuffer.allocUnsafe = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return Buffer(size) +} + +SafeBuffer.allocUnsafeSlow = function (size) { + if (typeof size !== 'number') { + throw new TypeError('Argument must be a number') + } + return buffer.SlowBuffer(size) +} diff --git a/node_modules/string_decoder/node_modules/safe-buffer/package.json b/node_modules/string_decoder/node_modules/safe-buffer/package.json new file mode 100644 index 
0000000..623fbc3 --- /dev/null +++ b/node_modules/string_decoder/node_modules/safe-buffer/package.json @@ -0,0 +1,37 @@ +{ + "name": "safe-buffer", + "description": "Safer Node.js Buffer API", + "version": "5.1.2", + "author": { + "name": "Feross Aboukhadijeh", + "email": "feross@feross.org", + "url": "http://feross.org" + }, + "bugs": { + "url": "https://github.com/feross/safe-buffer/issues" + }, + "devDependencies": { + "standard": "*", + "tape": "^4.0.0" + }, + "homepage": "https://github.com/feross/safe-buffer", + "keywords": [ + "buffer", + "buffer allocate", + "node security", + "safe", + "safe-buffer", + "security", + "uninitialized" + ], + "license": "MIT", + "main": "index.js", + "types": "index.d.ts", + "repository": { + "type": "git", + "url": "git://github.com/feross/safe-buffer.git" + }, + "scripts": { + "test": "standard && tape test/*.js" + } +} diff --git a/node_modules/string_decoder/package.json b/node_modules/string_decoder/package.json new file mode 100644 index 0000000..518c3eb --- /dev/null +++ b/node_modules/string_decoder/package.json @@ -0,0 +1,31 @@ +{ + "name": "string_decoder", + "version": "1.1.1", + "description": "The string_decoder module from Node core", + "main": "lib/string_decoder.js", + "dependencies": { + "safe-buffer": "~5.1.0" + }, + "devDependencies": { + "babel-polyfill": "^6.23.0", + "core-util-is": "^1.0.2", + "inherits": "^2.0.3", + "tap": "~0.4.8" + }, + "scripts": { + "test": "tap test/parallel/*.js && node test/verify-dependencies", + "ci": "tap test/parallel/*.js test/ours/*.js --tap | tee test.tap && node test/verify-dependencies.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/nodejs/string_decoder.git" + }, + "homepage": "https://github.com/nodejs/string_decoder", + "keywords": [ + "string", + "decoder", + "browser", + "browserify" + ], + "license": "MIT" +} diff --git a/node_modules/typedarray/.travis.yml b/node_modules/typedarray/.travis.yml new file mode 100644 index 0000000..cc4dba2 --- /dev/null +++ b/node_modules/typedarray/.travis.yml @@ -0,0 +1,4 @@ +language: node_js +node_js: + - "0.8" + - "0.10" diff --git a/node_modules/typedarray/LICENSE b/node_modules/typedarray/LICENSE new file mode 100644 index 0000000..11adfae --- /dev/null +++ b/node_modules/typedarray/LICENSE @@ -0,0 +1,35 @@ +/* + Copyright (c) 2010, Linden Research, Inc. + Copyright (c) 2012, Joshua Bell + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in + all copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN + THE SOFTWARE. 
+ $/LicenseInfo$ + */ + +// Original can be found at: +// https://bitbucket.org/lindenlab/llsd +// Modifications by Joshua Bell inexorabletash@gmail.com +// https://github.com/inexorabletash/polyfill + +// ES3/ES5 implementation of the Krhonos Typed Array Specification +// Ref: http://www.khronos.org/registry/typedarray/specs/latest/ +// Date: 2011-02-01 +// +// Variations: +// * Allows typed_array.get/set() as alias for subscripts (typed_array[]) diff --git a/node_modules/typedarray/example/tarray.js b/node_modules/typedarray/example/tarray.js new file mode 100644 index 0000000..8423d7c --- /dev/null +++ b/node_modules/typedarray/example/tarray.js @@ -0,0 +1,4 @@ +var Uint8Array = require('../').Uint8Array; +var ua = new Uint8Array(5); +ua[1] = 256 + 55; +console.log(ua[1]); diff --git a/node_modules/typedarray/index.js b/node_modules/typedarray/index.js new file mode 100644 index 0000000..5e54084 --- /dev/null +++ b/node_modules/typedarray/index.js @@ -0,0 +1,630 @@ +var undefined = (void 0); // Paranoia + +// Beyond this value, index getters/setters (i.e. array[0], array[1]) are so slow to +// create, and consume so much memory, that the browser appears frozen. +var MAX_ARRAY_LENGTH = 1e5; + +// Approximations of internal ECMAScript conversion functions +var ECMAScript = (function() { + // Stash a copy in case other scripts modify these + var opts = Object.prototype.toString, + ophop = Object.prototype.hasOwnProperty; + + return { + // Class returns internal [[Class]] property, used to avoid cross-frame instanceof issues: + Class: function(v) { return opts.call(v).replace(/^\[object *|\]$/g, ''); }, + HasProperty: function(o, p) { return p in o; }, + HasOwnProperty: function(o, p) { return ophop.call(o, p); }, + IsCallable: function(o) { return typeof o === 'function'; }, + ToInt32: function(v) { return v >> 0; }, + ToUint32: function(v) { return v >>> 0; } + }; +}()); + +// Snapshot intrinsics +var LN2 = Math.LN2, + abs = Math.abs, + floor = Math.floor, + log = Math.log, + min = Math.min, + pow = Math.pow, + round = Math.round; + +// ES5: lock down object properties +function configureProperties(obj) { + if (getOwnPropNames && defineProp) { + var props = getOwnPropNames(obj), i; + for (i = 0; i < props.length; i += 1) { + defineProp(obj, props[i], { + value: obj[props[i]], + writable: false, + enumerable: false, + configurable: false + }); + } + } +} + +// emulate ES5 getter/setter API using legacy APIs +// http://blogs.msdn.com/b/ie/archive/2010/09/07/transitioning-existing-code-to-the-es5-getter-setter-apis.aspx +// (second clause tests for Object.defineProperty() in IE<9 that only supports extending DOM prototypes, but +// note that IE<9 does not support __defineGetter__ or __defineSetter__ so it just renders the method harmless) +var defineProp +if (Object.defineProperty && (function() { + try { + Object.defineProperty({}, 'x', {}); + return true; + } catch (e) { + return false; + } + })()) { + defineProp = Object.defineProperty; +} else { + defineProp = function(o, p, desc) { + if (!o === Object(o)) throw new TypeError("Object.defineProperty called on non-object"); + if (ECMAScript.HasProperty(desc, 'get') && Object.prototype.__defineGetter__) { Object.prototype.__defineGetter__.call(o, p, desc.get); } + if (ECMAScript.HasProperty(desc, 'set') && Object.prototype.__defineSetter__) { Object.prototype.__defineSetter__.call(o, p, desc.set); } + if (ECMAScript.HasProperty(desc, 'value')) { o[p] = desc.value; } + return o; + }; +} + +var getOwnPropNames = Object.getOwnPropertyNames || 
function (o) { + if (o !== Object(o)) throw new TypeError("Object.getOwnPropertyNames called on non-object"); + var props = [], p; + for (p in o) { + if (ECMAScript.HasOwnProperty(o, p)) { + props.push(p); + } + } + return props; +}; + +// ES5: Make obj[index] an alias for obj._getter(index)/obj._setter(index, value) +// for index in 0 ... obj.length +function makeArrayAccessors(obj) { + if (!defineProp) { return; } + + if (obj.length > MAX_ARRAY_LENGTH) throw new RangeError("Array too large for polyfill"); + + function makeArrayAccessor(index) { + defineProp(obj, index, { + 'get': function() { return obj._getter(index); }, + 'set': function(v) { obj._setter(index, v); }, + enumerable: true, + configurable: false + }); + } + + var i; + for (i = 0; i < obj.length; i += 1) { + makeArrayAccessor(i); + } +} + +// Internal conversion functions: +// pack() - take a number (interpreted as Type), output a byte array +// unpack() - take a byte array, output a Type-like number + +function as_signed(value, bits) { var s = 32 - bits; return (value << s) >> s; } +function as_unsigned(value, bits) { var s = 32 - bits; return (value << s) >>> s; } + +function packI8(n) { return [n & 0xff]; } +function unpackI8(bytes) { return as_signed(bytes[0], 8); } + +function packU8(n) { return [n & 0xff]; } +function unpackU8(bytes) { return as_unsigned(bytes[0], 8); } + +function packU8Clamped(n) { n = round(Number(n)); return [n < 0 ? 0 : n > 0xff ? 0xff : n & 0xff]; } + +function packI16(n) { return [(n >> 8) & 0xff, n & 0xff]; } +function unpackI16(bytes) { return as_signed(bytes[0] << 8 | bytes[1], 16); } + +function packU16(n) { return [(n >> 8) & 0xff, n & 0xff]; } +function unpackU16(bytes) { return as_unsigned(bytes[0] << 8 | bytes[1], 16); } + +function packI32(n) { return [(n >> 24) & 0xff, (n >> 16) & 0xff, (n >> 8) & 0xff, n & 0xff]; } +function unpackI32(bytes) { return as_signed(bytes[0] << 24 | bytes[1] << 16 | bytes[2] << 8 | bytes[3], 32); } + +function packU32(n) { return [(n >> 24) & 0xff, (n >> 16) & 0xff, (n >> 8) & 0xff, n & 0xff]; } +function unpackU32(bytes) { return as_unsigned(bytes[0] << 24 | bytes[1] << 16 | bytes[2] << 8 | bytes[3], 32); } + +function packIEEE754(v, ebits, fbits) { + + var bias = (1 << (ebits - 1)) - 1, + s, e, f, ln, + i, bits, str, bytes; + + function roundToEven(n) { + var w = floor(n), f = n - w; + if (f < 0.5) + return w; + if (f > 0.5) + return w + 1; + return w % 2 ? w + 1 : w; + } + + // Compute sign, exponent, fraction + if (v !== v) { + // NaN + // http://dev.w3.org/2006/webapi/WebIDL/#es-type-mapping + e = (1 << ebits) - 1; f = pow(2, fbits - 1); s = 0; + } else if (v === Infinity || v === -Infinity) { + e = (1 << ebits) - 1; f = 0; s = (v < 0) ? 1 : 0; + } else if (v === 0) { + e = 0; f = 0; s = (1 / v === -Infinity) ? 1 : 0; + } else { + s = v < 0; + v = abs(v); + + if (v >= pow(2, 1 - bias)) { + e = min(floor(log(v) / LN2), 1023); + f = roundToEven(v / pow(2, e) * pow(2, fbits)); + if (f / pow(2, fbits) >= 2) { + e = e + 1; + f = 1; + } + if (e > bias) { + // Overflow + e = (1 << ebits) - 1; + f = 0; + } else { + // Normalized + e = e + bias; + f = f - pow(2, fbits); + } + } else { + // Denormalized + e = 0; + f = roundToEven(v / pow(2, 1 - bias - fbits)); + } + } + + // Pack sign, exponent, fraction + bits = []; + for (i = fbits; i; i -= 1) { bits.push(f % 2 ? 1 : 0); f = floor(f / 2); } + for (i = ebits; i; i -= 1) { bits.push(e % 2 ? 1 : 0); e = floor(e / 2); } + bits.push(s ? 
1 : 0); + bits.reverse(); + str = bits.join(''); + + // Bits to bytes + bytes = []; + while (str.length) { + bytes.push(parseInt(str.substring(0, 8), 2)); + str = str.substring(8); + } + return bytes; +} + +function unpackIEEE754(bytes, ebits, fbits) { + + // Bytes to bits + var bits = [], i, j, b, str, + bias, s, e, f; + + for (i = bytes.length; i; i -= 1) { + b = bytes[i - 1]; + for (j = 8; j; j -= 1) { + bits.push(b % 2 ? 1 : 0); b = b >> 1; + } + } + bits.reverse(); + str = bits.join(''); + + // Unpack sign, exponent, fraction + bias = (1 << (ebits - 1)) - 1; + s = parseInt(str.substring(0, 1), 2) ? -1 : 1; + e = parseInt(str.substring(1, 1 + ebits), 2); + f = parseInt(str.substring(1 + ebits), 2); + + // Produce number + if (e === (1 << ebits) - 1) { + return f !== 0 ? NaN : s * Infinity; + } else if (e > 0) { + // Normalized + return s * pow(2, e - bias) * (1 + f / pow(2, fbits)); + } else if (f !== 0) { + // Denormalized + return s * pow(2, -(bias - 1)) * (f / pow(2, fbits)); + } else { + return s < 0 ? -0 : 0; + } +} + +function unpackF64(b) { return unpackIEEE754(b, 11, 52); } +function packF64(v) { return packIEEE754(v, 11, 52); } +function unpackF32(b) { return unpackIEEE754(b, 8, 23); } +function packF32(v) { return packIEEE754(v, 8, 23); } + + +// +// 3 The ArrayBuffer Type +// + +(function() { + + /** @constructor */ + var ArrayBuffer = function ArrayBuffer(length) { + length = ECMAScript.ToInt32(length); + if (length < 0) throw new RangeError('ArrayBuffer size is not a small enough positive integer'); + + this.byteLength = length; + this._bytes = []; + this._bytes.length = length; + + var i; + for (i = 0; i < this.byteLength; i += 1) { + this._bytes[i] = 0; + } + + configureProperties(this); + }; + + exports.ArrayBuffer = exports.ArrayBuffer || ArrayBuffer; + + // + // 4 The ArrayBufferView Type + // + + // NOTE: this constructor is not exported + /** @constructor */ + var ArrayBufferView = function ArrayBufferView() { + //this.buffer = null; + //this.byteOffset = 0; + //this.byteLength = 0; + }; + + // + // 5 The Typed Array View Types + // + + function makeConstructor(bytesPerElement, pack, unpack) { + // Each TypedArray type requires a distinct constructor instance with + // identical logic, which this produces. 
+ + var ctor; + ctor = function(buffer, byteOffset, length) { + var array, sequence, i, s; + + if (!arguments.length || typeof arguments[0] === 'number') { + // Constructor(unsigned long length) + this.length = ECMAScript.ToInt32(arguments[0]); + if (length < 0) throw new RangeError('ArrayBufferView size is not a small enough positive integer'); + + this.byteLength = this.length * this.BYTES_PER_ELEMENT; + this.buffer = new ArrayBuffer(this.byteLength); + this.byteOffset = 0; + } else if (typeof arguments[0] === 'object' && arguments[0].constructor === ctor) { + // Constructor(TypedArray array) + array = arguments[0]; + + this.length = array.length; + this.byteLength = this.length * this.BYTES_PER_ELEMENT; + this.buffer = new ArrayBuffer(this.byteLength); + this.byteOffset = 0; + + for (i = 0; i < this.length; i += 1) { + this._setter(i, array._getter(i)); + } + } else if (typeof arguments[0] === 'object' && + !(arguments[0] instanceof ArrayBuffer || ECMAScript.Class(arguments[0]) === 'ArrayBuffer')) { + // Constructor(sequence array) + sequence = arguments[0]; + + this.length = ECMAScript.ToUint32(sequence.length); + this.byteLength = this.length * this.BYTES_PER_ELEMENT; + this.buffer = new ArrayBuffer(this.byteLength); + this.byteOffset = 0; + + for (i = 0; i < this.length; i += 1) { + s = sequence[i]; + this._setter(i, Number(s)); + } + } else if (typeof arguments[0] === 'object' && + (arguments[0] instanceof ArrayBuffer || ECMAScript.Class(arguments[0]) === 'ArrayBuffer')) { + // Constructor(ArrayBuffer buffer, + // optional unsigned long byteOffset, optional unsigned long length) + this.buffer = buffer; + + this.byteOffset = ECMAScript.ToUint32(byteOffset); + if (this.byteOffset > this.buffer.byteLength) { + throw new RangeError("byteOffset out of range"); + } + + if (this.byteOffset % this.BYTES_PER_ELEMENT) { + // The given byteOffset must be a multiple of the element + // size of the specific type, otherwise an exception is raised. 
+ throw new RangeError("ArrayBuffer length minus the byteOffset is not a multiple of the element size."); + } + + if (arguments.length < 3) { + this.byteLength = this.buffer.byteLength - this.byteOffset; + + if (this.byteLength % this.BYTES_PER_ELEMENT) { + throw new RangeError("length of buffer minus byteOffset not a multiple of the element size"); + } + this.length = this.byteLength / this.BYTES_PER_ELEMENT; + } else { + this.length = ECMAScript.ToUint32(length); + this.byteLength = this.length * this.BYTES_PER_ELEMENT; + } + + if ((this.byteOffset + this.byteLength) > this.buffer.byteLength) { + throw new RangeError("byteOffset and length reference an area beyond the end of the buffer"); + } + } else { + throw new TypeError("Unexpected argument type(s)"); + } + + this.constructor = ctor; + + configureProperties(this); + makeArrayAccessors(this); + }; + + ctor.prototype = new ArrayBufferView(); + ctor.prototype.BYTES_PER_ELEMENT = bytesPerElement; + ctor.prototype._pack = pack; + ctor.prototype._unpack = unpack; + ctor.BYTES_PER_ELEMENT = bytesPerElement; + + // getter type (unsigned long index); + ctor.prototype._getter = function(index) { + if (arguments.length < 1) throw new SyntaxError("Not enough arguments"); + + index = ECMAScript.ToUint32(index); + if (index >= this.length) { + return undefined; + } + + var bytes = [], i, o; + for (i = 0, o = this.byteOffset + index * this.BYTES_PER_ELEMENT; + i < this.BYTES_PER_ELEMENT; + i += 1, o += 1) { + bytes.push(this.buffer._bytes[o]); + } + return this._unpack(bytes); + }; + + // NONSTANDARD: convenience alias for getter: type get(unsigned long index); + ctor.prototype.get = ctor.prototype._getter; + + // setter void (unsigned long index, type value); + ctor.prototype._setter = function(index, value) { + if (arguments.length < 2) throw new SyntaxError("Not enough arguments"); + + index = ECMAScript.ToUint32(index); + if (index >= this.length) { + return undefined; + } + + var bytes = this._pack(value), i, o; + for (i = 0, o = this.byteOffset + index * this.BYTES_PER_ELEMENT; + i < this.BYTES_PER_ELEMENT; + i += 1, o += 1) { + this.buffer._bytes[o] = bytes[i]; + } + }; + + // void set(TypedArray array, optional unsigned long offset); + // void set(sequence array, optional unsigned long offset); + ctor.prototype.set = function(index, value) { + if (arguments.length < 1) throw new SyntaxError("Not enough arguments"); + var array, sequence, offset, len, + i, s, d, + byteOffset, byteLength, tmp; + + if (typeof arguments[0] === 'object' && arguments[0].constructor === this.constructor) { + // void set(TypedArray array, optional unsigned long offset); + array = arguments[0]; + offset = ECMAScript.ToUint32(arguments[1]); + + if (offset + array.length > this.length) { + throw new RangeError("Offset plus length of array is out of range"); + } + + byteOffset = this.byteOffset + offset * this.BYTES_PER_ELEMENT; + byteLength = array.length * this.BYTES_PER_ELEMENT; + + if (array.buffer === this.buffer) { + tmp = []; + for (i = 0, s = array.byteOffset; i < byteLength; i += 1, s += 1) { + tmp[i] = array.buffer._bytes[s]; + } + for (i = 0, d = byteOffset; i < byteLength; i += 1, d += 1) { + this.buffer._bytes[d] = tmp[i]; + } + } else { + for (i = 0, s = array.byteOffset, d = byteOffset; + i < byteLength; i += 1, s += 1, d += 1) { + this.buffer._bytes[d] = array.buffer._bytes[s]; + } + } + } else if (typeof arguments[0] === 'object' && typeof arguments[0].length !== 'undefined') { + // void set(sequence array, optional unsigned long offset); + sequence = 
arguments[0]; + len = ECMAScript.ToUint32(sequence.length); + offset = ECMAScript.ToUint32(arguments[1]); + + if (offset + len > this.length) { + throw new RangeError("Offset plus length of array is out of range"); + } + + for (i = 0; i < len; i += 1) { + s = sequence[i]; + this._setter(offset + i, Number(s)); + } + } else { + throw new TypeError("Unexpected argument type(s)"); + } + }; + + // TypedArray subarray(long begin, optional long end); + ctor.prototype.subarray = function(start, end) { + function clamp(v, min, max) { return v < min ? min : v > max ? max : v; } + + start = ECMAScript.ToInt32(start); + end = ECMAScript.ToInt32(end); + + if (arguments.length < 1) { start = 0; } + if (arguments.length < 2) { end = this.length; } + + if (start < 0) { start = this.length + start; } + if (end < 0) { end = this.length + end; } + + start = clamp(start, 0, this.length); + end = clamp(end, 0, this.length); + + var len = end - start; + if (len < 0) { + len = 0; + } + + return new this.constructor( + this.buffer, this.byteOffset + start * this.BYTES_PER_ELEMENT, len); + }; + + return ctor; + } + + var Int8Array = makeConstructor(1, packI8, unpackI8); + var Uint8Array = makeConstructor(1, packU8, unpackU8); + var Uint8ClampedArray = makeConstructor(1, packU8Clamped, unpackU8); + var Int16Array = makeConstructor(2, packI16, unpackI16); + var Uint16Array = makeConstructor(2, packU16, unpackU16); + var Int32Array = makeConstructor(4, packI32, unpackI32); + var Uint32Array = makeConstructor(4, packU32, unpackU32); + var Float32Array = makeConstructor(4, packF32, unpackF32); + var Float64Array = makeConstructor(8, packF64, unpackF64); + + exports.Int8Array = exports.Int8Array || Int8Array; + exports.Uint8Array = exports.Uint8Array || Uint8Array; + exports.Uint8ClampedArray = exports.Uint8ClampedArray || Uint8ClampedArray; + exports.Int16Array = exports.Int16Array || Int16Array; + exports.Uint16Array = exports.Uint16Array || Uint16Array; + exports.Int32Array = exports.Int32Array || Int32Array; + exports.Uint32Array = exports.Uint32Array || Uint32Array; + exports.Float32Array = exports.Float32Array || Float32Array; + exports.Float64Array = exports.Float64Array || Float64Array; +}()); + +// +// 6 The DataView View Type +// + +(function() { + function r(array, index) { + return ECMAScript.IsCallable(array.get) ? 
array.get(index) : array[index]; + } + + var IS_BIG_ENDIAN = (function() { + var u16array = new(exports.Uint16Array)([0x1234]), + u8array = new(exports.Uint8Array)(u16array.buffer); + return r(u8array, 0) === 0x12; + }()); + + // Constructor(ArrayBuffer buffer, + // optional unsigned long byteOffset, + // optional unsigned long byteLength) + /** @constructor */ + var DataView = function DataView(buffer, byteOffset, byteLength) { + if (arguments.length === 0) { + buffer = new exports.ArrayBuffer(0); + } else if (!(buffer instanceof exports.ArrayBuffer || ECMAScript.Class(buffer) === 'ArrayBuffer')) { + throw new TypeError("TypeError"); + } + + this.buffer = buffer || new exports.ArrayBuffer(0); + + this.byteOffset = ECMAScript.ToUint32(byteOffset); + if (this.byteOffset > this.buffer.byteLength) { + throw new RangeError("byteOffset out of range"); + } + + if (arguments.length < 3) { + this.byteLength = this.buffer.byteLength - this.byteOffset; + } else { + this.byteLength = ECMAScript.ToUint32(byteLength); + } + + if ((this.byteOffset + this.byteLength) > this.buffer.byteLength) { + throw new RangeError("byteOffset and length reference an area beyond the end of the buffer"); + } + + configureProperties(this); + }; + + function makeGetter(arrayType) { + return function(byteOffset, littleEndian) { + + byteOffset = ECMAScript.ToUint32(byteOffset); + + if (byteOffset + arrayType.BYTES_PER_ELEMENT > this.byteLength) { + throw new RangeError("Array index out of range"); + } + byteOffset += this.byteOffset; + + var uint8Array = new exports.Uint8Array(this.buffer, byteOffset, arrayType.BYTES_PER_ELEMENT), + bytes = [], i; + for (i = 0; i < arrayType.BYTES_PER_ELEMENT; i += 1) { + bytes.push(r(uint8Array, i)); + } + + if (Boolean(littleEndian) === Boolean(IS_BIG_ENDIAN)) { + bytes.reverse(); + } + + return r(new arrayType(new exports.Uint8Array(bytes).buffer), 0); + }; + } + + DataView.prototype.getUint8 = makeGetter(exports.Uint8Array); + DataView.prototype.getInt8 = makeGetter(exports.Int8Array); + DataView.prototype.getUint16 = makeGetter(exports.Uint16Array); + DataView.prototype.getInt16 = makeGetter(exports.Int16Array); + DataView.prototype.getUint32 = makeGetter(exports.Uint32Array); + DataView.prototype.getInt32 = makeGetter(exports.Int32Array); + DataView.prototype.getFloat32 = makeGetter(exports.Float32Array); + DataView.prototype.getFloat64 = makeGetter(exports.Float64Array); + + function makeSetter(arrayType) { + return function(byteOffset, value, littleEndian) { + + byteOffset = ECMAScript.ToUint32(byteOffset); + if (byteOffset + arrayType.BYTES_PER_ELEMENT > this.byteLength) { + throw new RangeError("Array index out of range"); + } + + // Get bytes + var typeArray = new arrayType([value]), + byteArray = new exports.Uint8Array(typeArray.buffer), + bytes = [], i, byteView; + + for (i = 0; i < arrayType.BYTES_PER_ELEMENT; i += 1) { + bytes.push(r(byteArray, i)); + } + + // Flip if necessary + if (Boolean(littleEndian) === Boolean(IS_BIG_ENDIAN)) { + bytes.reverse(); + } + + // Write them + byteView = new exports.Uint8Array(this.buffer, byteOffset, arrayType.BYTES_PER_ELEMENT); + byteView.set(bytes); + }; + } + + DataView.prototype.setUint8 = makeSetter(exports.Uint8Array); + DataView.prototype.setInt8 = makeSetter(exports.Int8Array); + DataView.prototype.setUint16 = makeSetter(exports.Uint16Array); + DataView.prototype.setInt16 = makeSetter(exports.Int16Array); + DataView.prototype.setUint32 = makeSetter(exports.Uint32Array); + DataView.prototype.setInt32 = makeSetter(exports.Int32Array); 
+ DataView.prototype.setFloat32 = makeSetter(exports.Float32Array); + DataView.prototype.setFloat64 = makeSetter(exports.Float64Array); + + exports.DataView = exports.DataView || DataView; + +}()); diff --git a/node_modules/typedarray/package.json b/node_modules/typedarray/package.json new file mode 100644 index 0000000..a7854a0 --- /dev/null +++ b/node_modules/typedarray/package.json @@ -0,0 +1,55 @@ +{ + "name": "typedarray", + "version": "0.0.6", + "description": "TypedArray polyfill for old browsers", + "main": "index.js", + "devDependencies": { + "tape": "~2.3.2" + }, + "scripts": { + "test": "tape test/*.js test/server/*.js" + }, + "repository": { + "type": "git", + "url": "git://github.com/substack/typedarray.git" + }, + "homepage": "https://github.com/substack/typedarray", + "keywords": [ + "ArrayBuffer", + "DataView", + "Float32Array", + "Float64Array", + "Int8Array", + "Int16Array", + "Int32Array", + "Uint8Array", + "Uint8ClampedArray", + "Uint16Array", + "Uint32Array", + "typed", + "array", + "polyfill" + ], + "author": { + "name": "James Halliday", + "email": "mail@substack.net", + "url": "http://substack.net" + }, + "license": "MIT", + "testling": { + "files": "test/*.js", + "browsers": [ + "ie/6..latest", + "firefox/16..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest", + "android-browser/4.2..latest" + ] + } +} diff --git a/node_modules/typedarray/readme.markdown b/node_modules/typedarray/readme.markdown new file mode 100644 index 0000000..d18f6f7 --- /dev/null +++ b/node_modules/typedarray/readme.markdown @@ -0,0 +1,61 @@ +# typedarray + +TypedArray polyfill ripped from [this +module](https://raw.github.com/inexorabletash/polyfill). 
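An aside before the readme's own example: `index.js` above also polyfills `DataView`, whose getters and setters default to big-endian and reverse bytes only when the requested endianness differs from the detected storage order. A short sketch of that behavior (illustrative, not part of the package):

```js
var TA = require('typedarray');
var view = new TA.DataView(new TA.ArrayBuffer(4));

view.setUint16(0, 0x1234);            // big-endian by default
console.log(view.getUint8(0));        // 18 (0x12): high byte stored first
console.log(view.getUint16(0, true)); // 13330 (0x3412): same two bytes read little-endian
```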
+ +[![build status](https://secure.travis-ci.org/substack/typedarray.png)](http://travis-ci.org/substack/typedarray) + +[![testling badge](https://ci.testling.com/substack/typedarray.png)](https://ci.testling.com/substack/typedarray) + +# example + +``` js +var Uint8Array = require('typedarray').Uint8Array; +var ua = new Uint8Array(5); +ua[1] = 256 + 55; +console.log(ua[1]); +``` + +output: + +``` +55 +``` + +# methods + +``` js +var TA = require('typedarray') +``` + +The `TA` object has the following constructors: + +* TA.ArrayBuffer +* TA.DataView +* TA.Float32Array +* TA.Float64Array +* TA.Int8Array +* TA.Int16Array +* TA.Int32Array +* TA.Uint8Array +* TA.Uint8ClampedArray +* TA.Uint16Array +* TA.Uint32Array + +# install + +With [npm](https://npmjs.org) do: + +``` +npm install typedarray +``` + +To use this module in the browser, compile with +[browserify](http://browserify.org) +or download a UMD build from browserify CDN: + +http://wzrd.in/standalone/typedarray@latest + +# license + +MIT diff --git a/node_modules/typedarray/test/server/undef_globals.js b/node_modules/typedarray/test/server/undef_globals.js new file mode 100644 index 0000000..425950f --- /dev/null +++ b/node_modules/typedarray/test/server/undef_globals.js @@ -0,0 +1,19 @@ +var test = require('tape'); +var vm = require('vm'); +var fs = require('fs'); +var src = fs.readFileSync(__dirname + '/../../index.js', 'utf8'); + +test('u8a without globals', function (t) { + var c = { + module: { exports: {} }, + }; + c.exports = c.module.exports; + vm.runInNewContext(src, c); + var TA = c.module.exports; + var ua = new(TA.Uint8Array)(5); + + t.equal(ua.length, 5); + ua[1] = 256 + 55; + t.equal(ua[1], 55); + t.end(); +}); diff --git a/node_modules/typedarray/test/tarray.js b/node_modules/typedarray/test/tarray.js new file mode 100644 index 0000000..df596a3 --- /dev/null +++ b/node_modules/typedarray/test/tarray.js @@ -0,0 +1,10 @@ +var TA = require('../'); +var test = require('tape'); + +test('tiny u8a test', function (t) { + var ua = new(TA.Uint8Array)(5); + t.equal(ua.length, 5); + ua[1] = 256 + 55; + t.equal(ua[1], 55); + t.end(); +}); diff --git a/node_modules/util-deprecate/History.md b/node_modules/util-deprecate/History.md new file mode 100644 index 0000000..acc8675 --- /dev/null +++ b/node_modules/util-deprecate/History.md @@ -0,0 +1,16 @@ + +1.0.2 / 2015-10-07 +================== + + * use try/catch when checking `localStorage` (#3, @kumavis) + +1.0.1 / 2014-11-25 +================== + + * browser: use `console.warn()` for deprecation calls + * browser: more jsdocs + +1.0.0 / 2014-04-30 +================== + + * initial commit diff --git a/node_modules/util-deprecate/LICENSE b/node_modules/util-deprecate/LICENSE new file mode 100644 index 0000000..6a60e8c --- /dev/null +++ b/node_modules/util-deprecate/LICENSE @@ -0,0 +1,24 @@ +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/README.md b/node_modules/util-deprecate/README.md new file mode 100644 index 0000000..75622fa --- /dev/null +++ b/node_modules/util-deprecate/README.md @@ -0,0 +1,53 @@ +util-deprecate +============== +### The Node.js `util.deprecate()` function with browser support + +In Node.js, this module simply re-exports the `util.deprecate()` function. + +In the web browser (i.e. via browserify), a browser-specific implementation +of the `util.deprecate()` function is used. + + +## API + +A `deprecate()` function is the only thing exposed by this module. + +``` javascript +// setup: +exports.foo = deprecate(foo, 'foo() is deprecated, use bar() instead'); + + +// users see: +foo(); +// foo() is deprecated, use bar() instead +foo(); +foo(); +``` + + +## License + +(The MIT License) + +Copyright (c) 2014 Nathan Rajlich + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/node_modules/util-deprecate/browser.js b/node_modules/util-deprecate/browser.js new file mode 100644 index 0000000..549ae2f --- /dev/null +++ b/node_modules/util-deprecate/browser.js @@ -0,0 +1,67 @@ + +/** + * Module exports. + */ + +module.exports = deprecate; + +/** + * Mark that a method should not be used. + * Returns a modified function which warns once by default. + * + * If `localStorage.noDeprecation = true` is set, then it is a no-op. + * + * If `localStorage.throwDeprecation = true` is set, then deprecated functions + * will throw an Error when invoked. + * + * If `localStorage.traceDeprecation = true` is set, then deprecated functions + * will invoke `console.trace()` instead of `console.error()`. 
+ * + * @param {Function} fn - the function to deprecate + * @param {String} msg - the string to print to the console when `fn` is invoked + * @returns {Function} a new "deprecated" version of `fn` + * @api public + */ + +function deprecate (fn, msg) { + if (config('noDeprecation')) { + return fn; + } + + var warned = false; + function deprecated() { + if (!warned) { + if (config('throwDeprecation')) { + throw new Error(msg); + } else if (config('traceDeprecation')) { + console.trace(msg); + } else { + console.warn(msg); + } + warned = true; + } + return fn.apply(this, arguments); + } + + return deprecated; +} + +/** + * Checks `localStorage` for boolean values for the given `name`. + * + * @param {String} name + * @returns {Boolean} + * @api private + */ + +function config (name) { + // accessing global.localStorage can trigger a DOMException in sandboxed iframes + try { + if (!global.localStorage) return false; + } catch (_) { + return false; + } + var val = global.localStorage[name]; + if (null == val) return false; + return String(val).toLowerCase() === 'true'; +} diff --git a/node_modules/util-deprecate/node.js b/node_modules/util-deprecate/node.js new file mode 100644 index 0000000..5e6fcff --- /dev/null +++ b/node_modules/util-deprecate/node.js @@ -0,0 +1,6 @@ + +/** + * For Node.js, simply re-export the core `util.deprecate` function. + */ + +module.exports = require('util').deprecate; diff --git a/node_modules/util-deprecate/package.json b/node_modules/util-deprecate/package.json new file mode 100644 index 0000000..2e79f89 --- /dev/null +++ b/node_modules/util-deprecate/package.json @@ -0,0 +1,27 @@ +{ + "name": "util-deprecate", + "version": "1.0.2", + "description": "The Node.js `util.deprecate()` function with browser support", + "main": "node.js", + "browser": "browser.js", + "scripts": { + "test": "echo \"Error: no test specified\" && exit 1" + }, + "repository": { + "type": "git", + "url": "git://github.com/TooTallNate/util-deprecate.git" + }, + "keywords": [ + "util", + "deprecate", + "browserify", + "browser", + "node" + ], + "author": "Nathan Rajlich (http://n8.io/)", + "license": "MIT", + "bugs": { + "url": "https://github.com/TooTallNate/util-deprecate/issues" + }, + "homepage": "https://github.com/TooTallNate/util-deprecate" +} diff --git a/node_modules/xtend/.jshintrc b/node_modules/xtend/.jshintrc new file mode 100644 index 0000000..77887b5 --- /dev/null +++ b/node_modules/xtend/.jshintrc @@ -0,0 +1,30 @@ +{ + "maxdepth": 4, + "maxstatements": 200, + "maxcomplexity": 12, + "maxlen": 80, + "maxparams": 5, + + "curly": true, + "eqeqeq": true, + "immed": true, + "latedef": false, + "noarg": true, + "noempty": true, + "nonew": true, + "undef": true, + "unused": "vars", + "trailing": true, + + "quotmark": true, + "expr": true, + "asi": true, + + "browser": false, + "esnext": true, + "devel": false, + "node": false, + "nonstandard": false, + + "predef": ["require", "module", "__dirname", "__filename"] +} diff --git a/node_modules/xtend/LICENSE b/node_modules/xtend/LICENSE new file mode 100644 index 0000000..0099f4f --- /dev/null +++ b/node_modules/xtend/LICENSE @@ -0,0 +1,20 @@ +The MIT License (MIT) +Copyright (c) 2012-2014 Raynos. 
+ +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/node_modules/xtend/README.md b/node_modules/xtend/README.md new file mode 100644 index 0000000..4a2703c --- /dev/null +++ b/node_modules/xtend/README.md @@ -0,0 +1,32 @@ +# xtend + +[![browser support][3]][4] + +[![locked](http://badges.github.io/stability-badges/dist/locked.svg)](http://github.com/badges/stability-badges) + +Extend like a boss + +xtend is a basic utility library which allows you to extend an object by appending all of the properties from each object in a list. When there are identical properties, the right-most property takes precedence. + +## Examples + +```js +var extend = require("xtend") + +// extend returns a new object. Does not mutate arguments +var combination = extend({ + a: "a", + b: "c" +}, { + b: "b" +}) +// { a: "a", b: "b" } +``` + +## Stability status: Locked + +## MIT Licensed + + + [3]: http://ci.testling.com/Raynos/xtend.png + [4]: http://ci.testling.com/Raynos/xtend diff --git a/node_modules/xtend/immutable.js b/node_modules/xtend/immutable.js new file mode 100644 index 0000000..94889c9 --- /dev/null +++ b/node_modules/xtend/immutable.js @@ -0,0 +1,19 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend() { + var target = {} + + for (var i = 0; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/node_modules/xtend/mutable.js b/node_modules/xtend/mutable.js new file mode 100644 index 0000000..72debed --- /dev/null +++ b/node_modules/xtend/mutable.js @@ -0,0 +1,17 @@ +module.exports = extend + +var hasOwnProperty = Object.prototype.hasOwnProperty; + +function extend(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] + + for (var key in source) { + if (hasOwnProperty.call(source, key)) { + target[key] = source[key] + } + } + } + + return target +} diff --git a/node_modules/xtend/package.json b/node_modules/xtend/package.json new file mode 100644 index 0000000..f7a39d1 --- /dev/null +++ b/node_modules/xtend/package.json @@ -0,0 +1,55 @@ +{ + "name": "xtend", + "version": "4.0.2", + "description": "extend like a boss", + "keywords": [ + "extend", + "merge", + "options", + "opts", + "object", + "array" + ], + "author": "Raynos ", + "repository": "git://github.com/Raynos/xtend.git", + "main": "immutable", + "scripts": { + "test": "node test" + }, + "dependencies": {}, + "devDependencies": { + 
"tape": "~1.1.0" + }, + "homepage": "https://github.com/Raynos/xtend", + "contributors": [ + { + "name": "Jake Verbaten" + }, + { + "name": "Matt Esch" + } + ], + "bugs": { + "url": "https://github.com/Raynos/xtend/issues", + "email": "raynos2@gmail.com" + }, + "license": "MIT", + "testling": { + "files": "test.js", + "browsers": [ + "ie/7..latest", + "firefox/16..latest", + "firefox/nightly", + "chrome/22..latest", + "chrome/canary", + "opera/12..latest", + "opera/next", + "safari/5.1..latest", + "ipad/6.0..latest", + "iphone/6.0..latest" + ] + }, + "engines": { + "node": ">=0.4" + } +} diff --git a/node_modules/xtend/test.js b/node_modules/xtend/test.js new file mode 100644 index 0000000..b895b42 --- /dev/null +++ b/node_modules/xtend/test.js @@ -0,0 +1,103 @@ +var test = require("tape") +var extend = require("./") +var mutableExtend = require("./mutable") + +test("merge", function(assert) { + var a = { a: "foo" } + var b = { b: "bar" } + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("replace", function(assert) { + var a = { a: "foo" } + var b = { a: "bar" } + + assert.deepEqual(extend(a, b), { a: "bar" }) + assert.end() +}) + +test("undefined", function(assert) { + var a = { a: undefined } + var b = { b: "foo" } + + assert.deepEqual(extend(a, b), { a: undefined, b: "foo" }) + assert.deepEqual(extend(b, a), { a: undefined, b: "foo" }) + assert.end() +}) + +test("handle 0", function(assert) { + var a = { a: "default" } + var b = { a: 0 } + + assert.deepEqual(extend(a, b), { a: 0 }) + assert.deepEqual(extend(b, a), { a: "default" }) + assert.end() +}) + +test("is immutable", function (assert) { + var record = {} + + extend(record, { foo: "bar" }) + assert.equal(record.foo, undefined) + assert.end() +}) + +test("null as argument", function (assert) { + var a = { foo: "bar" } + var b = null + var c = void 0 + + assert.deepEqual(extend(b, a, c), { foo: "bar" }) + assert.end() +}) + +test("mutable", function (assert) { + var a = { foo: "bar" } + + mutableExtend(a, { bar: "baz" }) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("null prototype", function(assert) { + var a = { a: "foo" } + var b = Object.create(null) + b.b = "bar"; + + assert.deepEqual(extend(a, b), { a: "foo", b: "bar" }) + assert.end() +}) + +test("null prototype mutable", function (assert) { + var a = { foo: "bar" } + var b = Object.create(null) + b.bar = "baz"; + + mutableExtend(a, b) + + assert.equal(a.bar, "baz") + assert.end() +}) + +test("prototype pollution", function (assert) { + var a = {} + var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' + + assert.strictEqual(a.oops, undefined) + extend({}, maliciousPayload) + assert.strictEqual(a.oops, undefined) + assert.end() +}) + +test("prototype pollution mutable", function (assert) { + var a = {} + var maliciousPayload = '{"__proto__":{"oops":"It works!"}}' + + assert.strictEqual(a.oops, undefined) + mutableExtend({}, maliciousPayload) + assert.strictEqual(a.oops, undefined) + assert.end() +}) diff --git a/public/video/videoBackground.mp4 b/public/video/videoBackground.mp4 new file mode 100644 index 0000000..f8f5583 Binary files /dev/null and b/public/video/videoBackground.mp4 differ diff --git a/public/video/videoMilkyWay.mp4 b/public/video/videoMilkyWay.mp4 new file mode 100644 index 0000000..7daad31 Binary files /dev/null and b/public/video/videoMilkyWay.mp4 differ diff --git a/views/futur.ejs b/views/futur.ejs new file mode 100644 index 0000000..a0e17d1 --- /dev/null +++ b/views/futur.ejs @@ -0,0 +1,17 @@ +<%- 
include('includes/head.ejs', {Title: 'futur'}) %> + + + + <%- include('includes/header.ejs') %> +
+ +
+

+ This part of the site will be available after the BDE campaign +

+
+
+ +