Skip to content

Commit

Permalink
little bit more code coverage (#63)
Browse files Browse the repository at this point in the history
* add more unit tests

* dicer is a writable

* FileStream is an internally used pseudo-class which we call with the new operator; there is no way to write a unit test for it.

* fix typo

* test for decoder reset

* simplify

* simplify

* revert, use getLimit for max header Pairs
  • Loading branch information
Uzlopak authored Dec 4, 2021
1 parent 3d789bf commit e9bbade
Show file tree
Hide file tree
Showing 3 changed files with 140 additions and 10 deletions.
14 changes: 6 additions & 8 deletions lib/types/multipart.js
Original file line number Diff line number Diff line change
Expand Up @@ -33,9 +33,7 @@ function Multipart (boy, cfg) {
const parsedConType = cfg.parsedConType || []
const defCharset = cfg.defCharset || 'utf8'
const preservePath = cfg.preservePath
const fileopts = (typeof cfg.fileHwm === 'number'
? { highWaterMark: cfg.fileHwm }
: {})
const fileOpts = { highWaterMark: cfg.fileHwm }

for (i = 0, len = parsedConType.length; i < len; ++i) {
if (Array.isArray(parsedConType[i]) &&
Expand All @@ -62,6 +60,7 @@ function Multipart (boy, cfg) {
const filesLimit = getLimit(limits, 'files', Infinity)
const fieldsLimit = getLimit(limits, 'fields', Infinity)
const partsLimit = getLimit(limits, 'parts', Infinity)
const headerPairsLimit = getLimit(limits, 'headerPairs', 2000)

let nfiles = 0
let nfields = 0
Expand All @@ -78,10 +77,10 @@ function Multipart (boy, cfg) {

const parserCfg = {
boundary: boundary,
maxHeaderPairs: (limits && limits.headerPairs)
maxHeaderPairs: headerPairsLimit,
partHwm: fileOpts.highWaterMark,
highWaterMark: cfg.highWaterMark
}
if (fileopts.highWaterMark) { parserCfg.partHwm = fileopts.highWaterMark }
if (cfg.highWaterMark) { parserCfg.highWaterMark = cfg.highWaterMark }

this.parser = new Dicer(parserCfg)
this.parser.on('drain', function () {
Expand Down Expand Up @@ -170,7 +169,7 @@ function Multipart (boy, cfg) {
}

++nends
const file = new FileStream(fileopts)
const file = new FileStream(fileOpts)
curFile = file
file.on('end', function () {
--nends
Expand Down Expand Up @@ -288,7 +287,6 @@ function skipPart (part) {
}

function FileStream (opts) {
if (!(this instanceof FileStream)) { return new FileStream(opts) }
ReadableStream.call(this, opts)

this.bytesRead = 0
Expand Down
11 changes: 10 additions & 1 deletion test/decoder.spec.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
const { assert } = require('chai')
const { assert, expect } = require('chai')
const { Decoder } = require('../lib/utils')

describe('Decoder', () => {
Expand Down Expand Up @@ -76,4 +76,13 @@ describe('Decoder', () => {
assert.deepEqual(result, v.expected, msg)
})
})

it('reset sets internal buffer to undefined', () => {
const dec = new Decoder()
dec.write('Hello+world%2')

expect(dec.buffer).to.be.not.equal(undefined)
dec.reset()
expect(dec.buffer).to.be.equal(undefined)
})
})
125 changes: 124 additions & 1 deletion test/types-multipart.spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -98,6 +98,98 @@ describe('types-multipart', () => {
],
what: 'Fields and files (limits)'
},
{
source: [
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
].join('\r\n')
],
boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
limits: {
fields: 0
},
events: ['file'],
expected: [
['file', 'upload_file_0', 26, 0, '1k_a.dat', '7bit', 'application/octet-stream']
],
what: 'should not emit fieldsLimit if no field was sent'
},
{
source: [
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="file_name_0"',
'',
'super alpha file',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
].join('\r\n')
],
boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
limits: {
fields: 0
},
events: ['file', 'fieldsLimit'],
expected: [
['file', 'upload_file_0', 26, 0, '1k_a.dat', '7bit', 'application/octet-stream']
],
what: 'should respect fields limit of 0'
},
{
source: [
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="file_name_0"',
'',
'super alpha file',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="file_name_1"',
'',
'super beta file',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
].join('\r\n')
],
boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
limits: {
fields: 1
},
events: ['field', 'file', 'fieldsLimit'],
expected: [
['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain'],
['file', 'upload_file_0', 26, 0, '1k_a.dat', '7bit', 'application/octet-stream']
],
what: 'should respect fields limit of 1'
},
{
source: [
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="file_name_0"',
'',
'super alpha file',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
].join('\r\n')
],
boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
limits: {
files: 0
},
events: ['field'],
expected: [
['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain']
],
what: 'should not emit filesLimit if no file was sent'
},
{
source: [
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
Expand All @@ -116,10 +208,41 @@ describe('types-multipart', () => {
limits: {
files: 0
},
events: ['field', 'filesLimit'],
expected: [
['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain']
],
what: 'Fields and files (limits: 0 files)'
      what: 'should respect files limit of 0'
},
{
source: [
['-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="file_name_0"',
'',
'super alpha file',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_0"; filename="1k_a.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
'Content-Disposition: form-data; name="upload_file_b"; filename="1k_b.dat"',
'Content-Type: application/octet-stream',
'',
'ABCDEFGHIJKLMNOPQRSTUVWXYZ',
'-----------------------------paZqsnEHRufoShdX6fh0lUhXBP4k--'
].join('\r\n')
],
boundary: '---------------------------paZqsnEHRufoShdX6fh0lUhXBP4k',
limits: {
files: 1
},
events: ['field', 'file', 'filesLimit'],
expected: [
['field', 'file_name_0', 'super alpha file', false, false, '7bit', 'text/plain'],
['file', 'upload_file_0', 26, 0, '1k_a.dat', '7bit', 'application/octet-stream']
],
      what: 'should respect files limit of 1'
},
{
source: [
Expand Down

0 comments on commit e9bbade

Please sign in to comment.