Skip to content

Commit

Permalink
feat breaking: throw file size limit on toBuffer calls (#265)
Browse files Browse the repository at this point in the history
* add toBuffer aligned to throwFileSizeLimit

* add docs
  • Loading branch information
Eomm authored Sep 14, 2021
1 parent 6e77e5e commit a62116f
Show file tree
Hide file tree
Showing 3 changed files with 155 additions and 1 deletion.
14 changes: 13 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -93,7 +93,10 @@ fastify.register(require('fastify-multipart'), {
**Note**: if the file stream that is provided by `data.file` is not consumed, like in the example below with the usage of pump, the promise will not be fulfilled at the end of the multipart processing.
This behavior is inherited from [busboy](https://github.com/mscdex/busboy).

**Note**: if you set a `fileSize` limit and you want to know if the file limit was reached you can listen to `data.file.on('limit')` or check at the end of the stream the property `data.file.truncated`.
**Note**: if you set a `fileSize` limit and you want to know whether the file limit was reached, you can:
- listen to `data.file.on('limit')`
- or check at the end of the stream the property `data.file.truncated`
- or call `data.file.toBuffer()` and wait for the error to be thrown

```js
const data = await req.file()
Expand All @@ -103,6 +106,15 @@ if (data.file.truncated) {
// before the `limits.fileSize` has been reached
reply.send(new fastify.multipartErrors.FilesLimitError());
}

// OR
const data = await req.file()
try {
const buffer = await data.toBuffer()
} catch (err) {
// fileSize limit reached!
}

```

Additionally, you can pass per-request options to the `req.file`, `req.files`, `req.saveRequestFiles` or `req.multipartIterator` function.
Expand Down
9 changes: 9 additions & 0 deletions index.js
Original file line number Diff line number Diff line change
Expand Up @@ -321,6 +321,7 @@ function fastifyMultipart (fastify, options, done) {
opts
])

this.log.trace({ busboyOptions }, 'Providing options to busboy')
const bb = busboy(busboyOptions)

request.on('close', cleanup)
Expand Down Expand Up @@ -406,6 +407,14 @@ function fastifyMultipart (fastify, options, done) {
const fileChunks = []
for await (const chunk of this.file) {
fileChunks.push(chunk)

if (throwFileSizeLimit && this.file.truncated) {
const err = new RequestFileTooLargeError()
err.part = this

onError(err)
throw err
}
}
this._buf = Buffer.concat(fileChunks)
return this._buf
Expand Down
133 changes: 133 additions & 0 deletions test/multipart-fileLimit.test.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,133 @@
'use strict'

const util = require('util')
const crypto = require('crypto')
const test = require('tap').test
const FormData = require('form-data')
const Fastify = require('fastify')
const multipart = require('..')
const http = require('http')
const stream = require('stream')
const pump = util.promisify(stream.pipeline)
const EventEmitter = require('events')
const { once } = EventEmitter

test('should throw fileSize limitation error when consuming the stream', async function (t) {
  t.plan(4)

  const fastify = Fastify()
  t.teardown(fastify.close.bind(fastify))

  // With throwFileSizeLimit enabled, exceeding `fileSize` makes toBuffer() reject.
  fastify.register(multipart, {
    throwFileSizeLimit: true,
    limits: {
      fileSize: 524288
    }
  })

  fastify.post('/', async function (req, reply) {
    t.ok(req.isMultipart())

    const part = await req.file()
    t.pass('the file is not consumed yet')

    try {
      await part.toBuffer()
      t.fail('it should throw')
    } catch (error) {
      t.ok(error)
      reply.send(error)
    }
  })

  await fastify.listen(0)

  // Build a multipart upload whose payload (600000 bytes) exceeds the 524288-byte limit.
  const formData = new FormData()
  const requestOptions = {
    protocol: 'http:',
    hostname: 'localhost',
    port: fastify.server.address().port,
    path: '/',
    headers: formData.getHeaders(),
    method: 'POST'
  }

  // randomFillSync returns the buffer it filled, so allocation and fill can be fused.
  const oversizedPayload = crypto.randomFillSync(Buffer.alloc(600000))

  const clientRequest = http.request(requestOptions)
  formData.append('upload', oversizedPayload)

  pump(formData, clientRequest)

  try {
    const [response] = await once(clientRequest, 'response')
    t.equal(response.statusCode, 413)
    response.resume()
    await once(response, 'end')
  } catch (error) {
    t.error(error, 'request')
  }
})

test('should NOT throw fileSize limitation error when consuming the stream', async function (t) {
  t.plan(5)

  const fastify = Fastify()
  t.teardown(fastify.close.bind(fastify))

  // With throwFileSizeLimit disabled, toBuffer() resolves with truncated content
  // instead of rejecting when the `fileSize` limit is hit.
  fastify.register(multipart, {
    throwFileSizeLimit: false,
    limits: {
      fileSize: 524288
    }
  })
  const fileInputLength = 600000

  fastify.post('/', async function (req, reply) {
    t.ok(req.isMultipart())

    const part = await req.file()
    t.pass('the file is not consumed yet')

    try {
      const buffer = await part.toBuffer()
      t.ok(part.file.truncated)
      t.notSame(buffer.length, fileInputLength)
      reply.send(new fastify.multipartErrors.FilesLimitError())
    } catch (error) {
      t.fail('it should not throw')
    }
  })

  await fastify.listen(0)

  // Build a multipart upload larger than the configured limit.
  const formData = new FormData()
  const requestOptions = {
    protocol: 'http:',
    hostname: 'localhost',
    port: fastify.server.address().port,
    path: '/',
    headers: formData.getHeaders(),
    method: 'POST'
  }

  // randomFillSync returns the buffer it filled, so allocation and fill can be fused.
  const oversizedPayload = crypto.randomFillSync(Buffer.alloc(fileInputLength))

  const clientRequest = http.request(requestOptions)
  formData.append('upload', oversizedPayload)

  pump(formData, clientRequest)

  try {
    const [response] = await once(clientRequest, 'response')
    t.equal(response.statusCode, 413)
    response.resume()
    await once(response, 'end')
  } catch (error) {
    t.error(error, 'request')
  }
})

0 comments on commit a62116f

Please sign in to comment.