diff --git a/index.js b/index.js index dda51fb1..c880e3c2 100644 --- a/index.js +++ b/index.js @@ -444,17 +444,25 @@ function fastifyMultipart (fastify, options, done) { return this._buf } const fileChunks = [] + let err for await (const chunk of this.file) { fileChunks.push(chunk) if (throwFileSizeLimit && this.file.truncated) { - const err = new RequestFileTooLargeError() + err = new RequestFileTooLargeError() err.part = this onError(err) - throw err + fileChunks.length = 0 } } + if (err) { + // throwing from inside the async iterator + // would trigger file.destroy(); the stream + // is already managed by busboy, so we + // throw after iteration completes instead + throw err + } this._buf = Buffer.concat(fileChunks) return this._buf } @@ -541,10 +549,6 @@ function fastifyMultipart (fastify, options, done) { let part while ((part = await parts()) != null) { if (part.file) { - // part.file.truncated is true when a configured file size limit is reached - if (part.file.truncated && throwFileSizeLimit) { - throw new RequestFileTooLargeError() - } return part } } diff --git a/test/multipart-fileLimit.test.js b/test/multipart-fileLimit.test.js index dbbc60bb..5650a357 100644 --- a/test/multipart-fileLimit.test.js +++ b/test/multipart-fileLimit.test.js @@ -1,5 +1,6 @@ 'use strict' +const fs = require('fs') const crypto = require('crypto') const test = require('tap').test const FormData = require('form-data') @@ -49,7 +50,7 @@ test('should throw fileSize limitation error when consuming the stream', async f method: 'POST' } - const randomFileBuffer = Buffer.alloc(600000) + const randomFileBuffer = Buffer.alloc(600_000) crypto.randomFillSync(randomFileBuffer) const req = http.request(opts) @@ -67,6 +68,69 @@ test('should throw fileSize limitation error when consuming the stream', async f } }) +test('should throw fileSize limitation error when consuming the stream MBs', async function (t) { + t.plan(4) + + const fastify = Fastify() + t.teardown(fastify.close.bind(fastify)) + + 
fastify.register(multipart, { + throwFileSizeLimit: true, + limits: { + fileSize: 5_000_000 // 5MB + } + }) + + fastify.post('/', async function (req, reply) { + t.ok(req.isMultipart()) + + const part = await req.file() + t.pass('the file is not consumed yet') + + try { + await part.toBuffer() + t.fail('it should throw') + } catch (error) { + t.ok(error) + reply.send(error) + } + }) + + await fastify.listen({ port: 0 }) + + // request + const form = new FormData() + const opts = { + hostname: '127.0.0.1', + port: fastify.server.address().port, + path: '/', + headers: form.getHeaders(), + method: 'POST' + } + + const randomFileBuffer = Buffer.alloc(15_000_000) + crypto.randomFillSync(randomFileBuffer) + + const tmpFile = 'test/random-file' + fs.writeFileSync(tmpFile, randomFileBuffer) + + const req = http.request(opts) + form.append('upload', fs.createReadStream(tmpFile)) + + form.pipe(req) + + try { + const [res] = await once(req, 'response') + t.equal(res.statusCode, 413) + res.resume() + await once(res, 'end') + + fs.unlinkSync(tmpFile) + } catch (error) { + t.error(error, 'request') + } +}) + test('should NOT throw fileSize limitation error when consuming the stream', async function (t) { t.plan(5)