blob: e3c00aa8a378b06d2ca83d3464b480ce6f227b1b [file] [log] [blame]
var bytes = require('bytes')

// NOTE: the trailing slash is not a typo
var StringDecoder = require('string_decoder/').StringDecoder
5
/**
 * Read the entire contents of a readable stream into a Buffer or string,
 * enforcing an optional byte limit and an optional expected length.
 *
 * @param {Stream} stream - readable stream to drain
 * @param {Object|Function|boolean} [options] - a function here is treated as
 *   the callback; `true` is shorthand for `{ encoding: 'utf8' }`
 * @param {number|string} [options.limit] - max bytes accepted; strings
 *   (e.g. '1mb') are parsed by the `bytes` module
 * @param {number|string} [options.length] - expected byte length; a final
 *   mismatch yields a 400 error
 * @param {string|boolean} [options.encoding] - result encoding; `true`
 *   means 'utf8'. When unset, the result is a Buffer.
 * @param {Function} [done] - callback `(err, result)`; may instead be
 *   registered later through the returned `defer` function
 * @returns {Function} `defer` - call with a callback to (re)register `done`
 *   (yieldable/thunk support)
 */
module.exports = function (stream, options, done) {
  // Flexible signature: (stream, done) / (stream) / (stream, true, done).
  if (typeof options === 'function') {
    done = options
    options = {}
  } else if (!options) {
    options = {}
  } else if (options === true) {
    options = {
      encoding: 'utf8'
    }
  }

  // convert the limit to an integer
  // (string limits like '1mb' are parsed by the `bytes` module)
  var limit = null
  if (typeof options.limit === 'number')
    limit = options.limit
  if (typeof options.limit === 'string')
    limit = bytes(options.limit)

  // convert the expected length to an integer
  // (global isNaN is used deliberately: options.length may be a numeric string)
  var length = null
  if (options.length != null && !isNaN(options.length))
    length = parseInt(options.length, 10)

  // check the length and limit options.
  // note: we intentionally leave the stream paused,
  // so users should handle the stream themselves.
  if (limit !== null && length !== null && length > limit) {
    if (typeof stream.pause === 'function')
      stream.pause()

    // Fail on the next tick so `defer` is returned first, giving
    // thunk-style callers a chance to register `done` before the error fires.
    process.nextTick(function () {
      var err = makeError('request entity too large', 'entity.too.large')
      err.status = err.statusCode = 413
      err.length = err.expected = length
      err.limit = limit
      done(err)
    })
    return defer
  }

  // streams1: assert request encoding is buffer.
  // streams2+: assert the stream encoding is buffer.
  // stream._decoder: streams1
  // state.encoding: streams2
  // state.decoder: streams2, specifically < 0.10.6
  var state = stream._readableState
  if (stream._decoder || (state && (state.encoding || state.decoder))) {
    if (typeof stream.pause === 'function')
      stream.pause()

    // Same next-tick deferral as above, for the same reason.
    process.nextTick(function () {
      var err = makeError('stream encoding should not be set',
        'stream.encoding.set')
      // developer error
      err.status = err.statusCode = 500
      done(err)
    })
    return defer
  }

  // Accumulate as a string via StringDecoder when an encoding was requested,
  // otherwise as an array of Buffers concatenated at the end.
  var received = 0
  // note: we delegate any invalid encodings to the constructor
  var decoder = options.encoding
    ? new StringDecoder(options.encoding === true ? 'utf8' : options.encoding)
    : null
  var buffer = decoder
    ? ''
    : []

  stream.on('data', onData)
  stream.once('end', onEnd)
  stream.once('error', onEnd)
  stream.once('close', cleanup)

  return defer

  // yieldable support
  // Calling the returned function replaces the completion callback,
  // letting yield/thunk consumers supply it after the fact.
  function defer(fn) {
    done = fn
  }

  // Append a chunk; abort with a 413 once `limit` is exceeded.
  function onData(chunk) {
    received += chunk.length
    decoder
      ? buffer += decoder.write(chunk)
      : buffer.push(chunk)

    if (limit !== null && received > limit) {
      if (typeof stream.pause === 'function')
        stream.pause()
      var err = makeError('request entity too large', 'entity.too.large')
      err.status = err.statusCode = 413
      err.received = received
      err.limit = limit
      done(err)
      // Detach listeners so no further chunks are processed after the error.
      cleanup()
    }
  }

  // Shared 'end'/'error' handler: deliver the stream error, a 400 on
  // length mismatch, or the assembled result.
  function onEnd(err) {
    if (err) {
      if (typeof stream.pause === 'function')
        stream.pause()
      done(err)
    } else if (length !== null && received !== length) {
      err = makeError('request size did not match content length',
        'request.size.invalid')
      err.status = err.statusCode = 400
      err.received = received
      err.length = err.expected = length
      done(err)
    } else {
      // decoder.end() flushes any partial multi-byte character left over.
      done(null, decoder
        ? buffer + decoder.end()
        : Buffer.concat(buffer)
      )
    }

    cleanup()
  }

  // Drop buffered data and detach all listeners (idempotent; also wired
  // to 'close' so an aborted stream still releases references).
  function cleanup() {
    received = buffer = null

    stream.removeListener('data', onData)
    stream.removeListener('end', onEnd)
    stream.removeListener('error', onEnd)
    stream.removeListener('close', cleanup)
  }
}
137
// Build an Error suitable for serialization: `message` is re-assigned so it
// becomes an enumerable own property, and `type` is defined explicitly as a
// writable, enumerable, configurable own property.
function makeError(message, type) {
  var err = new Error()

  // Assignment (rather than the constructor argument) makes `message`
  // an enumerable own property of the instance.
  err.message = message

  var typeDescriptor = {
    value: type,
    writable: true,
    enumerable: true,
    configurable: true
  }
  Object.defineProperty(err, 'type', typeDescriptor)

  return err
}