Demo for query storing
Change-Id: I947bcac841992c3f6cfd01ab337c265b0d01cb70
diff --git a/node_modules/raw-body/index.js b/node_modules/raw-body/index.js
new file mode 100644
index 0000000..e3c00aa
--- /dev/null
+++ b/node_modules/raw-body/index.js
@@ -0,0 +1,151 @@
+var bytes = require('bytes')
+
+// NOTE: the trailing slash is not a typo
+var StringDecoder = require('string_decoder/').StringDecoder
+
/**
 * Drain `stream` completely and hand the result to `done` — as a single
 * Buffer, or as a decoded string when an encoding is requested.
 *
 * @param {Stream} stream - readable stream to consume; its encoding must
 *   NOT be set (chunks must arrive as Buffers — see the assertion below)
 * @param {Object|Function|true} [options]
 *   - `limit`: max byte count, as a number or a `bytes`-style string ('1mb')
 *   - `length`: expected byte count (e.g. Content-Length); mismatch -> 400
 *   - `encoding`: string encoding, or `true` for utf8
 *   Passing a function here means it is the `done` callback; passing `true`
 *   is shorthand for `{ encoding: 'utf8' }`.
 * @param {Function} [done] - callback `(err, result)`; may instead be
 *   supplied later through the returned thunk (yieldable / co-style use)
 * @returns {Function} thunk that accepts the callback
 */
module.exports = function (stream, options, done) {
  // Normalize the flexible argument forms:
  // (stream, done) / (stream) / (stream, true, done).
  if (typeof options === 'function') {
    done = options
    options = {}
  } else if (!options) {
    options = {}
  } else if (options === true) {
    options = {
      encoding: 'utf8'
    }
  }

  // convert the limit to an integer
  var limit = null
  if (typeof options.limit === 'number')
    limit = options.limit
  if (typeof options.limit === 'string')
    limit = bytes(options.limit)

  // convert the expected length to an integer
  var length = null
  if (options.length != null && !isNaN(options.length))
    length = parseInt(options.length, 10)

  // check the length and limit options.
  // note: we intentionally leave the stream paused,
  // so users should handle the stream themselves.
  if (limit !== null && length !== null && length > limit) {
    if (typeof stream.pause === 'function')
      stream.pause()

    // Fail asynchronously so `done` never fires before this function
    // returns — a thunk caller needs a chance to install its callback.
    process.nextTick(function () {
      var err = makeError('request entity too large', 'entity.too.large')
      err.status = err.statusCode = 413
      err.length = err.expected = length
      err.limit = limit
      done(err)
    })
    return defer
  }

  // streams1: assert request encoding is buffer.
  // streams2+: assert the stream encoding is buffer.
  // stream._decoder: streams1
  // state.encoding: streams2
  // state.decoder: streams2, specifically < 0.10.6
  var state = stream._readableState
  if (stream._decoder || (state && (state.encoding || state.decoder))) {
    if (typeof stream.pause === 'function')
      stream.pause()

    process.nextTick(function () {
      var err = makeError('stream encoding should not be set',
        'stream.encoding.set')
      // developer error
      err.status = err.statusCode = 500
      done(err)
    })
    return defer
  }

  // Running byte count, compared against `limit` and `length`.
  var received = 0
  // note: we delegate any invalid encodings to the constructor
  var decoder = options.encoding
    ? new StringDecoder(options.encoding === true ? 'utf8' : options.encoding)
    : null
  // Accumulator: a string when decoding, else an array of Buffers
  // to be concatenated once at the end.
  var buffer = decoder
    ? ''
    : []

  stream.on('data', onData)
  stream.once('end', onEnd)
  stream.once('error', onEnd)
  stream.once('close', cleanup)

  return defer

  // yieldable support
  function defer(fn) {
    done = fn
  }

  function onData(chunk) {
    received += chunk.length
    decoder
      ? buffer += decoder.write(chunk)
      : buffer.push(chunk)

    // Abort as soon as the count exceeds the limit. The offending chunk
    // has already been buffered, but cleanup() discards the accumulator.
    if (limit !== null && received > limit) {
      if (typeof stream.pause === 'function')
        stream.pause()
      var err = makeError('request entity too large', 'entity.too.large')
      err.status = err.statusCode = 413
      err.received = received
      err.limit = limit
      done(err)
      cleanup()
    }
  }

  // Shared terminal handler for both 'end' (err undefined) and 'error'.
  function onEnd(err) {
    if (err) {
      if (typeof stream.pause === 'function')
        stream.pause()
      done(err)
    } else if (length !== null && received !== length) {
      // Byte count disagrees with the declared length (client error).
      err = makeError('request size did not match content length',
        'request.size.invalid')
      err.status = err.statusCode = 400
      err.received = received
      err.length = err.expected = length
      done(err)
    } else {
      done(null, decoder
        ? buffer + decoder.end()
        : Buffer.concat(buffer)
      )
    }

    cleanup()
  }

  // Drop the accumulated data and detach every listener so the stream
  // holds no reference back to this call.
  function cleanup() {
    received = buffer = null

    stream.removeListener('data', onData)
    stream.removeListener('end', onEnd)
    stream.removeListener('error', onEnd)
    stream.removeListener('close', cleanup)
  }
}
+
+// to create serializable errors you must re-set message so
+// that it is enumerable and you must re configure the type
+// property so that is writable and enumerable
// Build an Error suitable for serialization: assigning `message` as an own
// property makes it enumerable (the inherited one is not), and `type` is
// defined explicitly so it is enumerable, writable, and configurable.
function makeError(message, type) {
  var error = new Error()
  error.message = message

  var typeDescriptor = {
    configurable: true,
    enumerable: true,
    value: type,
    writable: true
  }
  Object.defineProperty(error, 'type', typeDescriptor)

  return error
}