Middleware to ease the handling of file uploads with extra data on Sails.js (Skipper); it relies on the FileManager service.
module.exports = injector.set ()->
  # Takes all input streams that follow the naming convention
  # inputName[file] (the file stream) and inputName[data] (its metadata),
  # combines them, and adds the result to req.body.inputName for easy access.
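  # Example (hypothetical field names): a form posting avatar[file] (the binary
  # stream) and avatar[data] (e.g. { alt: 'Profile pic' }) would end up, after
  # parsing, as req.body.avatar: the skipper file object (fd, etc.) merged with
  # the metadata.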
  fileParser = (req, res, next)->
    if req.is('multipart/form-data') and req._fileparser?
      do async =>
        streamNames = _.map(req._fileparser.upstreams, 'fieldName')
        try
          ###
          We start consuming the streams without waiting for them: each time a new
          stream is added we start its upload by calling scheduleUpload. An interval
          then checks every 100ms whether skipper has marked the parser as closed;
          once that is true we can safely wait for all the streams and resolve the
          promise, or reject it if the timeout fires first.
          ###
          uploadedFiles =
            if not _.isEmpty(streamNames)
              await new Promise((resolve, reject)->
                TIMEOUT = 20000
                {upstreams} = req._fileparser
                tasks = _.map(upstreams, FileManager.scheduleUpload)
                # Proxy the original push method so every stream added later is also
                # scheduled for upload and tracked in our task list.
                upstreams.push = (stream)->
                  tasks.push(FileManager.scheduleUpload(stream))
                  Array.prototype.push.call(upstreams, stream)
                startTime = (new Date()).getTime()
                intervalId = setInterval(=>
                  do async =>
                    if req._fileparser.closed
                      clearInterval(intervalId)
                      # Wait for every scheduled upload to finish before resolving
                      fileObjects = await Promise.all(tasks)
                      parsedFileFields = _(upstreams)
                        .map('fieldName')
                        .reduce((o, streamName, i)->
                          _.set(o, streamName, fileObjects[i][0])
                          return o
                        , {})
                      resolve(parsedFileFields)
                    else if (new Date()).getTime() - startTime > TIMEOUT
                      clearInterval(intervalId)
                      reject(new Error('Timeout triggered before the form was closed'))
                , 100)
              )
            else {}
        catch err
          console.log err
          return res.json({
            status: 'error'
            title: 'Server Error'
            msg: "There was an issue while uploading your images. Make sure they are .jpg/.png and don't exceed 4MB per file."
          })
        # Do a basic merge of the uploaded files and req.body so we can iterate over
        # file fields even when they have no associated data, i.e. an input like
        # image[file] without a matching image[data].
        tmpBody = _.extend({}, uploadedFiles, req.body)
        req.body = _.reduce(tmpBody, (o, v, k)->
          o[k] =
            if _.isArray(v)
              _.map(v, (item, i)->
                if _.isObject(item) then parseFileAndData(item, uploadedFiles[k]?[i]?.file)
                else item
              )
            else if _.isObject(v)
              parseFileAndData(v, uploadedFiles[k]?.file)
            else v
          return o
        , {})
        return next()
    else
      return next()

  # Merge an input's metadata (v.data) with its uploaded file object (if any).
  # Returns the merged object, or null when there is nothing usable to keep.
  parseFileAndData = (v, uploadedFile)->
    mergedImgObj =
      if v.data? and uploadedFile then _.extend({}, uploadedFile, v.data)
      else if v.data? then v.data
      else if uploadedFile? then uploadedFile
      else if v.file is '' then null
      else v
    if mergedImgObj and (mergedImgObj.id or mergedImgObj.fd or mergedImgObj.path)
      return mergedImgObj
    else
      return null

  return fileParser
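
A minimal wiring sketch, assuming the factory above is resolved by the project's injector into a plain connect-style middleware, and that FileManager.scheduleUpload returns a promise resolving to the array of files skipper wrote for that upstream (the code reads fileObjects[i][0]). In a standard Sails app the parser would be registered in config/http.js and ordered after skipper's bodyParser, since it depends on req._fileparser. The injector accessor and middleware name below are placeholders, not part of the gist.

# config/http.coffee (hypothetical wiring; adapt to how the injector is actually used)
fileParserMiddleware = injector.get('fileParser')   # assumed accessor, not shown in the gist

module.exports.http =
  middleware:
    fileParser: fileParserMiddleware
    # Abbreviated order: skipper's bodyParser must run first so req._fileparser exists,
    # then fileParser can merge inputName[file] and inputName[data] into req.body.
    order: [
      'cookieParser'
      'bodyParser'
      'fileParser'
      'router'
      'www'
      'favicon'
    ]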