Cannot pipe s3 object readstream to PUT request

I have a simple scenario. I need to read an object from S3 and pipe its output to a PUT request. Here is my code using the request module.

// client.js
let AWS = require('aws-sdk')
let request = require('request')

let bucket = 'my_bucket'
let filename = 'path/to/file.zip'

let host = 'localhost'
let port = 8080

let s3 = new AWS.S3({
  . . .
})

let readStream = s3.getObject({
  Bucket: bucket,
  Key: filename
}).createReadStream()

let formData = {
  applicationType: 'my_app_type',
  applicationName: 'my_app_name',
  upload: {
    value: readStream,
    options: {
      filename: 'my_file_name.zip',
      contentType: 'application/zip'
    }
  }
}

request.put({
  url: 'http://' + host + ':' + port + '/bootstrap',
  formData: formData
}, function (error, response, body) {
  if (error) throw error
  console.log(body)
})

And here is my server.js code.

// server.js
let http = require('http')
let Busboy = require('busboy')
let events = require('events')
let fs = require('fs')

let host = 'localhost'
let port = 8080

let compressedCodeLocation = './code.zip'

let handleRequest = function (request, response) {
  let eventEmitter = new events.EventEmitter()
  let inputStreamWriter = fs.createWriteStream(compressedCodeLocation)
  inputStreamWriter.on('finish', function () {
    eventEmitter.emit('input.stream.saved')
  })
  let busboy = new Busboy({
    headers: request.headers
  })

  busboy.on('file', function (field, file) {
    file.pipe(inputStreamWriter)
  })
  busboy.on('field', function (field, val) {
    console.log(field + ': ' + val)
  })
  eventEmitter.on('input.stream.saved', function () {
    let stats = fs.statSync(compressedCodeLocation)
    response.statusCode = 200
    response.end(JSON.stringify(stats))
  })

  request.pipe(busboy)
}

let server = http.createServer(handleRequest)
server.listen(port, host, function () {
  console.log('Server started on ' + host + ':' + port)
})

let handleShutdown = function () {
  server.close(function () {
    console.log('Server stopped on ' + host + ':' + port)
  })
}
process.on('SIGTERM', handleShutdown)
process.on('SIGINT', handleShutdown)

The server receives the following headers:

{ host: 'localhost:8080',
  'content-type': 'multipart/form-data; boundary=--------------------------870259812928253745629174',
  'content-length': '465',
  connection: 'close' }

I am getting this error on the server side:

File [upload] got 58 bytes
events.js:160
      throw er; // Unhandled 'error' event
      ^

Error: Unexpected end of multipart data
    at /pots/cnc/node_modules/dicer/lib/Dicer.js:62:28
    at _combinedTickCallback (internal/process/next_tick.js:67:7)
    at process._tickCallback (internal/process/next_tick.js:98:9)

And the client receives the following error:

Error: read ECONNRESET
    at exports._errnoException (util.js:1018:11)
    at TCP.onread (net.js:568:26)

The funny thing is, if I save the file locally first and then call createReadStream on that local file, it works:

let formData = {
  ...
  upload: {
    value: fs.createReadStream(localPath + "/" + filename),
    options: {
      ...
    }
  }
};
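For reference, the full version of that workaround looks roughly like this (a sketch; the temp path is a placeholder, and the other variables are the same ones from client.js above):

// Sketch of the local-file workaround. Download the S3 object to disk first,
// then upload the local copy, whose size is known to the request module.
let fs = require('fs')

let localPath = '/tmp/my_file_name.zip' // placeholder temp location

let fileWriter = fs.createWriteStream(localPath)

s3.getObject({
  Bucket: bucket,
  Key: filename
}).createReadStream().pipe(fileWriter)

fileWriter.on('finish', function () {
  request.put({
    url: 'http://' + host + ':' + port + '/bootstrap',
    formData: {
      applicationType: 'my_app_type',
      applicationName: 'my_app_name',
      upload: {
        value: fs.createReadStream(localPath), // local file, size known up front
        options: {
          filename: 'my_file_name.zip',
          contentType: 'application/zip'
        }
      }
    }
  }, function (error, response, body) {
    if (error) throw error
    console.log(body)
  })
})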

2 Answers

I had trouble using request as well, but got worked fine for me. Try this:

const { S3 } = require('aws-sdk')
const got = require('got')
const FormData = require('form-data')

const s3 = new S3({
  . . .
})

const form = new FormData()
const readStream = s3.getObject({
  Bucket: bucket,
  Key: filename
}).createReadStream()
form.append('applicationType', 'my_app_type')
form.append('applicationName', 'my_app_name')
form.append('upload', readStream,{
  filename: 'my_file_name.zip',
  contentType: 'application/zip'
})
got.put('http://' + host + ':' + port + '/bootstrap', {body: form})

Solution 2

Another approach I found today is much simpler. Simply use the knownLength property to let the request module know the size of the file in advance.

upload: {
  value: readStream,
  options: {
    filename: 'my_file_name.zip',
    contentType: 'application/zip',
    knownLength: 423424
  }
}
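If you do not want to hardcode the size, a sketch of the same idea (assuming the same s3, bucket, filename, host and port variables from the question) is to ask S3 for the object size with headObject first and pass its ContentLength along:

// Sketch: look up the object size with headObject, then pass it as knownLength
// so request can set a correct Content-Length on the multipart upload.
s3.headObject({
  Bucket: bucket,
  Key: filename
}, function (error, data) {
  if (error) throw error

  request.put({
    url: 'http://' + host + ':' + port + '/bootstrap',
    formData: {
      applicationType: 'my_app_type',
      applicationName: 'my_app_name',
      upload: {
        value: s3.getObject({ Bucket: bucket, Key: filename }).createReadStream(),
        options: {
          filename: 'my_file_name.zip',
          contentType: 'application/zip',
          knownLength: data.ContentLength // size reported by S3
        }
      }
    }
  }, function (error, response, body) {
    if (error) throw error
    console.log(body)
  })
})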


Solution 1

This solution uses the request module itself, in case you do not want to introduce a new library into your code.

I solved this with the help of @metakungfu's answer. Please redirect any votes to his answer.

I needed to replace the request module's internal form with one built from the form-data module. As the request documentation states:

For advanced cases, you can access the form-data object itself via r.form().

Once the new form is set on my request (which I think rewrites the multipart boundary), I pipe my form data into the request. See Reference.

let AWS = require('aws-sdk')
let FormData = require('form-data')
let request = require('request')

let bucket = 'ppi-uploads'
let filename = 'introduction.zip'

let host = 'localhost'
let port = 8080

let s3 = new AWS.S3({
  . . .
})

let readStream = s3.getObject({
  Bucket: bucket,
  Key: filename
}).createReadStream()

let form = new FormData()
form.append('applicationType', 'html')
form.append('applicationName', 'introduction')
form.append('upload', readStream, {
  filename: 'introduction.zip',
  contentType: 'application/zip'
})

let putRequest = request.put({
  url: 'http://' + host + ':' + port + '/bootstrap',
  headers: form.getHeaders()
}, function (error, response, body) {
  if (error) throw error
  console.log(body)
})

form.pipe(putRequest)