I'd like to upload a file to AWS S3 via the POST interface, but I can't get it to work. I've already made it work with PUT and getSignedUrl, but unfortunately that interface doesn't allow direct file size restrictions. So I tried the POST interface instead, because there I can use the 'content-length-range' condition.
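For reference, here's roughly what the working PUT variant looked like (a minimal sketch using the same config object as below; note that nothing in these params can restrict the uploaded file size, which is the whole problem):

const aws = require('aws-sdk');
const s3 = new aws.S3({signatureVersion: 'v4', region: 'eu-central-1'});

// getSignedUrl for putObject: the client PUTs the raw file body to this URL.
// There is no field here for a size condition.
const url = s3.getSignedUrl('putObject', {
  Bucket: config.aws.bucket,
  Key: filePath,
  Expires: config.aws.expire
});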
Here's my request signature:
const aws = require('aws-sdk');

aws.config.update({
  signatureVersion: 'v4',
  region: 'eu-central-1',
  accessKeyId: config.aws.keyId,
  secretAccessKey: config.aws.keySecret
});

const s3 = new aws.S3();

return new Promise((resolve, reject) => {
  const params = {
    Bucket: config.aws.bucket,
    Fields: {
      key: filePath
    },
    Expires: config.aws.expire,
    Conditions: [
      ['acl', 'public-read'],
      ['content-length-range', 0, 10000000] // 10 MB
    ]
  };
  // createPresignedPost hands back the POST URL and the signed form fields
  s3.createPresignedPost(params, (err, data) => {
    if (err) { return reject(err); }
    resolve(data);
  });
});
This part seems to be OK, but I can't use the returned signature to upload a file to S3.
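For context, the 'data' that createPresignedPost hands back is an object with the POST URL and the signed form fields, roughly shaped like this (values abbreviated, bucket name made up):

{
  url: 'https://s3.eu-central-1.amazonaws.com/my-bucket',
  fields: {
    key: 'files/new/test.pdf',
    bucket: 'my-bucket',
    'X-Amz-Algorithm': 'AWS4-HMAC-SHA256',
    'X-Amz-Credential': '...',
    'X-Amz-Date': '...',
    Policy: '...',
    'X-Amz-Signature': '...'
  }
}

Every one of these fields has to be sent as a form field alongside the file.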
Here are a few other attempts I made:
request.post({
  url: payload.url,
  body: payload,
  form: fs.createReadStream(__dirname + `/${filePath}`)
}, (err, response, body) => {});
Another attempt:
let formData = payload;
formData.file = fs.createReadStream(__dirname + `/${filePath}`);

request.post({
  url: payload.url,
  formData: formData
}, (err, response, body) => {});
With fetch:
const fetch = require('node-fetch');
const FormData = require('form-data');

const form = new FormData();
const fields = payload.fields;
for (const field in payload.fields) {
  form.append(field, payload.fields[field]);
}
form.append('file', fs.createReadStream(__dirname + `/${filePath}`));

fetch(payload.url, {
  method: 'POST',
  body: form.toString(),
  headers: form.getHeaders()
})
.then((response) => {})
.catch((err) => {});
None of these work: they either respond with 'Bad request' or 'Badly formed request'. One of them did upload something to the server, but the file was unreadable.
How can I add a max file size limit to an S3 bucket?
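For what it's worth, the Policy field that createPresignedPost signs is just a base64-encoded JSON document, and the size limit lives in its conditions; decoded, it looks roughly like this (expiration and bucket are made-up example values):

{
  "expiration": "2017-09-04T12:00:00Z",
  "conditions": [
    {"bucket": "my-bucket"},
    {"key": "files/new/test.pdf"},
    {"acl": "public-read"},
    ["content-length-range", 0, 10000000]
  ]
}

S3 checks the uploaded file against content-length-range and rejects anything outside that range.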
Update:
I think I've moved forward just a little. With this code, I get the error response: 'You must provide the Content-Length HTTP header.'
const fetch = require('node-fetch');
const FormData = require('form-data');

const form = new FormData();
form.append('acl', 'public-read');
for (const field in payload.fields) {
  form.append(field, payload.fields[field]);
}
form.append('file', fs.createReadStream(__dirname + `/${filePath}`));

fetch(payload.url, {
  method: 'POST',
  body: form,
  headers: form.getHeaders()
})
.then((response) => { return response.text(); })
.then((payload) => { console.log(payload); })
.catch((err) => console.log(`Error: ${err}`));
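The multipart stream has no length known up front, so nothing sets Content-Length automatically; form-data can compute it for file streams via form.getLength, which is exactly what the working code below does (a minimal sketch):

form.getLength((err, length) => {
  // length covers the whole multipart body, boundaries included
  fetch(payload.url, {
    method: 'POST',
    body: form,
    headers: {'Content-Length': length}
  });
});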
Finally it works. Here's the code in case anyone has the same problem.

A few things to note:

- You have to append the acl field to the form before the file; if you append the file first and the acl later, it will fail.
- You have to set signatureVersion to 'v4' even in the S3 constructor.

I'm not proud of the code quality, but at last it works.
const aws = require('aws-sdk');
const fs = require('fs');
const request = require('request');
const config = require('./config');

let s3;

const init = () => {
  aws.config.update({
    signatureVersion: 'v4',
    region: 'eu-central-1',
    accessKeyId: config.aws.keyId,
    secretAccessKey: config.aws.keySecret
  });
  // signatureVersion has to be set here too, not only via aws.config.update
  s3 = new aws.S3({signatureVersion: 'v4'});
};

const signFile = (filePath) => {
  return new Promise((resolve, reject) => {
    const params = {
      Bucket: config.aws.bucket,
      Fields: {
        key: filePath
      },
      Expires: config.aws.expire,
      Conditions: [
        ['content-length-range', 0, 10000000], // 10 MB
        {'acl': 'public-read'}
      ]
    };
    s3.createPresignedPost(params, (err, data) => {
      if (err) { return reject(err); }
      resolve(data);
    });
  });
};

const sendFile = (filePath, payload) => {
  const fetch = require('node-fetch');
  const FormData = require('form-data');

  const form = new FormData();
  // the acl field has to be in the form, appended before the file
  form.append('acl', 'public-read');
  for (const field in payload.fields) {
    form.append(field, payload.fields[field]);
  }
  // the file must be the last field appended
  form.append('file', fs.createReadStream(__dirname + `/${filePath}`));

  // S3 insists on a Content-Length header, so compute the multipart body length
  form.getLength((err, length) => {
    if (err) { return console.log(`Error: ${err}`); }
    console.log(`Length: ${length}`);
    fetch(payload.url, {
      method: 'POST',
      body: form,
      headers: {
        'Content-Type': false,
        'Content-Length': length
      }
    })
    .then((response) => {
      console.log(response.ok);
      console.log(response.status);
      console.log(response.statusText);
      return response.text();
    })
    .then((payload) => {
      console.log(payload);
      console.log(form.getHeaders());
    })
    .catch((err) => console.log(`Error: ${err}`));
  });
};

init();

const file = 'test.pdf';
const filePath = `files/new/${file}`;

signFile(filePath)
  .then((payload) => { sendFile(file, payload); });
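As a quick sanity check (a hedged sketch; headObject is a standard SDK call, and the Key has to match the path that was signed), you can confirm the object landed with the expected size. A file larger than the content-length-range limit would instead be rejected with a 400 EntityTooLarge response:

s3.headObject({Bucket: config.aws.bucket, Key: filePath}, (err, data) => {
  if (err) { return console.log(err); }
  console.log(`Uploaded ${data.ContentLength} bytes`); // <= 10000000 by the policy
});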