Is it possible to rename an object on S3 via aws-sdk? I couldn't find a method for that, so maybe there is a workaround ...
1 Answer. There is no rename functionality in S3, and since there are technically no folders in S3, every object within the bucket has to be handled individually. You will have to create a new bucket, copy the contents from the old bucket into it, and then delete the old bucket.
An Amazon S3 bucket is owned by the AWS account that created it. Bucket ownership is not transferable to another account. When you create a bucket, you choose its name and the AWS Region to create it in. After you create a bucket, you can't change its name or Region.
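The copy-and-delete workaround from the first answer can be scripted with the Node.js aws-sdk (v2). This is only a sketch: the bucket names are placeholders, and it assumes the old bucket holds fewer than 1000 objects (a single listObjectsV2 page).
// Sketch: "rename" a bucket by copying everything into a new one and deleting the old one
const AWS = require('aws-sdk')
const s3 = new AWS.S3()

const OLD_BUCKET = 'my-old-bucket' // placeholder
const NEW_BUCKET = 'my-new-bucket' // placeholder

async function renameBucket () {
  // 1. Create the destination bucket (depending on your region you may need
  //    CreateBucketConfiguration with a LocationConstraint)
  await s3.createBucket({ Bucket: NEW_BUCKET }).promise()

  // 2. Copy every object across, deleting each original as we go
  //    (listObjectsV2 returns at most 1000 keys per call)
  const { Contents } = await s3.listObjectsV2({ Bucket: OLD_BUCKET }).promise()
  for (const obj of Contents) {
    await s3.copyObject({
      Bucket: NEW_BUCKET,
      CopySource: `${OLD_BUCKET}/${obj.Key}`,
      Key: obj.Key
    }).promise()
    await s3.deleteObject({ Bucket: OLD_BUCKET, Key: obj.Key }).promise()
  }

  // 3. Delete the now-empty old bucket
  await s3.deleteBucket({ Bucket: OLD_BUCKET }).promise()
}

renameBucket().catch(console.error)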
As you note, S3 does not have an atomic rename operation, so your usual technique doesn't work as you desire. S3 has a nice "notification" feature that can be configured. In your case, you probably want to get notified when a file is created.
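A minimal sketch of wiring up such a notification with the Node.js aws-sdk (v2) follows; the bucket name and Lambda ARN are placeholders, and the Lambda's resource policy must separately allow S3 to invoke it.
const AWS = require('aws-sdk')
const s3 = new AWS.S3()

// Placeholder values - substitute your own bucket and Lambda ARN
const params = {
  Bucket: 'your-bucket-name',
  NotificationConfiguration: {
    LambdaFunctionConfigurations: [{
      // Fire the Lambda whenever any object is created (PUT, POST, COPY, multipart)
      Events: ['s3:ObjectCreated:*'],
      LambdaFunctionArn: 'arn:aws:lambda:us-east-1:123456789012:function:on-upload'
    }]
  }
}

s3.putBucketNotificationConfiguration(params).promise()
  .then(() => console.log('Notification configured'))
  .catch(console.error)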
We can do this using the AWS management console or by using Node.js. To create an S3 bucket using the management console, go to the S3 service by selecting it from the service menu. Select "Create Bucket" and enter the name of your bucket and the region that you want to host your bucket in.
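For the Node.js route, here is a minimal sketch with the aws-sdk (v2); the bucket name and region below are placeholders.
const AWS = require('aws-sdk')
const s3 = new AWS.S3({ region: 'us-east-2' }) // placeholder region

s3.createBucket({
  Bucket: 'your-new-bucket-name', // placeholder
  // Outside us-east-1 the bucket region must be stated explicitly
  CreateBucketConfiguration: { LocationConstraint: 'us-east-2' }
}).promise()
  .then(data => console.log('Bucket created at', data.Location))
  .catch(console.error)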
I will answer, I guess, since no one has - this one should work:
// Load the SDK and create a new S3 client
const AWS = require('aws-sdk');
var s3 = new AWS.S3();
var BUCKET_NAME = 'your-bucket-name';
var OLD_KEY = 'original-file.js';
var NEW_KEY = 'new-file.js';
// Copy the object to its new key
s3.copyObject({
  Bucket: BUCKET_NAME,
  CopySource: `${BUCKET_NAME}/${OLD_KEY}`,
  Key: NEW_KEY
})
  .promise()
  .then(() =>
    // Then delete the object at the old key
    s3.deleteObject({
      Bucket: BUCKET_NAME,
      Key: OLD_KEY
    }).promise()
  )
  // Error handling is left up to the reader
  .catch((e) => console.error(e))
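Note that a single copyObject call only handles source objects up to 5 GB; anything larger has to be copied with the multipart upload API (UploadPartCopy).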
This is just a follow-on from @nf071590's answer, which is awesome.
The code below lists the entire contents of a bucket and then renames every object whose extension isn't .jpg so that it ends in .jpg.
Hope this helps someone. :)
const start = new Date()
const AWS = require('aws-sdk')
const state = {}
AWS.config.update({ region: 'ADD_REGION_HERE' })
try {
  var s3 = new AWS.S3();
  var BUCKET_NAME = 'ADD_BUCKET_NAME_HERE';
  var params = {
    Bucket: BUCKET_NAME,
    // listObjects returns at most 1000 keys per call; larger buckets need pagination
    MaxKeys: 1000
  };
  s3.listObjects(params, function (err, data) {
    if (err) {
      console.log(err, err.stack); // an error occurred
    } else {
      console.log(data);
      data.Contents.forEach(image => {
        var OLD_KEY = image.Key
        var NEW_KEY = ''
        // split the key on dots to find the extension
        var keyArray = image.Key.split('.')
        var keyArrayLength = keyArray.length
        console.log(keyArrayLength);
        var ext = keyArray[keyArrayLength - 1]
        // console.log(ext);
        // only rename keys that have an extension and aren't already .jpg
        if (ext != 'jpg' && keyArrayLength > 1) {
          console.log('Change this ext FROM: ', OLD_KEY)
          ext = 'jpg'
          // rebuild the key with the new extension, however many dots it contains
          NEW_KEY = keyArray.slice(0, keyArrayLength - 1).concat(ext).join('.')
          console.log('TO:: ', NEW_KEY);
          // Copy the object to its new key
          try {
            s3.copyObject({
              Bucket: BUCKET_NAME,
              CopySource: `${BUCKET_NAME}/${OLD_KEY}`,
              Key: NEW_KEY
            }).promise()
              .then((response) => {
                console.log('Seemed to have worked??');
                console.log(response);
                // Delete the old object; return the promise so failures reach .catch
                return s3.deleteObject({
                  Bucket: BUCKET_NAME,
                  Key: OLD_KEY
                }).promise()
              })
              // Error handling is left up to the reader
              .catch((e) => console.error(e))
          } catch (error) {
            console.log('error::', error);
          }
        }
      });
    }
  });
} catch (err) {
  // note: this only catches synchronous errors; failures inside the
  // listObjects callback are logged above
  const end = new Date() - start
  let seconds = end / 1000
  state.seconds = seconds
  state.error = err
  state.status = "error"
  state.message = err.message
  console.log(err)
  console.log(state);
  return
}