In this article, let's look at a small Node.js script that uploads files to an Amazon S3 bucket.
Prerequisite
I am going to assume you understand what Amazon S3 is used for. If not, you can visit this link to learn more.
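The script also depends on three npm packages that it requires: the v2 aws-sdk, recursive-readdir for walking the directory tree, and mime-types for guessing content types. Assuming an npm project, install them with npm install aws-sdk recursive-readdir mime-types.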
Writing the upload script
// uploadTos3.js
const fs = require('fs');
const readDir = require('recursive-readdir');
const path = require('path');
const AWS = require('aws-sdk');
const mime = require('mime-types');

// Credentials come from the environment; never hardcode them.
AWS.config.update({
  region: 'ap-south-1',
  accessKeyId: process.env.AWS_ACCESS_KEY_ID,
  secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
  maxRetries: 3
});

const directoryPath = path.resolve(__dirname, './images');

// Recursively list every file under dir, skipping anything in ignores.
const getDirectoryFilesRecursive = (dir, ignores = []) => {
  return new Promise((resolve, reject) => {
    readDir(dir, ignores, (err, files) => (err ? reject(err) : resolve(files)));
  });
};

// Map a local file path to its S3 object key, keeping the directory
// structure below images/ under the website-assets/ prefix.
const generateFileKey = fileName => {
  const S3objectPath = fileName.split('/images/')[1];
  return `website-assets/${S3objectPath}`;
};

const s3 = new AWS.S3();

const uploadToS3 = async () => {
  try {
    const fileArray = await getDirectoryFilesRecursive(directoryPath, [
      'BUILD_ID'
    ]);
    fileArray.forEach(file => {
      // Configuring parameters for the S3 object
      const S3params = {
        Bucket: 'public-read-bucket',
        Body: fs.createReadStream(file),
        Key: generateFileKey(file),
        ACL: 'public-read',
        ContentType: mime.lookup(file),
        ContentEncoding: 'utf-8',
        CacheControl: 'immutable,max-age=31536000,public'
      };
      s3.upload(S3params, function(err, data) {
        if (err) {
          // Set the exit code while letting
          // the process exit gracefully.
          console.error(err);
          process.exitCode = 1;
        } else {
          console.log(`Assets uploaded to S3: `, data);
        }
      });
    });
  } catch (error) {
    console.error(error);
    process.exitCode = 1;
  }
};

uploadToS3();
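Note that the try/catch above only guards the directory listing; each s3.upload fires off independently and reports errors through its callback. If you'd rather await all uploads and fail in one place, the SDK's .promise() helper makes that straightforward. Here's a minimal sketch of that variant, reusing the s3 client, generateFileKey, and getDirectoryFilesRecursive from the script above (the application/octet-stream fallback is my addition for files mime-types can't identify):

// Promise-based variant of the upload loop.
const uploadAllToS3 = async () => {
  const files = await getDirectoryFilesRecursive(directoryPath, ['BUILD_ID']);
  const uploads = files.map(file =>
    s3
      .upload({
        Bucket: 'public-read-bucket',
        Body: fs.createReadStream(file),
        Key: generateFileKey(file),
        ACL: 'public-read',
        ContentType: mime.lookup(file) || 'application/octet-stream',
        CacheControl: 'immutable,max-age=31536000,public'
      })
      .promise()
  );
  // Promise.all rejects as soon as any single upload fails.
  const results = await Promise.all(uploads);
  results.forEach(data => console.log('Uploaded:', data.Location));
};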
Note: I'm uploading all the assets to the website-assets folder. The CloudFront distribution will have that as its base path.
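For example, given a hypothetical file at /home/user/project/images/icons/logo.svg, the key generator keeps everything below images/ and prefixes it:

generateFileKey('/home/user/project/images/icons/logo.svg');
// => 'website-assets/icons/logo.svg'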
Run node uploadTos3.js to upload all the files to the S3 bucket.
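Since the script reads credentials from environment variables, make sure they are set in the shell first, e.g. AWS_ACCESS_KEY_ID=<your-key> AWS_SECRET_ACCESS_KEY=<your-secret> node uploadTos3.js.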