Creating A Multipart Uploader With The New AWS NodeJS SDK

In this post I will go through and share some code and the issues I came across when building an uploader with the new AWS V3 Node SDK.

You can find the docs for the new SDK here: https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/index.html

I would also suggest keeping an eye on the GitHub repo https://github.com/aws/aws-sdk-js-v3 for bugs and releases.

In my latest project I am building a cross-platform desktop app in ElectronJS for Windows and Mac. Everything was working fine until I needed to upload files of around 3GB.

I was originally using fs.readFileSync to get the file data. This works fine, but NodeJS has a hard limit of around 2GB when reading a file this way, so you need to take the large file and break it down into manageable chunks. Simple, right?
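
For reference, here is a minimal sketch of that original approach (the path is just a placeholder); on a file over the limit, most Node versions throw a RangeError (ERR_FS_FILE_TOO_LARGE) instead of returning a buffer.

const fs = require('fs');

try {

    // Reads the whole file into a single Buffer, which fails once the
    // file is larger than Node's maximum buffer size (roughly 2GB).
    const fileData = fs.readFileSync('/path/to/Movie-3gb.mp4');

    console.log('Read ' + fileData.length + ' bytes');

} catch (err) {

    // RangeError [ERR_FS_FILE_TOO_LARGE] for files over the limit.
    console.log(err);

}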

I found many posts suggesting fs.createReadStream, which will automatically break your file into a stream of chunks. Great! Unfortunately I could hardly find any docs on how to work with the stream chunks, but after a lot of searching it turns out it's really easy. See the code below.

async function setupChunks(readable, callback) {

    // Each chunk is a Buffer of up to highWaterMark bytes.
    for await (const chunk of readable) {

        console.log(chunk);

    }

    callback(true);

}

const readable = fs.createReadStream(file_path, {
    highWaterMark: 10 * 1024 * 1024 // 10mb chunks
});

setupChunks(readable, function(res) {

    console.log(res);

});

This breaks the input file stream into 10MB chunks, so we can now build our code on top of this, also adding a callback we can use for a progress bar.

Here is the full code.

// Load the required clients and commands.
const {
    S3,
    CreateMultipartUploadCommand,
    UploadPartCommand,
    CompleteMultipartUploadCommand,
} = require("@aws-sdk/client-s3");

const fs = require('fs');

const s3Client = new S3({ 
	region: "us-east-1" 
});

// Set the parameters.
const createParams = {
    Bucket: 'bucket', 
    Key: 'Movie-3gb.mp4',
    ContentType: 'video/mp4'
};

let file_path = '/Users/dave/Desktop/large/Movie-3gb.mp4';

let uploader_id; // Sets the job upload id

let params = []; // Stores the upload parts array 

let partSize = 100 * 1024 * 1024; // 100MB parts

let index = 1; // Parts increment

(async () => {

	const data = await s3Client.send(
	    new CreateMultipartUploadCommand(createParams)
	);

	uploader_id = data.UploadId;

	async function setupChunks(readable, callback) {

	  	for await (const chunk of readable) {

	    	params.push(new UploadPartCommand({
		        Bucket: createParams.Bucket,
		        Key: createParams.Key,
		        Body: chunk,
		        PartNumber: index++,
		        UploadId: uploader_id,
		    }));

	  	}

	  	callback(true);

	}

	const readable = fs.createReadStream(file_path, { 
		highWaterMark: partSize
	});

	setupChunks(readable, function(res){
		
		// Start function
		(async () => {

			try {

				let total_files = params.length;

				let progress = 1;

				const onProgress = async promise => {

				  	const result = await promise;

				  	console.log('Progress: ' + Math.round(progress / total_files * 100) + '%');

				  	progress++;
				  	
				  	return result;

				};
		     	
		     	const responses = await Promise.all(
			        params.map(param => onProgress(s3Client.send(param)))
			    ).then(function(response){

			    	(async () => {

					 	try {

					    	var parts = [];

					    	for (var i = 0; i < response.length; i++) {
					    		
					    		parts.push({
	                                ETag: response[i].ETag,
	                                PartNumber: (i+1),
	                            });

					    	}

					        // Complete the multipart upload.
					        const uploader = await s3Client.send(
					            new CompleteMultipartUploadCommand({
				                    Bucket: createParams.Bucket,
				                    Key: createParams.Key,
				                    MultipartUpload: {
				                        Parts: parts,
				                    },
				                    UploadId: uploader_id
				                })
					        );

					        console.log("Upload completed. File location: ", uploader.Location);

					    } catch (err) {
					        
					        console.log("Error ", err);
					    
					    }

				    })();

				}).catch(function(err){
				  	
				  	console.log(err);


				});

		    } catch (err) {
		      	
		      	return console.log("There was an error uploading: ", err.message);
		    
		    }

		})();

	});

})();
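
One thing to be aware of with the code above: if the upload fails part-way through, S3 keeps the parts that were already uploaded (and charges for the storage) until the multipart upload is aborted. Here is a minimal sketch of cleaning up with AbortMultipartUploadCommand, reusing the s3Client, createParams and uploader_id variables from the code above; you could call it from the catch blocks.

const { AbortMultipartUploadCommand } = require("@aws-sdk/client-s3");

// Assumes s3Client, createParams and uploader_id from the code above.
async function abortUpload() {

    // Tells S3 to discard any parts already uploaded for this UploadId.
    await s3Client.send(
        new AbortMultipartUploadCommand({
            Bucket: createParams.Bucket,
            Key: createParams.Key,
            UploadId: uploader_id
        })
    );

    console.log("Multipart upload aborted.");

}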

Update: it turns out it is much simpler than the above code. Like they say, it's easy when you know how.

Use this code.

const {
    S3Client
} = require("@aws-sdk/client-s3");

const {
    Upload
} = require("@aws-sdk/lib-storage");

const fs = require('fs');

const fileStream = fs.createReadStream("path/to/file");

(async () => {
    try {
        const upload = new Upload({
            params: {
                Bucket: "bucket-name",
                Key: "3gbFile",
                Body: fileStream,
            },
            client: new S3Client({
                region: "us-west-2"
            }),
            queueSize: 3,
        });

        upload.on("httpUploadProgress", (progress) => {
            console.log(progress);
        });

        const result = await upload.done();
        console.log(result);
    } catch (error) {
        console.log(error)
    }
})();
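
From what I can see in the @aws-sdk/lib-storage docs, the Upload class also accepts a partSize option (how big each part is, minimum 5MB) and a leavePartsOnError option (whether failed uploads keep their parts in the bucket). A minimal sketch of how that might look:

const { S3Client } = require("@aws-sdk/client-s3");
const { Upload } = require("@aws-sdk/lib-storage");
const fs = require('fs');

const upload = new Upload({
    client: new S3Client({ region: "us-west-2" }),
    params: {
        Bucket: "bucket-name",
        Key: "3gbFile",
        Body: fs.createReadStream("path/to/file"),
    },
    queueSize: 3,               // how many parts upload in parallel
    partSize: 10 * 1024 * 1024, // 10MB per part (S3 minimum is 5MB)
    leavePartsOnError: false    // clean up uploaded parts if the upload fails
});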

So now we can add our progress output from the previous code, plus an estimated time remaining counter.

const {
    S3Client
} = require("@aws-sdk/client-s3");

const {
    Upload
} = require("@aws-sdk/lib-storage");

const fs = require('fs');

const fileStream = fs.createReadStream("/Users/samueleast/Desktop/large/BigBuckBunny.mp4");

let time_started = new Date();

let time_elapsed;

let upload_speed;

let seconds_left;

(async () => {

    try {

        const upload = new Upload({
            params: {
                Bucket: "wptuts-deliver",
                Key: "BigBuckBunny.mp4",
                Body: fileStream,
            },
            client: new S3Client({
                region: "us-east-1"
            }),
            queueSize: 3,
        });

        upload.on("httpUploadProgress", (progress) => {

            time_elapsed = (new Date()) - time_started;

            upload_speed = progress.loaded / (time_elapsed / 1000);

            seconds_left = Math.round((progress.total - progress.loaded) / upload_speed);

            console.log('Progress: ' + Math.round(progress.loaded / progress.total * 100) + '%');
            
            console.log('Estimated time remaining:', convert_time(seconds_left));

        });

        const result = await upload.done();

        console.log(result);

    } catch (error) {

        console.log(error)

    }

    // Function grabbed from stackoverflow https://stackoverflow.com/questions/37096367/how-to-convert-seconds-to-minutes-and-hours-in-javascript
    function convert_time(d) {

        d = Number(d);
        
        var h = Math.floor(d / 3600);
        
        var m = Math.floor(d % 3600 / 60);
        
        var s = Math.floor(d % 3600 % 60);

        var hDisplay = h > 0 ? h + (h == 1 ? "hr, " : "hrs, ") : "";
        
        var mDisplay = m > 0 ? m + (m == 1 ? "min, " : "mins, ") : "";
        
        var sDisplay = s > 0 ? s + (s == 1 ? "sec" : "secs") : "";
        
        return hDisplay + mDisplay + sDisplay;
    
    }

})();

Benchmarking it, a 2.5GB file uploads in around an hour, depending on your internet speed.

Really enjoying building applications with Node at the moment. Any questions, get in touch.
