node.js - Node resize image and upload to AWS
I'm relatively new to Node, and I want to write a module that takes an image from an S3 bucket, resizes it, saves it to a temporary directory on Amazon's new Lambda service, and then uploads the resized images back to the bucket.
When I run the code, none of the functions seem to be called (download, transform, upload). I am using tmp to create the temporary directory and GraphicsMagick (the gm module) to resize the image.
What is wrong with my code?
I have defined the dependencies and the sizes array outside of the exported handler, because other parts of the code depend on them.
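Before the full module, here is a minimal standalone sketch of how I understand the two libraries are meant to be used on their own (made-up file names, not part of my module): tmp.dirSync() returns an object whose name property is the directory path and whose removeCallback() removes the directory again, and gm accepts a buffer plus a file name and can resize and write to disk.

// Standalone sketch of the tmp + gm flow (made-up file names)
var fs = require("fs");
var tmp = require("tmp");
var gm = require("gm").subClass({ imageMagick: true });

var tmpObj = tmp.dirSync();                 // tmpObj.name holds the directory path
var outPath = tmpObj.name + "/small.jpg";

gm(fs.readFileSync("input.jpg"), "input.jpg")
    .resize(200)                            // resize to 200px wide, keeping aspect ratio
    .write(outPath, function (err) {
        if (err) { return console.error(err); }
        console.log("wrote " + outPath);
        fs.unlinkSync(outPath);             // removeCallback() expects an empty directory
        tmpObj.removeCallback();
    });

My actual module follows.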
// dependencies
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true });
var fs = require("fs");
var tmp = require("tmp");

// reference to the S3 client
var s3 = new AWS.S3();

var _800px = {
    width: 800,
    destinationPath: "large"
};

var _500px = {
    width: 500,
    destinationPath: "medium"
};

var _200px = {
    width: 200,
    destinationPath: "small"
};

var _45px = {
    width: 45,
    destinationPath: "thumbnail"
};

var _sizesArray = [_800px, _500px, _200px, _45px];

var len = _sizesArray.length;

exports.AwsHandler = function(event) {
    // Read options from the event.
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey = event.Records[0].s3.object.key;
    var dstnKey = srcKey;

    // create a temporary directory
    var tmpObj = tmp.dirSync();

    // function to determine paths
    function _filePath (directory, i) {
        if (!directory) {
            return "dst/" + _sizesArray[i].destinationPath + "/" + dstnKey;
        } else {
            return directory + "/dst/" + _sizesArray[i].destinationPath + "/" + dstnKey;
        }
    };

    // Infer the image type.
    var typeMatch = srcKey.match(/\.([^.]*)$/);
    if (!typeMatch) {
        console.error('unable to infer image type for key ' + srcKey);
        return;
    };
    var imageType = typeMatch[1];
    if (imageType != "jpg" && imageType != "png") {
        console.log('skipping non-image ' + srcKey);
        return;
    };

    (function resizeImage () {
        function download () {
            console.log("started!");
            s3.getObject({
                Bucket: srcBucket,
                Key: srcKey
            }, function (err, response) {
                if (err) {
                    console.error(err);
                }
                // call transform if successful
                transform(response);
            });
        };

        function transform (response) {
            for (var i = 0; i < len; i++) {
                // define path for the image to be written to
                var _key = _filePath(tmpObj, i);

                // resize images
                gm(response.Body, srcKey)
                    .resize(_sizesArray[i].width)
                    .write(_key, function (err) {
                        if (err) {
                            console.error(err);
                        }
                        upload();
                    });
            }
        };

        function upload () {
            for (var i = 0; i < len; i++) {
                var readPath = _filePath(tmpObj, i);
                var writePath = _filePath(i);

                // read file from the temp directory
                fs.readFile(readPath, function (err, data) {
                    if (err) {
                        console.error(err);
                    }
                    // upload images to the S3 bucket
                    s3.putObject({
                        Bucket: srcBucket,
                        Key: writePath,
                        Body: data,
                        ContentType: data.type
                    }, function (err) {
                        if (err) {
                            console.error(err);
                        }
                        console.log("uploaded with success!");
                    });
                });
            }
            // manual cleanup of the temporary directory
            tmpObj.removeCallback();
        };
    }());
};
Here's a partial improvement; note the use of the async library. You still have issues in upload(), because it fires four asynchronous calls in a loop and there's no easy way to know when they have all finished. (Well, there is an easy way: rewrite that function to use the async library as well, e.g. async.forEach; a sketch of that follows the code below.)
// dependencies
var AWS = require('aws-sdk');
var gm = require('gm').subClass({ imageMagick: true });
var fs = require("fs");
var tmp = require("tmp");
var async = require("async");

// reference to the S3 client
var s3 = new AWS.S3();

var _800px = {
    width: 800,
    destinationPath: "large"
};

var _500px = {
    width: 500,
    destinationPath: "medium"
};

var _200px = {
    width: 200,
    destinationPath: "small"
};

var _45px = {
    width: 45,
    destinationPath: "thumbnail"
};

var _sizesArray = [_800px, _500px, _200px, _45px];

var len = _sizesArray.length;

exports.AwsHandler = function(event) {
    // Read options from the event.
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey = event.Records[0].s3.object.key;
    var dstnKey = srcKey;

    // create a temporary directory
    var tmpObj = tmp.dirSync();

    // function to determine paths
    function _filePath (directory, i) {
        if (!directory) {
            return "dst/" + _sizesArray[i].destinationPath + "/" + dstnKey;
        } else {
            return directory + "/dst/" + _sizesArray[i].destinationPath + "/" + dstnKey;
        }
    };

    // Infer the image type.
    var typeMatch = srcKey.match(/\.([^.]*)$/);
    if (!typeMatch) {
        console.error('unable to infer image type for key ' + srcKey);
        return;
    };
    var imageType = typeMatch[1];
    if (imageType != "jpg" && imageType != "png") {
        console.log('skipping non-image ' + srcKey);
        return;
    };

    // call resizeImage, the main pipeline function:
    resizeImage(function(err){
        // done. manual cleanup of the temporary directory
        tmpObj.removeCallback();
    });

    function resizeImage (callback) {
        var s3Obj = {
            Bucket: srcBucket,
            Key: srcKey
        };
        download(s3Obj, function(response){
            var gmConfigs = _sizesArray.map(function(size, i){
                return {
                    width: size.width,
                    _key: _filePath(tmpObj, i)
                };
            });

            async.eachSeries(gmConfigs, function(config, done){
                transform(response, config.width, config._key, done);
            }, function(err){
                if (err) {
                    console.log(err);
                } else {
                    upload();
                    // further work is required to identify whether the uploads worked,
                    // and to know when to call callback() here
                    // callback();
                }
            });
        });
    }

    function download (s3Obj, callback) {
        console.log("started!");
        s3.getObject(s3Obj, function (err, response) {
            if (err) {
                console.error(err);
            }
            // call transform if successful
            callback(response);
        });
    };

    function transform (response, width, _key, callback) {
        // resize images
        gm(response.Body, srcKey)
            .resize(width)
            .write(_key, function (err) {
                if (err) {
                    console.error(err);
                }
                callback();
            });
    };

    function upload () {
        for (var i = 0; i < len; i++) {
            var readPath = _filePath(tmpObj, i);
            var writePath = _filePath(i);

            // read file from the temp directory
            fs.readFile(readPath, function (err, data) {
                if (err) {
                    console.error(err);
                }
                // upload images to the S3 bucket
                s3.putObject({
                    Bucket: srcBucket,
                    Key: writePath,
                    Body: data,
                    ContentType: data.type
                }, function (err) {
                    if (err) {
                        console.error(err);
                    }
                    console.log("uploaded with success!");
                });
            });
        }
    };
};
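To finish that thought, here is a hedged sketch (my own illustration, not tested on Lambda) of how upload() could be rewritten with async.each (async.forEach is the same function) so its callback fires only after all four puts have completed. It would live inside the handler above and reuses _filePath, tmpObj, len, srcBucket, imageType and s3 from that code:

// Sketch: upload() rewritten so it reports when every put has completed
function upload (callback) {
    var indexes = [];
    for (var i = 0; i < len; i++) { indexes.push(i); }

    async.each(indexes, function (i, done) {
        var readPath = _filePath(tmpObj, i);    // local temp file (same path transform() wrote to)
        var writePath = _filePath(null, i);     // passing null so _filePath returns the "dst/..." key

        fs.readFile(readPath, function (err, data) {
            if (err) { return done(err); }
            s3.putObject({
                Bucket: srcBucket,
                Key: writePath,
                Body: data,
                // rough guess at the MIME type from the earlier extension check
                ContentType: imageType === "png" ? "image/png" : "image/jpeg"
            }, done);
        });
    }, function (err) {
        // err is the first error encountered, or null if every upload succeeded
        callback(err);
    });
}

With that signature, the else branch in resizeImage() can call upload(callback) directly, and the temporary-directory cleanup in the top-level callback runs only once everything has been written to S3.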