Node.js: Resizing multiple thumbnails in parallel with async in AWS Lambda throws an error: Stream yields empty buffer


I have modified Amazon's example to create multiple thumbnail sizes and to run the resizes in parallel.

My code runs correctly locally in a few seconds, but in the Lambda cloud it will not run in parallel: it throws an error after resizing the first thumbnail. If I convert it from parallel to serial, the serial run takes about 60 seconds.

Why does running the resize code in parallel in Lambda cause a "Stream yields empty buffer" error? And how can I improve performance so the sizes are created within a few seconds, while still getting good value and efficiency out of Lambda in terms of processor cost?

// dependencies
var async = require('async');
var AWS = require('aws-sdk');
var gm = require('gm')
            .subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');

// constants
var SIZES = [100, 320, 640];

// get reference to S3 client 
var s3 = new AWS.S3();

exports.handler = function(event, context) {

    // Read options from the event.
    console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
    var srcBucket = event.Records[0].s3.bucket.name;
    var srcKey    = event.Records[0].s3.object.key;
    var dstBucket = srcBucket + "-resized";

    // Infer the image type.
    var typeMatch = srcKey.match(/\.([^.]*)$/);
    if (!typeMatch) {
        console.error('unable to infer image type for key ' + srcKey);
        return context.done();
    }
    var imageType = typeMatch[1];
    if (imageType != "jpg" && imageType != "png") {
        console.log('skipping non-image ' + srcKey);
        return context.done();
    }

    // Sanity check: validate that source and destination are different buckets.
    if (srcBucket == dstBucket) {
        console.error("Destination bucket must not match source bucket.");
        return context.done();
    }

    // Download the image from S3
    s3.getObject({
            Bucket: srcBucket,
            Key: srcKey
        },
        function(err, response){

            if (err)
                return console.error('unable to download image ' + err);

            var contentType = response.ContentType;

            var original =  gm(response.Body);
            original.size(function(err, size){

                if(err)
                    return console.error(err);

                //transform, and upload to a different S3 bucket.
                async.each(SIZES,
                    function (max_size,  callback) {
                        resize_photo(size, max_size, imageType, original, srcKey, dstBucket, contentType, callback);
                    },
                    function (err) {
                        if (err) {
                            console.error(
                                'Unable to resize ' + srcBucket +
                                ' due to an error: ' + err
                            );
                        } else {
                            console.log(
                                'Successfully resized ' + srcBucket
                            );
                        }

                        context.done();
                    });
            });


        });



};

// resize one photo to the given max_size and upload it to the destination bucket
var resize_photo = function(size, max_size, imageType, original, srcKey, dstBucket, contentType, done) {

    var dstKey = max_size +  "_" + srcKey;


    // transform, and upload to a different S3 bucket.
    async.waterfall([

        function transform(next) {


            // Infer the scaling factor to avoid stretching the image unnaturally.
            var scalingFactor = Math.min(
                max_size / size.width,
                max_size / size.height
            );
            var width  = scalingFactor * size.width;
            var height = scalingFactor * size.height;


            // Transform the image buffer in memory.
            original.resize(width, height)
                .toBuffer(imageType, function(err, buffer) {

                    if (err) {
                        next(err);
                    } else {
                        next(null, buffer);
                    }
                });

        },
        function upload(data, next) {
            // Stream the transformed image to a different S3 bucket.
            s3.putObject({
                    Bucket: dstBucket,
                    Key: dstKey,
                    Body: data,
                    ContentType: contentType
                },
                next);
            }
        ], function (err) {

            console.log('finished resizing ' + dstBucket + '/' + dstKey);

            if (err) {
                console.error(err);
            } else {
                console.log(
                    'Successfully resized ' + dstKey
                );
            }

            done(err);
        }
    );
};
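
For illustration only, here is a minimal sketch of the same fan-out in which each size is resized from its own gm instance (rather than the shared original chain) and the concurrency is capped with async.eachLimit. The limit of 2 is an arbitrary assumption, and this is not claimed to be the fix for the buffer error (see the answer below); it only shows one way to throttle the parallel resizes. response, size, imageType, srcKey, dstBucket, contentType and s3 are assumed to be the same values as in the handler above.

// Sketch: cap the number of concurrent resizes at 2 and build a fresh
// gm instance per size from the downloaded buffer.
async.eachLimit(SIZES, 2,
    function (max_size, callback) {
        // Scale so the longest edge is at most max_size pixels.
        var scalingFactor = Math.min(
            max_size / size.width,
            max_size / size.height
        );
        var width  = scalingFactor * size.width;
        var height = scalingFactor * size.height;

        gm(response.Body)
            .resize(width, height)
            .toBuffer(imageType, function(err, buffer) {
                if (err) return callback(err);

                // Upload this size to the destination bucket.
                s3.putObject({
                    Bucket: dstBucket,
                    Key: max_size + "_" + srcKey,
                    Body: buffer,
                    ContentType: contentType
                }, callback);
            });
    },
    function (err) {
        if (err) {
            console.error('Unable to resize ' + srcKey + ': ' + err);
        } else {
            console.log('Successfully resized ' + srcKey);
        }
        context.done();
    });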

I ran into the same problem tonight.

Although there may be other things you can do as well, I increased the memory allocated to the Lambda task and the buffer problem went away.

I am resizing an image of roughly 2.1 MB and 5000x3000 pixels into 3 smaller sizes.

Duration: 11619.86 ms    Billed Duration: 11700 ms    Memory Size: 1024 MB    Max Memory Used: 582 MB
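
For reference, the memory setting can be changed in the Lambda console, or programmatically with the aws-sdk already used above; a minimal sketch, where the function name is a made-up placeholder:

// Sketch: raise the function's memory allocation to 1024 MB.
// 'my-thumbnail-resizer' is a hypothetical name; use your function's name.
var AWS = require('aws-sdk');
var lambda = new AWS.Lambda();

lambda.updateFunctionConfiguration({
    FunctionName: 'my-thumbnail-resizer',
    MemorySize: 1024    // in MB
}, function(err, data) {
    if (err) {
        console.error('Failed to update memory size: ' + err);
    } else {
        console.log('Memory size is now ' + data.MemorySize + ' MB');
    }
});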


Hope this helps.

Yes, increasing the Lambda memory size helped me too. When I asked the Amazon architects about it, they seemed puzzled by it as well.