In amazon lambda, resizing multiple thumbnail sizes in parallel async throws Error: Stream yields empty buffer
In amazon lambda, resizing multiple thumbnail sizes in parallel async throws Error: Stream yields empty buffer
我改编了 Amazon 的 "resizing a photo in Lambda" 示例,以便并行创建多个缩略图尺寸并运行。
我的代码在本地几秒钟内就能正常运行,但在 Lambda 云中无法并行运行:在调整第一个缩略图大小后就抛出错误。如果我把它从并行改为串行,则串行运行大约需要 60 秒。
为什么在 Lambda 中并行运行调整大小的代码会导致 "流产生空缓冲区"(Stream yields empty buffer)错误?我怎样才能提高性能,在几秒钟内创建所有尺寸,同时在处理器成本方面仍然从 Lambda 中获得良好的性价比和效率?
// dependencies
var async = require('async'); // callback-based flow control (each, waterfall)
var AWS = require('aws-sdk'); // S3 client for download/upload
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
// constants
var SIZES = [100, 320, 640]; // thumbnail sizes: max edge length in pixels
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
var srcKey = event.Records[0].s3.object.key;
var dstBucket = srcBucket + "-resized";
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
console.error('unable to infer image type for key ' + srcKey);
return context.done();
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
console.log('skipping non-image ' + srcKey);
return context.done();
}
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
console.error("Destination bucket must not match source bucket.");
return context.done();
}
// Download the image from S3
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
function(err, response){
if (err)
return console.error('unable to download image ' + err);
var contentType = response.ContentType;
var original = gm(response.Body);
original.size(function(err, size){
if(err)
return console.error(err);
//transform, and upload to a different S3 bucket.
async.each(SIZES,
function (max_size, callback) {
resize_photo(size, max_size, imageType, original, srcKey, dstBucket, contentType, callback);
},
function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket
);
}
context.done();
});
});
});
};
//wrap up variables into an options object
/**
 * Resize one image to fit within max_size and upload it to dstBucket under
 * the key "<max_size>_<srcKey>".
 *
 * @param {Object}   size        - original dimensions ({width, height}) from gm.size().
 * @param {number}   max_size    - target maximum edge length in pixels.
 * @param {string}   imageType   - output format passed to gm ("jpg" or "png").
 * @param {Object}   original    - gm instance wrapping the source image buffer.
 * @param {string}   srcKey      - source object key, used to build the destination key.
 * @param {string}   dstBucket   - destination S3 bucket name.
 * @param {string}   contentType - Content-Type to set on the uploaded object.
 * @param {Function} done        - callback(err) invoked when finished.
 */
var resize_photo = function(size, max_size, imageType, original, srcKey, dstBucket, contentType, done) {
    // Destination key is prefixed with the target size, e.g. "320_photo.jpg".
    var dstKey = max_size + "_" + srcKey;

    async.waterfall([
        function transform(next) {
            // Scale so the longest edge equals max_size, preserving the
            // aspect ratio (no unnatural stretching).
            var scale = Math.min(max_size / size.width, max_size / size.height);
            original
                .resize(scale * size.width, scale * size.height)
                .toBuffer(imageType, function(err, buffer) {
                    if (err) return next(err);
                    next(null, buffer);
                });
        },
        function upload(data, next) {
            // Write the resized image to the destination bucket.
            s3.putObject({
                Bucket: dstBucket,
                Key: dstKey,
                Body: data,
                ContentType: contentType
            }, next);
        }
    ], function(err) {
        console.log('finished resizing ' + dstBucket + '/' + dstKey);
        if (err) {
            console.error(err);
        } else {
            console.log(
                'Successfully resized ' + dstKey
            );
        }
        done(err);
    });
};
我今晚运行时遇到了同样的问题。
尽管您可能还可以做其他事情,但我增加了 lambda 任务的内存配置,缓冲区问题就消失了。
我正在将大约 2.1MB、5000x3000 的图像调整为 3 个较小的尺寸。
Duration: 11619.86 ms Billed Duration: 11700 ms Memory Size: 1024 MB
Max Memory Used: 582 MB
希望对您有所帮助
我改编了 Amazon 的 "resizing a photo in Lambda" 示例,以便并行创建多个缩略图尺寸并运行。
我的代码在本地几秒钟内就能正常运行,但在 Lambda 云中无法并行运行:在调整第一个缩略图大小后就抛出错误。如果我把它从并行改为串行,则串行运行大约需要 60 秒。
为什么在 Lambda 中并行运行调整大小的代码会导致 "流产生空缓冲区"(Stream yields empty buffer)错误?我怎样才能提高性能,在几秒钟内创建所有尺寸,同时在处理器成本方面仍然从 Lambda 中获得良好的性价比和效率?
// dependencies
var async = require('async'); // callback-based flow control (each, waterfall)
var AWS = require('aws-sdk'); // S3 client for download/upload
var gm = require('gm')
.subClass({ imageMagick: true }); // Enable ImageMagick integration.
var util = require('util');
// constants
var SIZES = [100, 320, 640]; // thumbnail sizes: max edge length in pixels
// get reference to S3 client
var s3 = new AWS.S3();
exports.handler = function(event, context) {
// Read options from the event.
console.log("Reading options from event:\n", util.inspect(event, {depth: 5}));
var srcBucket = event.Records[0].s3.bucket.name;
var srcKey = event.Records[0].s3.object.key;
var dstBucket = srcBucket + "-resized";
// Infer the image type.
var typeMatch = srcKey.match(/\.([^.]*)$/);
if (!typeMatch) {
console.error('unable to infer image type for key ' + srcKey);
return context.done();
}
var imageType = typeMatch[1];
if (imageType != "jpg" && imageType != "png") {
console.log('skipping non-image ' + srcKey);
return context.done();
}
// Sanity check: validate that source and destination are different buckets.
if (srcBucket == dstBucket) {
console.error("Destination bucket must not match source bucket.");
return context.done();
}
// Download the image from S3
s3.getObject({
Bucket: srcBucket,
Key: srcKey
},
function(err, response){
if (err)
return console.error('unable to download image ' + err);
var contentType = response.ContentType;
var original = gm(response.Body);
original.size(function(err, size){
if(err)
return console.error(err);
//transform, and upload to a different S3 bucket.
async.each(SIZES,
function (max_size, callback) {
resize_photo(size, max_size, imageType, original, srcKey, dstBucket, contentType, callback);
},
function (err) {
if (err) {
console.error(
'Unable to resize ' + srcBucket +
' due to an error: ' + err
);
} else {
console.log(
'Successfully resized ' + srcBucket
);
}
context.done();
});
});
});
};
//wrap up variables into an options object
/**
 * Resize one image to fit within max_size and upload it to dstBucket under
 * the key "<max_size>_<srcKey>".
 *
 * @param {Object}   size        - original dimensions ({width, height}) from gm.size().
 * @param {number}   max_size    - target maximum edge length in pixels.
 * @param {string}   imageType   - output format passed to gm ("jpg" or "png").
 * @param {Object}   original    - gm instance wrapping the source image buffer.
 * @param {string}   srcKey      - source object key, used to build the destination key.
 * @param {string}   dstBucket   - destination S3 bucket name.
 * @param {string}   contentType - Content-Type to set on the uploaded object.
 * @param {Function} done        - callback(err) invoked when finished.
 */
var resize_photo = function(size, max_size, imageType, original, srcKey, dstBucket, contentType, done) {
    // Destination key is prefixed with the target size, e.g. "320_photo.jpg".
    var dstKey = max_size + "_" + srcKey;

    async.waterfall([
        function transform(next) {
            // Scale so the longest edge equals max_size, preserving the
            // aspect ratio (no unnatural stretching).
            var scale = Math.min(max_size / size.width, max_size / size.height);
            original
                .resize(scale * size.width, scale * size.height)
                .toBuffer(imageType, function(err, buffer) {
                    if (err) return next(err);
                    next(null, buffer);
                });
        },
        function upload(data, next) {
            // Write the resized image to the destination bucket.
            s3.putObject({
                Bucket: dstBucket,
                Key: dstKey,
                Body: data,
                ContentType: contentType
            }, next);
        }
    ], function(err) {
        console.log('finished resizing ' + dstBucket + '/' + dstKey);
        if (err) {
            console.error(err);
        } else {
            console.log(
                'Successfully resized ' + dstKey
            );
        }
        done(err);
    });
};
我今晚运行时遇到了同样的问题。
尽管您可能还可以做其他事情,但我增加了 lambda 任务的内存配置,缓冲区问题就消失了。
我正在将大约 2.1MB、5000x3000 的图像调整为 3 个较小的尺寸。
Duration: 11619.86 ms Billed Duration: 11700 ms Memory Size: 1024 MB Max Memory Used: 582 MB
希望对您有所帮助