How to download a larger binary object from Postgres using Node.js and Express?
I have a Node.js REST API script that queries a Postgres table. It works fine, but when I try to fetch a large object using Postgres' "lo_get", I get
JavaScript heap out of memory
Below is a toy example.
index.js
const express = require('express')
const bodyParser = require('body-parser')
const app = express()
const port = 8000
var controller= require('./src/controller');
app.use(bodyParser.json())
app.use(
  bodyParser.urlencoded({
    extended: true,
  })
)
app.use('/cont', controller);
app.listen(port, () => {
  console.log(`App running on port ${port}.`)
})
controller.js
var express = require('express');
var router = express.Router();
var model = require('./model');

router.get('/fileID/:lo_oid/', function (req, res) {
  const lo_oid = parseInt(req.params.lo_oid)
  model.exportLoSaint(lo_oid, function (err, object) {
    if (err) return res.send(err);
    // Fixed the typo
    // response.status(200).file(file);
    res.status(200).objects;
  });
});
module.exports = router;
Model.js
const Pool = require('pg').Pool
const pool = new Pool()  // connection settings come from the PG* environment variables / config

var exportLoSaintQuery = "SELECT lo_get($1)"

const exportLoSaint = (lo_oid, callback) => {
  pool.query(exportLoSaintQuery, [lo_oid], (error, results) => {
    if (error) {
      callback(error);
    }
    else {
      callback(results.rows)
    }
  })
}

module.exports = {
  exportLoSaint
}
When I send the request.. http://ipaddress:8000/cont/fileID/<fileID>/
I get the error below.
<--- JS stacktrace --->
==== JS stack trace =========================================
0: ExitFrame [pc: 0x1b2cbd65be1d]
Security context: 0x2bf3bbc08bd9 <JSObject>
1: toJSON [0x259ded584f29] [buffer.js:~979] [pc=0x1b2cbd665145](this=0x306d2be17731 <Uint8Array map = 0x9dd44699a69>)
2: arguments adaptor frame: 1->0
3: InternalFrame [pc: 0x1b2cbd60ee75]
4: EntryFrame [pc: 0x1b2cbd6092c1]
5: builtin exit frame: stringify(this=0x0a6855c58f61 <Object map = 0x428c84042a9>,0x2160e26026f1 <undefined>,0x2160e...
FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed - JavaScript heap out of memory
1: 0x56228da912e4 node::Abort() [node]
2: 0x56228da91332 [node]
3: 0x56228dc86d4a v8::Utils::ReportOOMFailure(v8::internal::Isolate*, char const*, bool) [node]
4: 0x56228dc86fc5 v8::internal::V8::FatalProcessOutOfMemory(v8::internal::Isolate*, char const*, bool) [node]
5: 0x56228e0387d6 [node]
6: 0x56228e04e018 v8::internal::Heap::AllocateRawWithRetryOrFail(int, v8::internal::AllocationSpace, v8::internal::AllocationAlignment) [node]
7: 0x56228e01a63b v8::internal::Factory::AllocateRawArray(int, v8::internal::PretenureFlag) [node]
8: 0x56228e01aad8 v8::internal::Factory::NewFixedArrayWithFiller(v8::internal::Heap::RootListIndex, int, v8::internal::Object*, v8::internal::PretenureFlag) [node]
9: 0x56228dfe48fe [node]
10: 0x56228dfe4ad3 [node]
11: 0x56228e143566 v8::internal::JSObject::AddDataElement(v8::internal::Handle<v8::internal::JSObject>, unsigned int, v8::internal::Handle<v8::internal::Object>, v8::internal::PropertyAttributes, v8::internal::ShouldThrow) [node]
12: 0x56228e17897e v8::internal::Object::SetProperty(v8::internal::LookupIterator*, v8::internal::Handle<v8::internal::Object>, v8::internal::LanguageMode, v8::internal::Object::StoreFromKeyed) [node]
13: 0x56228e2c1299 v8::internal::Runtime::SetObjectProperty(v8::internal::Isolate*, v8::internal::Handle<v8::internal::Object>, v8::internal::Handle<v8::internal::Object>, v8::internal::Handle<v8::internal::Object>, v8::internal::LanguageMode) [node]
14: 0x56228e0a9b45 v8::internal::Runtime_KeyedStoreIC_Slow(int, v8::internal::Object**, v8::internal::Isolate*) [node]
15: 0x1b2cbd65be1d
The binary object in Postgres is only 128 MB, so I increased the Node.js heap size to 1 GB, but that didn't help. The other solutions I found don't download the binary from Postgres through Express.
********************* Update 1 *********************
Model.js
const exportLoSaintForMaster = (oid, fileName, callback) => {
  var fileStream = require('fs').createWriteStream(fileName);
  var man = new LargeObjectManager({ pg: pool });
  pool.query('BEGIN', function (err, result) {
    if (err) {
      callback(err);
      pool.emit('error', err);
    }
    console.log("\nthe oid : %d\n", oid)
    var bufferSize = 16384;
    man.openAndReadableStream(oid, bufferSize, function (err, size, stream) {
      if (err) {
        callback(err);
        return console.error('Unable to read the given large object', err);
      }
      console.log('Streaming a large object with a total size of', size);
      stream.on('end', function () {
        pool.query('COMMIT', callback);
      });
      // Store it as an image
      stream.pipe(fileStream);
    });
  });
  callback(fileStream)
};
The script waits for a long time and then errors out with the following message..
response.status(200).objects;
^
ReferenceError: response is not defined
********************* Update 2 *********************
After fixing the typo above, that error is gone.
But I see another problem.. the client cannot download the file. It shows the error below on the client side..
I tried doing a GET and received the following message:
{"_writableState":{"objectMode":false,"highWaterMark":16384,"finalCalled":false,"needDrain":false,"ending":false,"ended":false,"finished":false,"destroyed":false,"decodeStrings":true,"defaultEncoding":"utf8","length":0,"writing":false,"corked":0,"sync":true,"bufferProcessing":false,"writecb":null,"writelen":0,"bufferedRequest":null,"lastBufferedRequest":null,"pendingcb":0,"prefinished":false,"errorEmitted":false,"emitClose":false,"autoDestroy":false,"bufferedRequestCount":0,"corkedRequestsFree":{"next":null,"entry":null}},"writable":true,"_events":{},"_eventsCount":0,"path":"myfile1","fd":null,"flags":"w","mode":438,"autoClose":true,"bytesWritten":0,"closed":false}
I downloaded it with Postman. I think the problem may be in how controller.js calls the function in model.js.
Please advise.
********************* Update 3 *********************
controller.js
router.get('/fileID/:lo_oid/fileName/:fileName', function (req, res) {
  const oid = parseInt(req.params.lo_oid)
  const fileName = req.params.fileName
  model.exportLoSaintForMaster(oid, fileName, (err, stream) => stream.pipe(res));
});
model.js
const exportLoSaintForMaster = (oid, fileName, callback) => {
  var fileStream = require('fs').createWriteStream(fileName);
  var man = new LargeObjectManager({ pg: pool });
  pool.query('BEGIN', function (err, result) {
    if (err) {
      callback(err);
      pool.emit('error', err);
    }
    console.log("\nthe oid : %d\n", oid)
    var bufferSize = 16384;
    man.openAndReadableStream(oid, bufferSize, function (err, size, stream) {
      if (err) {
        callback(err);
        return console.error('Unable to read the given large object', err);
      }
      console.log('Streaming a large object with a total size of', size);
      stream.on('end', function () {
        pool.query('COMMIT', callback);
      });
      // Store it as an image
      stream.pipe(fileStream);
    });
  });
  callback(null, fileStream)
};

module.exports = {
  exportLoSaintForMaster
}
I get the error below:
Error [ERR_STREAM_CANNOT_PIPE]: Cannot pipe, not readable
at WriteStream.Writable.pipe (_stream_writable.js:243:24)
...
...
As you mentioned, you can increase the Node.js heap size with the --max-old-space-size flag, for example setting it to 4 GB:
node --max-old-space-size=4096 index.js
But that is not a proper solution. The right way is to handle it with streams, as Aviv Lo mentioned in the comments. Streaming reduces the script's memory footprint by processing the data chunk by chunk.
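For illustration only, the general pattern looks like the sketch below (not specific to large objects): instead of loading the whole payload into memory and calling res.send(), you pipe a Node.js Readable into the Express response so that only one chunk is held in memory at a time. getReadableSource here is a hypothetical placeholder for whatever produces your readable stream:
router.get('/download', (req, res, next) => {
  // getReadableSource() is a hypothetical helper, e.g. fs.createReadStream(...) or a database stream.
  const source = getReadableSource();
  source.on('error', next); // forward stream errors to the Express error handler
  source.pipe(res);         // only one chunk is buffered at a time
});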
To handle PostgreSQL large objects, you can use the pg-large-object package (npm i -s pg-large-object):
const { LargeObjectManager } = require('pg-large-object');
const { createWriteStream } = require('fs');

router.get('/fileID/:lo_oid/', (req, res, next) => {
  // assuming db is your database connection
  // When working with Large Objects, always use a transaction
  db.tx(tx => {
    const man = new LargeObjectManager({ pgPromise: tx });
    const lo_oid = parseInt(req.params.lo_oid);
    const bufferSize = 16384; // the size of each chunk, customize it
    return man.openAndReadableStreamAsync(lo_oid, bufferSize)
      .then(([size, stream]) => {
        stream.pipe(res); // it takes care of binding to the data and end events
      });
  }).catch(error => {
    next(error); // pass to the Express error handler
  });
});
This way you handle the large object as a stream and pipe it to the client for download.
With this solution, the maximum heap used by this part of the script is bufferSize, instead of the entire file size as before.
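If you also want the HTTP client to see a sensible content type, download size, and file name, you can optionally set a few response headers from the size value before piping. This is only a sketch on top of the route above; the application/octet-stream MIME type and the myfile download name are placeholder assumptions:
router.get('/fileID/:lo_oid/', (req, res, next) => {
  db.tx(tx => {
    const man = new LargeObjectManager({ pgPromise: tx });
    const lo_oid = parseInt(req.params.lo_oid);
    return man.openAndReadableStreamAsync(lo_oid, 16384)
      .then(([size, stream]) => {
        res.setHeader('Content-Type', 'application/octet-stream');             // placeholder MIME type
        res.setHeader('Content-Length', size);                                 // lets clients show download progress
        res.setHeader('Content-Disposition', 'attachment; filename="myfile"'); // placeholder file name
        stream.pipe(res);
      });
  }).catch(next);
});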
Edit for Update 3
In your controller:
router.get('/fileID/:lo_oid/fileName/:fileName', function (req, res, next) {
  const oid = parseInt(req.params.lo_oid);
  const fileName = req.params.fileName;
  res.header('Content-Disposition', `attachment; filename="${fileName}"`);
  model.exportLoSaintForMaster(oid, (err, stream) => {
    if (err) return next(err);
    stream.pipe(res);
  });
});
In your model:
const exportLoSaintForMaster = (oid, callback) => {
  const man = new LargeObjectManager({ pg: pool });
  pool.query('BEGIN', function (err, result) {
    if (err) {
      callback(err);
      pool.emit('error', err);
      return;
    }
    console.log("\nthe oid : %d\n", oid)
    const bufferSize = 16384;
    man.openAndReadableStream(oid, bufferSize, function (err, size, stream) {
      if (err) {
        return callback(err);
      }
      console.log(`Streaming a large object with a total size of ${size}`);
      stream.on('end', _ => pool.query('COMMIT'));
      callback(null, stream);
    });
  });
};
Warning: I don't know how your pool object works, so this code may not work exactly as expected.
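For completeness, below is one way the pool used in these snippets might be created with the pg package. This is only an assumption to make the model sketch self-contained; every connection setting is a placeholder for your real configuration:
const { Pool } = require('pg');
const { LargeObjectManager } = require('pg-large-object');

// Placeholder connection settings; replace them or rely on the PG* environment variables.
const pool = new Pool({
  host: 'localhost',
  port: 5432,
  database: 'mydb',
  user: 'myuser',
  password: 'mypassword',
});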
To fix the response is not defined error, you have to change response to res in controller.js:
var express = require('express');
var router = express.Router();

router.get('/fileID/:lo_oid/', function (req, res) {
  const lo_oid = parseInt(req.params.lo_oid)
  model.exportLoSaint(lo_oid, function (err, file) {
    if (err) return res.send(err);
    res.status(200).file(file);
  });
});
module.exports = router;