I am pleased to report that with Merlin's suggestion of using the pg-large-object middleware, I have a test case now
showing that I can write a 25MB buffer from Node.js to Postgres in roughly 700 milliseconds. Here is the JavaScript
code, which is nearly verbatim from the example in the pg-large-object doc:
/**
 * Write a 25 MB buffer to Postgres as a large object via pg-large-object,
 * streaming it in 16 KB chunks inside a single transaction.
 * @param {Function} callback - Node-style callback; invoked with no args on
 *   success, or with the error on failure.
 */
packages.testLargeObjects = function(callback) {
  var pgp = require('pg-promise')();
  var LargeObjectManager = require('pg-large-object').LargeObjectManager;
  var PassThrough = require('stream').PassThrough;

  var bufSize = 1024 * 1024 * 25;
  // Buffer.alloc replaces the deprecated `new Buffer(size)` constructor
  // (new Buffer could expose uninitialized memory; alloc zero-fills).
  var buf = Buffer.alloc(bufSize);
  buf.fill("pgrocks");

  var connInfo = {
    host: 'localhost',
    port: 5432,
    database: 'mydb',
    user: 'postgres',   // FIX: closing quote was missing, breaking the script
    password: 'secret'
  };

  var db = pgp(connInfo);

  // Large-object I/O must happen inside a transaction.
  db.tx(function(tx) {
    const lObjMgr = new LargeObjectManager({pgPromise: tx});
    const bufferSize = 16384;
    return lObjMgr.createAndWritableStreamAsync(bufferSize)
      .then( ([oid, stream]) => {
        // Wrap the in-memory buffer in a stream and pipe it into the
        // large-object writable stream; resolve when the write completes.
        let bufferStream = new PassThrough();
        bufferStream.end(buf);
        bufferStream.pipe(stream);
        return new Promise(function(resolve, reject) {
          stream.on('finish', resolve);
          stream.on('error', reject);
        });
      });
  })
  .then(function() {
    callback();
    pgp.end();   // release the pg-promise connection pool
  })
  .catch(function(err) {
    callback(err);
    pgp.end();
  });
};
Thanks very much!
Eric
-----Original Message-----
From: Merlin Moncure [mailto:mmoncure@gmail.com]
Another point, some googling turned up
https://www.npmjs.com/package/pg-large-object which is definitely something to consider trying.
merlin