This is how I query my data in Node:
const {Pool} = require('pg');
const QueryStream = require('pg-query-stream');
const CSVStream = require('../path/to/CSVStream');
// Connection pool for the local "prenoms" database.
const pool = new Pool({
  host: "localhost",
  port: 5432,
  user: "blah",
  password: "blah",
  database: "prenoms",
});

// Check out a client, stream the query results row-by-row through the CSV
// transform, and pipe the CSV text to the response.
pool.connect((err, client, done) => {
  if (err) {
    throw err;
  }
  const query = new QueryStream('select * from public.prenoms_dep;');
  const stream = client.query(query);
  // Release the client whether the stream finishes or fails. The original
  // only released on 'end', so any query error leaked the pooled client and
  // left the error unhandled (an unhandled 'error' event crashes Node).
  stream.on('end', () => done());
  stream.on('error', (streamErr) => {
    done(streamErr); // hand the broken client back so the pool discards it
    // NOTE(review): `res` is not declared in this snippet — presumably the
    // HTTP response of an enclosing route handler. Confirm how a mid-stream
    // failure should be reported to the client (the body may be truncated).
    res.end();
  });
  stream.pipe(CSVStream.stringify()).pipe(res);
});
My CSVStream module is shown below:
var through = require('through');
exports.stringify = function (op, sep, cl) {
op = ''
sep = '\n'
cl = ''
var stream
, first = true
, anyData = false
stream = through(function (data) {
anyData = true
try {
var strTemp = "";
for (let val of Object.values(data)) {
strTemp = strTemp + val.toString() + ",";
}
strTemp = strTemp.slice(0, -1);
} catch (err) {
return stream.emit('error', err)
}
if (first) {
first = false;
var headers = "";
for (let val of Object.keys(data)) {
headers = headers + val.toString() + ",";
}
headers = headers.slice(0,-1);
stream.queue(op + headers + sep + strTemp)
} else stream.queue(sep + strTemp)
},
function (data) {
if (!anyData)
stream.queue(op)
stream.queue(cl)
stream.queue(null)
})
return stream
}
When I run it in pgAdmin, the SQL query takes ~10 seconds. When going through Node, it takes at least three times as long.
How would I isolate where I'm wasting time in my code?
How do I make sure it takes roughly the same time as it does in pgAdmin?