This is my code:
class Counter extends Readable {
  constructor() {
    super();
  }

  _read() {
    readStream.on('data', (data) => {
      console.log(JSON.stringify(data));
      this.push(JSON.stringify(data));
    });
    readStream.on('end', () => {
      this.push(null);
    });
  }
}
I am getting this warning, and the processing also becomes very slow:
(node:18488) MaxListenersExceededWarning: Possible EventEmitter memory leak detected. 11 end listeners added. Use emitter.setMaxListeners() to increase limit
(node:18488) MaxListenersExceededWarning: Possible EventEmitter memory leak detected. 11 data listeners added. Use emitter.setMaxListeners() to increase limit
This is my full code. Basically, I want to read a CSV file, convert it to JSON, and write the result to a file, doing the whole operation with streams. I am using the 'csv-parser' npm module.
const fs = require('fs');
const { Readable } = require('stream');
const csv = require('csv-parser');

class Counter extends Readable {
  constructor() {
    super();
  }

  _read() {
    readStream.on('data', (data) => {
      console.log(JSON.stringify(data));
      this.push(JSON.stringify(data));
    });
    readStream.on('end', () => {
      this.push(null);
    });
  }
}

const writeStream = fs.createWriteStream('tmp1');
const readStream = fs.createReadStream('tmp.csv').pipe(csv());

const counter = new Counter();
counter.pipe(writeStream);
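I suspect the warning is related to _read() being called more than once, so my 'data' and 'end' handlers get attached to readStream again on every call. To convince myself that _read() really is invoked repeatedly, I wrote this small, separate sketch (it is only an illustration, not part of my project code):

const { Readable } = require('stream');

class Probe extends Readable {
  constructor() {
    super();
    this.remaining = 5;
  }

  _read() {
    // Logs an increasing count each time the stream machinery asks for more data.
    console.count('_read called');
    this.push(this.remaining-- > 0 ? 'chunk\n' : null);
  }
}

new Probe().pipe(process.stdout);

It prints "_read called" several times even for this tiny stream, which would explain why the listener count on readStream keeps growing in my Counter class.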
I tried writing the 1.48 GB CSV file as a JSON array to the output file, and it didn't give any of the errors or warnings that you have mentioned.
FYI: the dataset is taken from https://catalog.data.gov/dataset?res_format=CSV
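For reference, this is roughly the shape of the pipeline I used. It is only a sketch (the toJsonArray Transform and the file names are illustrative, not your exact code): everything is piped, so no listeners are ever attached inside _read().

const fs = require('fs');
const { Transform } = require('stream');
const csv = require('csv-parser');

// Sketch only: builds one big JSON array from the parsed CSV rows.
let first = true;
const toJsonArray = new Transform({
  writableObjectMode: true, // receives row objects from csv-parser
  transform(row, _enc, callback) {
    // Prefix every row after the first with a comma so the output stays valid JSON.
    const prefix = first ? '[' : ',';
    first = false;
    callback(null, prefix + JSON.stringify(row));
  },
  flush(callback) {
    // Close the array; emit "[]" if the CSV had no data rows.
    callback(null, first ? '[]' : ']');
  }
});

fs.createReadStream('tmp.csv')
  .pipe(csv())        // one object per CSV row
  .pipe(toJsonArray)  // stringify rows into a single JSON array
  .pipe(fs.createWriteStream('output.json'));

The Transform accepts one object per row from csv-parser (writableObjectMode: true) and emits plain strings, so the write stream only ever sees text and backpressure is handled by pipe().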