Change the destination file in a file writer stream

387 Views Asked by At

I want to log to a file continuously, but after every 1000 lines I want to switch to a new file. Currently my method works like this:

var fs = require('fs');
...
// Open the CSV write stream for the current log file.
var outputStream = fs.createWriteStream(fileName + '.csv');
outputStream.write(content, 'utf8', callback);
...
// Rotate to a new file once 1000 lines have been written.
// NOTE(review): writes issued after lineCounter hits 1000 but before the
// end() callback has run still target the old, already-ending stream —
// presumably this is why the last few lines go missing; verify against the
// surrounding write loop (not shown here).
if (lineCounter === 1000) {
  outputStream.end(function(err) {
    // Previous file has flushed; swap in a fresh stream for the next file.
    outputStream = fs.createWriteStream(fileName2 + '.csv');
    outputStream.write(content, 'utf8', callback);
  });
}

In the end, the files don't contain the last few lines. I'm open to any solution; I just need the stream to write into several files.

Thanks in advance!

1

There is 1 best solution below

0
On

At first I tried using the streams of Highland.js, but I couldn't pause them for some reason. The script I am posting is tested and working; I share the original source at the end. I haven't actually started reading a second file, but I believe that is easy now, as you have a clear point from which to proceed once the script has reached the defined limit of lines.

// Demo: pipe a file through a Transform that forwards only the first 5 chunks
// and then ends itself, which unpipes the upstream reader and stops the flow.
// One declaration per statement (the original comma-chained `var` ended in
// `limit = 0` with no semicolon and only parsed thanks to ASI).
var stream = require('stream');
var fs = require('fs');

var readStream = fs.createReadStream('./stream.txt', { highWaterMark: 15 });
var limit = 0;

// Pass the transform function through the simplified constructor instead of
// monkey-patching `_transform` onto the instance after construction.
var limitStream = new stream.Transform({
  transform: function(chunk, encoding, cb) {
    if (++limit <= 5) {
      console.log('before', limit);
      // Forward the chunk (with a newline appended) downstream.
      return cb(null, chunk + '\n');
    }
    console.log('after', limit);
    // End the writable side: the source unpipes and the stream flushes/ends.
    this.end();
    cb();
  }
});

limitStream.on('unpipe', function() { console.log('unpipe emitted from limitStream'); });
limitStream.on('end', function() { console.log('end emitted from limitStream'); });

readStream.pipe(limitStream).pipe(process.stdout);

Source: https://groups.google.com/forum/#!topic/nodejs/eGukJUQrOBY

After posting the answer, I found library, that can also work, but I admit that I haven't tested it. I just share it as a reference point: https://github.com/isaacs/truncating-stream