Node.js: how to pipe a gzip stream to a write stream
I can't seem to get this to work. I want to write some data to a gzip stream, pipe the gzip stream to a file write stream, and then call a function when the file is done writing. I currently have:
var gz = zlib.createGzip()
    .pipe(fs.createWriteStream(gz_path));

gz.write(data);

gz.on('error', function(err){
    console.log("wtf error", err);
});

gz.on('finish', function(){
    console.log("write stream is done");
    dosomething();
});
Neither the finish handler nor the error handler is ever called.
Solution 1:[1]
My example:
async function run(){
    var zlib = require('zlib');
    var fs = require("fs");

    var gz = zlib.createGzip();
    gz.on('error', function(err){ console.log(err.stack); });
    // gz.on('finish', function() { console.log("finished compression, now need to finish writing..."); });

    var f = fs.createWriteStream('test.txt.gz');
    f.on('error', function(err){ console.log(err.stack); });
    f.on('finish', function() { console.log("Write success."); });

    gz.pipe(f);

    // if write() returns false we have to wait for 'drain' before continuing,
    // e.g. by awaiting a promise in an async function (or by continuing in a 'drain' callback)
    if(!gz.write('test','utf-8'))
        await new Promise( (resolve,reject)=> gz.once('drain', resolve) );

    gz.end();
    console.log("Compress zip file success.");
}
run();
The code in the question had three problems:
- pipe returns its destination (its argument), not the stream it was called on (see the quick check below)
- the stream has to be ended, otherwise the data is never flushed to the output
- it is better to set up the event handlers before the events can fire
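As a quick check of the first point (this snippet is an illustration, not part of the original answer):

var fs = require("fs");
var zlib = require("zlib");

var gz = zlib.createGzip();
var f = fs.createWriteStream("test.txt.gz");

// pipe() returns its argument, i.e. the destination write stream
console.log(gz.pipe(f) === f); // true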
// needed for the code in the question to run:
function dosomething(){
}
var gz_path = "test.txt.gz";
var data = "test";

// corrected code
var fs = require("fs");
var zlib = require('zlib');

// pipe returns its argument, so the following would be correct (f would be the write stream):
// var f = zlib.createGzip().pipe(fs.createWriteStream(gz_path));
var gz = zlib.createGzip();
gz.pipe(fs.createWriteStream(gz_path));

// it helps to set up event handlers before the events fire
gz.on('error', function(err){
    console.log("wtf error", err);
});
gz.on('finish', function(){
    console.log("write stream is done");
    dosomething();
});

gz.write(data);
// the stream has to be ended for the data to be written
gz.end();
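Note that the 'finish' handler above is attached to the gzip stream, so it fires once the gzip stream has finished processing its input, not necessarily once the file write stream has finished writing. If the goal is to call dosomething() only when the file itself is done, one option, sketched here assuming Node 10 or later where stream.pipeline is available, is to let pipeline wire up the chain and invoke a single callback on completion or on the first error:

var fs = require("fs");
var zlib = require("zlib");
var { pipeline } = require("stream");

var gz = zlib.createGzip();

// the callback runs after the write stream has finished,
// or with the first error from any stream in the chain
pipeline(gz, fs.createWriteStream(gz_path), function(err){
    if (err) { console.log("wtf error", err); return; }
    console.log("write stream is done");
    dosomething();
});

gz.write(data);
gz.end();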
Solution 2:[2]
Try:
var zlib = require('zlib');
var stream = require('stream');
var util = require('util');
var fs = require('fs');

var gz = zlib.createGzip();

// Transform stream: takes objects in, pushes their JSON string form out
function StringifyStream(){
    stream.Transform.call(this);
    this._readableState.objectMode = false;
    this._writableState.objectMode = true;
}
util.inherits(StringifyStream, stream.Transform);

StringifyStream.prototype._transform = function(obj, encoding, cb){
    this.push(JSON.stringify(obj));
    cb();
};

var data = "some data in here";

// object-mode readable fed with a single chunk, then ended
var rs = new stream.Readable({ objectMode: true });
rs.push(data);
rs.push(null);

rs.pipe(new StringifyStream())
    .pipe(gz)
    .pipe(fs.createWriteStream('test.gz'))
    .on('error', function(err){
        console.log("wtf error", err);
    })
    .on('finish', function(){
        console.log("write stream is done");
        // dosomething();
    });
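For reference, the same StringifyStream can also be written with class syntax and the writableObjectMode/readableObjectMode options instead of util.inherits and the internal _readableState/_writableState flags; a minimal equivalent sketch (not from the original answer):

var { Transform } = require('stream');

class StringifyStream extends Transform {
    constructor(){
        // accept objects on the writable side, emit plain strings on the readable side
        super({ writableObjectMode: true, readableObjectMode: false });
    }
    _transform(obj, encoding, cb){
        this.push(JSON.stringify(obj));
        cb();
    }
}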
Solution 3:[3]
import { createReadStream, createWriteStream } from 'fs'
import { createGzip } from 'zlib'

const filename = yourFile

// opens the file as a readable stream
createReadStream(filename)
    .pipe(createGzip())
    .pipe(createWriteStream(`${filename}.gz`))
    .on('finish', () => console.log('File compressed'))
Solution 4:[4]
The order of the pipes might be the problem. I was able to resolve this by piping through createGzip before createWriteStream. For example:
import { Readable } from "stream";
import zlib from "zlib";
import fs from "fs";

// data is whatever you want to compress
Readable.from([data])
    .pipe(zlib.createGzip())
    .pipe(fs.createWriteStream(`filePath.gzip`))
    .on("error", (error) => {
        // handle error
    })
    .on("finish", () => {
        // handle success
    });
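The same chain can also be written with the promise-based pipeline, sketched here assuming Node 15 or later (where stream/promises exists) and an ES module context so that top-level await is allowed; data is the same placeholder as in the snippet above:

import { Readable } from "stream";
import { pipeline } from "stream/promises";
import zlib from "zlib";
import fs from "fs";

try {
    // resolves once the write stream has finished; rejects on the first error in the chain
    await pipeline(
        Readable.from([data]),
        zlib.createGzip(),
        fs.createWriteStream(`filePath.gzip`)
    );
    // handle success
} catch (error) {
    // handle error
}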
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow
| Solution | Source |
|---|---|
| Solution 1 | |
| Solution 2 | |
| Solution 3 | |
| Solution 4 | Hieu Nguyen |