'use strict';
const common = require('../common');
const assert = require('assert');
const { createGzip, createGunzip, Z_PARTIAL_FLUSH } = require('zlib');

// Verify that .flush() behaves like .write() in terms of ordering, e.g. in
// a sequence like .write() + .flush() + .write() + .flush(), each .flush()
// call only affects the data written before it.
// Refs: https://github.com/nodejs/node/issues/28478

const compress = createGzip();
const decompress = createGunzip();
decompress.setEncoding('utf8');

const events = [];
const compressedChunks = [];

for (const chunk of ['abc', 'def', 'ghi']) {
  compress.write(chunk, common.mustCall(() => events.push({ written: chunk })));
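  // Flush with Z_PARTIAL_FLUSH so the compressed bytes for the chunk written
  // above become readable right away; collect them for the decompression
  // pass below.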
  compress.flush(Z_PARTIAL_FLUSH, common.mustCall(() => {
    events.push('flushed');
    const chunk = compress.read();
    if (chunk !== null)
      compressedChunks.push(chunk);
  }));
}

compress.end(common.mustCall(() => {
  events.push('compress end');
  writeToDecompress();
}));

function writeToDecompress() {
  // Write the compressed chunks to a decompressor, one by one, in order to
  // verify that the flushes actually worked.
  const chunk = compressedChunks.shift();
  if (chunk === undefined) return decompress.end();
  decompress.write(chunk, common.mustCall(() => {
    events.push({ read: decompress.read() });
    writeToDecompress();
  }));
}

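// On exit, check the recorded ordering: each flush callback ran after the
// write callback that preceded it, and decompressing the collected chunks
// one at a time yields the original data chunk by chunk, i.e. each flush
// covered exactly the data written before it.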
process.on('exit', () => {
  assert.deepStrictEqual(events, [
    { written: 'abc' },
    'flushed',
    { written: 'def' },
    'flushed',
    { written: 'ghi' },
    'flushed',
    'compress end',
    { read: 'abc' },
    { read: 'def' },
    { read: 'ghi' },
  ]);
});