Commit 9fb7ac3

stream: need to cleanup event listeners if last stream is readable
fix: #35452

PR-URL: #41954
Fixes: #35452
Reviewed-By: Robert Nagy <ronagy@icloud.com>
Reviewed-By: Matteo Collina <matteo.collina@gmail.com>
1 parent cc505a5 commit 9fb7ac3
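In user-visible terms, `pipeline()` no longer leaves its internal `'error'` listener dangling on the last stream when that stream is readable, so the stream can be reused or consumed after the callback fires. A rough sketch of the behavior this commit targets (the stream setup is illustrative and mirrors the new test added below):

'use strict';
const { pipeline, PassThrough, Duplex } = require('stream');

const src = new PassThrough();
src.end('foobar');

// A duplex sink that is still readable after pipeline's callback has fired.
const dst = new Duplex({
  read() {},
  write(chunk, encoding, callback) {
    callback();
  }
});

pipeline(src, dst, (err) => {
  if (err) throw err;
  // With this commit, the dangling listeners pipeline added to `dst` (the
  // last, readable stream) have been removed by the time the callback runs,
  // so later errors on `dst` are no longer swallowed by pipeline's handler.
  console.log(dst.listenerCount('error')); // 0 after this change
});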

File tree: 3 files changed, +120 -12 lines changed

doc/api/stream.md (+3 -1)

@@ -2575,7 +2575,9 @@ run().catch(console.error);
 
 `stream.pipeline()` leaves dangling event listeners on the streams
 after the `callback` has been invoked. In the case of reuse of streams after
-failure, this can cause event listener leaks and swallowed errors.
+failure, this can cause event listener leaks and swallowed errors. If the last
+stream is readable, dangling event listeners will be removed so that the last
+stream can be consumed later.
 
 `stream.pipeline()` closes all the streams when an error is raised.
 The `IncomingRequest` usage with `pipeline` could lead to an unexpected behavior
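A minimal sketch of the documented behavior, using the promise-based pipeline API (the in-memory source data is illustrative): once `pipeline()` settles, the readable last stream carries no dangling listeners and can still be consumed.

'use strict';
const { pipeline } = require('stream/promises');
const { PassThrough, Readable } = require('stream');

async function run() {
  const source = Readable.from(['hello, ', 'world']); // small in-memory source
  const pt = new PassThrough();

  // Resolves once `pt` (the last stream) has finished being written to.
  // Because `pt` is readable, pipeline removes its own listeners from it.
  await pipeline(source, pt);

  // The buffered data in the last stream can be consumed afterwards.
  let out = '';
  for await (const chunk of pt) out += chunk;
  console.log(out); // 'hello, world'
}

run().catch(console.error);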

lib/internal/streams/pipeline.js (+41 -11)

@@ -31,6 +31,7 @@ const {
 
 const {
   isIterable,
+  isReadable,
   isReadableNodeStream,
   isNodeStream,
 } = require('internal/streams/utils');

@@ -45,14 +46,17 @@ function destroyer(stream, reading, writing) {
     finished = true;
   });
 
-  eos(stream, { readable: reading, writable: writing }, (err) => {
+  const cleanup = eos(stream, { readable: reading, writable: writing }, (err) => {
     finished = !err;
   });
 
-  return (err) => {
-    if (finished) return;
-    finished = true;
-    destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe'));
+  return {
+    destroy: (err) => {
+      if (finished) return;
+      finished = true;
+      destroyImpl.destroyer(stream, err || new ERR_STREAM_DESTROYED('pipe'));
+    },
+    cleanup
   };
 }
 

@@ -159,6 +163,10 @@ function pipelineImpl(streams, callback, opts) {
   const signal = ac.signal;
   const outerSignal = opts?.signal;
 
+  // Need to cleanup event listeners if last stream is readable
+  // https://github.com/nodejs/node/issues/35452
+  const lastStreamCleanup = [];
+
   validateAbortSignal(outerSignal, 'options.signal');
 
   function abort() {

@@ -194,6 +202,9 @@ function pipelineImpl(streams, callback, opts) {
     ac.abort();
 
     if (final) {
+      if (!error) {
+        lastStreamCleanup.forEach((fn) => fn());
+      }
       process.nextTick(callback, error, value);
     }
   }

@@ -204,22 +215,34 @@ function pipelineImpl(streams, callback, opts) {
     const reading = i < streams.length - 1;
     const writing = i > 0;
     const end = reading || opts?.end !== false;
+    const isLastStream = i === streams.length - 1;
 
     if (isNodeStream(stream)) {
       if (end) {
-        destroys.push(destroyer(stream, reading, writing));
+        const { destroy, cleanup } = destroyer(stream, reading, writing);
+        destroys.push(destroy);
+
+        if (isReadable(stream) && isLastStream) {
+          lastStreamCleanup.push(cleanup);
+        }
       }
 
       // Catch stream errors that occur after pipe/pump has completed.
-      stream.on('error', (err) => {
+      function onError(err) {
        if (
          err &&
          err.name !== 'AbortError' &&
          err.code !== 'ERR_STREAM_PREMATURE_CLOSE'
        ) {
          finish(err);
        }
-      });
+      }
+      stream.on('error', onError);
+      if (isReadable(stream) && isLastStream) {
+        lastStreamCleanup.push(() => {
+          stream.removeListener('error', onError);
+        });
+      }
     }
 
     if (i === 0) {

@@ -285,12 +308,19 @@ function pipelineImpl(streams, callback, opts) {
 
       ret = pt;
 
-      destroys.push(destroyer(ret, false, true));
+      const { destroy, cleanup } = destroyer(ret, false, true);
+      destroys.push(destroy);
+      if (isLastStream) {
+        lastStreamCleanup.push(cleanup);
+      }
     }
   } else if (isNodeStream(stream)) {
     if (isReadableNodeStream(ret)) {
       finishCount += 2;
-      pipe(ret, stream, finish, { end });
+      const cleanup = pipe(ret, stream, finish, { end });
+      if (isReadable(stream) && isLastStream) {
+        lastStreamCleanup.push(cleanup);
+      }
     } else if (isIterable(ret)) {
       finishCount++;
       pump(ret, stream, finish, { end });

@@ -345,7 +375,7 @@ function pipe(src, dst, finish, { end }) {
       finish(err);
     }
   });
-  eos(dst, { readable: false, writable: true }, finish);
+  return eos(dst, { readable: false, writable: true }, finish);
 }
 
 module.exports = { pipelineImpl, pipeline };
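The refactor works because `eos` (the internal helper behind the public `stream.finished()`) returns a function that detaches the listeners it installed; `destroyer()` and `pipe()` now hand that function back so `pipelineImpl` can invoke it for a readable last stream. The public equivalent of that pattern, as a rough sketch (the file name is illustrative):

const { finished } = require('stream');
const fs = require('fs');

const rs = fs.createReadStream('archive.tar'); // illustrative input

// finished() returns a cleanup function that removes the listeners it added,
// much like the `cleanup` values collected in lastStreamCleanup above.
const cleanup = finished(rs, (err) => {
  cleanup(); // detach finished()'s listeners once the callback has run
  if (err) {
    console.error('stream failed:', err);
  } else {
    console.log('stream is done reading');
  }
});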
New test file (+76 -0)

@@ -0,0 +1,76 @@
+'use strict';
+
+const common = require('../common');
+const { pipeline, Duplex, PassThrough, Writable } = require('stream');
+const assert = require('assert');
+
+process.on('uncaughtException', common.mustCall((err) => {
+  assert.strictEqual(err.message, 'no way');
+}, 2));
+
+// Ensure that listeners are removed if the last stream is readable,
+// and that the other streams' listeners are unchanged.
+const a = new PassThrough();
+a.end('foobar');
+const b = new Duplex({
+  write(chunk, encoding, callback) {
+    callback();
+  }
+});
+pipeline(a, b, common.mustCall((error) => {
+  if (error) {
+    assert.ifError(error);
+  }
+
+  assert(a.listenerCount('error') > 0);
+  assert.strictEqual(b.listenerCount('error'), 0);
+  setTimeout(() => {
+    assert.strictEqual(b.listenerCount('error'), 0);
+    b.destroy(new Error('no way'));
+  }, 100);
+}));
+
+// Async generators
+const c = new PassThrough();
+c.end('foobar');
+const d = pipeline(
+  c,
+  async function* (source) {
+    for await (const chunk of source) {
+      yield String(chunk).toUpperCase();
+    }
+  },
+  common.mustCall((error) => {
+    if (error) {
+      assert.ifError(error);
+    }
+
+    assert(c.listenerCount('error') > 0);
+    assert.strictEqual(d.listenerCount('error'), 0);
+    setTimeout(() => {
+      assert.strictEqual(b.listenerCount('error'), 0);
+      d.destroy(new Error('no way'));
+    }, 100);
+  })
+);
+
+// If the last stream is not readable, listeners are not removed and no error is thrown.
+const e = new PassThrough();
+e.end('foobar');
+const f = new Writable({
+  write(chunk, encoding, callback) {
+    callback();
+  }
+});
+pipeline(e, f, common.mustCall((error) => {
+  if (error) {
+    assert.ifError(error);
+  }
+
+  assert(e.listenerCount('error') > 0);
+  assert(f.listenerCount('error') > 0);
+  setTimeout(() => {
+    assert(f.listenerCount('error') > 0);
+    f.destroy(new Error('no way'));
+  }, 100);
+}));
