Skip to content

Commit 04939ee

Browse files
authored
fix: destroy sockets to stop memory leaking when stream errors (#2336)
1 parent 00d744d commit 04939ee

File tree

1 file changed

+7
-0
lines changed

1 file changed

+7
-0
lines changed

src/file.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1433,6 +1433,7 @@ class File extends ServiceObject<File, FileMetadata> {
14331433
const tailRequest = options.end! < 0;
14341434

14351435
let validateStream: HashStreamValidator | undefined = undefined;
1436+
let request: r.Request | undefined = undefined;
14361437

14371438
const throughStream = new PassThroughShim();
14381439

@@ -1464,6 +1465,11 @@ class File extends ServiceObject<File, FileMetadata> {
14641465

14651466
const onComplete = (err: Error | null) => {
14661467
if (err) {
1468+
// There is an issue with node-fetch 2.x where, if the stream errors, the underlying socket connection is not closed.
1469+
// This causes a memory leak, so clean up the sockets manually here by destroying the agent.
1470+
if (request?.agent) {
1471+
request.agent.destroy();
1472+
}
14671473
throughStream.destroy(err);
14681474
}
14691475
};
@@ -1492,6 +1498,7 @@ class File extends ServiceObject<File, FileMetadata> {
14921498
return;
14931499
}
14941500

1501+
request = (rawResponseStream as r.Response).request;
14951502
const headers = (rawResponseStream as ResponseBody).toJSON().headers;
14961503
const isCompressed = headers['content-encoding'] === 'gzip';
14971504
const hashes: {crc32c?: string; md5?: string} = {};

0 commit comments

Comments
 (0)