
Commit 6940252

addaleax authored and targos committed
fs: read full size if known in promises.readFile
If we have an estimate of the file size available from the previous stat call, use that for the size of the first chunk to be read. This increases performance by reading more data (and, most likely, all data) at once without incurring memory overhead in most situations.

PR-URL: #37127
Reviewed-By: Antoine du Hamel <duhamelantoine1995@gmail.com>
Reviewed-By: Colin Ihrig <cjihrig@gmail.com>
Reviewed-By: Darshan Sen <raisinten@gmail.com>
Reviewed-By: Zijian Liu <lxxyxzj@gmail.com>
Reviewed-By: Benjamin Gruenbaum <benjamingr@gmail.com>
Reviewed-By: Juan José Arboleda <soyjuanarbol@gmail.com>
Reviewed-By: Luigi Pinca <luigipinca@gmail.com>
Reviewed-By: James M Snell <jasnell@gmail.com>
1 parent ad12fef commit 6940252
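For illustration, here is a minimal, self-contained sketch of the approach the commit describes, written against the public FileHandle API rather than Node's internals. The helper name readWholeFile and the MAX_CHUNK constant are invented for this example; MAX_CHUNK only stands in for the role kReadFileMaxChunkSize plays in the real code.

'use strict';
const { open } = require('fs/promises');

// Illustrative cap on per-read buffer size (not Node's actual constant).
const MAX_CHUNK = 512 * 1024;

async function readWholeFile(path) {
  const handle = await open(path, 'r');
  try {
    // Use the stat() result to size the first read, so most files are
    // read in a single call. size === 0 means "size unknown" here.
    const { size } = await handle.stat();
    const firstChunkSize = size === 0 ? MAX_CHUNK : size;
    const chunkSize = Math.min(firstChunkSize, MAX_CHUNK);

    const chunks = [];
    let isFirstChunk = true;
    let endOfFile = false;
    do {
      const buf = Buffer.alloc(isFirstChunk ? firstChunkSize : chunkSize);
      // position -1: read from the current file position.
      const { bytesRead, buffer } = await handle.read(buf, 0, buf.length, -1);
      endOfFile = bytesRead === 0;
      if (bytesRead > 0) chunks.push(buffer.slice(0, bytesRead));
      isFirstChunk = false;
    } while (!endOfFile);

    return chunks.length === 1 ? chunks[0] : Buffer.concat(chunks);
  } finally {
    await handle.close();
  }
}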

File tree

1 file changed: +6 −5 lines changed


lib/internal/fs/promises.js

+6 −5
@@ -315,20 +315,21 @@ async function readFileHandle(filehandle, options) {
     throw new ERR_FS_FILE_TOO_LARGE(size);
 
   const chunks = [];
-  const chunkSize = size === 0 ?
-    kReadFileMaxChunkSize :
-    MathMin(size, kReadFileMaxChunkSize);
+  let isFirstChunk = true;
+  const firstChunkSize = size === 0 ? kReadFileMaxChunkSize : size;
+  const chunkSize = MathMin(firstChunkSize, kReadFileMaxChunkSize);
   let endOfFile = false;
   do {
     if (signal?.aborted) {
       throw lazyDOMException('The operation was aborted', 'AbortError');
     }
-    const buf = Buffer.alloc(chunkSize);
+    const buf = Buffer.alloc(isFirstChunk ? firstChunkSize : chunkSize);
     const { bytesRead, buffer } =
-      await read(filehandle, buf, 0, chunkSize, -1);
+      await read(filehandle, buf, 0, buf.length, -1);
     endOfFile = bytesRead === 0;
     if (bytesRead > 0)
       ArrayPrototypePush(chunks, buffer.slice(0, bytesRead));
+    isFirstChunk = false;
   } while (!endOfFile);
 
   const result = chunks.length === 1 ? chunks[0] : Buffer.concat(chunks);
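From the caller's side nothing changes except performance: fs.promises.readFile still returns the whole file, but the first read is now sized from the preceding stat, so most files come back in a single read. The usage sketch below is illustrative (the path and the one-second timeout are arbitrary); the signal option it passes is the same one checked by signal?.aborted in the loop above.

'use strict';
const { readFile } = require('fs/promises');

async function main() {
  const controller = new AbortController();
  // Abort the read if it takes longer than one second (arbitrary timeout).
  const timer = setTimeout(() => controller.abort(), 1000);
  try {
    const data = await readFile('/etc/hosts', { signal: controller.signal });
    console.log(`read ${data.length} bytes, most likely in a single chunk`);
  } finally {
    clearTimeout(timer);
  }
}

main().catch(console.error);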
