From adf0d37a4de3ff0c7abf5c4f4ea7616a2da28459 Mon Sep 17 00:00:00 2001
From: Ciro Spaciari
Date: Fri, 27 Dec 2024 23:47:07 -0800
Subject: [PATCH] less aggressive

---
 test/js/bun/s3/bun-write-leak-fixture.js |  5 ++---
 test/js/bun/s3/s3-stream-leak-fixture.js | 13 +++++++------
 test/js/bun/s3/s3-writer-leak-fixture.js |  5 ++++-
 test/js/bun/s3/s3.test.ts                | 14 +++++++-------
 4 files changed, 20 insertions(+), 17 deletions(-)

diff --git a/test/js/bun/s3/bun-write-leak-fixture.js b/test/js/bun/s3/bun-write-leak-fixture.js
index a00a94fa963cfe..5f4a2358118ff8 100644
--- a/test/js/bun/s3/bun-write-leak-fixture.js
+++ b/test/js/bun/s3/bun-write-leak-fixture.js
@@ -13,6 +13,7 @@ async function run(inputType) {
   for (let i = 0; i < 5; i++) {
     const largeFile = inputType;
     await Bun.write(dest, largeFile);
+    await Bun.sleep(10);
     Bun.gc(true);
     if (!MAX_ALLOWED_MEMORY_USAGE) {
       MAX_ALLOWED_MEMORY_USAGE = ((process.memoryUsage.rss() / 1024 / 1024) | 0) + MAX_ALLOWED_MEMORY_USAGE_INCREMENT;
@@ -24,8 +25,6 @@ async function run(inputType) {
     }
   }
 }
-// 30 MB, plain-text ascii
-await s3file.write(new Buffer(1024 * 1024 * 1).fill("A".charCodeAt(0)).toString("utf-8"));
-await run(s3file);
+await s3file.write(Buffer.alloc(1024 * 1024 * 1, "A".charCodeAt(0)).toString("utf-8"));
 await run(`s3://${s3Dest}`);
 await s3file.unlink();
diff --git a/test/js/bun/s3/s3-stream-leak-fixture.js b/test/js/bun/s3/s3-stream-leak-fixture.js
index 9f2ee258d4072c..d67635348a9d72 100644
--- a/test/js/bun/s3/s3-stream-leak-fixture.js
+++ b/test/js/bun/s3/s3-stream-leak-fixture.js
@@ -8,19 +8,20 @@ const { randomUUID } = require("crypto");
 
 const s3Dest = randomUUID();
 const s3file = Bun.s3(s3Dest);
-async function readLargeFile(inputType) {
-  const stream = s3file.stream();
+async function readLargeFile() {
+  const stream = Bun.s3(s3Dest).stream();
   const reader = stream.getReader();
   while (true) {
     const { done, value } = await reader.read();
     if (done) break;
   }
-  Bun.gc(true);
 }
 async function run(inputType) {
   await s3file.write(inputType);
-  for (let i = 0; i < 10; i++) {
-    await readLargeFile(inputType);
+  Bun.gc(true);
+  for (let i = 0; i < 5; i++) {
+    await readLargeFile();
+    await Bun.sleep(10);
     Bun.gc(true);
     if (!MAX_ALLOWED_MEMORY_USAGE) {
       MAX_ALLOWED_MEMORY_USAGE = ((process.memoryUsage.rss() / 1024 / 1024) | 0) + MAX_ALLOWED_MEMORY_USAGE_INCREMENT;
@@ -32,5 +33,5 @@ async function run(inputType) {
     }
   }
 }
-await run(new Buffer(1024 * 1024 * 1).fill("A".charCodeAt(0)).toString("utf-8"));
+await run(Buffer.alloc(1024 * 1024 * 1, "A".charCodeAt(0)).toString("utf-8"));
 await s3file.unlink();
diff --git a/test/js/bun/s3/s3-writer-leak-fixture.js b/test/js/bun/s3/s3-writer-leak-fixture.js
index 49f52b9676814e..ee7233a99b4e9b 100644
--- a/test/js/bun/s3/s3-writer-leak-fixture.js
+++ b/test/js/bun/s3/s3-writer-leak-fixture.js
@@ -12,8 +12,11 @@ const s3file = Bun.s3(s3Dest);
 async function writeLargeFile(inputType) {
   const writer = s3file.writer();
   writer.write(inputType);
+  await Bun.sleep(10);
+  writer.write(inputType);
   await writer.end();
   Bun.gc(true);
+  await Bun.sleep(10);
 }
 async function run(inputType) {
   for (let i = 0; i < 5; i++) {
@@ -29,5 +32,5 @@ async function run(inputType) {
     }
   }
 }
-await run(new Buffer(1024 * 1024 * 1).fill("A".charCodeAt(0)).toString("utf-8"));
+await run(Buffer.alloc(1024 * 512 * 1, "A".charCodeAt(0)).toString("utf-8"));
 await s3file.unlink();
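
Note for reviewers: all three fixtures share the same RSS-threshold leak check. Below is a minimal standalone sketch of that pattern; `checkForLeaks` and the 10 MB increment are names/values invented for this sketch (the fixtures' MAX_ALLOWED_MEMORY_USAGE_INCREMENT is defined outside this diff), while the Bun.s3, Bun.gc, Bun.sleep, and process.memoryUsage.rss calls mirror the fixture code above.

const { randomUUID } = require("crypto");

// Allowed RSS growth, in MB, before the run counts as a leak (assumed value).
const MAX_ALLOWED_MEMORY_USAGE_INCREMENT = 10;
let MAX_ALLOWED_MEMORY_USAGE = 0;

// Hypothetical helper: run `doWork` a few times and fail if RSS climbs past
// a baseline captured on the first iteration.
async function checkForLeaks(doWork) {
  for (let i = 0; i < 5; i++) {
    await doWork();
    await Bun.sleep(10); // let pending I/O and finalizers settle before measuring
    Bun.gc(true); // force a full GC so surviving buffers count as real leaks
    const rss = (process.memoryUsage.rss() / 1024 / 1024) | 0; // RSS in whole MB
    if (!MAX_ALLOWED_MEMORY_USAGE) {
      MAX_ALLOWED_MEMORY_USAGE = rss + MAX_ALLOWED_MEMORY_USAGE_INCREMENT; // baseline + headroom
    } else if (rss > MAX_ALLOWED_MEMORY_USAGE) {
      throw new Error(`leak: rss ${rss} MB > ${MAX_ALLOWED_MEMORY_USAGE} MB`);
    }
  }
}

// Example: stress the streaming read path, as s3-stream-leak-fixture.js does.
const s3Dest = randomUUID();
const s3file = Bun.s3(s3Dest);
await s3file.write(Buffer.alloc(1024 * 1024, "A".charCodeAt(0)).toString("utf-8"));
await checkForLeaks(async () => {
  const reader = Bun.s3(s3Dest).stream().getReader();
  while (true) {
    const { done } = await reader.read();
    if (done) break;
  }
});
await s3file.unlink();

The Bun.sleep(10) before each measurement is what this patch adds throughout: it yields to the event loop so buffers still referenced by in-flight S3 work can be released before the RSS check runs.
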
diff --git a/test/js/bun/s3/s3.test.ts b/test/js/bun/s3/s3.test.ts
index e33272fad36236..708ff3bbe15fe2 100644
--- a/test/js/bun/s3/s3.test.ts
+++ b/test/js/bun/s3/s3.test.ts
@@ -19,8 +19,8 @@ function makePayLoadFrom(text: string, size: number): string {
   return text.slice(0, size);
 }
 
-// 15 MiB big enough to Multipart upload in more than one part
-const bigPayload = makePayLoadFrom("Bun is the best runtime ever", 15 * 1024 * 1024);
+// 10 MiB is big enough for a multipart upload with more than one part
+const bigPayload = makePayLoadFrom("Bun is the best runtime ever", 10 * 1024 * 1024);
 const bigishPayload = makePayLoadFrom("Bun is the best runtime ever", 1 * 1024 * 1024);
 
 describe.skipIf(!s3Options.accessKeyId)("s3", () => {
@@ -106,13 +106,13 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => {
       });
 
       it("should be able to upload large files", async () => {
-        // 15 MiB big enough to Multipart upload in more than one part
+        // 10 MiB is big enough for a multipart upload with more than one part
        const buffer = Buffer.alloc(1 * 1024 * 1024, "a");
         {
           await fetch(tmp_filename, {
             method: "PUT",
             body: async function* () {
-              for (let i = 0; i < 15; i++) {
+              for (let i = 0; i < 10; i++) {
                 await Bun.sleep(10);
                 yield buffer;
               }
@@ -122,7 +122,7 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => {
 
           const result = await fetch(tmp_filename, { method: "HEAD", s3: options });
           expect(result.status).toBe(200);
-          expect(result.headers.get("content-length")).toBe("15728640");
+          expect(result.headers.get("content-length")).toBe((buffer.byteLength * 10).toString());
         }
       }, 10_000);
     });
@@ -205,7 +205,7 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => {
           new Request("https://example.com", {
             method: "PUT",
             body: async function* () {
-              for (let i = 0; i < 15; i++) {
+              for (let i = 0; i < 10; i++) {
                 if (i % 5 === 0) {
                   await Bun.sleep(10);
                 }
@@ -215,7 +215,7 @@ describe.skipIf(!s3Options.accessKeyId)("s3", () => {
             }),
             options,
           );
-          expect(await S3.size(tmp_filename, options)).toBe(Buffer.byteLength(bigPayload));
+          expect(await S3.size(tmp_filename, options)).toBe(Buffer.byteLength(bigishPayload) * 10);
         }
       }, 10_000);
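
Why 10 MiB still covers the multipart path: assuming the usual S3 5 MiB minimum part size (an assumption about the uploader's chunking, not something this diff states), a 10 MiB body still splits into more than one part, so multipart upload keeps getting exercised while each test moves 5 MiB less data. The content-length assertion is now computed instead of hard-coded: 10 chunks x 1 MiB (1,048,576 bytes) = 10,485,760 bytes, where the old literal "15728640" was 15 x 1,048,576. A sketch of the streaming PUT shape the test uses; tmp_filename and `options` here are placeholder stand-ins for the test's S3 URL and credentials, and `s3: options` on the PUT is assumed because the hunk above is cut off before its option list:

const { randomUUID } = require("crypto");

// Placeholder destination and credentials (hypothetical values for this sketch).
const options = { accessKeyId: "...", secretAccessKey: "..." };
const tmp_filename = `s3://my-bucket/${randomUUID()}`;

const buffer = Buffer.alloc(1024 * 1024, "a"); // 1 MiB chunk

await fetch(tmp_filename, {
  method: "PUT",
  // Async-generator body: 10 x 1 MiB = 10,485,760 bytes total.
  body: async function* () {
    for (let i = 0; i < 10; i++) {
      await Bun.sleep(10); // trickle the body so it streams rather than arriving at once
      yield buffer;
    }
  },
  s3: options, // assumed, see note above
});

const head = await fetch(tmp_filename, { method: "HEAD", s3: options });
// Computed expectation, matching the updated assertion in the test.
console.assert(head.headers.get("content-length") === (buffer.byteLength * 10).toString());
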