diff --git a/package.json b/package.json
index a86e162f..ad09d16f 100644
--- a/package.json
+++ b/package.json
@@ -67,6 +67,7 @@
     "chai-as-promised": "^8.0.1",
     "del-cli": "^6.0.0",
     "mocha": "^11.1.0",
+    "node-readable-to-web-readable-stream": "^0.4.2",
     "remark-cli": "^12.0.1",
     "remark-preset-lint-recommended": "^7.0.1",
     "token-types": "^6.0.0",
diff --git a/test/test.ts b/test/test.ts
index 40770352..d9264e8c 100644
--- a/test/test.ts
+++ b/test/test.ts
@@ -21,7 +21,7 @@
 import { EndOfStreamError } from 'peek-readable';
 import mocha from 'mocha';
 import { stringToUint8Array } from 'uint8array-extras';
-import { DelayedStream, makeReadableByteFileStream } from './util.js';
+import { DelayedStream, makeByteReadableStreamFromFile } from './util.js';
 import process from 'node:process';
 
 use(chaiAsPromised);
@@ -64,8 +64,8 @@ describe('Matrix tests', () => {
    }, {
      name: 'fromWebStream()',
      loadTokenizer: async (testFile, delay, abortSignal?: AbortSignal) => {
-        const fileStream = await makeReadableByteFileStream(Path.join(__dirname, 'resources', testFile), delay);
-        return fromWebStream(fileStream.stream, {onClose: () => fileStream.closeFile(), abortSignal});
+        const fileStream = makeByteReadableStreamFromFile(Path.join(__dirname, 'resources', testFile), delay);
+        return fromWebStream(fileStream, {abortSignal});
      },
      hasFileInfo: false,
      abortable: true,
@@ -1068,15 +1068,10 @@ it('should determine the file size using a file stream', async () => {
 
 it('should release stream after close', async () => {
-  const fileStream = await makeReadableByteFileStream(Path.join(__dirname, 'resources', 'test1.dat'), 0);
-  const stream = fileStream.stream;
-  assert.isFalse(stream.locked, 'stream is unlocked before initializing tokenizer');
-  const webStreamTokenizer = fromWebStream(fileStream.stream, {
-    onClose: () => {
-      return fileStream.closeFile();
-    }
-  });
-  assert.isTrue(stream.locked, 'stream is locked after initializing tokenizer');
+  const fileStream = makeByteReadableStreamFromFile(Path.join(__dirname, 'resources', 'test1.dat'), 0);
+  assert.isFalse(fileStream.locked, 'stream is unlocked before initializing tokenizer');
+  const webStreamTokenizer = fromWebStream(fileStream);
+  assert.isTrue(fileStream.locked, 'stream is locked after initializing tokenizer');
   await webStreamTokenizer.close();
-  assert.isFalse(stream.locked, 'stream is unlocked after closing tokenizer');
+  assert.isFalse(fileStream.locked, 'stream is unlocked after closing tokenizer');
 });
 
diff --git a/test/util.ts b/test/util.ts
index 0d862e17..2ea170a8 100644
--- a/test/util.ts
+++ b/test/util.ts
@@ -1,51 +1,23 @@
-import * as fs from 'node:fs/promises';
-import { ReadableStream } from 'node:stream/web';
-import { Readable } from 'node:stream';
-
-export async function makeReadableByteFileStream(filename: string, delay = 0): Promise<{ stream: ReadableStream, closeFile: () => Promise<void> }> {
-
-  let position = 0;
-  const fileHandle = await fs.open(filename, 'r');
-
-  return {
-    stream: new ReadableStream({
-      type: 'bytes',
-
-      async pull(controller) {
-
-        // @ts-ignore
-        const view = controller.byobRequest.view;
-
-        setTimeout(async () => {
-          try {
-            const {bytesRead} = await fileHandle.read(view, 0, view.byteLength, position);
-            if (bytesRead === 0) {
-              await fileHandle.close();
-              controller.close();
-              // @ts-ignore
-              controller.byobRequest.respond(0);
-            } else {
-              position += bytesRead;
-              // @ts-ignore
-              controller.byobRequest.respond(bytesRead);
-            }
-          } catch (err) {
-            controller.error(err);
-            await fileHandle.close();
-          }
-        }, delay);
-      },
-
-      cancel() {
-        return fileHandle.close();
-      },
-
-      autoAllocateChunkSize: 1024
-    }),
-    closeFile: () => {
-      return fileHandle.close();
+import { createReadStream } from 'node:fs';
+import { Transform, Readable } from 'node:stream';
+import { makeByteReadableStreamFromNodeReadable } from 'node-readable-to-web-readable-stream';
+
+export function makeByteReadableStreamFromFile(filename: string, delay = 0): ReadableStream {
+
+  // Create a Node.js Readable stream
+  const nodeReadable = createReadStream(filename);
+
+  // Create a Transform stream to introduce delay
+  const delayTransform = new Transform({
+    transform(chunk, encoding, callback) {
+      setTimeout(() => callback(null, chunk), delay);
     }
-  };
+  });
+
+  // Pipe through the delay transform
+  const delayedNodeStream = nodeReadable.pipe(delayTransform);
+
+  return makeByteReadableStreamFromNodeReadable(delayedNodeStream);
 }
 
 export class DelayedStream extends Readable {
diff --git a/yarn.lock b/yarn.lock
index af82089a..d7ee5e7c 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -2359,6 +2359,13 @@ __metadata:
   languageName: node
   linkType: hard
 
+"node-readable-to-web-readable-stream@npm:^0.4.2":
+  version: 0.4.2
+  resolution: "node-readable-to-web-readable-stream@npm:0.4.2"
+  checksum: 10c0/8c3d09cac51c5f886e1636fa2a5404d664245c8bdc9a65e102552894963ed1b27207d5b94de59e37045d81cb9e8970cf79e561006df7ee8821cb761e728b3a80
+  languageName: node
+  linkType: hard
+
 "nopt@npm:^7.0.0, nopt@npm:^7.2.1":
   version: 7.2.1
   resolution: "nopt@npm:7.2.1"
@@ -3136,6 +3143,7 @@ __metadata:
     chai-as-promised: "npm:^8.0.1"
     del-cli: "npm:^6.0.0"
    mocha: "npm:^11.1.0"
+    node-readable-to-web-readable-stream: "npm:^0.4.2"
     peek-readable: "npm:^7.0.0"
     remark-cli: "npm:^12.0.1"
     remark-preset-lint-recommended: "npm:^7.0.1"