
Commit
Abort tokenizer on close()
Borewit committed Oct 13, 2024
1 parent 993082a commit 95b7363
Showing 5 changed files with 31 additions and 9 deletions.
1 change: 1 addition & 0 deletions lib/AbstractTokenizer.ts
@@ -126,6 +126,7 @@ export abstract class AbstractTokenizer implements ITokenizer {
}

public async close(): Promise<void> {
await this.abort();
await this.onClose?.();
}

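With this change, close() aborts any in-flight read before invoking the optional onClose callback, so a pending readToken() now rejects instead of waiting indefinitely for more data. A minimal consumer-side sketch of that behaviour (assuming the published strtok3 and token-types packages; the stalled web stream is a hypothetical stand-in for a source that never delivers enough bytes):

import { fromWebStream } from 'strtok3';
import * as Token from 'token-types';

// Hypothetical source that never enqueues the 3 bytes the token needs,
// so readToken() would otherwise stay pending forever.
const stalledStream = new ReadableStream<Uint8Array>({
  start() { /* intentionally never enqueues data */ }
});

const tokenizer = fromWebStream(stalledStream);
const pending = tokenizer.readToken(new Token.StringType(3, 'utf-8'));

// close() now calls abort() first and then onClose, so the pending read rejects.
await tokenizer.close();
await pending.catch(err => console.log('readToken rejected after close():', (err as Error).message));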
2 changes: 1 addition & 1 deletion package.json
@@ -71,7 +71,7 @@
},
"dependencies": {
"@tokenizer/token": "^0.3.0",
"peek-readable": "^5.3.0"
"peek-readable": "^5.3.1"
},
"keywords": [
"tokenizer",
23 changes: 22 additions & 1 deletion test/test.ts
@@ -908,6 +908,12 @@ describe('Matrix tests', () => {
await expect(promise).to.be.rejectedWith(Error);
});

it('abort async operation using `close()`', async () => {
const fileReadStream = await getTokenizerWithData('123', tokenizerType, 500);
const promise = fileReadStream.readToken(new Token.StringType(3, 'utf-8'), 0);
await fileReadStream.close();
await expect(promise).to.be.rejectedWith(Error);
});

it('abort async operation using `AbortController`', async () => {
const abortController = new AbortController();
@@ -916,8 +922,8 @@
abortController.abort();
await expect(promise).to.be.rejectedWith(Error);
});
});

});
}

}); // End of test "Tokenizer-types"
@@ -974,3 +980,18 @@ it('should determine the file size using a file stream', async () => {
assert.strictEqual(tokenizer.fileInfo.size, 16, 'fileInfo.size');
await tokenizer.close();
});


it('should release stream after close', async () => {

const fileStream = await makeReadableByteFileStream(Path.join(__dirname, 'resources', 'test1.dat'), 0);
const stream = fileStream.stream;
assert.isFalse(stream.locked, 'stream is unlocked before initializing tokenizer');
const webStreamTokenizer = fromWebStream(fileStream.stream, {onClose: () => fileStream.closeFile()});
assert.isTrue(stream.locked, 'stream is locked after initializing tokenizer');
await webStreamTokenizer.close();
assert.isFalse(stream.locked, 'stream is unlocked after closing tokenizer');
});



4 changes: 2 additions & 2 deletions test/util.ts
@@ -36,8 +36,8 @@ export async function makeReadableByteFileStream(filename: string, delay = 0): P
}, delay);
},

- async cancel() {
-   await fileHandle.close();
+ cancel() {
+   return fileHandle.close();
},

autoAllocateChunkSize: 1024
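Side note on the test/util.ts change above: dropping async/await in favour of returning the promise is essentially behaviour-preserving for the stream's cancel() hook; both forms make cancellation settle once the file handle has been closed. A sketch of the two equivalent shapes (HandleLike is a hypothetical stand-in for Node's fs.FileHandle):

type HandleLike = { close(): Promise<void> };

// Original form: an extra async wrapper around the close promise.
async function cancelWithAwait(fileHandle: HandleLike): Promise<void> {
  await fileHandle.close();
}

// New form: forwards the same promise directly, one wrapper less.
function cancelWithReturn(fileHandle: HandleLike): Promise<void> {
  return fileHandle.close();
}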
10 changes: 5 additions & 5 deletions yarn.lock
@@ -2533,10 +2533,10 @@ __metadata:
languageName: node
linkType: hard

"peek-readable@npm:^5.3.0":
version: 5.3.0
resolution: "peek-readable@npm:5.3.0"
checksum: 10c0/da9c8fcce9c9229118dc5d582f9ee5c699db193dec41383c2344a47922304ea0b57f7340c5c46d6660095f6a6d95954a488badf8e71685f2c3db92dad035e68b
"peek-readable@npm:^5.3.1":
version: 5.3.1
resolution: "peek-readable@npm:5.3.1"
checksum: 10c0/49f628e4728887230c158699e422ebb10747f5e02aee930ec10634fa7142e74e67d3fb3a780e7a9b9f092c61bf185f07d167c099b2359b22a58cee3dbfe0e43b
languageName: node
linkType: hard

@@ -3177,7 +3177,7 @@ __metadata:
chai-as-promised: "npm:^8.0.0"
del-cli: "npm:^6.0.0"
mocha: "npm:^10.7.3"
peek-readable: "npm:^5.3.0"
peek-readable: "npm:^5.3.1"
remark-cli: "npm:^12.0.1"
remark-preset-lint-recommended: "npm:^7.0.0"
token-types: "npm:^6.0.0"
