Deprecate option.offset
You should use `Uint8Array.subarray(offset)` instead
Borewit committed Nov 30, 2024
1 parent 9572b78 commit 375e817
Showing 7 changed files with 11 additions and 20 deletions.
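In practice the migration is a one-line change at each call site: instead of passing `offset` in the options, pass a `subarray` view of the target buffer. A minimal sketch (assuming this repository is strtok3 and using its `fromBuffer` helper):

```ts
import { fromBuffer } from 'strtok3';

const tokenizer = fromBuffer(new Uint8Array([0x01, 0x02, 0x03, 0x04, 0x05, 0x06]));
const buf = new Uint8Array(7);

// Before this commit:
//   await tokenizer.readBuffer(buf, {length: 6, offset: 1});
// After: the view starts at index 1 and shares memory with `buf`
await tokenizer.readBuffer(buf.subarray(1), {length: 6});
// buf is now [0, 1, 2, 3, 4, 5, 6]
```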
1 change: 0 additions & 1 deletion README.md
@@ -293,7 +293,6 @@ Each attribute is optional:
 
 | Attribute | Type    | Description |
 |-----------|---------|-------------|
-| offset    | number  | The offset in the buffer to start writing at; if not provided, start at 0 |
 | length    | number  | Requested number of bytes to read. |
 | position  | number  | Position where to peek from the file. If position is null, data will be read from the [current file position](#attribute-tokenizerposition). Position may not be less than [tokenizer.position](#attribute-tokenizerposition) |
 | mayBeLess | boolean | If and only if set, will not throw an EOF error if less than the requested *length* could be read. |
5 changes: 1 addition & 4 deletions lib/AbstractTokenizer.ts
@@ -3,7 +3,6 @@ import type { IGetToken, IToken } from '@tokenizer/token';
 import { EndOfStreamError } from 'peek-readable';
 
 interface INormalizedReadChunkOptions extends IReadChunkOptions {
-  offset: number;
   length: number;
   position: number;
   mayBeLess?: boolean;
@@ -140,15 +139,13 @@ export abstract class AbstractTokenizer implements ITokenizer {
     if (options) {
       return {
         mayBeLess: options.mayBeLess === true,
-        offset: options.offset ? options.offset : 0,
-        length: options.length ? options.length : (uint8Array.length - (options.offset ? options.offset : 0)),
+        length: options.length ? options.length : uint8Array.length,
         position: options.position ? options.position : this.position
       };
     }
 
     return {
       mayBeLess: false,
-      offset: 0,
       length: uint8Array.length,
       position: this.position
     };
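Note the default-length change this implies: the default used to be `uint8Array.length - offset`; it is now simply the length of the buffer passed in. The two defaults agree, because a view's length already discounts its offset:

```ts
const buf = new Uint8Array(6);
// old default: buf.length - offset       →  6 - 1 = 5
// new default: buf.subarray(1).length    →  5
console.log(buf.subarray(1).length); // 5
```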
2 changes: 1 addition & 1 deletion lib/BufferTokenizer.ts
@@ -50,7 +50,7 @@ export class BufferTokenizer extends AbstractTokenizer implements IRandomAccessT
     if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
       throw new EndOfStreamError();
     }
-    uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read), normOptions.offset);
+    uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read));
     return bytes2read;
   }

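The target-offset argument to `set` can be dropped because a `subarray` is a view over the parent's backing memory: writing at index 0 of the view lands at the view's offset in the parent. For example:

```ts
const parent = new Uint8Array(7);
const view = parent.subarray(1);   // bytes 1..6 of `parent`, same backing memory
view.set([0x01, 0x02, 0x03]);      // writes at view index 0 → parent index 1
console.log(parent);               // Uint8Array(7) [0, 1, 2, 3, 0, 0, 0]
```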
4 changes: 2 additions & 2 deletions lib/FileTokenizer.ts
@@ -39,7 +39,7 @@ export class FileTokenizer extends AbstractTokenizer implements IRandomAccessTok
     const normOptions = this.normalizeOptions(uint8Array, options);
     this.position = normOptions.position;
     if (normOptions.length === 0) return 0;
-    const res = await this.fileHandle.read(uint8Array, normOptions.offset, normOptions.length, normOptions.position);
+    const res = await this.fileHandle.read(uint8Array, 0, normOptions.length, normOptions.position);
     this.position += res.bytesRead;
     if (res.bytesRead < normOptions.length && (!options || !options.mayBeLess)) {
       throw new EndOfStreamError();
@@ -57,7 +57,7 @@ export class FileTokenizer extends AbstractTokenizer implements IRandomAccessTok
 
     const normOptions = this.normalizeOptions(uint8Array, options);
 
-    const res = await this.fileHandle.read(uint8Array, normOptions.offset, normOptions.length, normOptions.position);
+    const res = await this.fileHandle.read(uint8Array, 0, normOptions.length, normOptions.position);
     if ((!normOptions.mayBeLess) && res.bytesRead < normOptions.length) {
       throw new EndOfStreamError();
     }
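Node's `FileHandle.read(buffer, offset, length, position)` takes the write offset into `buffer` as its second argument, so hard-coding it to 0 is correct once the caller's offset is baked into the view. A quick equivalence sketch (file name illustrative):

```ts
import { open } from 'node:fs/promises';

const fh = await open('example.bin', 'r');
const buf = new Uint8Array(8);
// Old shape:  await fh.read(buf, 2, 4, 0)  — write 4 bytes at buf[2..5]
// Same effect, with the offset carried by the view:
await fh.read(buf.subarray(2), 0, 4, 0);
await fh.close();
```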
6 changes: 3 additions & 3 deletions lib/ReadStreamTokenizer.ts
@@ -37,7 +37,7 @@ export class ReadStreamTokenizer extends AbstractTokenizer {
     if (normOptions.length === 0) {
       return 0;
     }
-    const bytesRead = await this.streamReader.read(uint8Array, normOptions.offset, normOptions.length);
+    const bytesRead = await this.streamReader.read(uint8Array, 0, normOptions.length);
     this.position += bytesRead;
     if ((!options || !options.mayBeLess) && bytesRead < normOptions.length) {
       throw new EndOfStreamError();
@@ -61,7 +61,7 @@ export class ReadStreamTokenizer extends AbstractTokenizer {
     if (skipBytes > 0) {
       const skipBuffer = new Uint8Array(normOptions.length + skipBytes);
       bytesRead = await this.peekBuffer(skipBuffer, {mayBeLess: normOptions.mayBeLess});
-      uint8Array.set(skipBuffer.subarray(skipBytes), normOptions.offset);
+      uint8Array.set(skipBuffer.subarray(skipBytes));
       return bytesRead - skipBytes;
     }
     if (skipBytes < 0) {
@@ -71,7 +71,7 @@
 
     if (normOptions.length > 0) {
       try {
-        bytesRead = await this.streamReader.peek(uint8Array, normOptions.offset, normOptions.length);
+        bytesRead = await this.streamReader.peek(uint8Array, 0, normOptions.length);
       } catch (err) {
         if (options?.mayBeLess && err instanceof EndOfStreamError) {
           return 0;
5 changes: 0 additions & 5 deletions lib/types.ts
@@ -30,11 +30,6 @@ export interface IRandomAccessFileInfo extends IFileInfo {
 
 export interface IReadChunkOptions {
 
-  /**
-   * The offset in the buffer to start writing at; default is 0
-   */
-  offset?: number;
-
   /**
    * Number of bytes to read.
    */
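After this removal, `IReadChunkOptions` keeps only the read geometry and the EOF-tolerance flag; roughly (comments paraphrased from the README table above):

```ts
export interface IReadChunkOptions {
  /** Requested number of bytes to read; defaults to the length of the given buffer. */
  length?: number;
  /** Position to read/peek from; defaults to the current tokenizer position. */
  position?: number;
  /** If set, a short read will not throw an EOF error. */
  mayBeLess?: boolean;
}
```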
8 changes: 4 additions & 4 deletions test/test.ts
@@ -88,7 +88,7 @@ describe('Matrix tests', () => {
     it('option.offset', async () => {
       const buf = new Uint8Array(7);
       const rst = await getTokenizerWithData('\x01\x02\x03\x04\x05\x06', tokenizerType);
-      assert.strictEqual(await rst.readBuffer(buf, {length: 6, offset: 1}), 6);
+      assert.strictEqual(await rst.readBuffer(buf.subarray(1), {length: 6}), 6);
       await rst.close();
     });
 
@@ -102,7 +102,7 @@ describe('Matrix tests', () => {
     it('default length', async () => {
       const buf = new Uint8Array(6);
       const rst = await getTokenizerWithData('\x01\x02\x03\x04\x05\x06', tokenizerType);
-      assert.strictEqual(await rst.readBuffer(buf, {offset: 1}), 5, 'default length = buffer.length - option.offset');
+      assert.strictEqual(await rst.readBuffer(buf.subarray(1)), 5, 'default length = buffer.length - option.offset');
       await rst.close();
     });
 
@@ -130,7 +130,7 @@ describe('Matrix tests', () => {
     it('option.offset', async () => {
       const buf = new Uint8Array(7);
       const rst = await getTokenizerWithData('\x01\x02\x03\x04\x05\x06', tokenizerType);
-      assert.strictEqual(await rst.peekBuffer(buf, {length: 6, offset: 1}), 6);
+      assert.strictEqual(await rst.peekBuffer(buf.subarray(1), {length: 6}), 6);
       await rst.close();
     });
 
@@ -144,7 +144,7 @@ describe('Matrix tests', () => {
     it('default length', async () => {
      const buf = new Uint8Array(6);
       const rst = await getTokenizerWithData('\x01\x02\x03\x04\x05\x06', tokenizerType);
-      assert.strictEqual(await rst.peekBuffer(buf, {offset: 1}), 5, 'default length = buffer.length - option.offset');
+      assert.strictEqual(await rst.peekBuffer(buf.subarray(1)), 5, 'default length = buffer.length - option.offset');
       await rst.close();
     });
 
