Lukian LEIZOUR 2023-01-29 00:57:47 +01:00
parent a15c733e45
commit 2a5130cbda
2838 changed files with 288613 additions and 0 deletions

node_modules/strtok3/lib/AbstractTokenizer.d.ts generated vendored Normal file

@@ -0,0 +1,69 @@
import { ITokenizer, IFileInfo, IReadChunkOptions } from './types.js';
import { IGetToken, IToken } from '@tokenizer/token';
interface INormalizedReadChunkOptions extends IReadChunkOptions {
offset: number;
length: number;
position: number;
mayBeLess?: boolean;
}
/**
* Core tokenizer
*/
export declare abstract class AbstractTokenizer implements ITokenizer {
fileInfo: IFileInfo;
protected constructor(fileInfo?: IFileInfo);
/**
* Tokenizer-stream position
*/
position: number;
private numBuffer;
/**
* Read buffer from tokenizer
* @param buffer - Target buffer to fill with data read from the tokenizer-stream
* @param options - Additional read options
* @returns Promise with number of bytes read
*/
abstract readBuffer(buffer: Uint8Array, options?: IReadChunkOptions): Promise<number>;
/**
* Peek (read ahead) buffer from tokenizer
* @param uint8Array - Target buffer to fill with data peeked from the tokenizer-stream
* @param options - Peek behaviour options
* @returns Promise with number of bytes read
*/
abstract peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
/**
* Read a token from the tokenizer-stream
* @param token - The token to read
* @param position - If provided, the desired position in the tokenizer-stream
* @returns Promise with token data
*/
readToken<Value>(token: IGetToken<Value>, position?: number): Promise<Value>;
/**
* Peek a token from the tokenizer-stream.
* @param token - Token to peek from the tokenizer-stream.
* @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
* @returns Promise with token data
*/
peekToken<Value>(token: IGetToken<Value>, position?: number): Promise<Value>;
/**
* Read a numeric token from the stream
* @param token - Numeric token
* @returns Promise with number
*/
readNumber(token: IToken<number>): Promise<number>;
/**
* Read a numeric token from the stream
* @param token - Numeric token
* @returns Promise with number
*/
peekNumber(token: IToken<number>): Promise<number>;
/**
* Ignore a number of bytes, advancing the pointer in the underlying tokenizer-stream.
* @param length - Number of bytes to ignore
* @return Resolves with the number of bytes ignored: equals length if that many bytes are available, otherwise the number of bytes available
*/
ignore(length: number): Promise<number>;
close(): Promise<void>;
protected normalizeOptions(uint8Array: Uint8Array, options?: IReadChunkOptions): INormalizedReadChunkOptions;
}
export {};

node_modules/strtok3/lib/AbstractTokenizer.js generated vendored Normal file

@@ -0,0 +1,101 @@
import { EndOfStreamError } from 'peek-readable';
import { Buffer } from 'node:buffer';
/**
* Core tokenizer
*/
export class AbstractTokenizer {
constructor(fileInfo) {
/**
* Tokenizer-stream position
*/
this.position = 0;
this.numBuffer = new Uint8Array(8);
this.fileInfo = fileInfo ? fileInfo : {};
}
/**
* Read a token from the tokenizer-stream
* @param token - The token to read
* @param position - If provided, the desired position in the tokenizer-stream
* @returns Promise with token data
*/
async readToken(token, position = this.position) {
const uint8Array = Buffer.alloc(token.len);
const len = await this.readBuffer(uint8Array, { position });
if (len < token.len)
throw new EndOfStreamError();
return token.get(uint8Array, 0);
}
/**
* Peek a token from the tokenizer-stream.
* @param token - Token to peek from the tokenizer-stream.
* @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
* @returns Promise with token data
*/
async peekToken(token, position = this.position) {
const uint8Array = Buffer.alloc(token.len);
const len = await this.peekBuffer(uint8Array, { position });
if (len < token.len)
throw new EndOfStreamError();
return token.get(uint8Array, 0);
}
/**
* Read a numeric token from the stream
* @param token - Numeric token
* @returns Promise with number
*/
async readNumber(token) {
const len = await this.readBuffer(this.numBuffer, { length: token.len });
if (len < token.len)
throw new EndOfStreamError();
return token.get(this.numBuffer, 0);
}
/**
* Read a numeric token from the stream
* @param token - Numeric token
* @returns Promise with number
*/
async peekNumber(token) {
const len = await this.peekBuffer(this.numBuffer, { length: token.len });
if (len < token.len)
throw new EndOfStreamError();
return token.get(this.numBuffer, 0);
}
/**
* Ignore a number of bytes, advancing the pointer in the underlying tokenizer-stream.
* @param length - Number of bytes to ignore
* @return Resolves with the number of bytes ignored: equals length if that many bytes are available, otherwise the number of bytes available
*/
async ignore(length) {
if (this.fileInfo.size !== undefined) {
const bytesLeft = this.fileInfo.size - this.position;
if (length > bytesLeft) {
this.position += bytesLeft;
return bytesLeft;
}
}
this.position += length;
return length;
}
async close() {
// empty
}
normalizeOptions(uint8Array, options) {
if (options && options.position !== undefined && options.position < this.position) {
throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
}
if (options) {
return {
mayBeLess: options.mayBeLess === true,
offset: options.offset ? options.offset : 0,
length: options.length ? options.length : (uint8Array.length - (options.offset ? options.offset : 0)),
position: options.position ? options.position : this.position
};
}
return {
mayBeLess: false,
offset: 0,
length: uint8Array.length,
position: this.position
};
}
}
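
Usage sketch (not part of the package): readToken() accepts anything satisfying IGetToken, so a hand-rolled numeric token works; UINT32_BE is a hypothetical token defined here for illustration, and the byte values are illustrative.

import { fromBuffer } from 'strtok3';
import type { IGetToken } from '@tokenizer/token';

// Hypothetical token reading a 32-bit big-endian unsigned integer.
const UINT32_BE: IGetToken<number> = {
    len: 4, // consulted by readToken() to size the read and detect short reads
    get: (buf: Uint8Array, off: number) =>
        new DataView(buf.buffer, buf.byteOffset).getUint32(off, false)
};

const tokenizer = fromBuffer(new Uint8Array([0x00, 0x00, 0x01, 0x02]));
const value = await tokenizer.readToken(UINT32_BE); // 0x00000102 === 258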

node_modules/strtok3/lib/BufferTokenizer.d.ts generated vendored Normal file

@@ -0,0 +1,26 @@
import { IFileInfo, IReadChunkOptions } from './types.js';
import { AbstractTokenizer } from './AbstractTokenizer.js';
export declare class BufferTokenizer extends AbstractTokenizer {
private uint8Array;
/**
* Construct BufferTokenizer
* @param uint8Array - Uint8Array to tokenize
* @param fileInfo - Pass additional file information to the tokenizer
*/
constructor(uint8Array: Uint8Array, fileInfo?: IFileInfo);
/**
* Read buffer from tokenizer
* @param uint8Array - Uint8Array to tokenize
* @param options - Read behaviour options
* @returns {Promise<number>}
*/
readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
/**
* Peek (read ahead) buffer from tokenizer
* @param uint8Array
* @param options - Read behaviour options
* @returns {Promise<number>}
*/
peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
close(): Promise<void>;
}

node_modules/strtok3/lib/BufferTokenizer.js generated vendored Normal file

@@ -0,0 +1,51 @@
import { EndOfStreamError } from 'peek-readable';
import { AbstractTokenizer } from './AbstractTokenizer.js';
export class BufferTokenizer extends AbstractTokenizer {
/**
* Construct BufferTokenizer
* @param uint8Array - Uint8Array to tokenize
* @param fileInfo - Pass additional file information to the tokenizer
*/
constructor(uint8Array, fileInfo) {
super(fileInfo);
this.uint8Array = uint8Array;
this.fileInfo.size = this.fileInfo.size ? this.fileInfo.size : uint8Array.length;
}
/**
* Read buffer from tokenizer
* @param uint8Array - Uint8Array to tokenize
* @param options - Read behaviour options
* @returns {Promise<number>}
*/
async readBuffer(uint8Array, options) {
if (options && options.position) {
if (options.position < this.position) {
throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
}
this.position = options.position;
}
const bytesRead = await this.peekBuffer(uint8Array, options);
this.position += bytesRead;
return bytesRead;
}
/**
* Peek (read ahead) buffer from tokenizer
* @param uint8Array
* @param options - Read behaviour options
* @returns {Promise<number>}
*/
async peekBuffer(uint8Array, options) {
const normOptions = this.normalizeOptions(uint8Array, options);
const bytes2read = Math.min(this.uint8Array.length - normOptions.position, normOptions.length);
if ((!normOptions.mayBeLess) && bytes2read < normOptions.length) {
throw new EndOfStreamError();
}
else {
uint8Array.set(this.uint8Array.subarray(normOptions.position, normOptions.position + bytes2read), normOptions.offset);
return bytes2read;
}
}
async close() {
// empty
}
}
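
Usage sketch, assuming fromBuffer is imported from the package root: peeking leaves tokenizer.position untouched while reading advances it, and mayBeLess turns a short read into a returned count rather than an EndOfStreamError.

import { fromBuffer } from 'strtok3';

const tokenizer = fromBuffer(new Uint8Array([1, 2, 3]));
const two = new Uint8Array(2);
await tokenizer.peekBuffer(two); // fills [1, 2]; position stays 0
await tokenizer.readBuffer(two); // fills [1, 2]; position advances to 2
const four = new Uint8Array(4);
const n = await tokenizer.readBuffer(four, { mayBeLess: true }); // n === 1, no throw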

node_modules/strtok3/lib/FileTokenizer.d.ts generated vendored Normal file

@@ -0,0 +1,22 @@
import { AbstractTokenizer } from './AbstractTokenizer.js';
import { IFileInfo, IReadChunkOptions } from './types.js';
export declare class FileTokenizer extends AbstractTokenizer {
private fd;
constructor(fd: number, fileInfo: IFileInfo);
/**
* Read buffer from file
* @param uint8Array - Uint8Array to write result to
* @param options - Read behaviour options
* @returns Promise number of bytes read
*/
readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
/**
* Peek buffer from file
* @param uint8Array - Uint8Array (or Buffer) to write data to
* @param options - Read behaviour options
* @returns Promise number of bytes read
*/
peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
close(): Promise<void>;
}
export declare function fromFile(sourceFilePath: string): Promise<FileTokenizer>;

node_modules/strtok3/lib/FileTokenizer.js generated vendored Normal file

@@ -0,0 +1,50 @@
import { AbstractTokenizer } from './AbstractTokenizer.js';
import { EndOfStreamError } from 'peek-readable';
import * as fs from './FsPromise.js';
export class FileTokenizer extends AbstractTokenizer {
constructor(fd, fileInfo) {
super(fileInfo);
this.fd = fd;
}
/**
* Read buffer from file
* @param uint8Array - Uint8Array to write result to
* @param options - Read behaviour options
* @returns Promise number of bytes read
*/
async readBuffer(uint8Array, options) {
const normOptions = this.normalizeOptions(uint8Array, options);
this.position = normOptions.position;
const res = await fs.read(this.fd, uint8Array, normOptions.offset, normOptions.length, normOptions.position);
this.position += res.bytesRead;
if (res.bytesRead < normOptions.length && (!options || !options.mayBeLess)) {
throw new EndOfStreamError();
}
return res.bytesRead;
}
/**
* Peek buffer from file
* @param uint8Array - Uint8Array (or Buffer) to write data to
* @param options - Read behaviour options
* @returns Promise number of bytes read
*/
async peekBuffer(uint8Array, options) {
const normOptions = this.normalizeOptions(uint8Array, options);
const res = await fs.read(this.fd, uint8Array, normOptions.offset, normOptions.length, normOptions.position);
if ((!normOptions.mayBeLess) && res.bytesRead < normOptions.length) {
throw new EndOfStreamError();
}
return res.bytesRead;
}
async close() {
return fs.close(this.fd);
}
}
export async function fromFile(sourceFilePath) {
const stat = await fs.stat(sourceFilePath);
if (!stat.isFile()) {
throw new Error(`Not a file: ${sourceFilePath}`);
}
const fd = await fs.open(sourceFilePath, 'r');
return new FileTokenizer(fd, { path: sourceFilePath, size: stat.size });
}
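
A minimal sketch of the fromFile() flow ('sample.bin' is a placeholder path): open, read a header, and release the descriptor in a finally block, since close() is what closes it.

import { fromFile } from 'strtok3';

const tokenizer = await fromFile('sample.bin');
try {
    const header = new Uint8Array(8);
    await tokenizer.readBuffer(header, { mayBeLess: true });
    console.log(tokenizer.fileInfo.size, tokenizer.position);
} finally {
    await tokenizer.close(); // closes the underlying file descriptor
}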

node_modules/strtok3/lib/FsPromise.d.ts generated vendored Normal file

@@ -0,0 +1,19 @@
/**
* Module converting fs functions to promise-based functions
*/
/// <reference types="node" resolution-mode="require"/>
import fs from 'node:fs';
export interface IReadResult {
bytesRead: number;
buffer: Uint8Array;
}
export declare const pathExists: typeof fs.existsSync;
export declare const createReadStream: typeof fs.createReadStream;
export declare function stat(path: fs.PathLike): Promise<fs.Stats>;
export declare function close(fd: number): Promise<void>;
export declare function open(path: fs.PathLike, mode: fs.Mode): Promise<number>;
export declare function read(fd: number, buffer: Uint8Array, offset: number, length: number, position: number): Promise<IReadResult>;
export declare function writeFile(path: fs.PathLike, data: Buffer | string): Promise<void>;
export declare function writeFileSync(path: fs.PathLike, data: Buffer | string): void;
export declare function readFile(path: fs.PathLike): Promise<Buffer>;

node_modules/strtok3/lib/FsPromise.js generated vendored Normal file

@@ -0,0 +1,69 @@
/**
* Module converting fs functions to promise-based functions
*/
import fs from 'node:fs';
export const pathExists = fs.existsSync;
export const createReadStream = fs.createReadStream;
export async function stat(path) {
return new Promise((resolve, reject) => {
fs.stat(path, (err, stats) => {
if (err)
reject(err);
else
resolve(stats);
});
});
}
export async function close(fd) {
return new Promise((resolve, reject) => {
fs.close(fd, err => {
if (err)
reject(err);
else
resolve();
});
});
}
export async function open(path, mode) {
return new Promise((resolve, reject) => {
fs.open(path, mode, (err, fd) => {
if (err)
reject(err);
else
resolve(fd);
});
});
}
export async function read(fd, buffer, offset, length, position) {
return new Promise((resolve, reject) => {
fs.read(fd, buffer, offset, length, position, (err, bytesRead, _buffer) => {
if (err)
reject(err);
else
resolve({ bytesRead, buffer: _buffer });
});
});
}
export async function writeFile(path, data) {
return new Promise((resolve, reject) => {
fs.writeFile(path, data, err => {
if (err)
reject(err);
else
resolve();
});
});
}
export function writeFileSync(path, data) {
fs.writeFileSync(path, data);
}
export async function readFile(path) {
return new Promise((resolve, reject) => {
fs.readFile(path, (err, buffer) => {
if (err)
reject(err);
else
resolve(buffer);
});
});
}
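
For comparison, a sketch of the same read using Node's built-in promise API ('sample.bin' is a placeholder path): these hand-rolled wrappers mirror fs/promises but keep the numeric file-descriptor interface that FileTokenizer expects.

import { open } from 'node:fs/promises';

const handle = await open('sample.bin', 'r');
const buffer = new Uint8Array(8);
// FileHandle.read() also resolves with { bytesRead, buffer }.
const { bytesRead } = await handle.read(buffer, 0, buffer.length, 0);
console.log(bytesRead);
await handle.close();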

node_modules/strtok3/lib/ReadStreamTokenizer.d.ts generated vendored Normal file

@@ -0,0 +1,28 @@
/// <reference types="node" resolution-mode="require"/>
import { AbstractTokenizer } from './AbstractTokenizer.js';
import { Readable } from 'node:stream';
import { IFileInfo, IReadChunkOptions } from './types.js';
export declare class ReadStreamTokenizer extends AbstractTokenizer {
private streamReader;
constructor(stream: Readable, fileInfo?: IFileInfo);
/**
* Get file information; an HTTP client may implement this by doing a HEAD request
* @return Promise with file information
*/
getFileInfo(): Promise<IFileInfo>;
/**
* Read buffer from tokenizer
* @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
* @param options - Read behaviour options
* @returns Promise with number of bytes read
*/
readBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
/**
* Peek (read ahead) buffer from tokenizer
* @param uint8Array - Uint8Array (or Buffer) to write data to
* @param options - Read behaviour options
* @returns Promise with number of bytes peeked
*/
peekBuffer(uint8Array: Uint8Array, options?: IReadChunkOptions): Promise<number>;
ignore(length: number): Promise<number>;
}

node_modules/strtok3/lib/ReadStreamTokenizer.js generated vendored Normal file

@@ -0,0 +1,94 @@
import { AbstractTokenizer } from './AbstractTokenizer.js';
import { EndOfStreamError, StreamReader } from 'peek-readable';
const maxBufferSize = 256000;
export class ReadStreamTokenizer extends AbstractTokenizer {
constructor(stream, fileInfo) {
super(fileInfo);
this.streamReader = new StreamReader(stream);
}
/**
* Get file information; an HTTP client may implement this by doing a HEAD request
* @return Promise with file information
*/
async getFileInfo() {
return this.fileInfo;
}
/**
* Read buffer from tokenizer
* @param uint8Array - Target Uint8Array to fill with data read from the tokenizer-stream
* @param options - Read behaviour options
* @returns Promise with number of bytes read
*/
async readBuffer(uint8Array, options) {
const normOptions = this.normalizeOptions(uint8Array, options);
const skipBytes = normOptions.position - this.position;
if (skipBytes > 0) {
await this.ignore(skipBytes);
return this.readBuffer(uint8Array, options);
}
else if (skipBytes < 0) {
throw new Error('`options.position` must be equal or greater than `tokenizer.position`');
}
if (normOptions.length === 0) {
return 0;
}
const bytesRead = await this.streamReader.read(uint8Array, normOptions.offset, normOptions.length);
this.position += bytesRead;
if ((!options || !options.mayBeLess) && bytesRead < normOptions.length) {
throw new EndOfStreamError();
}
return bytesRead;
}
/**
* Peek (read ahead) buffer from tokenizer
* @param uint8Array - Uint8Array (or Buffer) to write data to
* @param options - Read behaviour options
* @returns Promise with number of bytes peeked
*/
async peekBuffer(uint8Array, options) {
const normOptions = this.normalizeOptions(uint8Array, options);
let bytesRead = 0;
if (normOptions.position) {
const skipBytes = normOptions.position - this.position;
if (skipBytes > 0) {
const skipBuffer = new Uint8Array(normOptions.length + skipBytes);
bytesRead = await this.peekBuffer(skipBuffer, { mayBeLess: normOptions.mayBeLess });
uint8Array.set(skipBuffer.subarray(skipBytes), normOptions.offset);
return bytesRead - skipBytes;
}
else if (skipBytes < 0) {
throw new Error('Cannot peek from a negative offset in a stream');
}
}
if (normOptions.length > 0) {
try {
bytesRead = await this.streamReader.peek(uint8Array, normOptions.offset, normOptions.length);
}
catch (err) {
if (options && options.mayBeLess && err instanceof EndOfStreamError) {
return 0;
}
throw err;
}
if ((!normOptions.mayBeLess) && bytesRead < normOptions.length) {
throw new EndOfStreamError();
}
}
return bytesRead;
}
async ignore(length) {
// debug(`ignore ${this.position}...${this.position + length - 1}`);
const bufSize = Math.min(maxBufferSize, length);
const buf = new Uint8Array(bufSize);
let totBytesRead = 0;
while (totBytesRead < length) {
const remaining = length - totBytesRead;
const bytesRead = await this.readBuffer(buf, { length: Math.min(bufSize, remaining) });
if (bytesRead < 0) {
return bytesRead;
}
totBytesRead += bytesRead;
}
return totBytesRead;
}
}
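
Usage sketch with an in-memory stream, assuming the async fromStream from the package root: peekBuffer() looks ahead without consuming, and ignore() skips forward in chunks bounded by maxBufferSize.

import { Readable } from 'node:stream';
import { fromStream } from 'strtok3';

const stream = Readable.from([Buffer.from([1, 2, 3, 4])]);
const tokenizer = await fromStream(stream);
const head = new Uint8Array(2);
await tokenizer.peekBuffer(head); // fills [1, 2]; position stays 0
await tokenizer.ignore(2);        // position advances to 2
await tokenizer.readBuffer(head); // fills [3, 4]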

node_modules/strtok3/lib/core.d.ts generated vendored Normal file

@@ -0,0 +1,23 @@
/// <reference types="node" resolution-mode="require"/>
import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
import { Readable } from 'node:stream';
import { BufferTokenizer } from './BufferTokenizer.js';
import { IFileInfo } from './types.js';
export { EndOfStreamError } from 'peek-readable';
export { ITokenizer, IFileInfo } from './types.js';
export { IToken, IGetToken } from '@tokenizer/token';
/**
* Construct ReadStreamTokenizer from given Stream.
* Will set the file size if the provided stream has its .path property set.
* @param stream - Read from Node.js Stream.Readable
* @param fileInfo - Pass the file information, like size and MIME-type of the corresponding stream.
* @returns ReadStreamTokenizer
*/
export declare function fromStream(stream: Readable, fileInfo?: IFileInfo): ReadStreamTokenizer;
/**
* Construct ReadStreamTokenizer from given Buffer.
* @param uint8Array - Uint8Array to tokenize
* @param fileInfo - Pass additional file information to the tokenizer
* @returns BufferTokenizer
*/
export declare function fromBuffer(uint8Array: Uint8Array, fileInfo?: IFileInfo): BufferTokenizer;

node_modules/strtok3/lib/core.js generated vendored Normal file

@@ -0,0 +1,23 @@
import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
import { BufferTokenizer } from './BufferTokenizer.js';
export { EndOfStreamError } from 'peek-readable';
/**
* Construct ReadStreamTokenizer from given Stream.
* Will set the file size if the provided stream has its .path property set.
* @param stream - Read from Node.js Stream.Readable
* @param fileInfo - Pass the file information, like size and MIME-type of the corresponding stream.
* @returns ReadStreamTokenizer
*/
export function fromStream(stream, fileInfo) {
fileInfo = fileInfo ? fileInfo : {};
return new ReadStreamTokenizer(stream, fileInfo);
}
/**
* Construct ReadStreamTokenizer from given Buffer.
* @param uint8Array - Uint8Array to tokenize
* @param fileInfo - Pass additional file information to the tokenizer
* @returns BufferTokenizer
*/
export function fromBuffer(uint8Array, fileInfo) {
return new BufferTokenizer(uint8Array, fileInfo);
}
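
A sketch contrasting the two entry points (the 'strtok3/core' subpath import is an assumption about the package's exports map): core's fromStream is synchronous and never touches node:fs, which suits environments where stat-ing a path is unwanted.

import { Readable } from 'node:stream';
import { fromStream } from 'strtok3/core'; // assumed subpath export

const tokenizer = fromStream(Readable.from([Buffer.from('abc')]), { mimeType: 'text/plain' });
console.log(tokenizer.fileInfo.mimeType); // 'text/plain'; size is left unset here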

node_modules/strtok3/lib/index.d.ts generated vendored Normal file

@@ -0,0 +1,15 @@
/// <reference types="node" resolution-mode="require"/>
import { Readable } from 'node:stream';
import { ReadStreamTokenizer } from './ReadStreamTokenizer.js';
import * as core from './core.js';
export { fromFile } from './FileTokenizer.js';
export { ITokenizer, EndOfStreamError, fromBuffer, IFileInfo } from './core.js';
export { IToken, IGetToken } from '@tokenizer/token';
/**
* Construct ReadStreamTokenizer from given Stream.
* Will set the file size if the provided stream has its .path property set.
* @param stream - Node.js Stream.Readable
* @param fileInfo - Pass additional file information to the tokenizer
* @returns Tokenizer
*/
export declare function fromStream(stream: Readable, fileInfo?: core.IFileInfo): Promise<ReadStreamTokenizer>;

node_modules/strtok3/lib/index.js generated vendored Normal file

@@ -0,0 +1,20 @@
import * as fs from './FsPromise.js';
import * as core from './core.js';
export { fromFile } from './FileTokenizer.js';
export { EndOfStreamError, fromBuffer } from './core.js';
/**
* Construct ReadStreamTokenizer from given Stream.
* Will set the file size if the provided stream has its .path property set.
* @param stream - Node.js Stream.Readable
* @param fileInfo - Pass additional file information to the tokenizer
* @returns Tokenizer
*/
export async function fromStream(stream, fileInfo) {
fileInfo = fileInfo ? fileInfo : {};
if (stream.path) {
const stat = await fs.stat(stream.path);
fileInfo.path = stream.path;
fileInfo.size = stat.size;
}
return core.fromStream(stream, fileInfo);
}
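
Usage sketch ('sample.bin' is a placeholder path): when the readable comes from fs.createReadStream, its .path property is set, so this fromStream stats the file and fills in fileInfo.path and fileInfo.size before tokenizing.

import { createReadStream } from 'node:fs';
import { fromStream } from 'strtok3';

const tokenizer = await fromStream(createReadStream('sample.bin'));
console.log(tokenizer.fileInfo.path, tokenizer.fileInfo.size);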

node_modules/strtok3/lib/types.d.ts generated vendored Normal file

@@ -0,0 +1,103 @@
/// <reference types="node" resolution-mode="require"/>
import { IGetToken } from '@tokenizer/token';
export interface IFileInfo {
/**
* File size in bytes
*/
size?: number;
/**
* MIME-type of file
*/
mimeType?: string;
/**
* File path
*/
path?: string;
/**
* File URL
*/
url?: string;
}
export interface IReadChunkOptions {
/**
* The offset in the buffer to start writing at; default is 0
*/
offset?: number;
/**
* Number of bytes to read.
*/
length?: number;
/**
* Position where to begin reading from the file.
* Defaults to `tokenizer.position`.
* Position may not be less than `tokenizer.position`.
*/
position?: number;
/**
* If set, will not throw an EOF error if not all of the requested data could be read
*/
mayBeLess?: boolean;
}
/**
* The tokenizer allows us to read or peek from the tokenizer-stream.
* The tokenizer-stream is an abstraction of a stream, file or Buffer.
*/
export interface ITokenizer {
/**
* Provides access to information about the underlying stream or file.
*/
fileInfo: IFileInfo;
/**
* Offset in bytes (= number of bytes read) since beginning of file or stream
*/
position: number;
/**
* Peek (read ahead) buffer from tokenizer
* @param buffer - Target buffer to fill with data peeked from the tokenizer-stream
* @param options - Read behaviour options
* @returns Promise with number of bytes read
*/
peekBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>;
/**
* Read buffer from tokenizer
* @param buffer - Target buffer to fill with data read from the tokenizer-stream
* @param options - Additional read options
* @returns Promise with number of bytes read
*/
readBuffer(buffer: Buffer, options?: IReadChunkOptions): Promise<number>;
/**
* Peek a token from the tokenizer-stream.
* @param token - Token to peek from the tokenizer-stream.
* @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
* @param maybeless - If set, will not throw an EOF error if less than the requested length could be read.
*/
peekToken<T>(token: IGetToken<T>, position?: number | null, maybeless?: boolean): Promise<T>;
/**
* Read a token from the tokenizer-stream.
* @param token - Token to read from the tokenizer-stream.
* @param position - Offset where to begin reading within the file. If position is null, data will be read from the current file position.
*/
readToken<T>(token: IGetToken<T>, position?: number): Promise<T>;
/**
* Peek a numeric token from the stream
* @param token - Numeric token
* @returns Promise with number
*/
peekNumber(token: IGetToken<number>): Promise<number>;
/**
* Read a numeric token from the stream
* @param token - Numeric token
* @returns Promise with number
*/
readNumber(token: IGetToken<number>): Promise<number>;
/**
* Ignore given number of bytes
* @param length - Number of bytes to ignore
*/
ignore(length: number): Promise<number>;
/**
* Clean up resources.
* It does not close the stream for the StreamReader, but it does close the file descriptor.
*/
close(): Promise<void>;
}
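
A sketch tying the IReadChunkOptions fields together ('sample.bin' is a placeholder path): fill target starting at byte 2 with up to 16 bytes taken from absolute stream position 128, tolerating a short read.

import { fromFile } from 'strtok3';

const tokenizer = await fromFile('sample.bin');
const target = new Uint8Array(32);
const bytesRead = await tokenizer.readBuffer(target, {
    offset: 2,      // where to start writing inside target
    length: 16,     // number of bytes to read
    position: 128,  // absolute position; must not be less than tokenizer.position
    mayBeLess: true // return a short count instead of throwing EndOfStreamError
});
console.log(bytesRead);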

node_modules/strtok3/lib/types.js generated vendored Normal file

@@ -0,0 +1 @@
export {};