Diffstat (limited to 'node_modules/liquidjs/dist/parser')
10 files changed, 129 insertions, 0 deletions
diff --git a/node_modules/liquidjs/dist/parser/filter-arg.d.ts b/node_modules/liquidjs/dist/parser/filter-arg.d.ts
new file mode 100644
index 0000000..dfe7996
--- /dev/null
+++ b/node_modules/liquidjs/dist/parser/filter-arg.d.ts
@@ -0,0 +1,5 @@
+import { ValueToken } from '../tokens/value-token';
+type KeyValuePair = [string?, ValueToken?];
+export type FilterArg = ValueToken | KeyValuePair;
+export declare function isKeyValuePair(arr: FilterArg): arr is KeyValuePair;
+export {};
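
A filter argument is either a plain ValueToken (positional) or a [name, value] tuple (named), and isKeyValuePair is the type guard that narrows between the two. A minimal sketch of consuming that union; the deep import path below is an assumption about how these dist modules resolve, so adjust it to your setup:

    // Hypothetical helper: for "{{ x | default: 'a', allow_false: true }}",
    // the positional 'a' arrives as a ValueToken and the named argument as
    // ['allow_false', ValueToken]. The import path is assumed, not part of the diff.
    import { FilterArg, isKeyValuePair } from 'liquidjs/dist/parser/filter-arg'

    function argName (arg: FilterArg): string | undefined {
      // Key-value pairs carry the name at index 0; positional args have none.
      return isKeyValuePair(arg) ? arg[0] : undefined
    }
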
diff --git a/node_modules/liquidjs/dist/parser/index.d.ts b/node_modules/liquidjs/dist/parser/index.d.ts
new file mode 100644
index 0000000..931955e
--- /dev/null
+++ b/node_modules/liquidjs/dist/parser/index.d.ts
@@ -0,0 +1,4 @@
+export * from './tokenizer';
+export * from './parser';
+export * from './parse-stream';
+export * from './token-kind';
diff --git a/node_modules/liquidjs/dist/parser/parse-stream.d.ts b/node_modules/liquidjs/dist/parser/parse-stream.d.ts
new file mode 100644
index 0000000..83b97d7
--- /dev/null
+++ b/node_modules/liquidjs/dist/parser/parse-stream.d.ts
@@ -0,0 +1,15 @@
+import { Token, TopLevelToken } from '../tokens';
+import { Template } from '../template';
+type ParseToken<T extends Token> = ((token: T, remainTokens: T[]) => Template);
+export declare class ParseStream<T extends Token = TopLevelToken> {
+ private tokens;
+ private handlers;
+ private stopRequested;
+ private parseToken;
+ constructor(tokens: T[], parseToken: ParseToken<T>);
+ on<T2 extends Template | T | undefined>(name: string, cb: (this: ParseStream, arg: T2) => void): ParseStream<T>;
+ private trigger;
+ start(): this;
+ stop(): this;
+}
+export {};
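
ParseStream is a small event-driven loop over a token list: 'start' fires before iteration, 'token' and 'tag:<name>' let handlers intercept raw tokens (a handler that calls stop() halts the loop, which is how block tags find their end tag), 'template' delivers each parsed template, and 'end' fires only if the tokens run out first. A standalone sketch, assuming ParseStream is reached through the public Liquid instance and that Tokenizer (declared later in this diff) is re-exported from the package root:

    import { Liquid, Template, Tokenizer } from 'liquidjs'

    const liquid = new Liquid()
    const tokens = new Tokenizer('a {% assign x = 1 %} b').readTopLevelTokens()
    const templates: Template[] = []

    // Collect each parsed template; 'end' fires because nothing calls stop().
    liquid.parser.parseStream(tokens)
      .on('template', (tpl: Template) => templates.push(tpl))
      .on('end', () => console.log(`parsed ${templates.length} templates`)) // 3
      .start()
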
diff --git a/node_modules/liquidjs/dist/parser/parse-stream.spec.d.ts b/node_modules/liquidjs/dist/parser/parse-stream.spec.d.ts
new file mode 100644
index 0000000..509db18
--- /dev/null
+++ b/node_modules/liquidjs/dist/parser/parse-stream.spec.d.ts
@@ -0,0 +1 @@
+export {};
diff --git a/node_modules/liquidjs/dist/parser/parser.d.ts b/node_modules/liquidjs/dist/parser/parser.d.ts
new file mode 100644
index 0000000..ab60443
--- /dev/null
+++ b/node_modules/liquidjs/dist/parser/parser.d.ts
@@ -0,0 +1,20 @@
+import { ParseStream } from './parse-stream';
+import { TopLevelToken } from '../tokens';
+import { Template, Output, HTML } from '../template';
+import { LookupType } from '../fs';
+import type { Liquid } from '../liquid';
+export declare class Parser {
+ parseFile: (file: string, sync?: boolean, type?: LookupType, currentFile?: string) => Generator<unknown, Template[], Template[] | string>;
+ private liquid;
+ private fs;
+ private cache?;
+ private loader;
+ private parseLimit;
+ constructor(liquid: Liquid);
+ parse(html: string, filepath?: string): Template[];
+ parseTokens(tokens: TopLevelToken[]): Template[];
+ parseToken(token: TopLevelToken, remainTokens: TopLevelToken[]): import("../template").Tag | Output | HTML;
+ parseStream(tokens: TopLevelToken[]): ParseStream<TopLevelToken>;
+ private _parseFileCached;
+ private _parseFile;
+}
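
Parser#parse is the entry point that turns template source into the Template[] array the renderer consumes (via parseTokens/parseToken), while parseFile resolves partials and layouts through the configured fs loader. In everyday use it sits behind the public Liquid facade; a small sketch with made-up template text and data:

    import { Liquid, Template } from 'liquidjs'

    const liquid = new Liquid()

    // Liquid#parse delegates to Parser#parse; the resulting Template[] can be
    // cached and rendered repeatedly against different scopes.
    const templates: Template[] = liquid.parse('Hello, {{ name | capitalize }}!')

    liquid.render(templates, { name: 'alice' })
      .then(html => console.log(html)) // Hello, Alice!
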
diff --git a/node_modules/liquidjs/dist/parser/parser.spec.d.ts b/node_modules/liquidjs/dist/parser/parser.spec.d.ts
new file mode 100644
index 0000000..509db18
--- /dev/null
+++ b/node_modules/liquidjs/dist/parser/parser.spec.d.ts
@@ -0,0 +1 @@
+export {};
diff --git a/node_modules/liquidjs/dist/parser/token-kind.d.ts b/node_modules/liquidjs/dist/parser/token-kind.d.ts
new file mode 100644
index 0000000..47bec4f
--- /dev/null
+++ b/node_modules/liquidjs/dist/parser/token-kind.d.ts
@@ -0,0 +1,16 @@
+export declare enum TokenKind {
+ Number = 1,
+ Literal = 2,
+ Tag = 4,
+ Output = 8,
+ HTML = 16,
+ Filter = 32,
+ Hash = 64,
+ PropertyAccess = 128,
+ Word = 256,
+ Range = 512,
+ Quoted = 1024,
+ Operator = 2048,
+ FilteredValue = 4096,
+ Delimited = 12
+}
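
These values are bit flags rather than a sequence, which is why Delimited is 12: it is Tag | Output (4 | 8), so a single bitwise test covers both kinds of delimited token. For example, assuming TokenKind and Tokenizer are re-exported from the package root as in recent liquidjs releases:

    import { Tokenizer, TokenKind } from 'liquidjs'

    const tokens = new Tokenizer('{% if ok %}{{ msg }}{% endif %}').readTopLevelTokens()

    // Delimited === TokenKind.Tag | TokenKind.Output, so the mask matches both
    // {% ... %} and {{ ... }} tokens while skipping plain HTML tokens.
    const delimited = tokens.filter(token => (token.kind & TokenKind.Delimited) !== 0)
    console.log(delimited.length) // 3
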
diff --git a/node_modules/liquidjs/dist/parser/tokenizer.d.ts b/node_modules/liquidjs/dist/parser/tokenizer.d.ts
new file mode 100644
index 0000000..6070291
--- /dev/null
+++ b/node_modules/liquidjs/dist/parser/tokenizer.d.ts
@@ -0,0 +1,63 @@
+import { FilteredValueToken, TagToken, HTMLToken, HashToken, QuotedToken, LiquidTagToken, OutputToken, ValueToken, Token, RangeToken, FilterToken, TopLevelToken, OperatorToken, LiteralToken, IdentifierToken, NumberToken } from '../tokens';
+import { Trie, TokenizationError } from '../util';
+import { Operators, Expression } from '../render';
+import { NormalizedFullOptions } from '../liquid-options';
+import { FilterArg } from './filter-arg';
+export declare class Tokenizer {
+ input: string;
+ file?: string | undefined;
+ p: number;
+ N: number;
+ private rawBeginAt;
+ private opTrie;
+ private literalTrie;
+ constructor(input: string, operators?: Operators, file?: string | undefined, range?: [number, number]);
+ readExpression(): Expression;
+ readExpressionTokens(): IterableIterator<Token>;
+ readOperator(): OperatorToken | undefined;
+ matchTrie<T>(trie: Trie<T>): number;
+ readFilteredValue(): FilteredValueToken;
+ readFilters(): FilterToken[];
+ readFilter(): FilterToken | null;
+ readFilterArg(): FilterArg | undefined;
+ readTopLevelTokens(options?: NormalizedFullOptions): TopLevelToken[];
+ readTopLevelToken(options: NormalizedFullOptions): TopLevelToken;
+ readHTMLToken(stopStrings: string[]): HTMLToken;
+ readTagToken(options: NormalizedFullOptions): TagToken;
+ readToDelimiter(delimiter: string, respectQuoted?: boolean): number;
+ readOutputToken(options?: NormalizedFullOptions): OutputToken;
+ readEndrawOrRawContent(options: NormalizedFullOptions): HTMLToken | TagToken;
+ readLiquidTagTokens(options?: NormalizedFullOptions): LiquidTagToken[];
+ readLiquidTagToken(options: NormalizedFullOptions): LiquidTagToken | undefined;
+ error(msg: string, pos?: number): TokenizationError;
+ assert(pred: unknown, msg: string | (() => string), pos?: number): void;
+ snapshot(begin?: number): string;
+ /**
+ * @deprecated use #readIdentifier instead
+ */
+ readWord(): IdentifierToken;
+ readIdentifier(): IdentifierToken;
+ readNonEmptyIdentifier(): IdentifierToken | undefined;
+ readTagName(): string;
+ readHashes(jekyllStyle?: boolean | string): HashToken[];
+ readHash(jekyllStyle?: boolean | string): HashToken | undefined;
+ remaining(): string;
+ advance(step?: number): void;
+ end(): boolean;
+ read(): string;
+ readTo(end: string): number;
+ readValue(): ValueToken | undefined;
+ readScopeValue(): ValueToken | undefined;
+ private readProperties;
+ readNumber(): NumberToken | undefined;
+ readLiteral(): LiteralToken | undefined;
+ readRange(): RangeToken | undefined;
+ readValueOrThrow(): ValueToken;
+ readQuoted(): QuotedToken | undefined;
+ readFileNameTemplate(options: NormalizedFullOptions): IterableIterator<TopLevelToken>;
+ match(word: string): boolean;
+ rmatch(pattern: string): boolean;
+ peekType(n?: number): number;
+ peek(n?: number): string;
+ skipBlank(): void;
+}
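
The Tokenizer works at two levels: readTopLevelTokens splits raw source into HTML, output and tag tokens, while the read* expression methods (readValue, readScopeValue, readFilteredValue, readHashes, ...) tokenize the text inside those delimiters. A brief sketch, again assuming the root re-exports:

    import { Tokenizer, TokenKind } from 'liquidjs'

    // Top-level pass: source is split into HTML / Output / Tag tokens.
    const kinds = new Tokenizer('Hello {{ name | capitalize }}!')
      .readTopLevelTokens()
      .map(token => TokenKind[token.kind])
    console.log(kinds) // [ 'HTML', 'Output', 'HTML' ]

    // Expression pass: the same class tokenizes a scope path; getText()
    // returns the raw slice of input covered by the token.
    const value = new Tokenizer('products[0].title').readScopeValue()
    console.log(value && value.getText()) // products[0].title
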
diff --git a/node_modules/liquidjs/dist/parser/tokenizer.spec.d.ts b/node_modules/liquidjs/dist/parser/tokenizer.spec.d.ts
new file mode 100644
index 0000000..509db18
--- /dev/null
+++ b/node_modules/liquidjs/dist/parser/tokenizer.spec.d.ts
@@ -0,0 +1 @@
+export {};
diff --git a/node_modules/liquidjs/dist/parser/whitespace-ctrl.d.ts b/node_modules/liquidjs/dist/parser/whitespace-ctrl.d.ts
new file mode 100644
index 0000000..a1cbdcd
--- /dev/null
+++ b/node_modules/liquidjs/dist/parser/whitespace-ctrl.d.ts
@@ -0,0 +1,3 @@
+import { Token } from '../tokens';
+import { NormalizedFullOptions } from '../liquid-options';
+export declare function whiteSpaceCtrl(tokens: Token[], options: NormalizedFullOptions): void;
