Current state
backend/node_modules/grammex/dist/index.d.ts (generated, vendored, new file, 37 lines added)
@@ -0,0 +1,37 @@
import type { CompoundHandler, PrimitiveHandler } from './types.js';
import type { ExplicitRule, ImplicitRule, Rule, MappedRule } from './types.js';
import type { Options, State } from './types.js';
declare const parse: <T>(input: string, rule: Rule<T>, options?: Options) => T[];
declare const validate: <T>(input: string, rule: Rule<T>, options?: Options) => boolean;
declare const match: <T>(target: RegExp | string | string[], handler?: PrimitiveHandler<T> | T) => ExplicitRule<T>;
declare const repeat: <T, U = T>(rule: Rule<T>, min: number, max: number, handler?: CompoundHandler<T, U>) => ExplicitRule<U>;
declare const optional: <T, U = T>(rule: Rule<T>, handler?: CompoundHandler<T, U>) => ExplicitRule<U>;
declare const star: <T, U = T>(rule: Rule<T>, handler?: CompoundHandler<T, U>) => ExplicitRule<U>;
declare const plus: <T, U = T>(rule: Rule<T>, handler?: CompoundHandler<T, U>) => ExplicitRule<U>;
declare const and: <T, U = T>(rules: (MappedRule<T> | Rule<T>)[], handler?: CompoundHandler<T, U>) => ExplicitRule<U>;
declare const or: <T, U = T>(rules: (MappedRule<T> | Rule<T>)[], handler?: CompoundHandler<T, U>) => ExplicitRule<U>;
declare const jump: <T, U = T>(rules: Record<string, Rule<T>>, handler?: CompoundHandler<T, U>) => ExplicitRule<U>;
declare const negative: <T>(rule: Rule<T>) => ExplicitRule<T>;
declare const positive: <T>(rule: Rule<T>) => ExplicitRule<T>;
declare const grammar: <T, U>(fn: (operators: {
    match: typeof match<T>;
    repeat: typeof repeat<T>;
    optional: typeof optional<T>;
    star: typeof star<T>;
    plus: typeof plus<T>;
    and: typeof and<T>;
    or: typeof or<T>;
    jump: typeof jump<T>;
    negative: typeof negative<T>;
    positive: typeof positive<T>;
    lazy: typeof lazy<T>;
}) => U) => U;
declare const lazy: <T = any>(getter: Function) => ExplicitRule<T>;
export { parse, validate };
export { match };
export { repeat, optional, star, plus };
export { and };
export { or, jump };
export { negative, positive };
export { grammar, lazy };
export type { CompoundHandler, PrimitiveHandler, ExplicitRule, ImplicitRule, Rule, Options, State };
backend/node_modules/grammex/dist/index.js (generated, vendored, new file, 378 lines added)
@@ -0,0 +1,378 @@
/* IMPORT */
import { isArray, isFunction, isFunctionNullary, isFunctionStrictlyNullaryOrUnary, isNumber, isObject, isRegExp, isRegExpCapturing, isRegExpStatic, isString, isUndefined, memoize } from './utils.js';
/* MAIN */
const parse = (input, rule, options = {}) => {
    const state = { cache: {}, input, index: 0, indexBacktrackMax: 0, options, output: [] };
    const matched = resolve(rule)(state);
    const indexMax = Math.max(state.index, state.indexBacktrackMax);
    if (matched && state.index === input.length) {
        return state.output;
    }
    else {
        throw new Error(`Failed to parse at index ${indexMax}`);
    }
};
const validate = (input, rule, options = {}) => {
    const state = { cache: {}, input, index: 0, indexBacktrackMax: 0, options, output: [] };
    const matched = resolve(rule)(state);
    const validated = matched && state.index === input.length;
    return validated;
};
/* RULES - PRIMITIVE */
const match = (target, handler) => {
    if (isArray(target)) {
        return chars(target, handler);
    }
    else if (isString(target)) {
        return string(target, handler);
    }
    else {
        return regex(target, handler);
    }
};
const chars = (target, handler) => {
    const charCodes = {};
    for (const char of target) {
        if (char.length !== 1)
            throw new Error(`Invalid character: "${char}"`);
        const charCode = char.charCodeAt(0);
        charCodes[charCode] = true;
    }
    return (state) => {
        const input = state.input;
        let indexStart = state.index;
        let indexEnd = indexStart;
        while (indexEnd < input.length) {
            const charCode = input.charCodeAt(indexEnd);
            if (!(charCode in charCodes))
                break;
            indexEnd += 1;
        }
        if (indexEnd > indexStart) {
            if (!isUndefined(handler) && !state.options.silent) {
                const target = input.slice(indexStart, indexEnd);
                const output = isFunction(handler) ? handler(target, input, `${indexStart}`) : handler;
                if (!isUndefined(output)) {
                    state.output.push(output);
                }
            }
            state.index = indexEnd;
        }
        return true;
    };
};
const regex = (target, handler) => {
    if (isRegExpStatic(target)) {
        return string(target.source, handler);
    }
    else {
        const source = target.source;
        const flags = target.flags.replace(/y|$/, 'y');
        const re = new RegExp(source, flags);
        if (isRegExpCapturing(target) && isFunction(handler) && !isFunctionStrictlyNullaryOrUnary(handler)) {
            return regexCapturing(re, handler);
        }
        else {
            return regexNonCapturing(re, handler);
        }
    }
};
const regexCapturing = (re, handler) => {
    return (state) => {
        const indexStart = state.index;
        const input = state.input;
        re.lastIndex = indexStart;
        const match = re.exec(input);
        if (match) {
            const indexEnd = re.lastIndex;
            if (!state.options.silent) {
                const output = handler(...match, input, `${indexStart}`);
                if (!isUndefined(output)) {
                    state.output.push(output);
                }
            }
            state.index = indexEnd;
            return true;
        }
        else {
            return false;
        }
    };
};
const regexNonCapturing = (re, handler) => {
    return (state) => {
        const indexStart = state.index;
        const input = state.input;
        re.lastIndex = indexStart;
        const matched = re.test(input);
        if (matched) {
            const indexEnd = re.lastIndex;
            if (!isUndefined(handler) && !state.options.silent) {
                const output = isFunction(handler) ? handler(input.slice(indexStart, indexEnd), input, `${indexStart}`) : handler;
                if (!isUndefined(output)) {
                    state.output.push(output);
                }
            }
            state.index = indexEnd;
            return true;
        }
        else {
            return false;
        }
    };
};
const string = (target, handler) => {
    return (state) => {
        const indexStart = state.index;
        const input = state.input;
        const matched = input.startsWith(target, indexStart);
        if (matched) {
            if (!isUndefined(handler) && !state.options.silent) {
                const output = isFunction(handler) ? handler(target, input, `${indexStart}`) : handler;
                if (!isUndefined(output)) {
                    state.output.push(output);
                }
            }
            state.index += target.length;
            return true;
        }
        else {
            return false;
        }
    };
};
/* RULES - REPETITION */
const repeat = (rule, min, max, handler) => {
    const erule = resolve(rule);
    const isBacktrackable = (min > 1);
    return memoizable(handleable(backtrackable((state) => {
        let repetitions = 0;
        while (repetitions < max) {
            const index = state.index;
            const matched = erule(state);
            if (!matched)
                break;
            repetitions += 1;
            if (state.index === index)
                break;
        }
        return (repetitions >= min);
    }, isBacktrackable), handler));
};
const optional = (rule, handler) => {
    return repeat(rule, 0, 1, handler);
};
const star = (rule, handler) => {
    return repeat(rule, 0, Infinity, handler);
};
const plus = (rule, handler) => {
    return repeat(rule, 1, Infinity, handler);
};
/* RULES - SEQUENCE */
const and = (rules, handler) => {
    const erules = rules.map(resolve);
    return memoizable(handleable(backtrackable((state) => {
        for (let i = 0, l = erules.length; i < l; i++) {
            if (!erules[i](state))
                return false;
        }
        return true;
    }), handler));
};
/* RULES - CHOICE */
const or = (rules, handler) => {
    const erules = rules.map(resolve);
    return memoizable(handleable((state) => {
        for (let i = 0, l = erules.length; i < l; i++) {
            if (erules[i](state))
                return true;
        }
        return false;
    }, handler));
};
const jump = (rules, handler) => {
    const erules = {};
    for (const char in rules) {
        if (char.length !== 1 && char !== 'default')
            throw new Error(`Invalid jump character: "${char}"`);
        erules[char] = resolve(rules[char]);
    }
    return handleable((state) => {
        const char = state.input[state.index];
        const erule = erules[char] || erules['default'];
        if (erule) {
            return erule(state);
        }
        else {
            return false;
        }
    }, handler);
};
/* RULES - LOOKAHEAD */
const lookahead = (rule, result) => {
    const erule = resolve(rule);
    return backtrackable((state) => {
        return erule(state) === result;
    }, true, true);
};
const negative = (rule) => {
    return lookahead(rule, false);
};
const positive = (rule) => {
    return lookahead(rule, true);
};
/* RULES - DECORATORS */
const backtrackable = (rule, enabled = true, force = false) => {
    const erule = resolve(rule);
    if (!enabled)
        return erule;
    return (state) => {
        const index = state.index;
        const length = state.output.length;
        const matched = erule(state);
        if (!matched && !force) {
            state.indexBacktrackMax = Math.max(state.indexBacktrackMax, state.index);
        }
        if (!matched || force) {
            state.index = index;
            if (state.output.length !== length) { // This can be surprisingly slow otherwise
                state.output.length = length;
            }
        }
        return matched;
    };
};
const handleable = (rule, handler) => {
    const erule = resolve(rule);
    if (!handler)
        return erule; //TSC: incorrect types, but correct behavior
    return (state) => {
        if (state.options.silent)
            return erule(state);
        const length = state.output.length;
        const matched = erule(state);
        if (matched) {
            const outputs = state.output.splice(length, Infinity);
            const output = handler(outputs);
            if (!isUndefined(output)) {
                state.output.push(output);
            }
            return true;
        }
        else {
            return false;
        }
    };
};
const memoizable = (() => {
    let RULE_ID = 0; // This is faster than using symbols, for some reason
    return (rule) => {
        const erule = resolve(rule);
        const ruleId = (RULE_ID += 1);
        return (state) => {
            var _a;
            if (state.options.memoization === false)
                return erule(state);
            const indexStart = state.index;
            const cache = ((_a = state.cache)[ruleId] || (_a[ruleId] = { indexMax: -1, queue: [] }));
            const cacheQueue = cache.queue;
            const isPotentiallyCached = (indexStart <= cache.indexMax);
            if (isPotentiallyCached) {
                const cacheStore = (cache.store || (cache.store = new Map()));
                if (cacheQueue.length) { // There are some pending cache entries to register, which is somewhat expensive
                    for (let i = 0, l = cacheQueue.length; i < l; i += 2) {
                        const key = cacheQueue[i * 2]; //TSC
                        const value = cacheQueue[i * 2 + 1];
                        cacheStore.set(key, value);
                    }
                    cacheQueue.length = 0;
                }
                const cached = cacheStore.get(indexStart);
                if (cached === false) {
                    return false;
                }
                else if (isNumber(cached)) {
                    state.index = cached;
                    return true;
                }
                else if (cached) {
                    state.index = cached.index;
                    if (cached.output?.length) {
                        state.output.push(...cached.output);
                    }
                    return true;
                }
            }
            const lengthStart = state.output.length;
            const matched = erule(state);
            cache.indexMax = Math.max(cache.indexMax, indexStart);
            if (matched) {
                const indexEnd = state.index;
                const lengthEnd = state.output.length;
                if (lengthEnd > lengthStart) {
                    const output = state.output.slice(lengthStart, lengthEnd);
                    cacheQueue.push(indexStart, { index: indexEnd, output });
                }
                else {
                    cacheQueue.push(indexStart, indexEnd);
                }
                return true;
            }
            else {
                cacheQueue.push(indexStart, false);
                return false;
            }
        };
    };
})();
/* RULES - UTILITIES */
const grammar = (fn) => {
    return fn({
        match: (match),
        repeat: (repeat),
        optional: (optional),
        star: (star),
        plus: (plus),
        and: (and),
        or: (or),
        jump: (jump),
        negative: (negative),
        positive: (positive),
        lazy: (lazy)
    });
};
const lazy = (getter) => {
    let erule;
    return (state) => {
        erule || (erule = resolve(getter()));
        return erule(state);
    };
};
const resolve = memoize((rule) => {
    if (isFunction(rule)) {
        if (isFunctionNullary(rule)) {
            return lazy(rule);
        }
        else {
            return rule;
        }
    }
    if (isString(rule) || isRegExp(rule)) {
        return match(rule);
    }
    if (isArray(rule)) {
        return and(rule);
    }
    if (isObject(rule)) {
        return or(Object.values(rule));
    }
    throw new Error('Invalid rule');
});
/* EXPORT */
export { parse, validate };
export { match };
export { repeat, optional, star, plus };
export { and };
export { or, jump };
export { negative, positive };
export { grammar, lazy };
backend/node_modules/grammex/dist/types.d.ts (generated, vendored, new file, 32 lines added)
@@ -0,0 +1,32 @@
type CompoundHandler<T, U> = (token: T[]) => U | undefined;
type PrimitiveHandler<T> = (...args: string[]) => T | undefined;
type ExplicitRule<T> = (state: State<T>) => boolean;
type ImplicitRule<T> = string | RegExp | Rule<T>[] | {
    [Key in string]: Rule<T>;
} | (() => Rule<T>);
type Rule<T> = ExplicitRule<T> | ImplicitRule<T>;
type MappedRule<T> = T extends any ? Rule<T> : never;
type Cache<T> = Record<number, {
    indexMax: number;
    queue: (number | CacheValue<T>)[];
    store?: Map<number, CacheValue<T>>;
}>;
type CacheValue<T> = {
    index: number;
    output?: T[];
} | number | false;
type Options = {
    memoization?: boolean;
    silent?: boolean;
};
type State<T> = {
    cache: Cache<T>;
    input: string;
    index: number;
    indexBacktrackMax: number;
    options: Options;
    output: T[];
};
export type { CompoundHandler, PrimitiveHandler };
export type { ExplicitRule, ImplicitRule, Rule, MappedRule };
export type { Cache, CacheValue, Options, State };
backend/node_modules/grammex/dist/types.js (generated, vendored, new file, 2 lines added)
@@ -0,0 +1,2 @@
/* MAIN */
export {};
backend/node_modules/grammex/dist/utils.d.ts (generated, vendored, new file, 13 lines added)
@@ -0,0 +1,13 @@
declare const isArray: (value: unknown) => value is unknown[];
declare const isFunction: (value: unknown) => value is Function;
declare const isFunctionNullary: (value: Function) => value is (() => unknown);
declare const isFunctionStrictlyNullaryOrUnary: (value: Function) => boolean;
declare const isNumber: (value: unknown) => value is number;
declare const isObject: (value: unknown) => value is object;
declare const isRegExp: (value: unknown) => value is RegExp;
declare const isRegExpCapturing: (re: RegExp) => boolean;
declare const isRegExpStatic: (re: RegExp) => boolean;
declare const isString: (value: unknown) => value is string;
declare const isUndefined: (value: unknown) => value is undefined;
declare const memoize: <T, U>(fn: (arg: T) => U) => ((arg: T) => U);
export { isArray, isFunction, isFunctionNullary, isFunctionStrictlyNullaryOrUnary, isNumber, isObject, isRegExp, isRegExpCapturing, isRegExpStatic, isString, isUndefined, memoize };
backend/node_modules/grammex/dist/utils.js (generated, vendored, new file, 57 lines added)
@@ -0,0 +1,57 @@
/* MAIN */
const isArray = (value) => {
    return Array.isArray(value);
};
const isFunction = (value) => {
    return typeof value === 'function';
};
const isFunctionNullary = (value) => {
    return value.length === 0;
};
const isFunctionStrictlyNullaryOrUnary = (() => {
    const { toString } = Function.prototype;
    const re = /(?:^\(\s*(?:[^,.()]|\.(?!\.\.))*\s*\)\s*=>|^\s*[a-zA-Z$_][a-zA-Z0-9$_]*\s*=>)/;
    return (value) => {
        return (value.length === 0 || value.length === 1) && re.test(toString.call(value));
    };
})();
const isNumber = (value) => {
    return typeof value === 'number';
};
const isObject = (value) => {
    return typeof value === 'object' && value !== null;
};
const isRegExp = (value) => {
    return value instanceof RegExp;
};
const isRegExpCapturing = (() => {
    const sourceRe = /\\\(|\((?!\?(?::|=|!|<=|<!))/;
    return (re) => {
        return sourceRe.test(re.source);
    };
})();
const isRegExpStatic = (() => {
    const sourceRe = /^[a-zA-Z0-9_-]+$/;
    return (re) => {
        return sourceRe.test(re.source) && !re.flags.includes('i');
    };
})();
const isString = (value) => {
    return typeof value === 'string';
};
const isUndefined = (value) => {
    return value === undefined;
};
const memoize = (fn) => {
    const cache = new Map();
    return (arg) => {
        const cached = cache.get(arg);
        if (cached !== undefined)
            return cached;
        const value = fn(arg);
        cache.set(arg, value);
        return value;
    };
};
/* EXPORT */
export { isArray, isFunction, isFunctionNullary, isFunctionStrictlyNullaryOrUnary, isNumber, isObject, isRegExp, isRegExpCapturing, isRegExpStatic, isString, isUndefined, memoize };
backend/node_modules/grammex/license (generated, vendored, new file, 21 lines added)
@@ -0,0 +1,21 @@
The MIT License (MIT)

Copyright (c) 2023-present Fabio Spampinato

Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
backend/node_modules/grammex/package.json (generated, vendored, new executable file, 36 lines added)
@@ -0,0 +1,36 @@
{
  "name": "grammex",
  "repository": "github:fabiospampinato/grammex",
  "description": "A tiny, PEG-like system for building language grammars with regexes.",
  "license": "MIT",
  "version": "3.1.12",
  "type": "module",
  "main": "dist/index.js",
  "exports": "./dist/index.js",
  "types": "./dist/index.d.ts",
  "sideEffects": false,
  "scripts": {
    "benchmark": "tsex benchmark",
    "benchmark:watch": "tsex benchmark --watch",
    "clean": "tsex clean",
    "compile": "tsex compile",
    "compile:watch": "tsex compile --watch",
    "test": "tsex test",
    "test:watch": "tsex test --watch",
    "prepublishOnly": "tsex prepare"
  },
  "keywords": [
    "tiny",
    "peg",
    "regex",
    "parsing",
    "expression",
    "grammar"
  ],
  "devDependencies": {
    "benchloop": "^2.1.1",
    "fava": "^0.3.4",
    "tsex": "^4.0.2",
    "typescript": "^5.7.3"
  }
}
backend/node_modules/grammex/readme.md (generated, vendored, new file, 122 lines added)
@@ -0,0 +1,122 @@
# Grammex

A tiny PEG-like system for building language grammars with regexes.

## Overview

The following functions for executing rules are provided:

| Function | Description |
| --- | --- |
| `parse(input,rule,options)` | Parses an input string with a given rule and options. It throws if parsing fails, including if some of the input string wasn't consumed. |
| `validate(input,rule,options)` | Parses an input string with a given rule and options. It returns a boolean. |

The following functions for creating a primitive rule are provided:

| Function | Description |
| --- | --- |
| `match(target,cb)` | Creates a new rule that tries to match the input string at the current position with the given regex/string/characters. |

The following higher-order functions for creating a rule out of other rules are provided:

| Function | Description |
| --- | --- |
| `repeat(rule,min,max,cb)` | Creates a rule that tries to match the given rule at least `min` times and at most `max` times. |
| `optional(rule,cb)` | Creates a rule that tries to match the given rule zero or one times. |
| `star(rule,cb)` | Creates a rule that tries to match the given rule zero or more times. |
| `plus(rule,cb)` | Creates a rule that tries to match the given rule one or more times. |
| `and(rule[],cb)` | Creates a rule that tries to match all the given rules in sequence, one after the other. |
| `or(rule[],cb)` | Creates a rule that tries to match any of the given rules, stopping at the first matching one. |
| `jump(rule{},cb)` | Creates a rule that tries to match any of the given rules, but trying only one of the options, chosen by looking at the next character. |
| `negative(rule)` | Creates a rule that tries to not match the given rule. This rule doesn't consume any input, it's a negative lookahead. |
| `positive(rule)` | Creates a rule that tries to match the given rule. This rule doesn't consume any input, it's a positive lookahead. |
| `lazy(()=>rule)` | Creates a rule out of a getter for another rule. This is necessary when dealing with circular references. |
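As a minimal illustrative sketch of how these compose (the `Word`/`Comma`/`List` rules and the input are made up for the example), a primitive rule's handler return value becomes a token in the output, while rules without handlers only consume input:

```ts
import {and, match, parse, star} from 'grammex';

// Tokenize a comma-separated list of lowercase words, emitting one token per word
const Word = match ( /[a-z]+/, word => word.toUpperCase () );
const Comma = match<string> ( ',' ); // no handler: consumes input but emits nothing
const List = and ([ Word, star ([ Comma, Word ]) ]);

parse ( 'foo,bar,baz', List ); // => ['FOO', 'BAR', 'BAZ']
```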
The following shorthands for creating rules are provided:

| Shorthand | Description |
| --- | --- |
| `'foo'` | A string is automatically interpreted as the primitive rule using the regex that would match the provided string. |
| `/foo/` | A regex is automatically interpreted as the primitive rule using the provided regex. |
| `['foo',/bar/]` | An array of strings and regexes is automatically interpreted as wrapped in an `and` rule. |
| `{Foo,Bar}` | A plain object with strings and regexes as values is automatically interpreted as those values wrapped in an `or` rule. |
| `()=>Foo` | An argumentless function is automatically interpreted as the same function wrapped in a `lazy` rule. |
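Concretely, a small sketch of the equivalence (the `Hex`/`Dec` names are illustrative only):

```ts
import {and, match, or, validate} from 'grammex';

// Shorthand form: regexes become `match` rules, the array becomes an `and`, the object becomes an `or`
const Shorthand = { Hex: [/0x/, /[0-9a-f]+/], Dec: /[0-9]+/ };

// The same rule, written out explicitly
const Explicit = or ([ and ([ match ( /0x/ ), match ( /[0-9a-f]+/ ) ]), match ( /[0-9]+/ ) ]);

validate ( '0xff', Shorthand ); // => true
validate ( '0xff', Explicit ); // => true
```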
The following utility functions are provided:

| Utility | Description |
| --- | --- |
| `grammar<T>(cb)` | Creates a new set of primitive and higher-order functions for making rules, with a fixed token type, and passes them to your callback. |
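A minimal sketch of what that looks like (the `Token` type and the rules inside the callback are made up for illustration):

```ts
import {grammar, parse} from 'grammex';
import type {ExplicitRule} from 'grammex';

type Token = { type: 'word' | 'number', value: string };

// Every operator handed to the callback is pre-bound to the Token output type
const Root = grammar<Token, ExplicitRule<Token>> ( ({ match, star, or }) => {
  const Word = match ( /[a-z]+/, value => ({ type: 'word', value }) );
  const Num = match ( /[0-9]+/, value => ({ type: 'number', value }) );
  return star ( or ([ Word, Num ]) );
});

parse ( 'abc123', Root ); // => [{ type: 'word', value: 'abc' }, { type: 'number', value: '123' }]
```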
The following options are supported:

| Option | Description |
| --- | --- |
| `memoization` | `true` by default. If enabled this lowers the time complexity of the parser, but it can have a negative impact on performance for grammars and inputs that involve little backtracking. |
| `silent` | `false` by default. If enabled then rule callbacks are not called, which enables faster validation if your callbacks are free of side effects. |
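For example (the `Letters` rule is illustrative), options are passed as the third argument to `parse` and `validate`:

```ts
import {validate} from 'grammex';

const Letters = /[a-z]+/;

validate ( 'hello', Letters ); // => true, with memoization on and handlers invoked
validate ( 'hello', Letters, { memoization: false } ); // => true, skipping the memoization cache
validate ( 'hello', Letters, { silent: true } ); // => true, without invoking any rule handlers
```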
Basically you should create some primitive rules with `match`, combine those into higher-order rules, decide which one of those will be your "root" rule, and use that to `parse` or `validate` an input string.

If a `parse` call is successful that means that a number of rules successfully matched the entire input string; each time a rule matches, its `cb` function is called and its return value is appended to the output stream -- `parse` will simply return you this output stream.

All provided rules are "greedy", to conform with PEG grammars, removing ambiguities and improving performance significantly. Higher-order rules are also internally memoized by default, to ensure fast parsing times in edge cases, but you can turn that off for extra speed if your grammar is not too ambiguous. Primitive rules are never internally memoized, but if needed you can enable memoization for a primitive rule by wrapping it in an `and` rule.

## Install

```sh
npm install grammex
```

## Usage

```ts
import {optional as O, or, validate} from 'grammex';

// Example grammar for matching timestamps

const Hour = /[0-1][0-9]|2[0-4]/;
const Minute = /[0-5][0-9]/;
const Second = /[0-5][0-9]|60/;
const Fraction = /[.,][0-9]+/;
const IsoTz = or ([ 'Z', [/[+-]/, Hour, O([O(':'), Minute])] ]);
const TzL = /[A-Z]/;
const TzAbbr = [TzL, TzL, O([TzL, O([TzL, O(TzL)])])];
const TZ = {IsoTz, TzAbbr};
const HM = [Hour, ':', Minute, O(Fraction)];
const HMS = [Hour, ':', Minute, ':', Second, O(Fraction)];
const Time = [O(/T ?/), {HMS, HM}, O([/ ?/, TZ])];

const Year = /[0-9][0-9][0-9][0-9]/;
const Month = /0[1-9]|1[0-2]/;
const Day = /0[1-9]|[1-2][0-9]|3[0-1]/;
const Date = [Year, '-', Month, O(['-', Day])];

const DateTime = [Date, / ?/, Time];

const MonthAbbr = /Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Sept|Oct|Nov|Dec/;
const WeekDayAbbr = /Mon|Tu|Tue|Tues|Wed|Th|Thu|Thur|Thurs|Fri|Sat|Sun/;
const FreeDateTime = [WeekDayAbbr, ' ', MonthAbbr, ' ', Day, ' ', Time, ' ', Year];

const Timestamp = {DateTime, FreeDateTime};

validate ( '2009-09-22T06:59:28', Timestamp ); // => true
validate ( '2009-09-22 06:59:28', Timestamp ); // => true
validate ( 'Fri Jun 17 03:50:56 PDT 2011', Timestamp ); // => true
validate ( '2010-10-26 10:00:53.360', Timestamp ); // => true

validate ( '2009--09-22T06:59:28', Timestamp ); // => false
validate ( '2009-09-22Z06:59:28', Timestamp ); // => false
validate ( '2009-09-22T06.59:28', Timestamp ); // => false

validate ( '2009-09-22 06:59:280', Timestamp ); // => false
validate ( '2009-09-22 06:590:28', Timestamp ); // => false
validate ( '2009-09-22 060:59:28', Timestamp ); // => false

validate ( 'Fri Jun 170 03:50:56 PDT 2011', Timestamp ); // => false
validate ( 'Fri Juns 17 03:50:56 PDT 2011', Timestamp ); // => false
validate ( 'Friz Jun 17 03:50:56 PDT 2011', Timestamp ); // => false
```

## License

MIT © Fabio Spampinato