Add comprehensive development roadmap via GitHub Issues

Created 10 detailed GitHub issues covering:
- Project activation and management UI (#1-2)
- Worker node coordination and visualization (#3-4)
- Automated GitHub repository scanning (#5)
- Intelligent model-to-issue matching (#6)
- Multi-model task execution system (#7)
- N8N workflow integration (#8)
- Hive-Bzzz P2P bridge (#9)
- Peer assistance protocol (#10)

Each issue includes detailed specifications, acceptance criteria,
technical implementation notes, and dependency mapping.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
anthonyrawlins
2025-07-12 19:41:01 +10:00
parent 9a6a06da89
commit e89f2f4b7b
4980 changed files with 1501266 additions and 57 deletions

158 frontend/node_modules/@vitest/utils/dist/chunk-_commonjsHelpers.js generated vendored Normal file

@@ -0,0 +1,158 @@
import { plugins, format as format$1 } from '@vitest/pretty-format';
import * as loupe from 'loupe';
const { AsymmetricMatcher, DOMCollection, DOMElement, Immutable, ReactElement, ReactTestComponent } = plugins;
const PLUGINS = [
ReactTestComponent,
ReactElement,
DOMElement,
DOMCollection,
Immutable,
AsymmetricMatcher
];
function stringify(object, maxDepth = 10, { maxLength,...options } = {}) {
const MAX_LENGTH = maxLength ?? 1e4;
let result;
try {
result = format$1(object, {
maxDepth,
escapeString: false,
plugins: PLUGINS,
...options
});
} catch {
result = format$1(object, {
callToJSON: false,
maxDepth,
escapeString: false,
plugins: PLUGINS,
...options
});
}
// Prevents infinite loop https://github.com/vitest-dev/vitest/issues/7249
return result.length >= MAX_LENGTH && maxDepth > 1 ? stringify(object, Math.floor(Math.min(maxDepth, Number.MAX_SAFE_INTEGER) / 2), {
maxLength,
...options
}) : result;
}
const formatRegExp = /%[sdjifoOc%]/g;
function format(...args) {
if (typeof args[0] !== "string") {
const objects = [];
for (let i = 0; i < args.length; i++) {
objects.push(inspect(args[i], {
depth: 0,
colors: false
}));
}
return objects.join(" ");
}
const len = args.length;
let i = 1;
const template = args[0];
let str = String(template).replace(formatRegExp, (x) => {
if (x === "%%") {
return "%";
}
if (i >= len) {
return x;
}
switch (x) {
case "%s": {
const value = args[i++];
if (typeof value === "bigint") {
return `${value.toString()}n`;
}
if (typeof value === "number" && value === 0 && 1 / value < 0) {
return "-0";
}
if (typeof value === "object" && value !== null) {
if (typeof value.toString === "function" && value.toString !== Object.prototype.toString) {
return value.toString();
}
return inspect(value, {
depth: 0,
colors: false
});
}
return String(value);
}
case "%d": {
const value = args[i++];
if (typeof value === "bigint") {
return `${value.toString()}n`;
}
return Number(value).toString();
}
case "%i": {
const value = args[i++];
if (typeof value === "bigint") {
return `${value.toString()}n`;
}
return Number.parseInt(String(value)).toString();
}
case "%f": return Number.parseFloat(String(args[i++])).toString();
case "%o": return inspect(args[i++], {
showHidden: true,
showProxy: true
});
case "%O": return inspect(args[i++]);
case "%c": {
i++;
return "";
}
case "%j": try {
return JSON.stringify(args[i++]);
} catch (err) {
const m = err.message;
if (m.includes("circular structure") || m.includes("cyclic structures") || m.includes("cyclic object")) {
return "[Circular]";
}
throw err;
}
default: return x;
}
});
for (let x = args[i]; i < len; x = args[++i]) {
if (x === null || typeof x !== "object") {
str += ` ${x}`;
} else {
str += ` ${inspect(x)}`;
}
}
return str;
}
function inspect(obj, options = {}) {
if (options.truncate === 0) {
options.truncate = Number.POSITIVE_INFINITY;
}
return loupe.inspect(obj, options);
}
function objDisplay(obj, options = {}) {
if (typeof options.truncate === "undefined") {
options.truncate = 40;
}
const str = inspect(obj, options);
const type = Object.prototype.toString.call(obj);
if (options.truncate && str.length >= options.truncate) {
if (type === "[object Function]") {
const fn = obj;
return !fn.name ? "[Function]" : `[Function: ${fn.name}]`;
} else if (type === "[object Array]") {
return `[ Array(${obj.length}) ]`;
} else if (type === "[object Object]") {
const keys = Object.keys(obj);
const kstr = keys.length > 2 ? `${keys.splice(0, 2).join(", ")}, ...` : keys.join(", ");
return `{ Object (${kstr}) }`;
} else {
return str;
}
}
return str;
}
function getDefaultExportFromCjs (x) {
return x && x.__esModule && Object.prototype.hasOwnProperty.call(x, 'default') ? x['default'] : x;
}
export { format as f, getDefaultExportFromCjs as g, inspect as i, objDisplay as o, stringify as s };

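The chunk above supplies the printf-style format and the depth-limited stringify that the package index re-exports (see index.d.ts further down in this diff). A minimal usage sketch, not part of the commit, assuming the public @vitest/utils entry point is used; the comments describe expected behaviour read from the code above, not captured output:

import { format, stringify } from '@vitest/utils';

// %s / %d / %j follow util.format-style rules; bigints gain an "n" suffix
format('%s scored %d points', 'alice', 42);  // "alice scored 42 points"
format('%j', { a: 1 });                      // '{"a":1}'
format('%d items', 10n);                     // "10n items"

// stringify pretty-prints with a depth limit and retries with a halved
// maxDepth whenever the result exceeds maxLength (default 10000 chars)
stringify({ nested: { deep: { value: 1 } } }, 2);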
104 frontend/node_modules/@vitest/utils/dist/diff.d.ts generated vendored Normal file

@@ -0,0 +1,104 @@
import { D as DiffOptions } from './types.d-BCElaP-c.js';
export { a as DiffOptionsColor, S as SerializedDiffOptions } from './types.d-BCElaP-c.js';
import '@vitest/pretty-format';
/**
* Diff Match and Patch
* Copyright 2018 The diff-match-patch Authors.
* https://github.com/google/diff-match-patch
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @fileoverview Computes the difference between two texts to create a patch.
* Applies the patch onto another text, allowing for errors.
* @author fraser@google.com (Neil Fraser)
*/
/**
* CHANGES by pedrottimark to diff_match_patch_uncompressed.ts file:
*
* 1. Delete anything not needed to use diff_cleanupSemantic method
* 2. Convert from prototype properties to var declarations
* 3. Convert Diff to class from constructor and prototype
* 4. Add type annotations for arguments and return values
* 5. Add exports
*/
/**
* The data structure representing a diff is an array of tuples:
* [[DIFF_DELETE, 'Hello'], [DIFF_INSERT, 'Goodbye'], [DIFF_EQUAL, ' world.']]
* which means: delete 'Hello', add 'Goodbye' and keep ' world.'
*/
declare const DIFF_DELETE = -1;
declare const DIFF_INSERT = 1;
declare const DIFF_EQUAL = 0;
/**
* Class representing one diff tuple.
* Attempts to look like a two-element array (which is what this used to be).
* @param {number} op Operation, one of: DIFF_DELETE, DIFF_INSERT, DIFF_EQUAL.
* @param {string} text Text to be deleted, inserted, or retained.
* @constructor
*/
declare class Diff {
0: number;
1: string;
constructor(op: number, text: string);
}
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
// Compare two arrays of strings line-by-line. Format as comparison lines.
declare function diffLinesUnified(aLines: Array<string>, bLines: Array<string>, options?: DiffOptions): string;
// Given two pairs of arrays of strings:
// Compare the pair of comparison arrays line-by-line.
// Format the corresponding lines in the pair of displayable arrays.
declare function diffLinesUnified2(aLinesDisplay: Array<string>, bLinesDisplay: Array<string>, aLinesCompare: Array<string>, bLinesCompare: Array<string>, options?: DiffOptions): string;
// Compare two arrays of strings line-by-line.
declare function diffLinesRaw(aLines: Array<string>, bLines: Array<string>, options?: DiffOptions): [Array<Diff>, boolean];
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
// Compare two strings character-by-character.
// Format as comparison lines in which changed substrings have inverse colors.
declare function diffStringsUnified(a: string, b: string, options?: DiffOptions): string;
// Compare two strings character-by-character.
// Optionally clean up small common substrings, also known as chaff.
declare function diffStringsRaw(a: string, b: string, cleanup: boolean, options?: DiffOptions): [Array<Diff>, boolean];
// Generate a string that will highlight the difference between two values
// with green and red. (similar to how github does code diffing)
/**
* @param a Expected value
* @param b Received value
* @param options Diff options
* @returns {string | null} a string diff
*/
declare function diff(a: any, b: any, options?: DiffOptions): string | undefined;
declare function printDiffOrStringify(received: unknown, expected: unknown, options?: DiffOptions): string | undefined;
declare function replaceAsymmetricMatcher(actual: any, expected: any, actualReplaced?: WeakSet<WeakKey>, expectedReplaced?: WeakSet<WeakKey>): {
replacedActual: any
replacedExpected: any
};
type PrintLabel = (string: string) => string;
declare function getLabelPrinter(...strings: Array<string>): PrintLabel;
export { DIFF_DELETE, DIFF_EQUAL, DIFF_INSERT, Diff, DiffOptions, diff, diffLinesRaw, diffLinesUnified, diffLinesUnified2, diffStringsRaw, diffStringsUnified, getLabelPrinter, printDiffOrStringify, replaceAsymmetricMatcher };

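The declarations above cover the diff helpers (adapted from jest-diff / diff-match-patch, per the headers above). A short sketch of the main entry point, assuming the package's exports map resolves '@vitest/utils/diff' to this dist file (the exports map itself is not part of this diff):

import { diff, diffStringsUnified } from '@vitest/utils/diff';

// Unified diff of expected (a) vs received (b); undefined when there is
// nothing meaningful to diff
const report = diff({ a: 1, b: 2 }, { a: 1, b: 3 });
console.log(report);

// Character-level comparison with changed substrings highlighted
console.log(diffStringsUnified('hello world', 'hello there'));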
2185 frontend/node_modules/@vitest/utils/dist/diff.js generated vendored Normal file

File diff suppressed because it is too large

9 frontend/node_modules/@vitest/utils/dist/error.d.ts generated vendored Normal file

@@ -0,0 +1,9 @@
import { D as DiffOptions } from './types.d-BCElaP-c.js';
import '@vitest/pretty-format';
// https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
declare function serializeValue(val: any, seen?: WeakMap<WeakKey, any>): any;
declare function processError(_err: any, diffOptions?: DiffOptions, seen?: WeakSet<WeakKey>): any;
export { processError, serializeValue as serializeError, serializeValue };

162 frontend/node_modules/@vitest/utils/dist/error.js generated vendored Normal file

@@ -0,0 +1,162 @@
import { printDiffOrStringify } from './diff.js';
import { f as format, s as stringify } from './chunk-_commonjsHelpers.js';
import '@vitest/pretty-format';
import 'tinyrainbow';
import './helpers.js';
import 'loupe';
const IS_RECORD_SYMBOL = "@@__IMMUTABLE_RECORD__@@";
const IS_COLLECTION_SYMBOL = "@@__IMMUTABLE_ITERABLE__@@";
function isImmutable(v) {
return v && (v[IS_COLLECTION_SYMBOL] || v[IS_RECORD_SYMBOL]);
}
const OBJECT_PROTO = Object.getPrototypeOf({});
function getUnserializableMessage(err) {
if (err instanceof Error) {
return `<unserializable>: ${err.message}`;
}
if (typeof err === "string") {
return `<unserializable>: ${err}`;
}
return "<unserializable>";
}
// https://developer.mozilla.org/en-US/docs/Web/API/Web_Workers_API/Structured_clone_algorithm
function serializeValue(val, seen = new WeakMap()) {
if (!val || typeof val === "string") {
return val;
}
if (val instanceof Error && "toJSON" in val && typeof val.toJSON === "function") {
const jsonValue = val.toJSON();
if (jsonValue && jsonValue !== val && typeof jsonValue === "object") {
if (typeof val.message === "string") {
safe(() => jsonValue.message ?? (jsonValue.message = val.message));
}
if (typeof val.stack === "string") {
safe(() => jsonValue.stack ?? (jsonValue.stack = val.stack));
}
if (typeof val.name === "string") {
safe(() => jsonValue.name ?? (jsonValue.name = val.name));
}
if (val.cause != null) {
safe(() => jsonValue.cause ?? (jsonValue.cause = serializeValue(val.cause, seen)));
}
}
return serializeValue(jsonValue, seen);
}
if (typeof val === "function") {
return `Function<${val.name || "anonymous"}>`;
}
if (typeof val === "symbol") {
return val.toString();
}
if (typeof val !== "object") {
return val;
}
if (typeof Buffer !== "undefined" && val instanceof Buffer) {
return `<Buffer(${val.length}) ...>`;
}
if (typeof Uint8Array !== "undefined" && val instanceof Uint8Array) {
return `<Uint8Array(${val.length}) ...>`;
}
// cannot serialize immutables as immutables
if (isImmutable(val)) {
return serializeValue(val.toJSON(), seen);
}
if (val instanceof Promise || val.constructor && val.constructor.prototype === "AsyncFunction") {
return "Promise";
}
if (typeof Element !== "undefined" && val instanceof Element) {
return val.tagName;
}
if (typeof val.asymmetricMatch === "function") {
return `${val.toString()} ${format(val.sample)}`;
}
if (typeof val.toJSON === "function") {
return serializeValue(val.toJSON(), seen);
}
if (seen.has(val)) {
return seen.get(val);
}
if (Array.isArray(val)) {
// eslint-disable-next-line unicorn/no-new-array -- we need to keep sparse arrays ([1,,3])
const clone = new Array(val.length);
seen.set(val, clone);
val.forEach((e, i) => {
try {
clone[i] = serializeValue(e, seen);
} catch (err) {
clone[i] = getUnserializableMessage(err);
}
});
return clone;
} else {
// Objects with `Error` constructors appear to cause problems during worker communication
// using `MessagePort`, so the serialized error object is being recreated as plain object.
const clone = Object.create(null);
seen.set(val, clone);
let obj = val;
while (obj && obj !== OBJECT_PROTO) {
Object.getOwnPropertyNames(obj).forEach((key) => {
if (key in clone) {
return;
}
try {
clone[key] = serializeValue(val[key], seen);
} catch (err) {
// delete in case it has a setter from prototype that might throw
delete clone[key];
clone[key] = getUnserializableMessage(err);
}
});
obj = Object.getPrototypeOf(obj);
}
return clone;
}
}
function safe(fn) {
try {
return fn();
} catch {}
}
function normalizeErrorMessage(message) {
return message.replace(/__(vite_ssr_import|vi_import)_\d+__\./g, "");
}
function processError(_err, diffOptions, seen = new WeakSet()) {
if (!_err || typeof _err !== "object") {
return { message: String(_err) };
}
const err = _err;
if (err.showDiff || err.showDiff === undefined && err.expected !== undefined && err.actual !== undefined) {
err.diff = printDiffOrStringify(err.actual, err.expected, {
...diffOptions,
...err.diffOptions
});
}
if ("expected" in err && typeof err.expected !== "string") {
err.expected = stringify(err.expected, 10);
}
if ("actual" in err && typeof err.actual !== "string") {
err.actual = stringify(err.actual, 10);
}
// some Error implementations don't allow rewriting message
try {
if (typeof err.message === "string") {
err.message = normalizeErrorMessage(err.message);
}
} catch {}
// some Error implementations may not allow rewriting cause
// in most cases, the assignment will lead to "err.cause = err.cause"
try {
if (!seen.has(err) && typeof err.cause === "object") {
seen.add(err);
err.cause = processError(err.cause, diffOptions, seen);
}
} catch {}
try {
return serializeValue(err);
} catch (e) {
return serializeValue(new Error(`Failed to fully serialize error: ${e === null || e === void 0 ? void 0 : e.message}\nInner error message: ${err === null || err === void 0 ? void 0 : err.message}`));
}
}
export { processError, serializeValue as serializeError, serializeValue };

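error.js above turns arbitrary thrown values into structured-clone-safe plain objects. A sketch of how it might be called, assuming '@vitest/utils/error' resolves to this file (the subpath mapping is not shown in the diff):

import { processError, serializeError } from '@vitest/utils/error';

// Circular references are tracked in a WeakMap, so this does not recurse forever
const err = new Error('boom');
err.self = err;
serializeError(err);   // plain object with message, stack, name and a circular-safe self

// When actual/expected are present, processError also attaches a rendered diff
const assertion = Object.assign(new Error('mismatch'), { actual: 1, expected: 2 });
processError(assertion);   // adds .diff and stringifies .actual / .expected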
56 frontend/node_modules/@vitest/utils/dist/helpers.d.ts generated vendored Normal file

@@ -0,0 +1,56 @@
import { Nullable, Arrayable } from './types.js';
interface CloneOptions {
forceWritable?: boolean;
}
interface ErrorOptions {
message?: string;
stackTraceLimit?: number;
}
/**
* Get original stacktrace without source map support the most performant way.
* - Create only 1 stack frame.
* - Rewrite prepareStackTrace to bypass "support-stack-trace" (usually takes ~250ms).
*/
declare function createSimpleStackTrace(options?: ErrorOptions): string;
declare function notNullish<T>(v: T | null | undefined): v is NonNullable<T>;
declare function assertTypes(value: unknown, name: string, types: string[]): void;
declare function isPrimitive(value: unknown): boolean;
declare function slash(path: string): string;
// convert RegExp.toString to RegExp
declare function parseRegexp(input: string): RegExp;
declare function toArray<T>(array?: Nullable<Arrayable<T>>): Array<T>;
declare function isObject(item: unknown): boolean;
declare function getType(value: unknown): string;
declare function getOwnProperties(obj: any): (string | symbol)[];
declare function deepClone<T>(val: T, options?: CloneOptions): T;
declare function clone<T>(val: T, seen: WeakMap<any, any>, options?: CloneOptions): T;
declare function noop(): void;
declare function objectAttr(source: any, path: string, defaultValue?: undefined): any;
type DeferPromise<T> = Promise<T> & {
resolve: (value: T | PromiseLike<T>) => void
reject: (reason?: any) => void
};
declare function createDefer<T>(): DeferPromise<T>;
/**
* If code starts with a function call, will return its last index, respecting arguments.
* This will return 25 - last ending character of toMatch ")"
* Also works with callbacks
* ```
* toMatch({ test: '123' });
* toBeAliased('123')
* ```
*/
declare function getCallLastIndex(code: string): number | null;
declare function isNegativeNaN(val: number): boolean;
/**
* Deep merge :P
*
* Will merge objects only if they are plain
*
* Do not merge types - it is very expensive and usually it's better to case a type here
*/
declare function deepMerge<T extends object = object>(target: T, ...sources: any[]): T;
export { assertTypes, clone, createDefer, createSimpleStackTrace, deepClone, deepMerge, getCallLastIndex, getOwnProperties, getType, isNegativeNaN, isObject, isPrimitive, noop, notNullish, objectAttr, parseRegexp, slash, toArray };
export type { DeferPromise };

251 frontend/node_modules/@vitest/utils/dist/helpers.js generated vendored Normal file

@@ -0,0 +1,251 @@
/**
* Get original stacktrace without source map support the most performant way.
* - Create only 1 stack frame.
* - Rewrite prepareStackTrace to bypass "support-stack-trace" (usually takes ~250ms).
*/
function createSimpleStackTrace(options) {
const { message = "$$stack trace error", stackTraceLimit = 1 } = options || {};
const limit = Error.stackTraceLimit;
const prepareStackTrace = Error.prepareStackTrace;
Error.stackTraceLimit = stackTraceLimit;
Error.prepareStackTrace = (e) => e.stack;
const err = new Error(message);
const stackTrace = err.stack || "";
Error.prepareStackTrace = prepareStackTrace;
Error.stackTraceLimit = limit;
return stackTrace;
}
function notNullish(v) {
return v != null;
}
function assertTypes(value, name, types) {
const receivedType = typeof value;
const pass = types.includes(receivedType);
if (!pass) {
throw new TypeError(`${name} value must be ${types.join(" or ")}, received "${receivedType}"`);
}
}
function isPrimitive(value) {
return value === null || typeof value !== "function" && typeof value !== "object";
}
function slash(path) {
return path.replace(/\\/g, "/");
}
// convert RegExp.toString to RegExp
function parseRegexp(input) {
// Parse input
// eslint-disable-next-line regexp/no-misleading-capturing-group
const m = input.match(/(\/?)(.+)\1([a-z]*)/i);
// match nothing
if (!m) {
return /$^/;
}
// Invalid flags
// eslint-disable-next-line regexp/optimal-quantifier-concatenation
if (m[3] && !/^(?!.*?(.).*?\1)[gmixXsuUAJ]+$/.test(m[3])) {
return new RegExp(input);
}
// Create the regular expression
return new RegExp(m[2], m[3]);
}
function toArray(array) {
if (array === null || array === undefined) {
array = [];
}
if (Array.isArray(array)) {
return array;
}
return [array];
}
function isObject(item) {
return item != null && typeof item === "object" && !Array.isArray(item);
}
function isFinalObj(obj) {
return obj === Object.prototype || obj === Function.prototype || obj === RegExp.prototype;
}
function getType(value) {
return Object.prototype.toString.apply(value).slice(8, -1);
}
function collectOwnProperties(obj, collector) {
const collect = typeof collector === "function" ? collector : (key) => collector.add(key);
Object.getOwnPropertyNames(obj).forEach(collect);
Object.getOwnPropertySymbols(obj).forEach(collect);
}
function getOwnProperties(obj) {
const ownProps = new Set();
if (isFinalObj(obj)) {
return [];
}
collectOwnProperties(obj, ownProps);
return Array.from(ownProps);
}
const defaultCloneOptions = { forceWritable: false };
function deepClone(val, options = defaultCloneOptions) {
const seen = new WeakMap();
return clone(val, seen, options);
}
function clone(val, seen, options = defaultCloneOptions) {
let k, out;
if (seen.has(val)) {
return seen.get(val);
}
if (Array.isArray(val)) {
out = Array.from({ length: k = val.length });
seen.set(val, out);
while (k--) {
out[k] = clone(val[k], seen, options);
}
return out;
}
if (Object.prototype.toString.call(val) === "[object Object]") {
out = Object.create(Object.getPrototypeOf(val));
seen.set(val, out);
// we don't need properties from prototype
const props = getOwnProperties(val);
for (const k of props) {
const descriptor = Object.getOwnPropertyDescriptor(val, k);
if (!descriptor) {
continue;
}
const cloned = clone(val[k], seen, options);
if (options.forceWritable) {
Object.defineProperty(out, k, {
enumerable: descriptor.enumerable,
configurable: true,
writable: true,
value: cloned
});
} else if ("get" in descriptor) {
Object.defineProperty(out, k, {
...descriptor,
get() {
return cloned;
}
});
} else {
Object.defineProperty(out, k, {
...descriptor,
value: cloned
});
}
}
return out;
}
return val;
}
function noop() {}
function objectAttr(source, path, defaultValue = undefined) {
// a[3].b -> a.3.b
const paths = path.replace(/\[(\d+)\]/g, ".$1").split(".");
let result = source;
for (const p of paths) {
result = new Object(result)[p];
if (result === undefined) {
return defaultValue;
}
}
return result;
}
function createDefer() {
let resolve = null;
let reject = null;
const p = new Promise((_resolve, _reject) => {
resolve = _resolve;
reject = _reject;
});
p.resolve = resolve;
p.reject = reject;
return p;
}
/**
* If code starts with a function call, will return its last index, respecting arguments.
* This will return 25 - last ending character of toMatch ")"
* Also works with callbacks
* ```
* toMatch({ test: '123' });
* toBeAliased('123')
* ```
*/
function getCallLastIndex(code) {
let charIndex = -1;
let inString = null;
let startedBracers = 0;
let endedBracers = 0;
let beforeChar = null;
while (charIndex <= code.length) {
beforeChar = code[charIndex];
charIndex++;
const char = code[charIndex];
const isCharString = char === "\"" || char === "'" || char === "`";
if (isCharString && beforeChar !== "\\") {
if (inString === char) {
inString = null;
} else if (!inString) {
inString = char;
}
}
if (!inString) {
if (char === "(") {
startedBracers++;
}
if (char === ")") {
endedBracers++;
}
}
if (startedBracers && endedBracers && startedBracers === endedBracers) {
return charIndex;
}
}
return null;
}
function isNegativeNaN(val) {
if (!Number.isNaN(val)) {
return false;
}
const f64 = new Float64Array(1);
f64[0] = val;
const u32 = new Uint32Array(f64.buffer);
const isNegative = u32[1] >>> 31 === 1;
return isNegative;
}
function toString(v) {
return Object.prototype.toString.call(v);
}
function isPlainObject(val) {
return toString(val) === "[object Object]" && (!val.constructor || val.constructor.name === "Object");
}
function isMergeableObject(item) {
return isPlainObject(item) && !Array.isArray(item);
}
/**
* Deep merge :P
*
* Will merge objects only if they are plain
*
* Do not merge types - it is very expensive and usually it's better to case a type here
*/
function deepMerge(target, ...sources) {
if (!sources.length) {
return target;
}
const source = sources.shift();
if (source === undefined) {
return target;
}
if (isMergeableObject(target) && isMergeableObject(source)) {
Object.keys(source).forEach((key) => {
const _source = source;
if (isMergeableObject(_source[key])) {
if (!target[key]) {
target[key] = {};
}
deepMerge(target[key], _source[key]);
} else {
target[key] = _source[key];
}
});
}
return deepMerge(target, ...sources);
}
export { assertTypes, clone, createDefer, createSimpleStackTrace, deepClone, deepMerge, getCallLastIndex, getOwnProperties, getType, isNegativeNaN, isObject, isPrimitive, noop, notNullish, objectAttr, parseRegexp, slash, toArray };

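helpers.js above is re-exported wholesale from the package index (see index.d.ts next). A small sketch of the merge/clone/defer utilities, written as an ES module so top-level await is available:

import { deepMerge, deepClone, createDefer, toArray } from '@vitest/utils';

// deepMerge only merges plain objects; anything else is assigned as-is
deepMerge({ a: { x: 1 } }, { a: { y: 2 }, b: 3 });   // { a: { x: 1, y: 2 }, b: 3 }

// deepClone walks own properties; getters on the source become getters
// returning the value cloned at copy time
const copy = deepClone({ list: [1, 2, 3] });

// createDefer returns a Promise with resolve/reject attached to it
const ready = createDefer();
setTimeout(() => ready.resolve('done'), 10);
await ready;

toArray(null);   // []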
57 frontend/node_modules/@vitest/utils/dist/index.d.ts generated vendored Normal file

@@ -0,0 +1,57 @@
import { PrettyFormatOptions } from '@vitest/pretty-format';
export { DeferPromise, assertTypes, clone, createDefer, createSimpleStackTrace, deepClone, deepMerge, getCallLastIndex, getOwnProperties, getType, isNegativeNaN, isObject, isPrimitive, noop, notNullish, objectAttr, parseRegexp, slash, toArray } from './helpers.js';
import { Colors } from 'tinyrainbow';
export { ArgumentsType, Arrayable, Awaitable, Constructable, DeepMerge, ErrorWithDiff, MergeInsertions, MutableArray, Nullable, ParsedStack, SerializedError, TestError } from './types.js';
type Inspect = (value: unknown, options: Options) => string;
interface Options {
showHidden: boolean;
depth: number;
colors: boolean;
customInspect: boolean;
showProxy: boolean;
maxArrayLength: number;
breakLength: number;
truncate: number;
seen: unknown[];
inspect: Inspect;
stylize: (value: string, styleType: string) => string;
}
type LoupeOptions = Partial<Options>;
interface StringifyOptions extends PrettyFormatOptions {
maxLength?: number;
}
declare function stringify(object: unknown, maxDepth?: number, { maxLength,...options }?: StringifyOptions): string;
declare function format(...args: unknown[]): string;
declare function inspect(obj: unknown, options?: LoupeOptions): string;
declare function objDisplay(obj: unknown, options?: LoupeOptions): string;
interface HighlightOptions {
jsx?: boolean;
colors?: Colors;
}
declare function highlight(code: string, options?: HighlightOptions): string;
declare function nanoid(size?: number): string;
declare const lineSplitRE: RegExp;
declare function positionToOffset(source: string, lineNumber: number, columnNumber: number): number;
declare function offsetToLineNumber(source: string, offset: number): number;
declare function shuffle<T>(array: T[], seed?: number): T[];
interface SafeTimers {
nextTick: (cb: () => void) => void;
setTimeout: typeof setTimeout;
setInterval: typeof setInterval;
clearInterval: typeof clearInterval;
clearTimeout: typeof clearTimeout;
setImmediate: typeof setImmediate;
clearImmediate: typeof clearImmediate;
queueMicrotask: typeof queueMicrotask;
}
declare function getSafeTimers(): SafeTimers;
declare function setSafeTimers(): void;
export { format, getSafeTimers, highlight, inspect, lineSplitRE, nanoid, objDisplay, offsetToLineNumber, positionToOffset, setSafeTimers, shuffle, stringify };
export type { LoupeOptions, SafeTimers, StringifyOptions };

633 frontend/node_modules/@vitest/utils/dist/index.js generated vendored Normal file

@@ -0,0 +1,633 @@
import { g as getDefaultExportFromCjs } from './chunk-_commonjsHelpers.js';
export { f as format, i as inspect, o as objDisplay, s as stringify } from './chunk-_commonjsHelpers.js';
export { assertTypes, clone, createDefer, createSimpleStackTrace, deepClone, deepMerge, getCallLastIndex, getOwnProperties, getType, isNegativeNaN, isObject, isPrimitive, noop, notNullish, objectAttr, parseRegexp, slash, toArray } from './helpers.js';
import c from 'tinyrainbow';
import '@vitest/pretty-format';
import 'loupe';
var jsTokens_1;
var hasRequiredJsTokens;
function requireJsTokens () {
if (hasRequiredJsTokens) return jsTokens_1;
hasRequiredJsTokens = 1;
// Copyright 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, 2022, 2023 Simon Lydell
// License: MIT.
var Identifier, JSXIdentifier, JSXPunctuator, JSXString, JSXText, KeywordsWithExpressionAfter, KeywordsWithNoLineTerminatorAfter, LineTerminatorSequence, MultiLineComment, Newline, NumericLiteral, Punctuator, RegularExpressionLiteral, SingleLineComment, StringLiteral, Template, TokensNotPrecedingObjectLiteral, TokensPrecedingExpression, WhiteSpace;
RegularExpressionLiteral = /\/(?![*\/])(?:\[(?:(?![\]\\]).|\\.)*\]|(?![\/\\]).|\\.)*(\/[$_\u200C\u200D\p{ID_Continue}]*|\\)?/yu;
Punctuator = /--|\+\+|=>|\.{3}|\??\.(?!\d)|(?:&&|\|\||\?\?|[+\-%&|^]|\*{1,2}|<{1,2}|>{1,3}|!=?|={1,2}|\/(?![\/*]))=?|[?~,:;[\](){}]/y;
Identifier = /(\x23?)(?=[$_\p{ID_Start}\\])(?:[$_\u200C\u200D\p{ID_Continue}]|\\u[\da-fA-F]{4}|\\u\{[\da-fA-F]+\})+/yu;
StringLiteral = /(['"])(?:(?!\1)[^\\\n\r]|\\(?:\r\n|[^]))*(\1)?/y;
NumericLiteral = /(?:0[xX][\da-fA-F](?:_?[\da-fA-F])*|0[oO][0-7](?:_?[0-7])*|0[bB][01](?:_?[01])*)n?|0n|[1-9](?:_?\d)*n|(?:(?:0(?!\d)|0\d*[89]\d*|[1-9](?:_?\d)*)(?:\.(?:\d(?:_?\d)*)?)?|\.\d(?:_?\d)*)(?:[eE][+-]?\d(?:_?\d)*)?|0[0-7]+/y;
Template = /[`}](?:[^`\\$]|\\[^]|\$(?!\{))*(`|\$\{)?/y;
WhiteSpace = /[\t\v\f\ufeff\p{Zs}]+/yu;
LineTerminatorSequence = /\r?\n|[\r\u2028\u2029]/y;
MultiLineComment = /\/\*(?:[^*]|\*(?!\/))*(\*\/)?/y;
SingleLineComment = /\/\/.*/y;
JSXPunctuator = /[<>.:={}]|\/(?![\/*])/y;
JSXIdentifier = /[$_\p{ID_Start}][$_\u200C\u200D\p{ID_Continue}-]*/yu;
JSXString = /(['"])(?:(?!\1)[^])*(\1)?/y;
JSXText = /[^<>{}]+/y;
TokensPrecedingExpression = /^(?:[\/+-]|\.{3}|\?(?:InterpolationIn(?:JSX|Template)|NoLineTerminatorHere|NonExpressionParenEnd|UnaryIncDec))?$|[{}([,;<>=*%&|^!~?:]$/;
TokensNotPrecedingObjectLiteral = /^(?:=>|[;\]){}]|else|\?(?:NoLineTerminatorHere|NonExpressionParenEnd))?$/;
KeywordsWithExpressionAfter = /^(?:await|case|default|delete|do|else|instanceof|new|return|throw|typeof|void|yield)$/;
KeywordsWithNoLineTerminatorAfter = /^(?:return|throw|yield)$/;
Newline = RegExp(LineTerminatorSequence.source);
jsTokens_1 = function*(input, {jsx = false} = {}) {
var braces, firstCodePoint, isExpression, lastIndex, lastSignificantToken, length, match, mode, nextLastIndex, nextLastSignificantToken, parenNesting, postfixIncDec, punctuator, stack;
({length} = input);
lastIndex = 0;
lastSignificantToken = "";
stack = [
{tag: "JS"}
];
braces = [];
parenNesting = 0;
postfixIncDec = false;
while (lastIndex < length) {
mode = stack[stack.length - 1];
switch (mode.tag) {
case "JS":
case "JSNonExpressionParen":
case "InterpolationInTemplate":
case "InterpolationInJSX":
if (input[lastIndex] === "/" && (TokensPrecedingExpression.test(lastSignificantToken) || KeywordsWithExpressionAfter.test(lastSignificantToken))) {
RegularExpressionLiteral.lastIndex = lastIndex;
if (match = RegularExpressionLiteral.exec(input)) {
lastIndex = RegularExpressionLiteral.lastIndex;
lastSignificantToken = match[0];
postfixIncDec = true;
yield ({
type: "RegularExpressionLiteral",
value: match[0],
closed: match[1] !== void 0 && match[1] !== "\\"
});
continue;
}
}
Punctuator.lastIndex = lastIndex;
if (match = Punctuator.exec(input)) {
punctuator = match[0];
nextLastIndex = Punctuator.lastIndex;
nextLastSignificantToken = punctuator;
switch (punctuator) {
case "(":
if (lastSignificantToken === "?NonExpressionParenKeyword") {
stack.push({
tag: "JSNonExpressionParen",
nesting: parenNesting
});
}
parenNesting++;
postfixIncDec = false;
break;
case ")":
parenNesting--;
postfixIncDec = true;
if (mode.tag === "JSNonExpressionParen" && parenNesting === mode.nesting) {
stack.pop();
nextLastSignificantToken = "?NonExpressionParenEnd";
postfixIncDec = false;
}
break;
case "{":
Punctuator.lastIndex = 0;
isExpression = !TokensNotPrecedingObjectLiteral.test(lastSignificantToken) && (TokensPrecedingExpression.test(lastSignificantToken) || KeywordsWithExpressionAfter.test(lastSignificantToken));
braces.push(isExpression);
postfixIncDec = false;
break;
case "}":
switch (mode.tag) {
case "InterpolationInTemplate":
if (braces.length === mode.nesting) {
Template.lastIndex = lastIndex;
match = Template.exec(input);
lastIndex = Template.lastIndex;
lastSignificantToken = match[0];
if (match[1] === "${") {
lastSignificantToken = "?InterpolationInTemplate";
postfixIncDec = false;
yield ({
type: "TemplateMiddle",
value: match[0]
});
} else {
stack.pop();
postfixIncDec = true;
yield ({
type: "TemplateTail",
value: match[0],
closed: match[1] === "`"
});
}
continue;
}
break;
case "InterpolationInJSX":
if (braces.length === mode.nesting) {
stack.pop();
lastIndex += 1;
lastSignificantToken = "}";
yield ({
type: "JSXPunctuator",
value: "}"
});
continue;
}
}
postfixIncDec = braces.pop();
nextLastSignificantToken = postfixIncDec ? "?ExpressionBraceEnd" : "}";
break;
case "]":
postfixIncDec = true;
break;
case "++":
case "--":
nextLastSignificantToken = postfixIncDec ? "?PostfixIncDec" : "?UnaryIncDec";
break;
case "<":
if (jsx && (TokensPrecedingExpression.test(lastSignificantToken) || KeywordsWithExpressionAfter.test(lastSignificantToken))) {
stack.push({tag: "JSXTag"});
lastIndex += 1;
lastSignificantToken = "<";
yield ({
type: "JSXPunctuator",
value: punctuator
});
continue;
}
postfixIncDec = false;
break;
default:
postfixIncDec = false;
}
lastIndex = nextLastIndex;
lastSignificantToken = nextLastSignificantToken;
yield ({
type: "Punctuator",
value: punctuator
});
continue;
}
Identifier.lastIndex = lastIndex;
if (match = Identifier.exec(input)) {
lastIndex = Identifier.lastIndex;
nextLastSignificantToken = match[0];
switch (match[0]) {
case "for":
case "if":
case "while":
case "with":
if (lastSignificantToken !== "." && lastSignificantToken !== "?.") {
nextLastSignificantToken = "?NonExpressionParenKeyword";
}
}
lastSignificantToken = nextLastSignificantToken;
postfixIncDec = !KeywordsWithExpressionAfter.test(match[0]);
yield ({
type: match[1] === "#" ? "PrivateIdentifier" : "IdentifierName",
value: match[0]
});
continue;
}
StringLiteral.lastIndex = lastIndex;
if (match = StringLiteral.exec(input)) {
lastIndex = StringLiteral.lastIndex;
lastSignificantToken = match[0];
postfixIncDec = true;
yield ({
type: "StringLiteral",
value: match[0],
closed: match[2] !== void 0
});
continue;
}
NumericLiteral.lastIndex = lastIndex;
if (match = NumericLiteral.exec(input)) {
lastIndex = NumericLiteral.lastIndex;
lastSignificantToken = match[0];
postfixIncDec = true;
yield ({
type: "NumericLiteral",
value: match[0]
});
continue;
}
Template.lastIndex = lastIndex;
if (match = Template.exec(input)) {
lastIndex = Template.lastIndex;
lastSignificantToken = match[0];
if (match[1] === "${") {
lastSignificantToken = "?InterpolationInTemplate";
stack.push({
tag: "InterpolationInTemplate",
nesting: braces.length
});
postfixIncDec = false;
yield ({
type: "TemplateHead",
value: match[0]
});
} else {
postfixIncDec = true;
yield ({
type: "NoSubstitutionTemplate",
value: match[0],
closed: match[1] === "`"
});
}
continue;
}
break;
case "JSXTag":
case "JSXTagEnd":
JSXPunctuator.lastIndex = lastIndex;
if (match = JSXPunctuator.exec(input)) {
lastIndex = JSXPunctuator.lastIndex;
nextLastSignificantToken = match[0];
switch (match[0]) {
case "<":
stack.push({tag: "JSXTag"});
break;
case ">":
stack.pop();
if (lastSignificantToken === "/" || mode.tag === "JSXTagEnd") {
nextLastSignificantToken = "?JSX";
postfixIncDec = true;
} else {
stack.push({tag: "JSXChildren"});
}
break;
case "{":
stack.push({
tag: "InterpolationInJSX",
nesting: braces.length
});
nextLastSignificantToken = "?InterpolationInJSX";
postfixIncDec = false;
break;
case "/":
if (lastSignificantToken === "<") {
stack.pop();
if (stack[stack.length - 1].tag === "JSXChildren") {
stack.pop();
}
stack.push({tag: "JSXTagEnd"});
}
}
lastSignificantToken = nextLastSignificantToken;
yield ({
type: "JSXPunctuator",
value: match[0]
});
continue;
}
JSXIdentifier.lastIndex = lastIndex;
if (match = JSXIdentifier.exec(input)) {
lastIndex = JSXIdentifier.lastIndex;
lastSignificantToken = match[0];
yield ({
type: "JSXIdentifier",
value: match[0]
});
continue;
}
JSXString.lastIndex = lastIndex;
if (match = JSXString.exec(input)) {
lastIndex = JSXString.lastIndex;
lastSignificantToken = match[0];
yield ({
type: "JSXString",
value: match[0],
closed: match[2] !== void 0
});
continue;
}
break;
case "JSXChildren":
JSXText.lastIndex = lastIndex;
if (match = JSXText.exec(input)) {
lastIndex = JSXText.lastIndex;
lastSignificantToken = match[0];
yield ({
type: "JSXText",
value: match[0]
});
continue;
}
switch (input[lastIndex]) {
case "<":
stack.push({tag: "JSXTag"});
lastIndex++;
lastSignificantToken = "<";
yield ({
type: "JSXPunctuator",
value: "<"
});
continue;
case "{":
stack.push({
tag: "InterpolationInJSX",
nesting: braces.length
});
lastIndex++;
lastSignificantToken = "?InterpolationInJSX";
postfixIncDec = false;
yield ({
type: "JSXPunctuator",
value: "{"
});
continue;
}
}
WhiteSpace.lastIndex = lastIndex;
if (match = WhiteSpace.exec(input)) {
lastIndex = WhiteSpace.lastIndex;
yield ({
type: "WhiteSpace",
value: match[0]
});
continue;
}
LineTerminatorSequence.lastIndex = lastIndex;
if (match = LineTerminatorSequence.exec(input)) {
lastIndex = LineTerminatorSequence.lastIndex;
postfixIncDec = false;
if (KeywordsWithNoLineTerminatorAfter.test(lastSignificantToken)) {
lastSignificantToken = "?NoLineTerminatorHere";
}
yield ({
type: "LineTerminatorSequence",
value: match[0]
});
continue;
}
MultiLineComment.lastIndex = lastIndex;
if (match = MultiLineComment.exec(input)) {
lastIndex = MultiLineComment.lastIndex;
if (Newline.test(match[0])) {
postfixIncDec = false;
if (KeywordsWithNoLineTerminatorAfter.test(lastSignificantToken)) {
lastSignificantToken = "?NoLineTerminatorHere";
}
}
yield ({
type: "MultiLineComment",
value: match[0],
closed: match[1] !== void 0
});
continue;
}
SingleLineComment.lastIndex = lastIndex;
if (match = SingleLineComment.exec(input)) {
lastIndex = SingleLineComment.lastIndex;
postfixIncDec = false;
yield ({
type: "SingleLineComment",
value: match[0]
});
continue;
}
firstCodePoint = String.fromCodePoint(input.codePointAt(lastIndex));
lastIndex += firstCodePoint.length;
lastSignificantToken = firstCodePoint;
postfixIncDec = false;
yield ({
type: mode.tag.startsWith("JSX") ? "JSXInvalid" : "Invalid",
value: firstCodePoint
});
}
return void 0;
};
return jsTokens_1;
}
var jsTokensExports = requireJsTokens();
var jsTokens = /*@__PURE__*/getDefaultExportFromCjs(jsTokensExports);
// src/index.ts
var reservedWords = {
keyword: [
"break",
"case",
"catch",
"continue",
"debugger",
"default",
"do",
"else",
"finally",
"for",
"function",
"if",
"return",
"switch",
"throw",
"try",
"var",
"const",
"while",
"with",
"new",
"this",
"super",
"class",
"extends",
"export",
"import",
"null",
"true",
"false",
"in",
"instanceof",
"typeof",
"void",
"delete"
],
strict: [
"implements",
"interface",
"let",
"package",
"private",
"protected",
"public",
"static",
"yield"
]
}, keywords = new Set(reservedWords.keyword), reservedWordsStrictSet = new Set(reservedWords.strict), sometimesKeywords = /* @__PURE__ */ new Set(["as", "async", "from", "get", "of", "set"]);
function isReservedWord(word) {
return word === "await" || word === "enum";
}
function isStrictReservedWord(word) {
return isReservedWord(word) || reservedWordsStrictSet.has(word);
}
function isKeyword(word) {
return keywords.has(word);
}
var BRACKET = /^[()[\]{}]$/, getTokenType = function(token) {
if (token.type === "IdentifierName") {
if (isKeyword(token.value) || isStrictReservedWord(token.value) || sometimesKeywords.has(token.value))
return "Keyword";
if (token.value[0] && token.value[0] !== token.value[0].toLowerCase())
return "IdentifierCapitalized";
}
return token.type === "Punctuator" && BRACKET.test(token.value) ? "Bracket" : token.type === "Invalid" && (token.value === "@" || token.value === "#") ? "Punctuator" : token.type;
};
function getCallableType(token) {
if (token.type === "IdentifierName")
return "IdentifierCallable";
if (token.type === "PrivateIdentifier")
return "PrivateIdentifierCallable";
throw new Error("Not a callable token");
}
var colorize = (defs, type, value) => {
let colorize2 = defs[type];
return colorize2 ? colorize2(value) : value;
}, highlightTokens = (defs, text, jsx) => {
let highlighted = "", lastPotentialCallable = null, stackedHighlight = "";
for (let token of jsTokens(text, { jsx })) {
let type = getTokenType(token);
if (type === "IdentifierName" || type === "PrivateIdentifier") {
lastPotentialCallable && (highlighted += colorize(defs, getTokenType(lastPotentialCallable), lastPotentialCallable.value) + stackedHighlight, stackedHighlight = ""), lastPotentialCallable = token;
continue;
}
if (lastPotentialCallable && (token.type === "WhiteSpace" || token.type === "LineTerminatorSequence" || token.type === "Punctuator" && (token.value === "?." || token.value === "!"))) {
stackedHighlight += colorize(defs, type, token.value);
continue;
}
if (stackedHighlight && !lastPotentialCallable && (highlighted += stackedHighlight, stackedHighlight = ""), lastPotentialCallable) {
let type2 = token.type === "Punctuator" && token.value === "(" ? getCallableType(lastPotentialCallable) : getTokenType(lastPotentialCallable);
highlighted += colorize(defs, type2, lastPotentialCallable.value) + stackedHighlight, stackedHighlight = "", lastPotentialCallable = null;
}
highlighted += colorize(defs, type, token.value);
}
return highlighted;
};
function highlight$1(code, options = { jsx: false, colors: {} }) {
return code && highlightTokens(options.colors || {}, code, options.jsx);
}
function getDefs(c) {
const Invalid = (text) => c.white(c.bgRed(c.bold(text)));
return {
Keyword: c.magenta,
IdentifierCapitalized: c.yellow,
Punctuator: c.yellow,
StringLiteral: c.green,
NoSubstitutionTemplate: c.green,
MultiLineComment: c.gray,
SingleLineComment: c.gray,
RegularExpressionLiteral: c.cyan,
NumericLiteral: c.blue,
TemplateHead: (text) => c.green(text.slice(0, text.length - 2)) + c.cyan(text.slice(-2)),
TemplateTail: (text) => c.cyan(text.slice(0, 1)) + c.green(text.slice(1)),
TemplateMiddle: (text) => c.cyan(text.slice(0, 1)) + c.green(text.slice(1, text.length - 2)) + c.cyan(text.slice(-2)),
IdentifierCallable: c.blue,
PrivateIdentifierCallable: (text) => `#${c.blue(text.slice(1))}`,
Invalid,
JSXString: c.green,
JSXIdentifier: c.yellow,
JSXInvalid: Invalid,
JSXPunctuator: c.yellow
};
}
function highlight(code, options = { jsx: false }) {
return highlight$1(code, {
jsx: options.jsx,
colors: getDefs(options.colors || c)
});
}
// port from nanoid
// https://github.com/ai/nanoid
const urlAlphabet = "useandom-26T198340PX75pxJACKVERYMINDBUSHWOLF_GQZbfghjklqvwyzrict";
function nanoid(size = 21) {
let id = "";
let i = size;
while (i--) {
id += urlAlphabet[Math.random() * 64 | 0];
}
return id;
}
const lineSplitRE = /\r?\n/;
function positionToOffset(source, lineNumber, columnNumber) {
const lines = source.split(lineSplitRE);
const nl = /\r\n/.test(source) ? 2 : 1;
let start = 0;
if (lineNumber > lines.length) {
return source.length;
}
for (let i = 0; i < lineNumber - 1; i++) {
start += lines[i].length + nl;
}
return start + columnNumber;
}
function offsetToLineNumber(source, offset) {
if (offset > source.length) {
throw new Error(`offset is longer than source length! offset ${offset} > length ${source.length}`);
}
const lines = source.split(lineSplitRE);
const nl = /\r\n/.test(source) ? 2 : 1;
let counted = 0;
let line = 0;
for (; line < lines.length; line++) {
const lineLength = lines[line].length + nl;
if (counted + lineLength >= offset) {
break;
}
counted += lineLength;
}
return line + 1;
}
const RealDate = Date;
function random(seed) {
const x = Math.sin(seed++) * 1e4;
return x - Math.floor(x);
}
function shuffle(array, seed = RealDate.now()) {
let length = array.length;
while (length) {
const index = Math.floor(random(seed) * length--);
const previous = array[length];
array[length] = array[index];
array[index] = previous;
++seed;
}
return array;
}
const SAFE_TIMERS_SYMBOL = Symbol("vitest:SAFE_TIMERS");
function getSafeTimers() {
const { setTimeout: safeSetTimeout, setInterval: safeSetInterval, clearInterval: safeClearInterval, clearTimeout: safeClearTimeout, setImmediate: safeSetImmediate, clearImmediate: safeClearImmediate, queueMicrotask: safeQueueMicrotask } = globalThis[SAFE_TIMERS_SYMBOL] || globalThis;
const { nextTick: safeNextTick } = globalThis[SAFE_TIMERS_SYMBOL] || globalThis.process || { nextTick: (cb) => cb() };
return {
nextTick: safeNextTick,
setTimeout: safeSetTimeout,
setInterval: safeSetInterval,
clearInterval: safeClearInterval,
clearTimeout: safeClearTimeout,
setImmediate: safeSetImmediate,
clearImmediate: safeClearImmediate,
queueMicrotask: safeQueueMicrotask
};
}
function setSafeTimers() {
const { setTimeout: safeSetTimeout, setInterval: safeSetInterval, clearInterval: safeClearInterval, clearTimeout: safeClearTimeout, setImmediate: safeSetImmediate, clearImmediate: safeClearImmediate, queueMicrotask: safeQueueMicrotask } = globalThis;
const { nextTick: safeNextTick } = globalThis.process || { nextTick: (cb) => cb() };
const timers = {
nextTick: safeNextTick,
setTimeout: safeSetTimeout,
setInterval: safeSetInterval,
clearInterval: safeClearInterval,
clearTimeout: safeClearTimeout,
setImmediate: safeSetImmediate,
clearImmediate: safeClearImmediate,
queueMicrotask: safeQueueMicrotask
};
globalThis[SAFE_TIMERS_SYMBOL] = timers;
}
export { getSafeTimers, highlight, lineSplitRE, nanoid, offsetToLineNumber, positionToOffset, setSafeTimers, shuffle };

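index.js above bundles the js-tokens based highlighter, a nanoid port and the line/offset helpers. A sketch against the public entry point declared in index.d.ts above; values in comments follow from the code, not from a recorded run:

import { positionToOffset, offsetToLineNumber, nanoid, shuffle, highlight } from '@vitest/utils';

const source = 'const a = 1\nconst b = 2\n';
positionToOffset(source, 2, 6);   // 18 (column 6 on line 2)
offsetToLineNumber(source, 18);   // 2

nanoid();                         // 21-char id built from the url alphabet above
shuffle([1, 2, 3, 4], 42);        // deterministic for a fixed seed
highlight('const x = 42');        // ANSI-colorized via tinyrainbow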
139 frontend/node_modules/@vitest/utils/dist/source-map.d.ts generated vendored Normal file

@@ -0,0 +1,139 @@
import { ErrorWithDiff, ParsedStack } from './types.js';
type GeneratedColumn = number;
type SourcesIndex = number;
type SourceLine = number;
type SourceColumn = number;
type NamesIndex = number;
type SourceMapSegment = [GeneratedColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn] | [GeneratedColumn, SourcesIndex, SourceLine, SourceColumn, NamesIndex];
interface SourceMapV3 {
file?: string | null;
names: string[];
sourceRoot?: string;
sources: (string | null)[];
sourcesContent?: (string | null)[];
version: 3;
ignoreList?: number[];
}
interface EncodedSourceMap extends SourceMapV3 {
mappings: string;
}
interface DecodedSourceMap extends SourceMapV3 {
mappings: SourceMapSegment[][];
}
type OriginalMapping = {
source: string | null;
line: number;
column: number;
name: string | null;
};
type InvalidOriginalMapping = {
source: null;
line: null;
column: null;
name: null;
};
type GeneratedMapping = {
line: number;
column: number;
};
type InvalidGeneratedMapping = {
line: null;
column: null;
};
type Bias = typeof GREATEST_LOWER_BOUND | typeof LEAST_UPPER_BOUND;
type XInput = {
x_google_ignoreList?: SourceMapV3['ignoreList'];
};
type EncodedSourceMapXInput = EncodedSourceMap & XInput;
type DecodedSourceMapXInput = DecodedSourceMap & XInput;
type SourceMapInput = string | EncodedSourceMapXInput | DecodedSourceMapXInput | TraceMap;
type Needle = {
line: number;
column: number;
bias?: Bias;
};
type SourceNeedle = {
source: string;
line: number;
column: number;
bias?: Bias;
};
type EachMapping = {
generatedLine: number;
generatedColumn: number;
source: null;
originalLine: null;
originalColumn: null;
name: null;
} | {
generatedLine: number;
generatedColumn: number;
source: string | null;
originalLine: number;
originalColumn: number;
name: string | null;
};
declare abstract class SourceMap {
version: SourceMapV3['version'];
file: SourceMapV3['file'];
names: SourceMapV3['names'];
sourceRoot: SourceMapV3['sourceRoot'];
sources: SourceMapV3['sources'];
sourcesContent: SourceMapV3['sourcesContent'];
resolvedSources: SourceMapV3['sources'];
ignoreList: SourceMapV3['ignoreList'];
}
declare const LEAST_UPPER_BOUND = -1;
declare const GREATEST_LOWER_BOUND = 1;
declare class TraceMap implements SourceMap {
version: SourceMapV3['version'];
file: SourceMapV3['file'];
names: SourceMapV3['names'];
sourceRoot: SourceMapV3['sourceRoot'];
sources: SourceMapV3['sources'];
sourcesContent: SourceMapV3['sourcesContent'];
ignoreList: SourceMapV3['ignoreList'];
resolvedSources: string[];
private _encoded;
private _decoded;
private _decodedMemo;
private _bySources;
private _bySourceMemos;
constructor(map: SourceMapInput, mapUrl?: string | null);
}
/**
* A higher-level API to find the source/line/column associated with a generated line/column
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
* `source-map` library.
*/
declare function originalPositionFor(map: TraceMap, needle: Needle): OriginalMapping | InvalidOriginalMapping;
/**
* Finds the generated line/column position of the provided source/line/column source position.
*/
declare function generatedPositionFor(map: TraceMap, needle: SourceNeedle): GeneratedMapping | InvalidGeneratedMapping;
/**
* Iterates each mapping in generated position order.
*/
declare function eachMapping(map: TraceMap, cb: (mapping: EachMapping) => void): void;
interface StackTraceParserOptions {
ignoreStackEntries?: (RegExp | string)[];
getSourceMap?: (file: string) => unknown;
getUrlId?: (id: string) => string;
frameFilter?: (error: ErrorWithDiff, frame: ParsedStack) => boolean | void;
}
declare function parseSingleFFOrSafariStack(raw: string): ParsedStack | null;
declare function parseSingleStack(raw: string): ParsedStack | null;
// Based on https://github.com/stacktracejs/error-stack-parser
// Credit to stacktracejs
declare function parseSingleV8Stack(raw: string): ParsedStack | null;
declare function createStackString(stacks: ParsedStack[]): string;
declare function parseStacktrace(stack: string, options?: StackTraceParserOptions): ParsedStack[];
declare function parseErrorStacktrace(e: ErrorWithDiff, options?: StackTraceParserOptions): ParsedStack[];
export { TraceMap, createStackString, eachMapping, generatedPositionFor, originalPositionFor, parseErrorStacktrace, parseSingleFFOrSafariStack, parseSingleStack, parseSingleV8Stack, parseStacktrace };
export type { EachMapping, SourceMapInput, StackTraceParserOptions };

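The declarations above expose a trimmed TraceMap/source-map API plus the stack-trace parsers. A sketch assuming '@vitest/utils/source-map' resolves to the matching dist file; the expected result in the comment follows the 1-based line / 0-based column convention documented above:

import { TraceMap, originalPositionFor, parseStacktrace } from '@vitest/utils/source-map';

const map = new TraceMap({
  version: 3,
  sources: ['src/add.ts'],
  names: [],
  mappings: 'AAAA'   // generated 1:0 maps to original 1:0 of src/add.ts
});
originalPositionFor(map, { line: 1, column: 0 });
// { source: 'src/add.ts', line: 1, column: 0, name: null }

// Parse a V8 / Firefox / Safari style stack string into structured frames
parseStacktrace(new Error('x').stack ?? '');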
996 frontend/node_modules/@vitest/utils/dist/source-map.js generated vendored Normal file

@@ -0,0 +1,996 @@
import { isPrimitive, notNullish } from './helpers.js';
const comma = ','.charCodeAt(0);
const chars = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
const intToChar = new Uint8Array(64); // 64 possible chars.
const charToInt = new Uint8Array(128); // z is 122 in ASCII
for (let i = 0; i < chars.length; i++) {
const c = chars.charCodeAt(i);
intToChar[i] = c;
charToInt[c] = i;
}
function decodeInteger(reader, relative) {
let value = 0;
let shift = 0;
let integer = 0;
do {
const c = reader.next();
integer = charToInt[c];
value |= (integer & 31) << shift;
shift += 5;
} while (integer & 32);
const shouldNegate = value & 1;
value >>>= 1;
if (shouldNegate) {
value = -2147483648 | -value;
}
return relative + value;
}
function hasMoreVlq(reader, max) {
if (reader.pos >= max)
return false;
return reader.peek() !== comma;
}
class StringReader {
constructor(buffer) {
this.pos = 0;
this.buffer = buffer;
}
next() {
return this.buffer.charCodeAt(this.pos++);
}
peek() {
return this.buffer.charCodeAt(this.pos);
}
indexOf(char) {
const { buffer, pos } = this;
const idx = buffer.indexOf(char, pos);
return idx === -1 ? buffer.length : idx;
}
}
function decode(mappings) {
const { length } = mappings;
const reader = new StringReader(mappings);
const decoded = [];
let genColumn = 0;
let sourcesIndex = 0;
let sourceLine = 0;
let sourceColumn = 0;
let namesIndex = 0;
do {
const semi = reader.indexOf(';');
const line = [];
let sorted = true;
let lastCol = 0;
genColumn = 0;
while (reader.pos < semi) {
let seg;
genColumn = decodeInteger(reader, genColumn);
if (genColumn < lastCol)
sorted = false;
lastCol = genColumn;
if (hasMoreVlq(reader, semi)) {
sourcesIndex = decodeInteger(reader, sourcesIndex);
sourceLine = decodeInteger(reader, sourceLine);
sourceColumn = decodeInteger(reader, sourceColumn);
if (hasMoreVlq(reader, semi)) {
namesIndex = decodeInteger(reader, namesIndex);
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn, namesIndex];
}
else {
seg = [genColumn, sourcesIndex, sourceLine, sourceColumn];
}
}
else {
seg = [genColumn];
}
line.push(seg);
reader.pos++;
}
if (!sorted)
sort(line);
decoded.push(line);
reader.pos = semi + 1;
} while (reader.pos <= length);
return decoded;
}
function sort(line) {
line.sort(sortComparator$1);
}
function sortComparator$1(a, b) {
return a[0] - b[0];
}
// Matches the scheme of a URL, eg "http://"
const schemeRegex = /^[\w+.-]+:\/\//;
/**
* Matches the parts of a URL:
* 1. Scheme, including ":", guaranteed.
* 2. User/password, including "@", optional.
* 3. Host, guaranteed.
* 4. Port, including ":", optional.
* 5. Path, including "/", optional.
* 6. Query, including "?", optional.
* 7. Hash, including "#", optional.
*/
const urlRegex = /^([\w+.-]+:)\/\/([^@/#?]*@)?([^:/#?]*)(:\d+)?(\/[^#?]*)?(\?[^#]*)?(#.*)?/;
/**
* File URLs are weird. They dont' need the regular `//` in the scheme, they may or may not start
* with a leading `/`, they can have a domain (but only if they don't start with a Windows drive).
*
* 1. Host, optional.
* 2. Path, which may include "/", guaranteed.
* 3. Query, including "?", optional.
* 4. Hash, including "#", optional.
*/
const fileRegex = /^file:(?:\/\/((?![a-z]:)[^/#?]*)?)?(\/?[^#?]*)(\?[^#]*)?(#.*)?/i;
var UrlType;
(function (UrlType) {
UrlType[UrlType["Empty"] = 1] = "Empty";
UrlType[UrlType["Hash"] = 2] = "Hash";
UrlType[UrlType["Query"] = 3] = "Query";
UrlType[UrlType["RelativePath"] = 4] = "RelativePath";
UrlType[UrlType["AbsolutePath"] = 5] = "AbsolutePath";
UrlType[UrlType["SchemeRelative"] = 6] = "SchemeRelative";
UrlType[UrlType["Absolute"] = 7] = "Absolute";
})(UrlType || (UrlType = {}));
function isAbsoluteUrl(input) {
return schemeRegex.test(input);
}
function isSchemeRelativeUrl(input) {
return input.startsWith('//');
}
function isAbsolutePath(input) {
return input.startsWith('/');
}
function isFileUrl(input) {
return input.startsWith('file:');
}
function isRelative(input) {
return /^[.?#]/.test(input);
}
function parseAbsoluteUrl(input) {
const match = urlRegex.exec(input);
return makeUrl(match[1], match[2] || '', match[3], match[4] || '', match[5] || '/', match[6] || '', match[7] || '');
}
function parseFileUrl(input) {
const match = fileRegex.exec(input);
const path = match[2];
return makeUrl('file:', '', match[1] || '', '', isAbsolutePath(path) ? path : '/' + path, match[3] || '', match[4] || '');
}
function makeUrl(scheme, user, host, port, path, query, hash) {
return {
scheme,
user,
host,
port,
path,
query,
hash,
type: UrlType.Absolute,
};
}
function parseUrl(input) {
if (isSchemeRelativeUrl(input)) {
const url = parseAbsoluteUrl('http:' + input);
url.scheme = '';
url.type = UrlType.SchemeRelative;
return url;
}
if (isAbsolutePath(input)) {
const url = parseAbsoluteUrl('http://foo.com' + input);
url.scheme = '';
url.host = '';
url.type = UrlType.AbsolutePath;
return url;
}
if (isFileUrl(input))
return parseFileUrl(input);
if (isAbsoluteUrl(input))
return parseAbsoluteUrl(input);
const url = parseAbsoluteUrl('http://foo.com/' + input);
url.scheme = '';
url.host = '';
url.type = input
? input.startsWith('?')
? UrlType.Query
: input.startsWith('#')
? UrlType.Hash
: UrlType.RelativePath
: UrlType.Empty;
return url;
}
function stripPathFilename(path) {
// If a path ends with a parent directory "..", then it's a relative path with excess parent
// paths. It's not a file, so we can't strip it.
if (path.endsWith('/..'))
return path;
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
function mergePaths(url, base) {
normalizePath(base, base.type);
// If the path is just a "/", then it was an empty path to begin with (remember, we're a relative
// path).
if (url.path === '/') {
url.path = base.path;
}
else {
// Resolution happens relative to the base path's directory, not the file.
url.path = stripPathFilename(base.path) + url.path;
}
}
/**
* The path can have empty directories "//", unneeded parents "foo/..", or current directory
* "foo/.". We need to normalize to a standard representation.
*/
function normalizePath(url, type) {
const rel = type <= UrlType.RelativePath;
const pieces = url.path.split('/');
// We need to preserve the first piece always, so that we output a leading slash. The item at
// pieces[0] is an empty string.
let pointer = 1;
// Positive is the number of real directories we've output, used for popping a parent directory.
// Eg, "foo/bar/.." will have a positive 2, and we can decrement to be left with just "foo".
let positive = 0;
// We need to keep a trailing slash if we encounter an empty directory (eg, splitting "foo/" will
// generate `["foo", ""]` pieces). And, if we pop a parent directory. But once we encounter a
// real directory, we won't need to append, unless the other conditions happen again.
let addTrailingSlash = false;
for (let i = 1; i < pieces.length; i++) {
const piece = pieces[i];
// An empty directory, could be a trailing slash, or just a double "//" in the path.
if (!piece) {
addTrailingSlash = true;
continue;
}
// If we encounter a real directory, then we don't need to append anymore.
addTrailingSlash = false;
// A current directory, which we can always drop.
if (piece === '.')
continue;
// A parent directory, we need to see if there are any real directories we can pop. Else, we
// have an excess of parents, and we'll need to keep the "..".
if (piece === '..') {
if (positive) {
addTrailingSlash = true;
positive--;
pointer--;
}
else if (rel) {
// If we're in a relativePath, then we need to keep the excess parents. Else, in an absolute
// URL, protocol relative URL, or an absolute path, we don't need to keep excess.
pieces[pointer++] = piece;
}
continue;
}
// We've encountered a real directory. Move it to the next insertion pointer, which accounts for
// any popped or dropped directories.
pieces[pointer++] = piece;
positive++;
}
let path = '';
for (let i = 1; i < pointer; i++) {
path += '/' + pieces[i];
}
if (!path || (addTrailingSlash && !path.endsWith('/..'))) {
path += '/';
}
url.path = path;
}
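// Illustrative example (values invented for this note): normalization collapses empty,
// "." and ".." segments while preserving a trailing slash, so a url whose path is
//   '/foo/./bar//../baz/'
// becomes
//   '/foo/baz/'
// after normalizePath(url, UrlType.Absolute).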
/**
* Attempts to resolve `input` URL/path relative to `base`.
*/
function resolve$2(input, base) {
if (!input && !base)
return '';
const url = parseUrl(input);
let inputType = url.type;
if (base && inputType !== UrlType.Absolute) {
const baseUrl = parseUrl(base);
const baseType = baseUrl.type;
switch (inputType) {
case UrlType.Empty:
url.hash = baseUrl.hash;
// fall through
case UrlType.Hash:
url.query = baseUrl.query;
// fall through
case UrlType.Query:
case UrlType.RelativePath:
mergePaths(url, baseUrl);
// fall through
case UrlType.AbsolutePath:
// The host, user, and port are joined, you can't copy one without the others.
url.user = baseUrl.user;
url.host = baseUrl.host;
url.port = baseUrl.port;
// fall through

case UrlType.SchemeRelative:
// The input doesn't have a scheme, so we need to copy at least that over.
url.scheme = baseUrl.scheme;
}
if (baseType > inputType)
inputType = baseType;
}
normalizePath(url, inputType);
const queryHash = url.query + url.hash;
switch (inputType) {
// This is impossible, because of the empty checks at the start of the function.
// case UrlType.Empty:
case UrlType.Hash:
case UrlType.Query:
return queryHash;
case UrlType.RelativePath: {
// The first char is always a "/", and we need it to be relative.
const path = url.path.slice(1);
if (!path)
return queryHash || '.';
if (isRelative(base || input) && !isRelative(path)) {
// If base started with a leading ".", or there is no base and input started with a ".",
// then we need to ensure that the relative path starts with a ".". We don't know if
// relative starts with a "..", though, so check before prepending.
return './' + path + queryHash;
}
return path + queryHash;
}
case UrlType.AbsolutePath:
return url.path + queryHash;
default:
return url.scheme + '//' + url.user + url.host + url.port + url.path + queryHash;
}
}
function resolve$1(input, base) {
// The base is always treated as a directory, if it's not empty.
// https://github.com/mozilla/source-map/blob/8cb3ee57/lib/util.js#L327
// https://github.com/chromium/chromium/blob/da4adbb3/third_party/blink/renderer/devtools/front_end/sdk/SourceMap.js#L400-L401
if (base && !base.endsWith('/'))
base += '/';
return resolve$2(input, base);
}
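// Illustrative examples (hypothetical inputs): resolve$1 treats the base as a directory,
// which is what TraceMap relies on when resolving `sources` against the map URL:
//   resolve$1('../lib/util.js', '/src/app.js')           // => '/src/lib/util.js'
//   resolve$1('foo/bar.js', 'https://example.com/src/')  // => 'https://example.com/src/foo/bar.js'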
/**
* Removes everything after the last "/", but leaves the slash.
*/
function stripFilename(path) {
if (!path)
return '';
const index = path.lastIndexOf('/');
return path.slice(0, index + 1);
}
const COLUMN = 0;
const SOURCES_INDEX = 1;
const SOURCE_LINE = 2;
const SOURCE_COLUMN = 3;
const NAMES_INDEX = 4;
const REV_GENERATED_LINE = 1;
const REV_GENERATED_COLUMN = 2;
function maybeSort(mappings, owned) {
const unsortedIndex = nextUnsortedSegmentLine(mappings, 0);
if (unsortedIndex === mappings.length)
return mappings;
// If we own the array (meaning we parsed it from JSON), then we're free to directly mutate it. If
// not, we do not want to modify the consumer's input array.
if (!owned)
mappings = mappings.slice();
for (let i = unsortedIndex; i < mappings.length; i = nextUnsortedSegmentLine(mappings, i + 1)) {
mappings[i] = sortSegments(mappings[i], owned);
}
return mappings;
}
function nextUnsortedSegmentLine(mappings, start) {
for (let i = start; i < mappings.length; i++) {
if (!isSorted(mappings[i]))
return i;
}
return mappings.length;
}
function isSorted(line) {
for (let j = 1; j < line.length; j++) {
if (line[j][COLUMN] < line[j - 1][COLUMN]) {
return false;
}
}
return true;
}
function sortSegments(line, owned) {
if (!owned)
line = line.slice();
return line.sort(sortComparator);
}
function sortComparator(a, b) {
return a[COLUMN] - b[COLUMN];
}
let found = false;
/**
* A binary search implementation that returns the index if a match is found.
* If no match is found, then the left-index (the index associated with the item that comes just
* before the desired index) is returned. To maintain proper sort order, a splice would happen at
* the next index:
*
* ```js
* const array = [1, 3];
* const needle = 2;
* const index = binarySearch(array, needle, (item, needle) => item - needle);
*
* assert.equal(index, 0);
* array.splice(index + 1, 0, needle);
* assert.deepEqual(array, [1, 2, 3]);
* ```
*/
function binarySearch(haystack, needle, low, high) {
while (low <= high) {
const mid = low + ((high - low) >> 1);
const cmp = haystack[mid][COLUMN] - needle;
if (cmp === 0) {
found = true;
return mid;
}
if (cmp < 0) {
low = mid + 1;
}
else {
high = mid - 1;
}
}
found = false;
return low - 1;
}
function upperBound(haystack, needle, index) {
for (let i = index + 1; i < haystack.length; index = i++) {
if (haystack[i][COLUMN] !== needle)
break;
}
return index;
}
function lowerBound(haystack, needle, index) {
for (let i = index - 1; i >= 0; index = i--) {
if (haystack[i][COLUMN] !== needle)
break;
}
return index;
}
function memoizedState() {
return {
lastKey: -1,
lastNeedle: -1,
lastIndex: -1,
};
}
/**
* This overly complicated beast is just to record the last tested line/column and the resulting
* index, allowing us to skip a few tests if mappings are monotonically increasing.
*/
function memoizedBinarySearch(haystack, needle, state, key) {
const { lastKey, lastNeedle, lastIndex } = state;
let low = 0;
let high = haystack.length - 1;
if (key === lastKey) {
if (needle === lastNeedle) {
found = lastIndex !== -1 && haystack[lastIndex][COLUMN] === needle;
return lastIndex;
}
if (needle >= lastNeedle) {
// lastIndex may be -1 if the previous needle was not found.
low = lastIndex === -1 ? 0 : lastIndex;
}
else {
high = lastIndex;
}
}
state.lastKey = key;
state.lastNeedle = needle;
return (state.lastIndex = binarySearch(haystack, needle, low, high));
}
// Rebuilds the original source files, with mappings that are ordered by source line/column instead
// of generated line/column.
function buildBySources(decoded, memos) {
const sources = memos.map(buildNullArray);
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
if (seg.length === 1)
continue;
const sourceIndex = seg[SOURCES_INDEX];
const sourceLine = seg[SOURCE_LINE];
const sourceColumn = seg[SOURCE_COLUMN];
const originalSource = sources[sourceIndex];
const originalLine = (originalSource[sourceLine] || (originalSource[sourceLine] = []));
const memo = memos[sourceIndex];
// The binary search either found a match, or it found the left-index just before where the
// segment should go. Either way, we want to insert after that. And there may be multiple
// generated segments associated with an original location, so there may need to move several
// indexes before we find where we need to insert.
let index = upperBound(originalLine, sourceColumn, memoizedBinarySearch(originalLine, sourceColumn, memo, sourceLine));
memo.lastIndex = ++index;
insert(originalLine, index, [sourceColumn, i, seg[COLUMN]]);
}
}
return sources;
}
function insert(array, index, value) {
for (let i = array.length; i > index; i--) {
array[i] = array[i - 1];
}
array[index] = value;
}
// Null arrays allow us to use ordered index keys without actually allocating contiguous memory like
// a real array. We use a null-prototype object to avoid prototype pollution and deoptimizations.
// Numeric properties on objects are magically sorted in ascending order by the engine regardless of
// the insertion order. So, by setting any numeric keys, even out of order, we'll get ascending
// order when iterating with for-in.
function buildNullArray() {
return { __proto__: null };
}
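// Illustrative sketch (not from the original source): numeric keys on the null-prototype
// object iterate in ascending order even when assigned out of order:
//   const line = buildNullArray();
//   line[10] = 'b';
//   line[2] = 'a';
//   Object.keys(line); // => ['2', '10']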
const LINE_GTR_ZERO = '`line` must be greater than 0 (lines start at line 1)';
const COL_GTR_EQ_ZERO = '`column` must be greater than or equal to 0 (columns start at column 0)';
const LEAST_UPPER_BOUND = -1;
const GREATEST_LOWER_BOUND = 1;
class TraceMap {
constructor(map, mapUrl) {
const isString = typeof map === 'string';
if (!isString && map._decodedMemo)
return map;
const parsed = (isString ? JSON.parse(map) : map);
const { version, file, names, sourceRoot, sources, sourcesContent } = parsed;
this.version = version;
this.file = file;
this.names = names || [];
this.sourceRoot = sourceRoot;
this.sources = sources;
this.sourcesContent = sourcesContent;
this.ignoreList = parsed.ignoreList || parsed.x_google_ignoreList || undefined;
const from = resolve$1(sourceRoot || '', stripFilename(mapUrl));
this.resolvedSources = sources.map((s) => resolve$1(s || '', from));
const { mappings } = parsed;
if (typeof mappings === 'string') {
this._encoded = mappings;
this._decoded = undefined;
}
else {
this._encoded = undefined;
this._decoded = maybeSort(mappings, isString);
}
this._decodedMemo = memoizedState();
this._bySources = undefined;
this._bySourceMemos = undefined;
}
}
/**
* Typescript doesn't allow friend access to private fields, so this just casts the map into a type
* with public access modifiers.
*/
function cast(map) {
return map;
}
/**
* Returns the decoded (array of lines of segments) form of the SourceMap's mappings field.
*/
function decodedMappings(map) {
var _a;
return ((_a = cast(map))._decoded || (_a._decoded = decode(cast(map)._encoded)));
}
/**
* A higher-level API to find the source/line/column associated with a generated line/column
* (think, from a stack trace). Line is 1-based, but column is 0-based, due to legacy behavior in
* `source-map` library.
*/
function originalPositionFor(map, needle) {
let { line, column, bias } = needle;
line--;
if (line < 0)
throw new Error(LINE_GTR_ZERO);
if (column < 0)
throw new Error(COL_GTR_EQ_ZERO);
const decoded = decodedMappings(map);
// It's common for parent source maps to have pointers to lines that have no
// mapping (like a "//# sourceMappingURL=") at the end of the child file.
if (line >= decoded.length)
return OMapping(null, null, null, null);
const segments = decoded[line];
const index = traceSegmentInternal(segments, cast(map)._decodedMemo, line, column, bias || GREATEST_LOWER_BOUND);
if (index === -1)
return OMapping(null, null, null, null);
const segment = segments[index];
if (segment.length === 1)
return OMapping(null, null, null, null);
const { names, resolvedSources } = map;
return OMapping(resolvedSources[segment[SOURCES_INDEX]], segment[SOURCE_LINE] + 1, segment[SOURCE_COLUMN], segment.length === 5 ? names[segment[NAMES_INDEX]] : null);
}
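// Usage sketch (hypothetical map and positions, shown only for orientation):
//   const tracer = new TraceMap(rawSourceMap, 'https://example.com/assets/app.js.map');
//   originalPositionFor(tracer, { line: 1, column: 42 });
//   // => { source: '…/src/app.ts', line: 3, column: 10, name: 'handler' },
//   //    or an all-null object when the generated position has no mapping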
/**
* Finds the generated line/column position of the provided source/line/column source position.
*/
function generatedPositionFor(map, needle) {
const { source, line, column, bias } = needle;
return generatedPosition(map, source, line, column, bias || GREATEST_LOWER_BOUND, false);
}
/**
* Iterates each mapping in generated position order.
*/
function eachMapping(map, cb) {
const decoded = decodedMappings(map);
const { names, resolvedSources } = map;
for (let i = 0; i < decoded.length; i++) {
const line = decoded[i];
for (let j = 0; j < line.length; j++) {
const seg = line[j];
const generatedLine = i + 1;
const generatedColumn = seg[0];
let source = null;
let originalLine = null;
let originalColumn = null;
let name = null;
if (seg.length !== 1) {
source = resolvedSources[seg[1]];
originalLine = seg[2] + 1;
originalColumn = seg[3];
}
if (seg.length === 5)
name = names[seg[4]];
cb({
generatedLine,
generatedColumn,
source,
originalLine,
originalColumn,
name,
});
}
}
}
function OMapping(source, line, column, name) {
return { source, line, column, name };
}
function GMapping(line, column) {
return { line, column };
}
function traceSegmentInternal(segments, memo, line, column, bias) {
let index = memoizedBinarySearch(segments, column, memo, line);
if (found) {
index = (bias === LEAST_UPPER_BOUND ? upperBound : lowerBound)(segments, column, index);
}
else if (bias === LEAST_UPPER_BOUND)
index++;
if (index === -1 || index === segments.length)
return -1;
return index;
}
function generatedPosition(map, source, line, column, bias, all) {
var _a;
line--;
if (line < 0)
throw new Error(LINE_GTR_ZERO);
if (column < 0)
throw new Error(COL_GTR_EQ_ZERO);
const { sources, resolvedSources } = map;
let sourceIndex = sources.indexOf(source);
if (sourceIndex === -1)
sourceIndex = resolvedSources.indexOf(source);
if (sourceIndex === -1)
return all ? [] : GMapping(null, null);
const generated = ((_a = cast(map))._bySources || (_a._bySources = buildBySources(decodedMappings(map), (cast(map)._bySourceMemos = sources.map(memoizedState)))));
const segments = generated[sourceIndex][line];
if (segments == null)
return all ? [] : GMapping(null, null);
const memo = cast(map)._bySourceMemos[sourceIndex];
const index = traceSegmentInternal(segments, memo, line, column, bias);
if (index === -1)
return GMapping(null, null);
const segment = segments[index];
return GMapping(segment[REV_GENERATED_LINE] + 1, segment[REV_GENERATED_COLUMN]);
}
const _DRIVE_LETTER_START_RE = /^[A-Za-z]:\//;
function normalizeWindowsPath(input = "") {
if (!input) {
return input;
}
return input.replace(/\\/g, "/").replace(_DRIVE_LETTER_START_RE, (r) => r.toUpperCase());
}
const _IS_ABSOLUTE_RE = /^[/\\](?![/\\])|^[/\\]{2}(?!\.)|^[A-Za-z]:[/\\]/;
function cwd() {
if (typeof process !== "undefined" && typeof process.cwd === "function") {
return process.cwd().replace(/\\/g, "/");
}
return "/";
}
const resolve = function(...arguments_) {
arguments_ = arguments_.map((argument) => normalizeWindowsPath(argument));
let resolvedPath = "";
let resolvedAbsolute = false;
for (let index = arguments_.length - 1; index >= -1 && !resolvedAbsolute; index--) {
const path = index >= 0 ? arguments_[index] : cwd();
if (!path || path.length === 0) {
continue;
}
resolvedPath = `${path}/${resolvedPath}`;
resolvedAbsolute = isAbsolute(path);
}
resolvedPath = normalizeString(resolvedPath, !resolvedAbsolute);
if (resolvedAbsolute && !isAbsolute(resolvedPath)) {
return `/${resolvedPath}`;
}
return resolvedPath.length > 0 ? resolvedPath : ".";
};
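// Illustrative example (hypothetical segments): resolution walks right-to-left until an
// absolute segment is found, then normalizes the joined path:
//   resolve('/foo', 'bar', '../baz')  // => '/foo/baz'
//   resolve('a', 'b')                 // => `${cwd()}/a/b` when neither segment is absolute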
function normalizeString(path, allowAboveRoot) {
let res = "";
let lastSegmentLength = 0;
let lastSlash = -1;
let dots = 0;
let char = null;
for (let index = 0; index <= path.length; ++index) {
if (index < path.length) {
char = path[index];
} else if (char === "/") {
break;
} else {
char = "/";
}
if (char === "/") {
      if (lastSlash === index - 1 || dots === 1) {
        // empty segment ("//") or a single "." segment: nothing to emit
      } else if (dots === 2) {
if (res.length < 2 || lastSegmentLength !== 2 || res[res.length - 1] !== "." || res[res.length - 2] !== ".") {
if (res.length > 2) {
const lastSlashIndex = res.lastIndexOf("/");
if (lastSlashIndex === -1) {
res = "";
lastSegmentLength = 0;
} else {
res = res.slice(0, lastSlashIndex);
lastSegmentLength = res.length - 1 - res.lastIndexOf("/");
}
lastSlash = index;
dots = 0;
continue;
} else if (res.length > 0) {
res = "";
lastSegmentLength = 0;
lastSlash = index;
dots = 0;
continue;
}
}
if (allowAboveRoot) {
res += res.length > 0 ? "/.." : "..";
lastSegmentLength = 2;
}
} else {
if (res.length > 0) {
res += `/${path.slice(lastSlash + 1, index)}`;
} else {
res = path.slice(lastSlash + 1, index);
}
lastSegmentLength = index - lastSlash - 1;
}
lastSlash = index;
dots = 0;
} else if (char === "." && dots !== -1) {
++dots;
} else {
dots = -1;
}
}
return res;
}
const isAbsolute = function(p) {
return _IS_ABSOLUTE_RE.test(p);
};
const CHROME_IE_STACK_REGEXP = /^\s*at .*(?:\S:\d+|\(native\))/m;
const SAFARI_NATIVE_CODE_REGEXP = /^(?:eval@)?(?:\[native code\])?$/;
const stackIgnorePatterns = [
"node:internal",
/\/packages\/\w+\/dist\//,
/\/@vitest\/\w+\/dist\//,
"/vitest/dist/",
"/vitest/src/",
"/vite-node/dist/",
"/vite-node/src/",
"/node_modules/chai/",
"/node_modules/tinypool/",
"/node_modules/tinyspy/",
"/deps/chunk-",
"/deps/@vitest",
"/deps/loupe",
"/deps/chai",
/node:\w+/,
/__vitest_test__/,
/__vitest_browser__/,
/\/deps\/vitest_/
];
function extractLocation(urlLike) {
// Fail-fast but return locations like "(native)"
if (!urlLike.includes(":")) {
return [urlLike];
}
const regExp = /(.+?)(?::(\d+))?(?::(\d+))?$/;
const parts = regExp.exec(urlLike.replace(/^\(|\)$/g, ""));
if (!parts) {
return [urlLike];
}
let url = parts[1];
if (url.startsWith("async ")) {
url = url.slice(6);
}
if (url.startsWith("http:") || url.startsWith("https:")) {
const urlObj = new URL(url);
urlObj.searchParams.delete("import");
urlObj.searchParams.delete("browserv");
url = urlObj.pathname + urlObj.hash + urlObj.search;
}
if (url.startsWith("/@fs/")) {
const isWindows = /^\/@fs\/[a-zA-Z]:\//.test(url);
url = url.slice(isWindows ? 5 : 4);
}
return [
url,
parts[2] || undefined,
parts[3] || undefined
];
}
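// Illustrative example (made-up frame text): a parenthesized location is split into
// url, line and column strings, while non-URL tokens pass through untouched:
//   extractLocation('(/srv/app/file.js:10:5)') // => ['/srv/app/file.js', '10', '5']
//   extractLocation('(native)')                // => ['(native)']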
function parseSingleFFOrSafariStack(raw) {
let line = raw.trim();
if (SAFARI_NATIVE_CODE_REGEXP.test(line)) {
return null;
}
if (line.includes(" > eval")) {
line = line.replace(/ line (\d+)(?: > eval line \d+)* > eval:\d+:\d+/g, ":$1");
}
if (!line.includes("@") && !line.includes(":")) {
return null;
}
// eslint-disable-next-line regexp/no-super-linear-backtracking, regexp/optimal-quantifier-concatenation
const functionNameRegex = /((.*".+"[^@]*)?[^@]*)(@)/;
const matches = line.match(functionNameRegex);
const functionName = matches && matches[1] ? matches[1] : undefined;
const [url, lineNumber, columnNumber] = extractLocation(line.replace(functionNameRegex, ""));
if (!url || !lineNumber || !columnNumber) {
return null;
}
return {
file: url,
method: functionName || "",
line: Number.parseInt(lineNumber),
column: Number.parseInt(columnNumber)
};
}
function parseSingleStack(raw) {
const line = raw.trim();
if (!CHROME_IE_STACK_REGEXP.test(line)) {
return parseSingleFFOrSafariStack(line);
}
return parseSingleV8Stack(line);
}
// Based on https://github.com/stacktracejs/error-stack-parser
// Credit to stacktracejs
function parseSingleV8Stack(raw) {
let line = raw.trim();
if (!CHROME_IE_STACK_REGEXP.test(line)) {
return null;
}
if (line.includes("(eval ")) {
line = line.replace(/eval code/g, "eval").replace(/(\(eval at [^()]*)|(,.*$)/g, "");
}
let sanitizedLine = line.replace(/^\s+/, "").replace(/\(eval code/g, "(").replace(/^.*?\s+/, "");
// capture and preserve the parenthesized location "(/foo/my bar.js:12:87)" in
// case it has spaces in it, as the string is split on \s+ later on
const location = sanitizedLine.match(/ (\(.+\)$)/);
// remove the parenthesized location from the line, if it was matched
sanitizedLine = location ? sanitizedLine.replace(location[0], "") : sanitizedLine;
// if a location was matched, pass it to extractLocation() otherwise pass all sanitizedLine
// because this line doesn't have function name
const [url, lineNumber, columnNumber] = extractLocation(location ? location[1] : sanitizedLine);
let method = location && sanitizedLine || "";
let file = url && ["eval", "<anonymous>"].includes(url) ? undefined : url;
if (!file || !lineNumber || !columnNumber) {
return null;
}
if (method.startsWith("async ")) {
method = method.slice(6);
}
if (file.startsWith("file://")) {
file = file.slice(7);
}
// normalize Windows path (\ -> /)
file = file.startsWith("node:") || file.startsWith("internal:") ? file : resolve(file);
if (method) {
method = method.replace(/__vite_ssr_import_\d+__\./g, "");
}
return {
method,
file,
line: Number.parseInt(lineNumber),
column: Number.parseInt(columnNumber)
};
}
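// Illustrative example (hypothetical frame): a V8-style stack line is reduced to a ParsedStack:
//   parseSingleV8Stack('    at foo (/srv/app/file.js:10:5)')
//   // => { method: 'foo', file: '/srv/app/file.js', line: 10, column: 5 }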
function createStackString(stacks) {
return stacks.map((stack) => {
const line = `${stack.file}:${stack.line}:${stack.column}`;
if (stack.method) {
return ` at ${stack.method}(${line})`;
}
return ` at ${line}`;
}).join("\n");
}
function parseStacktrace(stack, options = {}) {
const { ignoreStackEntries = stackIgnorePatterns } = options;
const stacks = !CHROME_IE_STACK_REGEXP.test(stack) ? parseFFOrSafariStackTrace(stack) : parseV8Stacktrace(stack);
return stacks.map((stack) => {
var _options$getSourceMap;
if (options.getUrlId) {
stack.file = options.getUrlId(stack.file);
}
const map = (_options$getSourceMap = options.getSourceMap) === null || _options$getSourceMap === void 0 ? void 0 : _options$getSourceMap.call(options, stack.file);
if (!map || typeof map !== "object" || !map.version) {
return shouldFilter(ignoreStackEntries, stack.file) ? null : stack;
}
const traceMap = new TraceMap(map);
const { line, column, source, name } = originalPositionFor(traceMap, stack);
let file = stack.file;
if (source) {
const fileUrl = stack.file.startsWith("file://") ? stack.file : `file://${stack.file}`;
const sourceRootUrl = map.sourceRoot ? new URL(map.sourceRoot, fileUrl) : fileUrl;
file = new URL(source, sourceRootUrl).pathname;
// if the file path is on windows, we need to remove the leading slash
if (file.match(/\/\w:\//)) {
file = file.slice(1);
}
}
if (shouldFilter(ignoreStackEntries, file)) {
return null;
}
if (line != null && column != null) {
return {
line,
column,
file,
method: name || stack.method
};
}
return stack;
}).filter((s) => s != null);
}
function shouldFilter(ignoreStackEntries, file) {
return ignoreStackEntries.some((p) => file.match(p));
}
function parseFFOrSafariStackTrace(stack) {
return stack.split("\n").map((line) => parseSingleFFOrSafariStack(line)).filter(notNullish);
}
function parseV8Stacktrace(stack) {
return stack.split("\n").map((line) => parseSingleV8Stack(line)).filter(notNullish);
}
function parseErrorStacktrace(e, options = {}) {
if (!e || isPrimitive(e)) {
return [];
}
if (e.stacks) {
return e.stacks;
}
const stackStr = e.stack || "";
// if "stack" property was overwritten at runtime to be something else,
// ignore the value because we don't know how to process it
let stackFrames = typeof stackStr === "string" ? parseStacktrace(stackStr, options) : [];
if (!stackFrames.length) {
const e_ = e;
if (e_.fileName != null && e_.lineNumber != null && e_.columnNumber != null) {
stackFrames = parseStacktrace(`${e_.fileName}:${e_.lineNumber}:${e_.columnNumber}`, options);
}
		if (e_.sourceURL != null && e_.line != null && e_.column != null) {
stackFrames = parseStacktrace(`${e_.sourceURL}:${e_.line}:${e_.column}`, options);
}
}
if (options.frameFilter) {
stackFrames = stackFrames.filter((f) => options.frameFilter(e, f) !== false);
}
e.stacks = stackFrames;
return stackFrames;
}
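// Usage sketch (hedged; `maps` is a hypothetical cache supplied by the caller, the option
// names come from the code above):
//   const frames = parseErrorStacktrace(error, {
//     getSourceMap: (file) => maps.get(file), // return a raw source map object, or undefined
//     frameFilter: (err, frame) => !frame.file.includes('/node_modules/'),
//   });
//   // frames is a ParsedStack[]; vitest-internal entries are dropped via stackIgnorePatterns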
export { TraceMap, createStackString, eachMapping, generatedPositionFor, originalPositionFor, parseErrorStacktrace, parseSingleFFOrSafariStack, parseSingleStack, parseSingleV8Stack, parseStacktrace };


@@ -0,0 +1,53 @@
import { CompareKeys } from '@vitest/pretty-format';
/**
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
type DiffOptionsColor = (arg: string) => string;
interface DiffOptions {
aAnnotation?: string;
aColor?: DiffOptionsColor;
aIndicator?: string;
bAnnotation?: string;
bColor?: DiffOptionsColor;
bIndicator?: string;
changeColor?: DiffOptionsColor;
changeLineTrailingSpaceColor?: DiffOptionsColor;
commonColor?: DiffOptionsColor;
commonIndicator?: string;
commonLineTrailingSpaceColor?: DiffOptionsColor;
contextLines?: number;
emptyFirstOrLastLinePlaceholder?: string;
expand?: boolean;
includeChangeCounts?: boolean;
omitAnnotationLines?: boolean;
patchColor?: DiffOptionsColor;
printBasicPrototype?: boolean;
maxDepth?: number;
compareKeys?: CompareKeys;
truncateThreshold?: number;
truncateAnnotation?: string;
truncateAnnotationColor?: DiffOptionsColor;
}
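// Illustrative options object (example values, not defaults asserted by this file):
//   const options: DiffOptions = {
//     aAnnotation: 'Expected',
//     bAnnotation: 'Received',
//     expand: false,
//     contextLines: 3,
//   };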
interface SerializedDiffOptions {
aAnnotation?: string;
aIndicator?: string;
bAnnotation?: string;
bIndicator?: string;
commonIndicator?: string;
contextLines?: number;
emptyFirstOrLastLinePlaceholder?: string;
expand?: boolean;
includeChangeCounts?: boolean;
omitAnnotationLines?: boolean;
printBasicPrototype?: boolean;
maxDepth?: number;
truncateThreshold?: number;
truncateAnnotation?: string;
}
export type { DiffOptions as D, SerializedDiffOptions as S, DiffOptionsColor as a };

frontend/node_modules/@vitest/utils/dist/types.d.ts (generated, vendored)

@@ -0,0 +1,53 @@
type Awaitable<T> = T | PromiseLike<T>;
type Nullable<T> = T | null | undefined;
type Arrayable<T> = T | Array<T>;
type ArgumentsType<T> = T extends (...args: infer U) => any ? U : never;
type MergeInsertions<T> = T extends object ? { [K in keyof T]: MergeInsertions<T[K]> } : T;
type DeepMerge<
	F,
	S
> = MergeInsertions<{ [K in keyof F | keyof S]: K extends keyof S & keyof F ? DeepMerge<F[K], S[K]> : K extends keyof S ? S[K] : K extends keyof F ? F[K] : never }>;
type MutableArray<T extends readonly any[]> = { -readonly [k in keyof T]: T[k] };
interface Constructable {
new (...args: any[]): any;
}
interface ParsedStack {
method: string;
file: string;
line: number;
column: number;
}
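// Example shape (illustrative values) as produced by parseSingleV8Stack / parseSingleFFOrSafariStack:
//   const frame: ParsedStack = { method: 'foo', file: '/srv/app/file.js', line: 10, column: 5 };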
interface SerializedError {
message: string;
stack?: string;
name?: string;
stacks?: ParsedStack[];
cause?: SerializedError;
[key: string]: unknown;
}
interface TestError extends SerializedError {
cause?: TestError;
diff?: string;
actual?: string;
expected?: string;
}
/**
* @deprecated Use `TestError` instead
*/
interface ErrorWithDiff {
message: string;
name?: string;
cause?: unknown;
stack?: string;
stacks?: ParsedStack[];
showDiff?: boolean;
actual?: any;
expected?: any;
operator?: string;
type?: string;
frame?: string;
diff?: string;
codeFrame?: string;
}
export type { ArgumentsType, Arrayable, Awaitable, Constructable, DeepMerge, ErrorWithDiff, MergeInsertions, MutableArray, Nullable, ParsedStack, SerializedError, TestError };

frontend/node_modules/@vitest/utils/dist/types.js (generated, vendored)

@@ -0,0 +1 @@