diff --git "a/demo/ort-phi3/dist/esm/ort.webgpu.min.js.map" "b/demo/ort-phi3/dist/esm/ort.webgpu.min.js.map" new file mode 100644--- /dev/null +++ "b/demo/ort-phi3/dist/esm/ort.webgpu.min.js.map" @@ -0,0 +1,7 @@ +{ + "version": 3, + "sources": ["../../../common/lib/backend-impl.ts", "../../../common/lib/backend.ts", "../../../common/lib/version.ts", "../../../common/lib/env-impl.ts", "../../../common/lib/env.ts", "../../../common/lib/tensor-conversion-impl.ts", "../../../common/lib/tensor-factory-impl.ts", "../../../common/lib/tensor-impl-type-mapping.ts", "../../../common/lib/tensor-utils-impl.ts", "../../../common/lib/tensor-impl.ts", "../../../common/lib/tensor.ts", "../../../common/lib/trace.ts", "../../../common/lib/inference-session-impl.ts", "../../../common/lib/inference-session.ts", "../../../common/lib/tensor-conversion.ts", "../../../common/lib/tensor-factory.ts", "../../../common/lib/onnx-model.ts", "../../../common/lib/onnx-value.ts", "../../../common/lib/training-session-impl.ts", "../../../common/lib/training-session.ts", "../../../common/lib/index.ts", "nodejs-ignore:fs", "nodejs-ignore:path", "../../lib/wasm/binding/ort-wasm-simd.jsep.js", "nodejs-ignore:worker_threads", "nodejs-ignore:perf_hooks", "nodejs-ignore:os", "../../lib/wasm/binding/ort-wasm-simd-threaded.jsep.js", "../../lib/wasm/binding/ort-wasm-threaded.worker.js", "../../lib/wasm/wasm-factory.ts", "../../lib/wasm/wasm-utils.ts", "../../lib/wasm/run-options.ts", "../../lib/wasm/session-options.ts", "../../lib/wasm/wasm-common.ts", "../../lib/wasm/wasm-utils-load-file.ts", "../../lib/wasm/jsep/log.ts", "../../lib/wasm/jsep/tensor-view.ts", "../../lib/wasm/jsep/webgpu/types.ts", "../../lib/wasm/jsep/webgpu/gpu-data-manager.ts", "../../lib/wasm/jsep/webgpu/attribute-with-cache-key.ts", "../../lib/wasm/jsep/util.ts", "../../lib/wasm/jsep/webgpu/ops/common.ts", "../../lib/wasm/jsep/webgpu/ops/transpose.ts", "../../lib/wasm/jsep/webgpu/ops/reduce-shared.ts", "../../lib/wasm/jsep/webgpu/ops/reduce.ts", "../../lib/wasm/jsep/webgpu/ops/argminmax.ts", "../../lib/wasm/jsep/webgpu/ops/concat.ts", "../../lib/wasm/jsep/webgpu/ops/attention.ts", "../../lib/wasm/jsep/webgpu/ops/batch-norm.ts", "../../lib/wasm/jsep/webgpu/ops/bias-add.ts", "../../lib/wasm/jsep/webgpu/ops/unary-op.ts", "../../lib/wasm/jsep/webgpu/ops/bias-split-gelu.ts", "../../lib/wasm/jsep/webgpu/ops/binary-op.ts", "../../lib/wasm/jsep/webgpu/ops/fuse-utils.ts", "../../lib/wasm/jsep/webgpu/ops/3rd-party/activation_util.ts", "../../lib/wasm/jsep/webgpu/ops/3rd-party/conv_util.ts", "../../lib/wasm/jsep/webgpu/ops/3rd-party/matmul_packed_webgpu.ts", "../../lib/wasm/jsep/webgpu/ops/3rd-party/conv2d_mm_webgpu.ts", "../../lib/wasm/jsep/webgpu/ops/conv-grouped.ts", "../../lib/wasm/jsep/webgpu/ops/matmul.ts", "../../lib/wasm/jsep/webgpu/ops/conv.ts", "../../lib/wasm/jsep/webgpu/ops/3rd-party/conv_backprop_mm_webgpu.ts", "../../lib/wasm/jsep/webgpu/ops/3rd-party/conv_backprop_webgpu.ts", "../../lib/wasm/jsep/webgpu/ops/conv-transpose.ts", "../../lib/wasm/jsep/webgpu/ops/cumsum.ts", "../../lib/wasm/jsep/webgpu/ops/depth-to-space.ts", "../../lib/wasm/jsep/webgpu/ops/einsum.ts", "../../lib/wasm/jsep/webgpu/ops/expand.ts", "../../lib/wasm/jsep/webgpu/ops/fast-gelu.ts", "../../lib/wasm/jsep/webgpu/ops/gather.ts", "../../lib/wasm/jsep/webgpu/ops/gather-elements.ts", "../../lib/wasm/jsep/webgpu/ops/gemm.ts", "../../lib/wasm/jsep/webgpu/ops/instance-norm.ts", "../../lib/wasm/jsep/webgpu/ops/layer-norm.ts", "../../lib/wasm/jsep/webgpu/ops/matmulnbits.ts", 
"../../lib/wasm/jsep/webgpu/ops/multihead-attentiion.ts", "../../lib/wasm/jsep/webgpu/ops/pad.ts", "../../lib/wasm/jsep/webgpu/ops/pool.ts", "../../lib/wasm/jsep/webgpu/ops/range.ts", "../../lib/wasm/jsep/webgpu/ops/resize.ts", "../../lib/wasm/jsep/webgpu/ops/rotary-embedding.ts", "../../lib/wasm/jsep/webgpu/ops/skip-layer-norm.ts", "../../lib/wasm/jsep/webgpu/ops/slice.ts", "../../lib/wasm/jsep/webgpu/ops/softmax.ts", "../../lib/wasm/jsep/webgpu/ops/split.ts", "../../lib/wasm/jsep/webgpu/ops/tile.ts", "../../lib/wasm/jsep/webgpu/ops/where.ts", "../../lib/wasm/jsep/webgpu/op-resolve-rules.ts", "../../lib/wasm/jsep/webgpu/program-manager.ts", "../../lib/wasm/jsep/backend-webgpu.ts", "../../lib/wasm/jsep/init.ts", "../../lib/wasm/wasm-core-impl.ts", "proxy-worker:./proxy-worker/main", "../../lib/wasm/proxy-wrapper.ts", "../../lib/wasm/session-handler-inference.ts", "../../lib/backend-wasm.ts", "../../lib/backend-wasm-inference.ts", "../../lib/index.ts", "../../lib/version.ts"], + "sourcesContent": ["// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Backend} from './backend.js';\nimport {InferenceSession} from './inference-session.js';\n\ninterface BackendInfo {\n backend: Backend;\n priority: number;\n\n initPromise?: Promise;\n initialized?: boolean;\n aborted?: boolean;\n error?: string;\n}\n\nconst backends: Map = new Map();\nconst backendsSortedByPriority: string[] = [];\n\n/**\n * Register a backend.\n *\n * @param name - the name as a key to lookup as an execution provider.\n * @param backend - the backend object.\n * @param priority - an integer indicating the priority of the backend. Higher number means higher priority. if priority\n * < 0, it will be considered as a 'beta' version and will not be used as a fallback backend by default.\n *\n * @ignore\n */\nexport const registerBackend = (name: string, backend: Backend, priority: number): void => {\n if (backend && typeof backend.init === 'function' && typeof backend.createInferenceSessionHandler === 'function') {\n const currentBackend = backends.get(name);\n if (currentBackend === undefined) {\n backends.set(name, {backend, priority});\n } else if (currentBackend.priority > priority) {\n // same name is already registered with a higher priority. 
skip registeration.\n return;\n } else if (currentBackend.priority === priority) {\n if (currentBackend.backend !== backend) {\n throw new Error(`cannot register backend \"${name}\" using priority ${priority}`);\n }\n }\n\n if (priority >= 0) {\n const i = backendsSortedByPriority.indexOf(name);\n if (i !== -1) {\n backendsSortedByPriority.splice(i, 1);\n }\n\n for (let i = 0; i < backendsSortedByPriority.length; i++) {\n if (backends.get(backendsSortedByPriority[i])!.priority <= priority) {\n backendsSortedByPriority.splice(i, 0, name);\n return;\n }\n }\n backendsSortedByPriority.push(name);\n }\n return;\n }\n\n throw new TypeError('not a valid backend');\n};\n\n/**\n * Try to resolve and initialize a backend.\n *\n * @param backendName - the name of the backend.\n * @returns the backend instance if resolved and initialized successfully, or an error message if failed.\n */\nconst tryResolveAndInitializeBackend = async(backendName: string): Promise => {\n const backendInfo = backends.get(backendName);\n if (!backendInfo) {\n return 'backend not found.';\n }\n\n if (backendInfo.initialized) {\n return backendInfo.backend;\n } else if (backendInfo.aborted) {\n return backendInfo.error!;\n } else {\n const isInitializing = !!backendInfo.initPromise;\n try {\n if (!isInitializing) {\n backendInfo.initPromise = backendInfo.backend.init(backendName);\n }\n await backendInfo.initPromise;\n backendInfo.initialized = true;\n return backendInfo.backend;\n } catch (e) {\n if (!isInitializing) {\n backendInfo.error = `${e}`;\n backendInfo.aborted = true;\n }\n return backendInfo.error!;\n } finally {\n delete backendInfo.initPromise;\n }\n }\n};\n\n/**\n * Resolve execution providers from the specific session options.\n *\n * @param options - the session options object.\n * @returns a promise that resolves to a tuple of an initialized backend instance and a session options object with\n * filtered EP list.\n *\n * @ignore\n */\nexport const resolveBackendAndExecutionProviders = async(options: InferenceSession.SessionOptions):\n Promise<[backend: Backend, options: InferenceSession.SessionOptions]> => {\n // extract backend hints from session options\n const eps = options.executionProviders || [];\n const backendHints = eps.map(i => typeof i === 'string' ? i : i.name);\n const backendNames = backendHints.length === 0 ? backendsSortedByPriority : backendHints;\n\n // try to resolve and initialize all requested backends\n let backend: Backend|undefined;\n const errors = [];\n const availableBackendNames = new Set();\n for (const backendName of backendNames) {\n const resolveResult = await tryResolveAndInitializeBackend(backendName);\n if (typeof resolveResult === 'string') {\n errors.push({name: backendName, err: resolveResult});\n } else {\n if (!backend) {\n backend = resolveResult;\n }\n if (backend === resolveResult) {\n availableBackendNames.add(backendName);\n }\n }\n }\n\n // if no backend is available, throw error.\n if (!backend) {\n throw new Error(`no available backend found. ERR: ${errors.map(e => `[${e.name}] ${e.err}`).join(', ')}`);\n }\n\n // for each explicitly requested backend, if it's not available, output warning message.\n for (const {name, err} of errors) {\n if (backendHints.includes(name)) {\n // eslint-disable-next-line no-console\n console.warn(`removing requested execution provider \"${\n name}\" from session options because it is not available: ${err}`);\n }\n }\n\n const filteredEps = eps.filter(i => availableBackendNames.has(typeof i === 'string' ? 
i : i.name));\n\n return [\n backend, new Proxy(options, {\n get: (target, prop) => {\n if (prop === 'executionProviders') {\n return filteredEps;\n }\n return Reflect.get(target, prop);\n }\n })\n ];\n };\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {TrainingSession} from './training-session.js';\n\n/**\n * @ignore\n */\nexport declare namespace SessionHandler {\n type FeedsType = {[name: string]: OnnxValue};\n type FetchesType = {[name: string]: OnnxValue | null};\n type ReturnType = {[name: string]: OnnxValue};\n}\n\n/**\n * Represents shared SessionHandler functionality\n *\n * @ignore\n */\ninterface SessionHandler {\n dispose(): Promise;\n\n readonly inputNames: readonly string[];\n readonly outputNames: readonly string[];\n}\n\n/**\n * Represent a handler instance of an inference session.\n *\n * @ignore\n */\nexport interface InferenceSessionHandler extends SessionHandler {\n startProfiling(): void;\n endProfiling(): void;\n\n run(feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n}\n\n/**\n * Represent a handler instance of a training inference session.\n *\n * @ignore\n */\nexport interface TrainingSessionHandler extends SessionHandler {\n readonly evalInputNames: readonly string[];\n readonly evalOutputNames: readonly string[];\n\n lazyResetGrad(): Promise;\n runTrainStep(\n feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n runOptimizerStep(options: InferenceSession.RunOptions): Promise;\n runEvalStep(\n feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType,\n options: InferenceSession.RunOptions): Promise;\n\n getParametersSize(trainableOnly: boolean): Promise;\n loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise;\n getContiguousParameters(trainableOnly: boolean): Promise;\n}\n\n/**\n * Represent a backend that provides implementation of model inferencing.\n *\n * @ignore\n */\nexport interface Backend {\n /**\n * Initialize the backend asynchronously. Should throw when failed.\n */\n init(backendName: string): Promise;\n\n createInferenceSessionHandler(uriOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise;\n\n createTrainingSessionHandler?\n (checkpointStateUriOrBuffer: TrainingSession.UriOrBuffer, trainModelUriOrBuffer: TrainingSession.UriOrBuffer,\n evalModelUriOrBuffer: TrainingSession.UriOrBuffer, optimizerModelUriOrBuffer: TrainingSession.UriOrBuffer,\n options: InferenceSession.SessionOptions): Promise;\n}\n\nexport {registerBackend} from './backend-impl.js';\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// This file is generated by /js/scripts/update-version.ts\n// Do not modify file content manually.\n\nexport const version = '1.18.0';\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from './env.js';\nimport {version} from './version.js';\n\ntype LogLevelType = Env['logLevel'];\n\nlet logLevelValue: Required = 'warning';\n\nexport const env: Env = {\n wasm: {} as Env.WebAssemblyFlags,\n webgl: {} as Env.WebGLFlags,\n webgpu: {} as Env.WebGpuFlags,\n versions: {common: version},\n\n set logLevel(value: LogLevelType) {\n if (value === undefined) {\n return;\n }\n if (typeof value !== 'string' || ['verbose', 'info', 'warning', 'error', 'fatal'].indexOf(value) === -1) {\n throw new Error(`Unsupported logging level: ${value}`);\n }\n logLevelValue = value;\n },\n get logLevel(): Required {\n return logLevelValue;\n },\n};\n\n// set property 'logLevel' so that they can be correctly transferred to worker by `postMessage()`.\nObject.defineProperty(env, 'logLevel', {enumerable: true});\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env as envImpl} from './env-impl.js';\n\nexport declare namespace Env {\n export type WasmPrefixOrFilePaths = string|{\n /* eslint-disable @typescript-eslint/naming-convention */\n 'ort-wasm.wasm'?: string;\n 'ort-wasm-threaded.wasm'?: string;\n 'ort-wasm-simd.wasm'?: string;\n 'ort-training-wasm-simd.wasm'?: string;\n 'ort-wasm-simd-threaded.wasm'?: string;\n /* eslint-enable @typescript-eslint/naming-convention */\n };\n export interface WebAssemblyFlags {\n /**\n * set or get number of thread(s). If omitted or set to 0, number of thread(s) will be determined by system. If set\n * to 1, no worker thread will be spawned.\n *\n * This setting is available only when WebAssembly multithread feature is available in current context.\n *\n * @defaultValue `0`\n */\n numThreads?: number;\n\n /**\n * set or get a boolean value indicating whether to enable SIMD. If set to false, SIMD will be forcely disabled.\n *\n * This setting is available only when WebAssembly SIMD feature is available in current context.\n *\n * @defaultValue `true`\n */\n simd?: boolean;\n\n /**\n * set or get a boolean value indicating whether to enable trace.\n *\n * @deprecated Use `env.trace` instead. If `env.trace` is set, this property will be ignored.\n * @defaultValue `false`\n */\n trace?: boolean;\n\n /**\n * Set or get a number specifying the timeout for initialization of WebAssembly backend, in milliseconds. A zero\n * value indicates no timeout is set.\n *\n * @defaultValue `0`\n */\n initTimeout?: number;\n\n /**\n * Set a custom URL prefix to the .wasm files or a set of overrides for each .wasm file. The override path should be\n * an absolute path.\n */\n wasmPaths?: WasmPrefixOrFilePaths;\n\n /**\n * Set or get a boolean value indicating whether to proxy the execution of main thread to a worker thread.\n *\n * @defaultValue `false`\n */\n proxy?: boolean;\n }\n\n export interface WebGLFlags {\n /**\n * Set or get the WebGL Context ID (webgl or webgl2).\n *\n * @defaultValue `'webgl2'`\n */\n contextId?: 'webgl'|'webgl2';\n /**\n * Get the WebGL rendering context.\n */\n readonly context: WebGLRenderingContext;\n /**\n * Set or get the maximum batch size for matmul. 
0 means to disable batching.\n *\n * @deprecated\n */\n matmulMaxBatchSize?: number;\n /**\n * Set or get the texture cache mode.\n *\n * @defaultValue `'full'`\n */\n textureCacheMode?: 'initializerOnly'|'full';\n /**\n * Set or get the packed texture mode\n *\n * @defaultValue `false`\n */\n pack?: boolean;\n /**\n * Set or get whether enable async download.\n *\n * @defaultValue `false`\n */\n async?: boolean;\n }\n\n export interface WebGpuProfilingDataV1TensorMetadata {\n dims: readonly number[];\n dataType: string;\n }\n export interface WebGpuProfilingDataV1 {\n version: 1;\n inputsMetadata: readonly WebGpuProfilingDataV1TensorMetadata[];\n outputsMetadata: readonly WebGpuProfilingDataV1TensorMetadata[];\n kernelId: number;\n kernelType: string;\n kernelName: string;\n programName: string;\n startTime: number;\n endTime: number;\n }\n\n export type WebGpuProfilingData = WebGpuProfilingDataV1;\n\n export interface WebGpuFlags {\n /**\n * Set or get the profiling mode.\n *\n * @deprecated Use `env.webgpu.profiling.mode` instead. If `env.webgpu.profiling.mode` is set, this property will be\n * ignored.\n */\n profilingMode?: 'off'|'default';\n /**\n * Set or get the profiling configuration.\n */\n profiling?: {\n /**\n * Set or get the profiling mode.\n *\n * @defaultValue `'off'`\n */\n mode?: 'off'|'default';\n\n /**\n * Set or get a callback function when a profiling data is received. If not set, the profiling data will be\n * printed to console.\n */\n ondata?: (data: WebGpuProfilingData) => void;\n };\n /**\n * Set or get the power preference.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as options for `navigator.gpu.requestAdapter()`.\n *\n * See {@link https://gpuweb.github.io/gpuweb/#dictdef-gpurequestadapteroptions} for more details.\n *\n * @defaultValue `undefined`\n */\n powerPreference?: 'low-power'|'high-performance';\n /**\n * Set or get the force fallback adapter flag.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as options for `navigator.gpu.requestAdapter()`.\n *\n * See {@link https://gpuweb.github.io/gpuweb/#dictdef-gpurequestadapteroptions} for more details.\n *\n * @defaultValue `undefined`\n */\n forceFallbackAdapter?: boolean;\n /**\n * Set or get the adapter for WebGPU.\n *\n * Setting this property only has effect before the first WebGPU inference session is created. The value will be\n * used as the GPU adapter for the underlying WebGPU backend to create GPU device.\n *\n * If this property is not set, it will be available to get after the first WebGPU inference session is created. 
The\n * value will be the GPU adapter that created by the underlying WebGPU backend.\n *\n * When use with TypeScript, the type of this property is `GPUAdapter` defined in \"@webgpu/types\".\n * Use `const adapter = env.webgpu.adapter as GPUAdapter;` in TypeScript to access this property with correct type.\n *\n * see comments on {@link Tensor.GpuBufferType}\n */\n adapter: unknown;\n /**\n * Get the device for WebGPU.\n *\n * This property is only available after the first WebGPU inference session is created.\n *\n * When use with TypeScript, the type of this property is `GPUDevice` defined in \"@webgpu/types\".\n * Use `const device = env.webgpu.device as GPUDevice;` in TypeScript to access this property with correct type.\n *\n * see comments on {@link Tensor.GpuBufferType} for more details about why not use types defined in \"@webgpu/types\".\n */\n readonly device: unknown;\n /**\n * Set or get whether validate input content.\n *\n * @defaultValue `false`\n */\n validateInputContent?: boolean;\n }\n}\n\nexport interface Env {\n /**\n * set the severity level for logging.\n *\n * @defaultValue `'warning'`\n */\n logLevel?: 'verbose'|'info'|'warning'|'error'|'fatal';\n\n /**\n * Indicate whether run in debug mode.\n *\n * @defaultValue `false`\n */\n debug?: boolean;\n\n /**\n * set or get a boolean value indicating whether to enable trace.\n *\n * @defaultValue `false`\n */\n trace?: boolean;\n\n /**\n * Get version of the current package.\n */\n readonly versions: {\n readonly common: string;\n readonly web?: string;\n readonly node?: string;\n // eslint-disable-next-line @typescript-eslint/naming-convention\n readonly 'react-native'?: string;\n };\n\n /**\n * Represent a set of flags for WebAssembly\n */\n readonly wasm: Env.WebAssemblyFlags;\n\n /**\n * Represent a set of flags for WebGL\n */\n readonly webgl: Env.WebGLFlags;\n\n /**\n * Represent a set of flags for WebGPU\n */\n readonly webgpu: Env.WebGpuFlags;\n\n [name: string]: unknown;\n}\n\n/**\n * Represent a set of flags as a global singleton.\n */\nexport const env: Env = envImpl;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorToDataUrlOptions, TensorToImageDataOptions} from './tensor-conversion.js';\nimport {Tensor} from './tensor.js';\n\n/**\n * implementation of Tensor.toDataURL()\n */\nexport const tensorToDataURL = (tensor: Tensor, options?: TensorToDataUrlOptions): string => {\n const canvas = typeof document !== 'undefined' ? document.createElement('canvas') : (new OffscreenCanvas(1, 1));\n canvas.width = tensor.dims[3];\n canvas.height = tensor.dims[2];\n const pixels2DContext =\n canvas.getContext('2d') as (CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D | null);\n\n if (pixels2DContext != null) {\n // Default values for height and width & format\n let width: number;\n let height: number;\n if (options?.tensorLayout !== undefined && options.tensorLayout === 'NHWC') {\n width = tensor.dims[2];\n height = tensor.dims[3];\n } else { // Default layout is NCWH\n width = tensor.dims[3];\n height = tensor.dims[2];\n }\n\n const inputformat = options?.format !== undefined ? 
options.format : 'RGB';\n\n const norm = options?.norm;\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n if (norm === undefined || norm.mean === undefined) {\n normMean = [255, 255, 255, 255];\n } else {\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean[0], norm.mean[1], norm.mean[2], 0];\n if (norm.mean[3] !== undefined) {\n normMean[3] = norm.mean[3];\n }\n }\n }\n if (norm === undefined || norm.bias === undefined) {\n normBias = [0, 0, 0, 0];\n } else {\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias[0], norm.bias[1], norm.bias[2], 0];\n if (norm.bias[3] !== undefined) {\n normBias[3] = norm.bias[3];\n }\n }\n }\n\n const stride = height * width;\n // Default pointer assignments\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGBA') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n aTensorPointer = stride * 3;\n } else if (inputformat === 'RGB') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n } else if (inputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n }\n\n for (let i = 0; i < height; i++) {\n for (let j = 0; j < width; j++) {\n const R = ((tensor.data[rTensorPointer++] as number) - normBias[0]) * normMean[0]; // R value\n const G = ((tensor.data[gTensorPointer++] as number) - normBias[1]) * normMean[1]; // G value\n const B = ((tensor.data[bTensorPointer++] as number) - normBias[2]) * normMean[2]; // B value\n const A = aTensorPointer === -1 ?\n 255 :\n ((tensor.data[aTensorPointer++] as number) - normBias[3]) * normMean[3]; // A value\n // eslint-disable-next-line @typescript-eslint/restrict-plus-operands\n pixels2DContext.fillStyle = 'rgba(' + R + ',' + G + ',' + B + ',' + A + ')';\n pixels2DContext.fillRect(j, i, 1, 1);\n }\n }\n if ('toDataURL' in canvas) {\n return canvas.toDataURL();\n } else {\n throw new Error('toDataURL is not supported');\n }\n } else {\n throw new Error('Can not access image data');\n }\n};\n\n/**\n * implementation of Tensor.toImageData()\n */\nexport const tensorToImageData = (tensor: Tensor, options?: TensorToImageDataOptions): ImageData => {\n const pixels2DContext = typeof document !== 'undefined' ?\n document.createElement('canvas').getContext('2d') :\n new OffscreenCanvas(1, 1).getContext('2d') as OffscreenCanvasRenderingContext2D;\n let image: ImageData;\n if (pixels2DContext != null) {\n // Default values for height and width & format\n let width: number;\n let height: number;\n let channels: number;\n if (options?.tensorLayout !== undefined && options.tensorLayout === 'NHWC') {\n width = tensor.dims[2];\n height = tensor.dims[1];\n channels = tensor.dims[3];\n } else { // Default layout is NCWH\n width = tensor.dims[3];\n height = tensor.dims[2];\n channels = tensor.dims[1];\n }\n const inputformat = options !== undefined ? (options.format !== undefined ? 
options.format : 'RGB') : 'RGB';\n\n const norm = options?.norm;\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n if (norm === undefined || norm.mean === undefined) {\n normMean = [255, 255, 255, 255];\n } else {\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean[0], norm.mean[1], norm.mean[2], 255];\n if (norm.mean[3] !== undefined) {\n normMean[3] = norm.mean[3];\n }\n }\n }\n if (norm === undefined || norm.bias === undefined) {\n normBias = [0, 0, 0, 0];\n } else {\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias[0], norm.bias[1], norm.bias[2], 0];\n if (norm.bias[3] !== undefined) {\n normBias[3] = norm.bias[3];\n }\n }\n }\n\n const stride = height * width;\n if (options !== undefined) {\n if (options.format !== undefined && (channels === 4 && options.format !== 'RGBA') ||\n (channels === 3 && (options.format !== 'RGB' && options.format !== 'BGR'))) {\n throw new Error('Tensor format doesn\\'t match input tensor dims');\n }\n }\n\n // Default pointer assignments\n const step = 4;\n let rImagePointer = 0, gImagePointer = 1, bImagePointer = 2, aImagePointer = 3;\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGBA') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n aTensorPointer = stride * 3;\n } else if (inputformat === 'RGB') {\n rTensorPointer = 0;\n gTensorPointer = stride;\n bTensorPointer = stride * 2;\n } else if (inputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n }\n\n image = pixels2DContext.createImageData(width, height);\n\n for (let i = 0; i < height * width;\n rImagePointer += step, gImagePointer += step, bImagePointer += step, aImagePointer += step, i++) {\n image.data[rImagePointer] = ((tensor.data[rTensorPointer++] as number) - normBias[0]) * normMean[0]; // R value\n image.data[gImagePointer] = ((tensor.data[gTensorPointer++] as number) - normBias[1]) * normMean[1]; // G value\n image.data[bImagePointer] = ((tensor.data[bTensorPointer++] as number) - normBias[2]) * normMean[2]; // B value\n image.data[aImagePointer] = aTensorPointer === -1 ?\n 255 :\n ((tensor.data[aTensorPointer++] as number) - normBias[3]) * normMean[3]; // A value\n }\n\n } else {\n throw new Error('Can not access image data');\n }\n return image;\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {OptionsDimensions, OptionsFormat, OptionsNormalizationParameters, OptionsTensorFormat, OptionsTensorLayout, TensorFromGpuBufferOptions, TensorFromImageBitmapOptions, TensorFromImageDataOptions, TensorFromImageElementOptions, TensorFromTextureOptions, TensorFromUrlOptions} from './tensor-factory.js';\nimport {Tensor} from './tensor-impl.js';\nimport {Tensor as TensorInterface} from './tensor.js';\n\ninterface BufferToTensorOptions extends OptionsDimensions, OptionsTensorLayout, OptionsNormalizationParameters,\n OptionsFormat, OptionsTensorFormat {}\n\n/**\n * Create a new tensor object from image object\n *\n * @param buffer - Extracted image buffer data - assuming RGBA format\n * @param imageFormat - input image configuration - required configurations height, width, format\n * @param tensorFormat - output tensor configuration - Default is RGB format\n */\nexport const bufferToTensor = (buffer: Uint8ClampedArray|undefined, options: BufferToTensorOptions): Tensor => {\n if (buffer === undefined) {\n throw new Error('Image buffer must be defined');\n }\n if (options.height === undefined || options.width === undefined) {\n throw new Error('Image height and width must be defined');\n }\n if (options.tensorLayout === 'NHWC') {\n throw new Error('NHWC Tensor layout is not supported yet');\n }\n\n const {height, width} = options;\n\n const norm = options.norm ?? {mean: 255, bias: 0};\n let normMean: [number, number, number, number];\n let normBias: [number, number, number, number];\n\n if (typeof (norm.mean) === 'number') {\n normMean = [norm.mean, norm.mean, norm.mean, norm.mean];\n } else {\n normMean = [norm.mean![0], norm.mean![1], norm.mean![2], norm.mean![3] ?? 255];\n }\n\n if (typeof (norm.bias) === 'number') {\n normBias = [norm.bias, norm.bias, norm.bias, norm.bias];\n } else {\n normBias = [norm.bias![0], norm.bias![1], norm.bias![2], norm.bias![3] ?? 0];\n }\n\n const inputformat = options.format !== undefined ? options.format : 'RGBA';\n // default value is RGBA since imagedata and HTMLImageElement uses it\n\n const outputformat =\n options.tensorFormat !== undefined ? (options.tensorFormat !== undefined ? options.tensorFormat : 'RGB') : 'RGB';\n const stride = height * width;\n const float32Data = outputformat === 'RGBA' ? 
new Float32Array(stride * 4) : new Float32Array(stride * 3);\n\n // Default pointer assignments\n let step = 4, rImagePointer = 0, gImagePointer = 1, bImagePointer = 2, aImagePointer = 3;\n let rTensorPointer = 0, gTensorPointer = stride, bTensorPointer = stride * 2, aTensorPointer = -1;\n\n // Updating the pointer assignments based on the input image format\n if (inputformat === 'RGB') {\n step = 3;\n rImagePointer = 0;\n gImagePointer = 1;\n bImagePointer = 2;\n aImagePointer = -1;\n }\n\n // Updating the pointer assignments based on the output tensor format\n if (outputformat === 'RGBA') {\n aTensorPointer = stride * 3;\n } else if (outputformat === 'RBG') {\n rTensorPointer = 0;\n bTensorPointer = stride;\n gTensorPointer = stride * 2;\n } else if (outputformat === 'BGR') {\n bTensorPointer = 0;\n gTensorPointer = stride;\n rTensorPointer = stride * 2;\n }\n\n for (let i = 0; i < stride;\n i++, rImagePointer += step, bImagePointer += step, gImagePointer += step, aImagePointer += step) {\n float32Data[rTensorPointer++] = (buffer[rImagePointer] + normBias[0]) / normMean[0];\n float32Data[gTensorPointer++] = (buffer[gImagePointer] + normBias[1]) / normMean[1];\n float32Data[bTensorPointer++] = (buffer[bImagePointer] + normBias[2]) / normMean[2];\n if (aTensorPointer !== -1 && aImagePointer !== -1) {\n float32Data[aTensorPointer++] = (buffer[aImagePointer] + normBias[3]) / normMean[3];\n }\n }\n\n // Float32Array -> ort.Tensor\n const outputTensor = outputformat === 'RGBA' ? new Tensor('float32', float32Data, [1, 4, height, width]) :\n new Tensor('float32', float32Data, [1, 3, height, width]);\n return outputTensor;\n};\n\n/**\n * implementation of Tensor.fromImage().\n */\nexport const tensorFromImage = async(\n image: ImageData|HTMLImageElement|ImageBitmap|string,\n options?: TensorFromImageDataOptions|TensorFromImageElementOptions|TensorFromImageBitmapOptions|\n TensorFromUrlOptions): Promise => {\n // checking the type of image object\n const isHTMLImageEle = typeof (HTMLImageElement) !== 'undefined' && image instanceof HTMLImageElement;\n const isImageDataEle = typeof (ImageData) !== 'undefined' && image instanceof ImageData;\n const isImageBitmap = typeof (ImageBitmap) !== 'undefined' && image instanceof ImageBitmap;\n const isString = typeof image === 'string';\n\n let data: Uint8ClampedArray|undefined;\n let bufferToTensorOptions: BufferToTensorOptions = options ?? 
{};\n\n const createCanvas = () => {\n if (typeof document !== 'undefined') {\n return document.createElement('canvas');\n } else if (typeof OffscreenCanvas !== 'undefined') {\n return new OffscreenCanvas(1, 1);\n } else {\n throw new Error('Canvas is not supported');\n }\n };\n const createCanvasContext = (canvas: HTMLCanvasElement|OffscreenCanvas) => {\n if (canvas instanceof HTMLCanvasElement) {\n return canvas.getContext('2d');\n } else if (canvas instanceof OffscreenCanvas) {\n return canvas.getContext('2d') as OffscreenCanvasRenderingContext2D;\n } else {\n return null;\n }\n };\n // filling and checking image configuration options\n if (isHTMLImageEle) {\n // HTMLImageElement - image object - format is RGBA by default\n const canvas = createCanvas();\n canvas.width = image.width;\n canvas.height = image.height;\n const pixels2DContext = createCanvasContext(canvas);\n\n if (pixels2DContext != null) {\n let height = image.height;\n let width = image.width;\n if (options !== undefined && options.resizedHeight !== undefined && options.resizedWidth !== undefined) {\n height = options.resizedHeight;\n width = options.resizedWidth;\n }\n\n if (options !== undefined) {\n bufferToTensorOptions = options;\n if (options.tensorFormat !== undefined) {\n throw new Error('Image input config format must be RGBA for HTMLImageElement');\n } else {\n bufferToTensorOptions.tensorFormat = 'RGBA';\n }\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n } else {\n bufferToTensorOptions.tensorFormat = 'RGBA';\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n }\n\n pixels2DContext.drawImage(image, 0, 0);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n } else {\n throw new Error('Can not access image data');\n }\n } else if (isImageDataEle) {\n let height: number;\n let width: number;\n\n if (options !== undefined && options.resizedWidth !== undefined && options.resizedHeight !== undefined) {\n height = options.resizedHeight;\n width = options.resizedWidth;\n } else {\n height = image.height;\n width = image.width;\n }\n\n if (options !== undefined) {\n bufferToTensorOptions = options;\n }\n bufferToTensorOptions.format = 'RGBA';\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n\n if (options !== undefined) {\n const tempCanvas = createCanvas();\n\n tempCanvas.width = width;\n tempCanvas.height = height;\n\n const pixels2DContext = createCanvasContext(tempCanvas);\n\n if (pixels2DContext != null) {\n pixels2DContext.putImageData(image, 0, 0);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n } else {\n throw new Error('Can not access image data');\n }\n } else {\n data = image.data;\n }\n } else if (isImageBitmap) {\n // ImageBitmap - image object - format must be provided by user\n if (options === undefined) {\n throw new Error('Please provide image config with format for Imagebitmap');\n }\n\n const canvas = createCanvas();\n canvas.width = image.width;\n canvas.height = image.height;\n const pixels2DContext = createCanvasContext(canvas);\n\n if (pixels2DContext != null) {\n const height = image.height;\n const width = image.width;\n pixels2DContext.drawImage(image, 0, 0, width, height);\n data = pixels2DContext.getImageData(0, 0, width, height).data;\n bufferToTensorOptions.height = height;\n bufferToTensorOptions.width = width;\n return bufferToTensor(data, bufferToTensorOptions);\n } else {\n throw new Error('Can not access image data');\n }\n } else if (isString) {\n return 
new Promise((resolve, reject) => {\n const canvas = createCanvas();\n const context = createCanvasContext(canvas);\n if (!image || !context) {\n return reject();\n }\n const newImage = new Image();\n newImage.crossOrigin = 'Anonymous';\n newImage.src = image;\n newImage.onload = () => {\n canvas.width = newImage.width;\n canvas.height = newImage.height;\n context.drawImage(newImage, 0, 0, canvas.width, canvas.height);\n const img = context.getImageData(0, 0, canvas.width, canvas.height);\n\n bufferToTensorOptions.height = canvas.height;\n bufferToTensorOptions.width = canvas.width;\n resolve(bufferToTensor(img.data, bufferToTensorOptions));\n };\n });\n } else {\n throw new Error('Input data provided is not supported - aborted tensor creation');\n }\n\n if (data !== undefined) {\n return bufferToTensor(data, bufferToTensorOptions);\n } else {\n throw new Error('Input data provided is not supported - aborted tensor creation');\n }\n};\n\n/**\n * implementation of Tensor.fromTexture().\n */\nexport const tensorFromTexture = (\n texture: TensorInterface.TextureType, options: TensorFromTextureOptions): Tensor => {\n const {width, height, download, dispose} = options;\n // Always assume RGBAF32. TODO: support different texture format\n const dims = [1, height, width, 4];\n return new Tensor({location: 'texture', type: 'float32', texture, dims, download, dispose});\n};\n\n/**\n * implementation of Tensor.fromGpuBuffer().\n */\nexport const tensorFromGpuBuffer = (\n gpuBuffer: TensorInterface.GpuBufferType, options: TensorFromGpuBufferOptions): Tensor => {\n const {dataType, dims, download, dispose} = options;\n return new Tensor({location: 'gpu-buffer', type: dataType ?? 'float32', gpuBuffer, dims, download, dispose});\n};\n\n/**\n * implementation of Tensor.fromPinnedBuffer().\n */\nexport const tensorFromPinnedBuffer = (\n type: T, buffer: TensorInterface.DataTypeMap[T], dims?: readonly number[]): Tensor =>\n new Tensor({location: 'cpu-pinned', type, data: buffer, dims: dims ?? [buffer.length]});\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from './tensor.js';\n\nexport type SupportedTypedArrayConstructors = Float32ArrayConstructor|Uint8ArrayConstructor|Int8ArrayConstructor|\n Uint16ArrayConstructor|Int16ArrayConstructor|Int32ArrayConstructor|BigInt64ArrayConstructor|Uint8ArrayConstructor|\n Float64ArrayConstructor|Uint32ArrayConstructor|BigUint64ArrayConstructor;\nexport type SupportedTypedArray = InstanceType;\n\n// a runtime map that maps type string to TypedArray constructor. Should match Tensor.DataTypeMap.\nexport const NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP = new Map([\n ['float32', Float32Array],\n ['uint8', Uint8Array],\n ['int8', Int8Array],\n ['uint16', Uint16Array],\n ['int16', Int16Array],\n ['int32', Int32Array],\n ['bool', Uint8Array],\n ['float64', Float64Array],\n ['uint32', Uint32Array],\n]);\n\n// a runtime map that maps type string to TypedArray constructor. 
Should match Tensor.DataTypeMap.\nexport const NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP = new Map([\n [Float32Array, 'float32'],\n [Uint8Array, 'uint8'],\n [Int8Array, 'int8'],\n [Uint16Array, 'uint16'],\n [Int16Array, 'int16'],\n [Int32Array, 'int32'],\n [Float64Array, 'float64'],\n [Uint32Array, 'uint32'],\n]);\n\n// a dummy type declaration for Float16Array in case any polyfill is available.\ndeclare global {\n // eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-explicit-any\n const Float16Array: any;\n}\n\n// the following code allows delaying execution of BigInt/Float16Array checking. This allows lazy initialization for\n// NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP and NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP, which allows BigInt/Float16Array\n// polyfill if available.\nlet isTypedArrayChecked = false;\nexport const checkTypedArray = () => {\n if (!isTypedArrayChecked) {\n isTypedArrayChecked = true;\n const isBigInt64ArrayAvailable = typeof BigInt64Array !== 'undefined' && BigInt64Array.from;\n const isBigUint64ArrayAvailable = typeof BigUint64Array !== 'undefined' && BigUint64Array.from;\n const isFloat16ArrayAvailable = typeof Float16Array !== 'undefined' && Float16Array.from;\n\n if (isBigInt64ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('int64', BigInt64Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(BigInt64Array, 'int64');\n }\n if (isBigUint64ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('uint64', BigUint64Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(BigUint64Array, 'uint64');\n }\n if (isFloat16ArrayAvailable) {\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('float16', Float16Array);\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.set(Float16Array, 'float16');\n } else {\n // if Float16Array is not available, use 'Uint16Array' to store the data.\n NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.set('float16', Uint16Array);\n }\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {CpuPinnedConstructorParameters, GpuBufferConstructorParameters, TextureConstructorParameters} from './tensor-factory.js';\nimport {Tensor} from './tensor-impl.js';\n\n/**\n * calculate size from dims.\n *\n * @param dims the dims array. 
May be an illegal input.\n */\nexport const calculateSize = (dims: readonly unknown[]): number => {\n let size = 1;\n for (let i = 0; i < dims.length; i++) {\n const dim = dims[i];\n if (typeof dim !== 'number' || !Number.isSafeInteger(dim)) {\n throw new TypeError(`dims[${i}] must be an integer, got: ${dim}`);\n }\n if (dim < 0) {\n throw new RangeError(`dims[${i}] must be a non-negative integer, got: ${dim}`);\n }\n size *= dim;\n }\n return size;\n};\n\n/**\n * implementation of Tensor.reshape()\n */\nexport const tensorReshape = (tensor: Tensor, dims: readonly number[]): Tensor => {\n switch (tensor.location) {\n case 'cpu':\n return new Tensor(tensor.type, tensor.data, dims);\n case 'cpu-pinned':\n return new Tensor({\n location: 'cpu-pinned',\n data: tensor.data as CpuPinnedConstructorParameters['data'],\n type: tensor.type as CpuPinnedConstructorParameters['type'],\n dims,\n });\n case 'texture':\n return new Tensor({\n location: 'texture',\n texture: tensor.texture,\n type: tensor.type as TextureConstructorParameters['type'],\n dims,\n });\n case 'gpu-buffer':\n return new Tensor({\n location: 'gpu-buffer',\n gpuBuffer: tensor.gpuBuffer,\n type: tensor.type as GpuBufferConstructorParameters['type'],\n dims,\n });\n default:\n throw new Error(`tensorReshape: tensor location ${tensor.location} is not supported`);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {tensorToDataURL, tensorToImageData} from './tensor-conversion-impl.js';\nimport {TensorToDataUrlOptions, TensorToImageDataOptions} from './tensor-conversion.js';\nimport {tensorFromGpuBuffer, tensorFromImage, tensorFromPinnedBuffer, tensorFromTexture} from './tensor-factory-impl.js';\nimport {CpuPinnedConstructorParameters, GpuBufferConstructorParameters, TensorFromGpuBufferOptions, TensorFromImageBitmapOptions, TensorFromImageDataOptions, TensorFromImageElementOptions, TensorFromTextureOptions, TensorFromUrlOptions, TextureConstructorParameters} from './tensor-factory.js';\nimport {checkTypedArray, NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP, NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP, SupportedTypedArray, SupportedTypedArrayConstructors} from './tensor-impl-type-mapping.js';\nimport {calculateSize, tensorReshape} from './tensor-utils-impl.js';\nimport {Tensor as TensorInterface} from './tensor.js';\n\n// type aliases for those exported from Tensor interface\n\ntype TensorType = TensorInterface.Type;\ntype TensorDataType = TensorInterface.DataType;\ntype TensorDataLocation = TensorInterface.DataLocation;\ntype TensorTextureType = TensorInterface.TextureType;\ntype TensorGpuBufferType = TensorInterface.GpuBufferType;\n\n/**\n * the implementation of Tensor interface.\n *\n * @ignore\n */\nexport class Tensor implements TensorInterface {\n // #region constructors\n\n /**\n * Construct a new CPU tensor object from the given type, data and dims.\n */\n constructor(\n type: TensorType, data: TensorDataType|readonly string[]|readonly number[]|readonly boolean[],\n dims?: readonly number[]);\n /**\n * Construct a new CPU tensor object from the given data and dims. 
Type is inferred from data.\n */\n constructor(data: TensorDataType|readonly string[]|readonly boolean[], dims?: readonly number[]);\n /**\n * Construct a new tensor object from the pinned CPU data with the given type and dims.\n *\n * Tensor's location will be set to 'cpu-pinned'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: CpuPinnedConstructorParameters);\n /**\n * Construct a new tensor object from the WebGL texture with the given type and dims.\n *\n * Tensor's location will be set to 'texture'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: TextureConstructorParameters);\n /**\n * Construct a new tensor object from the WebGPU buffer with the given type and dims.\n *\n * Tensor's location will be set to 'gpu-buffer'.\n *\n * @param params - Specify the parameters to construct the tensor.\n */\n constructor(params: GpuBufferConstructorParameters);\n\n /**\n * implementation.\n */\n constructor(\n arg0: TensorType|TensorDataType|readonly string[]|readonly boolean[]|CpuPinnedConstructorParameters|\n TextureConstructorParameters|GpuBufferConstructorParameters,\n arg1?: TensorDataType|readonly number[]|readonly string[]|readonly boolean[], arg2?: readonly number[]) {\n // perform one-time check for BigInt/Float16Array support\n checkTypedArray();\n\n let type: TensorType;\n let dims: readonly number[];\n\n if (typeof arg0 === 'object' && 'location' in arg0) {\n //\n // constructing tensor from specific location\n //\n this.dataLocation = arg0.location;\n type = arg0.type;\n dims = arg0.dims;\n switch (arg0.location) {\n case 'cpu-pinned': {\n const expectedTypedArrayConstructor = NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.get(type);\n if (!expectedTypedArrayConstructor) {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from pinned buffer`);\n }\n if (!(arg0.data instanceof expectedTypedArrayConstructor)) {\n throw new TypeError(`buffer should be of type ${expectedTypedArrayConstructor.name}`);\n }\n this.cpuData = arg0.data;\n break;\n }\n case 'texture': {\n if (type !== 'float32') {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from texture`);\n }\n this.gpuTextureData = arg0.texture;\n this.downloader = arg0.download;\n this.disposer = arg0.dispose;\n break;\n }\n case 'gpu-buffer': {\n if ((type !== 'float32' && type !== 'float16' && type !== 'int32' && type !== 'int64' && type !== 'uint32' &&\n type !== 'uint8' && type !== 'bool')) {\n throw new TypeError(`unsupported type \"${type}\" to create tensor from gpu buffer`);\n }\n this.gpuBufferData = arg0.gpuBuffer;\n this.downloader = arg0.download;\n this.disposer = arg0.dispose;\n break;\n }\n default:\n throw new Error(`Tensor constructor: unsupported location '${this.dataLocation}'`);\n }\n } else {\n //\n // constructing tensor of location 'cpu'\n //\n let data: TensorDataType;\n let maybeDims: typeof arg1|typeof arg2;\n // check whether arg0 is type or data\n if (typeof arg0 === 'string') {\n //\n // Override: constructor(type, data, ...)\n //\n type = arg0;\n maybeDims = arg2;\n if (arg0 === 'string') {\n // string tensor\n if (!Array.isArray(arg1)) {\n throw new TypeError('A string tensor\\'s data must be a string array.');\n }\n // we don't check whether every element in the array is string; this is too slow. 
we assume it's correct and\n // error will be populated at inference\n data = arg1;\n } else {\n // numeric tensor\n const typedArrayConstructor = NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP.get(arg0);\n if (typedArrayConstructor === undefined) {\n throw new TypeError(`Unsupported tensor type: ${arg0}.`);\n }\n if (Array.isArray(arg1)) {\n if (arg0 === 'float16' && typedArrayConstructor === Uint16Array) {\n // When no Float16Array polyfill is used, we cannot create 'float16' tensor from number array.\n //\n // Throw error here because when user try to use number array as data,\n // e.g. new Tensor('float16', [1, 2, 3, 4], dims)), it will actually call\n // Uint16Array.from(arg1) which generates wrong data.\n throw new TypeError(\n 'Creating a float16 tensor from number array is not supported. Please use Uint16Array as data.');\n } else if (arg0 === 'uint64' || arg0 === 'int64') {\n // use 'as any' here because:\n // 1. TypeScript's check on type of 'Array.isArray()' does not work with readonly arrays.\n // see https://github.com/microsoft/TypeScript/issues/17002\n // 2. TypeScript's check on union type of '(BigInt64ArrayConstructor|BigUint64ArrayConstructor).from()'\n // does not accept parameter mapFn.\n // 3. parameters of 'SupportedTypedArrayConstructors.from()' does not match the requirement of the union\n // type.\n\n // assume 'arg1' is of type \"readonly number[]|readonly bigint[]\" here.\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = (typedArrayConstructor as any).from(arg1, BigInt);\n } else {\n // assume 'arg1' is of type \"readonly number[]\" here.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = (typedArrayConstructor as any).from(arg1);\n }\n } else if (arg1 instanceof typedArrayConstructor) {\n data = arg1;\n } else {\n throw new TypeError(`A ${type} tensor's data must be type of ${typedArrayConstructor}`);\n }\n }\n } else {\n //\n // Override: constructor(data, ...)\n //\n maybeDims = arg1;\n if (Array.isArray(arg0)) {\n // only boolean[] and string[] is supported\n if (arg0.length === 0) {\n throw new TypeError('Tensor type cannot be inferred from an empty array.');\n }\n const firstElementType = typeof arg0[0];\n if (firstElementType === 'string') {\n type = 'string';\n data = arg0;\n } else if (firstElementType === 'boolean') {\n type = 'bool';\n // 'arg0' is of type 'boolean[]'. Uint8Array.from(boolean[]) actually works, but typescript thinks this is\n // wrong type. 
We use 'as any' to make it happy.\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n data = Uint8Array.from(arg0 as any[]);\n } else {\n throw new TypeError(`Invalid element type of data array: ${firstElementType}.`);\n }\n } else {\n // get tensor type from TypedArray\n const mappedType =\n NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP.get(arg0.constructor as SupportedTypedArrayConstructors);\n if (mappedType === undefined) {\n throw new TypeError(`Unsupported type for tensor data: ${arg0.constructor}.`);\n }\n type = mappedType;\n data = arg0 as SupportedTypedArray;\n }\n }\n\n // type and data is processed, now processing dims\n if (maybeDims === undefined) {\n // assume 1-D tensor if dims omitted\n maybeDims = [data.length];\n } else if (!Array.isArray(maybeDims)) {\n throw new TypeError('A tensor\\'s dims must be a number array');\n }\n dims = maybeDims as readonly number[];\n\n this.cpuData = data;\n this.dataLocation = 'cpu';\n }\n\n // perform check on dims\n const size = calculateSize(dims);\n // if data is on CPU, check whether data length matches tensor size\n if (this.cpuData && size !== this.cpuData.length) {\n throw new Error(`Tensor's size(${size}) does not match data length(${this.cpuData.length}).`);\n }\n\n this.type = type;\n this.dims = dims;\n this.size = size;\n }\n // #endregion\n\n // #region factory\n static async fromImage(\n image: ImageData|HTMLImageElement|ImageBitmap|string,\n options?: TensorFromImageDataOptions|TensorFromImageElementOptions|TensorFromImageBitmapOptions|\n TensorFromUrlOptions): Promise {\n return tensorFromImage(image, options);\n }\n\n static fromTexture(\n texture: TensorTextureType, options: TensorFromTextureOptions): TensorInterface {\n return tensorFromTexture(texture, options);\n }\n\n static fromGpuBuffer(\n gpuBuffer: TensorGpuBufferType, options: TensorFromGpuBufferOptions): TensorInterface {\n return tensorFromGpuBuffer(gpuBuffer, options);\n }\n\n static fromPinnedBuffer(\n type: T, buffer: TensorInterface.DataTypeMap[T], dims?: readonly number[]): Tensor {\n return tensorFromPinnedBuffer(type, buffer, dims);\n }\n\n // #endregion\n\n // #region conversions\n toDataURL(options?: TensorToDataUrlOptions): string {\n return tensorToDataURL(this, options);\n }\n\n toImageData(options?: TensorToImageDataOptions): ImageData {\n return tensorToImageData(this, options);\n }\n // #endregion\n\n // #region public fields\n readonly dims: readonly number[];\n readonly type: TensorType;\n readonly size: number;\n // #endregion\n\n // #region private fields\n\n /**\n * stores the location of the data.\n */\n private dataLocation: TensorDataLocation;\n\n /**\n * stores the data on CPU, if location is 'cpu' or 'cpu-pinned'. otherwise empty.\n */\n private cpuData?: TensorDataType;\n\n /**\n * stores the underlying texture when location is 'texture'. otherwise empty.\n */\n private gpuTextureData?: TensorTextureType;\n\n /**\n * stores the underlying GPU buffer when location is 'gpu-buffer'. 
otherwise empty.\n */\n private gpuBufferData?: TensorGpuBufferType;\n\n /**\n * stores an optional downloader function to download data from GPU to CPU.\n */\n private downloader?(): Promise;\n\n /**\n * a flag indicating whether the data is being downloaded from GPU to CPU.\n */\n private isDownloading?: boolean;\n\n /**\n * stores an optional disposer function to dispose the underlying data.\n */\n private disposer?(): void;\n // #endregion\n\n // #region properties\n get data(): TensorDataType {\n this.ensureValid();\n if (!this.cpuData) {\n throw new Error(\n 'The data is not on CPU. Use `getData()` to download GPU data to CPU, ' +\n 'or use `texture` or `gpuBuffer` property to access the GPU data directly.');\n }\n return this.cpuData;\n }\n\n get location(): TensorDataLocation {\n return this.dataLocation;\n }\n\n get texture(): TensorTextureType {\n this.ensureValid();\n if (!this.gpuTextureData) {\n throw new Error('The data is not stored as a WebGL texture.');\n }\n return this.gpuTextureData;\n }\n\n get gpuBuffer(): TensorGpuBufferType {\n this.ensureValid();\n if (!this.gpuBufferData) {\n throw new Error('The data is not stored as a WebGPU buffer.');\n }\n return this.gpuBufferData;\n }\n // #endregion\n\n // #region methods\n\n async getData(releaseData?: boolean): Promise {\n this.ensureValid();\n switch (this.dataLocation) {\n case 'cpu':\n case 'cpu-pinned':\n return this.data;\n case 'texture':\n case 'gpu-buffer': {\n if (!this.downloader) {\n throw new Error('The current tensor is not created with a specified data downloader.');\n }\n if (this.isDownloading) {\n throw new Error('The current tensor is being downloaded.');\n }\n try {\n this.isDownloading = true;\n const data = await this.downloader();\n this.downloader = undefined;\n this.dataLocation = 'cpu';\n this.cpuData = data;\n\n if (releaseData && this.disposer) {\n this.disposer();\n this.disposer = undefined;\n }\n\n return data;\n\n } finally {\n this.isDownloading = false;\n }\n }\n default:\n throw new Error(`cannot get data from location: ${this.dataLocation}`);\n }\n }\n\n dispose(): void {\n if (this.isDownloading) {\n throw new Error('The current tensor is being downloaded.');\n }\n\n if (this.disposer) {\n this.disposer();\n this.disposer = undefined;\n }\n this.cpuData = undefined;\n this.gpuTextureData = undefined;\n this.gpuBufferData = undefined;\n this.downloader = undefined;\n this.isDownloading = undefined;\n\n this.dataLocation = 'none';\n }\n\n // #endregion\n\n // #region tensor utilities\n private ensureValid(): void {\n if (this.dataLocation === 'none') {\n throw new Error('The tensor is disposed.');\n }\n }\n\n reshape(dims: readonly number[]): TensorInterface {\n this.ensureValid();\n if (this.downloader || this.disposer) {\n throw new Error('Cannot reshape a tensor that owns GPU resource.');\n }\n return tensorReshape(this, dims);\n }\n // #endregion\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorFactory} from './tensor-factory.js';\nimport {Tensor as TensorImpl} from './tensor-impl.js';\nimport {TypedTensorUtils} from './tensor-utils.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\n/**\n * represent a basic tensor with specified dimensions and data type.\n */\ninterface TypedTensorBase {\n /**\n * Get the dimensions of the tensor.\n */\n readonly dims: readonly number[];\n /**\n * Get the data type of the tensor.\n */\n readonly type: T;\n /**\n * Get the buffer data of the tensor.\n *\n * If the data is not on CPU (eg. it's in the form of WebGL texture or WebGPU buffer), throw error.\n */\n readonly data: Tensor.DataTypeMap[T];\n /**\n * Get the location of the data.\n */\n readonly location: Tensor.DataLocation;\n /**\n * Get the WebGL texture that holds the tensor data.\n *\n * If the data is not on GPU as WebGL texture, throw error.\n */\n readonly texture: Tensor.TextureType;\n /**\n * Get the WebGPU buffer that holds the tensor data.\n *\n * If the data is not on GPU as WebGPU buffer, throw error.\n */\n readonly gpuBuffer: Tensor.GpuBufferType;\n\n /**\n * Get the buffer data of the tensor.\n *\n * If the data is on CPU, returns the data immediately.\n * If the data is on GPU, downloads the data and returns the promise.\n *\n * @param releaseData - whether release the data on GPU. Ignore if data is already on CPU.\n */\n getData(releaseData?: boolean): Promise;\n\n /**\n * Dispose the tensor data.\n *\n * If the data is on CPU, remove its internal reference to the underlying data.\n * If the data is on GPU, release the data on GPU.\n *\n * After calling this function, the tensor is considered no longer valid. Its location will be set to 'none'.\n */\n dispose(): void;\n}\n\nexport declare namespace Tensor {\n interface DataTypeMap {\n float32: Float32Array;\n uint8: Uint8Array;\n int8: Int8Array;\n uint16: Uint16Array;\n int16: Int16Array;\n int32: Int32Array;\n int64: BigInt64Array;\n string: string[];\n bool: Uint8Array;\n float16: Uint16Array; // Keep using Uint16Array until we have a concrete solution for float 16.\n float64: Float64Array;\n uint32: Uint32Array;\n uint64: BigUint64Array;\n // complex64: never;\n // complex128: never;\n // bfloat16: never;\n }\n\n interface ElementTypeMap {\n float32: number;\n uint8: number;\n int8: number;\n uint16: number;\n int16: number;\n int32: number;\n int64: bigint;\n string: string;\n bool: boolean;\n float16: number; // Keep using Uint16Array until we have a concrete solution for float 16.\n float64: number;\n uint32: number;\n uint64: bigint;\n // complex64: never;\n // complex128: never;\n // bfloat16: never;\n }\n\n type DataType = DataTypeMap[Type];\n type ElementType = ElementTypeMap[Type];\n\n /**\n * supported data types for constructing a tensor from a pinned CPU buffer\n */\n export type CpuPinnedDataTypes = Exclude;\n\n /**\n * type alias for WebGL texture\n */\n export type TextureType = WebGLTexture;\n\n /**\n * supported data types for constructing a tensor from a WebGL texture\n */\n export type TextureDataTypes = 'float32';\n\n /**\n * type alias for WebGPU buffer\n *\n * The reason why we don't use type \"GPUBuffer\" defined in webgpu.d.ts from @webgpu/types is because \"@webgpu/types\"\n * requires \"@types/dom-webcodecs\" as peer dependency when using TypeScript < v5.1 and its version need to be chosen\n * carefully according to the TypeScript version being used. 
This means so far there is not a way to keep every\n * TypeScript version happy. It turns out that we will easily broke users on some TypeScript version.\n *\n * for more info see https://github.com/gpuweb/types/issues/127\n */\n export type GpuBufferType = {size: number; mapState: 'unmapped' | 'pending' | 'mapped'};\n\n /**\n * supported data types for constructing a tensor from a WebGPU buffer\n */\n export type GpuBufferDataTypes = 'float32'|'float16'|'int32'|'int64'|'uint32'|'uint8'|'bool';\n\n /**\n * represent where the tensor data is stored\n */\n export type DataLocation = 'none'|'cpu'|'cpu-pinned'|'texture'|'gpu-buffer';\n\n /**\n * represent the data type of a tensor\n */\n export type Type = keyof DataTypeMap;\n}\n\n/**\n * Represent multi-dimensional arrays to feed to or fetch from model inferencing.\n */\nexport interface TypedTensor extends TypedTensorBase, TypedTensorUtils {}\n/**\n * Represent multi-dimensional arrays to feed to or fetch from model inferencing.\n */\nexport interface Tensor extends TypedTensorBase, TypedTensorUtils {}\n\n/**\n * type TensorConstructor defines the constructors of 'Tensor' to create CPU tensor instances.\n */\nexport interface TensorConstructor extends TensorFactory {\n // #region CPU tensor - specify element type\n /**\n * Construct a new string tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(type: 'string', data: Tensor.DataTypeMap['string']|readonly string[],\n dims?: readonly number[]): TypedTensor<'string'>;\n\n /**\n * Construct a new bool tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(type: 'bool', data: Tensor.DataTypeMap['bool']|readonly boolean[], dims?: readonly number[]): TypedTensor<'bool'>;\n\n /**\n * Construct a new 64-bit integer typed tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(\n type: T, data: Tensor.DataTypeMap[T]|readonly bigint[]|readonly number[],\n dims?: readonly number[]): TypedTensor;\n\n /**\n * Construct a new numeric tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new>(\n type: T, data: Tensor.DataTypeMap[T]|readonly number[], dims?: readonly number[]): TypedTensor;\n // #endregion\n\n // #region CPU tensor - infer element types\n\n /**\n * Construct a new float32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Float32Array, dims?: readonly number[]): TypedTensor<'float32'>;\n\n /**\n * Construct a new int8 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. 
If omitted, a 1-D tensor is assumed.\n */\n new(data: Int8Array, dims?: readonly number[]): TypedTensor<'int8'>;\n\n /**\n * Construct a new uint8 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint8Array, dims?: readonly number[]): TypedTensor<'uint8'>;\n\n /**\n * Construct a new uint16 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint16Array, dims?: readonly number[]): TypedTensor<'uint16'>;\n\n /**\n * Construct a new int16 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Int16Array, dims?: readonly number[]): TypedTensor<'int16'>;\n\n /**\n * Construct a new int32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Int32Array, dims?: readonly number[]): TypedTensor<'int32'>;\n\n /**\n * Construct a new int64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: BigInt64Array, dims?: readonly number[]): TypedTensor<'int64'>;\n\n /**\n * Construct a new string tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: readonly string[], dims?: readonly number[]): TypedTensor<'string'>;\n\n /**\n * Construct a new bool tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: readonly boolean[], dims?: readonly number[]): TypedTensor<'bool'>;\n\n /**\n * Construct a new float64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Float64Array, dims?: readonly number[]): TypedTensor<'float64'>;\n\n /**\n * Construct a new uint32 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Uint32Array, dims?: readonly number[]): TypedTensor<'uint32'>;\n\n /**\n * Construct a new uint64 tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: BigUint64Array, dims?: readonly number[]): TypedTensor<'uint64'>;\n\n // #endregion\n\n // #region CPU tensor - fall back to non-generic tensor type declaration\n\n /**\n * Construct a new tensor object from the given type, data and dims.\n *\n * @param type - Specify the element type.\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. 
If omitted, a 1-D tensor is assumed.\n */\n new(type: Tensor.Type, data: Tensor.DataType|readonly number[]|readonly string[]|readonly bigint[]|readonly boolean[],\n dims?: readonly number[]): Tensor;\n\n /**\n * Construct a new tensor object from the given data and dims.\n *\n * @param data - Specify the CPU tensor data.\n * @param dims - Specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n */\n new(data: Tensor.DataType, dims?: readonly number[]): Tensor;\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const Tensor = TensorImpl as TensorConstructor;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from './env-impl.js';\n\n/**\n * @ignore\n */\nexport const TRACE = (deviceType: string, label: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n // eslint-disable-next-line no-console\n console.timeStamp(`${deviceType}::ORT::${label}`);\n};\n\nconst TRACE_FUNC = (msg: string, extraMsg?: string) => {\n const stack = new Error().stack?.split(/\\r\\n|\\r|\\n/g) || [];\n let hasTraceFunc = false;\n for (let i = 0; i < stack.length; i++) {\n if (hasTraceFunc && !stack[i].includes('TRACE_FUNC')) {\n let label = `FUNC_${msg}::${stack[i].trim().split(' ')[1]}`;\n if (extraMsg) {\n label += `::${extraMsg}`;\n }\n TRACE('CPU', label);\n return;\n }\n if (stack[i].includes('TRACE_FUNC')) {\n hasTraceFunc = true;\n }\n }\n};\n\n/**\n * @ignore\n */\nexport const TRACE_FUNC_BEGIN = (extraMsg?: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n TRACE_FUNC('BEGIN', extraMsg);\n};\n\n/**\n * @ignore\n */\nexport const TRACE_FUNC_END = (extraMsg?: string) => {\n if (typeof env.trace === 'undefined' ? !env.wasm.trace : !env.trace) {\n return;\n }\n TRACE_FUNC('END', extraMsg);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {resolveBackendAndExecutionProviders} from './backend-impl.js';\nimport {InferenceSessionHandler} from './backend.js';\nimport {InferenceSession as InferenceSessionInterface} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {Tensor} from './tensor.js';\nimport {TRACE_FUNC_BEGIN, TRACE_FUNC_END} from './trace.js';\n\ntype SessionOptions = InferenceSessionInterface.SessionOptions;\ntype RunOptions = InferenceSessionInterface.RunOptions;\ntype FeedsType = InferenceSessionInterface.FeedsType;\ntype FetchesType = InferenceSessionInterface.FetchesType;\ntype ReturnType = InferenceSessionInterface.ReturnType;\n\nexport class InferenceSession implements InferenceSessionInterface {\n private constructor(handler: InferenceSessionHandler) {\n this.handler = handler;\n }\n run(feeds: FeedsType, options?: RunOptions): Promise;\n run(feeds: FeedsType, fetches: FetchesType, options?: RunOptions): Promise;\n async run(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n TRACE_FUNC_BEGIN();\n const fetches: {[name: string]: OnnxValue|null} = {};\n let options: RunOptions = {};\n // check inputs\n if (typeof feeds !== 'object' || feeds === null || feeds instanceof Tensor || Array.isArray(feeds)) {\n throw new TypeError(\n '\\'feeds\\' must be an object that use input names as keys and OnnxValue as corresponding values.');\n }\n\n let isFetchesEmpty = true;\n // determine which override is being used\n if (typeof arg1 === 'object') {\n if (arg1 === null) {\n throw new TypeError('Unexpected argument[1]: cannot be null.');\n }\n if (arg1 instanceof Tensor) {\n throw new TypeError('\\'fetches\\' cannot be a Tensor');\n }\n\n if (Array.isArray(arg1)) {\n if (arg1.length === 0) {\n throw new TypeError('\\'fetches\\' cannot be an empty array.');\n }\n isFetchesEmpty = false;\n // output names\n for (const name of arg1) {\n if (typeof name !== 'string') {\n throw new TypeError('\\'fetches\\' must be a string array or an object.');\n }\n if (this.outputNames.indexOf(name) === -1) {\n throw new RangeError(`'fetches' contains invalid output name: ${name}.`);\n }\n fetches[name] = null;\n }\n\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n // decide whether arg1 is fetches or options\n // if any output name is present and its value is valid OnnxValue, we consider it fetches\n let isFetches = false;\n const arg1Keys = Object.getOwnPropertyNames(arg1);\n for (const name of this.outputNames) {\n if (arg1Keys.indexOf(name) !== -1) {\n const v = (arg1 as InferenceSessionInterface.NullableOnnxValueMapType)[name];\n if (v === null || v instanceof Tensor) {\n isFetches = true;\n isFetchesEmpty = false;\n fetches[name] = v;\n }\n }\n }\n\n if (isFetches) {\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n options = arg1 as RunOptions;\n }\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('Unexpected argument[1]: must be \\'fetches\\' or \\'options\\'.');\n }\n\n // check if all inputs are in feed\n for (const name of this.inputNames) {\n if (typeof feeds[name] === 'undefined') {\n throw new Error(`input '${name}' is missing in 'feeds'.`);\n }\n }\n\n // if no fetches is specified, we use the full output 
names list\n if (isFetchesEmpty) {\n for (const name of this.outputNames) {\n fetches[name] = null;\n }\n }\n\n // feeds, fetches and options are prepared\n\n const results = await this.handler.run(feeds, fetches, options);\n const returnValue: {[name: string]: OnnxValue} = {};\n for (const key in results) {\n if (Object.hasOwnProperty.call(results, key)) {\n const result = results[key];\n if (result instanceof Tensor) {\n returnValue[key] = result;\n } else {\n returnValue[key] = new Tensor(result.type, result.data, result.dims);\n }\n }\n }\n TRACE_FUNC_END();\n return returnValue;\n }\n\n async release(): Promise {\n return this.handler.dispose();\n }\n\n static create(path: string, options?: SessionOptions): Promise;\n static create(buffer: ArrayBufferLike, options?: SessionOptions): Promise;\n static create(buffer: ArrayBufferLike, byteOffset: number, byteLength?: number, options?: SessionOptions):\n Promise;\n static create(buffer: Uint8Array, options?: SessionOptions): Promise;\n static async create(\n arg0: string|ArrayBufferLike|Uint8Array, arg1?: SessionOptions|number, arg2?: number,\n arg3?: SessionOptions): Promise {\n TRACE_FUNC_BEGIN();\n // either load from a file or buffer\n let filePathOrUint8Array: string|Uint8Array;\n let options: SessionOptions = {};\n\n if (typeof arg0 === 'string') {\n filePathOrUint8Array = arg0;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (arg0 instanceof Uint8Array) {\n filePathOrUint8Array = arg0;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (\n arg0 instanceof ArrayBuffer ||\n (typeof SharedArrayBuffer !== 'undefined' && arg0 instanceof SharedArrayBuffer)) {\n const buffer = arg0;\n let byteOffset = 0;\n let byteLength = arg0.byteLength;\n if (typeof arg1 === 'object' && arg1 !== null) {\n options = arg1;\n } else if (typeof arg1 === 'number') {\n byteOffset = arg1;\n if (!Number.isSafeInteger(byteOffset)) {\n throw new RangeError('\\'byteOffset\\' must be an integer.');\n }\n if (byteOffset < 0 || byteOffset >= buffer.byteLength) {\n throw new RangeError(`'byteOffset' is out of range [0, ${buffer.byteLength}).`);\n }\n byteLength = arg0.byteLength - byteOffset;\n if (typeof arg2 === 'number') {\n byteLength = arg2;\n if (!Number.isSafeInteger(byteLength)) {\n throw new RangeError('\\'byteLength\\' must be an integer.');\n }\n if (byteLength <= 0 || byteOffset + byteLength > buffer.byteLength) {\n throw new RangeError(`'byteLength' is out of range (0, ${buffer.byteLength - byteOffset}].`);\n }\n if (typeof arg3 === 'object' && arg3 !== null) {\n options = arg3;\n } else if (typeof arg3 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'byteLength\\' must be a number.');\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n filePathOrUint8Array = new Uint8Array(buffer, byteOffset, byteLength);\n } else {\n throw new TypeError('Unexpected argument[0]: must be \\'path\\' or \\'buffer\\'.');\n }\n\n // resolve backend, update session options with validated EPs, and create session handler\n const [backend, optionsWithValidatedEPs] = await resolveBackendAndExecutionProviders(options);\n const handler = await 
backend.createInferenceSessionHandler(filePathOrUint8Array, optionsWithValidatedEPs);\n TRACE_FUNC_END();\n return new InferenceSession(handler);\n }\n\n startProfiling(): void {\n this.handler.startProfiling();\n }\n endProfiling(): void {\n this.handler.endProfiling();\n }\n\n get inputNames(): readonly string[] {\n return this.handler.inputNames;\n }\n get outputNames(): readonly string[] {\n return this.handler.outputNames;\n }\n\n private handler: InferenceSessionHandler;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession as InferenceSessionImpl} from './inference-session-impl.js';\nimport {OnnxModelOptions} from './onnx-model.js';\nimport {OnnxValue, OnnxValueDataLocation} from './onnx-value.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\nexport declare namespace InferenceSession {\n // #region input/output types\n\n type OnnxValueMapType = {readonly [name: string]: OnnxValue};\n type NullableOnnxValueMapType = {readonly [name: string]: OnnxValue | null};\n\n /**\n * A feeds (model inputs) is an object that uses input names as keys and OnnxValue as corresponding values.\n */\n type FeedsType = OnnxValueMapType;\n\n /**\n * A fetches (model outputs) could be one of the following:\n *\n * - Omitted. Use model's output names definition.\n * - An array of string indicating the output names.\n * - An object that use output names as keys and OnnxValue or null as corresponding values.\n *\n * @remark\n * different from input argument, in output, OnnxValue is optional. If an OnnxValue is present it will be\n * used as a pre-allocated value by the inference engine; if omitted, inference engine will allocate buffer\n * internally.\n */\n type FetchesType = readonly string[]|NullableOnnxValueMapType;\n\n /**\n * A inferencing return type is an object that uses output names as keys and OnnxValue as corresponding values.\n */\n type ReturnType = OnnxValueMapType;\n\n // #endregion\n\n // #region session options\n\n /**\n * A set of configurations for session behavior.\n */\n export interface SessionOptions extends OnnxModelOptions {\n /**\n * An array of execution provider options.\n *\n * An execution provider option can be a string indicating the name of the execution provider,\n * or an object of corresponding type.\n */\n executionProviders?: readonly ExecutionProviderConfig[];\n\n /**\n * The intra OP threads number.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native).\n */\n intraOpNumThreads?: number;\n\n /**\n * The inter OP threads number.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native).\n */\n interOpNumThreads?: number;\n\n /**\n * The free dimension override.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n freeDimensionOverrides?: {readonly [dimensionName: string]: number};\n\n /**\n * The optimization level.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n graphOptimizationLevel?: 'disabled'|'basic'|'extended'|'all';\n\n /**\n * Whether enable CPU memory arena.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n enableCpuMemArena?: boolean;\n\n /**\n * Whether enable memory pattern.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n 
enableMemPattern?: boolean;\n\n /**\n * Execution mode.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n executionMode?: 'sequential'|'parallel';\n\n /**\n * Optimized model file path.\n *\n * If this setting is specified, the optimized model will be dumped. In browser, a blob will be created\n * with a pop-up window.\n */\n optimizedModelFilePath?: string;\n\n /**\n * Whether enable profiling.\n *\n * This setting is a placeholder for a future use.\n */\n enableProfiling?: boolean;\n\n /**\n * File prefix for profiling.\n *\n * This setting is a placeholder for a future use.\n */\n profileFilePrefix?: string;\n\n /**\n * Log ID.\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logId?: string;\n\n /**\n * Log severity level. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/common/logging/severity.h\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logSeverityLevel?: 0|1|2|3|4;\n\n /**\n * Log verbosity level.\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n logVerbosityLevel?: number;\n\n /**\n * Specify string as a preferred data location for all outputs, or an object that use output names as keys and a\n * preferred data location as corresponding values.\n *\n * This setting is available only in ONNXRuntime Web for WebGL and WebGPU EP.\n */\n preferredOutputLocation?: OnnxValueDataLocation|{readonly [outputName: string]: OnnxValueDataLocation};\n\n /**\n * Whether enable graph capture.\n * This setting is available only in ONNXRuntime Web for WebGPU EP.\n */\n enableGraphCapture?: boolean;\n\n /**\n * Store configurations for a session. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/session/\n * onnxruntime_session_options_config_keys.h\n *\n * This setting is available only in WebAssembly backend. 
Will support Node.js binding and react-native later\n *\n * @example\n * ```js\n * extra: {\n * session: {\n * set_denormal_as_zero: \"1\",\n * disable_prepacking: \"1\"\n * },\n * optimization: {\n * enable_gelu_approximation: \"1\"\n * }\n * }\n * ```\n */\n extra?: Record;\n }\n\n // #region execution providers\n\n // Currently, we have the following backends to support execution providers:\n // Backend Node.js binding: supports 'cpu', 'dml' (win32), 'coreml' (macOS) and 'cuda' (linux).\n // Backend WebAssembly: supports 'cpu', 'wasm', 'webgpu' and 'webnn'.\n // Backend ONNX.js: supports 'webgl'.\n // Backend React Native: supports 'cpu', 'xnnpack', 'coreml' (iOS), 'nnapi' (Android).\n interface ExecutionProviderOptionMap {\n coreml: CoreMLExecutionProviderOption;\n cpu: CpuExecutionProviderOption;\n cuda: CudaExecutionProviderOption;\n dml: DmlExecutionProviderOption;\n nnapi: NnapiExecutionProviderOption;\n tensorrt: TensorRtExecutionProviderOption;\n wasm: WebAssemblyExecutionProviderOption;\n webgl: WebGLExecutionProviderOption;\n webgpu: WebGpuExecutionProviderOption;\n webnn: WebNNExecutionProviderOption;\n xnnpack: XnnpackExecutionProviderOption;\n }\n\n type ExecutionProviderName = keyof ExecutionProviderOptionMap;\n type ExecutionProviderConfig =\n ExecutionProviderOptionMap[ExecutionProviderName]|ExecutionProviderOption|ExecutionProviderName|string;\n\n export interface ExecutionProviderOption {\n readonly name: string;\n }\n export interface CpuExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'cpu';\n useArena?: boolean;\n }\n export interface CudaExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'cuda';\n deviceId?: number;\n }\n export interface DmlExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'dml';\n deviceId?: number;\n }\n export interface TensorRtExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'tensorrt';\n deviceId?: number;\n }\n export interface WebAssemblyExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'wasm';\n }\n export interface WebGLExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'webgl';\n // TODO: add flags\n }\n export interface XnnpackExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'xnnpack';\n }\n export interface WebGpuExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'webgpu';\n preferredLayout?: 'NCHW'|'NHWC';\n }\n export interface WebNNExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'webnn';\n deviceType?: 'cpu'|'gpu'|'npu';\n numThreads?: number;\n powerPreference?: 'default'|'low-power'|'high-performance';\n }\n export interface CoreMLExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'coreml';\n /**\n * The bit flags for CoreML execution provider.\n *\n * ```\n * COREML_FLAG_USE_CPU_ONLY = 0x001\n * COREML_FLAG_ENABLE_ON_SUBGRAPH = 0x002\n * COREML_FLAG_ONLY_ENABLE_DEVICE_WITH_ANE = 0x004\n * COREML_FLAG_ONLY_ALLOW_STATIC_INPUT_SHAPES = 0x008\n * COREML_FLAG_CREATE_MLPROGRAM = 0x010\n * ```\n *\n * See include/onnxruntime/core/providers/coreml/coreml_provider_factory.h for more details.\n *\n * This flag is available only in ONNXRuntime (Node.js binding).\n */\n coreMlFlags?: number;\n /**\n * Specify whether to use CPU only in CoreML EP.\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n useCPUOnly?: boolean;\n /**\n * Specify whether to enable CoreML EP on 
subgraph.\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n enableOnSubgraph?: boolean;\n /**\n * Specify whether to only enable CoreML EP for Apple devices with ANE (Apple Neural Engine).\n *\n * This setting is available only in ONNXRuntime (react-native).\n */\n onlyEnableDeviceWithANE?: boolean;\n }\n export interface NnapiExecutionProviderOption extends ExecutionProviderOption {\n readonly name: 'nnapi';\n useFP16?: boolean;\n useNCHW?: boolean;\n cpuDisabled?: boolean;\n cpuOnly?: boolean;\n }\n // #endregion\n\n // #endregion\n\n // #region run options\n\n /**\n * A set of configurations for inference run behavior\n */\n export interface RunOptions {\n /**\n * Log severity level. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/common/logging/severity.h\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n logSeverityLevel?: 0|1|2|3|4;\n\n /**\n * Log verbosity level.\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n logVerbosityLevel?: number;\n\n /**\n * Terminate all incomplete OrtRun calls as soon as possible if true\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n */\n terminate?: boolean;\n\n /**\n * A tag for the Run() calls using this\n *\n * This setting is available only in ONNXRuntime (Node.js binding and react-native) or WebAssembly backend\n */\n tag?: string;\n\n /**\n * Set a single run configuration entry. See\n * https://github.com/microsoft/onnxruntime/blob/main/include/onnxruntime/core/session/\n * onnxruntime_run_options_config_keys.h\n *\n * This setting is available only in WebAssembly backend. Will support Node.js binding and react-native later\n *\n * @example\n *\n * ```js\n * extra: {\n * memory: {\n * enable_memory_arena_shrinkage: \"1\",\n * }\n * }\n * ```\n */\n extra?: Record;\n }\n\n // #endregion\n\n // #region value metadata\n\n // eslint-disable-next-line @typescript-eslint/no-empty-interface\n interface ValueMetadata {\n // TBD\n }\n\n // #endregion\n}\n\n/**\n * Represent a runtime instance of an ONNX model.\n */\nexport interface InferenceSession {\n // #region run()\n\n /**\n * Execute the model asynchronously with the given feeds and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for detail.\n * @param options - Optional. A set of options that controls the behavior of model inference.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n run(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Execute the model asynchronously with the given feeds, fetches and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for detail.\n * @param fetches - Representation of the model output. See type description of `InferenceSession.OutputType` for\n * detail.\n * @param options - Optional. 
A set of options that controls the behavior of model inference.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n run(feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n // #endregion\n\n // #region release()\n\n /**\n * Release the inference session and the underlying resources.\n */\n release(): Promise;\n\n // #endregion\n\n // #region profiling\n\n /**\n * Start profiling.\n */\n startProfiling(): void;\n\n /**\n * End profiling.\n */\n endProfiling(): void;\n\n // #endregion\n\n // #region metadata\n\n /**\n * Get input names of the loaded model.\n */\n readonly inputNames: readonly string[];\n\n /**\n * Get output names of the loaded model.\n */\n readonly outputNames: readonly string[];\n\n // /**\n // * Get input metadata of the loaded model.\n // */\n // readonly inputMetadata: ReadonlyArray>;\n\n // /**\n // * Get output metadata of the loaded model.\n // */\n // readonly outputMetadata: ReadonlyArray>;\n\n // #endregion\n}\n\nexport interface InferenceSessionFactory {\n // #region create()\n\n /**\n * Create a new inference session and load model asynchronously from an ONNX model file.\n *\n * @param uri - The URI or file path of the model to load.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(uri: string, options?: InferenceSession.SessionOptions): Promise;\n\n /**\n * Create a new inference session and load model asynchronously from an array bufer.\n *\n * @param buffer - An ArrayBuffer representation of an ONNX model.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: ArrayBufferLike, options?: InferenceSession.SessionOptions): Promise;\n\n /**\n * Create a new inference session and load model asynchronously from segment of an array bufer.\n *\n * @param buffer - An ArrayBuffer representation of an ONNX model.\n * @param byteOffset - The beginning of the specified portion of the array buffer.\n * @param byteLength - The length in bytes of the array buffer.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: ArrayBufferLike, byteOffset: number, byteLength?: number, options?: InferenceSession.SessionOptions):\n Promise;\n\n /**\n * Create a new inference session and load model asynchronously from a Uint8Array.\n *\n * @param buffer - A Uint8Array representation of an ONNX model.\n * @param options - specify configuration for creating a new inference session.\n * @returns A promise that resolves to an InferenceSession object.\n */\n create(buffer: Uint8Array, options?: InferenceSession.SessionOptions): Promise;\n\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const InferenceSession: InferenceSessionFactory = InferenceSessionImpl;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {OptionsFormat, OptionsNormalizationParameters, OptionsTensorLayout} from './tensor-factory.js';\n\nexport interface TensorToDataUrlOptions extends OptionsTensorLayout, OptionsFormat, OptionsNormalizationParameters {}\n\nexport interface TensorToImageDataOptions extends OptionsTensorLayout, OptionsFormat, OptionsNormalizationParameters {}\n\nexport interface ConversionUtils {\n /**\n * creates a DataURL instance from tensor\n *\n * @param options - An optional object representing options for creating a DataURL instance from the tensor.\n *\n * The following default settings will be applied:\n * - `format`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * @returns a DataURL string representing the image converted from tensor data\n */\n toDataURL(options?: TensorToDataUrlOptions): string;\n\n /**\n * creates an ImageData instance from tensor\n *\n * @param options - An optional object representing options for creating an ImageData instance from the tensor.\n *\n * The following default settings will be applied:\n * - `format`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * @returns an ImageData instance representing the image converted from tensor data\n */\n toImageData(options?: TensorToImageDataOptions): ImageData;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor, TypedTensor} from './tensor.js';\n\nexport type ImageFormat = 'RGB'|'RGBA'|'BGR'|'RBG';\nexport type ImageTensorLayout = 'NHWC'|'NCHW';\n\n// the following region contains type definitions for constructing tensor from a specific location.\n\n// #region types for constructing a tensor from a specific location\n\n/**\n * represent common properties of the parameter for constructing a tensor from a specific location.\n */\ninterface CommonConstructorParameters extends Pick {\n /**\n * Specify the data type of the tensor.\n */\n readonly type: T;\n}\n\n/**\n * represent the parameter for constructing a tensor from a GPU resource.\n */\ninterface GpuResourceConstructorParameters {\n /**\n * an optional callback function to download data from GPU to CPU.\n *\n * If not provided, the tensor treat the GPU data as external resource.\n */\n download?(): Promise;\n\n /**\n * an optional callback function that will be called when the tensor is disposed.\n *\n * If not provided, the tensor treat the GPU data as external resource.\n */\n dispose?(): void;\n}\n\n/**\n * represent the parameter for constructing a tensor from a pinned CPU buffer\n */\nexport interface CpuPinnedConstructorParameters extends\n CommonConstructorParameters {\n /**\n * Specify the location of the data to be 'cpu-pinned'.\n */\n readonly location: 'cpu-pinned';\n /**\n * Specify the CPU pinned buffer that holds the tensor data.\n */\n readonly data: Tensor.DataTypeMap[T];\n}\n\n/**\n * represent the parameter for constructing a tensor from a WebGL texture\n */\nexport interface TextureConstructorParameters extends\n CommonConstructorParameters, GpuResourceConstructorParameters {\n /**\n * Specify the location of the data to be 'texture'.\n */\n readonly location: 'texture';\n /**\n * Specify the WebGL texture that holds the tensor data.\n */\n readonly texture: Tensor.TextureType;\n}\n\n/**\n * represent the parameter for constructing a tensor from a WebGPU buffer\n */\nexport interface GpuBufferConstructorParameters extends\n CommonConstructorParameters, GpuResourceConstructorParameters {\n /**\n * Specify the location of the data to be 
'gpu-buffer'.\n */\n readonly location: 'gpu-buffer';\n /**\n * Specify the WebGPU buffer that holds the tensor data.\n */\n readonly gpuBuffer: Tensor.GpuBufferType;\n}\n\n// #endregion\n\n// the following region contains type definitions of each individual options.\n// the tensor factory functions use a composition of those options as the parameter type.\n\n// #region Options fields\n\nexport interface OptionsFormat {\n /**\n * Describes the image format represented in RGBA color space.\n */\n format?: ImageFormat;\n}\n\nexport interface OptionsTensorFormat {\n /**\n * Describes the image format of the tensor.\n *\n * NOTE: this is different from option 'format'. While option 'format' represents the original image, 'tensorFormat'\n * represents the target format of the tensor. A transpose will be performed if they are different.\n */\n tensorFormat?: ImageFormat;\n}\n\nexport interface OptionsTensorDataType {\n /**\n * Describes the data type of the tensor.\n */\n dataType?: 'float32'|'uint8';\n}\n\nexport interface OptionsTensorLayout {\n /**\n * Describes the tensor layout when representing data of one or more image(s).\n */\n tensorLayout?: ImageTensorLayout;\n}\n\nexport interface OptionsDimensions {\n /**\n * Describes the image height in pixel\n */\n height?: number;\n /**\n * Describes the image width in pixel\n */\n width?: number;\n}\n\nexport interface OptionResizedDimensions {\n /**\n * Describes the resized height. If omitted, original height will be used.\n */\n resizedHeight?: number;\n /**\n * Describes resized width - can be accessed via tensor dimensions as well\n */\n resizedWidth?: number;\n}\n\nexport interface OptionsNormalizationParameters {\n /**\n * Describes normalization parameters when preprocessing the image as model input.\n *\n * Data element are ranged from 0 to 255.\n */\n norm?: {\n /**\n * The 'bias' value for image normalization.\n * - If omitted, use default value 0.\n * - If it's a single number, apply to each channel\n * - If it's an array of 3 or 4 numbers, apply element-wise. Number of elements need to match the number of channels\n * for the corresponding image format\n */\n bias?: number|[number, number, number]|[number, number, number, number];\n /**\n * The 'mean' value for image normalization.\n * - If omitted, use default value 255.\n * - If it's a single number, apply to each channel\n * - If it's an array of 3 or 4 numbers, apply element-wise. 
Number of elements need to match the number of channels\n * for the corresponding image format\n */\n mean?: number | [number, number, number] | [number, number, number, number];\n };\n}\n\n// #endregion\n\n// #region Options composition\n\nexport interface TensorFromImageDataOptions extends OptionResizedDimensions, OptionsTensorFormat, OptionsTensorLayout,\n OptionsTensorDataType, OptionsNormalizationParameters {}\n\nexport interface TensorFromImageElementOptions extends OptionResizedDimensions, OptionsTensorFormat,\n OptionsTensorLayout, OptionsTensorDataType,\n OptionsNormalizationParameters {}\n\nexport interface TensorFromUrlOptions extends OptionsDimensions, OptionResizedDimensions, OptionsTensorFormat,\n OptionsTensorLayout, OptionsTensorDataType,\n OptionsNormalizationParameters {}\n\nexport interface TensorFromImageBitmapOptions extends OptionResizedDimensions, OptionsTensorFormat, OptionsTensorLayout,\n OptionsTensorDataType, OptionsNormalizationParameters {}\n\nexport interface TensorFromTextureOptions extends\n Required, OptionsFormat, GpuResourceConstructorParameters/* TODO: add more */ {}\n\nexport interface TensorFromGpuBufferOptions extends\n Pick, GpuResourceConstructorParameters {\n /**\n * Describes the data type of the tensor.\n */\n dataType?: T;\n}\n\n// #endregion\n\n/**\n * type TensorFactory defines the factory functions of 'Tensor' to create tensor instances from existing data or\n * resources.\n */\nexport interface TensorFactory {\n /**\n * create a tensor from an ImageData object\n *\n * @param imageData - the ImageData object to create tensor from\n * @param options - An optional object representing options for creating tensor from ImageData.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(imageData: ImageData, options?: TensorFromImageDataOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from a HTMLImageElement object\n *\n * @param imageElement - the HTMLImageElement object to create tensor from\n * @param options - An optional object representing options for creating tensor from HTMLImageElement.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(imageElement: HTMLImageElement, options?: TensorFromImageElementOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from URL\n *\n * @param urlSource - a string as a URL to the image or a data URL containing the image data.\n * @param options - An optional object representing options for creating tensor from URL.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n fromImage(urlSource: string, options?: TensorFromUrlOptions): Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from an ImageBitmap object\n *\n * @param bitmap - the ImageBitmap object to create tensor from\n * @param options - An optional object representing options for creating tensor from URL.\n *\n * The following default settings will be applied:\n * - `tensorFormat`: `'RGB'`\n * - `tensorLayout`: `'NCHW'`\n * - `dataType`: `'float32'`\n * @returns A promise that resolves to a tensor object\n */\n 
fromImage(bitmap: ImageBitmap, options: TensorFromImageBitmapOptions):\n Promise|TypedTensor<'uint8'>>;\n\n /**\n * create a tensor from a WebGL texture\n *\n * @param texture - the WebGLTexture object to create tensor from\n * @param options - An optional object representing options for creating tensor from WebGL texture.\n *\n * The options include following properties:\n * - `width`: the width of the texture. Required.\n * - `height`: the height of the texture. Required.\n * - `format`: the format of the texture. If omitted, assume 'RGBA'.\n * - `download`: an optional function to download the tensor data from GPU to CPU. If omitted, the GPU data\n * will not be able to download. Usually, this is provided by a GPU backend for the inference outputs. Users don't\n * need to provide this function.\n * - `dispose`: an optional function to dispose the tensor data on GPU. If omitted, the GPU data will not be disposed.\n * Usually, this is provided by a GPU backend for the inference outputs. Users don't need to provide this function.\n *\n * @returns a tensor object\n */\n fromTexture(\n texture: Tensor.TextureType, options: TensorFromTextureOptions): TypedTensor<'float32'>;\n\n /**\n * create a tensor from a WebGPU buffer\n *\n * @param buffer - the GPUBuffer object to create tensor from\n * @param options - An optional object representing options for creating tensor from WebGPU buffer.\n *\n * The options include following properties:\n * - `dataType`: the data type of the tensor. If omitted, assume 'float32'.\n * - `dims`: the dimension of the tensor. Required.\n * - `download`: an optional function to download the tensor data from GPU to CPU. If omitted, the GPU data\n * will not be able to download. Usually, this is provided by a GPU backend for the inference outputs. Users don't\n * need to provide this function.\n * - `dispose`: an optional function to dispose the tensor data on GPU. If omitted, the GPU data will not be disposed.\n * Usually, this is provided by a GPU backend for the inference outputs. Users don't need to provide this function.\n *\n * @returns a tensor object\n */\n fromGpuBuffer(\n buffer: Tensor.GpuBufferType, options: TensorFromGpuBufferOptions): TypedTensor;\n\n /**\n * create a tensor from a pre-allocated buffer. The buffer will be used as a pinned buffer.\n *\n * @param type - the tensor element type.\n * @param buffer - a TypedArray corresponding to the type.\n * @param dims - specify the dimension of the tensor. If omitted, a 1-D tensor is assumed.\n *\n * @returns a tensor object\n */\n fromPinnedBuffer>(\n type: T, buffer: Tensor.DataTypeMap[T], dims?: readonly number[]): TypedTensor;\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n/**\n * A string that represents a file's URL or path.\n *\n * Path is vailable only in onnxruntime-node or onnxruntime-web running in Node.js.\n */\nexport type FileUrlOrPath = string;\n\n/**\n * A Blob object that represents a file.\n */\nexport type FileBlob = Blob;\n\n/**\n * A Uint8Array, ArrayBuffer or SharedArrayBuffer object that represents a file content.\n *\n * When it is an ArrayBuffer or SharedArrayBuffer, the whole buffer is assumed to be the file content.\n */\nexport type FileData = Uint8Array|ArrayBufferLike;\n\n/**\n * Represents a file that can be loaded by the ONNX Runtime JavaScript API.\n */\nexport type FileType = FileUrlOrPath|FileBlob|FileData;\n\n/**\n * Represents an external data file.\n */\nexport interface ExternalDataFileDescription {\n /**\n * Specify the external data file.\n */\n data: FileType;\n /**\n * Specify the file path.\n */\n path: string;\n}\n\n/**\n * Represents an external data file.\n *\n * When using a string, it should be a file URL or path that in the same directory as the model file.\n */\nexport type ExternalDataFileType = ExternalDataFileDescription|FileUrlOrPath;\n\n/**\n * Options for model loading.\n */\nexport interface OnnxModelOptions {\n /**\n * Specifying a list of files that represents the external data.\n */\n externalData?: readonly ExternalDataFileType[];\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from './tensor.js';\n\nexport type NonTensorType = never;\n\n/**\n * Type OnnxValue Represents both tensors and non-tensors value for model's inputs/outputs.\n *\n * NOTE: currently not support non-tensor\n */\nexport type OnnxValue = Tensor|NonTensorType;\n\n/**\n * Type OnnxValueDataLocation represents the location of the data of an OnnxValue.\n */\nexport type OnnxValueDataLocation = Tensor.DataLocation;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {resolveBackendAndExecutionProviders} from './backend-impl.js';\nimport {SessionHandler, TrainingSessionHandler} from './backend.js';\nimport {InferenceSession as InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {Tensor} from './tensor.js';\nimport {TrainingSession as TrainingSessionInterface, TrainingSessionCreateOptions} from './training-session.js';\n\ntype SessionOptions = InferenceSession.SessionOptions;\ntype FeedsType = InferenceSession.FeedsType;\ntype FetchesType = InferenceSession.FetchesType;\ntype ReturnType = InferenceSession.ReturnType;\ntype RunOptions = InferenceSession.RunOptions;\n\nconst noBackendErrMsg: string = 'Training backend could not be resolved. 
' +\n 'Make sure you\\'re using the correct configuration & WebAssembly files.';\n\nexport class TrainingSession implements TrainingSessionInterface {\n private constructor(handler: TrainingSessionHandler, hasOptimizerModel: boolean, hasEvalModel: boolean) {\n this.handler = handler;\n this.hasOptimizerModel = hasOptimizerModel;\n this.hasEvalModel = hasEvalModel;\n }\n private handler: TrainingSessionHandler;\n private hasOptimizerModel: boolean;\n private hasEvalModel: boolean;\n\n get trainingInputNames(): readonly string[] {\n return this.handler.inputNames;\n }\n get trainingOutputNames(): readonly string[] {\n return this.handler.outputNames;\n }\n\n get evalInputNames(): readonly string[] {\n if (this.hasEvalModel) {\n return this.handler.evalInputNames;\n } else {\n throw new Error('This training session has no evalModel loaded.');\n }\n }\n get evalOutputNames(): readonly string[] {\n if (this.hasEvalModel) {\n return this.handler.evalOutputNames;\n } else {\n throw new Error('This training session has no evalModel loaded.');\n }\n }\n\n static async create(trainingOptions: TrainingSessionCreateOptions, sessionOptions?: SessionOptions):\n Promise {\n const evalModel: string|Uint8Array = trainingOptions.evalModel || '';\n const optimizerModel: string|Uint8Array = trainingOptions.optimizerModel || '';\n const options: SessionOptions = sessionOptions || {};\n\n // resolve backend, update session options with validated EPs, and create session handler\n const [backend, optionsWithValidatedEPs] = await resolveBackendAndExecutionProviders(options);\n if (backend.createTrainingSessionHandler) {\n const handler = await backend.createTrainingSessionHandler(\n trainingOptions.checkpointState, trainingOptions.trainModel, evalModel, optimizerModel,\n optionsWithValidatedEPs);\n return new TrainingSession(handler, !!trainingOptions.optimizerModel, !!trainingOptions.evalModel);\n } else {\n throw new Error(noBackendErrMsg);\n }\n }\n\n /**\n * Helper function for runTrainStep and future runStep methods that handles the type-narrowing conversion from\n * the given parameters to SessionHandler.FetchesType and RunOptions.\n *\n * @param inputNames the feeds object is checked that they contain all input names in the provided list of input\n * names.\n * @param outputNames the fetches object is checked that their keys match up with valid names in the list of output\n * names.\n * @param feeds the required input\n * @param arg1 narrowed & converted into the SessionHandler.FetchesType or RunOptions object\n * @param arg2 optional RunOptions object.\n * @returns\n */\n typeNarrowingForRunStep(\n inputNames: readonly string[], outputNames: readonly string[], feeds: FeedsType, arg1?: FetchesType|RunOptions,\n arg2?: RunOptions): [SessionHandler.FetchesType, RunOptions] {\n const fetches: {[name: string]: OnnxValue|null} = {};\n let options: RunOptions = {};\n // check inputs\n if (typeof feeds !== 'object' || feeds === null || feeds instanceof Tensor || Array.isArray(feeds)) {\n throw new TypeError(\n '\\'feeds\\' must be an object that use input names as keys and OnnxValue as corresponding values.');\n }\n\n let isFetchesEmpty = true;\n // determine which override is being used\n if (typeof arg1 === 'object') {\n if (arg1 === null) {\n throw new TypeError('Unexpected argument[1]: cannot be null.');\n }\n if (arg1 instanceof Tensor) {\n throw new TypeError('\\'fetches\\' cannot be a Tensor');\n }\n\n if (Array.isArray(arg1)) {\n if (arg1.length === 0) {\n throw new TypeError('\\'fetches\\' cannot be an 
empty array.');\n }\n isFetchesEmpty = false;\n // output names\n for (const name of arg1) {\n if (typeof name !== 'string') {\n throw new TypeError('\\'fetches\\' must be a string array or an object.');\n }\n if (outputNames.indexOf(name) === -1) {\n throw new RangeError(`'fetches' contains invalid output name: ${name}.`);\n }\n fetches[name] = null;\n }\n\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n // decide whether arg1 is fetches or options\n // if any output name is present and its value is valid OnnxValue, we consider it fetches\n let isFetches = false;\n const arg1Keys = Object.getOwnPropertyNames(arg1);\n for (const name of outputNames) {\n if (arg1Keys.indexOf(name) !== -1) {\n const v = (arg1 as InferenceSession.NullableOnnxValueMapType)[name];\n if (v === null || v instanceof Tensor) {\n isFetches = true;\n isFetchesEmpty = false;\n fetches[name] = v;\n }\n }\n }\n\n if (isFetches) {\n if (typeof arg2 === 'object' && arg2 !== null) {\n options = arg2;\n } else if (typeof arg2 !== 'undefined') {\n throw new TypeError('\\'options\\' must be an object.');\n }\n } else {\n options = arg1 as RunOptions;\n }\n }\n } else if (typeof arg1 !== 'undefined') {\n throw new TypeError('Unexpected argument[1]: must be \\'fetches\\' or \\'options\\'.');\n }\n\n // check if all inputs are in feed\n for (const name of inputNames) {\n if (typeof feeds[name] === 'undefined') {\n throw new Error(`input '${name}' is missing in 'feeds'.`);\n }\n }\n\n // if no fetches is specified, we use the full output names list\n if (isFetchesEmpty) {\n for (const name of outputNames) {\n fetches[name] = null;\n }\n }\n\n return [fetches, options];\n }\n\n /**\n * Helper method for runTrainStep and any other runStep methods. 
Takes the ReturnType result from the SessionHandler\n * and changes it into a map of Tensors.\n *\n * @param results\n * @returns\n */\n convertHandlerReturnTypeToMapOfTensors(results: SessionHandler.ReturnType): ReturnType {\n const returnValue: {[name: string]: OnnxValue} = {};\n for (const key in results) {\n if (Object.hasOwnProperty.call(results, key)) {\n const result = results[key];\n if (result instanceof Tensor) {\n returnValue[key] = result;\n } else {\n returnValue[key] = new Tensor(result.type, result.data, result.dims);\n }\n }\n }\n return returnValue;\n }\n\n async lazyResetGrad(): Promise {\n await this.handler.lazyResetGrad();\n }\n\n runTrainStep(feeds: FeedsType, options?: RunOptions): Promise;\n runTrainStep(feeds: FeedsType, fetches: FetchesType, options?: RunOptions): Promise;\n async runTrainStep(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n const [fetches, options] =\n this.typeNarrowingForRunStep(this.trainingInputNames, this.trainingOutputNames, feeds, arg1, arg2);\n const results = await this.handler.runTrainStep(feeds, fetches, options);\n return this.convertHandlerReturnTypeToMapOfTensors(results);\n }\n\n async runOptimizerStep(options?: InferenceSession.RunOptions|undefined): Promise {\n if (this.hasOptimizerModel) {\n await this.handler.runOptimizerStep(options || {});\n } else {\n throw new Error('This TrainingSession has no OptimizerModel loaded.');\n }\n }\n\n runEvalStep(feeds: FeedsType, options?: RunOptions|undefined): Promise;\n runEvalStep(feeds: FeedsType, fetches: FetchesType, options?: RunOptions|undefined): Promise;\n async runEvalStep(feeds: FeedsType, arg1?: FetchesType|RunOptions, arg2?: RunOptions): Promise {\n if (this.hasEvalModel) {\n const [fetches, options] =\n this.typeNarrowingForRunStep(this.evalInputNames, this.evalOutputNames, feeds, arg1, arg2);\n const results = await this.handler.runEvalStep(feeds, fetches, options);\n return this.convertHandlerReturnTypeToMapOfTensors(results);\n } else {\n throw new Error('This TrainingSession has no EvalModel loaded.');\n }\n }\n\n async getParametersSize(trainableOnly = true): Promise {\n return this.handler.getParametersSize(trainableOnly);\n }\n\n async loadParametersBuffer(array: Uint8Array, trainableOnly = true): Promise {\n const paramsSize = await this.getParametersSize(trainableOnly);\n // checking that the size of the Uint8Array is equivalent to the byte length of a Float32Array of the number\n // of parameters\n if (array.length !== 4 * paramsSize) {\n throw new Error(\n 'Size of the buffer passed into loadParametersBuffer must match the number of parameters in ' +\n 'the model. Please use getParametersSize method to check.');\n }\n return this.handler.loadParametersBuffer(array, trainableOnly);\n }\n\n async getContiguousParameters(trainableOnly = true): Promise {\n return this.handler.getContiguousParameters(trainableOnly);\n }\n\n async release(): Promise {\n return this.handler.dispose();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from './inference-session.js';\nimport {OnnxValue} from './onnx-value.js';\nimport {TrainingSession as TrainingSessionImpl} from './training-session-impl.js';\n\n/* eslint-disable @typescript-eslint/no-redeclare */\n\nexport declare namespace TrainingSession {\n /**\n * Either URI file path (string) or Uint8Array containing model or checkpoint information.\n */\n type UriOrBuffer = string|Uint8Array;\n}\n\n/**\n * Represent a runtime instance of an ONNX training session,\n * which contains a model that can be trained, and, optionally,\n * an eval and optimizer model.\n */\nexport interface TrainingSession {\n // #region run()\n\n /**\n * Lazily resets the gradients of all trainable parameters to zero. Should happen after the invocation of\n * runOptimizerStep.\n */\n lazyResetGrad(): Promise;\n\n /**\n * Run TrainStep asynchronously with the given feeds and options.\n *\n * @param feeds - Representation of the model input. See type description of `InferenceSession.InputType` for\n detail.\n * @param options - Optional. A set of options that controls the behavior of model training.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding values.\n */\n runTrainStep(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions):\n Promise;\n\n /**\n * Run a single train step with the given inputs and options.\n *\n * @param feeds - Representation of the model input.\n * @param fetches - Representation of the model output.\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model training.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runTrainStep(\n feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Runs a single optimizer step, which performs weight updates for the trainable parameters using the optimizer model.\n *\n * @param options - Optional. A set of options that controls the behavior of model optimizing.\n */\n runOptimizerStep(options?: InferenceSession.RunOptions): Promise;\n\n /**\n * Run a single eval step with the given inputs and options using the eval model.\n *\n * @param feeds - Representation of the model input.\n * @param options - Optional. A set of options that controls the behavior of model eval step.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runEvalStep(feeds: InferenceSession.FeedsType, options?: InferenceSession.RunOptions):\n Promise;\n\n /**\n * Run a single eval step with the given inputs and options using the eval model.\n *\n * @param feeds - Representation of the model input.\n * @param fetches - Representation of the model output.\n * detail.\n * @param options - Optional. A set of options that controls the behavior of model eval step.\n * @returns A promise that resolves to a map, which uses output names as keys and OnnxValue as corresponding\n values.\n */\n runEvalStep(\n feeds: InferenceSession.FeedsType, fetches: InferenceSession.FetchesType,\n options?: InferenceSession.RunOptions): Promise;\n\n // #endregion\n\n // #region copy parameters\n\n /**\n * Retrieves the size of all parameters for the training state. 
Calculates the total number of primitive (datatype of\n * the parameters) elements of all the parameters in the training state.\n *\n * @param trainableOnly - When set to true, the size is calculated for trainable params only. Default value is true.\n */\n getParametersSize(trainableOnly: boolean): Promise;\n\n /**\n * Copies parameter values from the given buffer to the training state. Currently, only supporting models with\n * parameters of type Float32.\n *\n * @param buffer - A Uint8Array representation of Float32 parameters.\n * @param trainableOnly - True if trainable parameters only to be modified, false otherwise. Default value is true.\n */\n loadParametersBuffer(buffer: Uint8Array, trainableOnly: boolean): Promise;\n\n /**\n * Copies the model parameters to a contiguous buffer. Usually used in the context of Federated Learning.\n * Currently, only supporting models with parameters of type Float32.\n *\n * @param trainableOnly - When set to true, only trainable parameters are copied. Trainable parameters are parameters\n * for which requires_grad is set to true. Default value is true.\n * @returns A promise that resolves to a Float32 OnnxValue of the requested parameters.\n */\n getContiguousParameters(trainableOnly: boolean): Promise;\n // #endregion\n\n // #region release()\n\n /**\n * Release the inference session and the underlying resources.\n */\n release(): Promise;\n // #endregion\n\n // #region metadata\n\n /**\n * Get input names of the loaded training model.\n */\n readonly trainingInputNames: readonly string[];\n\n /**\n * Get output names of the loaded training model.\n */\n readonly trainingOutputNames: readonly string[];\n\n /**\n * Get input names of the loaded eval model. Is an empty array if no eval model is loaded.\n */\n readonly evalInputNames: readonly string[];\n\n /**\n * Get output names of the loaded eval model. Is an empty array if no eval model is loaded.\n */\n readonly evalOutputNames: readonly string[];\n\n // #endregion\n}\n\n/**\n * Represents the optional parameters that can be passed into the TrainingSessionFactory.\n */\nexport interface TrainingSessionCreateOptions {\n /**\n * URI or buffer for a .ckpt file that contains the checkpoint for the training model.\n */\n checkpointState: TrainingSession.UriOrBuffer;\n /**\n * URI or buffer for the .onnx training file.\n */\n trainModel: TrainingSession.UriOrBuffer;\n /**\n * Optional. URI or buffer for the .onnx optimizer model file.\n */\n optimizerModel?: TrainingSession.UriOrBuffer;\n /**\n * Optional. URI or buffer for the .onnx eval model file.\n */\n evalModel?: TrainingSession.UriOrBuffer;\n}\n\n/**\n * Defines method overload possibilities for creating a TrainingSession.\n */\nexport interface TrainingSessionFactory {\n // #region create()\n\n /**\n * Creates a new TrainingSession and asynchronously loads any models passed in through trainingOptions\n *\n * @param trainingOptions specify models and checkpoints to load into the Training Session\n * @param sessionOptions specify configuration for training session behavior\n *\n * @returns Promise that resolves to a TrainingSession object\n */\n create(trainingOptions: TrainingSessionCreateOptions, sessionOptions?: InferenceSession.SessionOptions):\n Promise;\n\n // #endregion\n}\n\n// eslint-disable-next-line @typescript-eslint/naming-convention\nexport const TrainingSession: TrainingSessionFactory = TrainingSessionImpl;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n/**\n * # ONNX Runtime JavaScript API\n *\n * ONNX Runtime JavaScript API is a unified API for all JavaScript usages, including the following NPM packages:\n *\n * - [onnxruntime-node](https://www.npmjs.com/package/onnxruntime-node)\n * - [onnxruntime-web](https://www.npmjs.com/package/onnxruntime-web)\n * - [onnxruntime-react-native](https://www.npmjs.com/package/onnxruntime-react-native)\n *\n * See also:\n * - [Get Started](https://onnxruntime.ai/docs/get-started/with-javascript/)\n * - [Inference examples](https://github.com/microsoft/onnxruntime-inference-examples/tree/main/js)\n *\n * @packageDocumentation\n */\n\nexport * from './backend.js';\nexport * from './env.js';\nexport * from './inference-session.js';\nexport * from './tensor.js';\nexport * from './tensor-conversion.js';\nexport * from './tensor-factory.js';\nexport * from './trace.js';\nexport * from './onnx-model.js';\nexport * from './onnx-value.js';\nexport * from './training-session.js';\n", "export const readFile = undefined;export const readFileSync = undefined;export const createReadStream = undefined;", "export const join = undefined;", "\nvar ortWasm = (() => {\n var _scriptDir = typeof document != 'undefined' ? document.currentScript?.src : undefined;\n if (typeof __filename != 'undefined') _scriptDir ||= __filename;\n return (\nfunction(moduleArg = {}) {\n\nvar g=moduleArg,aa,ba,readyPromise=new Promise((a,b)=>{aa=a;ba=b});\"use strict\";g.mountExternalData=(a,b)=>{(g.Ph||(g.Ph=new Map)).set(a,b)};g.unmountExternalData=()=>{delete g.Ph};\nlet da=()=>{const a=(c,d,e)=>(...f)=>{const h=t,k=d?.();f=c(...f);const l=d?.();k!==l&&(c=l,e(k),d=e=null);return t!=h?ca():f},b=c=>async(...d)=>{try{if(g.Oh)throw Error(\"Session already started\");const e=g.Oh={gi:d[0],errors:[]},f=await c(...d);if(g.Oh!==e)throw Error(\"Session mismatch\");g.Uh?.flush();const h=e.errors;if(0l);if(0g._OrtCreateSession,\nc=>g._OrtCreateSession=c);g._OrtRun=b(a(g._OrtRun,()=>g._OrtRun,c=>g._OrtRun=c));g._OrtRunWithBinding=b(a(g._OrtRunWithBinding,()=>g._OrtRunWithBinding,c=>g._OrtRunWithBinding=c));g._OrtBindInput=a(g._OrtBindInput,()=>g._OrtBindInput,c=>g._OrtBindInput=c);da=void 0};\ng.jsepInit=(a,b)=>{da?.();if(\"webgpu\"===a){[g.Uh,g.Zh,g.ci,g.Vh,g.bi,g.je,g.di,g.fi,g.$h,g.ai,g.ei]=b;const c=g.Uh;g.jsepRegisterBuffer=(d,e,f,h)=>c.registerBuffer(d,e,f,h);g.jsepGetBuffer=d=>c.getBuffer(d);g.jsepCreateDownloader=(d,e,f)=>c.createDownloader(d,e,f);g.jsepOnReleaseSession=d=>{c.onReleaseSession(d)};g.jsepOnRunStart=d=>c.onRunStart(d)}};\nvar ea=Object.assign({},g),fa=\"./this.program\",ha=(a,b)=>{throw b;},ia=\"object\"==typeof window,ja=\"function\"==typeof importScripts,ka=\"object\"==typeof process&&\"object\"==typeof process.versions&&\"string\"==typeof process.versions.node,v=\"\",la,ma,na;\nif(ka){var fs=require(\"fs\"),oa=require(\"path\");v=ja?oa.dirname(v)+\"/\":__dirname+\"/\";la=(a,b)=>{a=pa(a)?new URL(a):oa.normalize(a);return fs.readFileSync(a,b?void 0:\"utf8\")};na=a=>{a=la(a,!0);a.buffer||(a=new Uint8Array(a));return a};ma=(a,b,c,d=!0)=>{a=pa(a)?new URL(a):oa.normalize(a);fs.readFile(a,d?void 0:\"utf8\",(e,f)=>{e?c(e):b(d?f.buffer:f)})};!g.thisProgram&&1{process.exitCode=a;throw b;}}else if(ia||\nja)ja?v=self.location.href:\"undefined\"!=typeof document&&document.currentScript&&(v=document.currentScript.src),_scriptDir&&(v=_scriptDir),v.startsWith(\"blob:\")?v=\"\":v=v.substr(0,v.replace(/[?#].*/,\"\").lastIndexOf(\"/\")+1),la=a=>{var b=new 
XMLHttpRequest;b.open(\"GET\",a,!1);b.send(null);return b.responseText},ja&&(na=a=>{var b=new XMLHttpRequest;b.open(\"GET\",a,!1);b.responseType=\"arraybuffer\";b.send(null);return new Uint8Array(b.response)}),ma=(a,b,c)=>{var d=new XMLHttpRequest;d.open(\"GET\",a,!0);d.responseType=\n\"arraybuffer\";d.onload=()=>{200==d.status||0==d.status&&d.response?b(d.response):c()};d.onerror=c;d.send(null)};var qa=console.log.bind(console),w=console.error.bind(console);Object.assign(g,ea);ea=null;var ra,x=!1,sa,z,E,ta,ua,G,I,va,wa,xa,ya;\nfunction za(){var a=ra.buffer;g.HEAP8=z=new Int8Array(a);g.HEAP16=ta=new Int16Array(a);g.HEAPU8=E=new Uint8Array(a);g.HEAPU16=ua=new Uint16Array(a);g.HEAP32=G=new Int32Array(a);g.HEAPU32=I=new Uint32Array(a);g.HEAPF32=va=new Float32Array(a);g.HEAPF64=ya=new Float64Array(a);g.HEAP64=wa=new BigInt64Array(a);g.HEAPU64=xa=new BigUint64Array(a)}var Aa=[],Ba=[],Ca=[],Da=0,Ea=null,Fa=null;\nfunction Ga(a){a=\"Aborted(\"+a+\")\";w(a);x=!0;sa=1;a=new WebAssembly.RuntimeError(a+\". Build with -sASSERTIONS for more info.\");ba(a);throw a;}var Ha=a=>a.startsWith(\"data:application/octet-stream;base64,\"),pa=a=>a.startsWith(\"file://\"),Ia;Ia=\"ort-wasm-simd.wasm\";if(!Ha(Ia)){var Ja=Ia;Ia=g.locateFile?g.locateFile(Ja,v):v+Ja}function Ka(a){if(na)return na(a);throw\"both async and sync fetching of the wasm failed\";}\nfunction La(a){if(ia||ja){if(\"function\"==typeof fetch&&!pa(a))return fetch(a,{credentials:\"same-origin\"}).then(b=>{if(!b.ok)throw`failed to load wasm binary file at '${a}'`;return b.arrayBuffer()}).catch(()=>Ka(a));if(ma)return new Promise((b,c)=>{ma(a,d=>b(new Uint8Array(d)),c)})}return Promise.resolve().then(()=>Ka(a))}function Ma(a,b,c){return La(a).then(d=>WebAssembly.instantiate(d,b)).then(c,d=>{w(`failed to asynchronously prepare wasm: ${d}`);Ga(d)})}\nfunction Na(a,b){var c=Ia;return\"function\"!=typeof WebAssembly.instantiateStreaming||Ha(c)||pa(c)||ka||\"function\"!=typeof fetch?Ma(c,a,b):fetch(c,{credentials:\"same-origin\"}).then(d=>WebAssembly.instantiateStreaming(d,a).then(b,function(e){w(`wasm streaming compile failed: ${e}`);w(\"falling back to ArrayBuffer instantiation\");return Ma(c,a,b)}))}\nvar Oa={1261504:(a,b,c,d)=>{if(\"undefined\"==typeof g||!g.Ph)return 1;a=J(a>>>0);a.startsWith(\"./\")&&(a=a.substring(2));a=g.Ph.get(a);if(!a)return 2;b>>>=0;c>>>=0;if(b+c>a.byteLength)return 3;try{return E.set(a.subarray(b,b+c),d>>>0>>>0),0}catch{return 4}},1262005:(a,b,c)=>{c=J(c);const d=new Uint8Array(b);d.set(E.subarray(a>>>0,a+b>>>0));\"object\"==typeof process&&\"object\"==typeof process.versions&&\"string\"==typeof process.versions.node?require(\"fs\").writeFileSync(c,d):(a=new File([d],c,{type:\"application/octet-stream\"}),\na=URL.createObjectURL(a),window.open(a,\"_blank\"))},1262513:()=>{g.$h()},1262544:()=>{g.ai()},1262573:()=>{g.ei()},1262598:a=>g.Zh(a),1262631:a=>g.ci(a),1262663:(a,b,c)=>{g.Vh(a,b,c,!0)},1262702:(a,b,c)=>{g.Vh(a,b,c)},1262735:a=>{g.je(\"Abs\",a,void 0)},1262786:a=>{g.je(\"Neg\",a,void 0)},1262837:a=>{g.je(\"Floor\",a,void 0)},1262890:a=>{g.je(\"Ceil\",a,void 0)},1262942:a=>{g.je(\"Reciprocal\",a,void 0)},1263E3:a=>{g.je(\"Sqrt\",a,void 0)},1263052:a=>{g.je(\"Exp\",a,void 0)},1263103:a=>{g.je(\"Erf\",a,void 0)},1263154:a=>\n{g.je(\"Sigmoid\",a,void 0)},1263209:(a,b,c)=>{g.je(\"HardSigmoid\",a,{alpha:b,beta:c})},1263288:a=>{g.je(\"Log\",a,void 0)},1263339:a=>{g.je(\"Sin\",a,void 0)},1263390:a=>{g.je(\"Cos\",a,void 0)},1263441:a=>{g.je(\"Tan\",a,void 0)},1263492:a=>{g.je(\"Asin\",a,void 0)},1263544:a=>{g.je(\"Acos\",a,void 
0)},1263596:a=>{g.je(\"Atan\",a,void 0)},1263648:a=>{g.je(\"Sinh\",a,void 0)},1263700:a=>{g.je(\"Cosh\",a,void 0)},1263752:a=>{g.je(\"Asinh\",a,void 0)},1263805:a=>{g.je(\"Acosh\",a,void 0)},1263858:a=>{g.je(\"Atanh\",a,void 0)},\n1263911:a=>{g.je(\"Tanh\",a,void 0)},1263963:a=>{g.je(\"Not\",a,void 0)},1264014:(a,b,c)=>{g.je(\"Clip\",a,{min:b,max:c})},1264083:a=>{g.je(\"Clip\",a,void 0)},1264135:(a,b)=>{g.je(\"Elu\",a,{alpha:b})},1264193:a=>{g.je(\"Relu\",a,void 0)},1264245:(a,b)=>{g.je(\"LeakyRelu\",a,{alpha:b})},1264309:(a,b)=>{g.je(\"ThresholdedRelu\",a,{alpha:b})},1264379:(a,b)=>{g.je(\"Cast\",a,{to:b})},1264437:a=>{g.je(\"Add\",a,void 0)},1264488:a=>{g.je(\"Sub\",a,void 0)},1264539:a=>{g.je(\"Mul\",a,void 0)},1264590:a=>{g.je(\"Div\",a,void 0)},\n1264641:a=>{g.je(\"Pow\",a,void 0)},1264692:a=>{g.je(\"Equal\",a,void 0)},1264745:a=>{g.je(\"Greater\",a,void 0)},1264800:a=>{g.je(\"GreaterOrEqual\",a,void 0)},1264862:a=>{g.je(\"Less\",a,void 0)},1264914:a=>{g.je(\"LessOrEqual\",a,void 0)},1264973:(a,b,c,d,e)=>{g.je(\"ReduceMean\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(G.subarray(d>>>0,e>>>0)):[]})},1265132:(a,b,c,d,e)=>{g.je(\"ReduceMax\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(G.subarray(d>>>0,e>>>0)):[]})},1265290:(a,b,c,d,e)=>\n{g.je(\"ReduceMin\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(G.subarray(d>>>0,e>>>0)):[]})},1265448:(a,b,c,d,e)=>{g.je(\"ReduceProd\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(G.subarray(d>>>0,e>>>0)):[]})},1265607:(a,b,c,d,e)=>{g.je(\"ReduceSum\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(G.subarray(d>>>0,e>>>0)):[]})},1265765:(a,b,c,d,e)=>{g.je(\"ReduceL1\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(G.subarray(d>>>0,e>>>0)):[]})},1265922:(a,b,c,d,e)=>\n{g.je(\"ReduceL2\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(G.subarray(d>>>0,e>>>0)):[]})},1266079:(a,b,c,d,e)=>{g.je(\"ReduceLogSum\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(G.subarray(d>>>0,e>>>0)):[]})},1266240:(a,b,c,d,e)=>{g.je(\"ReduceSumSquare\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(G.subarray(d>>>0,e>>>0)):[]})},1266404:(a,b,c,d,e)=>{g.je(\"ReduceLogSumExp\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:d?Array.from(G.subarray(d>>>0,e>>>0)):[]})},1266568:a=>\n{g.je(\"Where\",a,void 
0)},1266621:(a,b,c)=>{g.je(\"Transpose\",a,{perm:b?Array.from(G.subarray(b>>>0,c>>>0)):[]})},1266729:(a,b,c,d)=>{g.je(\"DepthToSpace\",a,{blocksize:b,mode:J(c),format:d?\"NHWC\":\"NCHW\"})},1266862:(a,b,c,d)=>{g.je(\"DepthToSpace\",a,{blocksize:b,mode:J(c),format:d?\"NHWC\":\"NCHW\"})},1266995:(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u)=>{g.je(\"ConvTranspose\",a,{format:l?\"NHWC\":\"NCHW\",autoPad:b,dilations:[c],group:d,kernelShape:[e],pads:[f,h],strides:[k],wIsConst:()=>!!z[m>>>0],outputPadding:n?Array.from(G.subarray(n>>>\n0,q>>>0)):[],outputShape:r?Array.from(G.subarray(r>>>0,p>>>0)):[],activation:J(u)})},1267396:(a,b,c,d,e,f,h,k,l,m,n,q,r,p)=>{g.je(\"ConvTranspose\",a,{format:k?\"NHWC\":\"NCHW\",autoPad:b,dilations:Array.from(G.subarray(c>>>0,(c>>>0)+2>>>0)),group:d,kernelShape:Array.from(G.subarray(e>>>0,(e>>>0)+2>>>0)),pads:Array.from(G.subarray(f>>>0,(f>>>0)+4>>>0)),strides:Array.from(G.subarray(h>>>0,(h>>>0)+2>>>0)),wIsConst:()=>!!z[l>>>0],outputPadding:m?Array.from(G.subarray(m>>>0,n>>>0)):[],outputShape:q?Array.from(G.subarray(q>>>\n0,r>>>0)):[],activation:J(p)})},1267961:(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u)=>{g.je(\"ConvTranspose\",a,{format:l?\"NHWC\":\"NCHW\",autoPad:b,dilations:[c],group:d,kernelShape:[e],pads:[f,h],strides:[k],wIsConst:()=>!!z[m>>>0],outputPadding:n?Array.from(G.subarray(n>>>0,q>>>0)):[],outputShape:r?Array.from(G.subarray(r>>>0,p>>>0)):[],activation:J(u)})},1268362:(a,b,c,d,e,f,h,k,l,m,n,q,r,p)=>{g.je(\"ConvTranspose\",a,{format:k?\"NHWC\":\"NCHW\",autoPad:b,dilations:Array.from(G.subarray(c>>>0,(c>>>0)+2>>>0)),group:d,\nkernelShape:Array.from(G.subarray(e>>>0,(e>>>0)+2>>>0)),pads:Array.from(G.subarray(f>>>0,(f>>>0)+4>>>0)),strides:Array.from(G.subarray(h>>>0,(h>>>0)+2>>>0)),wIsConst:()=>!!z[l>>>0],outputPadding:m?Array.from(G.subarray(m>>>0,n>>>0)):[],outputShape:q?Array.from(G.subarray(q>>>0,r>>>0)):[],activation:J(p)})},1268927:(a,b)=>{g.je(\"GlobalAveragePool\",a,{format:b?\"NHWC\":\"NCHW\"})},1269018:(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y)=>{g.je(\"AveragePool\",a,{format:y?\"NHWC\":\"NCHW\",auto_pad:b,ceil_mode:c,count_include_pad:d,\nstorage_order:e,dilations:[f,h],kernel_shape:[k,l],pads:[m,n,q,r],strides:[p,u]})},1269302:(a,b)=>{g.je(\"GlobalAveragePool\",a,{format:b?\"NHWC\":\"NCHW\"})},1269393:(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y)=>{g.je(\"AveragePool\",a,{format:y?\"NHWC\":\"NCHW\",auto_pad:b,ceil_mode:c,count_include_pad:d,storage_order:e,dilations:[f,h],kernel_shape:[k,l],pads:[m,n,q,r],strides:[p,u]})},1269677:(a,b)=>{g.je(\"GlobalMaxPool\",a,{format:b?\"NHWC\":\"NCHW\"})},1269764:(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y)=>{g.je(\"MaxPool\",a,{format:y?\n\"NHWC\":\"NCHW\",auto_pad:b,ceil_mode:c,count_include_pad:d,storage_order:e,dilations:[f,h],kernel_shape:[k,l],pads:[m,n,q,r],strides:[p,u]})},1270044:(a,b)=>{g.je(\"GlobalMaxPool\",a,{format:b?\"NHWC\":\"NCHW\"})},1270131:(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y)=>{g.je(\"MaxPool\",a,{format:y?\"NHWC\":\"NCHW\",auto_pad:b,ceil_mode:c,count_include_pad:d,storage_order:e,dilations:[f,h],kernel_shape:[k,l],pads:[m,n,q,r],strides:[p,u]})},1270411:(a,b,c,d,e)=>{g.je(\"Gemm\",a,{alpha:b,beta:c,transA:d,transB:e})},1270515:a=>\n{g.je(\"MatMul\",a,void 
0)},1270569:(a,b,c,d)=>{g.je(\"ArgMax\",a,{keepDims:!!b,selectLastIndex:!!c,axis:d})},1270677:(a,b,c,d)=>{g.je(\"ArgMin\",a,{keepDims:!!b,selectLastIndex:!!c,axis:d})},1270785:(a,b)=>{g.je(\"Softmax\",a,{axis:b})},1270848:(a,b)=>{g.je(\"Concat\",a,{axis:b})},1270908:(a,b,c,d,e)=>{g.je(\"Split\",a,{axis:b,numOutputs:c,splitSizes:d?Array.from(G.subarray(d>>>0,e>>>0)):[]})},1271048:a=>{g.je(\"Expand\",a,void 0)},1271102:(a,b)=>{g.je(\"Gather\",a,{axis:Number(b)})},1271173:(a,b)=>{g.je(\"GatherElements\",\na,{axis:Number(b)})},1271252:(a,b,c,d,e,f,h,k,l,m,n)=>{g.je(\"Resize\",a,{antialias:b,axes:c?Array.from(G.subarray(c>>>0,d>>>0)):[],coordinateTransformMode:J(e),cubicCoeffA:f,excludeOutside:h,extrapolationValue:k,keepAspectRatioPolicy:J(l),mode:J(m),nearestMode:J(n)})},1271598:(a,b,c,d,e,f,h)=>{g.je(\"Slice\",a,{starts:b?Array.from(G.subarray(b>>>0,c>>>0)):[],ends:d?Array.from(G.subarray(d>>>0,e>>>0)):[],axes:f?Array.from(G.subarray(f>>>0,h>>>0)):[]})},1271814:a=>{g.je(\"Tile\",a,void 0)},1271866:(a,b,\nc,d)=>{g.je(\"LayerNormalization\",a,{axis:b,epsilon:c,simplified:!!d})},1271977:(a,b,c)=>{g.je(\"InstanceNormalization\",a,{epsilon:b,format:c?\"NHWC\":\"NCHW\"})},1272091:(a,b,c)=>{g.je(\"InstanceNormalization\",a,{epsilon:b,format:c?\"NHWC\":\"NCHW\"})},1272205:a=>{g.je(\"Range\",a,void 0)},1272258:(a,b)=>{g.je(\"Einsum\",a,{equation:J(b)})},1272339:(a,b,c,d,e)=>{g.je(\"Pad\",a,{mode:b,value:c,pads:d?Array.from(G.subarray(d>>>0,e>>>0)):[]})},1272466:(a,b,c,d,e,f)=>{g.je(\"BatchNormalization\",a,{epsilon:b,momentum:c,\nspatial:!!e,trainingMode:!!d,format:f?\"NHWC\":\"NCHW\"})},1272635:(a,b,c,d,e,f)=>{g.je(\"BatchNormalization\",a,{epsilon:b,momentum:c,spatial:!!e,trainingMode:!!d,format:f?\"NHWC\":\"NCHW\"})},1272804:(a,b,c)=>{g.je(\"CumSum\",a,{exclusive:Number(b),reverse:Number(c)})},1272901:(a,b,c,d,e,f,h,k,l)=>{g.je(\"Attention\",a,{numHeads:b,isUnidirectional:c,maskFilterValue:d,scale:e,doRotary:f,qkvHiddenSizes:h?Array.from(G.subarray(Number(k)>>>0,Number(k)+h>>>0)):[],pastPresentShareBuffer:!!l})},1273173:a=>{g.je(\"BiasAdd\",\na,void 0)},1273228:a=>{g.je(\"BiasSplitGelu\",a,void 0)},1273289:a=>{g.je(\"FastGelu\",a,void 0)},1273345:(a,b,c,d,e,f,h,k,l,m,n,q,r)=>{g.je(\"Conv\",a,{format:l?\"NHWC\":\"NCHW\",auto_pad:b,dilations:[c],group:d,kernel_shape:[e],pads:f?Array.from(G.subarray(f>>>0,h>>>0)):[],strides:[k],w_is_const:()=>!!z[m>>>0],activation:J(n),activation_params:q?Array.from(va.subarray(q>>>0,r>>>0)):[]})},1273715:(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y)=>{g.je(\"Conv\",a,{format:q?\"NHWC\":\"NCHW\",auto_pad:b,dilations:[c,d],group:e,kernel_shape:[f,\nh],pads:k?Array.from(G.subarray(k>>>0,l>>>0)):[],strides:[m,n],w_is_const:()=>!!z[r>>>0],activation:J(p),activation_params:u?Array.from(va.subarray(u>>>0,y>>>0)):[]})},1274106:a=>{g.je(\"Gelu\",a,void 0)},1274158:(a,b,c,d,e,f)=>{g.je(\"MatMulNBits\",a,{k:b,n:c,accuracyLevel:d,bits:e,blockSize:f})},1274285:(a,b,c,d,e,f)=>{g.je(\"MultiHeadAttention\",a,{numHeads:b,isUnidirectional:c,maskFilterValue:d,scale:e,doRotary:f})},1274444:(a,b,c,d,e)=>{g.je(\"RotaryEmbedding\",a,{interleaved:!!b,numHeads:c,rotaryEmbeddingDim:d,\nscale:e})},1274583:(a,b,c)=>{g.je(\"SkipLayerNormalization\",a,{epsilon:b,simplified:!!c})},1274685:(a,b,c)=>{g.je(\"SkipLayerNormalization\",a,{epsilon:b,simplified:!!c})},1274787:(a,b,c,d)=>{g.je(\"LayerNormalization\",a,{axis:b,epsilon:c,simplified:!!d})},1274898:a=>{g.di(a)},1274932:(a,b)=>g.fi(a,b,g.Oh.gi,g.Oh.errors)};function Pa(a){this.name=\"ExitStatus\";this.message=`Program terminated with 
exit(${a})`;this.status=a}var Qa=[],Ra=0,L=0;class Sa{constructor(a){this.Nh=a;this.Ih=a-24}}\nvar Za=a=>{var b=L;if(!b)return Ta(0),0;var c=new Sa(b);I[c.Ih+16>>>2>>>0]=b;var d=I[c.Ih+4>>>2>>>0];if(!d)return Ta(0),b;for(var e in a){var f=a[e];if(0===f||f===d)break;if(Ua(f,d,c.Ih+16))return Ta(f),b}Ta(d);return b},$a=\"undefined\"!=typeof TextDecoder?new TextDecoder(\"utf8\"):void 0,ab=(a,b,c)=>{b>>>=0;var d=b+c;for(c=b;a[c]&&!(c>=d);)++c;if(16e?d+=String.fromCharCode(e):(e-=65536,d+=String.fromCharCode(55296|e>>10,56320|e&1023))}}else d+=String.fromCharCode(e)}return d},J=(a,b)=>(a>>>=0)?ab(E,a,b):\"\",bb=a=>{for(var b=0,c=0;c=d?b++:2047>=d?b+=2:55296<=d&&57343>=d?(b+=4,++c):b+=3}return b},M=(a,b,c,d)=>{c>>>=0;if(!(0=h){var k=a.charCodeAt(++f);h=65536+((h&1023)<<10)|k&1023}if(127>=h){if(c>=d)break;b[c++>>>0]=h}else{if(2047>=h){if(c+1>=d)break;b[c++>>>0]=192|h>>6}else{if(65535>=h){if(c+2>=d)break;b[c++>>>0]=224|h>>12}else{if(c+3>=d)break;b[c++>>>0]=240|h>>18;b[c++>>>0]=128|h>>12&63}b[c++>>>0]=128|h>>6&63}b[c++>>>0]=128|h&63}}b[c>>>0]=0;return c-e},cb,N=a=>{for(var b=\"\";E[a>>>0];)b+=cb[E[a++>>>0]];return b},db={},eb={},fb={},O;\nfunction gb(a,b,c={}){var d=b.name;if(!a)throw new O(`type \"${d}\" must have a positive integer typeid pointer`);if(eb.hasOwnProperty(a)){if(c.Xh)return;throw new O(`Cannot register type '${d}' twice`);}eb[a]=b;delete fb[a];db.hasOwnProperty(a)&&(b=db[a],delete db[a],b.forEach(e=>e()))}function P(a,b,c={}){if(!(\"argPackAdvance\"in b))throw new TypeError(\"registerType registeredInstance requires argPackAdvance\");return gb(a,b,c)}\nvar hb=(a,b,c)=>{switch(b){case 1:return c?d=>z[d>>>0]:d=>E[d>>>0];case 2:return c?d=>ta[d>>>1>>>0]:d=>ua[d>>>1>>>0];case 4:return c?d=>G[d>>>2>>>0]:d=>I[d>>>2>>>0];case 8:return c?d=>wa[d>>>3]:d=>xa[d>>>3];default:throw new TypeError(`invalid integer width (${b}): ${a}`);}},ib=[],Q=[];function jb(a){a>>>=0;9{if(!a)throw new O(\"Cannot use deleted val. 
handle = \"+a);return Q[a]},S=a=>{switch(a){case void 0:return 2;case null:return 4;case !0:return 6;case !1:return 8;default:const b=ib.pop()||Q.length;Q[b]=a;Q[b+1]=1;return b}};function kb(a){return this.fromWireType(I[a>>>2>>>0])}\nvar lb={name:\"emscripten::val\",fromWireType:a=>{var b=R(a);jb(a);return b},toWireType:(a,b)=>S(b),argPackAdvance:8,readValueFromPointer:kb,Mh:null},mb=(a,b)=>{switch(b){case 4:return function(c){return this.fromWireType(va[c>>>2>>>0])};case 8:return function(c){return this.fromWireType(ya[c>>>3>>>0])};default:throw new TypeError(`invalid float width (${b}): ${a}`);}},nb=\"undefined\"!=typeof TextDecoder?new TextDecoder(\"utf-16le\"):void 0,ob=(a,b)=>{var c=a>>1;for(var d=c+b/2;!(c>=d)&&ua[c>>>0];)++c;\nc<<=1;if(32>>0,c>>>0));c=\"\";for(d=0;!(d>=b/2);++d){var e=ta[a+2*d>>>1>>>0];if(0==e)break;c+=String.fromCharCode(e)}return c},pb=(a,b,c)=>{c??=2147483647;if(2>c)return 0;c-=2;var d=b;c=c<2*a.length?c/2:a.length;for(var e=0;e>>1>>>0]=a.charCodeAt(e),b+=2;ta[b>>>1>>>0]=0;return b-d},qb=a=>2*a.length,rb=(a,b)=>{for(var c=0,d=\"\";!(c>=b/4);){var e=G[a+4*c>>>2>>>0];if(0==e)break;++c;65536<=e?(e-=65536,d+=String.fromCharCode(55296|e>>10,56320|e&1023)):d+=\nString.fromCharCode(e)}return d},sb=(a,b,c)=>{b>>>=0;c??=2147483647;if(4>c)return 0;var d=b;c=d+c-4;for(var e=0;e=f){var h=a.charCodeAt(++e);f=65536+((f&1023)<<10)|h&1023}G[b>>>2>>>0]=f;b+=4;if(b+4>c)break}G[b>>>2>>>0]=0;return b-d},tb=a=>{for(var b=0,c=0;c=d&&++c;b+=4}return b},vb=(a,b)=>{var c=eb[a];if(void 0===c)throw a=ub(a),c=N(a),T(a),new O(`${b} has unknown type ${c}`);return c},wb=\n(a,b,c)=>{var d=[];a=a.toWireType(d,c);d.length&&(I[b>>>2>>>0]=S(d));return a},xb=a=>{try{a()}catch(b){Ga(b)}},yb=a=>{if(!x)try{a();try{sa=sa=a=sa,g.onExit?.(a),x=!0,ha(a,new Pa(a))}catch(b){b instanceof Pa||\"unwind\"==b||ha(1,b)}}catch(b){b instanceof Pa||\"unwind\"==b||ha(1,b)}};\nfunction zb(){var a=U,b={};for(let [c,d]of Object.entries(a))b[c]=\"function\"==typeof d?(...e)=>{Ab.push(c);try{return d(...e)}finally{x||(Ab.pop(),t&&1===V&&0===Ab.length&&(V=0,xb(Bb),\"undefined\"!=typeof Fibers&&Fibers.mi()))}}:d;return b}var V=0,t=null,Cb=0,Ab=[],Db={},Eb={},Fb=0,Gb=null,Hb=[];function ca(){return new Promise((a,b)=>{Gb={resolve:a,reject:b}})}\nfunction Ib(){var a=Jb(65548),b=a+12;I[a>>>2>>>0]=b;I[a+4>>>2>>>0]=b+65536;b=Ab[0];var c=Db[b];void 0===c&&(c=Fb++,Db[b]=c,Eb[c]=b);G[a+8>>>2>>>0]=c;return a}\nfunction Kb(a){if(!x){if(0===V){var b=!1,c=!1;a((d=0)=>{if(!x&&(Cb=d,b=!0,c)){V=2;xb(()=>Lb(t));\"undefined\"!=typeof Browser&&Browser.Sh.Wh&&Browser.Sh.resume();d=!1;try{var e=(0,U[Eb[G[t+8>>>2>>>0]]])()}catch(k){e=k,d=!0}var f=!1;if(!t){var h=Gb;h&&(Gb=null,(d?h.reject:h.resolve)(e),f=!0)}if(d&&!f)throw e;}});c=!0;b||(V=1,t=Ib(),\"undefined\"!=typeof Browser&&Browser.Sh.Wh&&Browser.Sh.pause(),xb(()=>Mb(t)))}else 2===V?(V=0,xb(Nb),T(t),t=null,Hb.forEach(yb)):Ga(`invalid state: ${V}`);return Cb}}\nfunction Ob(a){return Kb(b=>{a().then(b)})}var Pb=[],Qb={},Rb=a=>{var b=Qb[a];return void 0===b?N(a):b},Sb=()=>\"object\"==typeof globalThis?globalThis:Function(\"return this\")(),Tb=a=>{var b=Pb.length;Pb.push(a);return b},Ub=(a,b)=>{for(var c=Array(a),d=0;d>>2>>>0],\"parameter \"+d);return c},Vb=(a,b)=>Object.defineProperty(b,\"name\",{value:a});\nfunction Wb(a){var b=Function;if(!(b instanceof Function))throw new TypeError(`new_ called with constructor type ${typeof b} which is not a function`);var c=Vb(b.name||\"unknownFunctionName\",function(){});c.prototype=b.prototype;c=new c;a=b.apply(c,a);return a instanceof 
Object?a:c}\nvar W=a=>0===a%4&&(0!==a%100||0===a%400),Xb=[0,31,60,91,121,152,182,213,244,274,305,335],Yb=[0,31,59,90,120,151,181,212,243,273,304,334],Zb=[],$b=(a,b)=>{Zb.length=0;for(var c;c=E[a++>>>0];){var d=105!=c;d&=112!=c;b+=d&&b%8?4:0;Zb.push(112==c?I[b>>>2>>>0]:106==c?wa[b>>>3]:105==c?G[b>>>2>>>0]:ya[b>>>3>>>0]);b+=d?8:4}return Zb},ac={},cc=()=>{if(!bc){var a={USER:\"web_user\",LOGNAME:\"web_user\",PATH:\"/\",PWD:\"/\",HOME:\"/home/web_user\",LANG:(\"object\"==typeof navigator&&navigator.languages&&navigator.languages[0]||\n\"C\").replace(\"-\",\"_\")+\".UTF-8\",_:fa||\"./this.program\"},b;for(b in ac)void 0===ac[b]?delete a[b]:a[b]=ac[b];var c=[];for(b in a)c.push(`${b}=${a[b]}`);bc=c}return bc},bc,dc=[null,[],[]],ec=[31,29,31,30,31,30,31,31,30,31,30,31],fc=[31,28,31,30,31,30,31,31,30,31,30,31];function gc(a){var b=Array(bb(a)+1);M(a,b,0,b.length);return b}\nfunction hc(a,b,c,d){function e(p,u,y){for(p=\"number\"==typeof p?p.toString():p||\"\";p.lengthB?-1:0A-p.getDate())u-=A-p.getDate()+1,p.setDate(1),11>y?p.setMonth(y+1):(p.setMonth(0),p.setFullYear(p.getFullYear()+1));else{p.setDate(p.getDate()+u);break}}y=new Date(p.getFullYear()+1,0,4);u=k(new Date(p.getFullYear(),\n0,4));y=k(y);return 0>=h(u,p)?0>=h(y,p)?p.getFullYear()+1:p.getFullYear():p.getFullYear()-1}a>>>=0;b>>>=0;c>>>=0;d>>>=0;var m=I[d+40>>>2>>>0];d={ji:G[d>>>2>>>0],ii:G[d+4>>>2>>>0],Qh:G[d+8>>>2>>>0],Th:G[d+12>>>2>>>0],Rh:G[d+16>>>2>>>0],Lh:G[d+20>>>2>>>0],Jh:G[d+24>>>2>>>0],Kh:G[d+28>>>2>>>0],li:G[d+32>>>2>>>0],hi:G[d+36>>>2>>>0],ki:m?J(m):\"\"};c=J(c);m={\"%c\":\"%a %b %d %H:%M:%S %Y\",\"%D\":\"%m/%d/%y\",\"%F\":\"%Y-%m-%d\",\"%h\":\"%b\",\"%r\":\"%I:%M:%S %p\",\"%R\":\"%H:%M\",\"%T\":\"%H:%M:%S\",\"%x\":\"%m/%d/%y\",\"%X\":\"%H:%M:%S\",\n\"%Ec\":\"%c\",\"%EC\":\"%C\",\"%Ex\":\"%m/%d/%y\",\"%EX\":\"%H:%M:%S\",\"%Ey\":\"%y\",\"%EY\":\"%Y\",\"%Od\":\"%d\",\"%Oe\":\"%e\",\"%OH\":\"%H\",\"%OI\":\"%I\",\"%Om\":\"%m\",\"%OM\":\"%M\",\"%OS\":\"%S\",\"%Ou\":\"%u\",\"%OU\":\"%U\",\"%OV\":\"%V\",\"%Ow\":\"%w\",\"%OW\":\"%W\",\"%Oy\":\"%y\"};for(var n in m)c=c.replace(new RegExp(n,\"g\"),m[n]);var q=\"Sunday Monday Tuesday Wednesday Thursday Friday Saturday\".split(\" \"),r=\"January February March April May June July August September October November December\".split(\" \");m={\"%a\":p=>q[p.Jh].substring(0,3),\"%A\":p=>q[p.Jh],\"%b\":p=>\nr[p.Rh].substring(0,3),\"%B\":p=>r[p.Rh],\"%C\":p=>f((p.Lh+1900)/100|0,2),\"%d\":p=>f(p.Th,2),\"%e\":p=>e(p.Th,2,\" \"),\"%g\":p=>l(p).toString().substring(2),\"%G\":l,\"%H\":p=>f(p.Qh,2),\"%I\":p=>{p=p.Qh;0==p?p=12:12{for(var u=0,y=0;y<=p.Rh-1;u+=(W(p.Lh+1900)?ec:fc)[y++]);return f(p.Th+u,3)},\"%m\":p=>f(p.Rh+1,2),\"%M\":p=>f(p.ii,2),\"%n\":()=>\"\\n\",\"%p\":p=>0<=p.Qh&&12>p.Qh?\"AM\":\"PM\",\"%S\":p=>f(p.ji,2),\"%t\":()=>\"\\t\",\"%u\":p=>p.Jh||7,\"%U\":p=>f(Math.floor((p.Kh+7-p.Jh)/7),2),\"%V\":p=>{var u=\nMath.floor((p.Kh+7-(p.Jh+6)%7)/7);2>=(p.Jh+371-p.Kh-2)%7&&u++;if(u)53==u&&(y=(p.Jh+371-p.Kh)%7,4==y||3==y&&W(p.Lh)||(u=1));else{u=52;var y=(p.Jh+7-p.Kh-1)%7;(4==y||5==y&&W(p.Lh%400-1))&&u++}return f(u,2)},\"%w\":p=>p.Jh,\"%W\":p=>f(Math.floor((p.Kh+7-(p.Jh+6)%7)/7),2),\"%y\":p=>(p.Lh+1900).toString().substring(2),\"%Y\":p=>p.Lh+1900,\"%z\":p=>{p=p.hi;var u=0<=p;p=Math.abs(p)/60;return(u?\"+\":\"-\")+String(\"0000\"+(p/60*100+p%60)).slice(-4)},\"%Z\":p=>p.ki,\"%%\":()=>\"%\"};c=c.replace(/%%/g,\"\\x00\\x00\");for(n in m)c.includes(n)&&\n(c=c.replace(new RegExp(n,\"g\"),m[n](d)));c=c.replace(/\\0\\0/g,\"%\");n=gc(c);if(n.length>b)return 0;z.set(n,a>>>0);return n.length-1}for(var 
ic=Array(256),jc=0;256>jc;++jc)ic[jc]=String.fromCharCode(jc);cb=ic;O=g.BindingError=class extends Error{constructor(a){super(a);this.name=\"BindingError\"}};g.InternalError=class extends Error{constructor(a){super(a);this.name=\"InternalError\"}};Q.push(0,1,void 0,1,null,1,!0,1,!1,1);g.count_emval_handles=()=>Q.length/2-5-ib.length;\nvar Cf={bd:function(a,b,c){return Ob(async()=>{await g.bi(a,b,c)})},v:function(a){a=new Sa(a>>>0);0==z[a.Ih+12>>>0]&&(z[a.Ih+12>>>0]=1,Ra--);z[a.Ih+13>>>0]=0;Qa.push(a);kc(a.Nh);if(lc(I[a.Ih+4>>>2>>>0]))a=I[a.Nh>>>2>>>0];else{var b=I[a.Ih+16>>>2>>>0];a=0!==b?b:a.Nh}return a},N:()=>{X(0,0);var a=Qa.pop();mc(a.Nh);L=0},a:function(){return Za([])},m:function(a){return Za([a>>>0])},x:function(a,b){return Za([a>>>0,b>>>0])},q:function(a,b,c){return Za([a>>>0,b>>>0,c>>>0])},Bb:()=>{var a=Qa.pop();a||Ga(\"no exception to throw\");\nvar b=a.Nh;0==z[a.Ih+13>>>0]&&(Qa.push(a),z[a.Ih+13>>>0]=1,z[a.Ih+12>>>0]=0,Ra++);L=b;throw L;},s:function(a,b,c){a>>>=0;var d=new Sa(a);I[d.Ih+16>>>2>>>0]=0;I[d.Ih+4>>>2>>>0]=b>>>0;I[d.Ih+8>>>2>>>0]=c>>>0;L=a;Ra++;throw L;},fb:()=>Ra,g:function(a){L||=a>>>0;throw L;},Cb:function(){return 0},$c:function(){},Mc:function(){},Oc:function(){},Gc:function(){return 0},Zc:function(){},Uc:function(){},Yc:function(){},_b:function(){},Nc:function(){},Kc:function(){},_c:function(){},Lc:function(){},Wb:function(a,\nb,c){b=N(b>>>0);P(a>>>0,{name:b,fromWireType:d=>d,toWireType:function(d,e){if(\"bigint\"!=typeof e&&\"number\"!=typeof e)throw null===e?e=\"null\":(d=typeof e,e=\"object\"===d||\"array\"===d||\"function\"===d?e.toString():\"\"+e),new TypeError(`Cannot convert \"${e}\" to ${this.name}`);\"number\"==typeof e&&(e=BigInt(e));return e},argPackAdvance:8,readValueFromPointer:hb(b,c>>>0,-1==b.indexOf(\"u\")),Mh:null})},Ec:function(a,b,c,d){b=N(b>>>0);P(a>>>0,{name:b,fromWireType:function(e){return!!e},toWireType:function(e,\nf){return f?c:d},argPackAdvance:8,readValueFromPointer:function(e){return this.fromWireType(E[e>>>0])},Mh:null})},Cc:function(a){return P(a>>>0,lb)},Vb:function(a,b,c){b=N(b>>>0);P(a>>>0,{name:b,fromWireType:d=>d,toWireType:(d,e)=>e,argPackAdvance:8,readValueFromPointer:mb(b,c>>>0),Mh:null})},Aa:function(a,b,c,d,e){a>>>=0;c>>>=0;b=N(b>>>0);-1===e&&(e=4294967295);e=k=>k;if(0===d){var f=32-8*c;e=k=>k<>>f}var h=b.includes(\"unsigned\")?function(k,l){return l>>>0}:function(k,l){return l};P(a,{name:b,\nfromWireType:e,toWireType:h,argPackAdvance:8,readValueFromPointer:hb(b,c,0!==d),Mh:null})},_:function(a,b,c){function d(f){return new e(z.buffer,I[f+4>>>2>>>0],I[f>>>2>>>0])}var e=[Int8Array,Uint8Array,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array,BigInt64Array,BigUint64Array][b];c=N(c>>>0);P(a>>>0,{name:c,fromWireType:d,argPackAdvance:8,readValueFromPointer:d},{Xh:!0})},Xb:function(a,b){b=N(b>>>0);var c=\"std::string\"===b;P(a>>>0,{name:b,fromWireType:function(d){var e=I[d>>>\n2>>>0],f=d+4;if(c)for(var h=f,k=0;k<=e;++k){var l=f+k;if(k==e||0==E[l>>>0]){h=J(h,l-h);if(void 0===m)var m=h;else m+=String.fromCharCode(0),m+=h;h=l+1}}else{m=Array(e);for(k=0;k>>0]);m=m.join(\"\")}T(d);return m},toWireType:function(d,e){e instanceof ArrayBuffer&&(e=new Uint8Array(e));var f=\"string\"==typeof e;if(!(f||e instanceof Uint8Array||e instanceof Uint8ClampedArray||e instanceof Int8Array))throw new O(\"Cannot pass non-string to std::string\");var h=c&&f?bb(e):\ne.length;var k=Jb(4+h+1),l=k+4;I[k>>>2>>>0]=h;if(c&&f)M(e,E,l,h+1);else if(f)for(f=0;f>>0]=m}else for(f=0;f>>0]=e[f];null!==d&&d.push(T,k);return 
k},argPackAdvance:8,readValueFromPointer:kb,Mh(d){T(d)}})},Ab:function(a,b,c){b>>>=0;c>>>=0;c=N(c);if(2===b){var d=ob;var e=pb;var f=qb;var h=k=>ua[k>>>1>>>0]}else 4===b&&(d=rb,e=sb,f=tb,h=k=>I[k>>>2>>>0]);P(a>>>0,{name:c,\nfromWireType:k=>{for(var l=I[k>>>2>>>0],m,n=k+4,q=0;q<=l;++q){var r=k+4+q*b;if(q==l||0==h(r))n=d(n,r-n),void 0===m?m=n:(m+=String.fromCharCode(0),m+=n),n=r+b}T(k);return m},toWireType:(k,l)=>{if(\"string\"!=typeof l)throw new O(`Cannot pass non-string to C++ string type ${c}`);var m=f(l),n=Jb(4+m+b);I[n>>>2>>>0]=m/b;e(l,n+4,m+b);null!==k&&k.push(T,n);return n},argPackAdvance:8,readValueFromPointer:kb,Mh(k){T(k)}})},Fc:function(a,b){b=N(b>>>0);P(a>>>0,{Yh:!0,name:b,argPackAdvance:0,fromWireType:()=>\n{},toWireType:()=>{}})},ad:()=>1,kd:function(a,b,c){b>>>=0;c>>>=0;a=R(a>>>0);b=vb(b,\"emval::as\");return wb(b,c,a)},Cd:function(a){a>>>=0;return Ob(()=>{a=R(a);return a.then(S)})},ud:function(a,b,c,d){c>>>=0;d>>>=0;a=Pb[a>>>0];b=R(b>>>0);return a(null,b,c,d)},ia:function(a,b,c,d,e){c>>>=0;d>>>=0;e>>>=0;a=Pb[a>>>0];b=R(b>>>0);c=Rb(c);return a(b,b[c],d,e)},Bc:jb,qd:function(a,b){b>>>=0;a=R(a>>>0);b=R(b);return a==b},zd:function(a){a>>>=0;if(0===a)return S(Sb());a=Rb(a);return S(Sb()[a])},ha:function(a,\nb,c){b=Ub(a,b>>>0);var d=b.shift();a--;var e=\"return function (obj, func, destructorsRef, args) {\\n\",f=0,h=[];0===c&&h.push(\"obj\");for(var k=[\"retType\"],l=[d],m=0;mn.name).join(\", \")}) => ${d.name}>`;return Tb(Vb(c,a))},yd:function(a,b){b>>>=0;a=R(a>>>0);b=R(b);return S(a[b])},ba:function(a){a>>>=0;9>>0);for(var b=Array(a.length),c=0;c>>0))},Xa:function(){return S({})},vd:function(a){a>>>=0;for(var b=R(a);b.length;){var c=b.pop();b.pop()(c)}jb(a)},sd:function(a,b,c){b>>>=0;c>>>=0;\na=R(a>>>0);b=R(b);c=R(c);a[b]=c},zb:function(a,b){b>>>=0;a=vb(a>>>0,\"_emval_take_value\");a=a.readValueFromPointer(b);return S(a)},Rc:function(a,b){a=-9007199254740992>a||9007199254740992>>=0;a=new Date(1E3*a);G[b>>>2>>>0]=a.getUTCSeconds();G[b+4>>>2>>>0]=a.getUTCMinutes();G[b+8>>>2>>>0]=a.getUTCHours();G[b+12>>>2>>>0]=a.getUTCDate();G[b+16>>>2>>>0]=a.getUTCMonth();G[b+20>>>2>>>0]=a.getUTCFullYear()-1900;G[b+24>>>2>>>0]=a.getUTCDay();G[b+28>>>2>>>0]=(a.getTime()-Date.UTC(a.getUTCFullYear(),\n0,1,0,0,0,0))/864E5|0},Sc:function(a,b){a=-9007199254740992>a||9007199254740992>>=0;a=new Date(1E3*a);G[b>>>2>>>0]=a.getSeconds();G[b+4>>>2>>>0]=a.getMinutes();G[b+8>>>2>>>0]=a.getHours();G[b+12>>>2>>>0]=a.getDate();G[b+16>>>2>>>0]=a.getMonth();G[b+20>>>2>>>0]=a.getFullYear()-1900;G[b+24>>>2>>>0]=a.getDay();G[b+28>>>2>>>0]=(W(a.getFullYear())?Xb:Yb)[a.getMonth()]+a.getDate()-1|0;G[b+36>>>2>>>0]=-(60*a.getTimezoneOffset());var c=(new Date(a.getFullYear(),6,1)).getTimezoneOffset(),\nd=(new Date(a.getFullYear(),0,1)).getTimezoneOffset();G[b+32>>>2>>>0]=(c!=d&&a.getTimezoneOffset()==Math.min(d,c))|0},Tc:function(a){a>>>=0;var b=new Date(G[a+20>>>2>>>0]+1900,G[a+16>>>2>>>0],G[a+12>>>2>>>0],G[a+8>>>2>>>0],G[a+4>>>2>>>0],G[a>>>2>>>0],0),c=G[a+32>>>2>>>0],d=b.getTimezoneOffset(),e=(new Date(b.getFullYear(),6,1)).getTimezoneOffset(),f=(new Date(b.getFullYear(),0,1)).getTimezoneOffset(),h=Math.min(f,e);0>c?G[a+32>>>2>>>0]=Number(e!=f&&h==d):0>>2>>>0]=b.getDay();G[a+28>>>2>>>0]=(W(b.getFullYear())?Xb:Yb)[b.getMonth()]+b.getDate()-1|0;G[a>>>2>>>0]=b.getSeconds();G[a+4>>>2>>>0]=b.getMinutes();G[a+8>>>2>>>0]=b.getHours();G[a+12>>>2>>>0]=b.getDate();G[a+16>>>2>>>0]=b.getMonth();G[a+20>>>2>>>0]=b.getYear();a=b.getTime();return 
BigInt(isNaN(a)?-1:a/1E3)},Pc:function(){return-52},Qc:function(){},Ic:function(a,b,c,d){c>>>=0;d>>>=0;var e=(new Date).getFullYear(),f=new Date(e,0,1),h=new Date(e,6,1);e=f.getTimezoneOffset();\nvar k=h.getTimezoneOffset();I[a>>>0>>>2>>>0]=60*Math.max(e,k);G[b>>>0>>>2>>>0]=Number(e!=k);a=l=>l.toLocaleTimeString(void 0,{hour12:!1,timeZoneName:\"short\"}).split(\" \")[1];f=a(f);h=a(h);k{Ga(\"\")},A:function(a,b,c){a>>>=0;b=$b(b>>>0,c>>>0);return Oa[a](...b)},bc:function(a,b,c){a>>>=0;b=$b(b>>>0,c>>>0);return Oa[a](...b)},$b:()=>Date.now(),Jc:function(){return 4294901760},ga:()=>performance.now(),Hc:function(a){a>>>=0;var b=E.length;if(4294901760<\na)return!1;for(var c=1;4>=c;c*=2){var d=b*(1+.2/c);d=Math.min(d,a+100663296);var e=Math;d=Math.max(a,d);a:{e=(e.min.call(e,4294901760,d+(65536-d%65536)%65536)-ra.buffer.byteLength+65535)/65536;try{ra.grow(e);za();var f=1;break a}catch(h){}f=void 0}if(f)return!0}return!1},Wc:function(a,b){a>>>=0;b>>>=0;var c=0;cc().forEach((d,e)=>{var f=b+c;e=I[a+4*e>>>2>>>0]=f;for(f=0;f>>0]=d.charCodeAt(f);z[e>>>0]=0;c+=d.length+1});return 0},Xc:function(a,b){a>>>=0;b>>>=0;var c=cc();I[a>>>2>>>\n0]=c.length;var d=0;c.forEach(e=>d+=e.length+1);I[b>>>2>>>0]=d;return 0},Db:()=>52,Zb:function(){return 52},Vc:function(){return 70},Yb:function(a,b,c,d){b>>>=0;c>>>=0;d>>>=0;for(var e=0,f=0;f>>2>>>0],k=I[b+4>>>2>>>0];b+=8;for(var l=0;l>>0],n=dc[a];0===m||10===m?((1===a?qa:w)(ab(n,0)),n.length=0):n.push(m)}e+=k}I[d>>>2>>>0]=e;return 0},xb:nc,cd:oc,ua:pc,W:qc,$:rc,ra:sc,ta:tc,dd:uc,ob:vc,P:wc,z:xc,b:yc,Ub:zc,ya:Ac,e:Bc,kb:Cc,h:Dc,X:Ec,i:Fc,ed:Gc,j:Hc,t:Ic,r:Jc,\no:Kc,Wa:Lc,Ca:Mc,ma:Nc,Qb:Oc,db:Pc,Ib:Qc,mb:Rc,kc:Sc,xc:Tc,hc:Uc,ic:Vc,ac:Wc,oa:Xc,yb:Yc,Ba:Zc,Eb:$c,ea:ad,jc:bd,Ta:cd,F:dd,G:ed,Gb:fd,jd:gd,qa:hd,O:jd,V:kd,T:ld,y:md,Fb:nd,gc:od,D:pd,Hb:qd,id:rd,Ua:sd,wa:td,lc:ud,cc:vd,Nb:wd,aa:xd,I:yd,C:zd,_a:Ad,fc:Bd,Q:Cd,d:Dd,ab:Ed,n:Fd,Ya:Gd,va:Hd,wb:Id,f:Jd,yc:Kd,da:Ld,gb:Md,Da:Nd,lb:Od,hb:Pd,c:Qd,vc:Rd,od:Sd,k:Td,tc:Ud,l:Vd,wc:Wd,sc:Xd,rd:Yd,p:Zd,Ra:$d,tb:ae,Qa:be,Kb:ce,B:de,K:ee,S:fe,$a:ge,pc:he,ub:ie,za:je,ka:ke,xa:le,Sb:me,La:ne,jb:oe,Ga:pe,nc:qe,Ha:re,\nIa:se,fd:te,xd:ue,Z:ve,pa:we,pd:xe,wd:ye,Mb:ze,Ma:Ae,Ka:Be,Tb:Ce,rc:De,Ja:Ee,Na:Fe,pb:Ge,la:He,Ea:Ie,mc:Je,qc:Ke,Jb:Le,Fa:Me,ja:Ne,Ad:Oe,nd:Pe,R:Qe,eb:Re,Za:Se,ec:Te,ib:Ue,E:Ve,M:We,Va:Xe,ld:Ye,ca:Ze,nb:$e,na:af,dc:bf,Ac:cf,u:df,L:ef,td:ff,Pb:gf,oc:hf,Bd:jf,Ob:kf,Lb:lf,cb:mf,zc:nf,Rb:of,Oa:pf,Y:qf,uc:rf,J:sf,gd:tf,vb:uf,sa:vf,H:wf,rb:xf,Pa:yf,Sa:zf,sb:Af,qb:Bf,w:function(a){return a>>>0},Dc:hc,fa:function(a,b,c,d){return hc(a>>>0,b>>>0,c>>>0,d>>>0)}},U=function(){function a(c){U=c.exports;U=zb();\nU=Df();ra=U.Dd;za();Ba.unshift(U.Ed);Da--;0==Da&&(null!==Ea&&(clearInterval(Ea),Ea=null),Fa&&(c=Fa,Fa=null,c()));return U}var b={a:Cf};Da++;if(g.instantiateWasm)try{return g.instantiateWasm(b,a)}catch(c){w(`Module.instantiateWasm callback failed with error: 
${c}`),ba(c)}Na(b,function(c){a(c.instance)}).catch(ba);return{}}(),ub=a=>(ub=U.Fd)(a);g._OrtInit=(a,b)=>(g._OrtInit=U.Gd)(a,b);g._OrtGetLastError=(a,b)=>(g._OrtGetLastError=U.Hd)(a,b);\ng._OrtCreateSessionOptions=(a,b,c,d,e,f,h,k,l,m)=>(g._OrtCreateSessionOptions=U.Id)(a,b,c,d,e,f,h,k,l,m);g._OrtAppendExecutionProvider=(a,b)=>(g._OrtAppendExecutionProvider=U.Jd)(a,b);g._OrtAddFreeDimensionOverride=(a,b,c)=>(g._OrtAddFreeDimensionOverride=U.Kd)(a,b,c);g._OrtAddSessionConfigEntry=(a,b,c)=>(g._OrtAddSessionConfigEntry=U.Ld)(a,b,c);g._OrtReleaseSessionOptions=a=>(g._OrtReleaseSessionOptions=U.Md)(a);g._OrtCreateSession=(a,b,c)=>(g._OrtCreateSession=U.Nd)(a,b,c);\ng._OrtReleaseSession=a=>(g._OrtReleaseSession=U.Od)(a);g._OrtGetInputOutputCount=(a,b,c)=>(g._OrtGetInputOutputCount=U.Pd)(a,b,c);g._OrtGetInputName=(a,b)=>(g._OrtGetInputName=U.Qd)(a,b);g._OrtGetOutputName=(a,b)=>(g._OrtGetOutputName=U.Rd)(a,b);g._OrtFree=a=>(g._OrtFree=U.Sd)(a);g._OrtCreateTensor=(a,b,c,d,e,f)=>(g._OrtCreateTensor=U.Td)(a,b,c,d,e,f);g._OrtGetTensorData=(a,b,c,d,e)=>(g._OrtGetTensorData=U.Ud)(a,b,c,d,e);g._OrtReleaseTensor=a=>(g._OrtReleaseTensor=U.Vd)(a);\ng._OrtCreateRunOptions=(a,b,c,d)=>(g._OrtCreateRunOptions=U.Wd)(a,b,c,d);g._OrtAddRunConfigEntry=(a,b,c)=>(g._OrtAddRunConfigEntry=U.Xd)(a,b,c);g._OrtReleaseRunOptions=a=>(g._OrtReleaseRunOptions=U.Yd)(a);g._OrtCreateBinding=a=>(g._OrtCreateBinding=U.Zd)(a);g._OrtBindInput=(a,b,c)=>(g._OrtBindInput=U._d)(a,b,c);g._OrtBindOutput=(a,b,c,d)=>(g._OrtBindOutput=U.$d)(a,b,c,d);g._OrtClearBoundOutputs=a=>(g._OrtClearBoundOutputs=U.ae)(a);g._OrtReleaseBinding=a=>(g._OrtReleaseBinding=U.be)(a);\ng._OrtRunWithBinding=(a,b,c,d,e)=>(g._OrtRunWithBinding=U.ce)(a,b,c,d,e);g._OrtRun=(a,b,c,d,e,f,h,k)=>(g._OrtRun=U.de)(a,b,c,d,e,f,h,k);g._OrtEndProfiling=a=>(g._OrtEndProfiling=U.ee)(a);g._JsepOutput=(a,b,c)=>(g._JsepOutput=U.fe)(a,b,c);g._JsepGetNodeName=a=>(g._JsepGetNodeName=U.ge)(a);\nvar 
Jb=g._malloc=a=>(Jb=g._malloc=U.he)(a),T=g._free=a=>(T=g._free=U.ie)(a),X=(a,b)=>(X=U.ke)(a,b),Ta=a=>(Ta=U.le)(a),Y=a=>(Y=U.me)(a),Ef=a=>(Ef=U.ne)(a),Z=()=>(Z=U.oe)(),mc=a=>(mc=U.pe)(a),kc=a=>(kc=U.qe)(a),Ua=(a,b,c)=>(Ua=U.re)(a,b,c),lc=a=>(lc=U.se)(a),dynCall_vii=g.dynCall_vii=(a,b,c)=>(dynCall_vii=g.dynCall_vii=U.te)(a,b,c),Ff=g.dynCall_iiii=(a,b,c,d)=>(Ff=g.dynCall_iiii=U.ue)(a,b,c,d),dynCall_iii=g.dynCall_iii=(a,b,c)=>(dynCall_iii=g.dynCall_iii=U.ve)(a,b,c),Gf=g.dynCall_ii=(a,b)=>(Gf=g.dynCall_ii=\nU.we)(a,b),Hf=g.dynCall_iiiiiii=(a,b,c,d,e,f,h)=>(Hf=g.dynCall_iiiiiii=U.xe)(a,b,c,d,e,f,h),dynCall_vi=g.dynCall_vi=(a,b)=>(dynCall_vi=g.dynCall_vi=U.ye)(a,b),dynCall_v=g.dynCall_v=a=>(dynCall_v=g.dynCall_v=U.ze)(a),If=g.dynCall_iiiiii=(a,b,c,d,e,f)=>(If=g.dynCall_iiiiii=U.Ae)(a,b,c,d,e,f),Jf=g.dynCall_iiij=(a,b,c,d)=>(Jf=g.dynCall_iiij=U.Be)(a,b,c,d),Kf=g.dynCall_iiiii=(a,b,c,d,e)=>(Kf=g.dynCall_iiiii=U.Ce)(a,b,c,d,e),Lf=g.dynCall_viii=(a,b,c,d)=>(Lf=g.dynCall_viii=U.De)(a,b,c,d),Mf=g.dynCall_viiiii=\n(a,b,c,d,e,f)=>(Mf=g.dynCall_viiiii=U.Ee)(a,b,c,d,e,f),Nf=g.dynCall_viiii=(a,b,c,d,e)=>(Nf=g.dynCall_viiii=U.Fe)(a,b,c,d,e),Of=g.dynCall_viiiiii=(a,b,c,d,e,f,h)=>(Of=g.dynCall_viiiiii=U.Ge)(a,b,c,d,e,f,h),Pf=g.dynCall_viiji=(a,b,c,d,e)=>(Pf=g.dynCall_viiji=U.He)(a,b,c,d,e),Qf=g.dynCall_viiiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q)=>(Qf=g.dynCall_viiiiiiiiiii=U.Ie)(a,b,c,d,e,f,h,k,l,m,n,q),Rf=g.dynCall_viiijjjii=(a,b,c,d,e,f,h,k,l)=>(Rf=g.dynCall_viiijjjii=U.Je)(a,b,c,d,e,f,h,k,l),Sf=g.dynCall_iid=(a,b,c)=>\n(Sf=g.dynCall_iid=U.Ke)(a,b,c),Tf=g.dynCall_iif=(a,b,c)=>(Tf=g.dynCall_iif=U.Le)(a,b,c),Uf=g.dynCall_iij=(a,b,c)=>(Uf=g.dynCall_iij=U.Me)(a,b,c),Vf=g.dynCall_jii=(a,b,c)=>(Vf=g.dynCall_jii=U.Ne)(a,b,c),Wf=g.dynCall_i=a=>(Wf=g.dynCall_i=U.Oe)(a),Xf=g.dynCall_viiiiiiii=(a,b,c,d,e,f,h,k,l)=>(Xf=g.dynCall_viiiiiiii=U.Pe)(a,b,c,d,e,f,h,k,l),Yf=g.dynCall_viiiiij=(a,b,c,d,e,f,h)=>(Yf=g.dynCall_viiiiij=U.Qe)(a,b,c,d,e,f,h),Zf=g.dynCall_ji=(a,b)=>(Zf=g.dynCall_ji=U.Re)(a,b),$f=g.dynCall_viij=(a,b,c,d)=>($f=\ng.dynCall_viij=U.Se)(a,b,c,d),ag=g.dynCall_iiiiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q)=>(ag=g.dynCall_iiiiiiiiiiii=U.Te)(a,b,c,d,e,f,h,k,l,m,n,q),bg=g.dynCall_viiiiiiiii=(a,b,c,d,e,f,h,k,l,m)=>(bg=g.dynCall_viiiiiiiii=U.Ue)(a,b,c,d,e,f,h,k,l,m),cg=g.dynCall_ij=(a,b)=>(cg=g.dynCall_ij=U.Ve)(a,b),dg=g.dynCall_iiiiij=(a,b,c,d,e,f)=>(dg=g.dynCall_iiiiij=U.We)(a,b,c,d,e,f),eg=g.dynCall_j=a=>(eg=g.dynCall_j=U.Xe)(a),fg=g.dynCall_vij=(a,b,c)=>(fg=g.dynCall_vij=U.Ye)(a,b,c),gg=g.dynCall_viijjjiiiiii=(a,b,c,d,\ne,f,h,k,l,m,n,q)=>(gg=g.dynCall_viijjjiiiiii=U.Ze)(a,b,c,d,e,f,h,k,l,m,n,q),hg=g.dynCall_viiijiiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q)=>(hg=g.dynCall_viiijiiiiiii=U._e)(a,b,c,d,e,f,h,k,l,m,n,q),ig=g.dynCall_iiiiiiii=(a,b,c,d,e,f,h,k)=>(ig=g.dynCall_iiiiiiii=U.$e)(a,b,c,d,e,f,h,k),jg=g.dynCall_viiiiiii=(a,b,c,d,e,f,h,k)=>(jg=g.dynCall_viiiiiii=U.af)(a,b,c,d,e,f,h,k),kg=g.dynCall_iiiiiiiij=(a,b,c,d,e,f,h,k,l)=>(kg=g.dynCall_iiiiiiiij=U.bf)(a,b,c,d,e,f,h,k,l),lg=g.dynCall_viiiiiiiiiiiii=(a,b,c,d,e,f,h,k,l,\nm,n,q,r,p)=>(lg=g.dynCall_viiiiiiiiiiiii=U.cf)(a,b,c,d,e,f,h,k,l,m,n,q,r,p),mg=g.dynCall_iiiiiiiii=(a,b,c,d,e,f,h,k,l)=>(mg=g.dynCall_iiiiiiiii=U.df)(a,b,c,d,e,f,h,k,l),ng=g.dynCall_iiiiijiiiii=(a,b,c,d,e,f,h,k,l,m,n)=>(ng=g.dynCall_iiiiijiiiii=U.ef)(a,b,c,d,e,f,h,k,l,m,n),og=g.dynCall_vijjjiiij=(a,b,c,d,e,f,h,k,l)=>(og=g.dynCall_vijjjiiij=U.ff)(a,b,c,d,e,f,h,k,l),pg=g.dynCall_fi=(a,b)=>(pg=g.dynCall_fi=U.gf)(a,b),qg=g.dynCall_fii=(a,b,c)=>(qg=g.dynCall_fii=U.hf)(a,b,c),rg=g.dynCall_di=(a,b)=>(rg=\ng.dynCall_di=U.jf)
(a,b),sg=g.dynCall_dii=(a,b,c)=>(sg=g.dynCall_dii=U.kf)(a,b,c),tg=g.dynCall_vijj=(a,b,c,d)=>(tg=g.dynCall_vijj=U.lf)(a,b,c,d),ug=g.dynCall_iiiiiiiiii=(a,b,c,d,e,f,h,k,l,m)=>(ug=g.dynCall_iiiiiiiiii=U.mf)(a,b,c,d,e,f,h,k,l,m),vg=g.dynCall_viijiii=(a,b,c,d,e,f,h)=>(vg=g.dynCall_viijiii=U.nf)(a,b,c,d,e,f,h),wg=g.dynCall_viid=(a,b,c,d)=>(wg=g.dynCall_viid=U.of)(a,b,c,d),xg=g.dynCall_viffiii=(a,b,c,d,e,f,h)=>(xg=g.dynCall_viffiii=U.pf)(a,b,c,d,e,f,h),yg=g.dynCall_viifiii=(a,b,c,d,e,f,\nh)=>(yg=g.dynCall_viifiii=U.qf)(a,b,c,d,e,f,h),zg=g.dynCall_viiiiidiidi=(a,b,c,d,e,f,h,k,l,m,n)=>(zg=g.dynCall_viiiiidiidi=U.rf)(a,b,c,d,e,f,h,k,l,m,n),Ag=g.dynCall_viiiiiiiiidi=(a,b,c,d,e,f,h,k,l,m,n,q)=>(Ag=g.dynCall_viiiiiiiiidi=U.sf)(a,b,c,d,e,f,h,k,l,m,n,q),Bg=g.dynCall_jiii=(a,b,c,d)=>(Bg=g.dynCall_jiii=U.tf)(a,b,c,d),Cg=g.dynCall_vjiiiiii=(a,b,c,d,e,f,h,k)=>(Cg=g.dynCall_vjiiiiii=U.uf)(a,b,c,d,e,f,h,k),Dg=g.dynCall_viiid=(a,b,c,d,e)=>(Dg=g.dynCall_viiid=U.vf)(a,b,c,d,e),Eg=g.dynCall_viiiiiiiiiji=\n(a,b,c,d,e,f,h,k,l,m,n,q)=>(Eg=g.dynCall_viiiiiiiiiji=U.wf)(a,b,c,d,e,f,h,k,l,m,n,q),Fg=g.dynCall_viji=(a,b,c,d)=>(Fg=g.dynCall_viji=U.xf)(a,b,c,d),Gg=g.dynCall_vijjjjjjjjjjjjji=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y)=>(Gg=g.dynCall_vijjjjjjjjjjjjji=U.yf)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y),Hg=g.dynCall_viiiji=(a,b,c,d,e,f)=>(Hg=g.dynCall_viiiji=U.zf)(a,b,c,d,e,f),Ig=g.dynCall_vijjjiiji=(a,b,c,d,e,f,h,k,l)=>(Ig=g.dynCall_vijjjiiji=U.Af)(a,b,c,d,e,f,h,k,l),Jg=g.dynCall_iiiji=(a,b,c,d,e)=>(Jg=g.dynCall_iiiji=\nU.Bf)(a,b,c,d,e),Kg=g.dynCall_iiijiiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p)=>(Kg=g.dynCall_iiijiiiiiiiiii=U.Cf)(a,b,c,d,e,f,h,k,l,m,n,q,r,p),Lg=g.dynCall_vj=(a,b)=>(Lg=g.dynCall_vj=U.Df)(a,b),Mg=g.dynCall_jjj=(a,b,c)=>(Mg=g.dynCall_jjj=U.Ef)(a,b,c),Ng=g.dynCall_iiijiiiiii=(a,b,c,d,e,f,h,k,l,m)=>(Ng=g.dynCall_iiijiiiiii=U.Ff)(a,b,c,d,e,f,h,k,l,m),Og=g.dynCall_vfiii=(a,b,c,d,e)=>(Og=g.dynCall_vfiii=U.Gf)(a,b,c,d,e),Pg=g.dynCall_viiiiff=(a,b,c,d,e,f,h)=>(Pg=g.dynCall_viiiiff=U.Hf)(a,b,c,d,e,f,h),Qg=g.dynCall_viiiiiff=\n(a,b,c,d,e,f,h,k)=>(Qg=g.dynCall_viiiiiff=U.If)(a,b,c,d,e,f,h,k),Rg=g.dynCall_viiff=(a,b,c,d,e)=>(Rg=g.dynCall_viiff=U.Jf)(a,b,c,d,e),Sg=g.dynCall_viiiiiiiiifiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p)=>(Sg=g.dynCall_viiiiiiiiifiii=U.Kf)(a,b,c,d,e,f,h,k,l,m,n,q,r,p),Tg=g.dynCall_viiiiiiiijj=(a,b,c,d,e,f,h,k,l,m,n)=>(Tg=g.dynCall_viiiiiiiijj=U.Lf)(a,b,c,d,e,f,h,k,l,m,n),Ug=g.dynCall_iiiiiiiiiiiiiifii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A)=>(Ug=g.dynCall_iiiiiiiiiiiiiifii=U.Mf)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A),\nVg=g.dynCall_viiiiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r)=>(Vg=g.dynCall_viiiiiiiiiiii=U.Nf)(a,b,c,d,e,f,h,k,l,m,n,q,r),Wg=g.dynCall_iiiiiiiiiiiiiiiiifii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D)=>(Wg=g.dynCall_iiiiiiiiiiiiiiiiifii=U.Of)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D),Xg=g.dynCall_vijjiiiiii=(a,b,c,d,e,f,h,k,l,m)=>(Xg=g.dynCall_vijjiiiiii=U.Pf)(a,b,c,d,e,f,h,k,l,m),Yg=g.dynCall_iiiijjj=(a,b,c,d,e,f,h)=>(Yg=g.dynCall_iiiijjj=U.Qf)(a,b,c,d,e,f,h),Zg=g.dynCall_viiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,\nn)=>(Zg=g.dynCall_viiiiiiiiii=U.Rf)(a,b,c,d,e,f,h,k,l,m,n),$g=g.dynCall_iiijjj=(a,b,c,d,e,f)=>($g=g.dynCall_iiijjj=U.Sf)(a,b,c,d,e,f),ah=g.dynCall_fffffff=(a,b,c,d,e,f,h)=>(ah=g.dynCall_fffffff=U.Tf)(a,b,c,d,e,f,h),bh=g.dynCall_viiiij=(a,b,c,d,e,f)=>(bh=g.dynCall_viiiij=U.Uf)(a,b,c,d,e,f),ch=g.dynCall_viiiiiijiifiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p)=>(ch=g.dynCall_viiiiiijiifiii=U.Vf)(a,b,c,d,e,f,h,k,l,m,n,q,r,p),dh=g.dynCall_vjjjjjjffjifiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B)=>(dh=g.dynCall_vjjjjjjffjifiiiiii=\nU.Wf)
(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B),eh=g.dynCall_viiiiiiffjifiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A)=>(eh=g.dynCall_viiiiiiffjifiiiii=U.Xf)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A),fh=g.dynCall_viiiiiiffjfiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y)=>(fh=g.dynCall_viiiiiiffjfiiiii=U.Yf)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y),gh=g.dynCall_viiiiiiffjiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u)=>(gh=g.dynCall_viiiiiiffjiiiii=U.Zf)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u),hh=g.dynCall_vjjjjjjjjfffiiifiiiii=(a,b,c,d,e,f,h,k,l,\nm,n,q,r,p,u,y,A,B,C,D,F)=>(hh=g.dynCall_vjjjjjjjjfffiiifiiiii=U._f)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F),ih=g.dynCall_vjjjjjjfffifijiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C)=>(ih=g.dynCall_vjjjjjjfffifijiiiii=U.$f)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C),jh=g.dynCall_vjjjjjjfffifiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B)=>(jh=g.dynCall_vjjjjjjfffifiiiiii=U.ag)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B),kh=g.dynCall_vjjjjjjjjfffjifiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F)=>(kh=g.dynCall_vjjjjjjjjfffjifiiiiii=\nU.bg)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F),lh=g.dynCall_vijiiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r)=>(lh=g.dynCall_vijiiiiiiiiii=U.cg)(a,b,c,d,e,f,h,k,l,m,n,q,r),mh=g.dynCall_vijjfffiii=(a,b,c,d,e,f,h,k,l,m)=>(mh=g.dynCall_vijjfffiii=U.dg)(a,b,c,d,e,f,h,k,l,m),nh=g.dynCall_viiiiiiijiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r)=>(nh=g.dynCall_viiiiiiijiiii=U.eg)(a,b,c,d,e,f,h,k,l,m,n,q,r),oh=g.dynCall_vijjjjjjifiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u)=>(oh=g.dynCall_vijjjjjjifiiiii=U.fg)(a,b,c,d,e,f,h,k,l,m,n,\nq,r,p,u),ph=g.dynCall_viifi=(a,b,c,d,e)=>(ph=g.dynCall_viifi=U.gg)(a,b,c,d,e),qh=g.dynCall_vjjjjjiiii=(a,b,c,d,e,f,h,k,l,m)=>(qh=g.dynCall_vjjjjjiiii=U.hg)(a,b,c,d,e,f,h,k,l,m),rh=g.dynCall_vjjjjfiii=(a,b,c,d,e,f,h,k,l)=>(rh=g.dynCall_vjjjjfiii=U.ig)(a,b,c,d,e,f,h,k,l),sh=g.dynCall_viiiiiijiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p)=>(sh=g.dynCall_viiiiiijiiiiii=U.jg)(a,b,c,d,e,f,h,k,l,m,n,q,r,p),th=g.dynCall_vijjii=(a,b,c,d,e,f)=>(th=g.dynCall_vijjii=U.kg)(a,b,c,d,e,f),uh=g.dynCall_viiiiijjiiiii=(a,b,c,\nd,e,f,h,k,l,m,n,q,r)=>(uh=g.dynCall_viiiiijjiiiii=U.lg)(a,b,c,d,e,f,h,k,l,m,n,q,r),vh=g.dynCall_iiiiiji=(a,b,c,d,e,f,h)=>(vh=g.dynCall_iiiiiji=U.mg)(a,b,c,d,e,f,h),wh=g.dynCall_iiiiji=(a,b,c,d,e,f)=>(wh=g.dynCall_iiiiji=U.ng)(a,b,c,d,e,f),xh=g.dynCall_viiiiijiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r)=>(xh=g.dynCall_viiiiijiiiiii=U.og)(a,b,c,d,e,f,h,k,l,m,n,q,r),yh=g.dynCall_viiijiiiiii=(a,b,c,d,e,f,h,k,l,m,n)=>(yh=g.dynCall_viiijiiiiii=U.pg)(a,b,c,d,e,f,h,k,l,m,n),zh=g.dynCall_viijj=(a,b,c,d,e)=>(zh=g.dynCall_viijj=\nU.qg)(a,b,c,d,e),Ah=g.dynCall_viiiijii=(a,b,c,d,e,f,h,k)=>(Ah=g.dynCall_viiiijii=U.rg)(a,b,c,d,e,f,h,k),Bh=g.dynCall_viijjiii=(a,b,c,d,e,f,h,k)=>(Bh=g.dynCall_viijjiii=U.sg)(a,b,c,d,e,f,h,k),Ch=g.dynCall_ijii=(a,b,c,d)=>(Ch=g.dynCall_ijii=U.tg)(a,b,c,d),Dh=g.dynCall_viiiiijjji=(a,b,c,d,e,f,h,k,l,m)=>(Dh=g.dynCall_viiiiijjji=U.ug)(a,b,c,d,e,f,h,k,l,m),Eh=g.dynCall_vijjjjiij=(a,b,c,d,e,f,h,k,l)=>(Eh=g.dynCall_vijjjjiij=U.vg)(a,b,c,d,e,f,h,k,l),Fh=g.dynCall_viiiiijij=(a,b,c,d,e,f,h,k,l)=>(Fh=g.dynCall_viiiiijij=\nU.wg)(a,b,c,d,e,f,h,k,l),Gh=g.dynCall_viiiiiijij=(a,b,c,d,e,f,h,k,l,m)=>(Gh=g.dynCall_viiiiiijij=U.xg)(a,b,c,d,e,f,h,k,l,m),Hh=g.dynCall_vijiii=(a,b,c,d,e,f)=>(Hh=g.dynCall_vijiii=U.yg)(a,b,c,d,e,f),Ih=g.dynCall_viiiiiiiiifi=(a,b,c,d,e,f,h,k,l,m,n,q)=>(Ih=g.dynCall_viiiiiiiiifi=U.zg)(a,b,c,d,e,f,h,k,l,m,n,q),Jh=g.dynCall_iiijiiii=(a,b,c,d,e,f,h,k)=>(Jh=g.dynCall_iiijiiii=U.Ag)(a,b,c,d,e,f,h,k),Kh=g.dynCall_viiiiiijjiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p)=>(Kh=g.dynCall_viiiiiijjiiiii=U.B
g)(a,b,c,d,e,\nf,h,k,l,m,n,q,r,p),Lh=g.dynCall_viiiiiiijiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u)=>(Lh=g.dynCall_viiiiiiijiiiiii=U.Cg)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u),Mh=g.dynCall_vif=(a,b,c)=>(Mh=g.dynCall_vif=U.Dg)(a,b,c),Nh=g.dynCall_viif=(a,b,c,d)=>(Nh=g.dynCall_viif=U.Eg)(a,b,c,d),Oh=g.dynCall_fiii=(a,b,c,d)=>(Oh=g.dynCall_fiii=U.Fg)(a,b,c,d),Ph=g.dynCall_diii=(a,b,c,d)=>(Ph=g.dynCall_diii=U.Gg)(a,b,c,d),Qh=g.dynCall_viiiiiifii=(a,b,c,d,e,f,h,k,l,m)=>(Qh=g.dynCall_viiiiiifii=U.Hg)(a,b,c,d,e,f,h,k,l,m),Rh=g.dynCall_viiiiijiiiiiiiiiiiiiiiiiii=\n(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F,H,K,Va,Wa,Xa)=>(Rh=g.dynCall_viiiiijiiiiiiiiiiiiiiiiiii=U.Ig)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F,H,K,Va,Wa,Xa),Sh=g.dynCall_viijji=(a,b,c,d,e,f)=>(Sh=g.dynCall_viijji=U.Jg)(a,b,c,d,e,f),Th=g.dynCall_iiiiiiiiiiiji=(a,b,c,d,e,f,h,k,l,m,n,q,r)=>(Th=g.dynCall_iiiiiiiiiiiji=U.Kg)(a,b,c,d,e,f,h,k,l,m,n,q,r),Uh=g.dynCall_viifiifijjjii=(a,b,c,d,e,f,h,k,l,m,n,q,r)=>(Uh=g.dynCall_viifiifijjjii=U.Lg)(a,b,c,d,e,f,h,k,l,m,n,q,r),Vh=g.dynCall_viiiiiiiiiiiiiiiiiiii=\n(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F)=>(Vh=g.dynCall_viiiiiiiiiiiiiiiiiiii=U.Mg)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F),Wh=g.dynCall_viiiiifiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r)=>(Wh=g.dynCall_viiiiifiiiiii=U.Ng)(a,b,c,d,e,f,h,k,l,m,n,q,r),Xh=g.dynCall_vijiiiiiiijjii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p)=>(Xh=g.dynCall_vijiiiiiiijjii=U.Og)(a,b,c,d,e,f,h,k,l,m,n,q,r,p),Yh=g.dynCall_viiiiiiiiiiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C)=>(Yh=g.dynCall_viiiiiiiiiiiiiiiiii=U.Pg)(a,b,c,d,e,f,h,k,l,\nm,n,q,r,p,u,y,A,B,C),Zh=g.dynCall_viiiiiiiiiiiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D)=>(Zh=g.dynCall_viiiiiiiiiiiiiiiiiii=U.Qg)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D),$h=g.dynCall_viiiiiiiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y)=>($h=g.dynCall_viiiiiiiiiiiiiii=U.Rg)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y),ai=g.dynCall_viiiiiiiiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A)=>(ai=g.dynCall_viiiiiiiiiiiiiiii=U.Sg)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A),bi=g.dynCall_viiiijjj=(a,b,c,d,e,f,h,k)=>(bi=\ng.dynCall_viiiijjj=U.Tg)(a,b,c,d,e,f,h,k),ci=g.dynCall_iiiiid=(a,b,c,d,e,f)=>(ci=g.dynCall_iiiiid=U.Ug)(a,b,c,d,e,f),di=g.dynCall_viiiiiiijjj=(a,b,c,d,e,f,h,k,l,m,n)=>(di=g.dynCall_viiiiiiijjj=U.Vg)(a,b,c,d,e,f,h,k,l,m,n),ei=g.dynCall_iiiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,n)=>(ei=g.dynCall_iiiiiiiiiii=U.Wg)(a,b,c,d,e,f,h,k,l,m,n),fi=g.dynCall_iiiiiiiiiiiiiiiiiifi=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D)=>(fi=g.dynCall_iiiiiiiiiiiiiiiiiifi=U.Xg)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D),gi=g.dynCall_viiif=\n(a,b,c,d,e)=>(gi=g.dynCall_viiif=U.Yg)(a,b,c,d,e),hi=g.dynCall_viiijiiiii=(a,b,c,d,e,f,h,k,l,m)=>(hi=g.dynCall_viiijiiiii=U.Zg)(a,b,c,d,e,f,h,k,l,m),ii=g.dynCall_viiij=(a,b,c,d,e)=>(ii=g.dynCall_viiij=U._g)(a,b,c,d,e),ji=g.dynCall_iijjj=(a,b,c,d,e)=>(ji=g.dynCall_iijjj=U.$g)(a,b,c,d,e),ki=g.dynCall_viiiiji=(a,b,c,d,e,f,h)=>(ki=g.dynCall_viiiiji=U.ah)(a,b,c,d,e,f,h),li=g.dynCall_iijjji=(a,b,c,d,e,f)=>(li=g.dynCall_iijjji=U.bh)(a,b,c,d,e,f),mi=g.dynCall_ijijji=(a,b,c,d,e,f)=>(mi=g.dynCall_ijijji=U.ch)(a,\nb,c,d,e,f),ni=g.dynCall_viiijjiii=(a,b,c,d,e,f,h,k,l)=>(ni=g.dynCall_viiijjiii=U.dh)(a,b,c,d,e,f,h,k,l),oi=g.dynCall_iiiiijji=(a,b,c,d,e,f,h,k)=>(oi=g.dynCall_iiiiijji=U.eh)(a,b,c,d,e,f,h,k),pi=g.dynCall_iiiifi=(a,b,c,d,e,f)=>(pi=g.dynCall_iiiifi=U.fh)(a,b,c,d,e,f),qi=g.dynCall_iiijii=(a,b,c,d,e,f)=>(qi=g.dynCall_iiijii=U.gh)(a,b,c,d,e,f),ri=g.dynCall_iiiiiiiiijii=(a,b,c,d,e,f,h,k,l,m,n,q)=>(ri=g.dynCall_iiiiiiiiijii=U.hh)(a,b,c,d,e,f,h,k,l,m,n,q),si=g.dynCall_iiiijjii=(
a,b,c,d,e,f,h,k)=>(si=g.dynCall_iiiijjii=\nU.ih)(a,b,c,d,e,f,h,k),ti=g.dynCall_iiiiiijjjii=(a,b,c,d,e,f,h,k,l,m,n)=>(ti=g.dynCall_iiiiiijjjii=U.jh)(a,b,c,d,e,f,h,k,l,m,n),ui=g.dynCall_iiijiii=(a,b,c,d,e,f,h)=>(ui=g.dynCall_iiijiii=U.kh)(a,b,c,d,e,f,h),vi=g.dynCall_iiiiiiiijjjfi=(a,b,c,d,e,f,h,k,l,m,n,q,r)=>(vi=g.dynCall_iiiiiiiijjjfi=U.lh)(a,b,c,d,e,f,h,k,l,m,n,q,r),wi=g.dynCall_iijiiii=(a,b,c,d,e,f,h)=>(wi=g.dynCall_iijiiii=U.mh)(a,b,c,d,e,f,h),xi=g.dynCall_iijjjii=(a,b,c,d,e,f,h)=>(xi=g.dynCall_iijjjii=U.nh)(a,b,c,d,e,f,h),yi=g.dynCall_jij=\n(a,b,c)=>(yi=g.dynCall_jij=U.oh)(a,b,c),zi=g.dynCall_iiji=(a,b,c,d)=>(zi=g.dynCall_iiji=U.ph)(a,b,c,d),Ai=g.dynCall_iiif=(a,b,c,d)=>(Ai=g.dynCall_iiif=U.qh)(a,b,c,d),Bi=g.dynCall_vidi=(a,b,c,d)=>(Bi=g.dynCall_vidi=U.rh)(a,b,c,d),Ci=g.dynCall_vjiii=(a,b,c,d,e)=>(Ci=g.dynCall_vjiii=U.sh)(a,b,c,d,e),Di=g.dynCall_diiii=(a,b,c,d,e)=>(Di=g.dynCall_diiii=U.th)(a,b,c,d,e),Ei=g.dynCall_diiiii=(a,b,c,d,e,f)=>(Ei=g.dynCall_diiiii=U.uh)(a,b,c,d,e,f),Fi=g.dynCall_viiijjiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q)=>(Fi=g.dynCall_viiijjiiiiii=\nU.vh)(a,b,c,d,e,f,h,k,l,m,n,q),Gi=g.dynCall_viijjijjjjiii=(a,b,c,d,e,f,h,k,l,m,n,q,r)=>(Gi=g.dynCall_viijjijjjjiii=U.wh)(a,b,c,d,e,f,h,k,l,m,n,q,r),Hi=g.dynCall_iiiij=(a,b,c,d,e)=>(Hi=g.dynCall_iiiij=U.xh)(a,b,c,d,e),Ii=g.dynCall_viiijii=(a,b,c,d,e,f,h)=>(Ii=g.dynCall_viiijii=U.yh)(a,b,c,d,e,f,h),Ji=g.dynCall_viijiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r)=>(Ji=g.dynCall_viijiiiiiiiii=U.zh)(a,b,c,d,e,f,h,k,l,m,n,q,r),Ki=g.dynCall_fiiii=(a,b,c,d,e)=>(Ki=g.dynCall_fiiii=U.Ah)(a,b,c,d,e),Li=g.dynCall_jfi=\n(a,b,c)=>(Li=g.dynCall_jfi=U.Bh)(a,b,c),Mi=g.dynCall_viiiiiiiiiiiiii=(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u)=>(Mi=g.dynCall_viiiiiiiiiiiiii=U.Ch)(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u),Ni=g.dynCall_jiij=(a,b,c,d)=>(Ni=g.dynCall_jiij=U.Dh)(a,b,c,d),Mb=a=>(Mb=U.Eh)(a),Bb=()=>(Bb=U.Fh)(),Lb=a=>(Lb=U.Gh)(a),Nb=()=>(Nb=U.Hh)();g.___start_em_js=1275044;g.___stop_em_js=1275205;function Dc(a,b,c,d){var e=Z();try{return Ff(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}\nfunction Bc(a,b,c){var d=Z();try{return dynCall_iii(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0)}}function Jd(a,b,c){var d=Z();try{dynCall_vii(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0)}}function yc(a,b){var c=Z();try{return Gf(a,b)}catch(d){Y(c);if(d!==d+0)throw d;X(1,0)}}function Fd(a,b){var c=Z();try{dynCall_vi(a,b)}catch(d){Y(c);if(d!==d+0)throw d;X(1,0)}}function dd(a,b,c,d){var e=Z();try{return Jf(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}\nfunction Dd(a){var b=Z();try{dynCall_v(a)}catch(c){Y(b);if(c!==c+0)throw c;X(1,0)}}function Ic(a,b,c,d,e,f,h){var k=Z();try{return Hf(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function Hc(a,b,c,d,e,f){var h=Z();try{return If(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function Fc(a,b,c,d,e){var f=Z();try{return Kf(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function Qd(a,b,c,d){var e=Z();try{Lf(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}\nfunction Td(a,b,c,d,e){var f=Z();try{Nf(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function Vd(a,b,c,d,e,f){var h=Z();try{Mf(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function Zd(a,b,c,d,e,f,h){var k=Z();try{Of(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function ke(a,b,c,d,e,f,h,k,l,m,n,q){var r=Z();try{Qf(a,b,c,d,e,f,h,k,l,m,n,q)}catch(p){Y(r);if(p!==p+0)throw p;X(1,0)}}function zc(a,b,c){var d=Z();try{return Sf(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0)}}\nfunction Ac(a,b,c){var d=Z();try{return 
Tf(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0)}}function md(a,b,c){var d=Z();try{return Uf(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0)}}function yd(a,b,c){var d=Z();try{return Vf(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0);return 0n}}function ee(a,b,c,d,e,f,h,k,l){var m=Z();try{Xf(a,b,c,d,e,f,h,k,l)}catch(n){Y(m);if(n!==n+0)throw n;X(1,0)}}function xc(a){var b=Z();try{return Wf(a)}catch(c){Y(b);if(c!==c+0)throw c;X(1,0)}}\nfunction df(a,b,c){var d=Z();try{fg(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0)}}function We(a,b,c,d,e){var f=Z();try{Pf(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function Ce(a,b,c,d,e,f,h){var k=Z();try{Yf(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function cf(a,b,c,d,e,f,h,k,l,m,n,q){var r=Z();try{gg(a,b,c,d,e,f,h,k,l,m,n,q)}catch(p){Y(r);if(p!==p+0)throw p;X(1,0)}}\nfunction Re(a,b,c,d,e,f,h,k,l,m,n,q){var r=Z();try{hg(a,b,c,d,e,f,h,k,l,m,n,q)}catch(p){Y(r);if(p!==p+0)throw p;X(1,0)}}function Jc(a,b,c,d,e,f,h,k){var l=Z();try{return ig(a,b,c,d,e,f,h,k)}catch(m){Y(l);if(m!==m+0)throw m;X(1,0)}}function de(a,b,c,d,e,f,h,k){var l=Z();try{jg(a,b,c,d,e,f,h,k)}catch(m){Y(l);if(m!==m+0)throw m;X(1,0)}}function Nc(a,b,c,d,e,f,h,k,l,m,n,q){var r=Z();try{return ag(a,b,c,d,e,f,h,k,l,m,n,q)}catch(p){Y(r);if(p!==p+0)throw p;X(1,0)}}\nfunction fe(a,b,c,d,e,f,h,k,l,m){var n=Z();try{bg(a,b,c,d,e,f,h,k,l,m)}catch(q){Y(n);if(q!==q+0)throw q;X(1,0)}}function me(a,b,c,d,e,f,h,k,l,m,n,q,r,p){var u=Z();try{lg(a,b,c,d,e,f,h,k,l,m,n,q,r,p)}catch(y){Y(u);if(y!==y+0)throw y;X(1,0)}}function Yc(a,b,c,d,e,f,h,k,l,m,n){var q=Z();try{return ng(a,b,c,d,e,f,h,k,l,m,n)}catch(r){Y(q);if(r!==r+0)throw r;X(1,0)}}function Kc(a,b,c,d,e,f,h,k,l){var m=Z();try{return mg(a,b,c,d,e,f,h,k,l)}catch(n){Y(m);if(n!==n+0)throw n;X(1,0)}}\nfunction nf(a,b,c,d,e,f,h,k,l){var m=Z();try{og(a,b,c,d,e,f,h,k,l)}catch(n){Y(m);if(n!==n+0)throw n;X(1,0)}}function Ve(a,b,c,d){var e=Z();try{$f(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}function tc(a,b){var c=Z();try{return pg(a,b)}catch(d){Y(c);if(d!==d+0)throw d;X(1,0)}}function xd(a,b){var c=Z();try{return Zf(a,b)}catch(d){Y(c);if(d!==d+0)throw d;X(1,0);return 0n}}function nc(a,b){var c=Z();try{return rg(a,b)}catch(d){Y(c);if(d!==d+0)throw d;X(1,0)}}\nfunction jf(a,b,c,d){var e=Z();try{tg(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}function Oe(a,b,c,d,e,f,h){var k=Z();try{Ii(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function Ze(a,b,c,d,e){var f=Z();try{zh(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function Lc(a,b,c,d,e,f,h,k,l,m){var n=Z();try{return ug(a,b,c,d,e,f,h,k,l,m)}catch(q){Y(n);if(q!==q+0)throw q;X(1,0)}}function Xe(a,b,c,d,e,f,h){var k=Z();try{vg(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}\nfunction Kd(a,b,c,d){var e=Z();try{wg(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}function Tc(a,b,c,d,e,f,h,k,l){var m=Z();try{return kg(a,b,c,d,e,f,h,k,l)}catch(n){Y(m);if(n!==n+0)throw n;X(1,0)}}function Id(a,b,c,d,e,f,h){var k=Z();try{xg(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function Ue(a,b,c,d,e,f,h,k,l){var m=Z();try{Rf(a,b,c,d,e,f,h,k,l)}catch(n){Y(m);if(n!==n+0)throw n;X(1,0)}}\nfunction Pd(a,b,c,d,e,f,h){var k=Z();try{yg(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function Wd(a,b,c,d,e,f,h,k,l,m,n){var q=Z();try{zg(a,b,c,d,e,f,h,k,l,m,n)}catch(r){Y(q);if(r!==r+0)throw r;X(1,0)}}function zd(a,b,c,d){var e=Z();try{return Bg(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0);return 0n}}function uf(a,b,c,d,e,f,h,k){var 
l=Z();try{Cg(a,b,c,d,e,f,h,k)}catch(m){Y(l);if(m!==m+0)throw m;X(1,0)}}\nfunction Rd(a,b,c,d,e){var f=Z();try{Dg(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function ef(a,b,c,d){var e=Z();try{Fg(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}function rf(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y){var A=Z();try{Gg(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y)}catch(B){Y(A);if(B!==B+0)throw B;X(1,0)}}function Ne(a,b,c,d,e,f){var h=Z();try{Hg(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}\nfunction of(a,b,c,d,e,f,h,k,l){var m=Z();try{Ig(a,b,c,d,e,f,h,k,l)}catch(n){Y(m);if(n!==n+0)throw n;X(1,0)}}function ed(a,b,c,d,e){var f=Z();try{return Jg(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function kd(a,b,c,d,e,f,h,k,l,m,n,q,r,p){var u=Z();try{return Kg(a,b,c,d,e,f,h,k,l,m,n,q,r,p)}catch(y){Y(u);if(y!==y+0)throw y;X(1,0)}}function sf(a,b){var c=Z();try{Lg(a,b)}catch(d){Y(c);if(d!==d+0)throw d;X(1,0)}}\nfunction Cd(a,b,c){var d=Z();try{return Mg(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0);return 0n}}function jd(a,b,c,d,e,f,h,k,l,m){var n=Z();try{return Ng(a,b,c,d,e,f,h,k,l,m)}catch(q){Y(n);if(q!==q+0)throw q;X(1,0)}}function ie(a,b,c,d,e,f,h,k,l,m,n,q,r,p){var u=Z();try{Sg(a,b,c,d,e,f,h,k,l,m,n,q,r,p)}catch(y){Y(u);if(y!==y+0)throw y;X(1,0)}}function Ed(a,b,c,d,e){var f=Z();try{Og(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}\nfunction Ud(a,b,c,d,e,f,h){var k=Z();try{Pg(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function Md(a,b,c,d,e){var f=Z();try{Rg(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function Xd(a,b,c,d,e,f,h,k){var l=Z();try{Qg(a,b,c,d,e,f,h,k)}catch(m){Y(l);if(m!==m+0)throw m;X(1,0)}}function ue(a,b,c,d,e,f,h,k,l,m,n){var q=Z();try{Tg(a,b,c,d,e,f,h,k,l,m,n)}catch(r){Y(q);if(r!==r+0)throw r;X(1,0)}}\nfunction Oc(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A){var B=Z();try{return Ug(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A)}catch(C){Y(B);if(C!==C+0)throw C;X(1,0)}}function le(a,b,c,d,e,f,h,k,l,m,n,q,r){var p=Z();try{Vg(a,b,c,d,e,f,h,k,l,m,n,q,r)}catch(u){Y(p);if(u!==u+0)throw u;X(1,0)}}function sd(a,b){var c=Z();try{return cg(a,b)}catch(d){Y(c);if(d!==d+0)throw d;X(1,0)}}function wc(a,b,c,d,e){var f=Z();try{return Ki(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}\nfunction Pc(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D){var F=Z();try{return Wg(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D)}catch(H){Y(F);if(H!==H+0)throw H;X(1,0)}}function mf(a,b,c,d,e,f,h,k,l,m){var n=Z();try{Xg(a,b,c,d,e,f,h,k,l,m)}catch(q){Y(n);if(q!==q+0)throw q;X(1,0)}}function cd(a,b,c,d,e,f,h){var k=Z();try{return Yg(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function je(a,b,c,d,e,f,h,k,l,m,n){var q=Z();try{Zg(a,b,c,d,e,f,h,k,l,m,n)}catch(r){Y(q);if(r!==r+0)throw r;X(1,0)}}\nfunction ld(a,b,c,d,e,f){var h=Z();try{return $g(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function Ie(a,b,c,d,e,f){var h=Z();try{bh(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function ye(a,b,c,d,e,f,h,k,l,m,n,q,r,p){var u=Z();try{ch(a,b,c,d,e,f,h,k,l,m,n,q,r,p)}catch(y){Y(u);if(y!==y+0)throw y;X(1,0)}}function zf(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B){var C=Z();try{dh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B)}catch(D){Y(C);if(D!==D+0)throw D;X(1,0)}}\nfunction ae(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A){var B=Z();try{eh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A)}catch(C){Y(B);if(C!==C+0)throw C;X(1,0)}}function $d(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y){var A=Z();try{fh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y)}catch(B){Y(A);if(B!==B+0)throw B;X(1,0)}}function be(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u){var 
y=Z();try{gh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u)}catch(A){Y(y);if(A!==A+0)throw A;X(1,0)}}\nfunction Af(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F){var H=Z();try{hh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F)}catch(K){Y(H);if(K!==K+0)throw K;X(1,0)}}function yf(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C){var D=Z();try{ih(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C)}catch(F){Y(D);if(F!==F+0)throw F;X(1,0)}}function xf(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B){var C=Z();try{jh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B)}catch(D){Y(C);if(D!==D+0)throw D;X(1,0)}}\nfunction Bf(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F){var H=Z();try{kh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F)}catch(K){Y(H);if(K!==K+0)throw K;X(1,0)}}function gf(a,b,c,d,e,f,h,k,l,m,n,q,r){var p=Z();try{lh(a,b,c,d,e,f,h,k,l,m,n,q,r)}catch(u){Y(p);if(u!==u+0)throw u;X(1,0)}}function kf(a,b,c,d,e,f,h,k,l,m){var n=Z();try{mh(a,b,c,d,e,f,h,k,l,m)}catch(q){Y(n);if(q!==q+0)throw q;X(1,0)}}\nfunction ve(a,b,c,d,e,f,h,k,l,m,n,q,r){var p=Z();try{nh(a,b,c,d,e,f,h,k,l,m,n,q,r)}catch(u){Y(p);if(u!==u+0)throw u;X(1,0)}}function qf(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u){var y=Z();try{oh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u)}catch(A){Y(y);if(A!==A+0)throw A;X(1,0)}}function wf(a,b,c,d,e,f,h,k,l,m){var n=Z();try{qh(a,b,c,d,e,f,h,k,l,m)}catch(q){Y(n);if(q!==q+0)throw q;X(1,0)}}function vf(a,b,c,d,e,f,h,k,l){var m=Z();try{rh(a,b,c,d,e,f,h,k,l)}catch(n){Y(m);if(n!==n+0)throw n;X(1,0)}}\nfunction sc(a,b,c,d,e,f,h){var k=Z();try{return ah(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function Nd(a,b,c,d,e){var f=Z();try{ph(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function wd(a,b,c){var d=Z();try{return Li(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0);return 0n}}function ze(a,b,c,d,e,f,h,k,l,m,n,q,r,p){var u=Z();try{sh(a,b,c,d,e,f,h,k,l,m,n,q,r,p)}catch(y){Y(u);if(y!==y+0)throw y;X(1,0)}}\nfunction Ge(a,b,c,d,e,f,h,k,l,m,n,q,r){var p=Z();try{uh(a,b,c,d,e,f,h,k,l,m,n,q,r)}catch(u){Y(p);if(u!==u+0)throw u;X(1,0)}}function Xc(a,b,c,d,e,f,h){var k=Z();try{return vh(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function ad(a,b,c,d,e,f){var h=Z();try{return wh(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function De(a,b,c,d,e,f,h,k,l,m,n,q,r){var p=Z();try{xh(a,b,c,d,e,f,h,k,l,m,n,q,r)}catch(u){Y(p);if(u!==u+0)throw u;X(1,0)}}\nfunction lf(a,b,c,d,e,f){var h=Z();try{th(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function Qe(a,b,c,d,e,f,h,k,l,m,n){var q=Z();try{yh(a,b,c,d,e,f,h,k,l,m,n)}catch(r){Y(q);if(r!==r+0)throw r;X(1,0)}}function Ke(a,b,c,d,e,f,h,k){var l=Z();try{Ah(a,b,c,d,e,f,h,k)}catch(m){Y(l);if(m!==m+0)throw m;X(1,0)}}function af(a,b,c,d,e,f,h,k){var l=Z();try{Bh(a,b,c,d,e,f,h,k)}catch(m){Y(l);if(m!==m+0)throw m;X(1,0)}}\nfunction td(a,b,c,d){var e=Z();try{return Ch(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}function He(a,b,c,d,e,f,h,k,l,m){var n=Z();try{Dh(a,b,c,d,e,f,h,k,l,m)}catch(q){Y(n);if(q!==q+0)throw q;X(1,0)}}function pf(a,b,c,d,e,f,h,k,l){var m=Z();try{Eh(a,b,c,d,e,f,h,k,l)}catch(n){Y(m);if(n!==n+0)throw n;X(1,0)}}function Fe(a,b,c,d,e,f,h,k,l){var m=Z();try{Fh(a,b,c,d,e,f,h,k,l)}catch(n){Y(m);if(n!==n+0)throw n;X(1,0)}}\nfunction Ae(a,b,c,d,e,f,h,k,l,m){var n=Z();try{Gh(a,b,c,d,e,f,h,k,l,m)}catch(q){Y(n);if(q!==q+0)throw q;X(1,0)}}function ff(a,b,c,d,e,f){var h=Z();try{Hh(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function he(a,b,c,d,e,f,h,k,l,m,n,q){var r=Z();try{Ih(a,b,c,d,e,f,h,k,l,m,n,q)}catch(p){Y(r);if(p!==p+0)throw p;X(1,0)}}function hd(a,b,c,d,e,f,h,k){var l=Z();try{return 
Jh(a,b,c,d,e,f,h,k)}catch(m){Y(l);if(m!==m+0)throw m;X(1,0)}}\nfunction ne(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u){var y=Z();try{Mi(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u)}catch(A){Y(y);if(A!==A+0)throw A;X(1,0)}}function Be(a,b,c,d,e,f,h,k,l,m,n,q,r,p){var u=Z();try{Kh(a,b,c,d,e,f,h,k,l,m,n,q,r,p)}catch(y){Y(u);if(y!==y+0)throw y;X(1,0)}}function we(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u){var y=Z();try{Lh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u)}catch(A){Y(y);if(A!==A+0)throw A;X(1,0)}}function Hd(a,b,c){var d=Z();try{Mh(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0)}}\nfunction vc(a,b,c,d){var e=Z();try{return Oh(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}function pc(a,b,c,d){var e=Z();try{return Ph(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}function ce(a,b,c,d,e,f,h,k,l,m){var n=Z();try{Qh(a,b,c,d,e,f,h,k,l,m)}catch(q){Y(n);if(q!==q+0)throw q;X(1,0)}}function Ee(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F,H,K,Va,Wa,Xa){var Qi=Z();try{Rh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F,H,K,Va,Wa,Xa)}catch(Ya){Y(Qi);if(Ya!==Ya+0)throw Ya;X(1,0)}}\nfunction $e(a,b,c,d,e,f){var h=Z();try{Sh(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function Rc(a,b,c,d,e,f,h,k,l,m,n,q,r){var p=Z();try{return Th(a,b,c,d,e,f,h,k,l,m,n,q,r)}catch(u){Y(p);if(u!==u+0)throw u;X(1,0)}}function Od(a,b,c,d,e,f,h,k,l,m,n,q,r){var p=Z();try{Uh(a,b,c,d,e,f,h,k,l,m,n,q,r)}catch(u){Y(p);if(u!==u+0)throw u;X(1,0)}}\nfunction se(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F){var H=Z();try{Vh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D,F)}catch(K){Y(H);if(K!==K+0)throw K;X(1,0)}}function Yd(a,b,c,d,e,f,h,k,l,m,n,q,r){var p=Z();try{Wh(a,b,c,d,e,f,h,k,l,m,n,q,r)}catch(u){Y(p);if(u!==u+0)throw u;X(1,0)}}function hf(a,b,c,d,e,f,h,k,l,m,n,q,r,p){var u=Z();try{Xh(a,b,c,d,e,f,h,k,l,m,n,q,r,p)}catch(y){Y(u);if(y!==y+0)throw y;X(1,0)}}\nfunction re(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D){var F=Z();try{Zh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D)}catch(H){Y(F);if(H!==H+0)throw H;X(1,0)}}function pe(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A){var B=Z();try{ai(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A)}catch(C){Y(B);if(C!==C+0)throw C;X(1,0)}}function qe(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C){var D=Z();try{Yh(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C)}catch(F){Y(D);if(F!==F+0)throw F;X(1,0)}}\nfunction Le(a,b,c,d,e,f,h,k){var l=Z();try{bi(a,b,c,d,e,f,h,k)}catch(m){Y(l);if(m!==m+0)throw m;X(1,0)}}function xe(a,b,c,d,e,f,h,k,l,m,n){var q=Z();try{di(a,b,c,d,e,f,h,k,l,m,n)}catch(r){Y(q);if(r!==r+0)throw r;X(1,0)}}function Mc(a,b,c,d,e,f,h,k,l,m,n){var q=Z();try{return ei(a,b,c,d,e,f,h,k,l,m,n)}catch(r){Y(q);if(r!==r+0)throw r;X(1,0)}}\nfunction Qc(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D){var F=Z();try{return fi(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y,A,B,C,D)}catch(H){Y(F);if(H!==H+0)throw H;X(1,0)}}function Sd(a,b,c,d,e){var f=Z();try{gi(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function ge(a,b,c,d,e,f,h,k,l,m,n,q){var r=Z();try{Ag(a,b,c,d,e,f,h,k,l,m,n,q)}catch(p){Y(r);if(p!==p+0)throw p;X(1,0)}}function Pe(a,b,c,d,e,f,h,k,l,m){var n=Z();try{hi(a,b,c,d,e,f,h,k,l,m)}catch(q){Y(n);if(q!==q+0)throw q;X(1,0)}}\nfunction Me(a,b,c,d,e){var f=Z();try{ii(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function Ye(a,b,c,d,e,f,h,k,l,m,n,q,r){var p=Z();try{Ji(a,b,c,d,e,f,h,k,l,m,n,q,r)}catch(u){Y(p);if(u!==u+0)throw u;X(1,0)}}function pd(a,b,c,d,e){var f=Z();try{return ji(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function Ad(a,b,c,d){var e=Z();try{return Ni(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0);return 0n}}\nfunction Je(a,b,c,d,e,f,h){var 
k=Z();try{ki(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function qd(a,b,c,d,e,f){var h=Z();try{return li(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function ud(a,b,c,d,e,f){var h=Z();try{return mi(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function Se(a,b,c,d,e,f,h,k,l){var m=Z();try{ni(a,b,c,d,e,f,h,k,l)}catch(n){Y(m);if(n!==n+0)throw n;X(1,0)}}\nfunction Ld(a,b,c,d){var e=Z();try{Nh(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}function Zc(a,b,c,d,e,f,h,k){var l=Z();try{return oi(a,b,c,d,e,f,h,k)}catch(m){Y(l);if(m!==m+0)throw m;X(1,0)}}function Ec(a,b,c,d,e,f){var h=Z();try{return pi(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function fd(a,b,c,d,e,f){var h=Z();try{return qi(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}\nfunction Sc(a,b,c,d,e,f,h,k,l,m,n,q){var r=Z();try{return ri(a,b,c,d,e,f,h,k,l,m,n,q)}catch(p){Y(r);if(p!==p+0)throw p;X(1,0)}}function bd(a,b,c,d,e,f,h,k){var l=Z();try{return si(a,b,c,d,e,f,h,k)}catch(m){Y(l);if(m!==m+0)throw m;X(1,0)}}function Vc(a,b,c,d,e,f,h,k,l,m,n){var q=Z();try{return ti(a,b,c,d,e,f,h,k,l,m,n)}catch(r){Y(q);if(r!==r+0)throw r;X(1,0)}}function gd(a,b,c,d,e,f,h){var k=Z();try{return ui(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}\nfunction Uc(a,b,c,d,e,f,h,k,l,m,n,q,r){var p=Z();try{return vi(a,b,c,d,e,f,h,k,l,m,n,q,r)}catch(u){Y(p);if(u!==u+0)throw u;X(1,0)}}function od(a,b,c,d,e,f,h){var k=Z();try{return wi(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function rd(a,b,c,d,e,f,h){var k=Z();try{return xi(a,b,c,d,e,f,h)}catch(l){Y(k);if(l!==l+0)throw l;X(1,0)}}function Bd(a,b,c){var d=Z();try{return yi(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0);return 0n}}\nfunction nd(a,b,c,d){var e=Z();try{return zi(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}function Gd(a,b,c,d){var e=Z();try{Bi(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}function Cc(a,b,c,d){var e=Z();try{return Ai(a,b,c,d)}catch(f){Y(e);if(f!==f+0)throw f;X(1,0)}}function tf(a,b,c,d,e){var f=Z();try{Ci(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function qc(a,b,c,d,e){var f=Z();try{return Di(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}\nfunction rc(a,b,c,d,e,f){var h=Z();try{return Ei(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function Te(a,b,c,d,e,f,h,k,l,m,n,q){var r=Z();try{Fi(a,b,c,d,e,f,h,k,l,m,n,q)}catch(p){Y(r);if(p!==p+0)throw p;X(1,0)}}function bf(a,b,c,d,e,f,h,k,l,m,n,q,r){var p=Z();try{Gi(a,b,c,d,e,f,h,k,l,m,n,q,r)}catch(u){Y(p);if(u!==u+0)throw u;X(1,0)}}function te(a,b,c,d,e,f,h,k,l,m,n,q){var r=Z();try{Eg(a,b,c,d,e,f,h,k,l,m,n,q)}catch(p){Y(r);if(p!==p+0)throw p;X(1,0)}}\nfunction $c(a,b,c,d,e){var f=Z();try{return Hi(a,b,c,d,e)}catch(h){Y(f);if(h!==h+0)throw h;X(1,0)}}function vd(a){var b=Z();try{return eg(a)}catch(c){Y(b);if(c!==c+0)throw c;X(1,0);return 0n}}function Wc(a,b,c,d,e,f){var h=Z();try{return dg(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}function Gc(a,b,c,d,e,f){var h=Z();try{return ci(a,b,c,d,e,f)}catch(k){Y(h);if(k!==k+0)throw k;X(1,0)}}\nfunction oe(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y){var A=Z();try{$h(a,b,c,d,e,f,h,k,l,m,n,q,r,p,u,y)}catch(B){Y(A);if(B!==B+0)throw B;X(1,0)}}function uc(a,b,c){var d=Z();try{return qg(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0)}}function oc(a,b,c){var d=Z();try{return sg(a,b,c)}catch(e){Y(d);if(e!==e+0)throw e;X(1,0)}}function Df(){var a=U;a=Object.assign({},a);var b=c=>d=>c(d)>>>0;a.Fd=b(a.Fd);a.he=b(a.he);a.ne=b(a.ne);a.oe=(c=>()=>c()>>>0)(a.oe);return 
a}g.stackSave=()=>Z();g.stackRestore=a=>Y(a);\ng.stackAlloc=a=>Ef(a);g.UTF8ToString=J;g.stringToUTF8=(a,b,c)=>M(a,E,b,c);g.lengthBytesUTF8=bb;var Oi;Fa=function Pi(){Oi||Ri();Oi||(Fa=Pi)};function Ri(){if(!(0 ortWasm);\n", "", "", "export const cpus = undefined;", "\nvar ortWasmThreaded = (() => {\n var _scriptDir = typeof document !== 'undefined' && document.currentScript ? document.currentScript.src : undefined;\n if (typeof __filename !== 'undefined') _scriptDir = _scriptDir || __filename;\n return (\nfunction(moduleArg = {}) {\n\nfunction d(){l.buffer!=p.buffer&&t();return p}function v(){l.buffer!=p.buffer&&t();return aa}function z(){l.buffer!=p.buffer&&t();return ba}function A(){l.buffer!=p.buffer&&t();return ca}function da(){l.buffer!=p.buffer&&t();return ea}var B=moduleArg,fa,C;B.ready=new Promise((a,b)=>{fa=a;C=b});\"use strict\";\nB.jsepInit=(a,b,c,e,f,h,k,q)=>{B.Qb=a;B.wb=b;B.yb=c;B.jb=e;B.xb=f;B.Ea=h;B.zb=k;B.Ab=q;b=(n,m,r)=>(...w)=>{const x=D,g=m?.();w=n(...w);const u=m?.();g!==u&&(n=u,r(g),m=r=null);return D!=x?ha():w};c=n=>async(...m)=>{try{if(B.bb)throw Error(\"Session already started\");const r=B.bb={Fb:m[0],errors:[]},w=await n(...m);if(B.bb!==r)throw Error(\"Session mismatch\");a.flush();const x=r.errors;if(0u);if(0B._OrtRun,n=>B._OrtRun=n));B._OrtRunWithBinding=c(b(B._OrtRunWithBinding,()=>B._OrtRunWithBinding,n=>B._OrtRunWithBinding=n));B._OrtBindInput=b(B._OrtBindInput,()=>B._OrtBindInput,n=>B._OrtBindInput=n);B.jsepRegisterBuffer=(n,m,r,w)=>a.registerBuffer(n,m,r,w);B.jsepUnregisterBuffers=n=>{a.unregisterBuffers(n)};B.jsepGetBuffer=n=>a.getBuffer(n);B.jsepCreateDownloader=(n,m,r)=>a.createDownloader(n,m,r)};\nvar ia=Object.assign({},B),ja=\"./this.program\",E=(a,b)=>{throw b;},ka=\"object\"==typeof window,F=\"function\"==typeof importScripts,G=\"object\"==typeof process&&\"object\"==typeof process.versions&&\"string\"==typeof process.versions.node,H=B.ENVIRONMENT_IS_PTHREAD||!1,I=\"\";function la(a){return B.locateFile?B.locateFile(a,I):I+a}var ma,J,na;\nif(G){var fs=require(\"fs\"),oa=require(\"path\");I=F?oa.dirname(I)+\"/\":__dirname+\"/\";ma=(b,c)=>{b=b.startsWith(\"file://\")?new URL(b):oa.normalize(b);return fs.readFileSync(b,c?void 0:\"utf8\")};na=b=>{b=ma(b,!0);b.buffer||(b=new Uint8Array(b));return b};J=(b,c,e,f=!0)=>{b=b.startsWith(\"file://\")?new URL(b):oa.normalize(b);fs.readFile(b,f?void 0:\"utf8\",(h,k)=>{h?e(h):c(f?k.buffer:k)})};!B.thisProgram&&1{process.exitCode=\nb;throw c;};B.inspect=()=>\"[Emscripten Module object]\";let a;try{a=require(\"worker_threads\")}catch(b){throw console.error('The \"worker_threads\" module is not supported in this node.js build - perhaps a newer version is needed?'),b;}global.Worker=a.Worker}else if(ka||F)F?I=self.location.href:\"undefined\"!=typeof document&&document.currentScript&&(I=document.currentScript.src),(typeof _scriptDir !== \"undefined\" && _scriptDir)&&(I=_scriptDir),0!==I.indexOf(\"blob:\")?I=I.substr(0,I.replace(/[?#].*/,\"\").lastIndexOf(\"/\")+1):I=\"\",G||(ma=a=>{var b=\nnew XMLHttpRequest;b.open(\"GET\",a,!1);b.send(null);return b.responseText},F&&(na=a=>{var b=new XMLHttpRequest;b.open(\"GET\",a,!1);b.responseType=\"arraybuffer\";b.send(null);return new Uint8Array(b.response)}),J=(a,b,c)=>{var e=new XMLHttpRequest;e.open(\"GET\",a,!0);e.responseType=\"arraybuffer\";e.onload=()=>{200==e.status||0==e.status&&e.response?b(e.response):c()};e.onerror=c;e.send(null)});G&&\"undefined\"==typeof performance&&(global.performance=require(\"perf_hooks\").performance);\nvar 
pa=console.log.bind(console),qa=console.error.bind(console);G&&(pa=(...a)=>fs.writeSync(1,a.join(\" \")+\"\\n\"),qa=(...a)=>fs.writeSync(2,a.join(\" \")+\"\\n\"));var ra=B.print||pa,K=B.printErr||qa;Object.assign(B,ia);ia=null;B.thisProgram&&(ja=B.thisProgram);B.quit&&(E=B.quit);var L;B.wasmBinary&&(L=B.wasmBinary);var noExitRuntime=B.noExitRuntime||!0;\"object\"!=typeof WebAssembly&&M(\"no native wasm support detected\");var l,N,sa,P=!1,Q,p,aa,ba,ca,ea;\nfunction t(){var a=l.buffer;B.HEAP8=p=new Int8Array(a);B.HEAP16=new Int16Array(a);B.HEAP32=ba=new Int32Array(a);B.HEAPU8=aa=new Uint8Array(a);B.HEAPU16=new Uint16Array(a);B.HEAPU32=ca=new Uint32Array(a);B.HEAPF32=new Float32Array(a);B.HEAPF64=ea=new Float64Array(a)}var ta=B.INITIAL_MEMORY||16777216;5242880<=ta||M(\"INITIAL_MEMORY should be larger than STACK_SIZE, was \"+ta+\"! (STACK_SIZE=5242880)\");\nif(H)l=B.wasmMemory;else if(B.wasmMemory)l=B.wasmMemory;else if(l=new WebAssembly.Memory({initial:ta/65536,maximum:65536,shared:!0}),!(l.buffer instanceof SharedArrayBuffer))throw K(\"requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag\"),G&&K(\"(on node you may need: --experimental-wasm-threads --experimental-wasm-bulk-memory and/or recent version)\"),\nError(\"bad memory\");t();ta=l.buffer.byteLength;var ua=[],va=[],wa=[],xa=0;function ya(){return noExitRuntime||0{if(!b.ok)throw\"failed to load wasm binary file at '\"+a+\"'\";return b.arrayBuffer()}).catch(()=>Da(a));if(J)return new Promise((b,c)=>{J(a,e=>b(new Uint8Array(e)),c)})}return Promise.resolve().then(()=>Da(a))}function Fa(a,b,c){return Ea(a).then(e=>WebAssembly.instantiate(e,b)).then(e=>e).then(c,e=>{K(\"failed to asynchronously prepare wasm: \"+e);M(e)})}\nfunction Ga(a,b){var c=T;return L||\"function\"!=typeof WebAssembly.instantiateStreaming||Ca(c)||c.startsWith(\"file://\")||G||\"function\"!=typeof fetch?Fa(c,a,b):fetch(c,{credentials:\"same-origin\"}).then(e=>WebAssembly.instantiateStreaming(e,a).then(b,function(f){K(\"wasm streaming compile failed: \"+f);K(\"falling back to ArrayBuffer instantiation\");return Fa(c,a,b)}))}\nvar U,Ha={906828:a=>{B.Ea(\"Abs\",a,void 0)},906879:a=>{B.Ea(\"Neg\",a,void 0)},906930:a=>{B.Ea(\"Floor\",a,void 0)},906983:a=>{B.Ea(\"Ceil\",a,void 0)},907035:a=>{B.Ea(\"Reciprocal\",a,void 0)},907093:a=>{B.Ea(\"Sqrt\",a,void 0)},907145:a=>{B.Ea(\"Exp\",a,void 0)},907196:a=>{B.Ea(\"Erf\",a,void 0)},907247:a=>{B.Ea(\"Sigmoid\",a,void 0)},907302:a=>{B.Ea(\"Log\",a,void 0)},907353:a=>{B.Ea(\"Sin\",a,void 0)},907404:a=>{B.Ea(\"Cos\",a,void 0)},907455:a=>{B.Ea(\"Tan\",a,void 0)},907506:a=>{B.Ea(\"Asin\",a,void 0)},907558:a=>{B.Ea(\"Acos\",\na,void 0)},907610:a=>{B.Ea(\"Atan\",a,void 0)},907662:a=>{B.Ea(\"Sinh\",a,void 0)},907714:a=>{B.Ea(\"Cosh\",a,void 0)},907766:a=>{B.Ea(\"Asinh\",a,void 0)},907819:a=>{B.Ea(\"Acosh\",a,void 0)},907872:a=>{B.Ea(\"Atanh\",a,void 0)},907925:a=>{B.Ea(\"Tanh\",a,void 0)},907977:a=>{B.Ea(\"Not\",a,void 0)},908028:(a,b,c)=>{B.Ea(\"ClipV10\",a,{min:b,max:c})},908100:a=>{B.Ea(\"Clip\",a,void 0)},908152:(a,b)=>{B.Ea(\"Elu\",a,{alpha:b})},908210:a=>{B.Ea(\"Relu\",a,void 0)},908262:(a,b)=>{B.Ea(\"LeakyRelu\",a,{alpha:b})},908326:(a,b)=>\n{B.Ea(\"ThresholdedRelu\",a,{alpha:b})},908396:(a,b)=>{B.Ea(\"Cast\",a,{to:b})},908454:a=>{B.Ea(\"Add\",a,void 0)},908505:a=>{B.Ea(\"Sub\",a,void 0)},908556:a=>{B.Ea(\"Mul\",a,void 0)},908607:a=>{B.Ea(\"Div\",a,void 
0)},908658:a=>{B.Ea(\"Pow\",a,void 0)},908709:a=>{B.Ea(\"Equal\",a,void 0)},908762:a=>{B.Ea(\"Greater\",a,void 0)},908817:a=>{B.Ea(\"GreaterOrEqual\",a,void 0)},908879:a=>{B.Ea(\"Less\",a,void 0)},908931:a=>{B.Ea(\"LessOrEqual\",a,void 0)},908990:(a,b,c,e,f)=>{B.Ea(\"ReduceMean\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,\naxes:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[]})},909154:(a,b,c,e,f)=>{B.Ea(\"ReduceMax\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[]})},909317:(a,b,c,e,f)=>{B.Ea(\"ReduceMin\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[]})},909480:(a,b,c,e,f)=>{B.Ea(\"ReduceProd\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[]})},909644:(a,b,c,e,f)=>{B.Ea(\"ReduceSum\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,\naxes:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[]})},909807:(a,b,c,e,f)=>{B.Ea(\"ReduceL1\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[]})},909969:(a,b,c,e,f)=>{B.Ea(\"ReduceL2\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[]})},910131:(a,b,c,e,f)=>{B.Ea(\"ReduceLogSum\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[]})},910297:(a,b,c,e,f)=>{B.Ea(\"ReduceSumSquare\",a,{keepDims:!!b,\nnoopWithEmptyAxes:!!c,axes:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[]})},910466:(a,b,c,e,f)=>{B.Ea(\"ReduceLogSumExp\",a,{keepDims:!!b,noopWithEmptyAxes:!!c,axes:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[]})},910635:a=>{B.Ea(\"Where\",a,void 0)},910688:(a,b,c)=>{B.Ea(\"Transpose\",a,{perm:b?Array.from(z().subarray(c>>>0,c+b>>>0)):[]})},910801:(a,b,c,e,f,h,k,q,n,m)=>{B.Ea(\"Conv\",a,{format:n?\"NHWC\":\"NCHW\",auto_pad:b,dilations:[c],group:e,kernel_shape:[f],pads:[h,k],strides:[q],w_is_const:()=>!!d()[m>>>\n0]})},911029:(a,b,c,e,f,h,k,q,n,m,r,w,x,g,u)=>{B.Ea(\"Conv\",a,{format:g?\"NHWC\":\"NCHW\",auto_pad:b,dilations:[c,e],group:f,kernel_shape:[h,k],pads:[q,n,m,r],strides:[w,x],w_is_const:()=>!!d()[u>>>0]})},911288:(a,b,c,e,f,h,k,q,n,m)=>{B.Ea(\"Conv\",a,{format:n?\"NHWC\":\"NCHW\",auto_pad:b,dilations:[c],group:e,kernel_shape:[f],pads:[h,k],strides:[q],w_is_const:()=>!!d()[m>>>0]})},911516:(a,b,c,e,f,h,k,q,n,m,r,w,x,g,u)=>{B.Ea(\"Conv\",a,{format:g?\"NHWC\":\"NCHW\",auto_pad:b,dilations:[c,e],group:f,kernel_shape:[h,\nk],pads:[q,n,m,r],strides:[w,x],w_is_const:()=>!!d()[u>>>0]})},911775:(a,b,c,e,f,h,k,q,n,m,r,w,x,g)=>{B.Ea(\"ConvTranspose\",a,{format:n?\"NHWC\":\"NCHW\",autoPad:b,dilations:[c],group:e,kernel_shape:[f],pads:[h,k],strides:[q],wIsConst:()=>!!d()[m>>>0],outputPadding:r?Array.from(z().subarray(w>>>0,w+r>>>0)):[],outputShape:x?Array.from(z().subarray(g>>>0,g+x>>>0)):[]})},912155:(a,b,c,e,f,h,k,q,n,m,r,w,x)=>{B.Ea(\"ConvTranspose\",a,{format:q?\"NHWC\":\"NCHW\",autoPad:b,dilations:Array.from(z().subarray(c>>>0,c+\n2>>>0)),group:e,kernelShape:Array.from(z().subarray(f>>>0,f+2>>>0)),pads:Array.from(z().subarray(h>>>0,h+4>>>0)),strides:Array.from(z().subarray(k>>>0,k+2>>>0)),wIsConst:()=>!!d()[n>>>0],outputPadding:0>>0,r+m>>>0)):[],outputShape:0>>0,x+w>>>0)):[]})},912678:(a,b,c,e,f,h,k,q,n,m,r,w,x,g)=>{B.Ea(\"ConvTranspose\",a,{format:n?\"NHWC\":\"NCHW\",autoPad:b,dilations:[c],group:e,kernel_shape:[f],pads:[h,k],strides:[q],wIsConst:()=>!!d()[m>>>0],outputPadding:r?\nArray.from(z().subarray(w>>>0,w+r>>>0)):[],outputShape:x?Array.from(z().subarray(g>>>0,g+x>>>0)):[]})},913058:(a,b,c,e,f,h,k,q,n,m,r,w,x)=>{B.Ea(\"ConvTranspose\",a,{format:q?\"NHWC\
":\"NCHW\",autoPad:b,dilations:Array.from(z().subarray(c>>>0,c+2>>>0)),group:e,kernelShape:Array.from(z().subarray(f>>>0,f+2>>>0)),pads:Array.from(z().subarray(h>>>0,h+4>>>0)),strides:Array.from(z().subarray(k>>>0,k+2>>>0)),wIsConst:()=>!!d()[n>>>0],outputPadding:0>>0,r+m>>>0)):[],outputShape:0<\nw?Array.from(z().subarray(x>>>0,x+w>>>0)):[]})},913581:(a,b)=>{B.Ea(\"GlobalAveragePool\",a,{format:b?\"NHWC\":\"NCHW\"})},913672:(a,b,c,e,f,h,k,q,n,m,r,w,x,g,u,y)=>{B.Ea(\"AveragePool\",a,{format:y?\"NHWC\":\"NCHW\",auto_pad:b,ceil_mode:c,count_include_pad:e,storage_order:f,dilations:[h,k],kernel_shape:[q,n],pads:[m,r,w,x],strides:[g,u]})},913956:(a,b)=>{B.Ea(\"GlobalAveragePool\",a,{format:b?\"NHWC\":\"NCHW\"})},914047:(a,b,c,e,f,h,k,q,n,m,r,w,x,g,u,y)=>{B.Ea(\"AveragePool\",a,{format:y?\"NHWC\":\"NCHW\",auto_pad:b,ceil_mode:c,\ncount_include_pad:e,storage_order:f,dilations:[h,k],kernel_shape:[q,n],pads:[m,r,w,x],strides:[g,u]})},914331:(a,b)=>{B.Ea(\"GlobalMaxPool\",a,{format:b?\"NHWC\":\"NCHW\"})},914418:(a,b,c,e,f,h,k,q,n,m,r,w,x,g,u,y)=>{B.Ea(\"MaxPool\",a,{format:y?\"NHWC\":\"NCHW\",auto_pad:b,ceil_mode:c,count_include_pad:e,storage_order:f,dilations:[h,k],kernel_shape:[q,n],pads:[m,r,w,x],strides:[g,u]})},914698:(a,b)=>{B.Ea(\"GlobalMaxPool\",a,{format:b?\"NHWC\":\"NCHW\"})},914785:(a,b,c,e,f,h,k,q,n,m,r,w,x,g,u,y)=>{B.Ea(\"MaxPool\",\na,{format:y?\"NHWC\":\"NCHW\",auto_pad:b,ceil_mode:c,count_include_pad:e,storage_order:f,dilations:[h,k],kernel_shape:[q,n],pads:[m,r,w,x],strides:[g,u]})},915065:(a,b,c,e,f)=>{B.Ea(\"Gemm\",a,{alpha:b,beta:c,transA:e,transB:f})},915169:a=>{B.Ea(\"MatMul\",a,void 0)},915223:(a,b,c,e)=>{B.Ea(\"ArgMax\",a,{keepDims:!!b,selectLastIndex:!!c,axis:e})},915331:(a,b,c,e)=>{B.Ea(\"ArgMin\",a,{keepDims:!!b,selectLastIndex:!!c,axis:e})},915439:(a,b)=>{B.Ea(\"Softmax\",a,{axis:b})},915502:(a,b)=>{B.Ea(\"Concat\",a,{axis:b})},\n915562:(a,b,c,e,f)=>{B.Ea(\"Split\",a,{axis:b,numOutputs:c,splitSizes:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[]})},915707:a=>{B.Ea(\"Expand\",a,void 0)},915761:(a,b)=>{B.Ea(\"Gather\",a,{axis:Number(b)})},915832:(a,b)=>{B.Ea(\"GatherElements\",a,{axis:Number(b)})},915911:(a,b,c,e,f,h,k,q,n,m,r)=>{B.Ea(\"Resize\",a,{antialias:b,axes:c?Array.from(z().subarray(e>>>0,e+c>>>0)):[],coordinateTransformMode:V(f),cubicCoeffA:h,excludeOutside:k,extrapolationValue:q,keepAspectRatioPolicy:V(n),mode:V(m),nearestMode:V(r)})},\n916262:(a,b,c,e,f,h,k)=>{B.Ea(\"Slice\",a,{starts:b?Array.from(z().subarray(c>>>0,c+b>>>0)):[],ends:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[],axes:h?Array.from(z().subarray(k>>>0,k+h>>>0)):[]})},916493:a=>{B.Ea(\"Tile\",a,void 0)},916545:(a,b,c)=>{B.Ea(\"LayerNormalization\",a,{axis:Number(b),epsilon:Number(c)})},916652:(a,b,c)=>{B.Ea(\"InstanceNormalization\",a,{epsilon:b,format:c?\"NHWC\":\"NCHW\"})},916766:(a,b,c)=>{B.Ea(\"InstanceNormalization\",a,{epsilon:b,format:c?\"NHWC\":\"NCHW\"})},916880:a=>{B.Ea(\"Range\",\na,void 0)},916933:(a,b)=>{B.Ea(\"Einsum\",a,{equation:V(b)})},917014:(a,b,c,e,f)=>{B.Ea(\"Pad\",a,{mode:b,value:c,pads:e?Array.from(z().subarray(f>>>0,f+e>>>0)):[]})},917146:a=>{B.Ea(\"Gelu\",a,void 0)},917198:a=>{B.Ea(\"BiasAdd\",a,void 0)},917253:a=>{B.Ea(\"BiasSplitGelu\",a,void 0)},917314:(a,b)=>{B.Ea(\"SkipLayerNormalization\",a,{epsilon:b})},917395:a=>{B.zb(a)},917429:(a,b)=>B.Ab(a,b,B.bb.Fb,B.bb.errors),917541:a=>B.wb(a),917574:a=>B.yb(a),917606:(a,b,c)=>{B.jb(a,b,c,!0)},917645:(a,b,c)=>{B.jb(a,b,c)}};\nfunction Ia(a){this.name=\"ExitStatus\";this.message=`Program terminated with 
exit(${a})`;this.status=a}function Ja(a){a.terminate();a.onmessage=()=>{}}function Ka(a){(a=W.Qa[a])||M();W.Eb(a)}function La(a){var b=W.tb();if(!b)return 6;W.Ya.push(b);W.Qa[a.Xa]=b;b.Xa=a.Xa;var c={cmd:\"run\",start_routine:a.Gb,arg:a.rb,pthread_ptr:a.Xa};G&&b.unref();b.postMessage(c,a.Mb);return 0}\nvar Ma=\"undefined\"!=typeof TextDecoder?new TextDecoder(\"utf8\"):void 0,Na=(a,b,c)=>{b>>>=0;var e=b+c;for(c=b;a[c]&&!(c>=e);)++c;if(16f?e+=String.fromCharCode(f):(f-=65536,e+=String.fromCharCode(55296|f>>\n10,56320|f&1023))}}else e+=String.fromCharCode(f)}return e},V=(a,b)=>(a>>>=0)?Na(v(),a,b):\"\";function Oa(a){if(H)return X(1,1,a);Q=a;if(!ya()){W.Hb();if(B.onExit)B.onExit(a);P=!0}E(a,new Ia(a))}\nvar Qa=a=>{Q=a;if(H)throw Pa(a),\"unwind\";Oa(a)},W={ab:[],Ya:[],mb:[],Qa:{},gb:function(){H?W.vb():W.ub()},ub:function(){ua.unshift(()=>{Aa();W.Bb(()=>Ba())})},vb:function(){W.receiveObjectTransfer=W.Db;W.threadInitTLS=W.lb;W.setExitStatus=W.kb;noExitRuntime=!1},kb:function(a){Q=a},Sb:[\"$terminateWorker\"],Hb:function(){for(var a of W.Ya)Ja(a);for(a of W.ab)Ja(a);W.ab=[];W.Ya=[];W.Qa=[]},Eb:function(a){var b=a.Xa;delete W.Qa[b];W.ab.push(a);W.Ya.splice(W.Ya.indexOf(a),1);a.Xa=0;Ra(b)},Db:function(){},\nlb:function(){W.mb.forEach(a=>a())},Cb:a=>new Promise(b=>{a.onmessage=h=>{h=h.data;var k=h.cmd;if(h.targetThread&&h.targetThread!=Sa()){var q=W.Qa[h.Rb];q?q.postMessage(h,h.transferList):K('Internal error! Worker sent a message \"'+k+'\" to target pthread '+h.targetThread+\", but that thread no longer exists!\")}else if(\"checkMailbox\"===k)Ta();else if(\"spawnThread\"===k)La(h);else if(\"cleanupThread\"===k)Ka(h.thread);else if(\"killThread\"===k)h=h.thread,k=W.Qa[h],delete W.Qa[h],Ja(k),Ra(h),W.Ya.splice(W.Ya.indexOf(k),\n1),k.Xa=0;else if(\"cancelThread\"===k)W.Qa[h.thread].postMessage({cmd:\"cancel\"});else if(\"loaded\"===k)a.loaded=!0,b(a);else if(\"alert\"===k)alert(\"Thread \"+h.threadId+\": \"+h.text);else if(\"setimmediate\"===h.target)a.postMessage(h);else if(\"callHandler\"===k)B[h.handler](...h.args);else k&&K(\"worker sent an unknown command \"+k)};a.onerror=h=>{K(\"worker sent an error! 
\"+h.filename+\":\"+h.lineno+\": \"+h.message);throw h;};G&&(a.on(\"message\",function(h){a.onmessage({data:h})}),a.on(\"error\",function(h){a.onerror(h)}));\nvar c=[],e=[\"onExit\",\"onAbort\",\"print\",\"printErr\"],f;for(f of e)B.hasOwnProperty(f)&&c.push(f);a.postMessage({cmd:\"load\",handlers:c,urlOrBlob:B.mainScriptUrlOrBlob||_scriptDir,wasmMemory:l,wasmModule:sa})}),Bb:function(a){a()},qb:function(){var a=la(\"ort-wasm-simd-threaded.worker.js\");a=new Worker(a);W.ab.push(a)},tb:function(){0==W.ab.length&&(W.qb(),W.Cb(W.ab[0]));return W.ab.pop()}};B.PThread=W;var Ua=a=>{for(;0>2>>>0];a=z()[a+56>>2>>>0];Va(b,b-a);Wa(b)};function Pa(a){if(H)return X(2,0,a);Qa(a)}B.invokeEntryPoint=function(a,b){a=Xa.apply(null,[a,b]);ya()?W.kb(a):Ya(a)};function Za(a){this.fb=a-24;this.pb=function(b){A()[this.fb+4>>2>>>0]=b};this.ob=function(b){A()[this.fb+8>>2>>>0]=b};this.gb=function(b,c){this.nb();this.pb(b);this.ob(c)};this.nb=function(){A()[this.fb+16>>2>>>0]=0}}var $a=0,ab=0;\nfunction bb(a,b,c,e){return H?X(3,1,a,b,c,e):cb(a,b,c,e)}function cb(a,b,c,e){a>>>=0;b>>>=0;c>>>=0;e>>>=0;if(\"undefined\"==typeof SharedArrayBuffer)return K(\"Current environment does not support SharedArrayBuffer, pthreads are not available!\"),6;var f=[];if(H&&0===f.length)return bb(a,b,c,e);a={Gb:c,Xa:a,rb:e,Mb:f};return H?(a.Ob=\"spawnThread\",postMessage(a,f),0):La(a)}function db(a,b,c){return H?X(4,1,a,b,c):0}function eb(a,b){if(H)return X(5,1,a,b)}\nvar fb=a=>{for(var b=0,c=0;c=e?b++:2047>=e?b+=2:55296<=e&&57343>=e?(b+=4,++c):b+=3}return b},gb=(a,b,c,e)=>{c>>>=0;if(!(0=k){var q=a.charCodeAt(++h);k=65536+((k&1023)<<10)|q&1023}if(127>=k){if(c>=e)break;b[c++>>>0]=k}else{if(2047>=k){if(c+1>=e)break;b[c++>>>0]=192|k>>6}else{if(65535>=k){if(c+2>=e)break;b[c++>>>0]=224|k>>12}else{if(c+3>=e)break;b[c++>>>0]=240|k>>\n18;b[c++>>>0]=128|k>>12&63}b[c++>>>0]=128|k>>6&63}b[c++>>>0]=128|k&63}}b[c>>>0]=0;return c-f},hb=(a,b,c)=>gb(a,v(),b,c);function ib(a,b){if(H)return X(6,1,a,b)}function jb(a,b,c){if(H)return X(7,1,a,b,c)}function kb(a,b,c){return H?X(8,1,a,b,c):0}function lb(a,b){if(H)return X(9,1,a,b)}function mb(a,b,c){if(H)return X(10,1,a,b,c)}function nb(a,b,c,e){if(H)return X(11,1,a,b,c,e)}function ob(a,b,c,e){if(H)return X(12,1,a,b,c,e)}function pb(a,b,c,e){if(H)return X(13,1,a,b,c,e)}\nfunction qb(a){if(H)return X(14,1,a)}function rb(a,b){if(H)return X(15,1,a,b)}function sb(a,b,c){if(H)return X(16,1,a,b,c)}var tb=a=>{if(!P)try{if(a(),!ya())try{H?Ya(Q):Qa(Q)}catch(b){b instanceof Ia||\"unwind\"==b||E(1,b)}}catch(b){b instanceof Ia||\"unwind\"==b||E(1,b)}};function ub(a){a>>>=0;\"function\"===typeof Atomics.Nb&&(Atomics.Nb(z(),a>>2,a).value.then(Ta),a+=128,Atomics.store(z(),a>>2,1))}B.__emscripten_thread_mailbox_await=ub;function Ta(){var a=Sa();a&&(ub(a),tb(()=>vb()))}B.checkMailbox=Ta;\nvar Y=a=>0===a%4&&(0!==a%100||0===a%400),wb=[0,31,60,91,121,152,182,213,244,274,305,335],xb=[0,31,59,90,120,151,181,212,243,273,304,334];function yb(a,b,c,e,f,h,k,q){return H?X(17,1,a,b,c,e,f,h,k,q):-52}function zb(a,b,c,e,f,h,k){if(H)return X(18,1,a,b,c,e,f,h,k)}var Bb=a=>{var b=fb(a)+1,c=Ab(b);c&&hb(a,c,b);return c},Cb=[],Db=(a,b)=>{Cb.length=0;var c;for(b>>=2;c=v()[a++>>>0];)b+=105!=c&b,Cb.push(105==c?z()[b>>>0]:da()[b++>>>1]),++b;return Cb},Fb=a=>{var b=Eb();a=a();Wa(b);return a};\nfunction X(a,b){var c=arguments.length-2,e=arguments;return Fb(()=>{for(var f=Gb(8*c),h=f>>3,k=0;k>>0]=q}return Hb(a,c,f,b)})}\nvar Ib=[],Jb={},Lb=()=>{if(!Kb){var 
a={USER:\"web_user\",LOGNAME:\"web_user\",PATH:\"/\",PWD:\"/\",HOME:\"/home/web_user\",LANG:(\"object\"==typeof navigator&&navigator.languages&&navigator.languages[0]||\"C\").replace(\"-\",\"_\")+\".UTF-8\",_:ja||\"./this.program\"},b;for(b in Jb)void 0===Jb[b]?delete a[b]:a[b]=Jb[b];var c=[];for(b in a)c.push(`${b}=${a[b]}`);Kb=c}return Kb},Kb;\nfunction Mb(a,b){if(H)return X(19,1,a,b);a>>>=0;b>>>=0;var c=0;Lb().forEach(function(e,f){var h=b+c;f=A()[a+4*f>>2>>>0]=h;for(h=0;h>0>>>0]=e.charCodeAt(h);d()[f>>0>>>0]=0;c+=e.length+1});return 0}function Nb(a,b){if(H)return X(20,1,a,b);a>>>=0;b>>>=0;var c=Lb();A()[a>>2>>>0]=c.length;var e=0;c.forEach(function(f){e+=f.length+1});A()[b>>2>>>0]=e;return 0}function Ob(a){return H?X(21,1,a):52}function Pb(a,b,c,e){return H?X(22,1,a,b,c,e):52}\nfunction Qb(a,b,c,e,f){return H?X(23,1,a,b,c,e,f):70}var Rb=[null,[],[]];function Tb(a,b,c,e){if(H)return X(24,1,a,b,c,e);b>>>=0;c>>>=0;e>>>=0;for(var f=0,h=0;h>2>>>0],q=A()[b+4>>2>>>0];b+=8;for(var n=0;n>>0],r=Rb[a];0===m||10===m?((1===a?ra:K)(Na(r,0)),r.length=0):r.push(m)}f+=q}A()[e>>2>>>0]=f;return 0}var Ub=[31,29,31,30,31,30,31,31,30,31,30,31],Vb=[31,28,31,30,31,30,31,31,30,31,30,31];function Wb(a){var b=Array(fb(a)+1);gb(a,b,0,b.length);return b}\nvar Xb=(a,b)=>{d().set(a,b>>>0)};\nfunction Yb(a,b,c,e){function f(g,u,y){for(g=\"number\"==typeof g?g.toString():g||\"\";g.lengthSb?-1:0O-g.getDate())u-=O-g.getDate()+1,g.setDate(1),11>y?g.setMonth(y+1):(g.setMonth(0),g.setFullYear(g.getFullYear()+1));else{g.setDate(g.getDate()+u);break}}y=new Date(g.getFullYear()+1,0,4);u=q(new Date(g.getFullYear(),\n0,4));y=q(y);return 0>=k(u,g)?0>=k(y,g)?g.getFullYear()+1:g.getFullYear():g.getFullYear()-1}a>>>=0;b>>>=0;c>>>=0;e>>>=0;var m=z()[e+40>>2>>>0];e={Kb:z()[e>>2>>>0],Jb:z()[e+4>>2>>>0],cb:z()[e+8>>2>>>0],ib:z()[e+12>>2>>>0],eb:z()[e+16>>2>>>0],$a:z()[e+20>>2>>>0],Wa:z()[e+24>>2>>>0],Za:z()[e+28>>2>>>0],Tb:z()[e+32>>2>>>0],Ib:z()[e+36>>2>>>0],Lb:m?V(m):\"\"};c=V(c);m={\"%c\":\"%a %b %d %H:%M:%S %Y\",\"%D\":\"%m/%d/%y\",\"%F\":\"%Y-%m-%d\",\"%h\":\"%b\",\"%r\":\"%I:%M:%S %p\",\"%R\":\"%H:%M\",\"%T\":\"%H:%M:%S\",\"%x\":\"%m/%d/%y\",\"%X\":\"%H:%M:%S\",\n\"%Ec\":\"%c\",\"%EC\":\"%C\",\"%Ex\":\"%m/%d/%y\",\"%EX\":\"%H:%M:%S\",\"%Ey\":\"%y\",\"%EY\":\"%Y\",\"%Od\":\"%d\",\"%Oe\":\"%e\",\"%OH\":\"%H\",\"%OI\":\"%I\",\"%Om\":\"%m\",\"%OM\":\"%M\",\"%OS\":\"%S\",\"%Ou\":\"%u\",\"%OU\":\"%U\",\"%OV\":\"%V\",\"%Ow\":\"%w\",\"%OW\":\"%W\",\"%Oy\":\"%y\"};for(var r in m)c=c.replace(new RegExp(r,\"g\"),m[r]);var w=\"Sunday Monday Tuesday Wednesday Thursday Friday Saturday\".split(\" \"),x=\"January February March April May June July August September October November December\".split(\" \");m={\"%a\":g=>w[g.Wa].substring(0,3),\"%A\":g=>w[g.Wa],\"%b\":g=>\nx[g.eb].substring(0,3),\"%B\":g=>x[g.eb],\"%C\":g=>h((g.$a+1900)/100|0,2),\"%d\":g=>h(g.ib,2),\"%e\":g=>f(g.ib,2,\" \"),\"%g\":g=>n(g).toString().substring(2),\"%G\":g=>n(g),\"%H\":g=>h(g.cb,2),\"%I\":g=>{g=g.cb;0==g?g=12:12{for(var u=0,y=0;y<=g.eb-1;u+=(Y(g.$a+1900)?Ub:Vb)[y++]);return h(g.ib+u,3)},\"%m\":g=>h(g.eb+1,2),\"%M\":g=>h(g.Jb,2),\"%n\":()=>\"\\n\",\"%p\":g=>0<=g.cb&&12>g.cb?\"AM\":\"PM\",\"%S\":g=>h(g.Kb,2),\"%t\":()=>\"\\t\",\"%u\":g=>g.Wa||7,\"%U\":g=>h(Math.floor((g.Za+7-g.Wa)/7),2),\"%V\":g=>\n{var u=Math.floor((g.Za+7-(g.Wa+6)%7)/7);2>=(g.Wa+371-g.Za-2)%7&&u++;if(u)53==u&&(y=(g.Wa+371-g.Za)%7,4==y||3==y&&Y(g.$a)||(u=1));else{u=52;var y=(g.Wa+7-g.Za-1)%7;(4==y||5==y&&Y(g.$a%400-1))&&u++}return 
h(u,2)},\"%w\":g=>g.Wa,\"%W\":g=>h(Math.floor((g.Za+7-(g.Wa+6)%7)/7),2),\"%y\":g=>(g.$a+1900).toString().substring(2),\"%Y\":g=>g.$a+1900,\"%z\":g=>{g=g.Ib;var u=0<=g;g=Math.abs(g)/60;return(u?\"+\":\"-\")+String(\"0000\"+(g/60*100+g%60)).slice(-4)},\"%Z\":g=>g.Lb,\"%%\":()=>\"%\"};c=c.replace(/%%/g,\"\\x00\\x00\");for(r in m)c.includes(r)&&\n(c=c.replace(new RegExp(r,\"g\"),m[r](e)));c=c.replace(/\\0\\0/g,\"%\");r=Wb(c);if(r.length>b)return 0;Xb(r,a);return r.length-1}function Zb(a){try{a()}catch(b){M(b)}}function $b(a){var b={},c;for(c in a)(function(e){var f=a[e];b[e]=\"function\"==typeof f?function(){ac.push(e);try{return f.apply(null,arguments)}finally{P||(ac.pop()===e||M(),D&&1===Z&&0===ac.length&&(Z=0,xa+=1,Zb(bc),\"undefined\"!=typeof Fibers&&Fibers.Ub()))}}:f})(c);return b}var Z=0,D=null,cc=0,ac=[],dc={},ec={},fc=0,gc=null,hc=[];\nfunction ha(){return new Promise((a,b)=>{gc={resolve:a,reject:b}})}function ic(){var a=Ab(65548),b=a+12;A()[a>>2>>>0]=b;A()[a+4>>2>>>0]=b+65536;b=ac[0];var c=dc[b];void 0===c&&(c=fc++,dc[b]=c,ec[c]=b);b=c;z()[a+8>>2>>>0]=b;return a}function jc(){var a=z()[D+8>>2>>>0];a=N[ec[a]];--xa;return a()}\nfunction kc(a){if(!P){if(0===Z){var b=!1,c=!1;a((e=0)=>{if(!P&&(cc=e,b=!0,c)){Z=2;Zb(()=>lc(D));\"undefined\"!=typeof Browser&&Browser.hb.sb&&Browser.hb.resume();e=!1;try{var f=jc()}catch(q){f=q,e=!0}var h=!1;if(!D){var k=gc;k&&(gc=null,(e?k.reject:k.resolve)(f),h=!0)}if(e&&!h)throw f;}});c=!0;b||(Z=1,D=ic(),\"undefined\"!=typeof Browser&&Browser.hb.sb&&Browser.hb.pause(),Zb(()=>mc(D)))}else 2===Z?(Z=0,Zb(nc),oc(D),D=null,hc.forEach(e=>tb(e))):M(`invalid state: ${Z}`);return cc}}\nfunction pc(a){return kc(b=>{a().then(b)})}W.gb();\nvar qc=[null,Oa,Pa,bb,db,eb,ib,jb,kb,lb,mb,nb,ob,pb,qb,rb,sb,yb,zb,Mb,Nb,Ob,Pb,Qb,Tb],tc={r:function(a,b,c){return pc(async()=>{await B.xb(a,b,c)})},b:function(a,b,c){a>>>=0;(new Za(a)).gb(b>>>0,c>>>0);$a=a;ab++;throw $a;},O:function(a){rc(a>>>0,!F,1,!ka,131072,!1);W.lb()},l:function(a){a>>>=0;H?postMessage({cmd:\"cleanupThread\",thread:a}):Ka(a)},I:cb,i:db,U:eb,E:ib,G:jb,V:kb,S:lb,K:mb,R:nb,p:ob,F:pb,C:qb,T:rb,D:sb,q:()=>!0,A:function(a,b){a>>>=0;a==b>>>0?setTimeout(()=>Ta()):H?postMessage({targetThread:a,\ncmd:\"checkMailbox\"}):(a=W.Qa[a])&&a.postMessage({cmd:\"checkMailbox\"})},M:function(){return-1},N:ub,X:function(a){G&&W.Qa[a>>>0].ref()},u:function(a,b,c){a=b+2097152>>>0<4194305-!!a?(a>>>0)+4294967296*b:NaN;c>>>=0;a=new Date(1E3*a);z()[c>>2>>>0]=a.getUTCSeconds();z()[c+4>>2>>>0]=a.getUTCMinutes();z()[c+8>>2>>>0]=a.getUTCHours();z()[c+12>>2>>>0]=a.getUTCDate();z()[c+16>>2>>>0]=a.getUTCMonth();z()[c+20>>2>>>0]=a.getUTCFullYear()-1900;z()[c+24>>2>>>0]=a.getUTCDay();a=(a.getTime()-Date.UTC(a.getUTCFullYear(),\n0,1,0,0,0,0))/864E5|0;z()[c+28>>2>>>0]=a},v:function(a,b,c){a=b+2097152>>>0<4194305-!!a?(a>>>0)+4294967296*b:NaN;c>>>=0;a=new Date(1E3*a);z()[c>>2>>>0]=a.getSeconds();z()[c+4>>2>>>0]=a.getMinutes();z()[c+8>>2>>>0]=a.getHours();z()[c+12>>2>>>0]=a.getDate();z()[c+16>>2>>>0]=a.getMonth();z()[c+20>>2>>>0]=a.getFullYear()-1900;z()[c+24>>2>>>0]=a.getDay();b=(Y(a.getFullYear())?wb:xb)[a.getMonth()]+a.getDate()-1|0;z()[c+28>>2>>>0]=b;z()[c+36>>2>>>0]=-(60*a.getTimezoneOffset());b=(new Date(a.getFullYear(),\n6,1)).getTimezoneOffset();var e=(new Date(a.getFullYear(),0,1)).getTimezoneOffset();a=(b!=e&&a.getTimezoneOffset()==Math.min(e,b))|0;z()[c+32>>2>>>0]=a},w:function(a){a>>>=0;var b=new 
Date(z()[a+20>>2>>>0]+1900,z()[a+16>>2>>>0],z()[a+12>>2>>>0],z()[a+8>>2>>>0],z()[a+4>>2>>>0],z()[a>>2>>>0],0),c=z()[a+32>>2>>>0],e=b.getTimezoneOffset(),f=(new Date(b.getFullYear(),6,1)).getTimezoneOffset(),h=(new Date(b.getFullYear(),0,1)).getTimezoneOffset(),k=Math.min(h,f);0>c?z()[a+32>>2>>>0]=Number(f!=h&&k==e):\n0>2>>>0]=b.getDay();c=(Y(b.getFullYear())?wb:xb)[b.getMonth()]+b.getDate()-1|0;z()[a+28>>2>>>0]=c;z()[a>>2>>>0]=b.getSeconds();z()[a+4>>2>>>0]=b.getMinutes();z()[a+8>>2>>>0]=b.getHours();z()[a+12>>2>>>0]=b.getDate();z()[a+16>>2>>>0]=b.getMonth();z()[a+20>>2>>>0]=b.getYear();a=b.getTime()/1E3;return sc((U=a,1<=+Math.abs(U)?0>>0:~~+Math.ceil((U-+(~~U>>>0))/4294967296)>>>0:0)),a>>>0},s:yb,t:zb,\nz:function(a,b,c){function e(m){return(m=m.toTimeString().match(/\\(([A-Za-z ]+)\\)$/))?m[1]:\"GMT\"}a>>>=0;b>>>=0;c>>>=0;var f=(new Date).getFullYear(),h=new Date(f,0,1),k=new Date(f,6,1);f=h.getTimezoneOffset();var q=k.getTimezoneOffset(),n=Math.max(f,q);A()[a>>2>>>0]=60*n;z()[b>>2>>>0]=Number(f!=q);a=e(h);b=e(k);a=Bb(a);b=Bb(b);q>2>>>0]=a,A()[c+4>>2>>>0]=b):(A()[c>>2>>>0]=b,A()[c+4>>2>>>0]=a)},d:()=>{M(\"\")},c:function(a,b,c){a>>>=0;b=Db(b>>>0,c>>>0);return Ha[a].apply(null,b)},k:function(a,\nb,c){a>>>=0;b=Db(b>>>0,c>>>0);return Ha[a].apply(null,b)},m:function(){},j:function(){return Date.now()},W:()=>{xa+=1;throw\"unwind\";},B:function(){return 4294901760},f:()=>performance.timeOrigin+performance.now(),g:function(){return G?require(\"os\").cpus().length:navigator.hardwareConcurrency},L:function(a,b,c,e){W.Pb=b>>>0;Ib.length=c;b=e>>>0>>3;for(e=0;e>>0];return(0>a?Ha[-a-1]:qc[a]).apply(null,Ib)},y:function(a){a>>>=0;var b=v().length;if(a<=b||4294901760=c;c*=2){var e=b*(1+.2/c);e=Math.min(e,a+100663296);var f=Math;e=Math.max(a,e);a:{f=f.min.call(f,4294901760,e+(65536-e%65536)%65536)-l.buffer.byteLength+65535>>>16;try{l.grow(f);t();var h=1;break a}catch(k){}h=void 0}if(h)return!0}return!1},P:Mb,Q:Nb,H:Qa,h:Ob,o:Pb,x:Qb,n:Tb,a:l||B.wasmMemory,J:Yb,e:function(a,b,c,e){return Yb(a>>>0,b>>>0,c>>>0,e>>>0)}};\n(function(){function a(c,e){c=c.exports;c=$b(c);N=c=uc(c);W.mb.push(N.Da);va.unshift(N.Y);sa=e;Ba();return c}var b={a:tc};Aa();if(B.instantiateWasm)try{return B.instantiateWasm(b,a)}catch(c){K(\"Module.instantiateWasm callback failed with error: 
\"+c),C(c)}Ga(b,function(c){a(c.instance,c.module)}).catch(C);return{}})();B._OrtInit=(a,b)=>(B._OrtInit=N.Z)(a,b);B._OrtGetLastError=(a,b)=>(B._OrtGetLastError=N._)(a,b);\nB._OrtCreateSessionOptions=(a,b,c,e,f,h,k,q,n,m)=>(B._OrtCreateSessionOptions=N.$)(a,b,c,e,f,h,k,q,n,m);B._OrtAppendExecutionProvider=(a,b)=>(B._OrtAppendExecutionProvider=N.aa)(a,b);B._OrtAddFreeDimensionOverride=(a,b,c)=>(B._OrtAddFreeDimensionOverride=N.ba)(a,b,c);B._OrtAddSessionConfigEntry=(a,b,c)=>(B._OrtAddSessionConfigEntry=N.ca)(a,b,c);B._OrtReleaseSessionOptions=a=>(B._OrtReleaseSessionOptions=N.da)(a);B._OrtCreateSession=(a,b,c)=>(B._OrtCreateSession=N.ea)(a,b,c);\nB._OrtReleaseSession=a=>(B._OrtReleaseSession=N.fa)(a);B._OrtGetInputOutputCount=(a,b,c)=>(B._OrtGetInputOutputCount=N.ga)(a,b,c);B._OrtGetInputName=(a,b)=>(B._OrtGetInputName=N.ha)(a,b);B._OrtGetOutputName=(a,b)=>(B._OrtGetOutputName=N.ia)(a,b);B._OrtFree=a=>(B._OrtFree=N.ja)(a);B._OrtCreateTensor=(a,b,c,e,f,h)=>(B._OrtCreateTensor=N.ka)(a,b,c,e,f,h);B._OrtGetTensorData=(a,b,c,e,f)=>(B._OrtGetTensorData=N.la)(a,b,c,e,f);B._OrtReleaseTensor=a=>(B._OrtReleaseTensor=N.ma)(a);\nB._OrtCreateRunOptions=(a,b,c,e)=>(B._OrtCreateRunOptions=N.na)(a,b,c,e);B._OrtAddRunConfigEntry=(a,b,c)=>(B._OrtAddRunConfigEntry=N.oa)(a,b,c);B._OrtReleaseRunOptions=a=>(B._OrtReleaseRunOptions=N.pa)(a);B._OrtCreateBinding=a=>(B._OrtCreateBinding=N.qa)(a);B._OrtBindInput=(a,b,c)=>(B._OrtBindInput=N.ra)(a,b,c);B._OrtBindOutput=(a,b,c,e)=>(B._OrtBindOutput=N.sa)(a,b,c,e);B._OrtClearBoundOutputs=a=>(B._OrtClearBoundOutputs=N.ta)(a);B._OrtReleaseBinding=a=>(B._OrtReleaseBinding=N.ua)(a);\nB._OrtRunWithBinding=(a,b,c,e,f)=>(B._OrtRunWithBinding=N.va)(a,b,c,e,f);B._OrtRun=(a,b,c,e,f,h,k,q)=>(B._OrtRun=N.wa)(a,b,c,e,f,h,k,q);B._OrtEndProfiling=a=>(B._OrtEndProfiling=N.xa)(a);B._JsepOutput=(a,b,c)=>(B._JsepOutput=N.ya)(a,b,c);B._JsepGetNodeName=a=>(B._JsepGetNodeName=N.za)(a);var Sa=B._pthread_self=()=>(Sa=B._pthread_self=N.Aa)(),Ab=B._malloc=a=>(Ab=B._malloc=N.Ba)(a),oc=B._free=a=>(oc=B._free=N.Ca)(a);B.__emscripten_tls_init=()=>(B.__emscripten_tls_init=N.Da)();\nvar rc=B.__emscripten_thread_init=(a,b,c,e,f,h)=>(rc=B.__emscripten_thread_init=N.Fa)(a,b,c,e,f,h);B.__emscripten_thread_crashed=()=>(B.__emscripten_thread_crashed=N.Ga)();\nvar Hb=(a,b,c,e)=>(Hb=N.Ha)(a,b,c,e),Ra=a=>(Ra=N.Ia)(a),Ya=B.__emscripten_thread_exit=a=>(Ya=B.__emscripten_thread_exit=N.Ja)(a),vb=B.__emscripten_check_mailbox=()=>(vb=B.__emscripten_check_mailbox=N.Ka)(),sc=a=>(sc=N.La)(a),Va=(a,b)=>(Va=N.Ma)(a,b),Eb=()=>(Eb=N.Na)(),Wa=a=>(Wa=N.Oa)(a),Gb=a=>(Gb=N.Pa)(a),Xa=B.dynCall_ii=(a,b)=>(Xa=B.dynCall_ii=N.Ra)(a,b),mc=a=>(mc=N.Sa)(a),bc=()=>(bc=N.Ta)(),lc=a=>(lc=N.Ua)(a),nc=()=>(nc=N.Va)();B.___start_em_js=917678;B.___stop_em_js=917839;\nfunction uc(a){a=Object.assign({},a);var b=e=>()=>e()>>>0,c=e=>f=>e(f)>>>0;a.__errno_location=b(a.__errno_location);a.pthread_self=b(a.pthread_self);a.malloc=c(a.malloc);a.stackSave=b(a.stackSave);a.stackAlloc=c(a.stackAlloc);return a}B.keepRuntimeAlive=ya;B.wasmMemory=l;B.stackAlloc=Gb;B.stackSave=Eb;B.stackRestore=Wa;B.UTF8ToString=V;B.stringToUTF8=hb;B.lengthBytesUTF8=fb;B.ExitStatus=Ia;B.PThread=W;var vc;S=function wc(){vc||xc();vc||(S=wc)};\nfunction xc(){function a(){if(!vc&&(vc=!0,B.calledRun=!0,!P)){H||Ua(va);fa(B);if(B.onRuntimeInitialized)B.onRuntimeInitialized();if(!H){if(B.postRun)for(\"function\"==typeof B.postRun&&(B.postRun=[B.postRun]);B.postRun.length;){var b=B.postRun.shift();wa.unshift(b)}Ua(wa)}}}if(!(0 ortWasmThreaded);\n", "\"use strict\";var 
Module={},ENVIRONMENT_IS_NODE=typeof process==\"object\"&&typeof process.versions==\"object\"&&typeof process.versions.node==\"string\";if(ENVIRONMENT_IS_NODE){var nodeWorkerThreads=require(\"worker_threads\"),parentPort=nodeWorkerThreads.parentPort;parentPort.on(\"message\",e=>onmessage({data:e}));var fs=require(\"fs\");Object.assign(global,{self:global,require,Module,location:{href:__filename},Worker:nodeWorkerThreads.Worker,importScripts:e=>(0,eval)(fs.readFileSync(e,\"utf8\")+\"//# sourceURL=\"+e),postMessage:e=>parentPort.postMessage(e),performance:global.performance||{now:Date.now}})}var initializedJS=!1;function threadPrintErr(){var e=Array.prototype.slice.call(arguments).join(\" \");if(ENVIRONMENT_IS_NODE){fs.writeSync(2,e+`\n`);return}console.error(e)}function threadAlert(){var e=Array.prototype.slice.call(arguments).join(\" \");postMessage({cmd:\"alert\",text:e,threadId:Module._pthread_self()})}var err=threadPrintErr;self.alert=threadAlert,Module.instantiateWasm=(e,t)=>{var a=Module.wasmModule;Module.wasmModule=null;var r=new WebAssembly.Instance(a,e);return t(r)},self.onunhandledrejection=e=>{throw e.reason??e};function handleMessage(e){try{if(e.data.cmd===\"load\"){let a=[];self.onmessage=r=>a.push(r),self.startWorker=r=>{Module=r,postMessage({cmd:\"loaded\"});for(let s of a)handleMessage(s);self.onmessage=handleMessage},Module.wasmModule=e.data.wasmModule;for(const r of e.data.handlers)Module[r]=(...s)=>{postMessage({cmd:\"callHandler\",handler:r,args:s})};if(Module.wasmMemory=e.data.wasmMemory,Module.buffer=Module.wasmMemory.buffer,Module.ENVIRONMENT_IS_PTHREAD=!0,typeof e.data.urlOrBlob==\"string\")importScripts(e.data.urlOrBlob);else{var t=URL.createObjectURL(e.data.urlOrBlob);importScripts(t),URL.revokeObjectURL(t)}ortWasmThreaded(Module)}else if(e.data.cmd===\"run\"){Module.__emscripten_thread_init(e.data.pthread_ptr,0,0,1),Module.__emscripten_thread_mailbox_await(e.data.pthread_ptr),Module.establishStackSpace(),Module.PThread.receiveObjectTransfer(e.data),Module.PThread.threadInitTLS(),initializedJS||(initializedJS=!0);try{Module.invokeEntryPoint(e.data.start_routine,e.data.arg)}catch(a){if(a!=\"unwind\")throw a}}else e.data.cmd===\"cancel\"?Module._pthread_self()&&Module.__emscripten_thread_exit(-1):e.data.target===\"setimmediate\"||(e.data.cmd===\"checkMailbox\"?initializedJS&&Module.checkMailbox():e.data.cmd&&(err(\"worker.js received unknown command \"+e.data.cmd),err(e.data)))}catch(a){throw Module.__emscripten_thread_crashed&&Module.__emscripten_thread_crashed(),a}}self.onmessage=handleMessage;\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport * as path from 'node:path';\nimport {Env} from 'onnxruntime-common';\n\nimport {OrtWasmModule} from './binding/ort-wasm';\nimport {OrtWasmThreadedModule} from './binding/ort-wasm-threaded';\n\n/* eslint-disable @typescript-eslint/no-require-imports */\nlet ortWasmFactory: EmscriptenModuleFactory;\n\nif (!BUILD_DEFS.DISABLE_TRAINING) {\n ortWasmFactory = require('./binding/ort-training-wasm-simd.js');\n} else {\n ortWasmFactory =\n BUILD_DEFS.DISABLE_WEBGPU ? require('./binding/ort-wasm.js') : require('./binding/ort-wasm-simd.jsep.js');\n}\n\nconst ortWasmFactoryThreaded: EmscriptenModuleFactory = !BUILD_DEFS.DISABLE_WASM_THREAD ?\n (BUILD_DEFS.DISABLE_WEBGPU ? 
require('./binding/ort-wasm-threaded.js') :\n require('./binding/ort-wasm-simd-threaded.jsep.js')) :\n ortWasmFactory;\n/* eslint-enable @typescript-eslint/no-require-imports */\n\nlet wasm: OrtWasmModule|undefined;\nlet initialized = false;\nlet initializing = false;\nlet aborted = false;\n\nconst isMultiThreadSupported = (numThreads: number): boolean => {\n // WebAssembly threads are set to 1 (single thread).\n if (numThreads === 1) {\n return false;\n }\n\n // If 'SharedArrayBuffer' is not available, WebAssembly threads will not work.\n if (typeof SharedArrayBuffer === 'undefined') {\n if (typeof self !== 'undefined' && !self.crossOriginIsolated) {\n // eslint-disable-next-line no-console\n console.warn(\n 'env.wasm.numThreads is set to ' + numThreads +\n ', but this will not work unless you enable crossOriginIsolated mode. ' +\n 'See https://web.dev/cross-origin-isolation-guide/ for more info.');\n }\n return false;\n }\n\n // onnxruntime-web does not support multi-threads in Node.js.\n if (typeof process !== 'undefined' && process.versions && process.versions.node) {\n // eslint-disable-next-line no-console\n console.warn(\n 'env.wasm.numThreads is set to ' + numThreads +\n ', however, currently onnxruntime-web does not support multi-threads in Node.js. ' +\n 'Please consider using onnxruntime-node for performance critical scenarios.');\n }\n\n try {\n // Test for transferability of SABs (for browsers. needed for Firefox)\n // https://groups.google.com/forum/#!msg/mozilla.dev.platform/IHkBZlHETpA/dwsMNchWEQAJ\n if (typeof MessageChannel !== 'undefined') {\n new MessageChannel().port1.postMessage(new SharedArrayBuffer(1));\n }\n\n // Test for WebAssembly threads capability (for both browsers and Node.js)\n // This typed array is a WebAssembly program containing threaded instructions.\n return WebAssembly.validate(new Uint8Array([\n 0, 97, 115, 109, 1, 0, 0, 0, 1, 4, 1, 96, 0, 0, 3, 2, 1, 0, 5,\n 4, 1, 3, 1, 1, 10, 11, 1, 9, 0, 65, 0, 254, 16, 2, 0, 26, 11\n ]));\n } catch (e) {\n return false;\n }\n};\n\nconst isSimdSupported = (): boolean => {\n try {\n // Test for WebAssembly SIMD capability (for both browsers and Node.js)\n // This typed array is a WebAssembly program containing SIMD instructions.\n\n // The binary data is generated from the following code by wat2wasm:\n //\n // (module\n // (type $t0 (func))\n // (func $f0 (type $t0)\n // (drop\n // (i32x4.dot_i16x8_s\n // (i8x16.splat\n // (i32.const 0))\n // (v128.const i32x4 0x00000000 0x00000000 0x00000000 0x00000000)))))\n\n return WebAssembly.validate(new Uint8Array([\n 0, 97, 115, 109, 1, 0, 0, 0, 1, 4, 1, 96, 0, 0, 3, 2, 1, 0, 10, 30, 1, 28, 0, 65, 0,\n 253, 15, 253, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 253, 186, 1, 26, 11\n ]));\n } catch (e) {\n return false;\n }\n};\n\nconst getWasmFileName = (useSimd: boolean, useThreads: boolean) => {\n if (useSimd) {\n if (!BUILD_DEFS.DISABLE_TRAINING) {\n return 'ort-training-wasm-simd.wasm';\n }\n return useThreads ? 'ort-wasm-simd-threaded.wasm' : 'ort-wasm-simd.wasm';\n } else {\n return useThreads ? 
'ort-wasm-threaded.wasm' : 'ort-wasm.wasm';\n }\n};\n\nexport const initializeWebAssembly = async(flags: Env.WebAssemblyFlags): Promise => {\n if (initialized) {\n return Promise.resolve();\n }\n if (initializing) {\n throw new Error('multiple calls to \\'initializeWebAssembly()\\' detected.');\n }\n if (aborted) {\n throw new Error('previous call to \\'initializeWebAssembly()\\' failed.');\n }\n\n initializing = true;\n\n // wasm flags are already initialized\n const timeout = flags.initTimeout!;\n const numThreads = flags.numThreads!;\n const simd = flags.simd!;\n\n const useThreads = isMultiThreadSupported(numThreads);\n const useSimd = simd && isSimdSupported();\n\n const wasmPaths = flags.wasmPaths;\n const wasmPrefixOverride = typeof wasmPaths === 'string' ? wasmPaths : undefined;\n const wasmFileName = getWasmFileName(useSimd, useThreads);\n const wasmPathOverride = typeof wasmPaths === 'object' ? wasmPaths[wasmFileName] : undefined;\n\n let isTimeout = false;\n\n const tasks: Array> = [];\n\n // promise for timeout\n if (timeout > 0) {\n tasks.push(new Promise((resolve) => {\n setTimeout(() => {\n isTimeout = true;\n resolve();\n }, timeout);\n }));\n }\n\n // promise for module initialization\n tasks.push(new Promise((resolve, reject) => {\n const factory = useThreads ? ortWasmFactoryThreaded : ortWasmFactory;\n const config: Partial = {\n locateFile: (fileName: string, scriptDirectory: string) => {\n if (!BUILD_DEFS.DISABLE_WASM_THREAD && useThreads && fileName.endsWith('.worker.js') &&\n typeof Blob !== 'undefined') {\n return URL.createObjectURL(new Blob(\n [\n // This require() function is handled by esbuild plugin to load file content as string.\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n require('./binding/ort-wasm-threaded.worker.js')\n ],\n {type: 'text/javascript'}));\n }\n\n if (fileName.endsWith('.wasm')) {\n if (wasmPathOverride) {\n return wasmPathOverride;\n }\n\n const prefix = wasmPrefixOverride ?? 
scriptDirectory;\n\n if (!BUILD_DEFS.DISABLE_WEBGPU) {\n if (wasmFileName === 'ort-wasm-simd.wasm') {\n return prefix + 'ort-wasm-simd.jsep.wasm';\n } else if (wasmFileName === 'ort-wasm-simd-threaded.wasm') {\n return prefix + 'ort-wasm-simd-threaded.jsep.wasm';\n }\n }\n\n return prefix + wasmFileName;\n }\n\n return scriptDirectory + fileName;\n }\n };\n\n if (!BUILD_DEFS.DISABLE_WASM_THREAD && useThreads) {\n config.numThreads = numThreads;\n if (typeof Blob === 'undefined') {\n config.mainScriptUrlOrBlob = path.join(__dirname, 'ort-wasm-threaded.js');\n } else {\n const scriptSourceCode = `var ortWasmThreaded=${factory.toString()};`;\n config.mainScriptUrlOrBlob = new Blob([scriptSourceCode], {type: 'text/javascript'});\n }\n }\n\n factory(config).then(\n // wasm module initialized successfully\n module => {\n initializing = false;\n initialized = true;\n wasm = module;\n resolve();\n },\n // wasm module failed to initialize\n (what) => {\n initializing = false;\n aborted = true;\n reject(what);\n });\n }));\n\n await Promise.race(tasks);\n\n if (isTimeout) {\n throw new Error(`WebAssembly backend initializing failed due to timeout: ${timeout}ms`);\n }\n};\n\nexport const getInstance = (): OrtWasmModule => {\n if (initialized && wasm) {\n return wasm;\n }\n\n throw new Error('WebAssembly is not initialized yet.');\n};\n\nexport const dispose = (): void => {\n if (initialized && !initializing && !aborted) {\n initializing = true;\n\n (wasm as OrtWasmThreadedModule).PThread?.terminateAllThreads();\n wasm = undefined;\n\n initializing = false;\n initialized = false;\n aborted = true;\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {getInstance} from './wasm-factory';\n\nexport const allocWasmString = (data: string, allocs: number[]): number => {\n const wasm = getInstance();\n\n const dataLength = wasm.lengthBytesUTF8(data) + 1;\n const dataOffset = wasm._malloc(dataLength);\n wasm.stringToUTF8(data, dataOffset, dataLength);\n allocs.push(dataOffset);\n\n return dataOffset;\n};\n\ninterface ExtraOptionsHandler {\n (name: string, value: string): void;\n}\n\nexport const iterateExtraOptions =\n (options: Record, prefix: string, seen: WeakSet>,\n handler: ExtraOptionsHandler): void => {\n if (typeof options == 'object' && options !== null) {\n if (seen.has(options)) {\n throw new Error('Circular reference in options');\n } else {\n seen.add(options);\n }\n }\n\n Object.entries(options).forEach(([key, value]) => {\n const name = (prefix) ? prefix + key : key;\n if (typeof value === 'object') {\n iterateExtraOptions(value as Record, name + '.', seen, handler);\n } else if (typeof value === 'string' || typeof value === 'number') {\n handler(name, value.toString());\n } else if (typeof value === 'boolean') {\n handler(name, (value) ? '1' : '0');\n } else {\n throw new Error(`Can't handle extra config type: ${typeof value}`);\n }\n });\n };\n\n/**\n * check web assembly API's last error and throw error if any error occurred.\n * @param message a message used when an error occurred.\n */\nexport const checkLastError = (message: string): void => {\n const wasm = getInstance();\n\n const stack = wasm.stackSave();\n try {\n const paramsOffset = wasm.stackAlloc(8);\n wasm._OrtGetLastError(paramsOffset, paramsOffset + 4);\n const errorCode = wasm.HEAP32[paramsOffset / 4];\n const errorMessagePointer = wasm.HEAPU32[paramsOffset / 4 + 1];\n const errorMessage = errorMessagePointer ? 
wasm.UTF8ToString(errorMessagePointer) : '';\n throw new Error(`${message} ERROR_CODE: ${errorCode}, ERROR_MESSAGE: ${errorMessage}`);\n } finally {\n wasm.stackRestore(stack);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from 'onnxruntime-common';\n\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError, iterateExtraOptions} from './wasm-utils';\n\nexport const setRunOptions = (options: InferenceSession.RunOptions): [number, number[]] => {\n const wasm = getInstance();\n let runOptionsHandle = 0;\n const allocs: number[] = [];\n\n const runOptions: InferenceSession.RunOptions = options || {};\n\n try {\n if (options?.logSeverityLevel === undefined) {\n runOptions.logSeverityLevel = 2; // Default to warning\n } else if (\n typeof options.logSeverityLevel !== 'number' || !Number.isInteger(options.logSeverityLevel) ||\n options.logSeverityLevel < 0 || options.logSeverityLevel > 4) {\n throw new Error(`log serverity level is not valid: ${options.logSeverityLevel}`);\n }\n\n if (options?.logVerbosityLevel === undefined) {\n runOptions.logVerbosityLevel = 0; // Default to 0\n } else if (typeof options.logVerbosityLevel !== 'number' || !Number.isInteger(options.logVerbosityLevel)) {\n throw new Error(`log verbosity level is not valid: ${options.logVerbosityLevel}`);\n }\n\n if (options?.terminate === undefined) {\n runOptions.terminate = false;\n }\n\n let tagDataOffset = 0;\n if (options?.tag !== undefined) {\n tagDataOffset = allocWasmString(options.tag, allocs);\n }\n\n runOptionsHandle = wasm._OrtCreateRunOptions(\n runOptions.logSeverityLevel!, runOptions.logVerbosityLevel!, !!runOptions.terminate!, tagDataOffset);\n if (runOptionsHandle === 0) {\n checkLastError('Can\\'t create run options.');\n }\n\n if (options?.extra !== undefined) {\n iterateExtraOptions(options.extra, '', new WeakSet>(), (key, value) => {\n const keyDataOffset = allocWasmString(key, allocs);\n const valueDataOffset = allocWasmString(value, allocs);\n\n if (wasm._OrtAddRunConfigEntry(runOptionsHandle, keyDataOffset, valueDataOffset) !== 0) {\n checkLastError(`Can't set a run config entry: ${key} - ${value}.`);\n }\n });\n }\n\n return [runOptionsHandle, allocs];\n } catch (e) {\n if (runOptionsHandle !== 0) {\n wasm._OrtReleaseRunOptions(runOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n throw e;\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession} from 'onnxruntime-common';\n\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError, iterateExtraOptions} from './wasm-utils';\n\nconst getGraphOptimzationLevel = (graphOptimizationLevel: string|unknown): number => {\n switch (graphOptimizationLevel) {\n case 'disabled':\n return 0;\n case 'basic':\n return 1;\n case 'extended':\n return 2;\n case 'all':\n return 99;\n default:\n throw new Error(`unsupported graph optimization level: ${graphOptimizationLevel}`);\n }\n};\n\nconst getExecutionMode = (executionMode: 'sequential'|'parallel'): number => {\n switch (executionMode) {\n case 'sequential':\n return 0;\n case 'parallel':\n return 1;\n default:\n throw new Error(`unsupported execution mode: ${executionMode}`);\n }\n};\n\nconst appendDefaultOptions = (options: InferenceSession.SessionOptions): void => {\n if (!options.extra) {\n options.extra = {};\n }\n if (!options.extra.session) {\n options.extra.session = {};\n }\n const session = options.extra.session as Record;\n if (!session.use_ort_model_bytes_directly) {\n // eslint-disable-next-line camelcase\n session.use_ort_model_bytes_directly = '1';\n }\n\n // if using JSEP with WebGPU, always disable memory pattern\n if (options.executionProviders &&\n options.executionProviders.some(ep => (typeof ep === 'string' ? ep : ep.name) === 'webgpu')) {\n options.enableMemPattern = false;\n }\n};\n\nconst setExecutionProviders =\n (sessionOptionsHandle: number, executionProviders: readonly InferenceSession.ExecutionProviderConfig[],\n allocs: number[]): void => {\n for (const ep of executionProviders) {\n let epName = typeof ep === 'string' ? ep : ep.name;\n\n // check EP name\n switch (epName) {\n case 'webnn':\n epName = 'WEBNN';\n if (typeof ep !== 'string') {\n const webnnOptions = ep as InferenceSession.WebNNExecutionProviderOption;\n if (webnnOptions?.deviceType) {\n const keyDataOffset = allocWasmString('deviceType', allocs);\n const valueDataOffset = allocWasmString(webnnOptions.deviceType, allocs);\n if (getInstance()._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !==\n 0) {\n checkLastError(`Can't set a session config entry: 'deviceType' - ${webnnOptions.deviceType}.`);\n }\n }\n if (webnnOptions?.numThreads) {\n let numThreads = webnnOptions.numThreads;\n // Just ignore invalid webnnOptions.numThreads.\n if (typeof numThreads != 'number' || !Number.isInteger(numThreads) || numThreads < 0) {\n numThreads = 0;\n }\n const keyDataOffset = allocWasmString('numThreads', allocs);\n const valueDataOffset = allocWasmString(numThreads.toString(), allocs);\n if (getInstance()._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !==\n 0) {\n checkLastError(`Can't set a session config entry: 'numThreads' - ${webnnOptions.numThreads}.`);\n }\n }\n if (webnnOptions?.powerPreference) {\n const keyDataOffset = allocWasmString('powerPreference', allocs);\n const valueDataOffset = allocWasmString(webnnOptions.powerPreference, allocs);\n if (getInstance()._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !==\n 0) {\n checkLastError(\n `Can't set a session config entry: 'powerPreference' - ${webnnOptions.powerPreference}.`);\n }\n }\n }\n break;\n case 'webgpu':\n epName = 'JS';\n if (typeof ep !== 'string') {\n const webgpuOptions = ep as InferenceSession.WebGpuExecutionProviderOption;\n if (webgpuOptions?.preferredLayout) {\n if 
(webgpuOptions.preferredLayout !== 'NCHW' && webgpuOptions.preferredLayout !== 'NHWC') {\n throw new Error(`preferredLayout must be either 'NCHW' or 'NHWC': ${webgpuOptions.preferredLayout}`);\n }\n const keyDataOffset = allocWasmString('preferredLayout', allocs);\n const valueDataOffset = allocWasmString(webgpuOptions.preferredLayout, allocs);\n if (getInstance()._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !==\n 0) {\n checkLastError(\n `Can't set a session config entry: 'preferredLayout' - ${webgpuOptions.preferredLayout}.`);\n }\n }\n }\n break;\n case 'wasm':\n case 'cpu':\n continue;\n default:\n throw new Error(`not supported execution provider: ${epName}`);\n }\n\n const epNameDataOffset = allocWasmString(epName, allocs);\n if (getInstance()._OrtAppendExecutionProvider(sessionOptionsHandle, epNameDataOffset) !== 0) {\n checkLastError(`Can't append execution provider: ${epName}.`);\n }\n }\n };\n\nexport const setSessionOptions = (options?: InferenceSession.SessionOptions): [number, number[]] => {\n const wasm = getInstance();\n let sessionOptionsHandle = 0;\n const allocs: number[] = [];\n\n const sessionOptions: InferenceSession.SessionOptions = options || {};\n appendDefaultOptions(sessionOptions);\n\n try {\n const graphOptimizationLevel = getGraphOptimzationLevel(sessionOptions.graphOptimizationLevel ?? 'all');\n const executionMode = getExecutionMode(sessionOptions.executionMode ?? 'sequential');\n const logIdDataOffset =\n typeof sessionOptions.logId === 'string' ? allocWasmString(sessionOptions.logId, allocs) : 0;\n\n const logSeverityLevel = sessionOptions.logSeverityLevel ?? 2; // Default to 2 - warning\n if (!Number.isInteger(logSeverityLevel) || logSeverityLevel < 0 || logSeverityLevel > 4) {\n throw new Error(`log serverity level is not valid: ${logSeverityLevel}`);\n }\n\n const logVerbosityLevel = sessionOptions.logVerbosityLevel ?? 
0; // Default to 0 - verbose\n if (!Number.isInteger(logVerbosityLevel) || logVerbosityLevel < 0 || logVerbosityLevel > 4) {\n throw new Error(`log verbosity level is not valid: ${logVerbosityLevel}`);\n }\n\n const optimizedModelFilePathOffset = typeof sessionOptions.optimizedModelFilePath === 'string' ?\n allocWasmString(sessionOptions.optimizedModelFilePath, allocs) :\n 0;\n\n sessionOptionsHandle = wasm._OrtCreateSessionOptions(\n graphOptimizationLevel, !!sessionOptions.enableCpuMemArena, !!sessionOptions.enableMemPattern, executionMode,\n !!sessionOptions.enableProfiling, 0, logIdDataOffset, logSeverityLevel, logVerbosityLevel,\n optimizedModelFilePathOffset);\n if (sessionOptionsHandle === 0) {\n checkLastError('Can\\'t create session options.');\n }\n\n if (sessionOptions.executionProviders) {\n setExecutionProviders(sessionOptionsHandle, sessionOptions.executionProviders, allocs);\n }\n\n if (sessionOptions.enableGraphCapture !== undefined) {\n if (typeof sessionOptions.enableGraphCapture !== 'boolean') {\n throw new Error(`enableGraphCapture must be a boolean value: ${sessionOptions.enableGraphCapture}`);\n }\n const keyDataOffset = allocWasmString('enableGraphCapture', allocs);\n const valueDataOffset = allocWasmString(sessionOptions.enableGraphCapture.toString(), allocs);\n if (wasm._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !== 0) {\n checkLastError(\n `Can't set a session config entry: 'enableGraphCapture' - ${sessionOptions.enableGraphCapture}.`);\n }\n }\n\n if (sessionOptions.freeDimensionOverrides) {\n for (const [name, value] of Object.entries(sessionOptions.freeDimensionOverrides)) {\n if (typeof name !== 'string') {\n throw new Error(`free dimension override name must be a string: ${name}`);\n }\n if (typeof value !== 'number' || !Number.isInteger(value) || value < 0) {\n throw new Error(`free dimension override value must be a non-negative integer: ${value}`);\n }\n const nameOffset = allocWasmString(name, allocs);\n if (wasm._OrtAddFreeDimensionOverride(sessionOptionsHandle, nameOffset, value) !== 0) {\n checkLastError(`Can't set a free dimension override: ${name} - ${value}.`);\n }\n }\n }\n\n if (sessionOptions.extra !== undefined) {\n iterateExtraOptions(sessionOptions.extra, '', new WeakSet>(), (key, value) => {\n const keyDataOffset = allocWasmString(key, allocs);\n const valueDataOffset = allocWasmString(value, allocs);\n\n if (wasm._OrtAddSessionConfigEntry(sessionOptionsHandle, keyDataOffset, valueDataOffset) !== 0) {\n checkLastError(`Can't set a session config entry: ${key} - ${value}.`);\n }\n });\n }\n\n return [sessionOptionsHandle, allocs];\n } catch (e) {\n if (sessionOptionsHandle !== 0) {\n wasm._OrtReleaseSessionOptions(sessionOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n throw e;\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from 'onnxruntime-common';\n\n// a dummy type declaration for Float16Array in case any polyfill is available.\ndeclare global {\n // eslint-disable-next-line @typescript-eslint/naming-convention, @typescript-eslint/no-explicit-any\n const Float16Array: any;\n}\n\n// This file includes common definitions. They do NOT have dependency on the WebAssembly instance.\n\n/**\n * Copied from ONNX definition. 
Use this to drop dependency 'onnx_proto' to decrease compiled .js file size.\n */\nexport const enum DataType {\n undefined = 0,\n float = 1,\n uint8 = 2,\n int8 = 3,\n uint16 = 4,\n int16 = 5,\n int32 = 6,\n int64 = 7,\n string = 8,\n bool = 9,\n float16 = 10,\n double = 11,\n uint32 = 12,\n uint64 = 13,\n complex64 = 14,\n complex128 = 15,\n bfloat16 = 16\n}\n\n/**\n * Map string tensor data to enum value\n */\nexport const tensorDataTypeStringToEnum = (type: string): DataType => {\n switch (type) {\n case 'int8':\n return DataType.int8;\n case 'uint8':\n return DataType.uint8;\n case 'bool':\n return DataType.bool;\n case 'int16':\n return DataType.int16;\n case 'uint16':\n return DataType.uint16;\n case 'int32':\n return DataType.int32;\n case 'uint32':\n return DataType.uint32;\n case 'float16':\n return DataType.float16;\n case 'float32':\n return DataType.float;\n case 'float64':\n return DataType.double;\n case 'string':\n return DataType.string;\n case 'int64':\n return DataType.int64;\n case 'uint64':\n return DataType.uint64;\n\n default:\n throw new Error(`unsupported data type: ${type}`);\n }\n};\n\n/**\n * Map enum value to string tensor data\n */\nexport const tensorDataTypeEnumToString = (typeProto: DataType): Tensor.Type => {\n switch (typeProto) {\n case DataType.int8:\n return 'int8';\n case DataType.uint8:\n return 'uint8';\n case DataType.bool:\n return 'bool';\n case DataType.int16:\n return 'int16';\n case DataType.uint16:\n return 'uint16';\n case DataType.int32:\n return 'int32';\n case DataType.uint32:\n return 'uint32';\n case DataType.float16:\n return 'float16';\n case DataType.float:\n return 'float32';\n case DataType.double:\n return 'float64';\n case DataType.string:\n return 'string';\n case DataType.int64:\n return 'int64';\n case DataType.uint64:\n return 'uint64';\n\n default:\n throw new Error(`unsupported data type: ${typeProto}`);\n }\n};\n\n/**\n * get tensor element size in bytes by the given data type\n * @returns size in integer or undefined if the data type is not supported\n */\nexport const getTensorElementSize = (dateType: number): number|\n undefined => [undefined, 4, 1, 1, 2, 2, 4, 8, undefined, 1, 2, 8, 4, 8, undefined, undefined, undefined][dateType];\n\n/**\n * get typed array constructor by the given tensor type\n */\nexport const tensorTypeToTypedArrayConstructor = (type: Tensor.Type): Float32ArrayConstructor|Uint8ArrayConstructor|\n Int8ArrayConstructor|Uint16ArrayConstructor|Int16ArrayConstructor|Int32ArrayConstructor|BigInt64ArrayConstructor|\n Uint8ArrayConstructor|Float64ArrayConstructor|Uint32ArrayConstructor|BigUint64ArrayConstructor => {\n switch (type) {\n case 'float16':\n // allow Float16Array polyfill.\n return typeof Float16Array !== 'undefined' && Float16Array.from ? 
Float16Array : Uint16Array;\n case 'float32':\n return Float32Array;\n case 'uint8':\n return Uint8Array;\n case 'int8':\n return Int8Array;\n case 'uint16':\n return Uint16Array;\n case 'int16':\n return Int16Array;\n case 'int32':\n return Int32Array;\n case 'bool':\n return Uint8Array;\n case 'float64':\n return Float64Array;\n case 'uint32':\n return Uint32Array;\n case 'int64':\n return BigInt64Array;\n case 'uint64':\n return BigUint64Array;\n default:\n throw new Error(`unsupported type: ${type}`);\n }\n };\n\n/**\n * Map string log level to integer value\n */\nexport const logLevelStringToEnum = (logLevel?: 'verbose'|'info'|'warning'|'error'|'fatal'): number => {\n switch (logLevel) {\n case 'verbose':\n return 0;\n case 'info':\n return 1;\n case 'warning':\n return 2;\n case 'error':\n return 3;\n case 'fatal':\n return 4;\n default:\n throw new Error(`unsupported logging level: ${logLevel}`);\n }\n};\n\n/**\n * Check whether the given tensor type is supported by GPU buffer\n */\nexport const isGpuBufferSupportedType = (type: Tensor.Type): type is Tensor.GpuBufferDataTypes => type === 'float32' ||\n type === 'float16' || type === 'int32' || type === 'int64' || type === 'uint32' || type === 'uint8' ||\n type === 'bool';\n\n/**\n * Map string data location to integer value\n */\nexport const dataLocationStringToEnum = (location: Tensor.DataLocation): number => {\n switch (location) {\n case 'none':\n return 0;\n case 'cpu':\n return 1;\n case 'cpu-pinned':\n return 2;\n case 'texture':\n return 3;\n case 'gpu-buffer':\n return 4;\n default:\n throw new Error(`unsupported data location: ${location}`);\n }\n};\n\n/**\n * Map integer data location to string value\n */\nexport const dataLocationEnumToString = (location: number): Tensor.DataLocation|undefined =>\n (['none', 'cpu', 'cpu-pinned', 'texture', 'gpu-buffer'] as const)[location];\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport * as fs from 'fs';\nimport {readFile} from 'node:fs/promises';\n\n/**\n * Load a file into a Uint8Array.\n *\n * @param file - the file to load. Can be a URL/path, a Blob, an ArrayBuffer, or a Uint8Array.\n * @returns a Uint8Array containing the file data.\n */\nexport const loadFile = async(file: string|Blob|ArrayBufferLike|Uint8Array): Promise => {\n if (typeof file === 'string') {\n if (typeof process !== 'undefined' && process.versions && process.versions.node) {\n // load file into ArrayBuffer in Node.js\n try {\n return new Uint8Array(await readFile(file));\n } catch (e) {\n if (e.code === 'ERR_FS_FILE_TOO_LARGE') {\n // file is too large, use fs.createReadStream instead\n const stream = fs.createReadStream(file);\n const chunks: Uint8Array[] = [];\n for await (const chunk of stream) {\n chunks.push(chunk);\n }\n return new Uint8Array(Buffer.concat(chunks));\n }\n throw e;\n }\n } else {\n // load file into ArrayBuffer in browsers\n const response = await fetch(file);\n if (!response.ok) {\n throw new Error(`failed to load external data file: ${file}`);\n }\n const contentLengthHeader = response.headers.get('Content-Length');\n const fileSize = contentLengthHeader ? parseInt(contentLengthHeader, 10) : 0;\n if (fileSize < 1073741824 /* 1GB */) {\n // when Content-Length header is not set, we cannot determine the file size. 
We assume it is small enough to\n // load into memory.\n return new Uint8Array(await response.arrayBuffer());\n } else {\n // file is too large, use stream instead\n if (!response.body) {\n throw new Error(`failed to load external data file: ${file}, no response body.`);\n }\n const reader = response.body.getReader();\n\n let buffer;\n try {\n // try to create ArrayBuffer directly\n buffer = new ArrayBuffer(fileSize);\n } catch (e) {\n if (e instanceof RangeError) {\n // use WebAssembly Memory to allocate larger ArrayBuffer\n const pages = Math.ceil(fileSize / 65536);\n buffer = new WebAssembly.Memory({initial: pages, maximum: pages}).buffer;\n } else {\n throw e;\n }\n }\n\n let offset = 0;\n // eslint-disable-next-line no-constant-condition\n while (true) {\n const {done, value} = await reader.read();\n if (done) {\n break;\n }\n const chunkSize = value.byteLength;\n const chunk = new Uint8Array(buffer, offset, chunkSize);\n chunk.set(value);\n offset += chunkSize;\n }\n return new Uint8Array(buffer, 0, fileSize);\n }\n }\n\n } else if (file instanceof Blob) {\n return new Uint8Array(await file.arrayBuffer());\n } else if (file instanceof Uint8Array) {\n return file;\n } else {\n return new Uint8Array(file);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport {logLevelStringToEnum} from '../wasm-common';\n\ntype LogLevel = NonNullable;\ntype MessageString = string;\ntype MessageFunction = () => string;\ntype Message = MessageString|MessageFunction;\n\nconst logLevelPrefix = ['V', 'I', 'W', 'E', 'F'];\n\nconst doLog = (level: number, message: string): void => {\n // eslint-disable-next-line no-console\n console.log(`[${logLevelPrefix[level]},${new Date().toISOString()}]${message}`);\n};\n\nlet configLogLevel: LogLevel|undefined;\nlet debug: boolean|undefined;\n\nexport const configureLogger = ($configLogLevel: LogLevel, $debug: boolean): void => {\n configLogLevel = $configLogLevel;\n debug = $debug;\n};\n\n/**\n * A simple logging utility to log messages to the console.\n */\nexport const LOG = (logLevel: LogLevel, msg: Message): void => {\n const messageLevel = logLevelStringToEnum(logLevel);\n const configLevel = logLevelStringToEnum(configLogLevel);\n if (messageLevel >= configLevel) {\n doLog(messageLevel, typeof msg === 'function' ? msg() : msg);\n }\n};\n\n/**\n * A simple logging utility to log messages to the console. Only logs when debug is enabled.\n */\nexport const LOG_DEBUG: typeof LOG = (...args: Parameters) => {\n if (debug) {\n LOG(...args);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Tensor} from 'onnxruntime-common';\n\nimport {tensorTypeToTypedArrayConstructor} from '../wasm-common';\n\nexport const createView = (dataBuffer: ArrayBuffer, type: Tensor.Type): Int32Array|Uint32Array|BigInt64Array|\n BigUint64Array|Uint8Array|Float32Array|Float64Array|Int8Array|Int16Array|Uint16Array =>\n new (tensorTypeToTypedArrayConstructor(type))(dataBuffer);\n\n/**\n * a TensorView does not own the data.\n */\nexport interface TensorView {\n readonly data: number;\n readonly dataType: number;\n readonly dims: readonly number[];\n\n /**\n * get a Float32Array data view of the tensor data. tensor data must be on CPU.\n */\n getFloat32Array(): Float32Array;\n\n /**\n * get a BigInt64Array data view of the tensor data. 
tensor data must be on CPU.\n */\n getBigInt64Array(): BigInt64Array;\n\n /**\n * get a Int32Array data view of the tensor data. tensor data must be on CPU.\n */\n getInt32Array(): Int32Array;\n\n /**\n * create a new tensor view with the same data but different dimensions.\n */\n reshape(newDims: readonly number[]): TensorView;\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../wasm-common';\nimport {TensorView} from '../tensor-view';\n\nimport {ShaderHelper} from './ops/common';\n\nexport type SessionState = 'default'|'capturing'|'replaying';\n\nexport enum GpuDataType {\n default = 0,\n upload = 1,\n profile = 2\n}\nexport type GpuDataId = number;\n\nexport type GpuArchitecture = 'ampere';\nexport type GpuVendor = 'amd'|'intel'|'nvidia';\nexport interface AdapterInfo {\n isArchitecture: (architecture: GpuArchitecture) => boolean;\n isVendor: (vendor: GpuVendor) => boolean;\n}\n\nexport interface GpuData {\n type: GpuDataType;\n id: GpuDataId;\n buffer: GPUBuffer;\n}\n\nexport interface TensorInfo {\n dims: readonly number[];\n dataType: number;\n}\n\nexport interface ProgramUniform {\n type: DataType;\n data: number|readonly number[];\n}\n\nexport type ProgramUniformVariableInfo = [type: DataType, length: number];\n\n/**\n * Represent the dependency of a program on a specific input tensor.\n *\n * - 'none': the shader/uniform does not depend on this input's info\n * - 'type': the shader/uniform depends on data type of this input\n * - 'rank': the shader/uniform depends on data type and the rank of this input\n * - 'dims': the shader/uniform depends on data type and the dims of this input\n * - 'data': the shader/uniform depends on data type, the dims and the data of this input\n */\nexport type ProgramInputTensorInfoDependency = 'none'|'type'|'rank'|'dims'|'data';\n\n/**\n * Represent information about a program's cache for shader.\n */\nexport interface ProgramShaderCacheInfo {\n /**\n * an optional string as a cache hint in the artifact cache. If this is not specified, the cache hint will be empty.\n *\n * This hint string should only contains initializing-time information, such as the attributes or any information of\n * initializers. It should NOT contain any runtime information, such as the shape of inputs.\n */\n hint?: string;\n\n /**\n * an optional list of dependencies of the program on the input tensors. If this is not specified, the program depends\n * on 'dims' of all inputs.\n */\n inputDependencies?: ProgramInputTensorInfoDependency[];\n}\n\n/**\n * Represent information about a program's cache for uniform.\n */\nexport interface ProgramUniformCacheInfo {\n /**\n * an optional string as a cache hint in the uniform cache. If this is not specified, the cache hint will be empty.\n *\n * This hint string should only contains runtime information, such as the shape of inputs.\n */\n hint?: string;\n\n /**\n * an optional list of dependencies of the program on the input tensors. If this is not specified, the program depends\n * on 'none' of all inputs.\n */\n inputDependencies?: ProgramInputTensorInfoDependency[];\n}\n\n\n/**\n * A set of data that represent a shader program\n */\nexport interface ProgramInfo {\n /**\n * the name of the program. 
used for debugging and profiling\n */\n name: string;\n\n /**\n * an optional object describing the cache information of the program shader.\n *\n * If this is not specified, assume hint is empty and inputDependencies are ['dims'] for all inputs.\n */\n shaderCache?: ProgramShaderCacheInfo;\n\n /**\n * the shader's processing source code.\n *\n * This function will be called when shader cache missed.\n */\n getShaderSource: (shaderHelper: ShaderHelper) => string;\n\n /**\n * A function to get run data required to run the program.\n *\n * This function will be called every time the program is executed. Should keep this function as simple as possible.\n */\n getRunData: (inputs: readonly TensorView[]) => {\n outputs: readonly TensorInfo[];\n dispatchGroup: {x: number; y?: number; z?: number};\n programUniforms?: readonly ProgramUniform[];\n };\n}\n\nexport interface Artifact {\n programInfo: ProgramInfo;\n computePipeline: GPUComputePipeline;\n uniformVariablesInfo: readonly ProgramUniformVariableInfo[]|undefined;\n}\n\nexport interface ComputeContextInputsOutputsMapping {\n /**\n * specify the mapping to the program's inputs. the value can be a number or a tensor view.\n * - if it's a number, it's the index of the kernel's input\n * - if it's a tensor view, it's an existing tensor view that will be used as the input\n *\n * if inputs is not specified, the mapping will be the kernel's inputs in order.\n */\n readonly inputs?: ReadonlyArray;\n /**\n * specify the mapping to the program's outputs. the value must be a number.\n * - if it's a non-negative number, it's the index of the kernel's output\n * - if it's -1, it's an output that will be created as a temporary value. this value will be released after\n * the kernel is executed.\n * - if it's -2, it's an output that will be created as a persistent value. this value will be released when the\n * kernel is released.\n *\n * if outputs is not specified, the mapping will be the kernel's outputs in order.\n */\n readonly outputs?: readonly number[];\n}\n\n/**\n * A ComputeContext instance carries the states that representing the current running of a kernel.\n */\nexport interface ComputeContext {\n /**\n * gpu adapter info\n */\n readonly adapterInfo: AdapterInfo;\n\n /**\n * stores the pointer to OpKernelContext\n */\n readonly opKernelContext: number;\n\n /**\n * a list of inputs, each input is an instance of TensorView\n */\n readonly inputs: readonly TensorView[];\n\n /**\n * a custom data object that can be used to store any data that is needed by the kernel\n */\n readonly kernelCustomData: {[key: string]: unknown};\n\n /**\n * a buffer that can be used to access custom data created each time the kernel is executed\n */\n readonly customDataBuffer: Uint8Array;\n\n /**\n * a number of outputs for the node\n */\n readonly outputCount: number;\n\n compute(program: ProgramInfo, inputsOutputsMapping?: ComputeContextInputsOutputsMapping): TensorView[];\n output(index: number, dims: readonly number[]): number;\n getMaxComputeWorkgroupSizes(): [number, number, number];\n getMaxComputeWorkgroupStoragesize(): number;\n}\n\nexport type TimestampQuery = 'none'|'inside-passes'|'at-passes';\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {WebGpuBackend} from '../backend-webgpu';\nimport {LOG_DEBUG} from '../log';\n\nimport {GpuData, GpuDataId, GpuDataType} from './types';\n\n/**\n * manages GpuDataId -> GpuBuffer\n */\nexport interface GpuDataManager {\n /**\n * copy data from CPU to GPU.\n */\n upload(id: GpuDataId, data: Uint8Array): void;\n /**\n * copy data from GPU to GPU.\n */\n memcpy(sourceId: GpuDataId, destinationId: GpuDataId): void;\n /**\n * create new data on GPU.\n */\n create(size: number, usage?: number): GpuData;\n /**\n * get GPU data by ID.\n */\n get(id: GpuDataId): GpuData|undefined;\n /**\n * release the data on GPU by ID.\n *\n * @return size of the data released\n */\n release(id: GpuDataId): number;\n /**\n * copy data from GPU to CPU.\n */\n download(id: GpuDataId, getTargetBuffer: () => Uint8Array): Promise;\n\n /**\n * refresh the buffers that marked for release.\n *\n * when release() is called, the buffer is not released immediately. this is because we need to wait for the commands\n * to be submitted to the GPU. this function is called after the commands are submitted so that the buffers can be\n * actually released.\n */\n refreshPendingBuffers(): void;\n\n /**\n * register an external buffer for IO Binding. If the buffer is already registered, return the existing GPU data ID.\n *\n * GPU data manager only manages a mapping between the buffer and the GPU data ID. It will not manage the lifecycle of\n * the external buffer.\n */\n registerExternalBuffer(buffer: GPUBuffer, originalSize: number, previousBuffer?: GPUBuffer): number;\n\n /**\n * unregister an external buffer for IO Binding.\n */\n unregisterExternalBuffer(buffer: GPUBuffer): void;\n\n /**\n * destroy all gpu buffers.\n */\n dispose(): void;\n\n /**\n * release session related data.\n * @param sessionId - specify the session ID.\n */\n onReleaseSession(sessionId: number): void;\n}\n\ninterface StorageCacheValue {\n gpuData: GpuData;\n originalSize: number;\n}\n\nconst bucketFreelist: Map = new Map([\n [64, 250],\n [128, 200],\n [256, 200],\n [512, 200],\n [2048, 230],\n [4096, 200],\n [8192, 50],\n [16384, 50],\n [32768, 50],\n [65536, 50],\n [131072, 50],\n [262144, 50],\n [524288, 50],\n [1048576, 50],\n [2097152, 30],\n [4194304, 20],\n [8388608, 10],\n [12582912, 10],\n [16777216, 10],\n [26214400, 15],\n [33554432, 22],\n [44236800, 2],\n [58982400, 6],\n // we don't want to cache the bucket sizes below but not caching them\n // results in some major performance hits for models like sd-turbo.\n [67108864, 6],\n [134217728, 6],\n [167772160, 6],\n]);\n\nconst bucketArr: number[] = [];\n\n/**\n * normalize the buffer size so that it fits the 128-bits (16 bytes) alignment.\n */\nconst calcNormalizedBufferSize = (size: number) => Math.ceil(size / 16) * 16;\n\n/**\n * calculate the buffer size so that it fits into buckets.\n */\nconst calcBucketBufferSize = (size: number) => {\n for (let idx = 0; idx < bucketArr.length; idx++) {\n const sizeForBucket = bucketArr[idx];\n if (size <= sizeForBucket) {\n return sizeForBucket;\n }\n }\n // not in bucket list -> caller will not cache, round up to 16.\n return Math.ceil(size / 16) * 16;\n};\n\nlet guid = 1;\nconst createNewGpuDataId = () => guid++;\n\n/**\n * exported standard download function. 
This function is used by the session to download the data from GPU, and also by\n * factory to create GPU tensors with the capacity of downloading data from GPU.\n *\n * @param backend - the WebGPU backend\n * @param gpuBuffer - the GPU buffer to download\n * @param originalSize - the original size of the data\n * @param getTargetBuffer - optional. If provided, the data will be copied to the target buffer. Otherwise, a new buffer\n * will be created and returned.\n */\nexport const downloadGpuData =\n async(backend: WebGpuBackend, gpuBuffer: GPUBuffer, originalSize: number, getTargetBuffer?: () => Uint8Array):\n Promise => {\n const bufferSize = calcNormalizedBufferSize(originalSize);\n const gpuReadBuffer = backend.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: bufferSize, usage: GPUBufferUsage.COPY_DST | GPUBufferUsage.MAP_READ});\n try {\n const commandEncoder = backend.getCommandEncoder();\n backend.endComputePass();\n commandEncoder.copyBufferToBuffer(\n gpuBuffer /* source buffer */, 0 /* source offset */, gpuReadBuffer /* destination buffer */,\n 0 /* destination offset */, bufferSize /* size */\n );\n backend.flush();\n\n await gpuReadBuffer.mapAsync(GPUMapMode.READ);\n\n const arrayBuffer = gpuReadBuffer.getMappedRange();\n if (getTargetBuffer) {\n // if we already have a CPU buffer to accept the data, no need to clone the ArrayBuffer.\n const targetBuffer = getTargetBuffer();\n targetBuffer.set(new Uint8Array(arrayBuffer, 0, originalSize));\n return targetBuffer;\n } else {\n // the mapped ArrayBuffer will be released when the GPU buffer is destroyed. Need to clone the\n // ArrayBuffer.\n return new Uint8Array(arrayBuffer.slice(0, originalSize));\n }\n } finally {\n gpuReadBuffer.destroy();\n }\n };\n\nclass GpuDataManagerImpl implements GpuDataManager {\n // GPU Data ID => GPU Data ( storage buffer )\n private storageCache: Map;\n\n // pending buffers for uploading ( data is unmapped )\n private buffersForUploadingPending: GPUBuffer[];\n // pending buffers for computing\n private buffersPending: GPUBuffer[];\n\n // The reusable storage buffers for computing.\n private freeBuffers: Map;\n // The reusable uniform buffers\n private freeUniformBuffers: Map;\n\n // The external buffers registered users for IO Binding.\n private externalBuffers: Map;\n\n // The pendingBuffers for capture graph.\n // a SessionID -> GPUBuffer[] mapping.\n private capturedPendingBuffers: Map;\n\n constructor(private backend: WebGpuBackend) {\n this.storageCache = new Map();\n this.freeBuffers = new Map();\n this.freeUniformBuffers = new Map();\n this.buffersForUploadingPending = [];\n this.buffersPending = [];\n this.externalBuffers = new Map();\n this.capturedPendingBuffers = new Map();\n\n for (const [key, ] of bucketFreelist) {\n bucketArr.push(key);\n this.freeBuffers.set(key, []);\n this.freeUniformBuffers.set(key, []);\n }\n }\n\n upload(id: GpuDataId, data: Uint8Array): void {\n const srcArrayBuffer = data.buffer;\n const srcOffset = data.byteOffset;\n const srcLength = data.byteLength;\n const size = calcNormalizedBufferSize(srcLength);\n\n // get destination gpu buffer\n const gpuDataCache = this.storageCache.get(id);\n if (!gpuDataCache) {\n throw new Error('gpu data for uploading does not exist');\n }\n if (gpuDataCache.originalSize !== srcLength) {\n throw new Error(`inconsistent data size. 
gpu data size=${gpuDataCache.originalSize}, data size=${srcLength}`);\n }\n\n // create gpu buffer\n const gpuBufferForUploading = this.backend.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {mappedAtCreation: true, size, usage: GPUBufferUsage.MAP_WRITE | GPUBufferUsage.COPY_SRC});\n\n // copy (upload) data\n const arrayBuffer = gpuBufferForUploading.getMappedRange();\n new Uint8Array(arrayBuffer).set(new Uint8Array(srcArrayBuffer, srcOffset, srcLength));\n gpuBufferForUploading.unmap();\n\n\n // GPU copy\n const commandEncoder = this.backend.getCommandEncoder();\n this.backend.endComputePass();\n commandEncoder.copyBufferToBuffer(gpuBufferForUploading, 0, gpuDataCache.gpuData.buffer, 0, size);\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.upload(id=${id})`);\n\n this.buffersForUploadingPending.push(gpuBufferForUploading);\n }\n\n memcpy(sourceId: GpuDataId, destinationId: GpuDataId): void {\n // get source gpu buffer\n const sourceGpuDataCache = this.storageCache.get(sourceId);\n if (!sourceGpuDataCache) {\n throw new Error('source gpu data for memcpy does not exist');\n }\n // get destination gpu buffer\n const destinationGpuDataCache = this.storageCache.get(destinationId);\n if (!destinationGpuDataCache) {\n throw new Error('destination gpu data for memcpy does not exist');\n }\n if (sourceGpuDataCache.originalSize !== destinationGpuDataCache.originalSize) {\n throw new Error('inconsistent source and destination gpu data size');\n }\n\n const size = calcNormalizedBufferSize(sourceGpuDataCache.originalSize);\n\n // GPU copy\n const commandEncoder = this.backend.getCommandEncoder();\n this.backend.endComputePass();\n commandEncoder.copyBufferToBuffer(\n sourceGpuDataCache.gpuData.buffer, 0, destinationGpuDataCache.gpuData.buffer, 0, size);\n }\n\n registerExternalBuffer(buffer: GPUBuffer, originalSize: number, previousBuffer?: GPUBuffer): number {\n let id: number|undefined;\n if (previousBuffer) {\n id = this.externalBuffers.get(previousBuffer);\n if (id === undefined) {\n throw new Error('previous buffer is not registered');\n }\n if (buffer === previousBuffer) {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] GpuDataManager.registerExternalBuffer(size=${originalSize}) => id=${\n id}, buffer is the same, skip.`);\n return id;\n } else if (this.backend.capturedCommandList.has(this.backend.currentSessionId!)) {\n throw new Error(`Registering a different external buffer under graph capture mode is not supported yet.\n Please use the previous external buffer!`);\n }\n this.externalBuffers.delete(previousBuffer);\n } else {\n id = createNewGpuDataId();\n }\n\n this.storageCache.set(id, {gpuData: {id, type: GpuDataType.default, buffer}, originalSize});\n this.externalBuffers.set(buffer, id);\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] GpuDataManager.registerExternalBuffer(size=${originalSize}) => id=${id}, registered.`);\n return id;\n }\n\n unregisterExternalBuffer(buffer: GPUBuffer): void {\n const id = this.externalBuffers.get(buffer);\n if (id !== undefined) {\n this.storageCache.delete(id);\n this.externalBuffers.delete(buffer);\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.unregisterExternalBuffer() => id=${id}`);\n }\n }\n\n // eslint-disable-next-line no-bitwise\n create(size: number, usage = GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_SRC | GPUBufferUsage.COPY_DST): GpuData {\n const bufferSize = calcBucketBufferSize(size);\n\n let gpuBuffer;\n // Currently, only storage buffers are reused.\n // eslint-disable-next-line no-bitwise\n const isStorage = 
(usage & GPUBufferUsage.STORAGE) === GPUBufferUsage.STORAGE;\n // eslint-disable-next-line no-bitwise\n const isUniform = (usage & GPUBufferUsage.UNIFORM) === GPUBufferUsage.UNIFORM;\n if (isStorage || isUniform) {\n const freeBuffers = isStorage ? this.freeBuffers : this.freeUniformBuffers;\n const buffers = freeBuffers.get(bufferSize);\n if (!buffers) {\n // no such bucket/freelist - create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n } else {\n if (buffers.length > 0) {\n // in freelist, use it\n gpuBuffer = buffers.pop() as GPUBuffer;\n } else {\n // bucket empty, create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n }\n }\n } else {\n // create gpu buffer\n gpuBuffer = this.backend.device.createBuffer({size: bufferSize, usage});\n }\n\n const gpuData = {id: createNewGpuDataId(), type: GpuDataType.default, buffer: gpuBuffer};\n this.storageCache.set(gpuData.id, {gpuData, originalSize: size});\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.create(size=${size}) => id=${gpuData.id}`);\n return gpuData;\n }\n\n get(id: GpuDataId): GpuData|undefined {\n return this.storageCache.get(id)?.gpuData;\n }\n\n release(id: GpuDataId): number {\n const cachedData = this.storageCache.get(id);\n if (!cachedData) {\n throw new Error('releasing data does not exist');\n }\n\n LOG_DEBUG('verbose', () => `[WebGPU] GpuDataManager.release(id=${id}), gpuDataId=${cachedData.gpuData.id}`);\n\n this.storageCache.delete(id);\n this.buffersPending.push(cachedData.gpuData.buffer);\n // cachedData.gpuData.buffer.destroy();\n\n return cachedData.originalSize;\n }\n\n async download(id: GpuDataId, getTargetBuffer: () => Uint8Array): Promise {\n const cachedData = this.storageCache.get(id);\n if (!cachedData) {\n throw new Error('data does not exist');\n }\n await downloadGpuData(this.backend, cachedData.gpuData.buffer, cachedData.originalSize, getTargetBuffer);\n }\n\n refreshPendingBuffers(): void {\n for (const buffer of this.buffersForUploadingPending) {\n // upload buffer is only useful in the session creation time. 
So we don't need to reuse them in session running.\n buffer.destroy();\n }\n this.buffersForUploadingPending = [];\n\n if (this.buffersPending.length === 0) {\n return;\n }\n\n if (this.backend.sessionStatus === 'default') {\n for (const buffer of this.buffersPending) {\n const maxInFreeList = bucketFreelist.get(buffer.size);\n\n // eslint-disable-next-line no-bitwise\n if ((buffer.usage & GPUBufferUsage.STORAGE) === GPUBufferUsage.STORAGE) {\n // Put the pending buffer to freeBuffers list instead of really destroying it for buffer reusing.\n const freelist = this.freeBuffers.get(buffer.size) || [];\n if (maxInFreeList === undefined || freelist.length >= maxInFreeList) {\n buffer.destroy();\n } else {\n freelist.push(buffer);\n }\n // eslint-disable-next-line no-bitwise\n } else if ((buffer.usage & GPUBufferUsage.UNIFORM) === GPUBufferUsage.UNIFORM) {\n // Put the pending buffer to freeUniformBuffers list instead of really destroying it for buffer reusing.\n const freelist = this.freeUniformBuffers.get(buffer.size) || [];\n if (maxInFreeList === undefined || freelist.length >= maxInFreeList) {\n buffer.destroy();\n } else {\n freelist.push(buffer);\n }\n } else {\n buffer.destroy();\n }\n }\n this.buffersPending = [];\n } else {\n // Don't release intermediate tensors in non-default mode.\n // TODO: reuse the storage buffers in non-default mode.\n let capturedBuffers = this.capturedPendingBuffers.get(this.backend.currentSessionId!);\n if (!capturedBuffers) {\n capturedBuffers = [];\n this.capturedPendingBuffers.set(this.backend.currentSessionId!, capturedBuffers);\n }\n for (const buffer of this.buffersPending) {\n capturedBuffers.push(buffer);\n }\n this.buffersPending = [];\n }\n }\n\n dispose() {\n this.freeBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n this.freeUniformBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n\n this.storageCache.forEach((storage) => {\n storage.gpuData.buffer.destroy();\n });\n\n this.capturedPendingBuffers.forEach((buffers) => {\n buffers.forEach(buffer => {\n buffer.destroy();\n });\n });\n this.storageCache = new Map();\n this.freeBuffers = new Map();\n this.freeUniformBuffers = new Map();\n this.capturedPendingBuffers = new Map();\n }\n\n onReleaseSession(sessionId: number) {\n // release the captured pending buffers.\n const pendingBuffers = this.capturedPendingBuffers.get(sessionId);\n if (pendingBuffers) {\n pendingBuffers.forEach(buffer => {\n buffer.destroy();\n });\n this.capturedPendingBuffers.delete(sessionId);\n }\n }\n}\n\nexport const createGpuDataManager = (...args: ConstructorParameters): GpuDataManager =>\n new GpuDataManagerImpl(...args);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nclass AttributeWithCacheKeyImpl {\n constructor(attribute: Record) {\n Object.assign(this, attribute);\n }\n\n private key: string;\n public get cacheKey(): string {\n if (!this.key) {\n this.key =\n Object.getOwnPropertyNames(this).sort().map(name => `${(this as Record)[name]}`).join(';');\n }\n return this.key;\n }\n}\n\nexport interface AttributeWithCacheKey {\n readonly cacheKey: string;\n}\n\n/**\n * create a new object from the given attribute, and add a cacheKey property to it\n */\nexport const createAttributeWithCacheKey = >(attribute: T): T&AttributeWithCacheKey =>\n new AttributeWithCacheKeyImpl(attribute) as unknown as T & AttributeWithCacheKey;\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\n/* eslint-disable no-param-reassign */\n\nexport class MatMulUtil {\n /**\n * Calculate the expected shape when matrix multiplication\n * @param a The shape of tensor A. Should be a tuple of 2 positive integers\n * @param b The shape of tensor B. Should be a tuple of 2 positive integers\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcMatMulShape(a: [number, number], b: [number, number]): [number, number]|undefined {\n return (a[1] !== b[0]) ? undefined : [a[0], b[1]];\n }\n}\n\n\nexport class BroadcastUtil {\n /**\n * Calculate the expected shape when broadcasting 2 tensors\n * @param a The shape of tensor A. Should be an array of positive integers\n * @param b The shape of tensor B. Should be an array of positive integers\n * @param isMatMul Whether the operation is MatMul\n * @returns The expected shape of the result, or undefined if N/A\n */\n static calcShape(adims: readonly number[], bdims: readonly number[], isMatMul = false): readonly number[]|undefined {\n const arank = adims.length;\n const brank = bdims.length;\n if (arank === 0) {\n return bdims;\n }\n if (brank === 0) {\n return adims;\n }\n const crank = Math.max(adims.length, bdims.length);\n const cdims = new Array(crank);\n\n // calculate the last 2 dimension if it is MatMul\n if (isMatMul) {\n if (arank < 2 || brank < 2) {\n return undefined;\n }\n const cShapeMatMul =\n MatMulUtil.calcMatMulShape([adims[arank - 2], adims[arank - 1]], [bdims[brank - 2], bdims[brank - 1]]);\n if (cShapeMatMul === undefined) {\n return undefined;\n }\n [cdims[crank - 2], cdims[crank - 1]] = cShapeMatMul;\n }\n\n for (let i = isMatMul ? 3 : 1; i <= crank; i++) {\n const aLen = arank - i < 0 ? 1 : adims[arank - i];\n const bLen = brank - i < 0 ? 1 : bdims[brank - i];\n\n if (aLen !== bLen && aLen > 1 && bLen > 1) {\n return undefined;\n }\n const max = Math.max(aLen, bLen);\n if (aLen && bLen) {\n cdims[crank - i] = Math.max(aLen, bLen);\n } else {\n // when either aLen or bLen is 0, the other should be either 0 or 1, otherwise it is not broadcastable.\n if (max > 1) {\n return undefined;\n }\n cdims[crank - i] = 0;\n }\n }\n\n return cdims;\n }\n\n /**\n * Determine if a shape is unidirectional broadcastable to another shape\n * @param shape The input shape\n * @param finalShape The desired shape after broadcasting\n */\n static isValidBroadcast(shape: readonly number[], finalShape: readonly number[]): boolean {\n // align shape to the right\n const inputRank = shape.length;\n const finalRank = finalShape.length;\n if (inputRank > finalRank) {\n return false;\n }\n for (let i = 1; i <= inputRank; i++) {\n if (shape[inputRank - i] !== 1 && shape[inputRank - i] !== finalShape[finalRank - i]) {\n return false;\n }\n }\n return true;\n }\n}\n\n\nexport class ShapeUtil {\n /**\n * calculate the size (number of elements)\n */\n static size(dims: readonly number[]): number {\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, dims.length);\n }\n\n /**\n * convert dims corresponding to type change to pack. ex. 
uint8 data to uint32\n */\n static convertShape(dims: readonly number[], size = 4): readonly number[] {\n const rank = dims.length;\n if (rank === 0) {\n return [];\n }\n const newDims = new Array(rank);\n let i = rank - 1;\n while (i >= 0) {\n if (dims[i] % size === 0) {\n newDims[i] = dims[i] / size;\n break;\n }\n if (size % dims[i] !== 0) {\n throw new Error('cannot convert shape');\n }\n newDims[i] = 1;\n size /= dims[i];\n i--;\n }\n for (i--; i >= 0; i--) {\n newDims[i] = dims[i];\n }\n return newDims;\n }\n\n /**\n * calculate the size (number of elements) from the given axis (inclusive)\n */\n static sizeFromDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeFromDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, axis, dims.length);\n }\n\n /**\n * calculate the size (number of elements) to the given axis (exclusive)\n */\n static sizeToDimension(dims: readonly number[], axis: number): number {\n if (axis < 0 || axis > dims.length) {\n throw new Error(`invalid dimension of ${axis} for sizeToDimension as Tensor has ${dims.length} dimensions.`);\n }\n return ShapeUtil.getSizeFromDimensionRange(dims, 0, axis);\n }\n\n /**\n * calculate the size (number of elements) from and to the given axis [start, end)\n */\n static getSizeFromDimensionRange(dims: readonly number[], start: number, end: number): number {\n let size = 1;\n for (let i = start; i < end; i++) {\n // safety check as this method is called by multiple other methods requiring size.\n // size cannot be negative.\n if (dims[i] < 0) {\n throw new Error(\n // eslint-disable-next-line max-len\n 'cannot get valid size from specified dimension range. Most likely the range contains negative values in them.');\n }\n size *= dims[i];\n }\n return size;\n }\n\n static computeStrides(dims: readonly number[]): readonly number[] {\n const rank = dims.length;\n if (rank === 0) {\n return [];\n } else if (rank === 1) {\n return [1];\n }\n const strides = new Array(rank);\n strides[rank - 1] = 1;\n strides[rank - 2] = dims[rank - 1];\n for (let i = rank - 3; i >= 0; --i) {\n strides[i] = strides[i + 1] * dims[i + 1];\n }\n return strides;\n }\n\n /**\n * normailze axis of range [-r, r) into [0, r).\n */\n static normalizeAxis(axis: number, tensorRank: number): number {\n if (axis < -tensorRank && axis >= tensorRank) {\n throw new Error('unsupported axis for this operation.');\n }\n return axis < 0 ? axis + tensorRank : axis;\n }\n\n static normalizeAxes(axes: readonly number[], tensorRank?: number): number[] {\n return axes.map(x => this.normalizeAxis(x, tensorRank ?? 
axes.length));\n }\n\n /**\n * Sorts a given array based on the indices in the Perm array\n * Used in Transpose\n * @param a Array to be sorted such as dims or strides\n * @param perm Perm given; if null a will be reversed\n */\n static sortBasedOnPerm(a: readonly number[], perm?: readonly number[]): readonly number[] {\n if (perm) {\n return perm.map((v) => a[v]);\n } else {\n return a.slice().reverse();\n }\n }\n\n /**\n * Pads a given shape according to the padding values\n * @param dims shape of the Tensor to be padded\n * @param pad pad values\n */\n static padShape(dims: readonly number[], pad: readonly number[]): readonly number[] {\n const rank = dims.length;\n return dims.map((v, i) => v + pad[i] + pad[i + rank]);\n }\n\n /**\n * Determines if the two shapes are identical\n * @param shape1\n * @param shape2\n */\n static areEqual(shape1: readonly number[], shape2: readonly number[]): boolean {\n if (shape1.length !== shape2.length) {\n return false;\n }\n return shape1.every((v, i) => v === shape2[i]);\n }\n}\n\nexport class PoolConvUtil {\n /**\n * Adjust the kernel, strides, pads to correct rank. Set to default value if not present\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension.\n * @param kernelShape The size of the kernel along each axis.\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n */\n static adjustPoolAttributes(\n isGlobalOperator: boolean, inputDims: readonly number[], kernelShape: number[], strides: number[],\n dilations: number[], pads: number[]): void {\n if (!isGlobalOperator && kernelShape.length !== inputDims.length - 2) {\n throw new Error('length of specified kernel shapes should be 2 less than length of input dimensions');\n }\n\n if (isGlobalOperator) {\n // adjust kernel shape to cover the input dims\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n if (dim >= kernelShape.length) {\n kernelShape.push(inputDims[dim + 2]);\n } else {\n kernelShape[dim] = inputDims[dim + 2];\n }\n }\n }\n\n // adjust strides length to match kernel shape length\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < strides.length) {\n if (strides[dim] < 0) {\n throw new Error('strides should be greater than or equal to 1');\n }\n } else {\n strides.push(1);\n }\n }\n\n // adjust dilation value\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (dim < dilations.length) {\n if (dilations[dim] < 0) {\n throw new Error('dilations should be greater than or equal to 1');\n }\n } else {\n dilations.push(1);\n }\n }\n\n // adjust pads length to match 2 * kernel shape length\n for (let dim = 0; dim < kernelShape.length * 2; dim++) {\n if (dim < pads.length) {\n if (pads[dim] < 0) {\n throw new Error('pad should be greater than or equal to 1');\n }\n } else {\n pads.push(0);\n }\n }\n\n // sanity checks for values in kernel shapes and pads\n for (let dim = 0; dim < kernelShape.length; dim++) {\n if (kernelShape[dim] <= 0) {\n throw new Error('kernel shapes need to be greater than 0');\n }\n\n if (pads[dim] >= kernelShape[dim] || pads[dim + kernelShape.length] >= kernelShape[dim]) {\n throw new Error('pads should be smaller than kernel');\n }\n }\n }\n\n // adjust pad values based on 'autoPad' attribute\n static adjustPadsBasedOnAutoPad(\n inputDims: readonly number[], strides: readonly number[], dilations: readonly number[],\n kernelShape: readonly number[], pads: number[], 
isChannelLast: boolean, autoPad?: string): void {\n if (!autoPad) {\n return;\n }\n\n if (pads.length !== 2 * (inputDims.length - 2)) {\n throw new Error('length of pads should be twice the length of data dimensions');\n }\n\n if (strides.length !== (inputDims.length - 2)) {\n throw new Error('length of strides should be the length of data dimensions');\n }\n\n if (kernelShape.length !== (inputDims.length - 2)) {\n throw new Error('length of kernel shapes should be the length of data dimensions');\n }\n\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + (isChannelLast ? 1 : 2)], strides[dim], dilations[dim], kernelShape[dim], pads, dim,\n dim + inputDims.length - 2, autoPad);\n }\n }\n\n /**\n * Calculate the output shape for Pool ops based on input attributes. (Should be used only for Pool ops)\n * @param isGlobalOperator If true, perform global pooling.\n * @param inputDims The input tensor dimension. (inputs[0].dims)\n * @param strides Stride along each axis.\n * @param dilations Dilation along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computePoolOutputShape(\n isGlobalOperator: boolean, inputDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0) {\n throw new Error('input shape must be of size greater than 0');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], inputDims[1]];\n\n PoolConvUtil.computeShapeHelper(\n isGlobalOperator, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n /**\n * Calculate the output shape for Conv op based on input attributes. (Should be used only for Conv op)\n * @param inputDims The input tensor dimension. (inputs[0].dims)\n * @param filterDims The filter tensor dimension. (inputs[1].dims)\n * @param strides Stride along each axis.\n * @param kernelShape The size of the kernel along each axis.\n * @param pads Padding for the beginning and ending along each axis.\n * @param autoPad DEPRECATED attribute supported for legacy models. Specifies how to implicitly calculate pads in each\n * dimension. Can take values NOTSET, SAME_UPPER, SAME_LOWER, or VALID.\n */\n static computeConvOutputShape(\n inputDims: readonly number[], filterDims: readonly number[], strides: number[], dilations: number[],\n kernelShape: number[], pads: number[], autoPad?: string): number[] {\n if (inputDims.length <= 0 || filterDims.length <= 0) {\n throw new Error('invalid input tensor dims or invalid filter tensor dims');\n }\n\n // Add batch size and number of channels of output\n const outputDims = [inputDims[0], filterDims[0]];\n\n PoolConvUtil.computeShapeHelper(false, inputDims, outputDims, strides, dilations, kernelShape, pads, autoPad);\n return outputDims;\n }\n\n // will compute output shapes for data dimensions ONLY (i.e.) 
no batch size and channels\n // called by computePoolOutputShape() and computeConvOutputShape()\n // adjust pads based on 'autoPad' attribute prior to shape computation\n private static computeShapeHelper(\n isGlobalOperator: boolean, inputDims: readonly number[], outputDims: number[], strides: readonly number[],\n dilations: readonly number[], kernelShape: readonly number[], pads: number[], autoPad?: string) {\n if (isGlobalOperator) {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(1);\n }\n } else {\n for (let dim = 0; dim < inputDims.length - 2; dim++) {\n outputDims.push(PoolConvUtil.adjustPadAndReturnShape(\n inputDims[dim + 2], strides[dim], dilations[dim], kernelShape[dim], pads, dim, dim + inputDims.length - 2,\n autoPad));\n }\n }\n }\n\n // helper for computeShapeHelper() and adjustPadsBasedOnAutoPad()\n // adjusts pad value for given 'autoPad' string and computes output shape along a particular dimension\n private static adjustPadAndReturnShape(\n inSize: number, stride: number, dilation: number, kernel: number, pads: number[], padHeadIndex: number,\n padTailIndex: number, autoPad?: string): number {\n const dkernel = dilation * (kernel - 1) + 1;\n if (autoPad && autoPad !== 'NOTSET') {\n switch (autoPad) {\n case 'VALID':\n pads[padHeadIndex] = 0;\n pads[padTailIndex] = 0;\n return Math.floor(((inSize - dkernel) / stride) + 1);\n case 'SAME_LOWER':\n case 'SAME_UPPER':\n if (dilation !== 1) {\n throw new Error('Dilation not supported for SAME_UPPER or SAME_LOWER');\n } else {\n const legacyTargetSize = (inSize + stride - 1) / stride;\n const padNeeded = (legacyTargetSize - 1) * stride + kernel - inSize;\n pads[padHeadIndex] =\n (autoPad === 'SAME_LOWER') ? Math.floor((padNeeded + 1) / 2) : Math.floor(padNeeded / 2);\n pads[padTailIndex] = padNeeded - pads[padHeadIndex];\n return Math.floor(((inSize + padNeeded - kernel) / stride) + 1);\n }\n default:\n throw new Error('Unsupported AutoPad type');\n }\n } else {\n return Math.floor(((inSize + pads[padHeadIndex] + pads[padTailIndex] - dkernel) / stride) + 1);\n }\n }\n}\n\nexport class GemmUtil {\n // will make sure input shapes are compatible for this op\n // and return back the shape of the output in the form of a tuple\n // will throw exception if the input shapes are not compatible\n static getShapeOfGemmResult(\n leftShape: readonly number[], transLeft: boolean, rightShape: readonly number[], transRight: boolean,\n biasShape?: readonly number[]): readonly number[] {\n if (leftShape.length !== 2 || rightShape.length !== 2) {\n throw new Error('shape need to be of size 2');\n }\n\n let M: number;\n let K: number;\n let N: number;\n\n if (transLeft) {\n M = leftShape[1];\n K = leftShape[0];\n } else {\n M = leftShape[0];\n K = leftShape[1];\n }\n\n let kDim = -1;\n\n if (transRight) {\n N = rightShape[0];\n kDim = 1;\n } else {\n N = rightShape[1];\n kDim = 0;\n }\n\n if (rightShape[kDim] !== K) {\n throw new Error('dimension mismatch');\n }\n\n if (M <= 0 || N <= 0 || K <= 0) {\n throw new Error('invalid shape specified');\n }\n\n if (biasShape && !BroadcastUtil.isValidBroadcast(biasShape, [M, N])) {\n throw new Error('gemm: invalid bias shape for broadcast');\n }\n\n return [M, N, K];\n }\n}\n\n\nexport const MIN_CLIP = -3.4028234663852886e+38;\nexport const MAX_CLIP = 3.4028234663852886e+38;\n", "// Copyright (c) Microsoft Corporation. 
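// --- Editor's note (illustrative sketch, not part of the generated sourcesContent above) ---
// For the explicit (NOTSET) case, the padding / output-shape arithmetic in adjustPadAndReturnShape()
// reduces to: outSize = floor((inSize + padHead + padTail - (dilation*(kernel-1)+1)) / stride) + 1.
// A minimal standalone restatement of that formula with a worked number:
const convOutputDim =
    (inSize: number, kernel: number, stride: number, dilation: number, padHead: number, padTail: number): number => {
      const dkernel = dilation * (kernel - 1) + 1;  // effective (dilated) kernel extent
      return Math.floor((inSize + padHead + padTail - dkernel) / stride) + 1;
    };
// e.g. a 224-wide input, kernel 3, stride 2, dilation 1, pads [1, 1]: floor((224+1+1-3)/2)+1 = 112.
// console.log(convOutputDim(224, 3, 2, 1, 1, 1)); // 112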
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {ShapeUtil} from '../../util';\nimport {ProgramUniform, ProgramUniformVariableInfo} from '../types';\n\n/**\n * constant value for a workgroup size.\n *\n * We definitely can do further optimization in future, but for now we use 64.\n *\n * rule of thumb: Use [a workgroup size of] 64 unless you know what GPU you are targeting or that your workload\n * needs something different.\n *\n * from: https://surma.dev/things/webgpu/\n **/\nexport const WORKGROUP_SIZE = 64;\n\ninterface IndicesHelperTypes {\n /**\n * WGSL type of indices expression\n */\n readonly indices: string;\n\n /**\n * WGSL type of a value\n */\n readonly value: string;\n\n /**\n * WGSL type of storage type representing a value\n *\n * This is usually the same to `value`, but for some type (eg. bool), we need to use `u32` as storage type for\n * value type `vec4`\n */\n readonly storage: string;\n\n /**\n * tensor type as represented in TensorView\n */\n readonly tensor: number;\n}\n\n/**\n * A helper class for generating WGSL code for manipulating indices and data for a shader's input or output.\n *\n * This class is designed to offer a unified way to generate WGSL code for manipulating indices and data for a shader's\n * input or output.\n *\n * The following is a list of terminologies used in this class:\n * - `offset`: a uint32 value representing the offset of an element in the data buffer.\n * - `indices`: an abstraction of a multi-dimensional array's indices representing the data's index on each dimension.\n * - `value`: a value of a data element.\n *\n * Users are expected to create an instance of this class for each shader's input or output, and use the instance to\n * generate WGSL code for manipulating indices and data. The following 2 exported functions are for users to call to\n * create an instance of an indices helper:\n * - `inputVariable()`: create an indices helper instance for an input.\n * - `outputVariable()`: create an indices helper instance for an output.\n * - `internalVariable()`: create an indices helper instance for an internal variable.\n *\n * An indices helper instance contains helper functions for the following operations:\n * - access readonly basic information, including: `name`(the name of the input or output), `usage`(whether it's an\n * input, an output or an internal variable) and `shape`(the passed in shape).\n * - `type`: access readonly type information, including: `indices`(the type of indices), `value`(the type of value at\n * runtime), `storage`(the type of value at storage) and `tensor`(the tensor type as represented in TensorView).\n * - generate WGSL code for getting indices from offset. 
Use `offsetToIndices()` for WGSL code snippet to calculate\n * indices from offset, and use `indicesToOffset()` for WGSL code snippet to calculate offset from indices.\n * - to manipulate an instance of indices, use `setIndices()` and `getIndices()` to set and get the indices on an\n * indices variable.\n * - to manipulate data, use `set()`/`get()` to access data at the given indices from parameter list, use\n * `setByIndices()`/`getByIndices()` to access data at the given indices from an indices variable, and use\n * `setByOffset()`/`getByOffset()` to access data at the given offset.\n * - `impl`: get WGSL code of function implementation for the util functions mentioned above.\n */\nexport interface IndicesHelper {\n /**\n * get WGSL code of function implementation for the util functions.\n *\n */\n readonly impl: () => string;\n\n /**\n * get type info\n */\n readonly type: IndicesHelperTypes;\n\n /**\n * WGSL code of a expression for getting indices from offset.\n *\n * @param varOffset - a u32 expression representing the offset.\n *\n * @returns an `type.indices` expression\n */\n readonly offsetToIndices: (varOffset: string) => string;\n\n /**\n * WGSL code of an `u32` expression for getting offset from indices.\n *\n * @param varIndices - a `type.indices` expression representing the indices.\n *\n * @returns an `u32` expression\n */\n readonly indicesToOffset: (varIndices: string) => string;\n\n /**\n * WGSL code of an `u32` expression for getting original offset from broadcasted indices.\n *\n * @param varIndices - a `type.indices` expression representing the output indices.\n * @param output - output IndicesHelper.\n *\n * @returns an `u32` expression\n */\n readonly broadcastedIndicesToOffset: (varIndices: string, output: IndicesHelper) => string;\n\n /**\n * WGSL code of generating an indices literal\n *\n * @param init - initial value.\n */\n readonly indices: (...init: ReadonlyArray) => string;\n\n /**\n * WGSL code of a statement for setting indices.\n *\n * @param varIndices - a variable name for the indices.\n * @param idx - the index of the indices to set. can be a number or a string (WGSL `u32` expression).\n * @param value - the value to set. can be a number or a string (WGSL `u32` expression).\n *\n * @returns a WGSL statement\n */\n readonly indicesSet: (varIndices: string, idx: number|string, value: number|string) => void;\n\n /**\n * WGSL code of an `u32` expression for getting indices.\n *\n * @param varIndices - a variable name for the indices.\n * @param idx - the index of the indices to get. can be a number or a string (WGSL `u32` expression).\n *\n * @returns an `u32` expression\n */\n readonly indicesGet: (varIndices: string, idx: number|string) => string;\n\n /**\n * WGSL code for a statement for setting data at the given indices.\n *\n * @param indicesAndValue - an array of numbers or strings (WGSL `u32` expression) representing the indices, followed\n * by the value to set. This array should have exactly `shape.length + 1` elements.\n */\n readonly set: (...indicesAndValue: ReadonlyArray) => string;\n\n /**\n * WGSL code for a statement for setting data at the given indices variable.\n *\n * @param varIndices - a variable name for the indices.\n * @param value - the value to set. 
should be a WGSL expression.\n */\n readonly setByIndices: (varIndices: string, value: string) => string;\n\n /**\n * WGSL code for a statement for setting data at the given offset.\n *\n * @param offset - a number or a string (WGSL `u32` expression) representing the offset.\n * @param value - the value to set. should be a WGSL expression.\n */\n readonly setByOffset: (offset: number|string, value: string) => string;\n\n /**\n * WGSL code for an expression for getting data at the given indices.\n *\n * @param indices - an array of numbers or strings (WGSL `u32` expression) representing the indices.\n */\n readonly get: (...indices: ReadonlyArray) => string;\n\n /**\n * WGSL code for an expression for getting data at the given indices variable.\n *\n * @param varIndices - a variable name for the indices.\n */\n readonly getByIndices: (varIndices: string) => string;\n\n /**\n * WGSL code for an expression for getting data at the given offset.\n *\n * @param offset - a number or a string (WGSL `u32` expression) representing the offset.\n */\n readonly getByOffset: (offset: number|string) => string;\n\n /**\n * name of the data variable\n */\n readonly name: string;\n\n /**\n * whether the helper is for an input, an output or an internal variable.\n */\n readonly usage: 'input'|'output'|'internal';\n\n /**\n * the rank of the input or output.\n */\n readonly rank: number;\n\n /**\n * a string representing the variable name for the shape of the input or output.\n */\n readonly shape: string;\n\n /**\n * a string representing the variable name for the strides of the input or output.\n */\n readonly strides: string;\n}\n\nconst getWgslMappedType = (type: number, components: 1|2|3|4): string|[string, string] => {\n if (components === 3) {\n throw new Error('vec3 has same alignment as vec4, use vec4 instead');\n }\n\n // return type is [ storage type, runtime type ] or a single string for both\n switch (type) {\n case DataType.float16:\n return components > 1 ? `vec${components}` : 'f16';\n case DataType.float:\n return components > 1 ? `vec${components}` : 'f32';\n case DataType.int32:\n return components > 1 ? `vec${components}` : 'i32';\n case DataType.uint32:\n return components > 1 ? `vec${components}` : 'u32';\n case DataType.int64:\n if (components > 1) {\n throw new Error('currently not supported vecX of uint64 yet');\n }\n return ['vec2', 'i32'];\n case DataType.uint64:\n if (components > 1) {\n throw new Error('currently not supported vecX of uint64 yet');\n }\n return ['vec2', 'u32'];\n case DataType.bool:\n if (components !== 4) {\n throw new Error('bool must be vec4');\n }\n return ['u32', 'vec4'];\n\n default:\n throw new Error(`Unknown data type: ${type}`);\n }\n};\n\nexport const tensorTypeToWsglStorageType = (type: DataType, components: 1|2|3|4 = 1) => {\n const mappedType = getWgslMappedType(type, components);\n return typeof mappedType === 'string' ? mappedType : mappedType[0];\n};\n\nexport const tensorTypeToWsglValueType = (type: DataType, components: 1|2|3|4 = 1) => {\n const mappedType = getWgslMappedType(type, components);\n return typeof mappedType === 'string' ? 
mappedType : mappedType[1];\n};\n\nexport const createTensorShapeVariables = (...dims: ReadonlyArray): ProgramUniform[] => {\n const programUniforms: ProgramUniform[] = [];\n dims.forEach(dim => {\n if (dim.length !== 0) {\n programUniforms.push(\n {type: DataType.uint32, data: dim}, {type: DataType.uint32, data: ShapeUtil.computeStrides(dim)});\n }\n });\n return programUniforms;\n};\n\n/**\n * A helper function to get maximum vector size for specified data length\n * @param size\n */\nexport const getMaxComponents = (size: number) => {\n // we cannot use vec3 type since it has alignment of 16 bytes\n if (size % 4 === 0) {\n return 4;\n } else if (size % 2 === 0) {\n return 2;\n }\n\n return 1;\n};\n\n/**\n * A helper function that initializes variable as a scalar or vector. e.g. f32(0) or vec4f(0,0,0,0)\n * @param dataType\n * @param components\n * @param value\n */\nexport const fillVector = (dataType = 'f32', components?: number, value = '0') => {\n if (!components || components === 1) {\n return `${dataType}(${value})`;\n }\n\n return `vec${components}<${dataType}>(${value})`;\n};\n\n/**\n * A helper function that casts value or vector to f32\n * @param dataType\n * @param components\n * @param value\n */\nexport const castToF32 = (dataType: string, components: number, value: string) => {\n if (dataType === 'f32') {\n return value;\n }\n if (components === 1) {\n return `f32(${value})`;\n }\n\n return `vec${components}(${value})`;\n};\n\n/**\n * A helper function that returns scalar or sums all components of a vector\n * @param name\n * @param components\n */\nexport const sumVector = (name: string, components: number) => {\n if (components === 4) {\n return `(${name}.x + ${name}.y + ${name}.z + ${name}.w)`;\n } else if (components === 2) {\n return `(${name}.x + ${name}.y)`;\n } else if (components === 3) {\n return `(${name}.x + ${name}.y + ${name}.z)`;\n }\n\n return name;\n};\n\n/**\n * A helper function that returns variable element at index.\n * @param name - the name of variable.\n * @param index - the index of variable element.\n * @param length - the length of variable.\n * @param type - the type of variable, optional.\n */\nexport const getElementAt =\n (name: string, index: number|string, length: number, type?: UniformDataElementType): string => {\n if (name.startsWith('uniforms.') && length > 4) {\n if (typeof (index) === 'string') {\n if (type === 'f16') {\n return `${name}[(${index}) / 8][(${index}) % 8 / 4][(${index}) % 8 % 4]`;\n } else {\n return `${name}[(${index}) / 4][(${index}) % 4]`;\n }\n } else {\n if (type === 'f16') {\n return `${name}[${Math.floor(index / 8)}][${Math.floor(index % 8 / 4)}][${index % 8 % 4}]`;\n } else {\n return `${name}[${Math.floor(index / 4)}][${index % 4}]`;\n }\n }\n } else {\n return length > 1 ? `${name}[${index}]` : name;\n }\n };\n\n/**\n * A helper function to get a IndicesHelper for a given input or output.\n *\n * @param name - the name of the input or output.\n * @param tensorType - the tensor type of the input or output.\n * @param shapeOrRank - the tensor shape or the rank of the input or output.\n * @param usage - the usage of the indices helper.\n * @param components - indicates the number of components of each element. 
1 for scalar, 2 for vec2, 3 for vec3, 4 for\n * vec4.\n */\nconst createIndicesHelper =\n (name: string, tensorType: number, shapeOrRank: number|readonly number[], usage: IndicesHelper['usage'],\n components: 1|2|3|4): IndicesHelper => {\n const useUniform = typeof shapeOrRank === 'number';\n const rank = useUniform ? shapeOrRank : shapeOrRank.length;\n const rankIdentity = [...new Array(rank).keys()];\n const indicesType = rank < 2 ? 'u32' : rank <= 4 ? `vec${rank}` : `array`;\n const mappedType = getWgslMappedType(tensorType, components);\n const valueType = typeof mappedType === 'string' ? mappedType : mappedType[1];\n const storageType = typeof mappedType === 'string' ? mappedType : mappedType[0];\n const type = {indices: indicesType, value: valueType, storage: storageType, tensor: tensorType};\n\n const normalizeDim = (dim: number|string): string => typeof dim === 'string' ? dim : `${dim}u`;\n\n const implementationUsed = {\n offsetToIndices: false,\n indicesToOffset: false,\n broadcastedIndicesToOffset: false,\n set: false,\n setByIndices: false,\n get: false,\n getByIndices: false,\n };\n\n const uniformPrefix = useUniform ? 'uniforms.' : '';\n const shape = `${uniformPrefix}${name}_shape`;\n const strides = `${uniformPrefix}${name}_strides`;\n\n let o2iSnippet = '';\n for (let i = 0; i < rank - 1; i++) {\n o2iSnippet += `\n let dim${i} = current / ${getElementAt(strides, i, rank)};\n let rest${i} = current % ${getElementAt(strides, i, rank)};\n indices[${i}] = dim${i};\n current = rest${i};\n `;\n }\n o2iSnippet += `indices[${rank - 1}] = current;`;\n\n const offsetToIndicesImplementation = rank < 2 ? '' : `\n fn o2i_${name}(offset: u32) -> ${type.indices} {\n var indices: ${type.indices};\n var current = offset;\n ${o2iSnippet}\n return indices;\n }`;\n\n const offsetToIndices = (varOffset: string) => {\n implementationUsed.offsetToIndices = true;\n return rank < 2 ? varOffset : `o2i_${name}(${varOffset})`;\n };\n\n const offsets: string[] = [];\n if (rank >= 2) {\n for (let i = rank - 1; i >= 0; i--) {\n offsets.push(`${getElementAt(strides, i, rank)} * (indices[${i}])`);\n }\n }\n\n const indicesToOffsetImplementation = rank < 2 ? '' : `\n fn i2o_${name}(indices: ${type.indices}) -> u32 {\n return ${offsets.join('+')};\n }`;\n\n const indicesToOffset = (varIndices: string) => {\n implementationUsed.indicesToOffset = true;\n return rank < 2 ? varIndices : `i2o_${name}(${varIndices})`;\n };\n\n const indices = (...init: ReadonlyArray) =>\n rank === 0 ? 
'0u' : `${type.indices}(${init.map(normalizeDim).join(',')})`;\n\n const indicesGet = (varIndices: string, idx: number|string) => {\n if (rank < 2) {\n return `${varIndices}`;\n } else {\n return `${getElementAt(varIndices, idx, rank)}`;\n }\n };\n\n const indicesSet = (varIndices: string, idx: number|string, value: string) => {\n if (rank < 2) {\n return `${varIndices}=${value};`;\n } else {\n return `${getElementAt(varIndices, idx, rank)}=${value};`;\n }\n };\n\n const broadcastedIndicesToOffsetImplementation: {[key: string]: string} = {};\n const broadcastedIndicesToOffset = (varIndices: string, output: IndicesHelper) => {\n implementationUsed.broadcastedIndicesToOffset = true;\n const implKey = `${output.name}broadcastedIndicesTo${name}Offset`;\n if (implKey in broadcastedIndicesToOffsetImplementation) {\n return `${implKey}(${varIndices})`;\n }\n const offsets = [];\n for (let i = rank - 1; i >= 0; i--) {\n const idx = output.indicesGet('outputIndices', i + output.rank - rank);\n offsets.push(`${indicesGet(strides, i)} * (${idx} % ${indicesGet(shape, i)})`);\n }\n broadcastedIndicesToOffsetImplementation[implKey] =\n `fn ${implKey}(outputIndices: ${output.type.indices}) -> u32 {\n return ${offsets.length > 0 ? offsets.join('+') : '0u'};\n }`;\n\n return `${implKey}(${varIndices})`;\n };\n\n const setByOffset = (offset: number|string, value: string) => (() => {\n if (type.storage === type.value) {\n return `${name}[${offset}]=${value};`;\n } else if (type.storage === 'vec2' && type.value === 'i32') {\n // int64, components === 1\n return `${name}[${offset}]=vec2(u32(${value}), select(0u, 0xFFFFFFFFu, ${value} < 0));`;\n } else if (type.storage === 'vec2' && type.value === 'u32') {\n // uint64, components === 1\n return `${name}[${offset}]=vec2(u32(${value}), 0u);`;\n } else if (type.storage === 'u32' && type.value === 'vec4') {\n // bool, components === 4\n return `${name}[${offset}]=dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(${value}));`;\n } else {\n throw new Error(`not supported combination of storage type ${type.storage} and value type ${type.value} yet`);\n }\n })();\n\n const getByOffset = (offset: number|string) => (() => {\n if (type.storage === type.value) {\n return `${name}[${offset}]`;\n } else if (type.storage === 'vec2' && type.value === 'i32') {\n // int64, components === 1\n return `i32(${name}[${offset}].x)`;\n } else if (type.storage === 'vec2' && type.value === 'u32') {\n // uint64, components === 1\n return `u32(${name}[${offset}].x)`;\n } else if (type.storage === 'u32' && type.value === 'vec4') {\n // bool, components === 4\n return `vec4(bool(${name}[${offset}] & 0xFFu), bool(${name}[${offset}] & 0xFF00u), bool(${name}[${\n offset}] & 0xFF0000u), bool(${name}[${offset}] & 0xFF000000u))`;\n } else {\n throw new Error(`not supported combination of storage type ${type.storage} and value type ${type.value} yet`);\n }\n })();\n\n const getByIndicesImplementation = rank < 2 ? '' : `\n fn get_${name}ByIndices(indices: ${type.indices}) -> ${valueType} {\n return ${getByOffset(`i2o_${name}(indices)`)};\n }`;\n\n const getImplementation = rank < 2 ? 
'' : (() => {\n const functionParams = rankIdentity.map(i => `d${i}: u32`).join(', ');\n const dimsParams = rankIdentity.map(i => `d${i}`).join(', ');\n return `\n fn get_${name}(${functionParams}) -> ${valueType} {\n return get_${name}ByIndices(${indices(dimsParams)});\n }`;\n })();\n\n const get = (...indices: ReadonlyArray) => {\n if (indices.length !== rank) {\n throw new Error(`indices length must be ${rank}`);\n }\n\n const normalizedIndices = indices.map(normalizeDim).join(',');\n\n if (rank === 0) {\n return getByOffset('0u');\n } else if (rank === 1) {\n return getByOffset(normalizedIndices[0]);\n } else {\n implementationUsed.get = true;\n implementationUsed.getByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `get_${name}(${normalizedIndices})`;\n }\n };\n\n const getByIndices = (varIndices: string) => {\n if (rank < 2) {\n return getByOffset(varIndices);\n } else {\n implementationUsed.getByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `get_${name}ByIndices(${varIndices})`;\n }\n };\n\n const setByIndicesImplementation = rank < 2 ? '' : `\n fn set_${name}ByIndices(indices: ${type.indices}, value: ${valueType}) {\n ${setByOffset(`i2o_${name}(indices)`, 'value')}\n }`;\n\n const setImplementation = rank < 2 ? '' : (() => {\n const functionParams = rankIdentity.map(i => `d${i}: u32`).join(', ');\n const dimsParams = rankIdentity.map(i => `d${i}`).join(', ');\n return `\n fn set_${name}(${functionParams}, value: ${valueType}) {\n set_${name}ByIndices(${indices(dimsParams)}, value);\n }`;\n })();\n\n const set = (...indicesAndValue: ReadonlyArray) => {\n if (indicesAndValue.length !== rank + 1) {\n throw new Error(`indices length must be ${rank}`);\n }\n const value = indicesAndValue[rank];\n if (typeof value !== 'string') {\n throw new Error('value must be string');\n }\n\n const normalizedIndices = indicesAndValue.slice(0, rank).map(normalizeDim).join(',');\n\n if (rank === 0) {\n return setByOffset('0u', value);\n } else if (rank === 1) {\n return setByOffset(normalizedIndices[0], value);\n } else {\n implementationUsed.set = true;\n implementationUsed.setByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `set_${name}(${normalizedIndices}, ${value})`;\n }\n };\n\n const setByIndices = (varIndices: string, value: string) => {\n if (rank < 2) {\n return setByOffset(varIndices, value);\n } else {\n implementationUsed.setByIndices = true;\n implementationUsed.indicesToOffset = true;\n return `set_${name}ByIndices(${varIndices}, ${value});`;\n }\n };\n\n const impl = () => {\n const impls = [];\n let needShapeStrides = false;\n if (implementationUsed.offsetToIndices) {\n impls.push(offsetToIndicesImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.indicesToOffset) {\n impls.push(indicesToOffsetImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.broadcastedIndicesToOffset) {\n Object.values(broadcastedIndicesToOffsetImplementation).forEach(impl => impls.push(impl));\n needShapeStrides = true;\n }\n if (implementationUsed.set) {\n impls.push(setImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.setByIndices) {\n impls.push(setByIndicesImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.get) {\n impls.push(getImplementation);\n needShapeStrides = true;\n }\n if (implementationUsed.getByIndices) {\n impls.push(getByIndicesImplementation);\n needShapeStrides = true;\n }\n if (!useUniform && needShapeStrides) {\n impls.unshift(\n `const 
${shape} = ${type.indices}(${shapeOrRank.join(',')});`,\n `const ${strides} = ${type.indices}(${ShapeUtil.computeStrides(shapeOrRank).join(',')});`);\n }\n return impls.join('\\n');\n };\n\n return {\n impl,\n type,\n offsetToIndices,\n indicesToOffset,\n broadcastedIndicesToOffset,\n indices,\n indicesGet,\n indicesSet,\n set,\n setByOffset,\n setByIndices,\n get,\n getByOffset,\n getByIndices,\n // isVec4,\n usage,\n name,\n strides,\n shape,\n rank\n };\n };\n\n/**\n * Create a IndicesHelper for an input.\n *\n * @param name - the name of the input.\n * @param type - the tensor type of the input.\n * @param shapeOrRank - the tensor shape or the rank of the input.\n * @param components - the number of components of the input. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the input.\n */\nexport const inputVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'input', components);\n\n/**\n * Create a IndicesHelper for an output.\n *\n * @param name - the name of the output.\n * @param type - the tensor type of the output.\n * @param shapeOrRank - the tensor shape or the rank of the output.\n * @param components - the number of components of the output. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the output.\n */\nexport const outputVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'output', components);\n\n/**\n * Create a IndicesHelper for an internal variable.\n *\n * @param name - the name of the variable.\n * @param type - the tensor type of the variable.\n * @param shapeOrRank - the tensor shape or the rank of the variable.\n * @param components - the number of components of the variable. available values are 1, 2, 3, 4. default is 1.\n * @returns an IndicesHelper for the variable.\n */\nexport const internalVariable =\n (name: string, type: number, shapeOrRank: number|readonly number[], components: 1|2|3|4 = 1): IndicesHelper =>\n createIndicesHelper(name, type, shapeOrRank, 'internal', components);\n\nexport type UniformDataElementType = 'u32'|'f16'|'f32'|'i32';\nexport type UniformsArrayType = Array<{name: string; type: UniformDataElementType; length?: number}>;\n\n/**\n * A ShaderHelper is a helper class for generating WGSL code.\n */\nexport interface ShaderHelper {\n /**\n * A helper function to generate the start of main function in WGSL source code.\n *\n * @example\n * const getShaderSource = (shaderHelper: ShaderHelper) => `\n * ...\n *\n * ${shaderHelper.mainStart()}\n * // your code here inside main() function\n * ...\n * }\n * `;\n *\n * @param workgroupSize - an optional workgroup size. default is WORKGROUP_SIZE.\n */\n mainStart(workgroupSize?: number|[number, number, number]): string;\n\n /**\n * A helper function to generate the code snippet for guarding against out-of-bounds size.\n *\n * @example\n * const getShaderSource = (shaderHelper: ShaderHelper) => `\n * ...\n *\n * ${shaderHelper.mainStart()}\n * ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n *\n * // your code here inside main() function\n * ...\n * }\n * `;\n *\n * @param size - the size of the data to guard against. 
can be a number or a string (WGSL `u32` expression).\n */\n guardAgainstOutOfBoundsWorkgroupSizes(size: unknown): string;\n\n /**\n * A helper function to generate the code snippet for declaring multiple inputs or outputs.\n *\n * @param variables - an array of IndicesHelper for the variables.\n */\n declareVariables(...variables: IndicesHelper[]): string;\n\n /**\n * A helper function to register one uniform. Can be called multiple times to register multiple uniforms.\n *\n * @param name - the name of the uniform.\n * @param type - the type of the uniform.\n * @param length - the length of the uniform, default to 1 when it is not provided.\n */\n registerUniform(name: string, type: string, length?: number): ShaderHelper;\n\n /**\n * A helper function to register multiple uniforms. Can be called multiple times to register multiple uniforms.\n *\n * @param uniforms - an array of uniforms. Each element of the array is an object with 2 properties: `name` and\n * `type`.\n */\n registerUniforms(uniforms: UniformsArrayType): ShaderHelper;\n\n /**\n * A helper function to register multiple internal variables. Can be called multiple times to register multiple\n * internal variables.\n *\n * @param variables - an array of IndicesHelper for the variables.\n */\n registerInternalVariables(...variables: IndicesHelper[]): ShaderHelper;\n}\n\nclass ShaderHelperImpl implements ShaderHelper {\n constructor(private normalizedDispatchGroup: [number, number, number], private limits: GPUSupportedLimits) {}\n\n guardAgainstOutOfBoundsWorkgroupSizes(size: number|string): string {\n // Guard against out-of-bounds work group sizes\n const sizeInCode = typeof size === 'number' ? `${size}u` : size;\n return `if (global_idx >= ${sizeInCode}) { return; }`;\n }\n\n mainStart(workgroupSize: number|[number, number, number] = WORKGROUP_SIZE) {\n const workgroupSizeX = typeof workgroupSize === 'number' ? workgroupSize : workgroupSize[0];\n const workgroupSizeY = typeof workgroupSize === 'number' ? 1 : workgroupSize[1];\n const workgroupSizeZ = typeof workgroupSize === 'number' ? 1 : workgroupSize[2];\n\n if (workgroupSizeX > this.limits.maxComputeWorkgroupSizeX ||\n workgroupSizeY > this.limits.maxComputeWorkgroupSizeY ||\n workgroupSizeZ > this.limits.maxComputeWorkgroupSizeZ) {\n throw new Error(`workgroup size [${workgroupSizeX}, ${workgroupSizeY}, ${\n workgroupSizeZ}] exceeds the maximum workgroup size [${this.limits.maxComputeWorkgroupSizeX}, ${\n this.limits.maxComputeWorkgroupSizeY}, ${this.limits.maxComputeWorkgroupSizeZ}].`);\n }\n\n if (workgroupSizeX * workgroupSizeY * workgroupSizeZ > this.limits.maxComputeInvocationsPerWorkgroup) {\n throw new Error(`workgroup size [${workgroupSizeX}, ${workgroupSizeY}, ${\n workgroupSizeZ}] exceeds the maximum workgroup invocations ${\n this.limits.maxComputeInvocationsPerWorkgroup}.`);\n }\n\n const is1DimensionDispatch = this.normalizedDispatchGroup[1] === 1 && this.normalizedDispatchGroup[2] === 1;\n const paramList = is1DimensionDispatch ? 
`@builtin(global_invocation_id) global_id : vec3,\n @builtin(workgroup_id) workgroup_id : vec3,\n @builtin(local_invocation_id) local_id : vec3` :\n `@builtin(global_invocation_id) global_id : vec3,\n @builtin(local_invocation_id) local_id : vec3,\n @builtin(local_invocation_index) local_idx : u32,\n @builtin(workgroup_id) workgroup_id : vec3,\n @builtin(num_workgroups) num_workgroups : vec3`;\n const globalIdxDefinition = is1DimensionDispatch ?\n 'let global_idx = global_id.x; let local_idx = local_id.x;' :\n `let global_idx = (workgroup_id.z * num_workgroups[0] * num_workgroups[1] +\n workgroup_id.y * num_workgroups[0] + workgroup_id.x) * ${\n workgroupSizeX * workgroupSizeY * workgroupSizeZ}u + local_idx;`;\n\n return `@compute @workgroup_size(${workgroupSizeX}, ${workgroupSizeY}, ${workgroupSizeZ})\n fn main(${paramList}) {\n ${globalIdxDefinition}\n `;\n }\n\n private appendVariableUniforms(variable: IndicesHelper): void {\n if (variable.rank !== 0) {\n if (variable.shape.startsWith('uniforms.')) {\n this.uniforms.push({name: variable.shape.replace('uniforms.', ''), type: 'u32', length: variable.rank});\n }\n if (variable.strides.startsWith('uniforms.')) {\n this.uniforms.push({name: variable.strides.replace('uniforms.', ''), type: 'u32', length: variable.rank});\n }\n }\n }\n\n private declareVariable(variable: IndicesHelper, bindingIndex: number): string {\n if (variable.usage === 'internal') {\n throw new Error('cannot use internal variable with declareVariable(). use registerInternalVariables() instead.');\n }\n this.variables.push(variable);\n this.appendVariableUniforms(variable);\n\n const access = variable.usage === 'input' ? 'read' : 'read_write';\n const storageType = variable.type.storage;\n return `@group(0) @binding(${bindingIndex}) var ${variable.name}: array<${storageType}>;`;\n }\n\n declareVariables(...variables: IndicesHelper[]): string {\n return variables.map(v => this.declareVariable(v, this.variableIndex++)).join('\\n');\n }\n\n private registerInternalVariable(variable: IndicesHelper): void {\n if (variable.usage !== 'internal') {\n throw new Error(\n 'cannot use input or output variable with registerInternalVariable(). use declareVariables() instead.');\n }\n\n this.internalVariables.push(variable);\n this.appendVariableUniforms(variable);\n }\n\n registerInternalVariables(...variables: IndicesHelper[]): ShaderHelper {\n variables.forEach(v => this.registerInternalVariable(v));\n return this;\n }\n\n registerUniform(name: string, type: UniformDataElementType, length = 1): ShaderHelper {\n this.uniforms.push({name, type, length});\n return this;\n }\n\n registerUniforms(additionalUniforms: UniformsArrayType): ShaderHelper {\n this.uniforms = this.uniforms.concat(additionalUniforms);\n return this;\n }\n\n private internalVariables: IndicesHelper[] = [];\n private variables: IndicesHelper[] = [];\n private uniforms: UniformsArrayType = [];\n private uniformDeclaration(): string {\n if (this.uniforms.length === 0) {\n return '';\n }\n\n const uniformSnippets: string[] = [];\n for (const {name, type, length} of this.uniforms) {\n if (length && length > 4) {\n if (type === 'f16') {\n uniformSnippets.push(`@align(16) ${name}:array, ${Math.ceil(length / 8)}>`);\n } else {\n uniformSnippets.push(`${name}:array, ${Math.ceil(length / 4)}>`);\n }\n } else {\n const typeTemp = length == null || length === 1 ? 
type : `vec${length}<${type}>`;\n uniformSnippets.push(`${name}:${typeTemp}`);\n }\n }\n\n return `\n struct Uniforms { ${uniformSnippets.join(', ')} };\n @group(0) @binding(${this.variableIndex}) var uniforms: Uniforms;`;\n }\n private variableIndex = 0;\n\n /**\n * Get additional implementation that needs to be added to the shader source.\n */\n get additionalImplementations(): string {\n return this.uniformDeclaration() + this.variables.map(i => i.impl()).join('\\n') +\n this.internalVariables.map(i => i.impl()).join('\\n');\n }\n\n /**\n * Get the variable info of the shader program.\n */\n get variablesInfo(): ProgramUniformVariableInfo[]|undefined {\n if (this.uniforms.length === 0) {\n return undefined;\n }\n\n const uniformWgslTypeToDataType = (type: UniformDataElementType) =>\n ([DataType.uint32, DataType.float16, DataType.float,\n DataType.int32][['u32', 'f16', 'f32', 'i32'].indexOf(type)]);\n return this.uniforms.map(u => ([uniformWgslTypeToDataType(u.type), u.length ?? 1]));\n }\n}\n\nexport const createShaderHelper = (dispatchGroup: [number, number, number], limits: GPUSupportedLimits) =>\n new ShaderHelperImpl(dispatchGroup, limits);\n\n/**\n * This function comes from https://github.com/tensorflow/tfjs/blob/master/tfjs-core/src/ops/broadcast_util.ts#L18-L40\n * Returns the dimensions in the input shape that are broadcasted to\n * produce the provided output shape.\n *\n * The returned dimensions are 0-indexed and sorted. An example:\n * inShape = [4, 1, 3]\n * outShape = [5, 4, 3, 3]\n * result = [1]. Dimension 1 (2nd dimension of input) gets broadcasted 1 => 3.\n */\nexport const getBroadcastDims = (inShape: readonly number[], outShape: readonly number[]): number[] => {\n const inRank = inShape.length;\n const dims: number[] = [];\n for (let i = 0; i < inRank; i++) {\n const dim = inRank - 1 - i;\n const a = inShape[dim] || 1;\n const b = outShape[outShape.length - 1 - i] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface TransposeAttributes extends AttributeWithCacheKey {\n readonly perm: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Transpose requires 1 input.');\n }\n};\n\nconst getAdjustedPerm = (inputRank: number, perm: number[]): number[] =>\n (perm && perm.length !== inputRank) ? 
[...(new Array(inputRank).keys())].reverse() : perm;\n\nconst getOutputShape = (inputShape: readonly number[], perm: number[]): readonly number[] =>\n ShapeUtil.sortBasedOnPerm(inputShape, getAdjustedPerm(inputShape.length, perm));\n\nconst permFunctionBody = (perm: number[], rank: number, input: IndicesHelper, output: IndicesHelper): string => {\n const reverseFunc = [];\n reverseFunc.push(`fn perm(i: ${output.type.indices}) -> ${input.type.indices} {\n var a: ${input.type.indices};`);\n for (let i = 0; i < rank; ++i) {\n reverseFunc.push(input.indicesSet('a', perm[i], `i[${i}]`));\n }\n reverseFunc.push('return a;}');\n return reverseFunc.join('\\n');\n};\n\nexport const createTransposeProgramInfo = (inputTensor: TensorView, permAttr: number[]): ProgramInfo => {\n const inputDataType = inputTensor.dataType;\n const inputRank = inputTensor.dims.length;\n const perm = getAdjustedPerm(inputRank, permAttr);\n const outputShape = getOutputShape(inputTensor.dims, perm);\n const output = outputVariable('output', inputDataType, outputShape.length);\n const input = inputVariable('a', inputDataType, inputRank);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n\n ${permFunctionBody(perm, inputRank, input, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n let aIndices = perm(indices);\n\n ${output.setByOffset('global_idx', input.getByIndices('aIndices'))}\n }`;\n return {\n name: 'Transpose',\n shaderCache: {hint: `${permAttr}`, inputDependencies: ['rank']},\n getRunData: (inputs) => {\n const outputSize = ShapeUtil.size(outputShape);\n return {\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputs[0].dims, outputShape)],\n };\n },\n getShaderSource,\n };\n};\n\nexport const transpose = (context: ComputeContext, attributes: TransposeAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createTransposeProgramInfo(context.inputs[0], attributes.perm));\n};\n\nexport const parseTransposeAttributes = (attributes: Record): TransposeAttributes =>\n createAttributeWithCacheKey({perm: attributes.perm as number[]});\n", "// Copyright (c) Microsoft Corporation. 
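// --- Editor's note (illustrative sketch, not part of the generated sourcesContent above) ---
// How the Transpose output shape above follows from getAdjustedPerm() + ShapeUtil.sortBasedOnPerm():
// a valid perm reorders the input dims, while a missing or wrong-length perm falls back to reversing them.
const transposeOutputShape = (dims: readonly number[], perm?: readonly number[]): number[] => {
  const adjusted = perm && perm.length === dims.length ? perm : [...dims.keys()].reverse();
  return adjusted.map((axis) => dims[axis]);
};
// console.log(transposeOutputShape([2, 3, 4], [0, 2, 1])); // [2, 4, 3]
// console.log(transposeOutputShape([2, 3, 4]));            // [4, 3, 2] (default: reversed dims)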
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramShaderCacheInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper} from './common';\nimport {createReduceAttributesFromInputs, ReduceAttributes} from './reduce';\nimport {createTransposeProgramInfo} from './transpose';\n\nconst reduceOps: {[key: string]: string} = {\n max: 'select(bestValue, candidate, candidate > bestValue)',\n min: 'select(bestValue, candidate, candidate < bestValue)',\n mean: 'bestValue + candidate',\n sum: 'bestValue + candidate',\n prod: 'bestValue * candidate',\n sumSquare: 'bestValue + candidate * candidate',\n logSumExp: 'bestValue + exp(candidate)',\n l1: 'bestValue + abs(candidate)',\n l2: 'bestValue + candidate * candidate',\n logSum: 'bestValue + candidate'\n};\n\nconst reduceSharedOps: {[key: string]: string} = {\n max: 'select(bestValue, candidate, candidate > bestValue)',\n min: 'select(bestValue, candidate, candidate < bestValue)',\n mean: 'bestValue + candidate',\n sum: 'bestValue + candidate',\n prod: 'bestValue * candidate',\n sumSquare: 'bestValue + candidate',\n logSumExp: 'bestValue + candidate',\n l1: 'bestValue + candidate',\n l2: 'bestValue + candidate',\n logSum: 'bestValue + candidate'\n};\n\nconst reduceInitValues: {[key: string]: string} = {\n max: '_A[offset]',\n min: '_A[offset]',\n mean: '0',\n sum: '0',\n prod: '1',\n sumSquare: '0',\n logSumExp: '0',\n l1: '0',\n l2: '0',\n logSum: '0'\n};\n\nconst reduceOutputValues: {[key: string]: string} = {\n max: 'bestValue',\n min: 'bestValue',\n sum: 'bestValue',\n prod: 'bestValue',\n sumSquare: 'bestValue',\n logSumExp: 'log(bestValue)',\n l1: 'bestValue',\n l2: 'sqrt(bestValue)',\n logSum: 'log(bestValue)'\n};\n\nconst getInnerMostAxes = (numInnerAxes: number, rank: number): number[] => {\n const res = [];\n for (let i = rank - numInnerAxes; i < rank; ++i) {\n res.push(i);\n }\n return res;\n};\n\nconst computeOutAndReduceShapes = (shape: readonly number[], axes: readonly number[]): [number[], number[]] => {\n const outputShape = [];\n const rank = shape.length;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n outputShape.push(shape[dim]);\n }\n }\n const reduceShape = axes.map(dim => shape[dim]);\n return [outputShape, reduceShape];\n};\n\nconst expandShapeToKeepDim = (shape: number[], axes: number[]): number[] => {\n const rank = shape.length + axes.length;\n const expandShape = [];\n let shapeIdx = 0;\n for (let dim = 0; dim < rank; dim++) {\n if (axes.indexOf(dim) === -1) {\n expandShape.push(shape[shapeIdx++]);\n } else {\n expandShape.push(1);\n }\n }\n return expandShape;\n};\n\nconst areAxesInnerMostDims = (axes: number[], rank: number): boolean => {\n for (let i = 0; i < axes.length; ++i) {\n if (axes[axes.length - i - 1] !== rank - 1 - i) {\n return false;\n }\n }\n return true;\n};\n\nconst getAxesPermutation = (axes: number[], rank: number): number[] => {\n const res = [];\n if (!areAxesInnerMostDims(axes, rank)) {\n for (let i = 0; i < rank; ++i) {\n if (axes.indexOf(i) === -1) {\n res.push(i);\n }\n }\n axes.forEach(axis => res.push(axis));\n }\n return res;\n};\n\nexport const createReduceSharedProgramInfo =\n (name: string, shaderCache: ProgramShaderCacheInfo, inputs: readonly TensorView[], reduceType: string,\n outputDataType: DataType, outputShape: number[], reduceShape: number[]): ProgramInfo 
=> {\n const inputShape = inputs[0].dims;\n\n const outputSize = ShapeUtil.size(outputShape);\n const reduceSize = ShapeUtil.size(reduceShape);\n\n const input = inputVariable('_A', inputs[0].dataType, inputShape);\n const output = outputVariable('output', outputDataType, outputShape);\n\n const workgroupSize = 32;\n\n const sharedMemorySnippet = `\n var aBestValues : array;\n `;\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('reduceSize', 'u32').declareVariables(input, output)}\n ${sharedMemorySnippet}\n fn DIV_CEIL(a : u32, b : u32) -> u32 {\n return ((a - 1u) / b + 1u);\n }\n ${shaderHelper.mainStart(workgroupSize)}\n\n let outputIndex = global_idx / ${workgroupSize};\n let offset = outputIndex * uniforms.reduceSize;\n\n var bestValue = f32(${reduceInitValues[reduceType]});\n let Length = uniforms.reduceSize;\n for (var k = local_idx; k < Length; k = k + ${workgroupSize}) {\n let candidate = f32(${input.getByOffset('offset + k')});\n bestValue = ${reduceOps[reduceType]};\n }\n aBestValues[local_idx] = bestValue;\n workgroupBarrier();\n\n var reduceSize = min(Length, ${workgroupSize}u);\n for (var currentSize = reduceSize / 2u; reduceSize > 1u;\n currentSize = reduceSize / 2u) {\n let interval = DIV_CEIL(reduceSize, 2u);\n if (local_idx < currentSize) {\n let candidate = aBestValues[local_idx + interval];\n bestValue = ${reduceSharedOps[reduceType]};\n aBestValues[local_idx] = bestValue;\n }\n reduceSize = interval;\n workgroupBarrier();\n }\n\n if (local_idx == 0u) {\n ${\n output.setByOffset(\n 'outputIndex',\n `${\n reduceType === 'mean' ? `${output.type.storage}(bestValue / f32(uniforms.reduceSize))` :\n `${output.type.storage}(${reduceOutputValues[reduceType]})`}`)};\n }\n }`;\n\n // One work group is responsible for only one element of output.\n return {\n name,\n shaderCache,\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: outputSize},\n programUniforms: [{type: DataType.uint32, data: reduceSize}]\n }),\n };\n };\n\nconst reduceCommon =\n (context: ComputeContext, name: string, attributes: ReduceAttributes,\n reduceType: 'sum'|'sumSquare'|'prod'|'min'|'max'|'mean'|'logSumExp'|'l1'|'l2'|'logSum'): void => {\n const updatedAttributes: ReduceAttributes =\n context.inputs.length === 1 ? 
attributes : createReduceAttributesFromInputs(context.inputs, attributes);\n\n let updatedAxes = updatedAttributes.axes;\n if (updatedAxes.length === 0 && !updatedAttributes.noopWithEmptyAxes) {\n updatedAxes = context.inputs[0].dims.map((_dim, i) => i);\n }\n const normalizeAxes = ShapeUtil.normalizeAxes(updatedAxes, context.inputs[0].dims.length);\n\n let axes = normalizeAxes;\n let input = context.inputs[0];\n const permutedAxes = getAxesPermutation(axes, context.inputs[0].dims.length);\n if (permutedAxes.length > 0) {\n input = context.compute(\n createTransposeProgramInfo(context.inputs[0], permutedAxes), {inputs: [0], outputs: [-1]})[0];\n axes = getInnerMostAxes(axes.length, input.dims.length);\n }\n\n const [outputShape, reduceShape] = computeOutAndReduceShapes(input.dims, axes);\n let finalOutputShape = outputShape;\n if (updatedAttributes.keepDims) {\n finalOutputShape = expandShapeToKeepDim(outputShape, normalizeAxes);\n }\n\n context.compute(\n createReduceSharedProgramInfo(\n name, {hint: updatedAttributes.cacheKey, inputDependencies: ['type']}, [input], reduceType,\n context.inputs[0].dataType, finalOutputShape, reduceShape),\n {inputs: [input]});\n };\n\nexport const reduceMeanShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMeanShared', attributes, 'mean');\n};\n\nexport const reduceL1Shared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceL1Shared', attributes, 'l1');\n};\n\nexport const reduceL2Shared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceL2Shared', attributes, 'l2');\n};\n\nexport const reduceLogSumExpShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceLogSumExpShared', attributes, 'logSumExp');\n};\n\nexport const reduceMaxShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMaxShared', attributes, 'max');\n};\n\nexport const reduceMinShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceMinShared', attributes, 'min');\n};\n\nexport const reduceProdShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceProdShared', attributes, 'prod');\n};\n\nexport const reduceSumShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceSumShared', attributes, 'sum');\n};\n\nexport const reduceSumSquareShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceSumSquareShared', attributes, 'sumSquare');\n};\n\nexport const reduceLogSumShared = (context: ComputeContext, attributes: ReduceAttributes): void => {\n reduceCommon(context, 'ReduceLogSumShared', attributes, 'logSum');\n};\n", "// Copyright (c) Microsoft Corporation. 
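// --- Editor's note (hedged CPU-side sketch, not part of the generated sourcesContent above) ---
// What one workgroup in the shared-memory reduce above produces for a single output element:
// each thread accumulates a strided slice of the reduceSize range, the partials are tree-reduced
// in workgroup memory (aBestValues), and 'mean' divides by reduceSize at the end. A sequential
// equivalent for the 'mean' case (the input values here are hypothetical):
const reduceMeanOneOutput = (data: Float32Array, offset: number, reduceSize: number): number => {
  let bestValue = 0;                       // reduceInitValues['mean'] === '0'
  for (let k = 0; k < reduceSize; k++) {
    bestValue += data[offset + k];         // reduceOps['mean']: 'bestValue + candidate'
  }
  return bestValue / reduceSize;           // the final setByOffset snippet for 'mean'
};
// console.log(reduceMeanOneOutput(new Float32Array([1, 2, 3, 4]), 0, 4)); // 2.5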
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramShaderCacheInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\nimport {reduceL1Shared, reduceL2Shared, reduceLogSumExpShared, reduceLogSumShared, reduceMaxShared, reduceMeanShared, reduceMinShared, reduceProdShared, reduceSumShared, reduceSumSquareShared} from './reduce-shared';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length === 0 || inputs.length > 2) {\n throw new Error('Reduce op requires 1 or 2 inputs.');\n }\n\n if (inputs.length === 2 && inputs[1].dims.length !== 1) {\n throw new Error('Invalid axes input dims.');\n }\n};\n\nexport interface ReduceAttributes extends AttributeWithCacheKey {\n keepDims: boolean;\n noopWithEmptyAxes: boolean;\n axes: number[];\n}\n\nexport type ReduceOp =\n (input: IndicesHelper, output: IndicesHelper,\n axes: readonly number[]) => [string, string, string, string, ...string[]];\n\nconst noOp: ReduceOp = (input) => ['', '', `var value = ${input.getByIndices('input_indices')};`, ''];\nexport const createReduceProgramInfo =\n (name: string, shaderCache: ProgramShaderCacheInfo, inputs: readonly TensorView[], reduceOp: ReduceOp,\n axesInput: number[], outputDataType: DataType, keepDims = false, noopWithEmptyAxes = false): ProgramInfo => {\n const outputShape: number[] = [];\n const inputShape = inputs[0].dims;\n const inputRank = inputShape.length;\n const axes = ShapeUtil.normalizeAxes(axesInput, inputRank);\n const reduceOnAllAxes = !noopWithEmptyAxes && axes.length === 0;\n inputShape.forEach((d, i) => {\n if (reduceOnAllAxes || axes.indexOf(i) >= 0) {\n if (keepDims) {\n outputShape.push(1);\n } // else { // skip this axis}\n } else {\n outputShape.push(d);\n }\n });\n const outputRank = outputShape.length;\n const outputSize = ShapeUtil.size(outputShape);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const idxCopy: string[] = []; // copy output indexes to input indexes\n\n const input = inputVariable('_A', inputs[0].dataType, inputRank);\n const output = outputVariable('output', outputDataType, outputRank);\n const ops = reduceOp(input, output, axes);\n let reduceOps = ops[2];\n\n for (let k = 0, l = 0; k < inputRank; k++) {\n // if this axis is reduced\n if (reduceOnAllAxes || axes.indexOf(k) >= 0) {\n if (keepDims) {\n l++;\n }\n // loop over the d-th axis\n reduceOps = `for(var j${k}: u32 = 0; j${k} < ${inputShape[k]}; j${k}++) {\n ${ops[2].includes('last_index') ? `let last_index = j${k};` : ''}\n ${input.indicesSet('input_indices', k, `j${k}`)}\n ${reduceOps}\n }`;\n } else {\n idxCopy.push(`${input.indicesSet('input_indices', k, output.indicesGet('output_indices', l))};`);\n l++;\n }\n }\n return `\n\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var input_indices: ${input.type.indices};\n let output_indices = ${output.offsetToIndices('global_idx')};\n\n ${idxCopy.join('\\n')}\n ${ops[0]} // init ops for reduce max/min\n ${ops[1]}\n ${reduceOps}\n ${ops[3]}\n ${ops.length === 4 ? 
output.setByOffset('global_idx', 'value') : ops.slice(4).join('\\n')}\n }`;\n };\n\n return {\n name,\n shaderCache,\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputShape, outputShape)]\n }),\n };\n };\n\nexport const createReduceAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: ReduceAttributes): ReduceAttributes => {\n const axes: number[] = [];\n if (inputs[1].dims[0] > 0) {\n inputs[1].getBigInt64Array().forEach(v => axes.push(Number(v)));\n }\n return createAttributeWithCacheKey(\n {axes, keepDims: attributes.keepDims, noopWithEmptyAxes: attributes.noopWithEmptyAxes});\n };\n\nconst runReduceProgram =\n (context: ComputeContext, name: string, attributes: ReduceAttributes, reduceOp: ReduceOp): void => {\n const inputs = context.inputs;\n const updatedAttributes: ReduceAttributes =\n inputs.length === 1 ? attributes : createReduceAttributesFromInputs(inputs, attributes);\n\n context.compute(\n createReduceProgramInfo(\n name, {hint: updatedAttributes.cacheKey, inputDependencies: ['rank']}, [inputs[0]],\n updatedAttributes.noopWithEmptyAxes && updatedAttributes.axes.length === 0 ? noOp : reduceOp,\n updatedAttributes.axes, inputs[0].dataType, updatedAttributes.keepDims,\n updatedAttributes.noopWithEmptyAxes),\n {inputs: [0]});\n };\n\nconst reduceLogSumNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += ${input.getByIndices('input_indices')};`,\n 'value = log(value);',\n ];\n runReduceProgram(context, 'ReduceLogSum', attributes, reduceOp);\n};\n\nconst reduceL1Naive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += abs(${input.getByIndices('input_indices')});`,\n '',\n ];\n runReduceProgram(context, 'ReduceL1', attributes, reduceOp);\n};\n\nconst reduceL2Naive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var t = ${output.type.value}(0); var value = ${output.type.value}(0);`,\n '',\n `t = ${input.getByIndices('input_indices')}; value += (t * t);`,\n 'value = sqrt(value);',\n ];\n runReduceProgram(context, 'ReduceL2', attributes, reduceOp);\n};\n\nconst reduceLogSumExpNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += exp(${input.getByIndices('input_indices')});`,\n 'value = log(value);',\n ];\n runReduceProgram(context, 'ReduceLogSumExp', attributes, reduceOp);\n};\n\nconst reduceMaxNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, _output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(input.indicesSet('input_indices', k, 0));\n }\n }\n\n return [\n `${idxZero.join('\\n')}`,\n `var value = ${input.getByIndices('input_indices')};`,\n `value = max(value, 
${input.getByIndices('input_indices')});`,\n '',\n ];\n };\n runReduceProgram(context, 'ReduceMax', attributes, reduceOp);\n};\n\nconst reduceMeanNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output, axes) => {\n let size = 1.0;\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n // TODO: this depends on the input dims. If we want to use uniform, this need to be updated.\n size *= context.inputs[0].dims[k];\n }\n }\n\n return [\n 'var sum = f32(0);',\n '',\n `sum += f32(${input.getByIndices('input_indices')});`,\n `let value = ${output.type.value}(sum / ${size});`,\n ];\n };\n runReduceProgram(context, 'ReduceMean', attributes, reduceOp);\n};\n\nconst reduceMinNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, _output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n\n return [\n `${idxZero.join('\\n')}`,\n `var value = ${input.getByIndices('input_indices')};`,\n `value = min(value, ${input.getByIndices('input_indices')});`,\n '',\n ];\n };\n runReduceProgram(context, 'ReduceMin', attributes, reduceOp);\n};\n\nconst reduceProdNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(1);`,\n '',\n `value *= ${input.getByIndices('input_indices')};`,\n '',\n ];\n runReduceProgram(context, 'ReduceProd', attributes, reduceOp);\n};\n\nconst reduceSumNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var value = ${output.type.storage}(0);`,\n '',\n `value += ${input.getByIndices('input_indices')};`,\n '',\n ];\n runReduceProgram(context, 'ReduceSum', attributes, reduceOp);\n};\n\nconst reduceSumSquareNaive = (context: ComputeContext, attributes: ReduceAttributes): void => {\n validateInputs(context.inputs);\n const reduceOp: ReduceOp = (input, output) =>\n [`var t = ${output.type.value}(0); var value = ${output.type.value}(0);`,\n '',\n `t = ${input.getByIndices('input_indices')}; value += t * t;`,\n '',\n ];\n runReduceProgram(context, 'ReduceSumSquare', attributes, reduceOp);\n};\n\nconst useNaiveReduceMethod =\n (shape: readonly number[], axes: readonly number[], noopWithEmptyAxes: boolean): boolean => {\n if (axes.length === 0) {\n return noopWithEmptyAxes;\n }\n\n let outputSize = 1;\n let reduceSize = 1;\n for (let dim = 0; dim < axes.length; dim++) {\n if (axes.indexOf(dim) === -1) {\n outputSize *= shape[dim];\n } else {\n reduceSize *= shape[dim];\n }\n }\n\n // The condition data is very rough, although considering the count of Execution Unit (EU), the potential\n // work groups in a EU and the counts of loops in the naive and shared methods, also doing experiments\n // on some machines.\n return reduceSize < 32 && outputSize > 1024;\n };\n\nexport const reduceMean = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMeanNaive(context, attributes);\n } else {\n reduceMeanShared(context, attributes);\n }\n};\n\nexport const reduceL1 = (context: 
ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceL1Naive(context, attributes);\n } else {\n reduceL1Shared(context, attributes);\n }\n};\n\nexport const reduceL2 = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceL2Naive(context, attributes);\n } else {\n reduceL2Shared(context, attributes);\n }\n};\n\nexport const reduceLogSumExp = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceLogSumExpNaive(context, attributes);\n } else {\n reduceLogSumExpShared(context, attributes);\n }\n};\n\nexport const reduceMax = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMaxNaive(context, attributes);\n } else {\n reduceMaxShared(context, attributes);\n }\n};\n\nexport const reduceMin = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceMinNaive(context, attributes);\n } else {\n reduceMinShared(context, attributes);\n }\n};\n\nexport const reduceProd = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceProdNaive(context, attributes);\n } else {\n reduceProdShared(context, attributes);\n }\n};\n\nexport const reduceSum = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceSumNaive(context, attributes);\n } else {\n reduceSumShared(context, attributes);\n }\n};\n\nexport const reduceSumSquare = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceSumSquareNaive(context, attributes);\n } else {\n reduceSumSquareShared(context, attributes);\n }\n};\n\nexport const reduceLogSum = (context: ComputeContext, attributes: ReduceAttributes): void => {\n if (useNaiveReduceMethod(context.inputs[0].dims, attributes.axes, attributes.noopWithEmptyAxes)) {\n reduceLogSumNaive(context, attributes);\n } else {\n reduceLogSumShared(context, attributes);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// TODO: this is the same naive implementation we use for reduce that has\n// performance limitations when the reduced axis is long. 
Need to add\n// a optimized codepath for this.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext} from '../types';\n\nimport {createReduceProgramInfo, ReduceOp} from './reduce';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length === 0 || inputs.length > 2) {\n throw new Error('ArgMinMaxOp op requires 1 or 2 inputs.');\n }\n if (inputs[0].dataType !== DataType.float) {\n throw new Error('Invalid input type.');\n }\n};\n\nexport interface ArgMinMaxAttributes extends AttributeWithCacheKey {\n keepDims: boolean;\n axis: number;\n selectLastIndex: number;\n}\n\nexport const argMin = (context: ComputeContext, attributes: ArgMinMaxAttributes): void => {\n validateInputs(context.inputs);\n const argMinMaxOp: ReduceOp = (input, output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n return [\n `${idxZero.join('\\n')}`, `var value = ${input.getByIndices('input_indices')};\\nvar best_index : i32 = 0;`,\n `if (${input.getByIndices('input_indices')} ${attributes.selectLastIndex > 0 ? '<=' : '<'} value) {\n value = ${input.getByIndices('input_indices')};\n best_index = i32(last_index);\n }`,\n '', output.setByOffset('global_idx', 'best_index')\n ];\n };\n\n context.compute(\n createReduceProgramInfo(\n 'ArgMin', {hint: attributes.cacheKey, inputDependencies: ['rank']}, [context.inputs[0]], argMinMaxOp,\n [attributes.axis], DataType.int64, attributes.keepDims),\n {inputs: [0]});\n};\n\nexport const argMax = (context: ComputeContext, attributes: ArgMinMaxAttributes): void => {\n validateInputs(context.inputs);\n const argMinMaxOp: ReduceOp = (input, output, axes) => {\n const idxZero = [];\n for (let k = 0; k < input.rank; k++) {\n if (axes.indexOf(k) >= 0 || axes.length === 0) {\n idxZero.push(`input_indices[${k}] = 0;`); // first element\n }\n }\n return [\n `${idxZero.join('\\n')}`, `var value = ${input.getByIndices('input_indices')};\\nvar best_index : i32 = 0;`,\n `if (${input.getByIndices('input_indices')} ${attributes.selectLastIndex > 0 ? '>=' : '>'} value) {\n value = ${input.getByIndices('input_indices')};\n best_index = i32(last_index);\n }`,\n '', output.setByOffset('global_idx', 'best_index')\n ];\n };\n\n context.compute(\n createReduceProgramInfo(\n 'argMax', {hint: attributes.cacheKey, inputDependencies: ['rank']}, [context.inputs[0]], argMinMaxOp,\n [attributes.axis], DataType.int64, attributes.keepDims),\n {inputs: [0]});\n};\n\nexport const parseArgMinMaxAttributes = (attributes: Record): ArgMinMaxAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n", "// Copyright (c) Microsoft Corporation. 
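// Illustrative sketch only (not part of the generated bundle or its source map): a plain
// TypeScript reference for the ArgMax reduction that the embedded WGSL template above encodes,
// assuming a dense row-major Float32Array and a single reduced axis. The helper name
// `argMaxReference` and its signature are hypothetical.
function argMaxReference(
    data: Float32Array, dims: readonly number[], axis: number, selectLastIndex = false): Int32Array {
  const outerSize = dims.slice(0, axis).reduce((a, b) => a * b, 1);
  const axisSize = dims[axis];
  const innerSize = dims.slice(axis + 1).reduce((a, b) => a * b, 1);
  const result = new Int32Array(outerSize * innerSize);
  for (let o = 0; o < outerSize; o++) {
    for (let i = 0; i < innerSize; i++) {
      // Mirrors the shader: start from element 0 of the reduced axis, then replace on '>'
      // (or '>=' when select_last_index is set) so ties keep the first or last index.
      let best = data[o * axisSize * innerSize + i];
      let bestIndex = 0;
      for (let k = 1; k < axisSize; k++) {
        const v = data[(o * axisSize + k) * innerSize + i];
        if (selectLastIndex ? v >= best : v > best) {
          best = v;
          bestIndex = k;
        }
      }
      result[o * innerSize + i] = bestIndex;
    }
  }
  return result;
}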
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface ConcatAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], axis: number): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n const referenceIndex = 0;\n const referenceInput = inputs[referenceIndex];\n const inputType = referenceInput.dataType;\n const inputRank = referenceInput.dims.length;\n inputs.forEach((input, i) => {\n if (i === referenceIndex) {\n return;\n }\n // make sure types of all inputs match\n if (input.dataType !== inputType) {\n throw new Error('input tensors should be one type');\n }\n // make sure the dimensionality of all inputs are the same\n if (input.dims.length !== inputRank) {\n throw new Error('input tensors should have the same shape');\n }\n input.dims.forEach((dim, i) => {\n if (i !== axis && dim !== referenceInput.dims[i]) {\n throw new Error('non concat dimensions must match');\n }\n });\n });\n};\n\nconst calculateInputIndexImpl = (numberOfTensors: number, sizeInConcatAxisStr: string): string => `\n fn calculateInputIndex(index: u32) -> u32 {\n let sizeInConcatAxis = array(${sizeInConcatAxisStr});\n for (var i: u32 = 0u; i < ${numberOfTensors}; i += 1u ) {\n if (index < sizeInConcatAxis[i]) {\n return i;\n }\n }\n return ${numberOfTensors}u;\n }`;\n\nconst assignOutputData = (inputs: readonly IndicesHelper[], output: IndicesHelper) => {\n const numberOfTensors = inputs.length;\n\n const codeLines: string[] = [];\n for (let i = 0; i < numberOfTensors; ++i) {\n const returnSnippet = output.setByOffset('global_idx', inputs[i].getByIndices('indices'));\n if (numberOfTensors === 1) {\n codeLines.push(returnSnippet);\n } else if (i === 0) {\n codeLines.push(`if (inputIndex == ${i}u) { ${returnSnippet} }`);\n } else if (i === numberOfTensors - 1) {\n codeLines.push(`else { ${returnSnippet} }`);\n } else {\n codeLines.push(`else if (inputIndex == ${i}) { ${returnSnippet} }`);\n }\n }\n return codeLines.join('\\n');\n};\n\nexport const createConcatProgramInfo =\n (inputs: readonly TensorView[], adjustedAxis: number, outputShape: number[], dataType: DataType): ProgramInfo => {\n const outputSize = ShapeUtil.size(outputShape);\n\n const sizeInConcatAxis = new Array(inputs.length);\n const inputVars = new Array(inputs.length);\n\n let previousSum = 0;\n const inputDependencies: ProgramInputTensorInfoDependency[] = [];\n const inputRanks = [];\n const programUniforms: ProgramUniform[] = [{type: DataType.uint32, data: outputSize}];\n for (let i = 0; i < inputs.length; ++i) {\n previousSum += inputs[i].dims[adjustedAxis];\n sizeInConcatAxis[i] = previousSum;\n inputRanks.push(inputs[i].dims.length);\n inputVars[i] = inputVariable(`input${i}`, dataType, inputRanks[i]);\n inputDependencies.push('rank');\n programUniforms.push({type: DataType.uint32, data: sizeInConcatAxis[i]});\n }\n for (let i = 0; i < inputs.length; ++i) {\n programUniforms.push(...createTensorShapeVariables(inputs[i].dims));\n }\n 
programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const output = outputVariable('output', dataType, outputShape.length);\n const indicesAxis = output.indicesGet('indices', adjustedAxis);\n const sizeInConcatAxisStr =\n Array.from(Array(sizeInConcatAxis.length).keys()).map(i => `uniforms.sizeInConcatAxis${i}`).join(',');\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n\n ${(() => {\n shaderHelper.registerUniform('outputSize', 'u32');\n for (let i = 0; i < inputs.length; i++) {\n shaderHelper.registerUniform(`sizeInConcatAxis${i}`, 'u32');\n }\n return shaderHelper.declareVariables(...inputVars, output);\n })()}\n\n ${calculateInputIndexImpl(sizeInConcatAxis.length, sizeInConcatAxisStr)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n var indices = ${output.offsetToIndices('global_idx')};\n\n let inputIndex = calculateInputIndex(${indicesAxis});\n if (inputIndex != 0u) {\n let sizeInConcatAxis = array(${sizeInConcatAxisStr});\n ${indicesAxis} -= sizeInConcatAxis[inputIndex - 1u];\n }\n\n ${assignOutputData(inputVars, output)}\n }`;\n\n return {\n name: 'Concat',\n shaderCache: {hint: `${adjustedAxis}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms,\n }),\n getShaderSource,\n };\n };\n\nexport const concat = (context: ComputeContext, attributes: ConcatAttributes): void => {\n const inputs = context.inputs;\n const inputShape = inputs[0].dims;\n const adjustedAxis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n validateInputs(inputs, adjustedAxis);\n const outputShape = inputShape.slice();\n outputShape[adjustedAxis] =\n inputs.reduce((sum, input) => sum + (input.dims.length > adjustedAxis ? input.dims[adjustedAxis] : 0), 0);\n // 0 length tensors are valid for concat, remove them\n const nonEmptyInputs = inputs.filter(input => ShapeUtil.size(input.dims) > 0);\n context.compute(\n createConcatProgramInfo(nonEmptyInputs, adjustedAxis, outputShape, inputs[0].dataType), {inputs: nonEmptyInputs});\n};\n\nexport const parseConcatAttributes = (attributes: Record): ConcatAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n", "// Copyright (c) Microsoft Corporation. 
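// Illustrative sketch only (not part of the generated bundle or its source map): the CPU-side
// equivalent of the `calculateInputIndex` helper in the Concat shader above. Given cumulative
// sizes along the concatenation axis, an output coordinate on that axis is mapped to the input
// tensor it came from and to the coordinate within that input. Names are hypothetical.
function mapConcatIndex(cumulativeSizes: readonly number[], axisIndex: number):
    {inputIndex: number; axisIndexInInput: number} {
  // cumulativeSizes[i] is the running sum of dims[axis] over inputs 0..i, exactly like the
  // sizeInConcatAxis uniforms pushed in createConcatProgramInfo.
  for (let i = 0; i < cumulativeSizes.length; i++) {
    if (axisIndex < cumulativeSizes[i]) {
      return {inputIndex: i, axisIndexInInput: i === 0 ? axisIndex : axisIndex - cumulativeSizes[i - 1]};
    }
  }
  throw new RangeError(`axis index ${axisIndex} is out of range`);
}

// Example: concatenating tensors with axis sizes [2, 3, 4] gives cumulative sizes [2, 5, 9];
// output coordinate 6 falls into input 2 at local coordinate 1.
console.log(mapConcatIndex([2, 5, 9], 6));  // {inputIndex: 2, axisIndexInInput: 1}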
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ComputeContext, GpuDataType, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, tensorTypeToWsglValueType, UniformDataElementType, UniformsArrayType} from './common';\nimport {createConcatProgramInfo} from './concat';\n\nexport const enum AttentionQkvFormat {\n unknown, // enum value not set, or depends on qkv projection implementation details\n qkvBNSH, // for non-packed qkv, permuted\n qkvBSNH, // for non-packed qkv, not permuted, used by memory efficient attention or MultiHeadAttention\n qkvBSN3H, // for TRT fused attention, qkv are packed\n qkvBNSHqkvBS3NH, // for TRT fused causal attention, data has two formats (qkv is 3BNSH, gemm_buffer is BS3NH)\n qKvBSNHxBSN2H, // for TRT fused cross attention, kv are packed\n qkvTNH, // for memory efficient attention, qkv are not packed, and paddings are removed.\n qkvTN3H, // for TRT fused attention, qkv are packed and paddings are removed\n}\n\nexport const enum AttentionMaskType {\n none, // No mask\n mask1dKeySeqLen, // [batch_size], key sequence length\n mask1dEndStart, // [2 * batch_size] with end positions and start positions\n mask1DKeySeqLenStart, // [3 * batch_size + 2] with [key_len[0], ..., key_len[batch_size - 1], query_start[0],\n // ..., query_start[batch_size - 1], query_end[batch_size - 1], key_start[0], ...,\n // key_start[batch_size - 1], key_end[batch_size - 1]]\n mask2dDummy, // dummy mask with shape [1, 1] or [batch_size, 1]. It has same effect as no mask.\n mask2dKeyPadding, // [batch_size, total_sequence_length]\n mask3dAttention, // [batch_size, sequence_length, total_sequence_length]\n mask4dMegatron, // Megatron causal mask with shape [batch_size, 1, max_sequence_length, max_sequence_length]\n maskUnknown\n}\n\nexport interface AttentionParameters {\n batchSize: number;\n sequenceLength: number;\n pastSequenceLength: number;\n kvSequenceLength: number;\n totalSequenceLength: number;\n maxSequenceLength: number;\n inputHiddenSize: number;\n hiddenSize: number;\n vHiddenSize: number;\n headSize: number;\n vHeadSize: number;\n numHeads: number;\n isUnidirectional: boolean;\n pastPresentShareBuffer: boolean;\n maskFilterValue: number;\n maskType: AttentionMaskType;\n scale: number;\n broadcastResPosBias: boolean;\n passPastInKv: boolean;\n qkvFormat: AttentionQkvFormat;\n}\n\nexport interface AttentionAttrs {\n numHeads: number;\n isUnidirectional: number;\n maskFilterValue: number;\n scale: number;\n doRotary: number;\n qkvHiddenSizes: number[];\n pastPresentShareBuffer: boolean;\n}\n\nconst validateAttentionInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = num_heads * 
v_head_size\n\n // When past state is used, Q, K and V should have same hidden size (unless we split it into past_key and past_value).\n\n // Input shapes:\n // input (Q/K/V) : (B, S, D_i)\n // weights (Q/K/V) : (D_i, D + D + D_v)\n // bias (Q/K/V) : (D + D + D_v)\n // mask_index : see below\n // past (K/V) : (2, B, N, P, H) or NULL\n // relative_position_bias : (B, N, S, T) or NULL\n\n // For mask_index, the following shapes are supported:\n // NULL, (B, 1), (1, 1)\n // (B), (2 * B), (3 * B + 2)\n // (B, T)\n // (B, S, T)\n // (B, 1, M, M)\n //\n // When a model is pruned (like some attention heads are removed in Q/K/V), input_hidden_size could be larger\n // than hidden dimension of Q, K and V.\n\n const input = inputs[0];\n const weights = inputs[1];\n const bias = inputs[2];\n const maskIndex = inputs[3];\n const past = inputs[4];\n const relativePositionBias = inputs[5];\n\n if (past && relativePositionBias) {\n throw new Error('Attention cannot have both past and relative_position_bias');\n }\n\n if (input.dims.length !== 3) {\n throw new Error('Input \"input\" must have 3 dimensions');\n }\n\n const batchSize = input.dims[0];\n const sequenceLength = input.dims[1];\n const inputHiddenSize = input.dims[2];\n\n if (bias.dims.length !== 1) {\n throw new Error('Input \"bias\" is expected to have 1 dimensions');\n }\n\n if (weights.dims.length !== 2) {\n throw new Error('Input \"weights\" is expected to have 2 dimensions');\n }\n\n if (weights.dims[0] !== inputHiddenSize) {\n throw new Error('Input 1 dimension 0 should have same length as dimension 2 of input 0');\n }\n\n if (bias.dims[0] !== weights.dims[1]) {\n throw new Error('Input \"bias\" dimension 0 should have same length as dimension 1 of input \"weights\"');\n }\n\n let qHiddenSize = bias.dims[0] / 3;\n let kHiddenSize = qHiddenSize;\n let vHiddenSize = kHiddenSize;\n if (attributes.qkvHiddenSizes.length > 0) {\n if (attributes.qkvHiddenSizes.length !== 3) {\n throw new Error('qkv_hidden_sizes attribute should have 3 elements');\n }\n for (const sz of attributes.qkvHiddenSizes) {\n if (sz % attributes.numHeads !== 0) {\n throw new Error('qkv_hidden_sizes should be divisible by num_heads');\n }\n }\n\n qHiddenSize = attributes.qkvHiddenSizes[0];\n kHiddenSize = attributes.qkvHiddenSizes[1];\n vHiddenSize = attributes.qkvHiddenSizes[2];\n }\n\n const kvSequenceLength = sequenceLength;\n\n if (qHiddenSize !== kHiddenSize) {\n throw new Error('qkv_hidden_sizes first element should be same as the second');\n }\n\n if (bias.dims[0] !== qHiddenSize + kHiddenSize + vHiddenSize) {\n throw new Error('Input \"bias\" dimension 0 should have same length as sum of Q/K/V hidden sizes');\n }\n\n let pastSequenceLength = 0;\n if (past) {\n if (kHiddenSize !== vHiddenSize) {\n throw new Error('Input \"past\" expect k_hidden_size == v_hidden_size');\n }\n if (past.dims.length !== 5) {\n throw new Error('Input \"past\" must have 5 dimensions');\n }\n if (past.dims[0] !== 2) {\n throw new Error('Input \"past\" first dimension must be 2');\n }\n if (past.dims[1] !== batchSize) {\n throw new Error('Input \"past\" second dimension must be batch_size');\n }\n if (past.dims[2] !== attributes.numHeads) {\n throw new Error('Input \"past\" third dimension must be num_heads');\n }\n if (past.dims[4] !== kHiddenSize / attributes.numHeads) {\n throw new Error('Input \"past\" fifth dimension must be k_hidden_size / num_heads');\n }\n\n if (!attributes.pastPresentShareBuffer) {\n pastSequenceLength = past.dims[3];\n }\n // TODO: handle past_seq_len\n }\n\n 
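// Illustrative sketch only (not part of the generated bundle or its source map): how the
// validation above derives per-head sizes from the fused QKV bias, assuming the common case
// where the qkv_hidden_sizes attribute is empty and Q/K/V share one hidden size. The helper
// name `deriveAttentionSizes` is hypothetical.
function deriveAttentionSizes(biasLength: number, numHeads: number, qkvHiddenSizes: number[]) {
  // With no explicit attribute, the bias has length D + D + D_v with D == D_v, so each third is D.
  const [qHiddenSize, kHiddenSize, vHiddenSize] = qkvHiddenSizes.length === 3 ?
      qkvHiddenSizes :
      [biasLength / 3, biasLength / 3, biasLength / 3];
  return {
    hiddenSize: qHiddenSize,
    kHiddenSize,
    vHiddenSize,
    headSize: Math.floor(qHiddenSize / numHeads),   // H in the abbreviation list above
    vHeadSize: Math.floor(vHiddenSize / numHeads),  // H_v
  };
}

// Example: a 12-head model with bias length 3 * 768 has hiddenSize 768 and headSize 64.
console.log(deriveAttentionSizes(2304, 12, []));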
const totalSequenceLength = kvSequenceLength + pastSequenceLength;\n const maxSequenceLength = -1;\n\n const maskType = AttentionMaskType.none;\n if (maskIndex) {\n // maskType = AttentionMaskType.MASK_UNKNOWN;\n // TODO: handle mask\n throw new Error('Mask not supported');\n }\n\n if (past) {\n throw new Error('past is not supported');\n }\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize,\n hiddenSize: qHiddenSize,\n vHiddenSize,\n headSize: Math.floor(qHiddenSize / attributes.numHeads),\n vHeadSize: Math.floor(vHiddenSize / attributes.numHeads),\n numHeads: attributes.numHeads,\n isUnidirectional: false,\n pastPresentShareBuffer: false,\n maskFilterValue: attributes.maskFilterValue,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias: false,\n passPastInKv: false,\n qkvFormat: AttentionQkvFormat.qkvBNSH,\n };\n};\n\nconst createInPlaceSoftmaxProgramInfo = (_context: ComputeContext, input: TensorView, n: number, d: number) => {\n const components = getMaxComponents(d);\n let WG = 64;\n const dComp = d / components;\n if (dComp < WG) {\n WG = 1;\n } else if (dComp / 8 < 64) {\n WG = Math.ceil(dComp / 8);\n }\n const elementsPerThread = Math.ceil(d / components / WG);\n const programUniforms: ProgramUniform[] = [\n {type: input.dataType, data: 1 / d}, {type: DataType.uint32, data: dComp},\n {type: DataType.uint32, data: elementsPerThread}\n ];\n const dataType = tensorTypeToWsglStorageType(input.dataType, components);\n const f32Type = tensorTypeToWsglValueType(DataType.float, components);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const inputHelper = outputVariable('x', input.dataType, input.dims, components);\n const elemValueType = tensorTypeToWsglValueType(input.dataType);\n const uniforms: UniformsArrayType = [\n {name: 'd_inv', type: elemValueType as UniformDataElementType}, {name: 'd_comp', type: 'u32'},\n {name: 'elements_per_thread', type: 'u32'}\n ];\n\n return `\n var thread_max: array;\n var thread_sum: array;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(inputHelper)}\n ${shaderHelper.mainStart([\n WG, 1, 1\n ])}\n let local_offset = local_idx * uniforms.elements_per_thread;\n let offset = workgroup_id.x * uniforms.d_comp + local_offset;\n\n var thread_max_vector = ${f32Type}(-3.402823e+38f);\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n thread_max_vector = max(${f32Type}(x[offset + i]), thread_max_vector);\n }\n thread_max[local_idx] = ${(() => {\n switch (components) {\n case 1:\n return 'thread_max_vector';\n case 2:\n return 'max(thread_max_vector.x, thread_max_vector.y)';\n case 4:\n return 'max(max(thread_max_vector.x, thread_max_vector.y), max(thread_max_vector.z, thread_max_vector.w))';\n default:\n throw new Error(`Unsupported components: ${components}`);\n }\n })()};\n workgroupBarrier();\n\n var max_value = f32(-3.402823e+38f);\n for (var i = 0u; i < ${WG}; i++) {\n max_value = max(thread_max[i], max_value);\n }\n\n var sum_vector = ${f32Type}(0);\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n sum_vector += exp(${f32Type}(x[offset + i]) - max_value);\n }\n thread_sum[local_idx] = ${(() => {\n switch (components) {\n case 1:\n return 'sum_vector';\n case 2:\n return 'sum_vector.x + sum_vector.y';\n case 4:\n return 'sum_vector.x + sum_vector.y + sum_vector.z + sum_vector.w';\n default:\n throw new 
Error(`Unsupported components: ${components}`);\n }\n })()};\n workgroupBarrier();\n\n var sum: f32 = 0;\n for (var i = 0u; i < ${WG}; i++) {\n sum += thread_sum[i];\n }\n\n if (sum == 0) {\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n x[offset + i] = ${inputHelper.type.value}(uniforms.d_inv);\n }\n } else {\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n var f32input = ${f32Type}(x[offset + i]);\n x[offset + i] = ${inputHelper.type.value}(exp(f32input - max_value) / sum);\n }\n }\n }`;\n };\n\n return {\n name: 'AttentionProbsSoftmax',\n shaderCache: {hint: `${WG};${dataType};${components}`},\n getShaderSource,\n getRunData: () => ({outputs: [], dispatchGroup: {x: n}, programUniforms}),\n };\n};\n\nconst createAttentionProbsProgramInfo =\n (_context: ComputeContext, q: TensorView, key: TensorView, relativePositionBias: TensorView|undefined,\n parameters: AttentionParameters, attributes: AttentionAttrs, pastSequenceLength: number) => {\n const totalSequenceLength = pastSequenceLength + parameters.kvSequenceLength;\n const probsShape = [parameters.batchSize, parameters.numHeads, parameters.sequenceLength, totalSequenceLength];\n\n // TODO: handle mask\n\n const alpha = attributes.scale === 0 ? 1.0 / Math.sqrt(parameters.headSize) : attributes.scale;\n const components = getMaxComponents(parameters.headSize);\n const vectorizedHeadSize = parameters.headSize / components;\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(totalSequenceLength / TILE_SIZE),\n y: Math.ceil(parameters.sequenceLength / TILE_SIZE),\n z: parameters.batchSize * parameters.numHeads\n };\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: parameters.sequenceLength}, {type: DataType.uint32, data: vectorizedHeadSize},\n {type: DataType.uint32, data: totalSequenceLength}, {type: DataType.uint32, data: parameters.numHeads},\n {type: DataType.float, data: alpha}\n ];\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n if (relativePositionBias) {\n inputDependencies.push('rank');\n programUniforms.push(...createTensorShapeVariables(relativePositionBias.dims));\n }\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const qInput = inputVariable('q', q.dataType, q.dims, components);\n const kInput = inputVariable('key', key.dataType, key.dims, components);\n const inputVars = [qInput, kInput];\n const relativePositionBiasInput = relativePositionBias ?\n inputVariable('relative_position_bias', relativePositionBias.dataType, relativePositionBias.dims.length) :\n undefined;\n if (relativePositionBiasInput) {\n inputVars.push(relativePositionBiasInput);\n }\n const output = outputVariable('output', q.dataType, probsShape);\n // const dataType = tensorTypeToWsglStorageType(q.dataType);\n const f32Type = tensorTypeToWsglValueType(DataType.float, components);\n\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'num_heads', type: 'u32'}, {name: 'alpha', type: 'f32' as UniformDataElementType}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n\n var tileQ: array<${qInput.type.storage}, ${TILE_SIZE * TILE_SIZE}>;\n var tileK: array<${qInput.type.storage}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, output)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n // x holds the N and y holds the M\n let 
headIdx = workgroup_id.z;\n let m = workgroup_id.y * TILE_SIZE;\n let n = workgroup_id.x * TILE_SIZE;\n let qOffset = uniforms.M * uniforms.K * headIdx + m * uniforms.K;\n let kOffset = uniforms.N * uniforms.K * headIdx + n * uniforms.K;\n\n var value = ${f32Type}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (global_id.y < uniforms.M && w + local_id.x < uniforms.K) {\n tileQ[TILE_SIZE * local_id.y + local_id.x] = q[qOffset + local_id.y * uniforms.K + w + local_id.x];\n }\n if (n + local_id.y < uniforms.N && w + local_id.x < uniforms.K) {\n tileK[TILE_SIZE * local_id.y + local_id.x] = key[kOffset + local_id.y * uniforms.K + w + local_id.x];\n }\n workgroupBarrier();\n\n for (var k: u32 = 0u; k < TILE_SIZE && w+k < uniforms.K; k++) {\n value += ${f32Type}(tileQ[TILE_SIZE * local_id.y + k] * tileK[TILE_SIZE * local_id.x + k]);\n }\n\n workgroupBarrier();\n }\n\n let headOffset = headIdx * uniforms.M * uniforms.N;\n if (global_id.y < uniforms.M && global_id.x < uniforms.N) {\n let outputIdx = headOffset + global_id.y * uniforms.N + global_id.x;\n var sum: f32 = ${(() => {\n switch (components) {\n case 1:\n return 'value';\n case 2:\n return 'value.x + value.y';\n case 4:\n return 'value.x + value.y + value.z + value.w';\n default:\n throw new Error(`Unsupported components: ${components}`);\n }\n })()};\n\n ${(() => {\n if (relativePositionBiasInput) {\n return `\n let batch = workgroup_id.z / uniforms.num_heads;\n let head = workgroup_id.z % uniforms.num_heads;\n var indices = ${relativePositionBiasInput.type.indices}(batch, head, global_id.y, global_id.x);\n output[outputIdx] = ${output.type.value}(sum * uniforms.alpha) + ${\n relativePositionBiasInput.getByIndices('indices')};`;\n }\n return `output[outputIdx] = ${output.type.value} (sum * uniforms.alpha);`;\n })()}\n }\n }`;\n };\n return {\n name: 'AttentionProbs',\n shaderCache: {hint: `${components}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: probsShape, dataType: q.dataType, gpuDataType: GpuDataType.default}],\n dispatchGroup: dispatch,\n programUniforms\n }),\n getShaderSource,\n };\n };\n\n\nconst createVxAttentionScoreProgramInfo =\n (_context: ComputeContext, probs: TensorView, v: TensorView, params: AttentionParameters,\n pastSequenceLength: number) => {\n const totalSequenceLength = pastSequenceLength + params.kvSequenceLength;\n const outputShape = [params.batchSize, params.sequenceLength, params.vHiddenSize];\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(params.vHeadSize / TILE_SIZE),\n y: Math.ceil(params.sequenceLength / TILE_SIZE),\n z: params.batchSize * params.numHeads\n };\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: params.sequenceLength}, {type: DataType.uint32, data: totalSequenceLength},\n {type: DataType.uint32, data: params.vHeadSize}, {type: DataType.uint32, data: params.numHeads},\n {type: DataType.uint32, data: params.vHiddenSize}\n ];\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const probsHelper = inputVariable('probs', probs.dataType, probs.dims);\n const vHelper = inputVariable('v', v.dataType, v.dims);\n const output = outputVariable('output', probs.dataType, outputShape);\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'num_heads', type: 'u32'}, {name: 'v_hidden_size', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n var 
tileQ: array<${probsHelper.type.value}, ${TILE_SIZE * TILE_SIZE}>;\n var tileK: array<${probsHelper.type.value}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(probsHelper, vHelper, output)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n let headIdx = workgroup_id.z;\n let m = global_id.y;\n let n = global_id.x;\n\n let offsetA = headIdx * (uniforms.M * uniforms.K) + m * uniforms.K;\n let offsetB = headIdx * (uniforms.N * uniforms.K) + n;\n\n var value = ${probsHelper.type.storage}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (m < uniforms.M && w + local_id.x < uniforms.K) {\n tileQ[TILE_SIZE * local_id.y + local_id.x] = probs[offsetA + w + local_id.x];\n }\n if (n < uniforms.N && w + local_id.y < uniforms.K) {\n tileK[TILE_SIZE * local_id.y + local_id.x] = v[offsetB + (w + local_id.y) * uniforms.N];\n }\n workgroupBarrier();\n for (var k: u32 = 0u; k < TILE_SIZE && w+k < uniforms.K; k++) {\n value += tileQ[TILE_SIZE * local_id.y + k] * tileK[TILE_SIZE * k + local_id.x];\n }\n workgroupBarrier();\n }\n\n // we need to transpose output from BNSH_v to BSND_v\n let batchIdx = workgroup_id.z / uniforms.num_heads;\n let currentBatchHeadNumber = workgroup_id.z % uniforms.num_heads;\n if (m < uniforms.M && n < uniforms.N) {\n let outputIdx = batchIdx * uniforms.M * uniforms.v_hidden_size + m * uniforms.v_hidden_size\n + currentBatchHeadNumber * uniforms.N + n;\n output[outputIdx] = value;\n }\n }`;\n };\n\n return {\n name: 'AttentionScore',\n shaderCache: {inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: probs.dataType, gpuDataType: GpuDataType.default}],\n dispatchGroup: dispatch,\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nexport const applyAttention =\n (context: ComputeContext, q: TensorView, k: TensorView, v: TensorView, _maskIndex: TensorView|undefined,\n _past: TensorView|undefined, pastKey: TensorView|undefined, pastValue: TensorView|undefined,\n relativePositionBias: TensorView|undefined, parameters: AttentionParameters, attributes: AttentionAttrs) => {\n const outputPresentKey = context.outputCount > 1;\n const outputPresentValue = context.outputCount > 2;\n const pastSequenceLength = (outputPresentKey && outputPresentValue) ? parameters.pastSequenceLength : 0;\n const totalSequenceLength = pastSequenceLength + parameters.kvSequenceLength;\n // Concatinate pastKey and K to produce presentKey.\n const presentKeyShape = [parameters.batchSize, parameters.numHeads, totalSequenceLength, parameters.headSize];\n const concatKeyInputs = pastKey ? [pastKey, k] : [k];\n const key = outputPresentKey ? context.compute(\n createConcatProgramInfo(concatKeyInputs, 2, presentKeyShape, k.dataType),\n {inputs: concatKeyInputs, outputs: [1]})[0] :\n k;\n\n // Concatinate pastValue and V to produce presentValue.\n const presentValueShape = [parameters.batchSize, parameters.numHeads, totalSequenceLength, parameters.headSize];\n const concatValueInputs = pastValue ? 
[pastValue, v] : [v];\n const value = outputPresentValue ?\n context.compute(\n createConcatProgramInfo(concatValueInputs, 2, presentValueShape, v.dataType),\n {inputs: concatValueInputs, outputs: [2]})[0] :\n v;\n const inputsK = [q, key];\n if (relativePositionBias) {\n inputsK.push(relativePositionBias);\n }\n\n // Run AttentionProbs\n const probs = context.compute(\n createAttentionProbsProgramInfo(\n context, q, key, relativePositionBias, parameters, attributes, pastSequenceLength),\n {inputs: inputsK, outputs: [-1]})[0];\n\n // Run Softmax\n context.compute(\n createInPlaceSoftmaxProgramInfo(\n context, probs, parameters.batchSize * parameters.numHeads * parameters.sequenceLength,\n totalSequenceLength),\n {inputs: [probs], outputs: []});\n\n // Run AttrionScore\n const inputsV = [probs, value];\n context.compute(\n createVxAttentionScoreProgramInfo(context, probs, value, parameters, pastSequenceLength),\n {inputs: inputsV, outputs: [0]});\n };\n\nconst prepare = (context: ComputeContext, parameters: AttentionParameters) => {\n const outputShape = [\n parameters.batchSize,\n parameters.numHeads,\n parameters.sequenceLength,\n parameters.headSize,\n ];\n const M = parameters.sequenceLength;\n const K = parameters.inputHiddenSize;\n const N = parameters.headSize;\n const TILE_SIZE = 12;\n const dispatch = {\n x: Math.ceil(parameters.headSize / TILE_SIZE),\n y: Math.ceil(parameters.sequenceLength / TILE_SIZE),\n z: parameters.batchSize * parameters.numHeads\n };\n const inputs = [context.inputs[0], context.inputs[1], context.inputs[2]];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: M}, {type: DataType.uint32, data: K}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: parameters.numHeads}, {type: DataType.uint32, data: parameters.headSize},\n {type: DataType.uint32, data: parameters.hiddenSize},\n {type: DataType.uint32, data: parameters.hiddenSize + parameters.hiddenSize + parameters.vHiddenSize}\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const outputQ = outputVariable('output_q', inputs[0].dataType, outputShape);\n const outputK = outputVariable('output_k', inputs[0].dataType, outputShape);\n const outputV = outputVariable('output_v', inputs[0].dataType, outputShape);\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims);\n const weight = inputVariable('weight', inputs[1].dataType, inputs[1].dims);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims);\n const dataType = input.type.storage;\n\n const uniforms: UniformsArrayType = [\n {name: 'M', type: 'u32'}, {name: 'K', type: 'u32'}, {name: 'N', type: 'u32'}, {name: 'num_heads', type: 'u32'},\n {name: 'head_size', type: 'u32'}, {name: 'hidden_size', type: 'u32'}, {name: 'ldb', type: 'u32'}\n ];\n return `\n const TILE_SIZE = ${TILE_SIZE}u;\n var tileInput: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightQ: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightK: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n var tileWeightV: array<${dataType}, ${TILE_SIZE * TILE_SIZE}>;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, weight, bias, outputQ, outputK, outputV)}\n ${shaderHelper.mainStart([\n TILE_SIZE, TILE_SIZE, 1\n ])}\n let batchIndex = workgroup_id.z / uniforms.num_heads;\n let headNumber = workgroup_id.z % uniforms.num_heads;\n let m = global_id.y;\n let n = global_id.x;\n\n let inputOffset = batchIndex * (uniforms.M * uniforms.K) + m * uniforms.K;\n let biasOffsetQ = 
headNumber * uniforms.head_size;\n let biasOffsetK = uniforms.hidden_size + biasOffsetQ;\n let biasOffsetV = uniforms.hidden_size + biasOffsetK;\n\n var valueQ = ${dataType}(0);\n var valueK = ${dataType}(0);\n var valueV = ${dataType}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (m < uniforms.M && w + local_id.x < uniforms.K) {\n tileInput[TILE_SIZE * local_id.y + local_id.x] = input[inputOffset + w + local_id.x];\n }\n if (n < uniforms.N && w + local_id.y < uniforms.K) {\n let offset = n + (w + local_id.y) * uniforms.ldb;\n tileWeightQ[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetQ + offset];\n tileWeightK[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetK + offset];\n tileWeightV[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetV + offset];\n }\n workgroupBarrier();\n for (var k: u32 = 0u; k ({\n outputs: [\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n {dims: outputShape, dataType: context.inputs[0].dataType, gpuDataType: GpuDataType.default},\n ],\n dispatchGroup: dispatch,\n programUniforms\n }),\n getShaderSource,\n },\n {inputs, outputs: [-1, -1, -1]});\n};\n\nexport const attention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateAttentionInputs(context.inputs, attributes);\n\n const [q, k, v] = prepare(context, params);\n\n return applyAttention(\n context, q, k, v, context.inputs[4], undefined, undefined, undefined, context.inputs[5], params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface BatchNormAttributes extends AttributeWithCacheKey {\n readonly epsilon: number;\n readonly momentum: number;\n readonly spatial: boolean;\n readonly trainingMode: boolean;\n readonly format: 'NHWC'|'NCHW';\n readonly outputCount: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: BatchNormAttributes): void => {\n if (!inputs || inputs.length !== 5) {\n throw new Error('BatchNormalization requires 5 inputs');\n }\n\n const checkShapeEqual = (actual: readonly number[], expected: readonly number[], message: string) => {\n const r = expected.length;\n if (r !== actual.length) {\n throw new Error(`${message}: num dimensions != ${r}`);\n }\n expected.forEach((v, i) => {\n if (v !== actual[i]) {\n throw new Error(`${message}: dim[${i}] do not match`);\n }\n });\n };\n\n if (inputs[0].dims.length > 1) {\n const shape = attributes.format === 'NHWC' ?\n (attributes.spatial ? inputs[0].dims.slice(-1) :\n inputs[0].dims.slice(-1).concat(inputs[0].dims.slice(1, inputs[0].dims.length - 1))) :\n inputs[0].dims.slice(1, attributes.spatial ? 
2 : undefined);\n checkShapeEqual(inputs[1].dims, shape, 'Invalid input scale');\n checkShapeEqual(inputs[2].dims, shape, 'Invalid input B');\n checkShapeEqual(inputs[3].dims, shape, 'Invalid input mean');\n checkShapeEqual(inputs[4].dims, shape, 'Invalid input var');\n } else {\n checkShapeEqual(inputs[1].dims, [1], 'Invalid input scale');\n checkShapeEqual(inputs[2].dims, [1], 'Invalid input B');\n checkShapeEqual(inputs[3].dims, [1], 'Invalid input mean');\n checkShapeEqual(inputs[4].dims, [1], 'Invalid input var');\n }\n};\n\nconst createBatchNormInferenceProgramInfo =\n (inputs: readonly TensorView[], attributes: BatchNormAttributes): ProgramInfo => {\n const {epsilon, spatial, format} = attributes;\n const yShape = inputs[0].dims;\n const components = spatial ? getMaxComponents(yShape[yShape.length - 1]) : 1;\n const cComponents = format === 'NHWC' && yShape.length > 1 ? components : 1;\n const outputSize = ShapeUtil.size(yShape) / components;\n // Only support uniforms for opset version >= 9 (spatial = true).\n const useShapesUniforms = spatial;\n const shapeOrRank = useShapesUniforms ? yShape.length : yShape;\n const x = inputVariable('x', inputs[0].dataType, inputs[0].dims, components);\n const scale = inputVariable('scale', inputs[1].dataType, inputs[1].dims, cComponents);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims, cComponents);\n const inputMean = inputVariable('inputMean', inputs[3].dataType, inputs[3].dims, cComponents);\n const inputVar = inputVariable('inputVar', inputs[4].dataType, inputs[4].dims, cComponents);\n const y = outputVariable('y', inputs[0].dataType, shapeOrRank, components);\n // TODO: support inputs with different data type. Current we need to make sure all inputs have the same data type.\n // Otherwise, the shader compilation will fail.\n const calcCOffset = (): string => {\n let cOffset = '';\n if (spatial) {\n cOffset = `let cOffset = ${\n yShape.length === 1 ? '0u' :\n format === 'NHWC' ? `outputIndices[${yShape.length - 1}] / ${components}` :\n 'outputIndices[1]'};`;\n } else {\n if (format === 'NCHW') {\n cOffset = `\n ${y.indicesSet('outputIndices', '0', '0')}\n let cOffset = ${y.indicesToOffset('outputIndices')};`;\n } else {\n // update C channel.\n cOffset = `var cIndices = ${scale.type.indices}(0);\n cIndices[0] = outputIndices[${yShape.length - 1}];`;\n // update D1 x ... x Dn channels.\n for (let i = 1; i < scale.rank; i++) {\n cOffset += `cIndices[${i}] = outputIndices[${i}];`;\n }\n cOffset += `let cOffset = ${scale.indicesToOffset('cIndices')};`;\n }\n }\n return cOffset;\n };\n const getInferenceModeShaderSource = (helper: ShaderHelper) => `\n const epsilon = ${epsilon};\n ${helper.registerUniform('outputSize', 'u32').declareVariables(x, scale, bias, inputMean, inputVar, y)}\n ${helper.mainStart()}\n ${helper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var outputIndices = ${y.offsetToIndices(`global_idx * ${components}`)};\n ${calcCOffset()}\n let scale = ${scale.getByOffset('cOffset')};\n let bias = ${bias.getByOffset('cOffset')};\n let inputMean = ${inputMean.getByOffset('cOffset')};\n let inputVar = ${inputVar.getByOffset('cOffset')};\n let x = ${x.getByOffset('global_idx')};\n let value = (x - inputMean) * inverseSqrt(inputVar + epsilon) * scale + bias;\n ${y.setByOffset('global_idx', 'value')}\n }`;\n return {\n name: 'BatchNormalization',\n shaderCache: {\n hint: `${attributes.epsilon}_${attributes.format}_${spatial}_${components}`,\n inputDependencies: useShapesUniforms ? 
['rank', 'type', 'type', 'type', 'type'] : undefined,\n },\n getShaderSource: getInferenceModeShaderSource,\n getRunData: () => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: useShapesUniforms ?\n [\n {type: DataType.uint32, data: outputSize},\n ...createTensorShapeVariables(yShape),\n ] :\n [\n {type: DataType.uint32, data: outputSize},\n ],\n }),\n };\n };\n\nexport const parseBatchNormAttributes = (attributes: Record): BatchNormAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n\nexport const batchNorm = (context: ComputeContext, attributes: Record): void => {\n const {inputs, outputCount} = context;\n const updatedAttributes = parseBatchNormAttributes({...attributes, outputCount});\n if (env.webgpu.validateInputContent) {\n validateInputs(inputs, updatedAttributes);\n }\n if (attributes.trainingMode) {\n throw new Error('BatchNormalization trainingMode is not supported yet.');\n } else {\n context.compute(createBatchNormInferenceProgramInfo(inputs, updatedAttributes));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (inputs[0].dims.length !== 3) {\n throw new Error('input should have 3 dimensions');\n }\n\n if (![320, 640, 1280].includes(inputs[0].dims[2])) {\n throw new Error('number of channels should be 320, 640 or 1280');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('bias is expected to have 1 dimensions');\n }\n\n if (inputs[0].dims[2] !== inputs[1].dims[0]) {\n throw new Error('last dimension of input and bias are not the same');\n }\n};\n\nconst createBiasAddProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const outputShape = inputs[0].dims;\n\n const channels = inputs[0].dims[2];\n // since channel number can be only 320/640/1280, it's always divisable by 4\n const outputSize = ShapeUtil.size(outputShape) / 4;\n\n const dataType = inputs[0].dataType;\n const input = inputVariable('input', dataType, outputShape, 4);\n const bias = inputVariable('bias', dataType, [channels], 4);\n const residual = inputVariable('residual', dataType, outputShape, 4);\n const output = outputVariable('output', dataType, outputShape, 4);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const channels = ${channels}u / 4;\n ${shaderHelper.declareVariables(input, bias, residual, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n let value = ${input.getByOffset('global_idx')}\n + ${bias.getByOffset('global_idx % channels')} + ${residual.getByOffset('global_idx')};\n ${output.setByOffset('global_idx', 'value')}\n }`;\n\n return {\n name: 'BiasAdd',\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)}\n }),\n getShaderSource,\n };\n};\n\nexport const biasAdd = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createBiasAddProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. 
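// Illustrative sketch only (not part of the generated bundle or its source map): a scalar
// reference for the BiasAdd program above. The shader works on vec4 elements because the
// channel count (320/640/1280) is always divisible by 4; per element the math is simply
// input + bias (broadcast over the last axis) + residual. The helper name is hypothetical.
function biasAddReference(input: Float32Array, bias: Float32Array, residual: Float32Array, channels: number):
    Float32Array {
  const output = new Float32Array(input.length);
  for (let i = 0; i < input.length; i++) {
    // global_idx % channels in the shader selects the bias element for the current channel
    // (there it is expressed in vec4 units, i.e. modulo channels / 4).
    output[i] = input[i] + bias[i % channels] + residual[i];
  }
  return output;
}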
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {MAX_CLIP, MIN_CLIP, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglValueType} from './common';\n\ntype BuiltinFunctionName = string;\ntype ElementwiseCustomExpression = (expression: string) => string;\ntype ElementwiseFunctionCall = BuiltinFunctionName|ElementwiseCustomExpression;\n\nconst createElementwiseProgramShader =\n (shaderHelper: ShaderHelper, datasize: number, inputDataType: number, outputDataType: number,\n funcCall: ElementwiseFunctionCall, additionalImplementation?: string): string => {\n const vecSize = Math.ceil(datasize / 4);\n\n let expression = '';\n if (typeof funcCall === 'string') {\n expression = `${funcCall}(a)`;\n } else {\n expression = funcCall('a');\n }\n\n const input = inputVariable('inputData', inputDataType, [vecSize], 4);\n const output = outputVariable('outputData', outputDataType, [vecSize], 4);\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(input, output)}\n\n ${additionalImplementation ?? ''}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n\n let a = ${input.getByOffset('global_idx')};\n ${output.setByOffset('global_idx', expression)}\n }`;\n };\n\nconst createElementwiseProgramInfo =\n (input: TensorView, name: string, funcCall: ElementwiseFunctionCall, additionalImplementation?: string,\n cacheKey?: string, outputDataType: number = input.dataType): ProgramInfo => ({\n name,\n shaderCache: {hint: cacheKey, inputDependencies: ['type']},\n getShaderSource: shaderHelper => createElementwiseProgramShader(\n shaderHelper, ShapeUtil.size(input.dims), input.dataType, outputDataType, funcCall, additionalImplementation),\n getRunData: (inputTensors) => ({\n outputs: [{dims: input.dims, dataType: outputDataType}],\n dispatchGroup:\n {x: Math.ceil(ShapeUtil.size(inputTensors[0].dims) / 64 /* workgroup size */ / 4 /* vec size */)},\n programUniforms: [\n {type: DataType.uint32, data: Math.ceil(ShapeUtil.size(input.dims) / 4)},\n ],\n })\n });\n\nexport const abs = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Abs', 'abs'));\n};\n\nexport const acos = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Acos', 'acos'));\n};\n\nexport const acosh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Acosh', 'acosh'));\n};\n\nexport const asin = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Asin', 'asin'));\n};\n\nexport const asinh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Asinh', 'asinh'));\n};\n\nexport const atan = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Atan', 'atan'));\n};\nexport const atanh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Atanh', 'atanh'));\n};\n\nexport interface CastAttributes extends AttributeWithCacheKey {\n readonly to: number;\n readonly saturate?: boolean;\n}\n\nexport const 
parseCastAttributes = (attributes: Record<string, unknown>): CastAttributes =>\n createAttributeWithCacheKey(attributes as {to: number});\n\n\nexport const cast = (context: ComputeContext, attributes: CastAttributes): void => {\n let func: ElementwiseFunctionCall;\n switch (attributes.to) {\n case DataType.float16:\n func = 'vec4<f16>';\n break;\n case DataType.float:\n func = 'vec4<f32>';\n break;\n case DataType.uint32:\n func = 'vec4<u32>';\n break;\n case DataType.int32:\n func = 'vec4<i32>';\n break;\n case DataType.bool:\n func = 'vec4<bool>';\n break;\n default:\n throw new RangeError(`not supported type (specified in attribute 'to' from 'Cast' operator): ${attributes.to}`);\n }\n context.compute(\n createElementwiseProgramInfo(context.inputs[0], 'Cast', func, undefined, attributes.cacheKey, attributes.to));\n};\n\nexport interface ClipAttributes extends AttributeWithCacheKey {\n readonly min: number;\n readonly max: number;\n}\n\nconst generateClipAttributesFromInputs = (inputs: readonly TensorView[]): ClipAttributes => {\n const min = (inputs.length >= 2 && inputs[1].data !== 0) ? inputs[1].getFloat32Array()[0] : MIN_CLIP;\n const max = (inputs.length >= 3 && inputs[2].data !== 0) ? inputs[2].getFloat32Array()[0] : MAX_CLIP;\n return createAttributeWithCacheKey({min, max});\n};\n\nexport const clip = (context: ComputeContext, clipAttributes: ClipAttributes): void => {\n const attributes = context.inputs.length === 1 ? clipAttributes : generateClipAttributesFromInputs(context.inputs);\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(\n createElementwiseProgramInfo(\n context.inputs[0], 'Clip', a => `clamp(${a}, clip_min_, clip_max_)`, `\n const clip_min_: vec4<${dataType}> = vec4(${dataType}(${attributes.min}));\n const clip_max_: vec4<${dataType}> = vec4(${dataType}(${attributes.max}));\n`,\n attributes.cacheKey),\n {inputs: [0]});\n};\n\nexport const ceil = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Ceil', 'ceil'));\n};\n\nexport const cos = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Cos', 'cos'));\n};\n\nexport const cosh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Cosh', 'cosh'));\n};\n\nexport interface AlphaAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n}\n\nexport const parseAlphaAttributes = (attributes: Record<string, unknown>): AlphaAttributes =>\n createAttributeWithCacheKey(attributes as {alpha: number});\n\nexport const elu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Elu', a => `elu_vf32(${a})`, `\n const elu_alpha_ = ${dataType}(${attributes.alpha});\n\n fn elu_f32(a: ${dataType}) -> ${dataType} {\n return select((exp(a) - 1.0) * elu_alpha_, a, a >= 0.0);\n }\n\n fn elu_vf32(v: vec4<${dataType}>) -> vec4<${dataType}> {\n return vec4(elu_f32(v.x), elu_f32(v.y), elu_f32(v.z), elu_f32(v.w));\n }`,\n attributes.cacheKey));\n};\n\nexport const erfImpl = (varType = 'f32') => `\nconst r0: ${varType} = 0.3275911;\nconst r1: ${varType} = 0.254829592;\nconst r2: ${varType} = -0.284496736;\nconst r3: ${varType} = 1.421413741;\nconst r4: ${varType} = -1.453152027;\nconst r5: ${varType} = 1.061405429;\n\nfn erf_vf32(v: vec4<${varType}>) -> vec4<${varType}> {\n let absv = abs(v);\n let x = 1.0 / (1.0 + r0 * absv);\n return sign(v) 
* (1.0 - ((((r5 * x + r4) * x + r3) * x + r2) * x + r1) * x * exp(-absv * absv));\n}`;\n\nexport const erf = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Erf', a => `erf_vf32(${a})`, erfImpl(dataType)));\n};\n\nexport const exp = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Exp', 'exp'));\n};\n\nexport const floor = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Floor', 'floor'));\n};\n\nexport const gelu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Gelu', a => `0.5 * ${a} * (1.0 + erf_vf32(${a} * 0.7071067811865475))`, erfImpl(dataType)));\n};\n\nexport const leakyRelu = (context: ComputeContext, attributes: AlphaAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'LeakyRelu', a => `select(leaky_relu_alpha_ * ${a}, ${a}, ${a} >= vec4<${dataType}>(0.0))`,\n `const leaky_relu_alpha_ = ${dataType}(${attributes.alpha});`, attributes.cacheKey));\n};\n\nexport const not = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Not', a => `!${a}`));\n};\n\nexport const neg = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Neg', a => `-${a}`));\n};\n\nexport const reciprocal = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Reciprocal', a => `1.0/${a}`));\n};\n\nexport const relu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'Relu', a => `select(vec4<${dataType}>(0.0), ${a}, ${a} > vec4<${dataType}>(0.0))`));\n};\n\nexport const sigmoid = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sigmoid', a => `(1.0 / (1.0 + exp(-${a})))`));\n};\n\nexport interface HardSigmoidAttributes extends AttributeWithCacheKey {\n readonly alpha: number;\n readonly beta: number;\n}\n\nexport const parseHardSigmoidAttributes = (attributes: Record): HardSigmoidAttributes =>\n createAttributeWithCacheKey(attributes as {\n alpha: number;\n beta: number;\n });\n\nexport const hardSigmoid = (context: ComputeContext, attributes: HardSigmoidAttributes): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'HardSigmoid',\n a => `max(vec4<${dataType}>(0.0), min(vec4<${dataType}>(1.0), ${attributes.alpha} * ${a} + vec4<${dataType}>(${\n attributes.beta})))`,\n undefined, attributes.cacheKey));\n};\n\nexport const sin = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sin', 'sin'));\n};\n\nexport const sinh = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sinh', 'sinh'));\n};\n\nexport const sqrt = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Sqrt', 'sqrt'));\n};\n\nexport const tan = (context: ComputeContext): void => {\n 
context.compute(createElementwiseProgramInfo(context.inputs[0], 'Tan', 'tan'));\n};\n\nexport const tanhExpression = (a: string) => `sign(${a}) * (1 - exp(-2 * abs(${a}))) / (1 + exp(-2 * abs(${a})))`;\n\nexport const tanh = (context: ComputeContext): void => {\n // TODO: revisit after https://github.com/gpuweb/gpuweb/issues/4458 is resolved\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Tanh', tanhExpression));\n};\n\nexport const fastGeluImpl = (varType = 'f32') => `\nconst fast_gelu_a: ${varType} = 0.5;\nconst fast_gelu_b: ${varType} = 0.7978845608028654;\nconst fast_gelu_c: ${varType} = 0.035677408136300125;\n\nfn tanh_v(v: vec4<${varType}>) -> vec4<${varType}> {\n return ${tanhExpression('v')};\n}\n`;\n\nexport const fastGeluExpression = (x: string) =>\n `(fast_gelu_a + fast_gelu_a * tanh_v(${x} * (fast_gelu_c * ${x} * ${x} + fast_gelu_b))) * ${x}`;\n\nexport const fastGelu = (context: ComputeContext): void => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'FastGelu', fastGeluExpression, fastGeluImpl(dataType), undefined,\n context.inputs[0].dataType));\n};\n\nexport const thresholdedRelu = (context: ComputeContext, attributes: AlphaAttributes): number => {\n const dataType = tensorTypeToWsglValueType(context.inputs[0].dataType);\n context.compute(createElementwiseProgramInfo(\n context.inputs[0], 'ThresholdedRelu', a => `select(vec4<${dataType}>(0.0), ${a}, ${a} > thresholded_relu_alpha_)`,\n `const thresholded_relu_alpha_ = vec4<${dataType}>(${attributes.alpha});`, attributes.cacheKey));\n return 0;\n};\n\nexport const log = (context: ComputeContext): void => {\n context.compute(createElementwiseProgramInfo(context.inputs[0], 'Log', 'log'));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType} from './common';\nimport {erfImpl} from './unary-op';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (inputs[0].dims.length !== 3) {\n throw new Error('input should have 3 dimensions');\n }\n\n if (![2560, 5120, 10240].includes(inputs[0].dims[2])) {\n throw new Error('hidden state should be 2560, 5120 or 10240');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('bias is expected to have 1 dimensions');\n }\n\n if (inputs[0].dims[2] !== inputs[1].dims[0]) {\n throw new Error('last dimension of input and bias are not the same');\n }\n};\n\nconst createBiasSplitGeluProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const outputShape = inputs[0].dims.slice();\n outputShape[2] = outputShape[2] / 2;\n\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims, 4);\n const bias = inputVariable('bias', inputs[0].dataType, [inputs[0].dims[2]], 4);\n const output = outputVariable('output', inputs[0].dataType, outputShape, 4);\n\n const outputSize = ShapeUtil.size(outputShape) / 4;\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const M_SQRT2 = sqrt(2.0);\n const halfChannels = ${inputs[0].dims[2] / 4 / 2}u;\n\n ${shaderHelper.declareVariables(input, bias, output)}\n\n ${erfImpl(dataType)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes(outputSize)}\n let biasIdx = global_idx % halfChannels;\n let batchIndex = global_idx / halfChannels;\n let inputOffset = biasIdx + batchIndex * halfChannels * 2;\n let valueLeft = input[inputOffset] + bias[biasIdx];\n let valueRight = input[inputOffset + halfChannels] + bias[biasIdx + halfChannels];\n let geluRight = valueRight * 0.5 * (erf_vf32(valueRight / M_SQRT2) + 1);\n\n ${output.setByOffset('global_idx', 'valueLeft * geluRight')}\n }`;\n\n return {\n name: 'BiasSplitGelu',\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)}\n }),\n getShaderSource,\n };\n};\n\nexport const biasSplitGelu = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createBiasSplitGeluProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\ntype BuiltinFunctionName = string;\ntype BinaryCustomExpression = (expressionA: string, expressionB: string) => string;\ntype BinaryFunctionCall = BuiltinFunctionName|BinaryCustomExpression|{\n scalar: BinaryCustomExpression;\n vector: BinaryCustomExpression;\n};\n\nconst createBinaryOpProgramShader =\n (shaderHelper: ShaderHelper, dimsA: readonly number[], dimsB: readonly number[], dimsOutput: readonly number[],\n vectorize: boolean, doBroadcast: boolean, sharedDimensionDivisibleBy4: boolean, funcCall: BinaryFunctionCall,\n typeA: number, typeB: number, typeOutput: number, additionalImplementation?: string) => {\n let expressionScalar: BinaryCustomExpression;\n let expressionVector: BinaryCustomExpression;\n if (typeof funcCall === 'string') {\n expressionScalar = expressionVector = (a, b) => `${funcCall}((${a}),(${b}))`;\n } else if (typeof funcCall === 'function') {\n expressionScalar = expressionVector = funcCall;\n } else {\n expressionScalar = funcCall.scalar;\n expressionVector = funcCall.vector;\n }\n\n const output = outputVariable('outputData', typeOutput, dimsOutput.length, 4);\n const a = inputVariable('aData', typeA, dimsA.length, 4);\n const b = inputVariable('bData', typeB, dimsB.length, 4);\n\n let assignment: string;\n if (vectorize) {\n if (doBroadcast) {\n const isAOneElement = ShapeUtil.size(dimsA) === 1;\n const isBOneElement = ShapeUtil.size(dimsB) === 1;\n const aLastDimDivisibleBy4 = dimsA.length > 0 && dimsA[dimsA.length - 1] % 4 === 0;\n const bLastDimDivisibleBy4 = dimsB.length > 0 && dimsB[dimsB.length - 1] % 4 === 0;\n if (isAOneElement || isBOneElement) {\n assignment = output.setByOffset(\n 'global_idx',\n expressionVector(\n isAOneElement ? `${a.type.value}(${a.getByOffset('0')}.x)` : a.getByOffset('global_idx'),\n isBOneElement ? 
`${b.type.value}(${b.getByOffset('0')}.x)` : b.getByOffset('global_idx')));\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx * 4u')};\n let offsetA = ${a.broadcastedIndicesToOffset('outputIndices', output)};\n let offsetB = ${b.broadcastedIndicesToOffset('outputIndices', output)};\n ${\n output.setByOffset(\n 'global_idx',\n expressionVector(\n sharedDimensionDivisibleBy4 || aLastDimDivisibleBy4 ?\n a.getByOffset('offsetA / 4u') :\n `${a.type.value}(${a.getByOffset('offsetA / 4u')}[offsetA % 4u])`,\n sharedDimensionDivisibleBy4 || bLastDimDivisibleBy4 ?\n b.getByOffset('offsetB / 4u') :\n `${b.type.value}(${b.getByOffset('offsetB / 4u')}[offsetB % 4u])`))}\n `;\n }\n } else {\n assignment = output.setByOffset(\n 'global_idx', expressionVector(a.getByOffset('global_idx'), b.getByOffset('global_idx')));\n }\n } else {\n if (!doBroadcast) {\n throw new Error('no necessary to use scalar implementation for element-wise binary op implementation.');\n }\n\n const singleAssignment = (resStr: string, x: number, typeCast = '') => {\n const expressionA = `aData[indexA${x}][componentA${x}]`;\n const expressionB = `bData[indexB${x}][componentB${x}]`;\n return `\n let outputIndices${x} = ${output.offsetToIndices(`global_idx * 4u + ${x}u`)};\n let offsetA${x} = ${a.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let offsetB${x} = ${b.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let indexA${x} = offsetA${x} / 4u;\n let indexB${x} = offsetB${x} / 4u;\n let componentA${x} = offsetA${x} % 4u;\n let componentB${x} = offsetB${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${expressionScalar(expressionA, expressionB)});\n `;\n };\n if (typeOutput === DataType.bool) {\n assignment = `\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n outputData[global_idx] = dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(data));`;\n } else {\n assignment = `\n ${singleAssignment('outputData[global_idx]', 0)}\n ${singleAssignment('outputData[global_idx]', 1)}\n ${singleAssignment('outputData[global_idx]', 2)}\n ${singleAssignment('outputData[global_idx]', 3)}\n `;\n }\n }\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(a, b, output)}\n\n ${additionalImplementation ?? ''}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}\n }`;\n };\n\nconst createBinaryOpProgramInfo =\n (name: string, cacheKey: string, a: TensorView, b: TensorView, funcCall: BinaryFunctionCall,\n additionalImplementation?: string, outputDataType: number = a.dataType): ProgramInfo => {\n const isBroadcast = !ShapeUtil.areEqual(a.dims, b.dims);\n let outputShape = a.dims;\n let outputSize = ShapeUtil.size(a.dims);\n\n let vectorize = false;\n let sharedDimensionDivisibleBy4 = false;\n\n // TODO: deal with zero-sized tensors (eg. 
dims=[1,0])\n const cacheKeyAux = [isBroadcast];\n if (isBroadcast) {\n const calculatedShape = BroadcastUtil.calcShape(a.dims, b.dims, false);\n if (!calculatedShape) {\n throw new Error('Can\\'t perform binary op on the given tensors');\n }\n outputShape = calculatedShape;\n outputSize = ShapeUtil.size(outputShape);\n const isAOneElement = ShapeUtil.size(a.dims) === 1;\n const isBOneElement = ShapeUtil.size(b.dims) === 1;\n const aLastDimDivisibleBy4 = a.dims.length > 0 && a.dims[a.dims.length - 1] % 4 === 0;\n const bLastDimDivisibleBy4 = b.dims.length > 0 && b.dims[b.dims.length - 1] % 4 === 0;\n cacheKeyAux.push(isAOneElement);\n cacheKeyAux.push(isBOneElement);\n cacheKeyAux.push(aLastDimDivisibleBy4);\n cacheKeyAux.push(bLastDimDivisibleBy4);\n // check whether vectorize can be enabled\n let sharedDimension = 1;\n for (let i = 1; i < outputShape.length; i++) {\n const dimA = a.dims[a.dims.length - i] ?? 1;\n const dimB = b.dims[b.dims.length - i] ?? 1;\n if (dimA === dimB) {\n sharedDimension *= dimA;\n } else {\n break;\n }\n }\n if (sharedDimension % 4 === 0) {\n sharedDimensionDivisibleBy4 = true;\n vectorize = true;\n } else if (isAOneElement || isBOneElement || aLastDimDivisibleBy4 || bLastDimDivisibleBy4) {\n vectorize = true;\n }\n } else {\n // element-wise\n vectorize = true;\n }\n cacheKeyAux.push(vectorize);\n\n return {\n name,\n shaderCache: {\n hint: cacheKey + cacheKeyAux.map((x) => x.toString()).join('_'),\n inputDependencies: ['rank', 'rank'],\n },\n getShaderSource: (shaderHelper) => createBinaryOpProgramShader(\n shaderHelper, a.dims, b.dims, outputShape, vectorize, isBroadcast, sharedDimensionDivisibleBy4, funcCall,\n a.dataType, b.dataType, outputDataType, additionalImplementation),\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */ / 4 /* component size */)},\n programUniforms: [\n {type: DataType.uint32, data: Math.ceil(ShapeUtil.size(outputShape) / 4)},\n ...createTensorShapeVariables(a.dims, b.dims, outputShape)\n ],\n }),\n };\n };\n\nconst runBinaryOp =\n (context: ComputeContext, name: string, funcCall: BinaryFunctionCall, additionalImplementation?: string,\n cacheKey?: string, outputDataType?: number): void => {\n context.compute(createBinaryOpProgramInfo(\n name, cacheKey ?? '', context.inputs[0], context.inputs[1], funcCall, additionalImplementation,\n outputDataType));\n };\n\nexport const add = (context: ComputeContext): void => {\n runBinaryOp(context, 'Add', (a, b) => `${a}+${b}`);\n};\n\nexport const div = (context: ComputeContext): void => {\n runBinaryOp(context, 'Div', (a, b) => `${a}/${b}`);\n};\n\nexport const equal = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Equal', ({scalar: (a, b) => `u32(${a}==${b})`, vector: (a, b) => `vec4(${a}==${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const mul = (context: ComputeContext): void => {\n runBinaryOp(context, 'Mul', (a, b) => `${a}*${b}`);\n};\n\nexport const pow = (context: ComputeContext): void => {\n const type = inputVariable('input', context.inputs[0].dataType, context.inputs[0].dims).type.value;\n const roundStr = type === 'i32' ? 
'round' : '';\n runBinaryOp(\n context, 'Pow', ({scalar: (a, b) => `pow_custom(${a},${b})`, vector: (a, b) => `pow_vector_custom(${a},${b})`}),\n `\n fn pow_custom(a : ${type}, b : ${type}) -> ${type} {\n if (b == ${type}(0.0)) {\n return ${type}(1.0);\n } else if (a < ${type}(0.0) && f32(b) != floor(f32(b))) {\n return ${type}(pow(f32(a), f32(b))); // NaN\n }\n return select(sign(a), ${type}(1.0), round(f32(abs(b) % ${type}(2.0))) != 1.0) * ${type}(${\n roundStr}(pow(f32(abs(a)), f32(b))));\n }\n fn pow_vector_custom(a : vec4<${type}>, b : vec4<${type}>) -> vec4<${type}> {\n // TODO: implement vectorized pow\n return vec4<${type}>(pow_custom(a.x, b.x), pow_custom(a.y, b.y), pow_custom(a.z, b.z), pow_custom(a.w, b.w));\n }\n `);\n};\n\nexport const sub = (context: ComputeContext): void => {\n runBinaryOp(context, 'Sub', (a, b) => `${a}-${b}`);\n};\n\nexport const greater = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Greater', ({scalar: (a, b) => `u32(${a}>${b})`, vector: (a, b) => `vec4(${a}>${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const less = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'Less', ({scalar: (a, b) => `u32(${a}<${b})`, vector: (a, b) => `vec4(${a}<${b})`}), undefined,\n undefined, DataType.bool);\n};\n\nexport const greaterOrEqual = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'GreaterOrEqual', ({scalar: (a, b) => `u32(${a}>=${b})`, vector: (a, b) => `vec4(${a}>=${b})`}),\n undefined, undefined, DataType.bool);\n};\n\nexport const lessOrEqual = (context: ComputeContext): void => {\n runBinaryOp(\n context, 'LessOrEqual', ({scalar: (a, b) => `u32(${a}<=${b})`, vector: (a, b) => `vec4(${a}<=${b})`}),\n undefined, undefined, DataType.bool);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {MAX_CLIP, MIN_CLIP} from '../../util';\nimport {ProgramUniform} from '../types';\n\nimport {UniformsArrayType} from './common';\n\nexport interface InternalActivationAttributes {\n readonly activation: string;\n readonly clipMin?: number;\n readonly clipMax?: number;\n readonly alpha?: number;\n readonly beta?: number;\n}\n\nexport const getActivationSnippet =\n (attributes: InternalActivationAttributes, valueType: string, baseType = 'f32'): string => {\n switch (attributes.activation) {\n case 'Relu':\n return `value = max(value, ${valueType}(0.0));`;\n case 'Sigmoid':\n return `value = (${valueType}(1.0) / (${valueType}(1.0) + exp(-value)));`;\n case 'Clip':\n return `value = clamp(value, ${valueType}(${baseType}(uniforms.clip_min)), ${valueType}(${\n baseType}(uniforms.clip_max)));`;\n case 'HardSigmoid':\n return `value = max(${valueType}(0.0), min(${valueType}(1.0), ${baseType}(uniforms.alpha) * value + ${\n baseType}(uniforms.beta)));`;\n case 'LeakyRelu':\n return `value = select(${baseType}(uniforms.alpha) * value, value, value >= ${valueType}(0.0));`;\n case '':\n return '';\n // TODO: adding other activations that can be fused.\n default:\n throw new Error(`Unsupported activation ${attributes.activation}`);\n }\n };\n\nexport const appendActivationUniformsData =\n (attributes: InternalActivationAttributes, programUniform: ProgramUniform[]) => {\n if (attributes.activation === 'Clip') {\n programUniform.push(\n {type: DataType.float, data: attributes.clipMax!}, {type: DataType.float, data: attributes.clipMin!});\n } else if (attributes.activation === 'HardSigmoid') {\n programUniform.push(\n {type: DataType.float, data: attributes.alpha!}, {type: DataType.float, data: attributes.beta!});\n } else if (attributes.activation === 'LeakyRelu') {\n programUniform.push({type: DataType.float, data: attributes.alpha!});\n }\n };\n\nexport const appendActivationUniforms = (attributes: InternalActivationAttributes, uniforms: UniformsArrayType) => {\n if (attributes.activation === 'Clip') {\n uniforms.push({name: 'clip_max', type: 'f32'}, {name: 'clip_min', type: 'f32'});\n } else if (attributes.activation === 'HardSigmoid') {\n uniforms.push({name: 'alpha', type: 'f32'}, {name: 'beta', type: 'f32'});\n } else if (attributes.activation === 'LeakyRelu') {\n uniforms.push({name: 'alpha', type: 'f32'});\n }\n};\n\nexport const parseInternalActivationAttributes =\n (attributes: Record|undefined): InternalActivationAttributes => {\n const activation = attributes?.activation as string || '';\n if (activation === 'HardSigmoid') {\n const [alpha, beta] = attributes?.activation_params as [number, number] || [0.2, 0.5];\n return {activation, alpha, beta};\n } else if (activation === 'Clip') {\n const [clipMin, clipMax] = attributes?.activation_params as [number, number] || [MIN_CLIP, MAX_CLIP];\n return {activation, clipMax, clipMin};\n } else if (activation === 'LeakyRelu') {\n const [alpha] = attributes?.activation_params as [number] || [0.01];\n return {activation, alpha};\n }\n return {activation};\n };\n", "/**\n * @license\n * Copyright 2021 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/activation_util.ts\n//\n// modified to fit the needs of the project\n\nexport const typeSnippet = (component: number, dataType: string) => {\n switch (component) {\n case 1:\n return dataType;\n case 2:\n return `vec2<${dataType}>`;\n case 3:\n return `vec3<${dataType}>`;\n case 4:\n return `vec4<${dataType}>`;\n default:\n throw new Error(`${component}-component is not supported.`);\n }\n};\n\nexport const biasSnippet = (hasBias: boolean): string => `\n ${hasBias ? 'value = value + getBiasByOutputCoords(coords);' : ''}\n `;\n", "/**\n * @license\n * Copyright 2020 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-core/src/ops/conv_util.ts\n//\n// modified to fit the needs of the project\n\nexport const utilFunctions = (strideStr: string) => (`\nfn getIndexFromCoords4D(coords : vec4, shape : vec4) -> i32 {\n return dot(coords, vec4(\n shape.y * shape.z * shape.w, shape.z * shape.w, shape.w, 1));\n}\nfn getOutputIndexFromCoords(coords : vec4) -> i32 {\n return dot(coords, vec4(\n i32(${strideStr}.x), i32(${strideStr}.y), i32(${strideStr}.z), 1));\n}\n`);\n", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/matmul_packed_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, getBroadcastDims, IndicesHelper, inputVariable, internalVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet, InternalActivationAttributes} from '../fuse-utils';\n\nimport {typeSnippet} from './activation_util';\n\nconst writeDataToSubAVec4Snippet = (transpose: boolean, batchDims?: IndicesHelper) => {\n if (transpose) {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n kStart + inputRow,\n globalRowStart / innerElementSize + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n\n } else {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n globalRow + innerRow,\n kStart / innerElementSize + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n }\n};\n\nconst calculateResultSnippet = (transposeA: boolean, innerElementSize: number) => {\n if (transposeA) {\n return `\n let ACached0 = mm_Asub[k * innerElementSize][localRow];\n let ACached1 = mm_Asub[k * innerElementSize + 1][localRow];\n let ACached2 = mm_Asub[k * innerElementSize + 2][localRow];\n ${innerElementSize === 3 ? '' : 'let ACached3 = mm_Asub[k * innerElementSize + 3][localRow];'}\n for (var i = 0; i < rowPerThread; i = i + 1) {\n acc[i] = BCached0 * ACached0[i] + acc[i];\n acc[i] = BCached1 * ACached1[i] + acc[i];\n acc[i] = BCached2 * ACached2[i] + acc[i];\n ${innerElementSize === 3 ? '' : 'acc[i] = BCached3 * ACached3[i] + acc[i];'}\n }`;\n } else {\n return `\n for (var i = 0; i < rowPerThread; i = i + 1) {\n let ACached = mm_Asub[tileRow + i][k];\n acc[i] = BCached0 * ACached.x + acc[i];\n acc[i] = BCached1 * ACached.y + acc[i];\n acc[i] = BCached2 * ACached.z + acc[i];\n ${innerElementSize === 3 ? '' : 'acc[i] = BCached3 * ACached.w + acc[i];'}\n }`;\n }\n};\n\nexport const makeMatMulPackedVec4Source =\n (workPerThread: number[], workgroupSize: [number, number, number], type = 'f32', batchDims?: IndicesHelper,\n transposeA = false, tileInner = 32, splitK = false, splitedDimInner = 32): string => {\n const tileAOuter = workgroupSize[1] * workPerThread[1];\n const tileBOuter = workgroupSize[0] * workPerThread[0];\n const tileAWidth = transposeA ? tileAOuter : tileInner;\n const tileAHight = transposeA ? 
tileInner : tileAOuter;\n const innerElementSize = tileAWidth / workgroupSize[0];\n const rowPerThreadB = tileInner / workgroupSize[1];\n\n if (!(((transposeA && innerElementSize === 4 && workPerThread[1] === 4) ||\n (!transposeA && (innerElementSize === 3 || innerElementSize === 4))) &&\n tileAWidth % workgroupSize[0] === 0 && tileInner % workgroupSize[1] === 0 && workPerThread[0] === 4)) {\n throw new Error(`If transposeA ${transposeA} is true, innerElementSize ${\n innerElementSize} and workPerThread[1] ${workPerThread[1]} must be 4.\n Otherwise, innerElementSize ${innerElementSize} must be 3 or 4.\n tileAWidth ${tileAWidth} must be divisible by workgroupSize[0]${workgroupSize[0]}. tileInner ${\n tileInner} must be divisible by workgroupSize[1] ${workgroupSize[1]}. colPerThread ${\n workPerThread[0]} must be 4.`);\n }\n return `\nvar mm_Asub: array, ${tileAWidth / innerElementSize}>, ${tileAHight}>;\nvar mm_Bsub: array, ${tileBOuter / workPerThread[0]}>, ${tileInner}>;\n\nconst rowPerThread = ${workPerThread[1]};\nconst colPerThread = ${workPerThread[0]};\nconst innerElementSize = ${innerElementSize};\nconst tileInner = ${tileInner};\n\n@compute @workgroup_size(${workgroupSize[0]}, ${workgroupSize[1]}, ${workgroupSize[2]})\nfn main(@builtin(local_invocation_id) localId : vec3,\n @builtin(global_invocation_id) globalId : vec3,\n @builtin(workgroup_id) workgroupId : vec3) {\n let localRow = i32(localId.y);\n let tileRow = localRow * rowPerThread;\n let tileCol = i32(localId.x);\n\n let globalRow =i32(globalId.y) * rowPerThread;\n let globalCol = i32(globalId.x);\n let batch = ${splitK ? '0' : 'i32(globalId.z)'};\n ${batchDims ? `let batchIndices = ${batchDims.offsetToIndices('u32(batch)')};` : ''}\n let globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n\n let num_tiles = ${splitK ? `${Math.ceil(splitedDimInner / tileInner)}` : '(uniforms.dim_inner - 1) / tileInner + 1'};\n var kStart = ${splitK ? `i32(globalId.z) * ${splitedDimInner}` : '0'};\n\n var acc: array, rowPerThread>;\n\n // Loop over shared dimension.\n let tileRowB = localRow * ${rowPerThreadB};\n for (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let inputRow = tileRow + innerRow;\n let inputCol = tileCol;\n ${writeDataToSubAVec4Snippet(transposeA, batchDims)}\n }\n\n // Load one tile of B into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadB}; innerRow = innerRow + 1) {\n let inputRow = tileRowB + innerRow;\n let inputCol = tileCol;\n mm_Bsub[inputRow][inputCol] = mm_readB(batch, kStart + inputRow, globalCol${\n batchDims ? ', batchIndices' : ''});\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n for (var k = 0; k < tileInner / innerElementSize; k = k + 1) {\n let BCached0 = mm_Bsub[k * innerElementSize][tileCol];\n let BCached1 = mm_Bsub[k * innerElementSize + 1][tileCol];\n let BCached2 = mm_Bsub[k * innerElementSize + 2][tileCol];\n ${innerElementSize === 3 ? 
'' : 'let BCached3 = mm_Bsub[k * innerElementSize + 3][tileCol];'}\n\n ${calculateResultSnippet(transposeA, innerElementSize)}\n }\n\n workgroupBarrier();\n }\n\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n mm_write(batch, globalRow + innerRow, globalCol, acc[innerRow]);\n }\n}`;\n };\n\nconst writeDataToSubASnippet = (transpose: boolean, batchDims?: IndicesHelper) => {\n if (transpose) {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n kStart + inputRow,\n globalRowStart + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n\n } else {\n return `\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n globalRowStart + inputRow,\n kStart + inputCol${batchDims ? ', batchIndices' : ''});\n `;\n }\n};\n\nconst readDataFromSubASnippet = (transposeA: boolean) =>\n transposeA ? 'let ACached = mm_Asub[k][tileRow + innerRow];' : 'let ACached = mm_Asub[tileRow + innerRow][k];';\n\n// sequentialAccessByThreads means sequential data in memory is accessed by\n// threads, instead of a single thread (default behavior).\nexport const makeMatMulPackedSource =\n (workPerThread: number[], workgroupSize: [number, number, number], type = 'f32', batchDims?: IndicesHelper,\n transposeA = false, tileInner = 32, splitK = false, splitedDimInner = 32,\n sequentialAccessByThreads = false): string => {\n const tileAOuter = workPerThread[1] * workgroupSize[1];\n const tileBOuter = workPerThread[0] * workgroupSize[0];\n const tileAWidth = transposeA ? tileAOuter : tileInner;\n const tileAHight = transposeA ? tileInner : tileAOuter;\n\n if (!(tileAHight % workgroupSize[1] === 0 && tileAWidth % workgroupSize[0] === 0 &&\n tileInner % workgroupSize[1] === 0)) {\n throw new Error(`tileAHight ${tileAHight} must be divisible by workgroupSize[1]${\n workgroupSize[1]}, tileAWidth ${tileAWidth} must be divisible by workgroupSize[0]${\n workgroupSize[0]}, tileInner ${tileInner} must be divisible by workgroupSize[1]${workgroupSize[1]}`);\n }\n const rowPerThreadA = tileAHight / workgroupSize[1];\n const colPerThreadA = tileAWidth / workgroupSize[0];\n const rowPerThreadB = tileInner / workgroupSize[1];\n const matmulSnippet = sequentialAccessByThreads ?\n `\n let localRow = i32(localId.y);\n let localCol = i32(localId.x);\n let globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n let globalColStart = i32(workgroupId.x) * ${tileBOuter};\n\n // Loop over shared dimension.\n for (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var inputRow = localRow; inputRow < ${tileAHight}; inputRow = inputRow + ${workgroupSize[1]}) {\n for (var inputCol = localCol; inputCol < ${tileAWidth}; inputCol = inputCol + ${workgroupSize[0]}) {\n ${writeDataToSubASnippet(transposeA, batchDims)}\n }\n }\n // Load one tile of B into local memory.\n for (var inputRow = localRow; inputRow < ${tileInner}; inputRow = inputRow + ${workgroupSize[1]}) {\n for (var inputCol = localCol; inputCol < ${tileBOuter}; inputCol = inputCol + ${workgroupSize[0]}) {\n mm_Bsub[inputRow][inputCol] = mm_readB(batch,\n kStart + inputRow,\n globalColStart + inputCol${batchDims ? 
', batchIndices' : ''});\n }\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n var BCached : array<${type}, colPerThread>;\n for (var k = 0; k < tileInner; k = k + 1) {\n for (var inner = 0; inner < colPerThread; inner = inner + 1) {\n BCached[inner] = mm_Bsub[k][localCol + inner * ${workgroupSize[0]}];\n }\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let ACached = ${\n transposeA ? `mm_Asub[k][localRow + innerRow * ${workgroupSize[1]}];` :\n `mm_Asub[localRow + innerRow * ${workgroupSize[1]}][k];`}\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = acc[innerRow][innerCol] +\n ACached * BCached[innerCol];\n }\n }\n }\n workgroupBarrier();\n }\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let gRow = globalRowStart + localRow + innerRow * ${workgroupSize[1]};\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n let gCol = globalColStart + localCol + innerCol * ${workgroupSize[0]};\n mm_write(batch, gRow, gCol, acc[innerRow][innerCol]);\n }\n }\n ` :\n `\nlet tileRow = i32(localId.y) * rowPerThread;\nlet tileCol = i32(localId.x) * colPerThread;\n\nlet globalRow = i32(globalId.y) * rowPerThread;\nlet globalCol = i32(globalId.x) * colPerThread;\nlet globalRowStart = i32(workgroupId.y) * ${tileAOuter};\n\nlet tileRowA = i32(localId.y) * ${rowPerThreadA};\nlet tileColA = i32(localId.x) * ${colPerThreadA};\nlet tileRowB = i32(localId.y) * ${rowPerThreadB};\n// Loop over shared dimension.\nfor (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadA}; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < ${colPerThreadA}; innerCol = innerCol + 1) {\n let inputRow = tileRowA + innerRow;\n let inputCol = tileColA + innerCol;\n ${writeDataToSubASnippet(transposeA, batchDims)}\n }\n }\n\n // Load one tile of B into local memory.\n for (var innerRow = 0; innerRow < ${rowPerThreadB}; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n let inputRow = tileRowB + innerRow;\n let inputCol = tileCol + innerCol;\n mm_Bsub[inputRow][inputCol] = mm_readB(batch,\n kStart + inputRow,\n globalCol + innerCol${batchDims ? 
', batchIndices' : ''});\n }\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n var BCached : array<${type}, colPerThread>;\n for (var k = 0; k < tileInner; k = k + 1) {\n for (var inner = 0; inner < colPerThread; inner = inner + 1) {\n BCached[inner] = mm_Bsub[k][tileCol + inner];\n }\n\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n ${readDataFromSubASnippet(transposeA)}\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = acc[innerRow][innerCol] + ACached * BCached[innerCol];\n }\n }\n }\n\n workgroupBarrier();\n}\n\nfor (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n mm_write(batch, globalRow + innerRow, globalCol + innerCol,\n acc[innerRow][innerCol]);\n }\n}\n`;\n\n return `\n var mm_Asub : array, ${tileAHight}>;\n var mm_Bsub : array, ${tileInner}>;\n const rowPerThread = ${workPerThread[1]};\n const colPerThread = ${workPerThread[0]};\n const tileInner = ${tileInner};\n\n@compute @workgroup_size(${workgroupSize[0]}, ${workgroupSize[1]}, ${workgroupSize[2]})\nfn main(@builtin(local_invocation_id) localId : vec3,\n @builtin(global_invocation_id) globalId : vec3,\n @builtin(workgroup_id) workgroupId : vec3) {\n let batch = ${splitK ? '0' : 'i32(globalId.z)'};\n ${batchDims ? `let batchIndices = ${batchDims.offsetToIndices('u32(batch)')};` : ''}\n let num_tiles = ${\n splitK ? `${Math.ceil(splitedDimInner / tileInner)}` : '(uniforms.dim_inner - 1) / tileInner + 1'};\n var kStart = ${splitK ? `i32(globalId.z) * ${splitedDimInner}` : '0'};\n\n var acc : array, rowPerThread>;\n\n // Without this initialization strange values show up in acc.\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = 0.0;\n }\n }\n ${matmulSnippet}\n }\n`;\n };\n\nconst matMulReadWriteFnSource =\n (component: number, hasBias: boolean, applyActivation: string, variables: IndicesHelper[],\n batchShapes: Array, isChannelsLast = false): string => {\n const [batchAShape, batchBShape, batchShape] = batchShapes;\n const [batchVariable, aVariable, bVariable, outputVariable] = variables;\n const broadCastADims = getBroadcastDims(batchAShape, batchShape);\n const broadCastBDims = getBroadcastDims(batchBShape, batchShape);\n const dataType = tensorTypeToWsglStorageType(variables[0].type.tensor);\n const getAIndices = () => {\n const aRank = aVariable.rank;\n const batchRank = batchVariable.rank;\n let resStr = `var aIndices: ${aVariable.type.indices};`;\n for (let i = aRank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\naIndices[${i}] = ${batchRank > 1 ? `batchIndices[${j}]` : 'batchIndices'};`;\n }\n broadCastADims.forEach(i => {\n resStr += `\\naIndices[${i}] = 0;`;\n });\n resStr += `\\naIndices[${aRank - 2}] = u32(row);\n aIndices[${aRank - 1}] = u32(colIn);`;\n return resStr;\n };\n const getBIndices = () => {\n const bRank = bVariable.rank;\n const batchRank = batchVariable.rank;\n let resStr = `var bIndices: ${bVariable.type.indices};`;\n for (let i = bRank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\nbIndices[${i}] = ${batchRank > 1 ? 
`batchIndices[${j}]` : 'batchIndices'};`;\n }\n broadCastBDims.forEach(i => {\n resStr += `\\nbIndices[${i}] = 0;`;\n });\n resStr += `\\nbIndices[${bRank - 2}] = u32(row);\n bIndices[${bRank - 1}] = u32(colIn);`;\n return resStr;\n };\n const source = `\n fn mm_readA(batch: i32, row: i32, colIn: i32, batchIndices: ${batchVariable.type.indices}) -> ${\n typeSnippet(component, dataType)} {\n var value = ${typeSnippet(component, dataType)}(0.0);\n let col = colIn * ${component};\n if(row < uniforms.dim_a_outer && col < uniforms.dim_inner)\n {\n ${getAIndices()}\n value = ${aVariable.getByIndices('aIndices')};\n }\n return value;\n }\n\n fn mm_readB(batch: i32, row: i32, colIn: i32, batchIndices: ${batchVariable.type.indices}) -> ${\n typeSnippet(component, dataType)} {\n var value = ${typeSnippet(component, dataType)}(0.0);\n let col = colIn * ${component};\n if(row < uniforms.dim_inner && col < uniforms.dim_b_outer)\n {\n ${getBIndices()}\n value = ${bVariable.getByIndices('bIndices')};\n }\n return value;\n }\n\n fn mm_write(batch: i32, row: i32, colIn: i32, valueIn: ${typeSnippet(component, dataType)}) {\n let col = colIn * ${component};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) {\n var value = valueIn;\n let coords = vec3(batch, row, colIn);\n ${\n hasBias ?\n `value = value + ${isChannelsLast ? 'bias[colIn]' : `${typeSnippet(component, dataType)}(bias[row])`};` :\n '' }\n ${applyActivation}\n ${outputVariable.setByIndices('vec3(coords)', 'value')}\n }\n }\n `;\n return source;\n };\n\nexport const createMatmulProgramInfo =\n (inputs: readonly TensorView[], activationAttributes: InternalActivationAttributes, outputShape: readonly number[],\n reshapedOutputShape?: readonly number[],\n isChannelsLast = false /* only used for conv2dByMatMul*/): ProgramInfo => {\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n const outerDimsA = aShape.slice(0, -2);\n const outerDimsB = bShape.slice(0, -2);\n const outerDims = reshapedOutputShape ? reshapedOutputShape.slice(0, -2) : outputShape.slice(0, -2);\n const batchSize = ShapeUtil.size(outerDims);\n const dimAOuter = aShape[aShape.length - 2];\n const dimInner = aShape[aShape.length - 1];\n const dimBOuter = bShape[bShape.length - 1];\n const isVec4 = dimInner % 4 === 0 && dimBOuter % 4 === 0;\n\n // TODO: fine tune size\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const workgroupSize: [number, number, number] = [8, 8, 1];\n const dispatch = [\n Math.ceil(dimBOuter / workgroupSize[0] / elementsPerThread[0]),\n Math.ceil(dimAOuter / workgroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workgroupSize[2] / elementsPerThread[2])\n ];\n\n const components = isVec4 ? 
4 : 1;\n const aShapeTemp = [...outerDimsA, dimAOuter, dimInner / components];\n const aRank = aShapeTemp.length;\n const bShapeTemp = [...outerDimsB, dimInner, dimBOuter / components];\n const bRank = bShapeTemp.length;\n const outputShapeTemp = [batchSize, dimAOuter, dimBOuter / components];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}\n ];\n appendActivationUniformsData(activationAttributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(outerDims, aShapeTemp, bShapeTemp));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n\n const hasBias = inputs.length > 2;\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShapeTemp));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const batchRank = outerDims.length;\n const batchDims = internalVariable('batchDims', inputs[0].dataType, batchRank, 1);\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const A = inputVariable('a', inputs[0].dataType, aRank, components);\n const B = inputVariable('b', inputs[1].dataType, bRank, components);\n const output = outputVariable('result', inputs[0].dataType, outputShapeTemp.length, components);\n const inputVariables = [A, B];\n if (hasBias) {\n const biasComponents = isChannelsLast ? components : 1;\n inputVariables.push(inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, biasComponents));\n }\n const uniforms: UniformsArrayType =\n [{name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'}];\n appendActivationUniforms(activationAttributes, uniforms);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(activationAttributes, output.type.value, baseType);\n const declareFunctions = matMulReadWriteFnSource(\n components, hasBias, applyActivation, [batchDims, A, B, output], [outerDimsA, outerDimsB, outerDims],\n isChannelsLast);\n return `\n ${\n shaderHelper.registerUniforms(uniforms).registerInternalVariables(batchDims).declareVariables(\n ...inputVariables, output)}\n ${declareFunctions}\n ${\n isVec4 ? makeMatMulPackedVec4Source(elementsPerThread, workgroupSize, dataType, batchDims) :\n makeMatMulPackedSource(elementsPerThread, workgroupSize, dataType, batchDims)}\n `;\n };\n return {\n name: 'MatMul',\n shaderCache: {\n hint: `${elementsPerThread};${activationAttributes.activation};${isVec4};${isChannelsLast}`,\n inputDependencies\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms\n }),\n getShaderSource,\n };\n };\n", "/**\n * @license\n * Copyright 2019 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv2d_mm_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvAttributes} from '../conv';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from '../fuse-utils';\n\nimport {biasSnippet, typeSnippet} from './activation_util';\nimport {utilFunctions} from './conv_util';\nimport {makeMatMulPackedSource, makeMatMulPackedVec4Source} from './matmul_packed_webgpu';\n\nconst conv2dCommonSnippet =\n (isChannelsLast: boolean, fitAOuter: boolean, fitBOuter: boolean, fitInner: boolean, addBias = false,\n attributes: ConvAttributes, innerElementSizeX = 4, innerElementSizeW = 4, innerElementSize = 4,\n dataType = 'f32'): string => {\n const getXSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'resData = x[xIndex];';\n case 3:\n return `resData = vec3<${dataType}>(x[xIndex], x[xIndex + 1], x[xIndex + 2]);`;\n case 4:\n return 'resData = x[xIndex / 4];';\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const getWSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'return w[row * i32(uniforms.w_shape[3]) + colIn];';\n case 4:\n return 'return w[row * i32(uniforms.w_shape[3]) / 4 + colIn];';\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const coordASnippet = isChannelsLast ? `\n let coord = vec4(batch, xRow, xCol, xCh);\n ` :\n `\n let coord = vec4(batch, xCh, xRow, xCol);\n `;\n\n const coordResSnippet = isChannelsLast ? `\n let coords = vec4(\n batch,\n row / outWidth,\n row % outWidth,\n col);\n ` :\n `\n let coords = vec4(\n batch,\n row,\n col / outWidth,\n col % outWidth);\n `;\n\n const xHeight = isChannelsLast ? 'i32(uniforms.x_shape[1])' : 'i32(uniforms.x_shape[2])';\n const xWidth = isChannelsLast ? 'i32(uniforms.x_shape[2])' : 'i32(uniforms.x_shape[3])';\n const row = isChannelsLast ? 'row' : 'col';\n const col = isChannelsLast ? 'col' : 'row';\n const readXSnippet = `\n let inChannels = i32(uniforms.w_shape[2]);\n let outWidth = ${isChannelsLast ? 
'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n let outRow = ${row} / outWidth;\n let outCol = ${row} % outWidth;\n\n let WRow = ${col} / (i32(uniforms.w_shape[1]) * inChannels);\n let WCol = ${col} / inChannels % i32(uniforms.w_shape[1]);\n let xRow = outRow * uniforms.stride[0] + uniforms.dilation[0] * WRow - uniforms.pad[0];\n let xCol = outCol * uniforms.stride[1] + uniforms.dilation[1] * WCol - uniforms.pad[1];\n let xCh = ${col} % inChannels;\n var resData = ${typeSnippet(innerElementSizeX, dataType)}(0.0);\n // The bounds checking is always needed since we use it to pad zero for\n // the 'same' padding type.\n if (xRow >= 0 && xRow < ${xHeight} && xCol >= 0 && xCol < ${xWidth}) {\n ${coordASnippet}\n let xIndex = getIndexFromCoords4D(coord, vec4(uniforms.x_shape));\n ${getXSnippet(innerElementSizeX)}\n }\n return resData;`;\n\n const sampleX = isChannelsLast ? (fitAOuter && fitInner ? `\n let col = colIn * ${innerElementSizeX};\n ${readXSnippet}` :\n `\n let col = colIn * ${innerElementSizeX};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) {\n ${readXSnippet}\n }\n return ${typeSnippet(innerElementSizeX, dataType)}(0.0);`) :\n (fitInner && fitBOuter ? `\n let col = colIn * ${innerElementSizeX};\n ${readXSnippet}` :\n `\n let col = colIn * ${innerElementSizeX};\n if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) {\n ${readXSnippet}\n }\n return ${typeSnippet(innerElementSizeX, dataType)}(0.0);`);\n\n const sampleW = `${getWSnippet(innerElementSizeW)}`;\n\n const resType = typeSnippet(innerElementSize, dataType);\n const aType =\n isChannelsLast ? typeSnippet(innerElementSizeX, dataType) : typeSnippet(innerElementSizeW, dataType);\n const bType =\n isChannelsLast ? typeSnippet(innerElementSizeW, dataType) : typeSnippet(innerElementSizeX, dataType);\n const applyActivation = getActivationSnippet(attributes, resType, dataType);\n const userCode = `\n fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${aType} {\n ${isChannelsLast ? sampleX : sampleW}\n }\n\n fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${bType} {\n ${isChannelsLast ? sampleW : sampleX}\n }\n\n fn mm_write(batch: i32, row : i32, colIn : i32, valueIn : ${resType}) {\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer)\n {\n var value = valueIn;\n let outWidth = ${isChannelsLast ? 'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n ${coordResSnippet}\n ${biasSnippet(addBias)}\n ${applyActivation}\n setOutputAtCoords(coords[0], coords[1], coords[2], coords[3], value);\n }\n }`;\n return userCode;\n };\n\nexport const createConv2DMatMulProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[], dimAOuter: number,\n dimBOuter: number, dimInner: number, hasBias: boolean, sequentialAccessByThreads: boolean): ProgramInfo => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inChannels = isChannelsLast ? inputs[0].dims[3] : inputs[0].dims[1];\n const batchSize = outputShape[0];\n const outWidth = isChannelsLast ? outputShape[2] : outputShape[3];\n const outHeight = isChannelsLast ? outputShape[1] : outputShape[2];\n const outChannels = isChannelsLast ? outputShape[3] : outputShape[1];\n // TODO: enable vec4 for NCHW\n const isVec4 = isChannelsLast && (inChannels % 4 === 0 || inChannels % 3 === 0) && outChannels % 4 === 0;\n\n // TODO: fine tune size\n const dispatchX = isChannelsLast ? 
outChannels : outWidth * outHeight;\n const dispatchY = isChannelsLast ? outWidth * outHeight : outChannels;\n const workGroupSize: [number, number, number] = [8, 8, 1];\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const dispatch = [\n Math.ceil(dispatchX / workGroupSize[0] / elementsPerThread[0]),\n Math.ceil(dispatchY / workGroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workGroupSize[2] / elementsPerThread[2])\n ];\n\n LOG_DEBUG('verbose', () => `[conv2d_mm_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? (isChannelsLast && inChannels % 4 !== 0 ? 3 : 4) : 1;\n const tileAOuter = workGroupSize[1] * elementsPerThread[1];\n const tileBOuter = workGroupSize[0] * elementsPerThread[0];\n const tileInner = Math.max(workGroupSize[0] * innerElementSize, workGroupSize[1]);\n const fitAOuter = dimAOuter % tileAOuter === 0;\n const fitBOuter = dimBOuter % tileBOuter === 0;\n const fitInner = dimInner % tileInner === 0;\n const elementsSize = isVec4 ? [innerElementSize, 4, 4] : [1, 1, 1];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}, {type: DataType.int32, data: [attributes.pads[0], attributes.pads[1]]},\n {type: DataType.int32, data: attributes.strides}, {type: DataType.int32, data: attributes.dilations}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'},\n {name: 'pad', type: 'i32', length: 2}, {name: 'stride', type: 'i32', length: 2},\n {name: 'dilation', type: 'i32', length: 2}\n ];\n appendActivationUniforms(attributes, uniforms);\n\n // TODO: support component 2, 3.\n const components = isVec4 ? 4 : 1;\n const t = tensorTypeToWsglStorageType(inputs[0].dataType);\n let declareFunctions = `\n fn setOutputAtIndex(flatIndex : i32, value : ${isVec4 ? `vec4<${t}>` : t}) {\n result[flatIndex] = ${isVec4 ? `vec4<${t}>` : t}(value);\n }\n fn setOutputAtCoords(d0 : i32, d1 : i32, d2 : i32, d3 : i32, value : ${isVec4 ? `vec4<${t}>` : t}) {\n let flatIndex = getOutputIndexFromCoords(vec4(d0, d1, d2, d3));\n setOutputAtIndex(flatIndex ${isVec4 ? '/ 4' : ''}, value);\n }`;\n const x = inputVariable(\n 'x', inputs[0].dataType, inputs[0].dims.length, innerElementSize === 3 ? 1 : innerElementSize);\n const w = inputVariable('w', inputs[1].dataType, inputs[1].dims.length, components);\n const inputVariables = [x, w];\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${isVec4 ? `vec4<${t}>` : t} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? 
'/ 4' : ''}];\n }`;\n }\n\n return `\n ${utilFunctions('uniforms.result_strides')}\n //struct Uniforms { xShape : vec4, wShape : vec4, outShape : vec4,\n // outShapeStrides: vec3, filterDims : vec2, pad : vec2, stride : vec2,\n // dilation : vec2, dimAOuter : i32, dimBOuter : i32, dimInner : i32 };\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${declareFunctions}\n ${\n conv2dCommonSnippet(\n isChannelsLast, fitAOuter, fitBOuter, fitInner, hasBias, attributes, elementsSize[0], elementsSize[1],\n elementsSize[2], t)}\n ${\n isVec4 ?\n makeMatMulPackedVec4Source(elementsPerThread, workGroupSize, t, undefined, !isChannelsLast, tileInner) :\n makeMatMulPackedSource(\n elementsPerThread, workGroupSize, t, undefined, !isChannelsLast, tileInner, false, undefined,\n sequentialAccessByThreads)}`;\n };\n return {\n name: 'Conv2DMatMul',\n shaderCache: {\n hint: `${attributes.cacheKey};${innerElementSize};${isVec4};${fitAOuter};${fitBOuter};${fitInner};${\n tileAOuter};${tileBOuter};${tileInner}`,\n inputDependencies\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms,\n }),\n getShaderSource\n };\n };\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\nimport {calculateOutputShape, ConvAttributes} from './conv';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from './fuse-utils';\n\n/**\n * naive grouped conv implementation, supports 1d/2d conv\n * @param squeezeOutputShapeFunction - an optional function to squeeze the output shape, only used in conv1d\n */\nexport const createGroupedConvProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes,\n squeezeOutputShapeFunction?: (shape: readonly number[]) => number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const processBias = hasBias ? 
'value += b[output_channel];' : '';\n const xShape = inputs[0].dims;\n const wShape = inputs[1].dims;\n const outputChannelsPerGroup = wShape[0] / attributes.group;\n\n const isChannelLast = attributes.format === 'NHWC';\n const outputShape = calculateOutputShape(\n xShape, wShape, attributes.dilations, attributes.pads, attributes.strides, isChannelLast);\n const outputSize = ShapeUtil.size(outputShape);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: attributes.dilations},\n {type: DataType.uint32, data: [attributes.strides[0], attributes.strides[1]]},\n {type: DataType.uint32, data: [attributes.pads[0], attributes.pads[1]]},\n {type: DataType.uint32, data: outputChannelsPerGroup}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(xShape, wShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(attributes, output.type.value, baseType);\n const x = inputVariable('x', inputs[0].dataType, xShape.length);\n const w = inputVariable('w', inputs[1].dataType, wShape.length);\n const inputVars = [x, w];\n if (hasBias) {\n inputVars.push(inputVariable('b', inputs[2].dataType, inputs[2].dims.length));\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'dilations', type: 'u32', length: attributes.dilations.length},\n {name: 'strides', type: 'u32', length: 2}, {name: 'pads', type: 'u32', length: 2},\n {name: 'output_channels_per_group', type: 'u32'}\n ];\n appendActivationUniforms(attributes, uniforms);\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let batch: u32 = outputIndices[0];\n let output_channel: u32 = outputIndices[${isChannelLast ? 3 : 1}];\n let xRCCorner: vec2 = vec2(outputIndices[${isChannelLast ? 1 : 2}], outputIndices[${\n isChannelLast ? 2 : 3}]) * uniforms.strides - uniforms.pads;\n let group_id: u32 = output_channel / uniforms.output_channels_per_group;\n\n var value: ${output.type.value} = ${output.type.value}(0);\n for (var wInChannel: u32 = 0u; wInChannel < uniforms.w_shape[1]; wInChannel++) {\n let input_channel = group_id * uniforms.w_shape[1] + wInChannel;\n for (var wHeight: u32 = 0u; wHeight < uniforms.w_shape[2]; wHeight++) {\n let xHeight = xRCCorner.x + wHeight * uniforms.dilations[0];\n\n if (xHeight < 0u || xHeight >= uniforms.x_shape[${isChannelLast ? 1 : 2}]) {\n continue;\n }\n\n for (var wWidth: u32 = 0u; wWidth < uniforms.w_shape[3]; wWidth++) {\n let xWidth = xRCCorner.y + wWidth * uniforms.dilations[1];\n if (xWidth < 0u || xWidth >= uniforms.x_shape[${isChannelLast ? 2 : 3}]) {\n continue;\n }\n\n let xVal = ${\n isChannelLast ? 
x.get('batch', 'xHeight', 'xWidth', 'input_channel') :\n x.get('batch', 'input_channel', 'xHeight', 'xWidth')};\n let wVal = ${w.get('output_channel', 'wInChannel', 'wHeight', 'wWidth')};\n value += xVal*wVal;\n }\n }\n }\n ${processBias}\n ${applyActivation}\n ${output.setByOffset('global_idx', 'value')}\n }`;\n };\n return {\n name: 'GroupedConv',\n shaderCache: {hint: attributes.cacheKey, inputDependencies},\n getRunData: () => ({\n outputs: [{\n dims: squeezeOutputShapeFunction ? squeezeOutputShapeFunction(outputShape) : outputShape,\n dataType: inputs[0].dataType\n }],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nexport const createGroupedConvVectorizeProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvAttributes, outputShape: readonly number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n const components = getMaxComponents(outputShape[3]);\n const outputNumber = getMaxComponents(outputShape[2]);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n const xShape = [inputs[0].dims[0], inputs[0].dims[1], inputs[0].dims[2], inputs[0].dims[3] / components];\n const wShape = [inputs[1].dims[0], inputs[1].dims[1], inputs[1].dims[2], inputs[1].dims[3] / components];\n const outputShapeInShader = [outputShape[0], outputShape[1], outputShape[2], outputShape[3] / components];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize},\n {type: DataType.int32, data: [attributes.strides[0], attributes.strides[1]]},\n {type: DataType.int32, data: [attributes.pads[0], attributes.pads[1]]}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(xShape, wShape, outputShapeInShader));\n const xNumber = (outputNumber - 1) * attributes.strides[1] + wShape[1];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShapeInShader.length, components);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(attributes, output.type.value, baseType);\n const x = inputVariable('x', inputs[0].dataType, xShape.length, components);\n const w = inputVariable('w', inputs[1].dataType, wShape.length, components);\n const inputVars = [x, w];\n if (hasBias) {\n inputVars.push(inputVariable('b', inputs[2].dataType, inputs[2].dims, components));\n }\n const processBias = hasBias ? 
'value += b[output_channel];' : '';\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'},\n {name: 'strides', type: 'i32', length: 2},\n {name: 'pads', type: 'i32', length: 2},\n ];\n appendActivationUniforms(attributes, uniforms);\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVars, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let width0 = uniforms.output_shape[3];\n let output_channel = global_idx % width0;\n var index1 = global_idx / width0;\n let width1 = uniforms.output_shape[2] / ${outputNumber}u;\n let col = (index1 % width1) * ${outputNumber}u;\n index1 = index1 / width1;\n let row = index1 % uniforms.output_shape[1];\n let batch = index1 / uniforms.output_shape[1];\n\n let x_corner = vec2(i32(row), i32(col)) * uniforms.strides - uniforms.pads;\n\n var x_vals: array<${x.type.value}, ${xNumber}>;\n var values: array<${output.type.value}, ${outputNumber}>;\n let input_channel = output_channel;\n // Use constant instead of uniform can give better performance for w's height/width.\n for (var w_height: u32 = 0u; w_height < ${wShape[0]}; w_height++) {\n let x_height = x_corner.x + i32(w_height);\n if (x_height >= 0 && u32(x_height) < uniforms.x_shape[1]) {\n for (var i = 0; i < ${xNumber}; i++) {\n let x_width = x_corner.y + i;\n if (x_width >= 0 && u32(x_width) < uniforms.x_shape[2]) {\n x_vals[i] = ${x.get('batch', 'u32(x_height)', 'u32(x_width)', 'input_channel')};\n } else {\n x_vals[i] = ${x.type.value}(0);\n }\n }\n for (var w_width: u32 = 0u; w_width < ${wShape[1]}; w_width++) {\n let w_val = ${w.get('w_height', 'w_width', '0', 'output_channel')};\n for (var i = 0u; i < ${outputNumber}u; i++) {\n values[i] = fma(x_vals[i * u32(uniforms.strides[1]) + w_width], w_val, values[i]);\n }\n }\n }\n }\n\n for (var i = 0u; i < ${outputNumber}u; i++) {\n var value = values[i];\n ${processBias}\n ${applyActivation}\n ${output.set('batch', 'row', 'col + i', 'output_channel', 'value')};\n }\n }`;\n };\n\n return {\n name: 'GroupedConv-Vectorize',\n shaderCache: {\n hint: `${attributes.cacheKey};${components};${outputNumber};${xNumber};${wShape[0]};${wShape[1]}`,\n inputDependencies: hasBias ? ['rank', 'rank', 'type'] : ['rank', 'rank']\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createMatmulProgramInfo} from './3rd-party/matmul_packed_webgpu';\nimport {createTensorShapeVariables, getBroadcastDims, getMaxComponents, IndicesHelper, inputVariable, internalVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet, InternalActivationAttributes} from './fuse-utils';\n\nexport const createNaiveMatmulProgramInfo =\n (inputs: readonly TensorView[], activationAttributes: InternalActivationAttributes, outputShape: readonly number[],\n reshapedOutputShape?: readonly number[],\n isChannelsLast = false /* only used for conv2dByMatMul*/): ProgramInfo => {\n const aShape = inputs[0].dims;\n const bShape = inputs[1].dims;\n\n const M = aShape[aShape.length - 2];\n const N = bShape[bShape.length - 1];\n const K = aShape[aShape.length - 1];\n const components = getMaxComponents(N);\n const aComponents = getMaxComponents(K);\n const outputNumber = getMaxComponents(M);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n const hasBias = inputs.length > 2;\n const outerDims = reshapedOutputShape ? reshapedOutputShape.slice(0, -2) : outputShape.slice(0, -2);\n const batchSize = ShapeUtil.size(outerDims);\n const outputShapeInShader = [batchSize, M, N];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: M}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: K}\n ];\n appendActivationUniformsData(activationAttributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(outerDims, aShape, bShape));\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n }\n programUniforms.push(...createTensorShapeVariables(outputShapeInShader));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const batchDims = internalVariable('batch_dims', inputs[0].dataType, outerDims.length);\n const a = inputVariable('a', inputs[0].dataType, aShape.length, aComponents);\n const b = inputVariable('b', inputs[1].dataType, bShape.length, components);\n const output = outputVariable('output', inputs[0].dataType, outputShapeInShader.length, components);\n const baseType = tensorTypeToWsglStorageType(output.type.tensor);\n const applyActivation = getActivationSnippet(activationAttributes, output.type.value, baseType);\n const inputVariables = [a, b];\n let processBias = '';\n if (hasBias) {\n const biasComponents = isChannelsLast ? components : 1;\n inputVariables.push(inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, biasComponents));\n processBias = `${\n isChannelsLast ? 
`value += bias[col / ${biasComponents}];` :\n `value += ${output.type.value}(bias[row + i]);`}`;\n }\n\n const outerDimsA = aShape.slice(0, -2);\n const outerDimsB = bShape.slice(0, -2);\n const broadCastADims = getBroadcastDims(outerDimsA, outerDims);\n const broadCastBDims = getBroadcastDims(outerDimsB, outerDims);\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'M', type: 'u32'}, {name: 'N', type: 'u32'},\n {name: 'K', type: 'u32'}\n ];\n appendActivationUniforms(activationAttributes, uniforms);\n\n const getIndices = (variable: IndicesHelper, broadCastDims: number[]) => {\n const rank = variable.rank;\n const name = variable.name;\n if (rank === 2) {\n return `var ${name}_indices = ${variable.type.indices}(0u, 0u);`;\n }\n const batchRank = batchDims.rank;\n let resStr = `var ${name}_indices: ${variable.type.indices};`;\n for (let i = rank - 2 - 1, j = batchRank - 1; i >= 0; i--, j--) {\n resStr += `\\n${name}_indices[${i}] = ${batchRank > 1 ? `batch_indices[${j}]` : 'batch_indices'};`;\n }\n broadCastDims.forEach(i => {\n resStr += `\\n${name}_indices[${i}] = 0;`;\n });\n resStr += `${name}_indices[${rank - 2}] = 0u;\n ${name}_indices[${rank - 1}] = 0u;`;\n return resStr;\n };\n\n const calcResult = (): string => {\n let calcStr = `var a_data: ${a.type.value};`;\n for (let i = 0; i < aComponents; i++) {\n calcStr += `\n let b_data${i} = b[(b_offset + (k + ${i}) * uniforms.N + col) / ${components}];`;\n }\n for (let i = 0; i < outputNumber; i++) {\n calcStr += `a_data = a[(a_offset + (row + ${i}) * uniforms.K + k) / ${aComponents}];`;\n\n for (let j = 0; j < aComponents; j++) {\n calcStr += `\n values[${i}] = fma(${b.type.value}(a_data${aComponents === 1 ? '' : `[${j}]`}), b_data${j}, values[${\n i}]);\\n`;\n }\n }\n return calcStr;\n };\n\n return `\n ${\n shaderHelper.registerUniforms(uniforms).registerInternalVariables(batchDims).declareVariables(\n ...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let col = (global_idx % (uniforms.N / ${components})) * ${components};\n var index1 = global_idx / (uniforms.N / ${components});\n let stride1 = uniforms.M / ${outputNumber};\n let row = (index1 % stride1) * ${outputNumber};\n let batch = index1 / stride1;\n\n ${outputShape.length === 2 ? '' : `let batch_indices = ${batchDims.offsetToIndices('batch')};`}\n ${getIndices(a, broadCastADims)}\n let a_offset = ${a.indicesToOffset('a_indices')};\n ${getIndices(b, broadCastBDims)}\n let b_offset = ${b.indicesToOffset('b_indices')};\n var values: array<${output.type.value}, ${outputNumber}>;\n for (var k: u32 = 0u; k < uniforms.K; k = k + ${aComponents}) {\n ${calcResult()}\n }\n for (var i = 0u; i < ${outputNumber}u; i++) {\n var value = values[i];\n ${processBias}\n ${applyActivation}\n let cur_indices = ${output.type.indices}(batch, row + i, col);\n let offset = ${output.indicesToOffset('cur_indices')};\n ${output.setByOffset(`offset / ${components}`, 'value')};\n }\n }\n `;\n };\n return {\n name: 'MatMulNaive',\n shaderCache: {\n hint: `${activationAttributes.activation};${components};${aComponents};${outputNumber};${isChannelsLast}`,\n inputDependencies: hasBias ? 
['rank', 'rank', 'rank'] : ['rank', 'rank']\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource\n };\n };\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('MatMul requires 2 inputs.');\n }\n\n if (inputs[0].dims[inputs[0].dims.length - 1] !== inputs[1].dims[inputs[1].dims.length - 2]) {\n throw new Error('shared dimension does not match.');\n }\n};\n\nexport const matMul = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n const outputShape = BroadcastUtil.calcShape(context.inputs[0].dims, context.inputs[1].dims, true);\n if (!outputShape) {\n throw new Error('Can\\'t use matmul on the given tensors');\n }\n const N = outputShape[outputShape.length - 1];\n const K = context.inputs[0].dims[context.inputs[0].dims.length - 1];\n if (N < 8 && K < 8) {\n context.compute(createNaiveMatmulProgramInfo(context.inputs, {activation: ''}, outputShape));\n } else {\n context.compute(createMatmulProgramInfo(context.inputs, {activation: ''}, outputShape));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {PoolConvUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext} from '../types';\n\nimport {createConv2DMatMulProgramInfo} from './3rd-party/conv2d_mm_webgpu';\nimport {createMatmulProgramInfo} from './3rd-party/matmul_packed_webgpu';\nimport {createGroupedConvProgramInfo, createGroupedConvVectorizeProgramInfo} from './conv-grouped';\nimport {InternalActivationAttributes, parseInternalActivationAttributes} from './fuse-utils';\nimport {createNaiveMatmulProgramInfo} from './matmul';\nimport {createTransposeProgramInfo} from './transpose';\n\nexport const calculateOutputShape =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[],\n adjustPads: readonly number[], strides: readonly number[], isChannelLast: boolean): number[] => {\n const batchSize = inputShape[0];\n const inputSpatialShape = inputShape.slice(isChannelLast ? 1 : 2, isChannelLast ? 3 : 4);\n const spatialRank = inputSpatialShape.length;\n const outChannels = kernelShape[0];\n const kernelSpatialShape = kernelShape.slice(2);\n const dilatedKernelShape = kernelSpatialShape.map((v, i) => v + (v - 1) * (dilations[i] - 1));\n const inputSpatialShapeWithPad = inputSpatialShape.map((v, i) => v + adjustPads[i] + adjustPads[i + spatialRank]);\n const outputShape =\n inputSpatialShapeWithPad.map((v, i) => Math.floor((v - dilatedKernelShape[i] + strides[i]) / strides[i]));\n outputShape.splice(0, 0, batchSize);\n outputShape.splice(isChannelLast ? 
3 : 1, 0, outChannels);\n return outputShape;\n };\n\nexport interface ConvAttributes extends InternalActivationAttributes, AttributeWithCacheKey {\n readonly autoPad: string;\n readonly dilations: readonly number[];\n readonly format: 'NHWC'|'NCHW';\n readonly group: number;\n readonly kernelShape: readonly number[];\n readonly pads: readonly number[];\n readonly strides: readonly number[];\n readonly wIsConst: boolean;\n}\n\n// for transposing weight tensor from [M, C/group, KH, KW] to [KH, KW, C/group, M]\nconst weightTransposeAttribute = [2, 3, 1, 0];\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/master/docs/Operators.md#Conv\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n // TODO : Need to add support for multi-dimensional conv\n if (inputs[0].dims.length !== 4 && inputs[0].dims.length !== 3) {\n throw new Error('currently only support conv 1D and 2D');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error('filter does not have same dimension as input');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = inputs[0].dims[attributes.format === 'NHWC' ? inputs[0].dims.length - 1 : 1];\n const filterInChannel = inputs[1].dims[1] * attributes.group;\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[1].dims[0] !== inputs[2].dims[0])) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n // wrong dilations dimension\n if (attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n // Wrong strides dimension\n if (attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n if (attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n if (attributes.kernelShape.length !== 0 && attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n};\n\nconst getAdjustedConvAttributes = (attributes: T, inputs: readonly TensorView[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n if (kernelShape[i - 2] === 0) {\n kernelShape[i - 2] = inputs[1].dims[i];\n }\n }\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPadsBasedOnAutoPad(\n inputs[0].dims, attributes.strides, attributes.dilations, kernelShape, pads, attributes.format === 'NHWC',\n attributes.autoPad);\n\n // always return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads});\n return newAttributes;\n};\n\nexport const parseConvAttributes = (attributes: Record): ConvAttributes => {\n const activationAttributes = 
parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default attributes for multi-dimensional conv\n const format = attributes.format as 'NHWC' | 'NCHW';\n const autoPad = ['NOTSET', 'VALID', 'SAME_UPPER', 'SAME_LOWER'][attributes.auto_pad as number];\n const dilations = attributes.dilations as [number, number];\n const group = attributes.group as number;\n const kernelShape = attributes.kernel_shape as [number, number];\n const pads = attributes.pads as [number, number, number, number];\n const strides = attributes.strides as [number, number];\n const wIsConst = (attributes.w_is_const as () => boolean)();\n\n return {\n autoPad,\n format,\n dilations,\n group,\n kernelShape,\n pads,\n strides,\n wIsConst,\n ...activationAttributes,\n cacheKey: `${attributes.format};${activationAttributes.activation};`\n };\n};\n\nconst conv2d = (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvAttributes): void => {\n const adjustedAttributes = getAdjustedConvAttributes(attributes, inputs);\n\n // check attributes\n\n // const hasPreluActivationWeights = false; /* TODO: add support for prelu activation weights */\n const isChannelsLast = attributes.format === 'NHWC';\n if (attributes.group !== 1) {\n // NVIDIA GPU with ampere architecture fails with below 2 cases, but we couldn't repro them with any other\n // GPUs. So just disable vectorize on NVIDIA ampere to ensure always correct outputs.\n // [webgpu]Conv - conv - vectorize group - B\n // [webgpu]Conv - conv - vectorize group - D\n const enableGroupedConvVectorize = !context.adapterInfo.isArchitecture('ampere');\n if (enableGroupedConvVectorize && isChannelsLast && inputs[1].dims[0] === attributes.group &&\n inputs[1].dims[1] === 1 && attributes.dilations[0] === 1 && attributes.dilations[1] === 1) {\n const outputShape = calculateOutputShape(\n inputs[0].dims, inputs[1].dims, attributes.dilations, adjustedAttributes.pads, attributes.strides,\n isChannelsLast);\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n const convInputs = [inputs[0], transposedWeight];\n if (inputs.length === 3) {\n convInputs.push(inputs[2]);\n }\n context.compute(\n createGroupedConvVectorizeProgramInfo(convInputs, adjustedAttributes, outputShape), {inputs: convInputs});\n } else {\n context.compute(createGroupedConvProgramInfo(inputs, adjustedAttributes));\n }\n return;\n }\n\n const hasBias = inputs.length === 3;\n const inputHeight = inputs[0].dims[isChannelsLast ? 1 : 2];\n const inputWidth = inputs[0].dims[isChannelsLast ? 2 : 3];\n const inputChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n const weightHeight = inputs[1].dims[2];\n const weightWidth = inputs[1].dims[3];\n\n const outputShape = calculateOutputShape(\n inputs[0].dims, inputs[1].dims, attributes.dilations, adjustedAttributes.pads, attributes.strides,\n isChannelsLast);\n const outHeight = outputShape[isChannelsLast ? 1 : 2];\n const outWidth = outputShape[isChannelsLast ? 2 : 3];\n const outChannels = outputShape[isChannelsLast ? 
3 : 1];\n\n const sameSize = isChannelsLast && weightHeight === inputHeight && weightWidth === inputWidth &&\n attributes.pads[0] === 0 && attributes.pads[1] === 0;\n if (sameSize ||\n (weightHeight === 1 && weightWidth === 1 && attributes.dilations[0] === 1 && attributes.dilations[1] === 1 &&\n attributes.strides[0] === 1 && attributes.strides[1] === 1 && attributes.pads[0] === 0 &&\n attributes.pads[1] === 0)) {\n // conv2dByMatMul\n const batch = outputShape[0];\n let xReshaped, wReshaped, matmulOutputShape;\n const matmulInputs = [];\n if (isChannelsLast) {\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n if (sameSize) {\n const sharedDim = inputHeight * inputWidth * inputChannels;\n xReshaped = inputs[0].reshape([1, batch, sharedDim]);\n wReshaped = transposedWeight.reshape([1, sharedDim, outChannels]);\n matmulOutputShape = [1, batch, outChannels];\n } else {\n xReshaped = inputs[0].reshape([batch, inputHeight * inputWidth, inputChannels]);\n wReshaped = transposedWeight.reshape([1, inputChannels, outChannels]);\n matmulOutputShape = [batch, outHeight * outWidth, outChannels];\n }\n matmulInputs.push(xReshaped);\n matmulInputs.push(wReshaped);\n } else {\n xReshaped = inputs[0].reshape([batch, inputChannels, inputHeight * inputWidth]);\n wReshaped = inputs[1].reshape([1, outChannels, inputChannels]);\n matmulOutputShape = [batch, outChannels, outHeight * outWidth];\n matmulInputs.push(wReshaped);\n matmulInputs.push(xReshaped);\n }\n if (hasBias) {\n matmulInputs.push(inputs[2]);\n }\n const N = matmulOutputShape[2];\n const K = matmulInputs[0].dims[matmulInputs[0].dims.length - 1];\n // Tune the threshold.\n if (N < 8 && K < 8) {\n context.compute(\n createNaiveMatmulProgramInfo(\n matmulInputs, adjustedAttributes, outputShape, matmulOutputShape, isChannelsLast),\n {inputs: matmulInputs});\n } else {\n context.compute(\n createMatmulProgramInfo(matmulInputs, adjustedAttributes, outputShape, matmulOutputShape, isChannelsLast),\n {inputs: matmulInputs});\n }\n return;\n }\n\n // TODO: implement conv2dWithIm2Col()\n\n const sequentialAccessByThreads = /* backend.adapterInfo.isIntel() */ true;\n\n // STEP.1: transpose weight\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposeAttribute),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n\n // STEP.2: prepare reshaped inputs\n const convInputs = [inputs[0], transposedWeight];\n if (hasBias) {\n convInputs.push(inputs[2]);\n }\n\n // STEP.3: compute matmul\n const dimAOuter = isChannelsLast ? outHeight * outWidth : outChannels;\n const dimBOuter = isChannelsLast ? 
outChannels : outHeight * outWidth;\n const dimInner = weightHeight * weightWidth * inputChannels;\n context.compute(\n createConv2DMatMulProgramInfo(\n convInputs, adjustedAttributes, outputShape, dimAOuter, dimBOuter, dimInner, hasBias,\n sequentialAccessByThreads),\n {inputs: convInputs});\n};\n\nconst conv1d = (context: ComputeContext, attributes: ConvAttributes): void => {\n // extend the input to 2D by adding H dimension\n const isChannelLast = attributes.format === 'NHWC';\n const inputs = [\n context.inputs[0].reshape(\n isChannelLast ?\n // [N, W, C] -> [N, H=1, W, C]\n [context.inputs[0].dims[0], 1, context.inputs[0].dims[1], context.inputs[0].dims[2]] :\n // [N, C, W] -> [N, C, H=1, W]\n [context.inputs[0].dims[0], context.inputs[0].dims[1], 1, context.inputs[0].dims[2]]),\n //[FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kW] -> [FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kH=1, kW]\n context.inputs[1].reshape([context.inputs[1].dims[0], context.inputs[1].dims[1], 1, context.inputs[1].dims[2]])\n ];\n if (context.inputs.length === 3) {\n inputs.push(context.inputs[2]);\n }\n const pads = [0, attributes.pads[0], 0, attributes.pads[1]];\n const strides = [1].concat(attributes.strides);\n const dilations = [1].concat(attributes.dilations);\n const kernelShape = [1].concat(attributes.kernelShape);\n const adjustedAttributes = getAdjustedConvAttributes({...attributes, pads, strides, dilations, kernelShape}, inputs);\n context.compute(createGroupedConvProgramInfo(\n inputs, adjustedAttributes,\n outputShape => isChannelLast ? [outputShape[0], outputShape[2], outputShape[3]] : []));\n};\n\nexport const conv = (context: ComputeContext, attributes: ConvAttributes): void => {\n validateInputs(context.inputs, attributes); // currently will fail if not conv1D/2D\n if (context.inputs[0].dims.length === 3) {\n conv1d(context, attributes);\n } else {\n conv2d(context, context.inputs, attributes);\n }\n};\n", "/**\n * @license\n * Copyright 2021 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv_backprop_mm_webgpu.ts\n//\n// modified to fit the needs of the project\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvTransposeAttributes} from '../conv-transpose';\nimport {appendActivationUniforms, appendActivationUniformsData, getActivationSnippet} from '../fuse-utils';\n\nimport {biasSnippet} from './activation_util';\nimport {utilFunctions} from './conv_util';\nimport {makeMatMulPackedSource, makeMatMulPackedVec4Source} from './matmul_packed_webgpu';\n\nconst conv2dTransposeCommonSnippet =\n (isChannelsLast: boolean, addBias = false, attributes: ConvTransposeAttributes, type: string,\n innerElementSize = 4): string => {\n const getWSnippet = (innerElementSize: number) => {\n switch (innerElementSize) {\n case 1:\n return 'return w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];';\n case 4:\n return `\n let coord1 = vec4(coordX, coordY, col + 1, rowInner);\n let coord2 = vec4(coordX, coordY, col + 2, rowInner);\n let coord3 = vec4(coordX, coordY, col + 3, rowInner);\n let v0 = w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];\n let v1 = w[getIndexFromCoords4D(coord1, vec4(uniforms.w_shape))];\n let v2 = w[getIndexFromCoords4D(coord2, vec4(uniforms.w_shape))];\n let v3 = w[getIndexFromCoords4D(coord3, vec4(uniforms.w_shape))];\n return ${type}(v0, v1, v2, v3);\n `;\n default:\n throw new Error(`innerElementSize ${innerElementSize} is not supported.`);\n }\n };\n const coordASnippet = isChannelsLast ? `\n let coord = vec4(batch, iXR, iXC, xCh);\n ` :\n `\n let coord = vec4(batch, xCh, iXR, iXC);\n `;\n\n const coordResSnippet = isChannelsLast ? `\n let coords = vec4(\n batch,\n row / outWidth,\n row % outWidth,\n col);\n ` :\n `\n let coords = vec4(\n batch,\n row,\n col / outWidth,\n col % outWidth);\n `;\n\n const xHeight = isChannelsLast ? 'i32(uniforms.x_shape[1])' : 'i32(uniforms.x_shape[2])';\n const xWidth = isChannelsLast ? 'i32(uniforms.x_shape[2])' : 'i32(uniforms.x_shape[3])';\n const row = isChannelsLast ? 'row' : 'col';\n const col = isChannelsLast ? 'col' : 'row';\n\n const readASnippet = `\n let inChannels = ${isChannelsLast ? 'i32(uniforms.x_shape[3])' : 'i32(uniforms.x_shape[1])'};\n let outWidth = ${isChannelsLast ? 
'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n let outRow = ${row} / outWidth;\n let outCol = ${row} % outWidth;\n\n let WRow = ${col} / (uniforms.filter_dims[1] * inChannels);\n let WCol = ${col} / inChannels % uniforms.filter_dims[1];\n let xR = f32(outRow - uniforms.pads[0] + uniforms.dilations[0] * WRow) / f32(uniforms.strides[0]);\n let xC = f32(outCol - uniforms.pads[1] + uniforms.dilations[1] * WCol) / f32(uniforms.strides[1]);\n if (xR < 0.0 || xR >= f32(${xHeight}) || fract(xR) > 0.0) {\n return ${type}(0.0);\n }\n if (xC < 0.0 || xC >= f32(${xWidth}) || fract(xC) > 0.0) {\n return ${type}(0.0);\n }\n let iXR = i32(xR);\n let iXC = i32(xC);\n let xCh = ${col} % inChannels;\n ${coordASnippet}\n return x[getIndexFromCoords4D(coord, vec4(uniforms.x_shape))/${innerElementSize}];`;\n\n const sampleA = isChannelsLast ? `\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) {\n ${readASnippet}\n }\n return ${type}(0.0);` :\n `\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) {\n ${readASnippet}\n }\n return ${type}(0.0);`;\n\n const sampleW = `\n let col = colIn * ${innerElementSize};\n let inChannels = ${isChannelsLast ? 'i32(uniforms.x_shape[3])' : 'i32(uniforms.x_shape[1])'};\n let coordX = uniforms.filter_dims[0] - 1 - row / (uniforms.filter_dims[1] * inChannels);\n let coordY = uniforms.filter_dims[1] - 1 - (row / inChannels) % uniforms.filter_dims[1];\n if (${\n isChannelsLast ? 'row < uniforms.dim_inner && col < uniforms.dim_b_outer' :\n 'row < uniforms.dim_inner && col < uniforms.dim_a_outer'} && coordX >= 0 && coordY >= 0) {\n let rowInner = row % inChannels;\n let coord = vec4(coordX, coordY, col, rowInner);\n ${getWSnippet(innerElementSize)}\n }\n return ${type}(0.0);\n `;\n\n const applyActivation = getActivationSnippet(attributes, type);\n const userCode = `\n fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${type} {\n ${isChannelsLast ? sampleA : sampleW}\n }\n\n fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${type} {\n ${isChannelsLast ? sampleW : sampleA}\n }\n\n fn mm_write(batch: i32, row : i32, colIn : i32, valueInput : ${type}) {\n let col = colIn * ${innerElementSize};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) {\n var value = valueInput;\n let outWidth = ${isChannelsLast ? 'i32(uniforms.result_shape[2])' : 'i32(uniforms.result_shape[3])'};\n ${coordResSnippet}\n ${biasSnippet(addBias)}\n ${applyActivation}\n result[getIndexFromCoords4D(coords, vec4(uniforms.result_shape))/${innerElementSize}] = value;\n }\n }`;\n return userCode;\n };\n\nexport const createConv2DTransposeMatMulProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvTransposeAttributes, outputShape: readonly number[],\n dimAOuter: number, dimBOuter: number, dimInner: number, hasBias: boolean,\n sequentialAccessByThreads: boolean): ProgramInfo => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inChannels = isChannelsLast ? inputs[0].dims[3] : inputs[0].dims[1];\n const batchSize = outputShape[0];\n const outWidth = isChannelsLast ? outputShape[2] : outputShape[3];\n const outHeight = isChannelsLast ? outputShape[1] : outputShape[2];\n const outChannels = isChannelsLast ? outputShape[3] : outputShape[1];\n // TODO: enable vec4 for NCHW\n const isVec4 = isChannelsLast && (inChannels % 4 === 0 && inChannels % 3) && outChannels % 4 === 0;\n\n // TODO: fine tune size\n const dispatchX = isChannelsLast ? 
outChannels : outWidth * outHeight;\n const dispatchY = isChannelsLast ? outWidth * outHeight : outChannels;\n const workGroupSize: [number, number, number] = [8, 8, 1];\n const elementsPerThread = dimAOuter <= 8 ? [4, 1, 1] : [4, 4, 1];\n const dispatch = [\n Math.ceil(dispatchX / workGroupSize[0] / elementsPerThread[0]),\n Math.ceil(dispatchY / workGroupSize[1] / elementsPerThread[1]),\n Math.ceil(batchSize / workGroupSize[2] / elementsPerThread[2])\n ];\n\n LOG_DEBUG('verbose', () => `[conv_backprop_mm_webgpu] dispatch = ${dispatch}`);\n\n const innerElementSize = isVec4 ? 4 : 1;\n const tileInner = Math.max(workGroupSize[0] * innerElementSize, workGroupSize[1]);\n const components = isVec4 ? 4 : 1;\n const filterDims =\n [attributes.kernelShape[isChannelsLast ? 1 : 2], attributes.kernelShape[isChannelsLast ? 2 : 3]];\n const effectiveFilterDims = [\n filterDims[0] + (attributes.dilations[0] <= 1 ? 0 : (filterDims[0] - 1) * (attributes.dilations[0] - 1)),\n filterDims[1] + (attributes.dilations[1] <= 1 ? 0 : (filterDims[1] - 1) * (attributes.dilations[1] - 1))\n ];\n const pads = [\n effectiveFilterDims[0] - 1 - Math.floor((attributes.pads[0] + attributes.pads[2]) / 2),\n effectiveFilterDims[1] - 1 - Math.floor((attributes.pads[1] + attributes.pads[3]) / 2)\n ];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.int32, data: dimAOuter}, {type: DataType.int32, data: dimBOuter},\n {type: DataType.int32, data: dimInner}, {type: DataType.int32, data: attributes.strides},\n {type: DataType.int32, data: attributes.dilations}, {type: DataType.int32, data: filterDims},\n {type: DataType.int32, data: pads}\n ];\n appendActivationUniformsData(attributes, programUniforms);\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, inputs[1].dims));\n\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const x = inputVariable('x', inputs[0].dataType, inputs[0].dims.length, components);\n const w = inputVariable('w', inputs[1].dataType, inputs[1].dims.length, 1);\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n const inputVariables = [x, w];\n\n let declareFunctions = '';\n if (hasBias) {\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims.length, components);\n inputVariables.push(bias);\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${bias.type.value} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? 
'/ 4' : ''}];\n }`;\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'dim_a_outer', type: 'i32'}, {name: 'dim_b_outer', type: 'i32'}, {name: 'dim_inner', type: 'i32'},\n {name: 'strides', type: 'i32', length: 2}, {name: 'dilations', type: 'i32', length: 2},\n {name: 'filter_dims', type: 'i32', length: filterDims.length},\n {name: 'pads', type: 'i32', length: pads.length}\n ];\n appendActivationUniforms(attributes, uniforms);\n const elemType = tensorTypeToWsglStorageType(inputs[0].dataType, 1);\n if (elemType !== 'f16' && elemType !== 'f32') {\n throw new Error(`elemType ${elemType} is not supported.`);\n }\n return `\n ${utilFunctions('uniforms.result_strides')}\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)};\n ${declareFunctions}\n ${conv2dTransposeCommonSnippet(isChannelsLast, hasBias, attributes, x.type.value, innerElementSize)}\n ${\n isVec4 ? makeMatMulPackedVec4Source(\n elementsPerThread, workGroupSize, elemType, undefined, !isChannelsLast, tileInner) :\n makeMatMulPackedSource(\n elementsPerThread, workGroupSize, elemType, undefined, !isChannelsLast, tileInner, false,\n undefined, sequentialAccessByThreads)}`;\n };\n\n return {\n name: 'Conv2DTransposeMatMul',\n shaderCache:\n {hint: `${attributes.cacheKey};${elementsPerThread};${workGroupSize};${isVec4}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n programUniforms\n }),\n getShaderSource\n };\n };\n", "/**\n * @license\n * Copyright 2021 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n\n// sampled from [@tensorflow/tfjs] tfjs-backend-webgpu/src/conv_backprop_webgpu.ts\n\nimport {DataType} from '../../../../wasm-common';\nimport {LOG_DEBUG} from '../../../log';\nimport {TensorView} from '../../../tensor-view';\nimport {ShapeUtil} from '../../../util';\nimport {ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../../types';\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from '../common';\nimport {ConvTransposeAttributes} from '../conv-transpose';\n\nconst createConvTranspose2DOpProgramShaderSource =\n (shaderHelper: ShaderHelper, inputs: readonly TensorView[], outputShape: readonly number[], hasBias: boolean,\n is1DimensionDispatch: boolean, isVec4 = false, dataType: string, uniforms: UniformsArrayType,\n isChannelsLast = false): string => {\n const rowDim = isChannelsLast ? 1 : 2;\n const colDim = isChannelsLast ? 2 : 3;\n const channelDim = isChannelsLast ? 3 : 1;\n const workPerThread = isVec4 ? 2 : 1;\n\n let declareFunctions = `\n fn setOutputAtIndex(flatIndex : u32, value : ${isVec4 ? `vec4<${dataType}>` : dataType}) {\n result[flatIndex] = ${isVec4 ? 
`vec4<${dataType}>` : dataType}(value);\n }`;\n if (hasBias) {\n declareFunctions += `\n fn getBiasByOutputCoords(coords : vec4) -> ${isVec4 ? `vec4<${dataType}>` : dataType} {\n return bias[coords.${isChannelsLast ? 'w' : 'y'}${isVec4 ? '/ 4' : ''}];\n }`;\n }\n const components = isVec4 ? 4 : 1;\n const w = inputVariable('W', inputs[1].dataType, inputs[1].dims.length, components);\n const dy = inputVariable('Dy', inputs[0].dataType, inputs[0].dims.length, components);\n const inputVariables = [dy, w];\n if (hasBias) {\n inputVariables.push(inputVariable('bias', inputs[2].dataType, [outputShape[channelDim]].length, components));\n }\n const output = outputVariable('result', inputs[0].dataType, outputShape.length, components);\n\n const codeSnippet4 = `{\n let batch: u32 = ${is1DimensionDispatch ? 'global_id.z' : 'workgroup_id.z'} / uniforms.result_shape[1];\n let r = ${is1DimensionDispatch ? 'global_id.z' : 'workgroup_id.z'} % uniforms.result_shape[1];\n let c = ${is1DimensionDispatch ? 'global_id.y' : 'workgroup_id.y'} * ${workPerThread};\n let d1: u32 = ${is1DimensionDispatch ? 'global_id.x' : 'workgroup_id.x'} * 4;\n\n let dyCorner = vec2(i32(r), i32(c)) - vec2(uniforms.pads);\n\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. : = across all values in that axis.\n var dotProd: array, ${workPerThread}>;\n for (var i = 0; i < ${workPerThread}; i++) {\n dotProd[i] = vec4<${dataType}>(0.0);\n }\n for (var wR: u32 = 0; wR < uniforms.filter_dims[0]; wR = wR + 1) {\n var dyR = (${dataType}(dyCorner.x) + ${dataType}(wR)) / ${dataType}(uniforms.strides.x);\n let wRPerm = uniforms.filter_dims[0] - 1 - wR;\n if (dyR < 0.0 || dyR >= ${dataType}(uniforms.Dy_shape[1]) ||\n fract(dyR) > 0.0 || wRPerm < 0) {\n continue;\n }\n let idyR: u32 = u32(dyR);\n\n for (var wC: u32 = 0; wC < uniforms.filter_dims[1]; wC = wC + 1) {\n let dyC = (${dataType}(dyCorner.y) + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let dyC2 = (${dataType}(dyCorner.y) + 1.0 + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let wCPerm = uniforms.filter_dims[1] - 1 - wC;\n if (wCPerm < 0) {\n continue;\n }\n var bDyCVal = true;\n var bDyCVal2 = true;\n if (dyC < 0.0 || dyC >= ${dataType}(uniforms.Dy_shape[2]) ||\n fract(dyC) > 0.0) {\n bDyCVal = false;\n }\n if (dyC2 < 0.0 || dyC2 >= ${dataType}(uniforms.Dy_shape[2]) ||\n fract(dyC2) > 0.0) {\n bDyCVal2 = false;\n }\n\n let idyC: u32 = u32(dyC);\n let idyC2: u32 = u32(dyC2);\n if (bDyCVal && bDyCVal2) {\n let d2Length = uniforms.Dy_shape[3];\n for (var d2 :u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[0] = dotProd[0] + tmpval;\n\n xValue = ${dy.get('batch', 'idyR', 'idyC2', 'd2')};\n\n dotProd[1] = dotProd[1] + vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n }\n } else if (bDyCVal) {\n let d2Length = uniforms.Dy_shape[${channelDim}];\n for (var d2: u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = 
${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[0] = dotProd[0] + tmpval;\n }\n } else if (bDyCVal2) {\n let d2Length = uniforms.Dy_shape[3];\n for (var d2: u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1', 'd2')};\n let wValue1 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 1', 'd2')};\n let wValue2 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 2', 'd2')};\n let wValue3 = ${w.get('u32(wRPerm)', 'u32(wCPerm)', 'd1 + 3', 'd2')};\n\n var xValue = ${dy.get('batch', 'idyR', 'idyC2', 'd2')};\n let tmpval = vec4<${dataType}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[1] = dotProd[1] + tmpval;\n }\n }\n }\n }\n\n for (var i: u32 = 0; i < ${workPerThread}; i = i + 1) {\n let value = dotProd[i] + ${hasBias ? 'bias[c+i]' : `vec4<${dataType}>(0.0)`};\n ${output.set('batch', 'r', 'c + i', 'd1', 'value')};\n }\n }`;\n const codeSnippet = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let batch = ${output.indicesGet('outputIndices', 0)};\n let d1 = ${output.indicesGet('outputIndices', channelDim)};\n let r = ${output.indicesGet('outputIndices', rowDim)};\n let c = ${output.indicesGet('outputIndices', colDim)};\n let dyCorner = vec2(i32(r), i32(c)) - uniforms.pads;\n let dyRCorner = dyCorner.x;\n let dyCCorner = dyCorner.y;\n let groupId = d1 / uniforms.output_channels_per_group;\n let wOutChannel = d1 - groupId * uniforms.output_channels_per_group;\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. : = across all values in that axis.\n var dotProd = ${dataType}(0.0);\n for (var wR: u32 = 0; wR < uniforms.effective_filter_dims.x; wR = wR + 1) {\n if (wR % uniforms.dilations.x != 0) {\n continue;\n }\n let dyR = (${dataType}(dyRCorner) + ${dataType}(wR)) / ${dataType}(uniforms.strides[0]);\n let wRPerm = uniforms.filter_dims.x - 1 - wR / uniforms.dilations.x;\n if (dyR < 0.0 || dyR >= ${dataType}(uniforms.Dy_shape[${rowDim}]) || fract(dyR) > 0.0 ||\n wRPerm < 0) {\n continue;\n }\n let idyR: u32 = u32(dyR);\n\n for (var wC: u32 = 0; wC < uniforms.effective_filter_dims.y; wC = wC + 1) {\n if (wC % uniforms.dilations.y != 0) {\n continue;\n }\n let dyC = (${dataType}(dyCCorner) + ${dataType}(wC)) / ${dataType}(uniforms.strides.y);\n let wCPerm = uniforms.filter_dims.y - 1 - wC / uniforms.dilations.y;\n if (dyC < 0.0 || dyC >= ${dataType}(uniforms.Dy_shape[${colDim}]) ||\n fract(dyC) > 0.0 || wCPerm < 0) {\n continue;\n }\n let idyC: u32 = u32(dyC);\n var inputChannel = groupId * uniforms.input_channels_per_group;\n for (var d2: u32 = 0; d2 < uniforms.input_channels_per_group; d2 = d2 + 1) {\n let xValue = ${\n isChannelsLast ? dy.get('batch', 'idyR', 'idyC', 'inputChannel') :\n dy.get('batch', 'inputChannel', 'idyR', 'idyC')};\n let wValue = ${w.get('inputChannel', 'wOutChannel', 'u32(wRPerm)', 'u32(wCPerm)')};\n dotProd = dotProd + xValue * wValue;\n inputChannel = inputChannel + 1;\n }\n }\n }\n let value = dotProd + ${hasBias ? 
'bias[d1]' : `${dataType}(0.0)`};\n ${output.setByOffset('global_idx', 'value')};\n `;\n\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${declareFunctions}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')};\n ${isVec4 ? codeSnippet4 : codeSnippet}}`;\n };\n\nexport const createConvTranspose2DProgramInfo =\n (inputs: readonly TensorView[], attributes: ConvTransposeAttributes,\n squeezeOutputShapeFunction?: (shape: readonly number[]) => number[]): ProgramInfo => {\n const hasBias = inputs.length > 2;\n // const isChannelsLast = attributes.format === 'NHWC';\n const outputShape = attributes.outputShape;\n const outputSize = ShapeUtil.size(outputShape);\n\n // const inChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n // TODO Enable isVec4 for performance\n // Disabled due to weight matrix layout issue\n // const isVec4 = attributes.group === 1 && isChannelsLast && inChannels % 4 === 0 && outChannels % 4 === 0;\n const dispatch = [\n Math.ceil(outputSize / 64),\n 1,\n 1,\n ];\n LOG_DEBUG('verbose', () => `[conv2d_backprop_webgpu] dispatch = ${dispatch}`);\n\n const isChannelsLast = attributes.format === 'NHWC';\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n const strides = [attributes.strides[0], attributes.strides[1]];\n const filterDims =\n [attributes.kernelShape[isChannelsLast ? 1 : 2], attributes.kernelShape[isChannelsLast ? 2 : 3]];\n const dilations = [attributes.dilations[0], attributes.dilations[1]];\n const effectiveFilterDims = [\n filterDims[0] +\n (attributes.dilations[0] <= 1 ?\n 0 :\n (attributes.kernelShape[isChannelsLast ? 1 : 2] - 1) * (attributes.dilations[0] - 1)),\n filterDims[1] +\n (attributes.dilations[1] <= 1 ?\n 0 :\n (attributes.kernelShape[isChannelsLast ? 
2 : 3] - 1) * (attributes.dilations[1] - 1))\n ];\n const pads = [\n effectiveFilterDims[0] - 1 - Math.floor((attributes.pads[0] + attributes.pads[2]) / 2),\n effectiveFilterDims[1] - 1 - Math.floor(attributes.pads[1] + attributes.pads[3]) / 2\n ];\n\n const isVec4 = false;\n const group = attributes.group;\n const wShape = inputs[1].dims;\n const inputChannelsPerGroup = wShape[0] / group;\n const outputChannelsPerGroup = wShape[1];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: strides},\n {type: DataType.uint32, data: filterDims}, {type: DataType.uint32, data: dilations},\n {type: DataType.uint32, data: effectiveFilterDims}, {type: DataType.int32, data: pads},\n {type: DataType.uint32, data: inputChannelsPerGroup}, {type: DataType.uint32, data: outputChannelsPerGroup},\n ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims)\n ];\n if (hasBias) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const is1DimensionDispatch = dispatch[1] === 1 && dispatch[2] === 1;\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'strides', type: 'u32', length: strides.length},\n {name: 'filter_dims', type: 'u32', length: filterDims.length},\n {name: 'dilations', type: 'u32', length: filterDims.length},\n {name: 'effective_filter_dims', type: 'u32', length: effectiveFilterDims.length},\n {name: 'pads', type: 'i32', length: pads.length}, {name: 'input_channels_per_group', type: 'u32'},\n {name: 'output_channels_per_group', type: 'u32'}\n ];\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n return `${\n createConvTranspose2DOpProgramShaderSource(\n shaderHelper, inputs, outputShape, hasBias, is1DimensionDispatch, isVec4, dataType, uniforms,\n isChannelsLast)}`;\n };\n return {\n name: 'ConvTranspose2D',\n shaderCache: {hint: `${attributes.cacheKey};`, inputDependencies},\n getRunData: () => ({\n dispatchGroup: {x: dispatch[0], y: dispatch[1], z: dispatch[2]},\n outputs: [{\n dims: squeezeOutputShapeFunction ? squeezeOutputShapeFunction(outputShape) : outputShape,\n dataType: inputs[0].dataType\n }],\n programUniforms\n }),\n getShaderSource\n };\n };\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {TensorView} from '../../tensor-view';\nimport {ComputeContext} from '../types';\n\nimport {createConv2DTransposeMatMulProgramInfo} from './3rd-party/conv_backprop_mm_webgpu';\nimport {createConvTranspose2DProgramInfo} from './3rd-party/conv_backprop_webgpu';\nimport {ConvAttributes} from './conv';\nimport {parseInternalActivationAttributes} from './fuse-utils';\nimport {createTransposeProgramInfo} from './transpose';\n\nconst computeTotalPad =\n (inDim: number, stride: number, adj: number, kernel: number, dilation: number, outSize: number) =>\n (inDim - 1) * stride + adj + (kernel - 1) * dilation + 1 - outSize;\n\nconst distributePadding = (totalPad: number, autoPad: string, pads: number[], head: number, tail: number) => {\n const smallPad = Math.floor(totalPad / 2);\n if (autoPad === 'SAME_UPPER') {\n pads[head] = smallPad;\n pads[tail] = totalPad - smallPad;\n } else if (autoPad === 'SAME_LOWER') {\n pads[head] = totalPad - smallPad;\n pads[tail] = smallPad;\n }\n};\n\nconst calculateOutputShapeAndPads =\n (inputShape: readonly number[], kernelShape: readonly number[], dilations: readonly number[], autoPad: string,\n group: number, pads: number[], strides: readonly number[], isChannelLast: boolean, outputPadding: number[],\n outputShape: number[]) => {\n const spatialRank = inputShape.length - 2;\n const updateOutputShape = outputShape.length === 0;\n if (outputPadding.length === 0) {\n for (let i = 0; i < spatialRank; ++i) {\n outputPadding.push(0);\n }\n }\n const batchSize = inputShape[0];\n const outChannels = kernelShape[isChannelLast ? 3 : 1] * group;\n for (let i = 0, j = inputShape.length - spatialRank - (isChannelLast ? 1 : 0); i < spatialRank; ++i, ++j) {\n const inSize = inputShape[j];\n const outSize = updateOutputShape ? inSize * strides[i] : outputShape[i];\n const totalPad = computeTotalPad(inSize, strides[i], pads[i], kernelShape[j], dilations[i], outSize);\n distributePadding(totalPad, autoPad, pads, i, i + spatialRank);\n if (updateOutputShape) {\n outputShape.push(\n strides[i] * (inSize - 1) + outputPadding[i] + (kernelShape[j] - 1) * dilations[i] + 1 - pads[i] -\n pads[i + spatialRank]);\n }\n }\n outputShape.splice(0, 0, batchSize);\n outputShape.splice(isChannelLast ? 3 : 1, 0, outChannels);\n };\n\nexport interface ConvTransposeAttributes extends ConvAttributes {\n readonly outputPadding: readonly number[];\n readonly outputShape: readonly number[];\n}\n\nconst getAdjustedConvTransposeAttributes =\n (attributes: T, inputs: readonly TensorView[]): T => {\n const kernelShape = attributes.kernelShape.slice();\n // if kernelShape is not specified in the attributes of this op, infer it from the weight tensor dims\n if (attributes.kernelShape.length === 0 || attributes.kernelShape.reduce((a, b) => a * b, 1) === 0) {\n kernelShape.length = 0;\n for (let i = 2; i < inputs[1].dims.length; ++i) {\n kernelShape.push(inputs[1].dims[i]);\n }\n }\n const isChannelsLast = attributes.format === 'NHWC';\n kernelShape.splice(0, 0, inputs[1].dims[0]);\n kernelShape.splice(isChannelsLast ? 
3 : 1, 0, inputs[1].dims[1]);\n\n const pads = attributes.pads.slice();\n const outputShape = attributes.outputShape.slice();\n const outputPadding = attributes.outputPadding.slice();\n const inputShape = inputs[0].dims;\n let dilations = attributes.dilations.slice();\n if (dilations.reduce((a, b) => a + b, 0) === 0) {\n const spatialRank = inputs[0].dims.length - 2;\n dilations = new Array(spatialRank).fill(1);\n }\n let strides = attributes.strides.slice();\n if (strides.reduce((a, b) => a + b, 0) === 0) {\n const spatialRank = inputs[0].dims.length - 2;\n strides = new Array(spatialRank).fill(1);\n }\n // If outputShape is not specified in the attributes of this op, infer it from the parameters\n // Similarly, automatically infer pads if not specified\n calculateOutputShapeAndPads(\n inputShape, kernelShape, dilations, attributes.autoPad, attributes.group, pads, strides, isChannelsLast,\n outputPadding, outputShape);\n\n // always return a new object so does not modify the original attributes\n const newAttributes: T = Object.assign({}, attributes);\n Object.assign(newAttributes, {kernelShape, pads, outputPadding, outputShape, dilations, strides});\n return newAttributes;\n };\n\nexport const parseConvTransposeAttributes = (attributes: Record): ConvTransposeAttributes => {\n const activationAttributes = parseInternalActivationAttributes(attributes);\n // TODO : Make this generic enough to compute default attributes for multi-dimensional conv\n const format = attributes.format as 'NHWC' | 'NCHW';\n const autoPad =\n ['NOTSET', 'VALID', 'SAME_UPPER',\n 'SAME_LOWER'][typeof attributes.autoPad == 'undefined' ? 0 : attributes.autoPad as number];\n const dilations = attributes.dilations as [number, number];\n const group = attributes.group as number;\n const kernelShape = attributes.kernelShape as [number, number];\n const pads = attributes.pads as [number, number, number, number];\n const strides = attributes.strides as [number, number];\n const wIsConst = (attributes.wIsConst as () => boolean)();\n const outputPadding = attributes.outputPadding as [number, number, number, number];\n const outputShape = attributes.outputShape as [number, number];\n return {\n autoPad,\n format,\n dilations,\n group,\n kernelShape,\n outputPadding,\n outputShape,\n pads,\n strides,\n wIsConst,\n ...activationAttributes,\n cacheKey: `${attributes.format};${activationAttributes.activation};`\n };\n};\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: ConvTransposeAttributes): void => {\n // Refer to the below link for all input checks\n // https://github.com/onnx/onnx/blob/main/docs/Operators.md#ConvTranspose\n if (!inputs || (inputs.length !== 2 && inputs.length !== 3)) {\n throw new Error('Conv requires 2 or 3 inputs');\n }\n\n // TODO : Need to add support for multi-dimensional conv\n if (inputs[0].dims.length !== 4 && inputs[0].dims.length !== 3) {\n throw new Error('currently only support 2-dimensional conv');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error('filter does not have same dimension as input');\n }\n\n // FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\n const dataChannel = inputs[0].dims[attributes.format === 'NHWC' ? 
inputs[0].dims.length - 1 : 1];\n const filterInChannel = inputs[1].dims[0];\n if (dataChannel !== filterInChannel) {\n throw new Error('FILTER_IN_CHANNEL should be equal to DATA_CHANNEL');\n }\n\n const featureMaps = inputs[1].dims[1] * attributes.group;\n\n // if bias is provided it should be 1D and the number of elements should be equal to the number of feature maps\n if (inputs.length === 3 && (inputs[2].dims.length !== 1 || inputs[2].dims[0] !== featureMaps)) {\n throw new Error('invalid bias');\n }\n\n const spatialRank = inputs[0].dims.length - 2;\n const dilationsSet = attributes.dilations.reduce((a, b) => a + b, 0) > 0;\n // wrong dilations dimension\n if (dilationsSet && attributes.dilations.length !== spatialRank) {\n throw new Error(`dilations should be ${spatialRank}D`);\n }\n\n const stridesSet = attributes.strides.reduce((a, b) => a + b, 0) > 0;\n // Wrong strides dimension\n if (stridesSet && attributes.strides.length !== spatialRank) {\n throw new Error(`strides should be ${spatialRank}D`);\n }\n\n // Wrong pads dimension\n const padsSet = attributes.pads.reduce((a, b) => a + b, 0) > 0;\n if (padsSet && attributes.pads.length !== spatialRank * 2) {\n throw new Error(`pads should be ${spatialRank * 2}D`);\n }\n\n // Wrong output padding dimension\n if (attributes.outputPadding.length !== spatialRank && attributes.outputPadding.length !== 0) {\n throw new Error(`output_padding should be ${spatialRank}D`);\n }\n\n // if kernelShape is specified, it's data length must be 2 less than dims length of the weights tensor\n // (the first 2 dims are batch_size and channels)\n const kernelShapeSet = attributes.kernelShape.reduce((a, b) => a + b, 0) > 0;\n if (kernelShapeSet && attributes.kernelShape.length !== 0 &&\n attributes.kernelShape.length !== inputs[1].dims.length - 2) {\n throw new Error('invalid kernel shape');\n }\n\n // as with kernelShape, must have same number of spatial dims as input\n if (attributes.outputShape.length !== 0 && attributes.outputShape.length !== inputs[0].dims.length - 2) {\n throw new Error('invalid output shape');\n }\n};\n\n// for transposing weight tensor from [C, M/group, KH, KW] to [KH, KW, M/group, C]\nconst weightTransposePerm = [2, 3, 1, 0];\n\nconst convTranspose2d =\n (context: ComputeContext, inputs: readonly TensorView[], attributes: ConvTransposeAttributes): void => {\n const adjustedAttributes = getAdjustedConvTransposeAttributes(attributes, inputs);\n const isChannelsLast = attributes.format === 'NHWC';\n const outputShape = adjustedAttributes.outputShape;\n const outChannels = outputShape[isChannelsLast ? 3 : 1];\n const inputChannels = inputs[0].dims[isChannelsLast ? 3 : 1];\n // Switch to naive method when outChannels and inputChannels are very small. It's because that in this case it's\n // not suitable for matmul version since matmul uses tile size 32x32 resulting the underlying execution unit\n // utilization rate is very low.\n if (adjustedAttributes.group !== 1 || (outChannels === 1 && inputChannels === 1)) {\n context.compute(createConvTranspose2DProgramInfo(inputs, adjustedAttributes));\n return;\n }\n const outHeight = outputShape[isChannelsLast ? 1 : 2];\n const outWidth = outputShape[isChannelsLast ? 2 : 3];\n const weightHeight = inputs[1].dims[2];\n const weightWidth = inputs[1].dims[3];\n\n const dimAOuter = isChannelsLast ? outHeight * outWidth : outChannels;\n const dimBOuter = isChannelsLast ? 
outChannels : outHeight * outWidth;\n const dimInner = weightHeight * weightWidth * inputChannels;\n\n const sequentialAccessByThreads = /* backend.adapterInfo.isIntel() */ true;\n\n\n // STEP.1: transpose weight\n const transposedWeight = (context.kernelCustomData.wT as TensorView | undefined) ??\n context.compute(\n createTransposeProgramInfo(inputs[1], weightTransposePerm),\n {inputs: [1], outputs: [attributes.wIsConst ? -2 : -1]})[0];\n if (attributes.wIsConst && !context.kernelCustomData.wT) {\n context.kernelCustomData.wT = transposedWeight;\n }\n\n // STEP.2: prepare reshaped inputs\n const convTransposeInputs = [inputs[0], transposedWeight];\n const hasBias = inputs.length === 3;\n if (hasBias) {\n if (!isChannelsLast && inputs[2].dims.length === 1) {\n convTransposeInputs.push(inputs[2].reshape([inputs[2].dims[0], 1, 1]));\n } else {\n convTransposeInputs.push(inputs[2]);\n }\n }\n\n // STEP.3: compute matmul\n context.compute(\n createConv2DTransposeMatMulProgramInfo(\n convTransposeInputs, adjustedAttributes, outputShape, dimAOuter, dimBOuter, dimInner, hasBias,\n sequentialAccessByThreads),\n {inputs: convTransposeInputs});\n };\n\nconst convTranspose1d = (context: ComputeContext, attributes: ConvTransposeAttributes): void => {\n // extend the input to 2D by adding H dimension\n const isChannelLast = attributes.format === 'NHWC';\n\n const inputs = [\n context.inputs[0].reshape(\n isChannelLast ?\n // [N, W, C] -> [N, H=1, W, C]\n [context.inputs[0].dims[0], 1, context.inputs[0].dims[1], context.inputs[0].dims[2]] :\n // [N, C, W] -> [N, C, H=1, W]\n [context.inputs[0].dims[0], context.inputs[0].dims[1], 1, context.inputs[0].dims[2]]),\n //[FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kW] -> [FILTER_OUT_CHANNEL, FILTER_IN_CHANNEL, kH=1, kW]\n context.inputs[1].reshape([context.inputs[1].dims[0], context.inputs[1].dims[1], 1, context.inputs[1].dims[2]])\n ];\n if (context.inputs.length === 3) {\n inputs.push(context.inputs[2]);\n }\n let kernelShape = attributes.kernelShape;\n if (kernelShape.length === 0 || kernelShape[0] === 0) {\n kernelShape = [context.inputs[1].dims[2]];\n }\n let dilations = attributes.dilations;\n if (dilations.length === 0 || dilations[0] === 0) {\n dilations = [1];\n }\n let strides = attributes.strides;\n if (strides.length === 0 || strides[0] === 0) {\n strides = [1];\n }\n let pads = attributes.pads;\n if (pads.length === 0) {\n pads = [0, 0];\n }\n pads = [0, pads[0], 0, pads[1]];\n strides = [1].concat(strides);\n dilations = [1].concat(dilations);\n kernelShape = [1].concat(kernelShape);\n const adjustedAttributes =\n getAdjustedConvTransposeAttributes({...attributes, pads, strides, dilations, kernelShape}, inputs);\n context.compute(createConvTranspose2DProgramInfo(\n inputs, adjustedAttributes,\n outputShape => isChannelLast ? [outputShape[0], outputShape[2], outputShape[3]] :\n [outputShape[0], outputShape[1], outputShape[3]]));\n};\n\nexport const convTranspose = (context: ComputeContext, attributes: ConvTransposeAttributes): void => {\n validateInputs(context.inputs, attributes);\n if (context.inputs[0].dims.length === 3) {\n convTranspose1d(context, attributes);\n } else {\n convTranspose2d(context, context.inputs, attributes);\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, inputVariable, outputVariable, ShaderHelper} from './common';\n\n\nexport interface CumSumAttributes extends AttributeWithCacheKey {\n readonly exclusive: boolean;\n readonly reverse: boolean;\n}\nconst createCumsumProgramInfo =\n (inputType: number, inputShape: readonly number[], axisInput: TensorView, attributes: CumSumAttributes):\n ProgramInfo => {\n const outputSize = ShapeUtil.size(inputShape); // outputShape is same as inputShape.\n const rank = inputShape.length; // input/output rank\n const input = inputVariable('input', inputType, rank);\n const output = outputVariable('output', inputType, rank);\n const axisValue = axisInput.dataType === DataType.int32 ? axisInput.getInt32Array()[0] :\n Number(axisInput.getBigInt64Array()[0]);\n const axis = ShapeUtil.normalizeAxis(axisValue, rank);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const index = ` i32(${input.indicesGet('inputIndices', 'uniforms.axis')}) `;\n const max = getElementAt('uniforms.input_shape', 'uniforms.axis', rank);\n const lowerLimit = attributes.reverse ? index + (attributes.exclusive ? ' + 1' : '') : '0';\n const upperLimit = attributes.reverse ? max : index + (attributes.exclusive ? '' : ' + 1');\n return `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axis', 'u32')\n .declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var inputIndices = ${output.offsetToIndices('global_idx')};\n var sum = ${output.type.value}(0);\n let first : i32 = ${lowerLimit};\n let last : i32 = ${upperLimit};\n for (var i : i32 = first; i < last; i++) {\n ${input.indicesSet('inputIndices', 'uniforms.axis', 'u32(i)')};\n sum = sum + ${input.getByIndices('inputIndices')};\n }\n ${output.setByOffset('global_idx', 'sum')};\n }`;\n };\n return {\n name: 'CumSum',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank']},\n getRunData: () => ({\n outputs: [{dims: inputShape, dataType: inputType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: axis},\n ...createTensorShapeVariables(inputShape, inputShape)\n ]\n\n }),\n getShaderSource\n };\n };\n\n\nexport const cumsum = (context: ComputeContext, attributes: CumSumAttributes): void => {\n const inputShape = context.inputs[0].dims;\n const inputType = context.inputs[0].dataType;\n const axis = context.inputs[1];\n context.compute(createCumsumProgramInfo(inputType, inputShape, axis, attributes), {inputs: [0]});\n};\n\nexport const parseCumSumAttributes = (attributes: Record): CumSumAttributes => {\n const exclusive = attributes.exclusive as number === 1;\n const reverse = attributes.reverse as number === 1;\n return createAttributeWithCacheKey({exclusive, reverse});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface FormatAttributes {\n readonly format: 'NHWC'|'NCHW';\n}\n\nexport interface DepthToSpaceAttributes extends FormatAttributes, AttributeWithCacheKey {\n readonly blocksize: number;\n readonly mode: string;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('DepthToSpace requires 1 input.');\n }\n if (inputs[0].dims.length !== 4) {\n throw new Error('DepthToSpace requires 4D input.');\n }\n};\n\nconst permFunctionBody = (perm: number[], rank: number, input: IndicesHelper, output: IndicesHelper): string => {\n const reverseFunc = [];\n reverseFunc.push(`fn perm(i: ${output.type.indices}) -> ${input.type.indices} {\n var a: ${input.type.indices};`);\n for (let i = 0; i < rank; ++i) {\n reverseFunc.push(input.indicesSet('a', perm[i], `i[${i}]`));\n }\n reverseFunc.push('return a;}');\n return reverseFunc.join('\\n');\n};\n\nconst createDepthToSpaceProgramInfo = (inputTensor: TensorView, attributes: DepthToSpaceAttributes): ProgramInfo => {\n let n: number, h: number, w: number, c: number;\n let shape: number[];\n let perm: number[];\n const isChannelLast = attributes.format === 'NHWC';\n const blocksize = attributes.blocksize;\n const isDCRmode = attributes.mode === 'DCR';\n if (isChannelLast) {\n [n, h, w, c] = inputTensor.dims;\n shape = isDCRmode ? [n, h, w, blocksize, blocksize, c / (blocksize ** 2)] :\n [n, h, w, c / (blocksize ** 2), blocksize, blocksize];\n perm = isDCRmode ? [0, 1, 3, 2, 4, 5] : [0, 1, 4, 2, 5, 3];\n } else {\n [n, h, w, c] = [inputTensor.dims[0], inputTensor.dims[2], inputTensor.dims[3], inputTensor.dims[1]];\n shape = isDCRmode ? [n, blocksize, blocksize, c / (blocksize ** 2), h, w] :\n [n, c / (blocksize ** 2), blocksize, blocksize, h, w];\n perm = isDCRmode ? [0, 3, 4, 1, 5, 2] : [0, 1, 4, 2, 5, 3];\n }\n const reshapedInputTensor = inputTensor.reshape(shape);\n const reshapedInputRank = reshapedInputTensor.dims.length;\n const inputDataType = inputTensor.dataType;\n\n const reshapedInput = inputVariable('a', inputDataType, reshapedInputRank);\n const permedOutput = outputVariable('output', inputDataType, reshapedInputRank);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(reshapedInput, permedOutput)}\n\n ${permFunctionBody(perm, reshapedInputRank, reshapedInput, permedOutput)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${permedOutput.offsetToIndices('global_idx')};\n let aIndices = perm(indices);\n\n ${permedOutput.setByOffset('global_idx', reshapedInput.getByIndices('aIndices'))}\n }`;\n\n return {\n name: 'DepthToSpace',\n shaderCache: {hint: `${inputTensor.dims};${attributes.blocksize};${attributes.mode}`, inputDependencies: ['rank']},\n getRunData: (inputs) => {\n const outputShape = isChannelLast ? 
[n, h * blocksize, w * blocksize, c / (blocksize ** 2)] :\n [n, c / (blocksize ** 2), h * blocksize, w * blocksize];\n const outputSize = ShapeUtil.size(outputShape);\n const shapeBeforePerm = reshapedInputTensor.dims;\n const shapeAfterPerm = ShapeUtil.sortBasedOnPerm(shapeBeforePerm, perm);\n return {\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(shapeBeforePerm, shapeAfterPerm)],\n };\n },\n getShaderSource,\n };\n};\n\nexport const depthToSpace = (context: ComputeContext, attributes: DepthToSpaceAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createDepthToSpaceProgramInfo(context.inputs[0], attributes));\n};\n\nexport const parseDepthToSpaceAttributes = (attributes: Record): DepthToSpaceAttributes =>\n createAttributeWithCacheKey({\n blocksize: attributes.blocksize as number,\n mode: attributes.mode as string,\n format: attributes.format as 'NHWC' | 'NCHW'\n });\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface EinsumAttributes extends AttributeWithCacheKey {\n readonly equation: string;\n}\n// The equation attribute value is a string which consists of left hand side (LHS) and optionally right hand side (RHS)\n// separated by '->'. Ex. \"ij,jk -> ik\" expresses matrix multiplication\n// \"ij->ji\" expresses matrix transpose\n// \"ii->i\" diagonal elements of a square matrix\n// LHS consists of a sequence of terms separated by commas. Each term corresponds to an input variable.\n// Each symbol corresponds to a dimension in the input variable. The symbol can be either a letter, 'a' to 'z' or 'A' to\n// 'Z' or '...' 
to represent arbitrary dimensions.\n\nconst symbolPattern =\n '[a-zA-Z]|\\\\.\\\\.\\\\.'; // The pattern each symbol in each term in the symbolic equation should match\nconst termPattern = '(' + symbolPattern + ')+'; // The pattern each term in the symbolic equation should match\nconst termPatternOnly = '^' + termPattern + '$'; // The patterns only matchs a term begin to end.\nconst lhsPattern = '(' + termPattern + ',)*' + termPattern; // The pattern the LHS should match\nconst lhsPatternOnly = '^' + lhsPattern + '$'; // The patterns only matchs a LHS begin to end.\n\ninterface SymbolInfo {\n count: number; // Symbol corresponding to a dimmension of an input\n inputIndices: number[]; // Number of input variables the symbol corresponds to\n dimValue: number; // Number of dimensions the symbol corresponds to\n}\n\nclass EinsumTerm {\n constructor(inputIndex = -1) {\n this.symbolToIndices = new Map();\n this.inputIndex = inputIndex;\n }\n\n // Add a symbol to the term\n addSymbol(symbol: string, index: number) {\n let value = this.symbolToIndices.get(symbol);\n if (value === undefined) {\n value = [index];\n } else {\n value.push(index);\n }\n this.symbolToIndices.set(symbol, value);\n }\n\n symbolToIndices: Map; // Map from symbol to dimensions of the input corresponding to the term\n inputIndex: number; // -1 for output and 0, 1, 2, ... for inputs\n}\n\nclass EinsumEquation {\n constructor(inputs: readonly TensorView[], public readonly equation: string) {\n this.hasEllipsis = false;\n this.symbolToInfo = new Map();\n this.lhs = new Array();\n this.outputDims = [];\n // As rhs needs to be updated allow using let instead of const for both lhs and rhs.\n // eslint-disable-next-line prefer-const\n let [lhs, rhs] = equation.includes('->') ? equation.split('->', 2) : [equation, ''];\n if (!lhs.match(RegExp(lhsPatternOnly))) {\n throw new Error('Invalid LHS term');\n }\n const inputTerms = lhs.split(',');\n inputTerms.forEach((inputTerm, index) => {\n const dims = inputs[index].dims.slice();\n if (!inputTerm.match(RegExp(termPatternOnly))) {\n throw new Error('Invalid LHS term');\n }\n const einsumTerm = this.processTerm(inputTerm, true, dims, index);\n this.lhs.push(einsumTerm);\n });\n\n // Initialize the RHS if not specified\n if (rhs === '') {\n // Construct RHS from LHS terms/symbols\n rhs += [...this.symbolToInfo.entries()]\n .filter(([sym, info]) => (info.count === 1 || sym === '...'))\n .map(([sym]) => sym)\n .join('');\n } else {\n if (!rhs.match(RegExp(termPattern))) {\n throw new Error('Invalid RHS');\n }\n }\n\n // Compute output dims\n const rhsSymbols = rhs.match(RegExp(symbolPattern, 'g'));\n rhsSymbols?.forEach((symbol) => {\n if (symbol === '...') {\n this.outputDims = this.outputDims.concat(this.ellipsisDims);\n } else {\n const info = this.symbolToInfo.get(symbol);\n if (info === undefined) {\n throw new Error('Invalid RHS symbol');\n }\n this.outputDims.push(info.dimValue);\n }\n });\n this.rhs = this.processTerm(rhs, false, this.outputDims);\n } // End of EinsumEqation constructor\n\n // Add a symbol to the equation\n addSymbol(symbol: string, dimValue: number, inputIndex: number) {\n let info = this.symbolToInfo.get(symbol);\n if (info !== undefined) {\n if (info.dimValue !== dimValue && info.count !== 1) {\n throw new Error('Dimension mismatch');\n } else {\n info.count++;\n info.inputIndices.push(inputIndex);\n }\n } else {\n info = {count: 1, dimValue, inputIndices: [inputIndex]};\n }\n this.symbolToInfo.set(symbol, info);\n }\n\n // Process one input/output term\n 
processTerm(term: string, isInput: boolean, dims: readonly number[], index = -1): EinsumTerm {\n const rank = dims.length;\n let ellipsis = false;\n let ellipsisDims = [];\n let nextDim = 0;\n // For output empty string is allowed because the output may be reduced to a scalar value\n if (!term.match(RegExp(termPatternOnly)) && (!isInput && term !== '')) {\n throw new Error('Invalid LHS term');\n }\n const indexSymbols = term.match(RegExp(symbolPattern, 'g'));\n const einsumTerm = new EinsumTerm(index);\n // symbol can be either a lettre, 'a' to 'z' or 'A' to 'Z', or '...'\n indexSymbols?.forEach((symbol: string, i: number) => {\n if (symbol === '...') {\n if (ellipsis) {\n throw new Error('Only one ellipsis is allowed per input term');\n }\n ellipsis = true;\n const ellipsisDimLength = rank - indexSymbols.length + 1;\n if (ellipsisDimLength < 0) {\n throw new Error('Ellipsis out of bounds');\n }\n ellipsisDims = dims.slice(nextDim, nextDim + ellipsisDimLength);\n if (this.hasEllipsis) {\n if (this.ellipsisDims.length !== ellipsisDims.length ||\n this.ellipsisDims.toString() !== ellipsisDims.toString()) {\n throw new Error('Ellipsis dimensions mismatch');\n }\n } else if (isInput) {\n this.hasEllipsis = true;\n this.ellipsisDims = ellipsisDims;\n } else {\n throw new Error('Ellipsis must be specified in the LHS');\n }\n // Add '0', '1', '2', '3', '4', etc to represent ellipsis dimensions to avoid special handling\n for (let j = 0; j < ellipsisDims.length; j++) {\n const symbol = String.fromCharCode('0'.charCodeAt(0) + j);\n einsumTerm.addSymbol(symbol, i + j);\n this.addSymbol(symbol, dims[nextDim++], index);\n }\n } else {\n einsumTerm.addSymbol(symbol, i + (this.hasEllipsis ? this.ellipsisDims.length - 1 : 0));\n this.addSymbol(symbol, dims[nextDim++], index);\n }\n });\n return einsumTerm;\n }\n\n symbolToInfo: Map; // All symbols in the equation\n hasEllipsis: boolean; // The equation has ellipsis or not\n ellipsisDims: number[]; // The dimensions of the equation ellipsis corresponds to.\n lhs: EinsumTerm[]; // Terms on the left-hand side of the equation\n rhs: EinsumTerm; // Term on the right-hand side of the equation\n outputDims: number[]; // Output dimensions of the equation\n} // End of class EinsumEquation\n\nconst appendMax = (name: string): string => name + '_max';\n\nconst createEinsumProgramInfo =\n (inputShapes: Array, dataType: number, einsumEquation: EinsumEquation,\n outputShape: readonly number[]): ProgramInfo => {\n const ranks = inputShapes.map((dims) => dims.length);\n const inputVars = ranks.map((rank, index) => inputVariable(`input${index}`, dataType, rank));\n const outputSize = ShapeUtil.size(outputShape);\n const output = outputVariable('output', dataType, outputShape.length);\n const uniformsSymbols =\n [...einsumEquation.symbolToInfo.keys()].filter((symbol) => !einsumEquation.rhs.symbolToIndices.has(symbol));\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const idxCopy: string[] = [];\n const initProd = 'var prod = 1.0;';\n const initSum = 'var sum = 0.0;';\n const updateSum = 'sum += prod;';\n const reduceOpsSetIndices: string[] = [];\n const reduceOpsLoopHeaders: string[] = [];\n const reduceOpsLoopFooters: string[] = [];\n const reduceOpCompute: string[] = [];\n const isReduceOpsWithoutLoop = einsumEquation.symbolToInfo.size === einsumEquation.rhs.symbolToIndices.size;\n einsumEquation.symbolToInfo.forEach((info, symbol) => {\n if (einsumEquation.rhs.symbolToIndices.has(symbol)) {\n const outputIndex = 
einsumEquation.rhs.symbolToIndices.get(symbol)?.[0];\n if (outputIndex !== undefined) {\n einsumEquation.lhs.forEach((term, i) => {\n if (info.inputIndices.includes(i)) {\n const indices = term.symbolToIndices.get(symbol);\n if (indices === undefined) {\n throw new Error('Invalid symbol error');\n }\n indices.forEach((index) => {\n idxCopy.push(`${\n inputVars[i].indicesSet(\n `input${i}Indices`, index, output.indicesGet('outputIndices', outputIndex))}`);\n });\n }\n });\n }\n } else {\n einsumEquation.lhs.forEach((term, i) => {\n if (info.inputIndices.includes(i)) {\n const indices = term.symbolToIndices.get(symbol);\n if (indices === undefined) {\n throw new Error('Invalid symbol error');\n }\n indices.forEach((index) => {\n reduceOpsSetIndices.push(`${inputVars[i].indicesSet(`input${i}Indices`, index, `${symbol}`)}`);\n });\n reduceOpCompute.push(`prod *= ${inputVars[i].getByIndices(`input${i}Indices`)};`);\n }\n });\n reduceOpsLoopHeaders.push(\n `for(var ${symbol}: u32 = 0; ${symbol} < uniforms.${appendMax(symbol)}; ${symbol}++) {`);\n reduceOpsLoopFooters.push('}');\n }\n });\n const reduceOps = isReduceOpsWithoutLoop ?\n [\n ...idxCopy,\n `let sum = ${inputVars.map((inputVar, i) => inputVar.getByIndices(`input${i}Indices`)).join(' * ')};`\n ] :\n [\n ...idxCopy,\n initSum,\n ...reduceOpsLoopHeaders,\n ...reduceOpsSetIndices,\n initProd,\n ...reduceOpCompute,\n updateSum,\n ...reduceOpsLoopFooters,\n ];\n return `\n ${\n shaderHelper\n .registerUniforms(uniformsSymbols.map((symbol) => ({name: `${appendMax(symbol)}`, type: 'u32'})))\n .registerUniform('outputSize', 'u32')\n .declareVariables(...inputVars, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n var outputIndices = ${output.offsetToIndices('global_idx')};\n ${inputVars.map((_var, i) => `var input${i}Indices: ${inputVars[i].type.indices};`).join('\\n')}\n ${reduceOps.join('\\n')};\n ${output.setByOffset('global_idx', 'sum')};\n }`;\n };\n return {\n name: 'Einsum',\n shaderCache: {hint: einsumEquation.equation, inputDependencies: inputShapes.map(() => 'rank')},\n getRunData: () => {\n // The symbols from uniformSymbols array are guaranteed to exist in einsumEquations.symbolToInfo map. 
The\n // filter is added to make sure that dimValue is never 0.\n const programUniformsInit: ProgramUniform[] =\n uniformsSymbols.filter((symbol) => einsumEquation.symbolToInfo.has(symbol))\n .map(\n (symbol) =>\n ({type: DataType.uint32, data: einsumEquation.symbolToInfo.get(symbol)?.dimValue || 0}));\n programUniformsInit.push({type: DataType.uint32, data: outputSize});\n const programUniforms: ProgramUniform[] =\n inputShapes.map((dims, _) => [...createTensorShapeVariables(dims)])\n .reduce((acc, inputProgramUniforms) => acc.concat(inputProgramUniforms), programUniformsInit);\n programUniforms.push(...createTensorShapeVariables(outputShape));\n return ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n });\n },\n getShaderSource,\n };\n };\n\nexport const einsum = (context: ComputeContext, attributes: EinsumAttributes): void => {\n const einsumEquation = new EinsumEquation(context.inputs, attributes.equation);\n const outputShape = einsumEquation.outputDims;\n const inputShapes = context.inputs.map((input, _) => input.dims);\n context.compute(createEinsumProgramInfo(inputShapes, context.inputs[0].dataType, einsumEquation, outputShape));\n};\n\nexport const parseEinsumAttributes = (attributes: Record): EinsumAttributes => {\n const equation = (attributes.equation as string).replace(/\\s+/g, '');\n return createAttributeWithCacheKey({equation});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Expand requires 2 input.');\n }\n const inputShape = inputs[0].dims;\n const shape = Array.from(inputs[1].getBigInt64Array(), Number);\n\n let shapeIndex = shape.length < inputShape.length ? 0 : shape.length - inputShape.length;\n let inputShapeIndex = inputShape.length < shape.length ? 0 : inputShape.length - shape.length;\n for (; shapeIndex < shape.length && inputShapeIndex < inputShape.length; ++shapeIndex, ++inputShapeIndex) {\n if (shape[shapeIndex] !== inputShape[inputShapeIndex] && shape[shapeIndex] !== 1 &&\n inputShape[inputShapeIndex] !== 1) {\n throw new Error('Expand requires shape to be broadcastable to input');\n }\n }\n};\n\nconst getAdjustedShape = (shape1: readonly number[], shape2: readonly number[]): number[] => {\n const diff = shape1.length - shape2.length;\n const shape: number[] = [];\n for (let i = 0; i < diff; ++i) {\n shape.push(shape1[i]);\n }\n for (let i = 0; i < shape2.length; ++i) {\n shape.push(shape2[i] === 1 ? shape1[i + diff] : shape2[i]);\n }\n return shape;\n};\n\nconst calculateOutputShape = (inputShape: readonly number[], shape: readonly number[]): number[] =>\n (inputShape.length > shape.length) ? 
getAdjustedShape(inputShape, shape) : getAdjustedShape(shape, inputShape);\n\n\nconst createExpandProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const shape = Array.from(inputs[1].getBigInt64Array(), Number);\n const outputShape: number[] = calculateOutputShape(inputShape, shape);\n const dataType = inputs[0].dataType;\n const components = dataType === DataType.bool ? 4 : 1;\n const outputSize = Math.ceil(ShapeUtil.size(outputShape) / components);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const input = inputVariable('input', dataType, inputShape.length, components);\n const output = outputVariable('output', dataType, outputShape.length, components);\n let assignment: string;\n if (dataType === DataType.bool) {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => `\n let outputIndices${x} = ${output.offsetToIndices(`outputOffset + ${x}u`)};\n let offset${x} = ${input.broadcastedIndicesToOffset(`outputIndices${x}`, output)};\n let index${x} = offset${x} / 4u;\n let component${x} = offset${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${input.getByOffset(`index${x}`)}[component${x}]);\n `;\n assignment = `\n let outputOffset = global_idx * ${components};\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n ${output.setByOffset('global_idx', 'data')}\n }`;\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n let inputOffset = ${input.broadcastedIndicesToOffset('outputIndices', output)};\n ${output.setByOffset('global_idx', input.getByOffset('inputOffset'))}\n }`;\n }\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}`;\n };\n\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputShape, outputShape)];\n return {\n name: 'Expand',\n shaderCache: {hint: `${outputShape.length}`, inputDependencies: ['rank']},\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const expand = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createExpandProgramInfo(context.inputs), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglValueType, UniformsArrayType, WORKGROUP_SIZE} from './common';\nimport * as unary from './unary-op';\n\n// GELU is defined as Y=0.5*X*(1+tanh(0.797885*X+0.035677*X*X*X)), where X may pre-add a bias.\n\nconst createFastGeluProgramInfo = (inputTensors: readonly TensorView[]): ProgramInfo => {\n const dataType = inputTensors[0].dataType;\n const outputSize = ShapeUtil.size(inputTensors[0].dims);\n const biasLength = ShapeUtil.size(inputTensors[1].dims);\n // can only use vec4 when bias length is multiple of 4\n const useVec4 = biasLength % 4 === 0;\n const getShaderSource = (shaderHelper: ShaderHelper): string => {\n const x = inputVariable('x', dataType, [1], 4);\n const bias = inputVariable('bias', dataType, [1], 4);\n const y = outputVariable('y', dataType, [1], 4);\n\n const uniforms: UniformsArrayType = [{name: 'output_vec_size', type: 'u32'}, {name: 'bias_size', type: 'u32'}];\n\n const singleElementBias = (i: 0|1|2|3) => `\n let bias${i}_offset: u32 = (global_idx * 4 + ${i}) % uniforms.bias_size;\n let bias${i} = ${bias.getByOffset(`bias${i}_offset / 4`)}[bias${i}_offset % 4];`;\n const biasGetExpression = useVec4 ?\n `\n let bias = ${bias.getByOffset('global_idx % (uniforms.bias_size / 4)')};` :\n `${singleElementBias(0)}${singleElementBias(1)}${singleElementBias(2)}${singleElementBias(3)}\n let bias = ${x.type.value}(bias0, bias1, bias2, bias3);`;\n\n return `${shaderHelper.registerUniforms(uniforms).declareVariables(x, bias, y)}\n\n ${unary.fastGeluImpl(tensorTypeToWsglValueType(dataType))}\n\n ${shaderHelper.mainStart(WORKGROUP_SIZE)}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_vec_size')}\n\n let x = ${x.getByOffset('global_idx')};\n ${biasGetExpression}\n let x_in = x + bias;\n ${y.setByOffset('global_idx', unary.fastGeluExpression('x_in'))}\n }`;\n };\n\n return {\n name: 'FastGeluWithBias',\n shaderCache: {hint: `${useVec4}`, inputDependencies: ['type', 'type']},\n getShaderSource,\n getRunData: (inputs) => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n programUniforms:\n [{type: DataType.uint32, data: Math.ceil(outputSize / 4)}, {type: DataType.uint32, data: biasLength}],\n dispatchGroup: {x: Math.ceil(outputSize / WORKGROUP_SIZE / 4)}\n })\n };\n};\n\nexport const fastGelu = (context: ComputeContext): void => {\n if (context.inputs.length < 2 || ShapeUtil.size(context.inputs[1].dims) === 0) {\n unary.fastGelu(context);\n } else {\n context.compute(createFastGeluProgramInfo(context.inputs));\n }\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface GatherAttributes extends AttributeWithCacheKey {\n axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Gather requires 2 inputs.');\n }\n};\n\nconst createGatherProgramInfo = (inputs: readonly TensorView[], attributes: GatherAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const indicesShape = inputs[1].dims;\n\n const inputRank = inputShape.length;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputRank);\n\n const outputShape = inputShape.slice(0);\n outputShape.splice(axis, 1, ...indicesShape);\n\n const axisDimLimit = inputShape[axis];\n const components = inputs[0].dataType === DataType.bool ? 4 : 1;\n const outputSize = Math.ceil(ShapeUtil.size(outputShape) / components);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: axisDimLimit},\n {type: DataType.uint32, data: axis}, ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims, outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const data = inputVariable('data', inputs[0].dataType, inputs[0].dims.length, components);\n const indices = inputVariable('inputIndices', inputs[1].dataType, inputs[1].dims.length);\n const output = outputVariable('output', inputs[0].dataType, outputShape.length, components);\n\n const calcDataIndices = (x: number|string): string => {\n const indicesRank = indicesShape.length;\n let calcStr = `var indicesIndices${x} = ${indices.type.indices}(0);`;\n for (let i = 0; i < indicesRank; i++) {\n calcStr += `${indicesRank > 1 ? `indicesIndices${x}[${i}]` : `indicesIndices${x}`} = ${\n outputShape.length > 1 ? `outputIndices${x}[uniforms.axis + ${i}]` : `outputIndices${x}`};`;\n }\n calcStr += `\n var idx${x} = ${indices.getByIndices(`indicesIndices${x}`)};\n if (idx${x} < 0) {\n idx${x} = idx${x} + uniforms.axisDimLimit;\n }\n var dataIndices${x} : ${data.type.indices};\n `;\n for (let i = 0, j = 0; i < inputRank; i++) {\n if (i === axis) {\n calcStr += `${inputRank > 1 ? `dataIndices${x}[${i}]` : `dataIndices${x}`} = u32(idx${x});`;\n j += indicesRank;\n } else {\n calcStr += `${inputRank > 1 ? `dataIndices${x}[${i}]` : `dataIndices${x}`} = ${\n outputShape.length > 1 ? 
`outputIndices${x}[${j}]` : `outputIndices${x}`};`;\n j++;\n }\n }\n return calcStr;\n };\n let assignment: string;\n if (inputs[0].dataType === DataType.bool) {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => `\n let outputIndices${x} = ${output.offsetToIndices(`outputOffset + ${x}u`)};\n ${calcDataIndices(x)};\n let offset${x} = ${data.indicesToOffset(`dataIndices${x}`)};\n let index${x} = offset${x} / 4u;\n let component${x} = offset${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${data.getByOffset(`index${x}`)}[component${x}]);\n `;\n assignment = `\n let outputOffset = global_idx * ${components};\n var value = vec4(0);\n ${singleAssignment('value', 0, 'u32')}\n ${singleAssignment('value', 1, 'u32')}\n ${singleAssignment('value', 2, 'u32')}\n ${singleAssignment('value', 3, 'u32')}\n ${output.setByOffset('global_idx', 'value')}\n `;\n } else {\n assignment = `\n let outputIndices = ${output.offsetToIndices('global_idx')};\n ${calcDataIndices('')};\n let value = ${data.getByIndices('dataIndices')};\n ${output.setByOffset('global_idx', 'value')};\n `;\n }\n return `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axisDimLimit', 'i32')\n .registerUniform('axis', 'u32')\n .declareVariables(data, indices, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n ${assignment}\n }`;\n };\n return {\n name: 'Gather',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank', 'rank']},\n getRunData: () => ({\n outputs: [\n {dims: outputShape, dataType: inputs[0].dataType},\n ],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nexport const parseGatherAttributes = (attributes: Record): GatherAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n\nexport const gather = (context: ComputeContext, attributes: GatherAttributes): void => {\n const inputs = context.inputs;\n validateInputs(inputs);\n context.compute(createGatherProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface GatherElementsAttributes extends AttributeWithCacheKey {\n axis: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('GatherElements requires 2 inputs.');\n }\n\n if (inputs[0].dims.length < 1) {\n throw new Error('GatherElements requires that the data input be rank >= 1.');\n }\n\n if (inputs[0].dims.length !== inputs[1].dims.length) {\n throw new Error(`GatherElements requires that the data input and\n indices input tensors be of same rank.`);\n }\n};\n\nconst createGatherElementsProgramInfo =\n (inputs: readonly TensorView[], attributes: GatherElementsAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputOutputDataType = inputs[0].dataType;\n const inputRank = inputShape.length;\n\n const indicesShape = inputs[1].dims;\n const indicesDataType = inputs[1].dataType;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputRank);\n const axisDimLimit = inputShape[axis];\n\n const outputShape = indicesShape.slice(0);\n const outputSize = ShapeUtil.size(outputShape);\n\n const input = inputVariable('input', inputOutputDataType, inputRank);\n const indices = inputVariable('indicesInput', indicesDataType, indicesShape.length);\n const output = outputVariable('output', inputOutputDataType, outputShape.length);\n\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: axisDimLimit},\n {type: DataType.uint32, data: axis}\n ];\n programUniforms.push(...createTensorShapeVariables(inputShape, indicesShape, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'rank'];\n\n // int64 indices would be treated as little endian i32 with assumption they fall in i32 limits\n // That assumption is safe as it's not possible to allocate >2gb buffer for input tensor\n // Input data will be treated as u32 or two u32 for 8-byte tensors\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${\n shaderHelper.registerUniform('outputSize', 'u32')\n .registerUniform('axisDimLimit', 'i32')\n .registerUniform('axis', 'u32')\n .declareVariables(input, indices, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n let outputIndices = ${output.offsetToIndices('global_idx')};\n\n var idx = ${indices.getByOffset('global_idx')};\n if (idx < 0) {\n idx = idx + uniforms.axisDimLimit;\n }\n var inputIndices = ${input.type.indices}(outputIndices);\n ${input.indicesSet('inputIndices', 'uniforms.axis', 'u32(idx)')};\n let value = ${input.getByIndices('inputIndices')};\n\n ${output.setByOffset('global_idx', 'value')};\n }`;\n\n return {\n name: 'GatherElements',\n shaderCache: {inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nexport const 
parseGatherElementsAttributes = (attributes: Record): GatherElementsAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n\nexport const gatherElements = (context: ComputeContext, attributes: GatherElementsAttributes): void => {\n const inputs = context.inputs;\n validateInputs(inputs);\n context.compute(createGatherElementsProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {GemmUtil, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs) {\n throw new Error('Input is missing');\n }\n if (inputs.length < 2 || inputs.length > 3) {\n throw new Error('Invaid input number.');\n }\n\n // 'C' can be of dimensionality 0, 1 or 2 only\n if (inputs.length === 3 && inputs[2].dims.length > 2) {\n throw new Error('Invalid input shape of C');\n }\n\n if ((inputs[0].dataType !== inputs[1].dataType) ||\n (inputs.length === 3 && inputs[0].dataType !== inputs[2].dataType)) {\n throw new Error('Input types are mismatched');\n }\n};\n\nexport interface GemmAttributes extends AttributeWithCacheKey {\n transA: boolean;\n transB: boolean;\n alpha: number;\n beta: number;\n}\n\nconst createGemmProgramInfo = (inputs: readonly TensorView[], attributes: GemmAttributes): ProgramInfo => {\n const aShape = inputs[0].dims.slice();\n const bShape = inputs[1].dims.slice();\n const [M, N, K] = GemmUtil.getShapeOfGemmResult(\n aShape, attributes.transA, bShape, attributes.transB, inputs.length === 3 ? inputs[2].dims : undefined);\n const outputShape = [M, N];\n if (!outputShape) {\n throw new Error('Can\\'t use gemm on the given tensors');\n }\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: M}, {type: DataType.uint32, data: N},\n {type: DataType.uint32, data: K}, {type: DataType.float, data: attributes.alpha},\n {type: DataType.float, data: attributes.beta}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n if (inputs.length === 3) {\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n inputDependencies.push('rank');\n }\n programUniforms.push(...createTensorShapeVariables(outputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n let line = '';\n if (attributes.transA && attributes.transB) {\n line = 'value += a[k * uniforms.M + m] * b[n * uniforms.K + k];';\n } else if (attributes.transA && !attributes.transB) {\n line = 'value += a[k * uniforms.M + m] * b[k * uniforms.N + n];';\n } else if (!attributes.transA && attributes.transB) {\n line = 'value += a[m * uniforms.K + k] * b[n * uniforms.K + k];';\n } else if (!attributes.transA && !attributes.transB) {\n line = 'value += a[m * uniforms.K + k] * b[k * uniforms.N + n];';\n }\n\n const calculateAlpha = attributes.alpha === 1 ? 
'' : 'value *= uniforms.alpha;';\n const a = inputVariable('a', inputs[0].dataType, inputs[0].dims);\n const b = inputVariable('b', inputs[1].dataType, inputs[1].dims);\n const dataType = a.type.value;\n let c: IndicesHelper|null = null;\n const variables = [a, b];\n if (inputs.length === 3) {\n c = inputVariable('c', inputs[2].dataType, inputs[2].dims.length);\n variables.push(c);\n }\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n variables.push(output);\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'M', type: 'u32'}, {name: 'N', type: 'u32'}, {name: 'K', type: 'u32'},\n {name: 'alpha', type: 'f32'}, {name: 'beta', type: 'f32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let m = global_idx / uniforms.N;\n let n = global_idx % uniforms.N;\n\n var value = ${dataType}(0);\n for (var k: u32 = 0u; k < uniforms.K; k++) {\n ${line}\n }\n\n ${calculateAlpha}\n ${(() => {\n if (c != null) {\n return `let cOffset = ${c.broadcastedIndicesToOffset('vec2(m, n)', output)}; value += ${\n dataType}(uniforms.beta) * ${c.getByOffset('cOffset')};`;\n }\n return '';\n })()}\n output[global_idx] = value;\n }`;\n };\n\n return {\n name: 'Gemm',\n shaderCache: {hint: `${attributes.cacheKey}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nexport const parseGemmAttributes = (attributes: Record): GemmAttributes => {\n const transA = attributes.transA as boolean;\n const transB = attributes.transB as boolean;\n const alpha = attributes.alpha as number;\n const beta = attributes.beta as number;\n return {transA, transB, alpha, beta, cacheKey: `${attributes.transA};${attributes.transB};${attributes.alpha === 1}`};\n};\n\nexport const gemm = (context: ComputeContext, attributes: GemmAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createGemmProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, fillVector, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\nexport interface InstanceNormAttributes {\n epsilon: number;\n format: 'NHWC'|'NCHW';\n}\n\nconst createInstanceNormProgramInfo =\n (inputs: readonly TensorView[], attributes: InstanceNormAttributes): ProgramInfo => {\n const xShape = inputs[0].dims;\n const outputShape = xShape;\n const axis = 2;\n const normCount = ShapeUtil.sizeToDimension(xShape, axis);\n const normSize = ShapeUtil.sizeFromDimension(xShape, axis);\n const components = getMaxComponents(normSize);\n const normPackedSize = normSize / components;\n const inputShape = [xShape[0], xShape[1], normPackedSize];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank', 'type', 'type'];\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: normSize}, {type: DataType.uint32, data: normPackedSize}];\n programUniforms.push(...createTensorShapeVariables(inputShape, inputShape));\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const x = inputVariable('x', inputs[0].dataType, inputShape.length, components);\n const scale = inputVariable('scale', inputs[1].dataType, inputs[1].dims);\n const bias = inputVariable('bias', inputs[2].dataType, inputs[2].dims);\n const output = outputVariable('output', inputs[0].dataType, inputShape.length, components);\n const variables = [x, scale, bias, output];\n const dataType = x.type.value;\n const f32Type = components === 1 ? 
'f32' : `vec${components}`;\n const workgroupSize = 64;\n\n const uniforms: UniformsArrayType = [{name: 'normSize', type: 'u32'}, {name: 'normPackedSize', type: 'u32'}];\n return `\n var meanShared : f32;\n var squaredNormShared : f32;\n var workgroupShared : array<${f32Type}, ${workgroupSize}>;\n const workgroupSize = ${workgroupSize}u;\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n ${shaderHelper.mainStart(workgroupSize)}\n let norm = global_idx / workgroupSize;\n let batch = norm / uniforms.x_shape[1];\n let channel = norm % uniforms.x_shape[1];\n let localIndex = local_id.x;\n\n // initialize workgroup memory\n var initial = ${f32Type}(0);\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n initial = initial + ${f32Type}(${x.get('batch', 'channel', 'h')});\n }\n workgroupShared[localIndex] = initial;\n workgroupBarrier();\n\n // Calculate the mean of current channel data.\n for (var currSize = workgroupSize >> 1; currSize > 0; currSize = currSize >> 1) {\n if (localIndex < currSize) {\n workgroupShared[localIndex] = workgroupShared[localIndex] + workgroupShared[localIndex + currSize];\n }\n workgroupBarrier();\n }\n if (localIndex == 0) {\n meanShared = ${sumVector('workgroupShared[0]', components)} / f32(uniforms.normSize);\n }\n workgroupBarrier();\n\n // reinitialize workgroup memory.\n initial = ${f32Type}(0);\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n let deviation = ${f32Type}(${x.get('batch', 'channel', 'h')}) - ${f32Type}(meanShared);\n initial = initial + deviation * deviation;\n }\n workgroupShared[localIndex] = initial;\n workgroupBarrier();\n\n // Calculate the sum of square of deviation of current channel data.\n for (var currSize = workgroupSize >> 1; currSize > 0; currSize = currSize >> 1) {\n if (localIndex < currSize) {\n workgroupShared[localIndex] = workgroupShared[localIndex] + workgroupShared[localIndex + currSize];\n }\n workgroupBarrier();\n }\n if (localIndex == 0) {\n squaredNormShared = ${sumVector('workgroupShared[0]', components)};\n }\n workgroupBarrier();\n\n let invStdDev = inverseSqrt(squaredNormShared / f32(uniforms.normSize) + f32(${attributes.epsilon}));\n let channelScale = invStdDev * f32(${scale.getByOffset('channel')});\n let channelShift = f32(${bias.getByOffset('channel')}) - meanShared * channelScale;\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n let value = ${x.get('batch', 'channel', 'h')} * ${dataType}(${f32Type}(channelScale)) + ${dataType}(${\n f32Type}(channelShift));\n ${output.set('batch', 'channel', 'h', 'value')};\n }\n }`;\n };\n return {\n ...{name: 'InstanceNormalization'},\n // TODO: use epsilon as uniform. Currently epsilon as uniform fails test_instancenorm_epsilon.\n shaderCache: {hint: `${attributes.epsilon};${components}`, inputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: outputShape, dataType: inputs[0].dataType},\n ],\n dispatchGroup: {x: normCount},\n programUniforms\n }),\n getShaderSource,\n };\n };\n\nconst computeMean =\n (context: ComputeContext, input: TensorView, scale: TensorView, bias: TensorView, n: number, h: number, c: number,\n epsilon: number) => {\n const components = getMaxComponents(c);\n const WG = 64;\n // we will store channel scale and channel shift in [2, components] matrix\n // or in vec2 when components == 1\n const outputType = components === 1 ? 'vec2f' : `mat2x${components}f`;\n const sumCastType = components === 1 ? 
'f32' : `vec${components}f`;\n const setOutputValue = (var1: string, var2: string) => `${outputType}(${var1}, ${var2})`;\n const unitsOfWork = n * c / components;\n const wgSize = Math.ceil(h / WG);\n\n const meanInputDependencies: ProgramInputTensorInfoDependency[] = ['type'];\n const meanProgramUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: wgSize}, {type: DataType.uint32, data: h},\n {type: DataType.uint32, data: Math.floor(c / components)},\n {type: DataType.uint32, data: Math.floor(h * c / components)}\n ];\n\n const getMeanShaderSource = (shaderHelper: ShaderHelper) => {\n const inputHelper = inputVariable('input', input.dataType, input.dims, components);\n return `\n ${shaderHelper.declareVariables(inputHelper)}\n @group(0) @binding(1) var output : array<${outputType}>;\n struct Uniforms {wg_size:u32, H:u32, C:u32, image_size:u32};\n @group(0) @binding(2) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart(WG)}\n let currentImageNumber = global_idx / ${WG} / uniforms.C;\n let currentChannelNumber = (global_idx / ${WG}) % uniforms.C;\n let wgOffset = local_id.x * uniforms.wg_size;\n if (wgOffset >= uniforms.H) {\n return;\n }\n let wgMax = min(wgOffset + uniforms.wg_size, uniforms.H);\n\n let offset = currentImageNumber * uniforms.image_size + currentChannelNumber;\n var sum = ${fillVector('f32', components)};\n var squaredSum = ${fillVector('f32', components)};\n for (var i: u32 = wgOffset; i < wgMax; i++) {\n let value = ${sumCastType}(input[offset + i * uniforms.C]);\n sum += value;\n squaredSum += value * value;\n }\n output[global_idx] = ${setOutputValue('sum', 'squaredSum')};\n }`;\n };\n\n const meanValues = context.compute(\n {\n name: 'InstanceNormComputeMean',\n shaderCache: {hint: `${components}`, inputDependencies: meanInputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: [n, c, WG, 2], dataType: DataType.float},\n ],\n dispatchGroup: {x: n * c / components},\n programUniforms: meanProgramUniforms\n }),\n getShaderSource: getMeanShaderSource,\n },\n {inputs: [input], outputs: [-1]})[0];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: unitsOfWork}, {type: DataType.uint32, data: h},\n {type: DataType.uint32, data: Math.floor(c / components)},\n {type: DataType.uint32, data: Math.floor(WG * c / components)}\n ];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type', 'type'];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const scaleHelper = inputVariable('scale', scale.dataType, scale.dims, components);\n const biasHelper = inputVariable('bias', bias.dataType, bias.dims, components);\n return `\n @group(0) @binding(0) var input : array<${outputType}>;\n @group(0) @binding(1) var scale : array<${scaleHelper.type.storage}>;\n @group(0) @binding(2) var bias : array<${biasHelper.type.storage}>;\n @group(0) @binding(3) var output : array<${outputType}>;\n struct Uniforms {units_of_work : u32, H: u32, C : u32, image_size : u32};\n @group(0) @binding(4) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.units_of_work')}\n let currentImageNumber = global_idx / uniforms.C;\n let currentChannelNumber = global_idx % uniforms.C;\n\n let offset = currentImageNumber * uniforms.image_size;\n var sum = ${fillVector('f32', components)};\n var squaredSum = ${fillVector('f32', components)};\n for (var i: u32 = 0; i < min(${WG}, uniforms.H); i++) {\n let value = input[offset + i + currentChannelNumber * ${WG}];\n sum += 
value[0];\n squaredSum += value[1];\n }\n sum = sum / f32(uniforms.H);\n squaredSum = squaredSum / f32(uniforms.H);\n let invStdDev = inverseSqrt(squaredSum - sum * sum + f32(${epsilon}));\n let channelScale = invStdDev * ${sumCastType}(scale[currentChannelNumber]);\n let channelShift = ${sumCastType}(bias[currentChannelNumber]) - sum * channelScale;\n\n output[global_idx] = ${setOutputValue('channelScale', 'channelShift')};\n }`;\n };\n return context.compute(\n {\n name: 'InstanceNormComputeChannelScaleShift',\n // TODO: use epsilon as uniform. Currently epsilon as uniform fails test_instancenorm_epsilon.\n shaderCache: {hint: `${components};${epsilon}`, inputDependencies},\n getRunData: () => ({\n outputs: [\n {dims: [n, c, 2], dataType: DataType.float},\n ],\n dispatchGroup: {x: Math.ceil(unitsOfWork / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [meanValues, scale, bias], outputs: [-1]})[0];\n };\n\nconst createInstanceNormNHWCProgramInfo =\n (context: ComputeContext, inputs: readonly TensorView[], attributes: InstanceNormAttributes) => {\n const xShape = inputs[0].dims;\n const outputShape = xShape;\n const N = xShape[0];\n const C = xShape[xShape.length - 1];\n const H = ShapeUtil.sizeFromDimension(xShape, 1) / C;\n const components = getMaxComponents(C);\n const outputSize = ShapeUtil.size(outputShape) / components;\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: H}, {type: DataType.uint32, data: Math.floor(C / components)}];\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n // first compute mean\n const channelScaleShift = computeMean(context, inputs[0], inputs[1], inputs[2], N, H, C, attributes.epsilon);\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const scaleType = components === 1 ? 'vec2f' : `mat2x${components}f`;\n const scaleCastType = components === 1 ? 
dataType : `vec${components}<${dataType}>`;\n\n const inputHelper = inputVariable('input', inputs[0].dataType, inputs[0].dims, components);\n const outputHelper = outputVariable('output', inputs[0].dataType, outputShape, components);\n\n return `\n @group(0) @binding(0) var input : array<${inputHelper.type.storage}>;\n @group(0) @binding(1) var scaleInput : array<${scaleType}>;\n @group(0) @binding(2) var output : array<${outputHelper.type.storage}>;\n struct Uniforms {H: u32, C : u32};\n @group(0) @binding(3) var uniforms: Uniforms;\n\n ${shaderHelper.mainStart()}\n let currentImageNumber = global_idx / (uniforms.C * uniforms.H);\n let currentChannelNumber = global_idx % uniforms.C;\n\n let scaleOffset = currentImageNumber * uniforms.C + currentChannelNumber;\n let scale = scaleInput[scaleOffset];\n output[global_idx] = fma(input[global_idx], ${scaleCastType}(scale[0]), ${scaleCastType}(scale[1]));\n }`;\n };\n context.compute(\n {\n name: 'InstanceNormalizationNHWC',\n shaderCache: {hint: `${components}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [inputs[0], channelScaleShift]});\n };\n\nexport const instanceNorm = (context: ComputeContext, attributes: InstanceNormAttributes): void => {\n if (attributes.format === 'NHWC') {\n createInstanceNormNHWCProgramInfo(context, context.inputs, attributes);\n } else {\n context.compute(createInstanceNormProgramInfo(context.inputs, attributes));\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {castToF32, fillVector, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType,} from './common';\n\ninterface LayerNormAttributes {\n simplified: boolean;\n axis: number;\n epsilon: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 2) {\n throw new Error('layerNorm requires at least 2 inputs.');\n }\n};\n\nconst createLayerNormProgramInfo =\n (inputs: readonly TensorView[], attributes: LayerNormAttributes, outputCount: number): ProgramInfo => {\n const simplified = attributes.simplified;\n\n const xShape = inputs[0].dims;\n const scale = inputs[1];\n const bias = !simplified && inputs[2];\n\n const outputShape = xShape;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, xShape.length);\n const normCount = ShapeUtil.sizeToDimension(xShape, axis);\n const normSize = ShapeUtil.sizeFromDimension(xShape, axis);\n\n const scaleSize = ShapeUtil.size(scale.dims);\n const biasSize = bias ? 
ShapeUtil.size(bias.dims) : 0;\n if (scaleSize !== normSize || (bias && biasSize !== normSize)) {\n throw new Error(`Size of X.shape()[axis:] == ${normSize}.\n Size of scale and bias (if provided) must match this.\n Got scale size of ${scaleSize} and bias size of ${biasSize}`);\n }\n\n const meanInvStdDevDim: number[] = [];\n for (let i = 0; i < xShape.length; ++i) {\n if (i < axis) {\n meanInvStdDevDim.push(xShape[i]);\n } else {\n meanInvStdDevDim.push(1);\n }\n }\n const components = getMaxComponents(normSize);\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['type', 'type'];\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: normCount}, {type: DataType.float, data: normSize},\n {type: DataType.uint32, data: Math.floor(normSize / components)},\n {type: DataType.float, data: attributes.epsilon}\n ];\n if (bias) {\n inputDependencies.push('type');\n }\n const hasMeanDataOutput = outputCount > 1;\n const hasInvStdOutput = outputCount > 2;\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n const variables = [\n inputVariable('x', inputs[0].dataType, inputs[0].dims, components),\n inputVariable('scale', scale.dataType, scale.dims, components),\n ];\n if (bias) {\n variables.push(inputVariable('bias', bias.dataType, bias.dims, components));\n }\n variables.push(outputVariable('output', inputs[0].dataType, outputShape, components));\n if (hasMeanDataOutput) {\n variables.push(outputVariable('mean_data_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInvStdOutput) {\n variables.push(outputVariable('inv_std_output', DataType.float, meanInvStdDevDim));\n }\n\n const uniforms: UniformsArrayType = [\n {name: 'norm_count', type: 'u32'}, {name: 'norm_size', type: 'f32'},\n {name: 'norm_size_vectorized', type: 'u32'}, {name: 'epsilon', type: 'f32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...variables)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.norm_count')}\n let offset = global_idx * uniforms.norm_size_vectorized;\n var mean_vector = ${fillVector('f32', components)};\n var mean_square_vector = ${fillVector('f32', components)};\n\n for (var h: u32 = 0u; h < uniforms.norm_size_vectorized; h++) {\n let value = ${castToF32(dataType, components, 'x[h + offset]')};\n mean_vector += value;\n mean_square_vector += value * value;\n }\n let mean = ${sumVector('mean_vector', components)} / uniforms.norm_size;\n let inv_std_dev = inverseSqrt(${sumVector('mean_square_vector', components)} / uniforms.norm_size ${\n simplified ? '' : '- mean * mean'} + uniforms.epsilon);\n\n for (var j: u32 = 0; j < uniforms.norm_size_vectorized; j++) {\n let f32input = ${castToF32(dataType, components, 'x[j + offset]')};\n let f32scale = ${castToF32(dataType, components, 'scale[j]')};\n output[j + offset] = ${variables[0].type.value}((f32input ${simplified ? '' : '- mean'}) * inv_std_dev * f32scale\n ${bias ? `+ ${castToF32(dataType, components, 'bias[j]')}` : ''}\n );\n }\n\n ${hasMeanDataOutput ? 'mean_data_output[global_idx] = mean' : ''};\n ${hasInvStdOutput ? 
'inv_std_output[global_idx] = inv_std_dev' : ''};\n }`;\n };\n const outputs = [{dims: outputShape, dataType: inputs[0].dataType}];\n if (hasMeanDataOutput) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (hasInvStdOutput) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n\n return {\n name: 'LayerNormalization',\n shaderCache: {hint: `${components};${outputCount};${simplified}`, inputDependencies},\n getRunData: () =>\n ({outputs, dispatchGroup: {x: Math.ceil(normCount / 64 /* workgroup size */)}, programUniforms}),\n getShaderSource,\n };\n };\n\nexport const layerNorm = (context: ComputeContext, attributes: LayerNormAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createLayerNormProgramInfo(context.inputs, attributes, context.outputCount));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType, getTensorElementSize} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getMaxComponents, inputVariable, outputVariable, ShaderHelper, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\n// TODO support quantization bits not equal to 4\nexport interface MatMulNBitsAttributes extends AttributeWithCacheKey {\n k: number;\n n: number;\n accuracyLevel: number;\n bits: number;\n blockSize: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: MatMulNBitsAttributes): void => {\n if (inputs.length < 3 || inputs.length > 4) {\n throw new Error('MatMulNBits requires 3 or 4 inputs');\n }\n const a = inputs[0];\n const aRank = a.dims.length;\n if (a.dims[aRank - 1] !== attributes.k) {\n throw new Error('The last dim of input shape does not match the k value');\n }\n const nBlocksPerCol = Math.floor((attributes.k + attributes.blockSize - 1) / attributes.blockSize);\n const blobSize = attributes.blockSize / 8 * attributes.bits;\n const b = inputs[1];\n if (!ShapeUtil.areEqual(b.dims, [attributes.n, nBlocksPerCol, blobSize])) {\n throw new Error('The second inputs must be 3D tensor with shape N X nBlocksPerCol X blobSize');\n }\n const scales = inputs[2];\n const scalesShape = scales.dims;\n if (ShapeUtil.size(scalesShape) !== attributes.n * nBlocksPerCol) {\n throw new Error('scales input size error.');\n }\n if (inputs.length === 4) {\n const zeroPoints = inputs[3];\n const zeroPointsShape = zeroPoints.dims;\n const expectedZeroPointsSize =\n attributes.bits > 4 ? 
(attributes.n * nBlocksPerCol) : attributes.n * Math.floor((nBlocksPerCol + 1) / 2);\n if (ShapeUtil.size(zeroPointsShape) !== expectedZeroPointsSize) {\n throw new Error('zeroPoints input size error.');\n }\n }\n};\n\nexport const createMatMulNBitsProgramInfo =\n (inputs: readonly TensorView[], attributes: MatMulNBitsAttributes,\n maxComputeWorkgroupSizes: [number, number, number], maxComputeWorkgroupStorageSize: number): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const aRank = inputShape.length;\n const nBlocksPerCol = Math.floor((attributes.k + attributes.blockSize - 1) / attributes.blockSize);\n const dimAOuter = inputShape[aRank - 2];\n const dimInner = attributes.k;\n const dimBOuter = attributes.n;\n const batchDims = inputShape.slice(0, aRank - 2);\n const batchSize = ShapeUtil.size(batchDims);\n const blobSize = attributes.blockSize / 8 * attributes.bits;\n const blobSizeInWords = blobSize / 4;\n const dataType = inputs[0].dataType;\n const outputNumber = getMaxComponents(dimAOuter);\n const aComponents = getMaxComponents(attributes.k);\n const bComponents = getMaxComponents(blobSizeInWords);\n const elementSize = getTensorElementSize(dataType)!;\n const workgroupOutputSize = dimAOuter * nBlocksPerCol * elementSize;\n const maxNumberOfComponents = Math.floor(maxComputeWorkgroupStorageSize / workgroupOutputSize);\n const useBlockwiseMatMulNBits = nBlocksPerCol <= maxComputeWorkgroupSizes[0] && maxNumberOfComponents > 0;\n const components = (!useBlockwiseMatMulNBits || maxNumberOfComponents >= 4) ? getMaxComponents(dimBOuter) :\n ((maxNumberOfComponents >= 2) && getMaxComponents(dimBOuter) >= 2) ? 2 :\n 1;\n const outputShape = batchDims.concat([dimAOuter, dimBOuter]);\n const outputSize = ShapeUtil.size(outputShape) / components / outputNumber;\n\n const programUniforms: ProgramUniform[] = useBlockwiseMatMulNBits ?\n [] :\n [{type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: attributes.blockSize}];\n const inputShapeTemp = [batchSize, dimAOuter, dimInner / aComponents];\n const bShape = ShapeUtil.convertShape(inputs[1].dims).slice();\n bShape.splice(-1, 1, blobSizeInWords / bComponents);\n programUniforms.push(...createTensorShapeVariables(inputShapeTemp));\n programUniforms.push(...createTensorShapeVariables(bShape));\n programUniforms.push(...createTensorShapeVariables(inputs[2].dims));\n if (inputs.length === 4) {\n programUniforms.push(...createTensorShapeVariables(ShapeUtil.convertShape(inputs[3].dims)));\n }\n const outputShapeTemp = [batchSize, dimAOuter, dimBOuter / components];\n programUniforms.push(...createTensorShapeVariables(outputShapeTemp));\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const inputRank = inputShapeTemp.length;\n const a = inputVariable('a', inputs[0].dataType, inputRank, aComponents);\n const b = inputVariable('b', DataType.uint32, bShape.length, bComponents);\n const scales = inputVariable('scales', inputs[2].dataType, inputs[2].dims.length);\n const inputVariables = [a, b, scales];\n const zeroPoints =\n inputs.length === 4 ? 
inputVariable('zero_points', DataType.uint32, inputs[3].dims.length) : undefined;\n if (zeroPoints) {\n inputVariables.push(zeroPoints);\n }\n const outputRank = outputShapeTemp.length;\n const output = outputVariable('output', inputs[0].dataType, outputRank, components);\n const uniforms: UniformsArrayType = [{name: 'output_size', type: 'u32'}, {name: 'block_size', type: 'u32'}];\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n\n const qDqDataType = (() => {\n switch (aComponents) {\n case 1:\n return `array<${dataType}, 8>`;\n case 2:\n return `mat4x2<${dataType}>`;\n case 4:\n return `mat2x4<${dataType}>`;\n default:\n throw new Error(`${aComponents}-component is not supported.`);\n }\n })();\n\n const processOneBlock = `\n for (var word: u32 = 0; word < ${blobSizeInWords}; word += ${bComponents}) {\n ${b.indicesSet('b_indices', '2', 'word')};\n let b_data = ${b.getByIndices('b_indices')};\n for (var i: u32 = 0; i < ${bComponents}; i++) {\n let b_value: u32 = ${bComponents === 1 ? 'b_data' : 'b_data[word + i]'};\n let b_mask: u32 = 0x0F0F0F0Fu;\n let b_value_lower: vec4 = unpack4xU8(b_value & b_mask);\n let b_value_upper: vec4 = unpack4xU8((b_value >> 4) & b_mask);\n let b_quantized_values = ${qDqDataType}(${\n Array.from({length: 4}, (_, i) => `${dataType}(b_value_lower[${i}]), ${dataType}(b_value_upper[${i}])`)\n .join(', ')});\n let b_dequantized_values = ${(() => {\n if (aComponents === 1) {\n return `${qDqDataType}(${\n Array.from({length: 8}, (_, i) => `(b_quantized_values[${i}] - zero_point) * scale`).join(', ')});`;\n } else {\n return `(b_quantized_values - ${qDqDataType}(${Array(8).fill('zero_point').join(',')})) * scale;`;\n }\n })()};\n // Number of B elements per 32-bit word is 32/bits = 32/4 = 8\n for (var m: u32 = 0; m < ${useBlockwiseMatMulNBits ? dimAOuter : outputNumber}u; m++) {\n ${a.indicesSet('a_indices', inputRank - 2, useBlockwiseMatMulNBits ? 'm' : `row * ${outputNumber} + m`)};\n ${a.indicesSet('a_indices', inputRank - 1, 'word_offset')};\n var input_offset = ${a.indicesToOffset('a_indices')};\n var a_data: ${qDqDataType};\n for (var j: u32 = 0; j < ${8 / aComponents}; j++) {\n a_data[j] = ${a.getByOffset('input_offset')};\n input_offset++;\n }\n ${useBlockwiseMatMulNBits ? 'workgroup_shared[workgroup_shared_offset + m]' : 'output_values[m]'}${\n components > 1 ? '[c]' : ''} += ${\n Array\n .from(\n {length: 8 / aComponents},\n (_, i) => `${\n aComponents === 1 ? `a_data[${i}] * b_dequantized_values[${i}]` :\n `dot(a_data[${i}], b_dequantized_values[${i}])`}`)\n .join(' + ')};\n }\n word_offset += ${8 / aComponents};\n }\n }`;\n const updateZeroPointIndex = zeroPoints ? `\n zero_point_offset += 4;\n if (zero_point_offset == 32) {\n zero_point_offset = 0;\n zero_point_index++;\n zero_point_word = ${zeroPoints.getByOffset('zero_point_index')};\n }` :\n '';\n\n return useBlockwiseMatMulNBits ? `\n var workgroup_shared: array<${output.type.value}, ${dimAOuter * nBlocksPerCol}>;\n ${shaderHelper.declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart([\n nBlocksPerCol, 1, 1\n ])}\n var a_indices: ${a.type.indices};\n var block = local_id.x;\n var col = workgroup_id.y;\n var batch = workgroup_id.z;\n ${a.indicesSet('a_indices', '0', 'batch')};\n // Two zero points are packed into one byte when uniforms.bits is 4.\n for (var c: u32 = 0; c < ${components}; c++) {\n let col_times_components_plus_c = col * ${components} + c;\n ${\n zeroPoints ? 
`\n var zero_point_bytes_per_col: u32 = (${nBlocksPerCol} + 1) / 2;\n var zero_point_byte_count: u32 = col_times_components_plus_c * zero_point_bytes_per_col + (block >> 0x1u);\n var zero_point_word_index: u32 = zero_point_byte_count >> 0x2u;\n var zero_point_byte_offset: u32 = zero_point_byte_count & 0x3u;\n var zero_point_nibble_offset: u32 = block & 0x1u;\n var zero_point_bits_offset: u32 = (zero_point_byte_offset << 3) + (zero_point_nibble_offset << 2);\n var zero_point_word: u32 = ${zeroPoints.getByOffset('zero_point_word_index')} >> zero_point_bits_offset;` :\n ''}\n var b_indices: ${b.type.indices};\n ${b.indicesSet('b_indices', '0', 'col_times_components_plus_c')};\n // The scale and zero points are computed per block.\n var scales_index = col_times_components_plus_c * ${nBlocksPerCol} + block;\n let scale = ${scales.getByOffset('scales_index')};\n // The default zero point is 8 for unsigned 4-bit quantization.\n let zero_point = ${dataType}(${zeroPoints ? '(zero_point_word) & 0xFu' : 8.0});\n ${b.indicesSet('b_indices', '1', 'block')};\n var word_offset: u32 = block * ${attributes.blockSize / aComponents};\n var workgroup_shared_offset: u32 = block * ${dimAOuter};\n ${processOneBlock}\n }\n workgroupBarrier();\n if (local_id.x == 0u) {\n var output_indices: ${output.type.indices};\n ${output.indicesSet('output_indices', '0', 'batch')};\n ${output.indicesSet('output_indices', outputRank - 1, 'col')};\n ${output.indicesSet('output_indices', outputRank - 2, '0')};\n var output_offset = ${output.indicesToOffset('output_indices')};\n for (var m: u32 = 0u; m < ${dimAOuter}u; m++) {\n var output_value: ${output.type.value} = ${output.type.value}(0);\n var workgroup_shared_offset: u32 = m;\n for (var b: u32 = 0u; b < ${nBlocksPerCol}u; b++) {\n output_value += workgroup_shared[workgroup_shared_offset];\n workgroup_shared_offset += ${dimAOuter};\n }\n ${output.setByOffset('output_offset', 'output_value')};\n output_offset += ${dimBOuter / components};\n }\n }\n }` :\n `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(...inputVariables, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n var output_values: array<${output.type.value}, ${outputNumber}>;\n var output_indices = ${output.offsetToIndices('global_idx')};\n var col = ${output.indicesGet('output_indices', outputRank - 1)};\n var row = ${output.indicesGet('output_indices', outputRank - 2)};\n var a_indices: ${a.type.indices} = output_indices;\n // Two zero points are packed into one byte because uniforms.bits <= 4.\n // zero_point_offset is either 0 or 4. It is bit offset within one byte.\n // TODO support zero_point_offset for bits > 4\n ${\n zeroPoints ? `\n var zero_point_abs_offset = col * ${components} * ((${nBlocksPerCol} + 1) / 2);\n var zero_point_index: u32 = zero_point_abs_offset / 4;\n var zero_point_word: u32 = ${zeroPoints.getByOffset('zero_point_index')};\n var zero_point_offset: u32 = (zero_point_abs_offset % 4) * 8;` :\n ''}\n var scale_index = col * ${nBlocksPerCol * components};\n var b_indices: ${b.type.indices};\n for (var c: u32 = 0; c < ${components}; c++) {\n ${b.indicesSet('b_indices', '0', `col * ${components} + c`)};\n var block_offset: u32 = 0;\n for (var block: u32 = 0; block < ${nBlocksPerCol}; block++) {\n // The scale and zero points are computed per block.\n let scale = ${scales.getByOffset('scale_index')};\n // The default zero point is 8 for unsigned 4-bit quantization.\n let zero_point = ${dataType}(${zeroPoints ? 
'extractBits(zero_point_word, zero_point_offset, 4)' : 8.0});\n ${b.indicesSet('b_indices', '1', 'block')};\n var word_offset: u32 = block_offset;\n ${processOneBlock}\n scale_index++;\n ${updateZeroPointIndex}\n block_offset += uniforms.block_size / ${aComponents};\n }\n // Drop the trailing 4 bits if the zero_poit_offset is not a byte boundary to align with the next byte.\n ${\n zeroPoints ? `if (zero_point_offset % 8 > 0) {\n ${updateZeroPointIndex}\n }` :\n ''}\n }\n for (var k: u32 = 0u; k < ${outputNumber}u; k++) {\n ${output.indicesSet('output_indices', outputRank - 2, `${outputNumber} * row + k`)};\n ${output.setByIndices('output_indices', 'output_values[k]')}\n }\n }`;\n };\n return {\n name: useBlockwiseMatMulNBits ? 'BlockwiseMatMulNBits' : 'MatMulNBits',\n shaderCache: {\n hint: `${attributes.cacheKey};${dimAOuter};${dataType};${inputs.length}`,\n inputDependencies: Array(inputs.length).fill('rank')\n },\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n name: useBlockwiseMatMulNBits ? 'BlockwiseMatMulNBits' : 'MatMulNBits',\n dispatchGroup: useBlockwiseMatMulNBits ? {x: 1, y: Math.ceil(dimBOuter / components), z: batchSize} :\n {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource\n };\n };\n\nexport const matMulNBits = (context: ComputeContext, attributes: MatMulNBitsAttributes): void => {\n validateInputs(context.inputs, attributes);\n const maxComputeWorkgroupSizes: [number, number, number] = context.getMaxComputeWorkgroupSizes();\n const maxComputeWorkgroupStorageSize = context.getMaxComputeWorkgroupStoragesize();\n context.compute(createMatMulNBitsProgramInfo(\n context.inputs, attributes, maxComputeWorkgroupSizes, maxComputeWorkgroupStorageSize));\n};\n\nexport const parseMatMulNBitsAttributes = (attributes: Record): MatMulNBitsAttributes =>\n createAttributeWithCacheKey(attributes as Omit);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, GpuDataType, ProgramUniform} from '../types';\n\nimport {applyAttention, AttentionAttrs, AttentionMaskType, AttentionParameters, AttentionQkvFormat} from './attention';\nimport {inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\nimport {createTransposeProgramInfo, TransposeAttributes} from './transpose';\n\nconst getInput = (inputs: readonly TensorView[], i: number) =>\n (inputs.length > i) && (inputs[i].dims.length > 0) && (ShapeUtil.size(inputs[i].dims)) > 0 ? 
inputs[i] : undefined;\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: AttentionAttrs): AttentionParameters => {\n const query = inputs[0];\n const key = getInput(inputs, 1);\n const value = getInput(inputs, 2);\n const bias = getInput(inputs, 3);\n const keyPaddingMask = getInput(inputs, 4);\n const relativePositionBias = getInput(inputs, 5);\n const pastKey = getInput(inputs, 6);\n const pastValue = getInput(inputs, 7);\n\n // Abbreviation and Meanings:\n // B: batch_size\n // S: sequence_length (input sequence length of query)\n // P: past_sequence_length (past sequence length of key or value)\n // L: kv_sequence_length (input sequence length of key or value)\n // M: max_sequence_length\n // T: total_sequence_length = past_sequence_length + kv_sequence_length\n // N: num_heads\n // H: head size for Q and K, aka q_head_size or k_head_size or qk_head_size\n // H_v: v_head_size\n // D_i: input hidden size\n // D: hidden size for Q and K (D = N * H), aka q_hidden_size or k_hidden_size or qk_hidden_size\n // D_v: v_hidden_size = num_heads * v_head_size\n\n // key_padding_mask (K/V) : (B) or (2*B + 1) or (B, L) or None\n // relative_position_bias : (B, 1, S, L)\n // past_key : (B, N, S*, H)\n // past_value : (B, N, S*, H)\n // When no packing for q/k/v:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, D) or (B, N, S*, H)\n // value (V) : (B, L, D_v) or (B, N, S*, H)\n // bias (Q/K/V) : (D + D + D_v)\n // When packed kv is used:\n // query (Q) : (B, S, D)\n // key (K) : (B, L, N, 2, H)\n // value (V) : None\n // bias (Q/K/V) : None\n // When packed qkv is used:\n // query (Q) : (B, L, N, 3, H) or (B, S, 3*D)\n // key (K) : None\n // value (V) : None\n // bias (Q/K/V) : None or (D + D + D_v)\n\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input query is expected to have 3 or 5 dimensions');\n }\n\n const dmmhaPacking = false;\n const batchSize = query.dims[0];\n const sequenceLength = query.dims[1];\n const hiddenSize = query.dims.length === 3 ? (dmmhaPacking ? 
query.dims[2] / 3 : query.dims[2]) :\n attributes.numHeads * query.dims[4];\n let kvSequenceLength = sequenceLength;\n\n let pastSequenceLength = 0;\n let maxSequenceLength = 0;\n const headSize = Math.floor(hiddenSize / attributes.numHeads);\n if (pastKey && pastValue) {\n if (pastKey.dims.length !== 4) {\n throw new Error('Input \"past_key\" is expected to have 4 dimensions');\n }\n if (pastKey.dims[0] !== batchSize || pastKey.dims[1] !== attributes.numHeads || pastKey.dims[3] !== headSize) {\n throw new Error('Input \"past_key\" shape (batch_size, num_heads, past_sequence_length, head_size)');\n }\n if (pastValue.dims[0] !== batchSize || pastValue.dims[1] !== attributes.numHeads ||\n pastValue.dims[3] !== headSize) {\n throw new Error('Input \"past_value\" shape (batch_size, num_heads, past_sequence_length, head_size)');\n }\n if (pastKey.dims[2] !== pastValue.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have same dim 2 (past_sequence_length)');\n }\n if (pastValue.dims.length !== 4) {\n throw new Error('Input \"past_value\" is expected to have 4 dimensions');\n }\n pastSequenceLength = pastKey.dims[2];\n maxSequenceLength = pastKey.dims[2];\n } else if (pastKey || pastValue) {\n throw new Error('Input \"past_key\" and \"past_value\" shall be both present or both absent');\n }\n\n let qkvFormat: AttentionQkvFormat;\n if (key) {\n if (query.dims.length !== 3) {\n throw new Error('Input \"query\" is expected to have 3 dimensions when key is given');\n }\n if (key.dims.length < 3 || key.dims.length > 5) {\n throw new Error('Input \"key\" is expected to have 3, 4, or 5 dimensions');\n }\n if (query.dims[0] !== key.dims[0]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 0 (batch size)');\n }\n\n if (key.dims.length === 3) {\n if (key.dims[2] !== query.dims[2]) {\n throw new Error('Input \"query\" and \"key\" shall have same dim 2 (hidden_size)');\n }\n qkvFormat = AttentionQkvFormat.qkvBSNH;\n kvSequenceLength = key.dims[1];\n } else if (key.dims.length === 5) {\n if (key.dims[2] !== attributes.numHeads || key.dims[3] !== 2 || key.dims[4] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');\n }\n if (value) {\n throw new Error('Expect \"value\" be none when \"key\" has packed kv format.');\n }\n qkvFormat = AttentionQkvFormat.qKvBSNHxBSN2H;\n kvSequenceLength = key.dims[1];\n } else { // key_dims.size() == 4 (cross-attention with past_key)\n if (key.dims[1] !== attributes.numHeads || key.dims[3] !== headSize) {\n throw new Error('Expect \"key\" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');\n }\n\n qkvFormat = AttentionQkvFormat.unknown;\n kvSequenceLength = key.dims[2];\n }\n } else { // packed QKV\n if (query.dims.length !== 3 && query.dims.length !== 5) {\n throw new Error('Input \"query\" is expected to have 3 or 5 dimensions when key is empty');\n }\n if (query.dims.length === 5 && (query.dims[2] !== attributes.numHeads || query.dims[3] !== 3)) {\n throw new Error('Expect \"query\" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');\n }\n\n qkvFormat = AttentionQkvFormat.qkvBSN3H;\n }\n\n if (bias) {\n if (bias.dims.length !== 1) {\n throw new Error('Input \"bias\" is expected to have 1 dimension');\n }\n\n if (value) {\n if (query.dims.length === 5 && query.dims[3] === 2) {\n throw new Error('bias is not allowed for packed kv.');\n }\n }\n }\n\n let maskType: AttentionMaskType = 
AttentionMaskType.none;\n if (keyPaddingMask) {\n maskType = AttentionMaskType.maskUnknown;\n const maskDims = keyPaddingMask.dims;\n if (maskDims.length === 1) {\n if (maskDims[0] === batchSize) {\n maskType = AttentionMaskType.mask1dKeySeqLen;\n } else if (maskDims[0] === 3 * batchSize + 2) {\n maskType = AttentionMaskType.mask1DKeySeqLenStart;\n }\n } else if (maskDims.length === 2 && maskDims[0] === batchSize && maskDims[1] === kvSequenceLength) {\n maskType = AttentionMaskType.mask2dKeyPadding;\n }\n if (maskType === AttentionMaskType.maskUnknown) {\n throw new Error('Input \"key_padding_mask\" shape shall be (batch_size) or (batch_size, kv_sequence_length)');\n }\n throw new Error('Mask not supported');\n }\n\n let passPastInKv = false;\n let vHiddenSize = hiddenSize;\n if (value) {\n if (value.dims.length !== 3 && value.dims.length !== 4) {\n throw new Error('Input \"value\" is expected to have 3 or 4 dimensions');\n }\n\n if (query.dims[0] !== value.dims[0]) {\n throw new Error('Input \"query\" and \"value\" shall have same dim 0 (batch_size)');\n }\n\n if (value.dims.length === 3) {\n if (kvSequenceLength !== value.dims[1]) {\n throw new Error('Input \"key\" and \"value\" shall have the same dim 1 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[2];\n } else {\n if (kvSequenceLength !== value.dims[2]) {\n throw new Error('Input \"past_key\" and \"past_value\" shall have the same dim 2 (kv_sequence_length)');\n }\n vHiddenSize = value.dims[1] * value.dims[3];\n passPastInKv = true;\n }\n }\n\n const totalSequenceLength = pastSequenceLength + kvSequenceLength;\n const broadcastResPosBias = false;\n\n if (keyPaddingMask) {\n throw new Error('Key padding mask is not supported');\n }\n\n if (relativePositionBias) {\n if (relativePositionBias.dims.length !== 4) {\n throw new Error('Input \"relative_position_bias\" is expected to have 4 dimensions');\n }\n if ((relativePositionBias.dims[0] !== batchSize && relativePositionBias.dims[0] !== 1) ||\n relativePositionBias.dims[1] !== attributes.numHeads || relativePositionBias.dims[2] !== sequenceLength ||\n relativePositionBias.dims[3] !== totalSequenceLength) {\n throw new Error('Input \"relative_position_bias\" shape (batch_size, 1, sequence_length, kv_sequence_length)');\n }\n }\n\n return {\n batchSize,\n sequenceLength,\n pastSequenceLength,\n kvSequenceLength,\n totalSequenceLength,\n maxSequenceLength,\n inputHiddenSize: 0,\n hiddenSize,\n vHiddenSize,\n headSize,\n vHeadSize: Math.floor(vHiddenSize / attributes.numHeads),\n numHeads: attributes.numHeads,\n isUnidirectional: false,\n pastPresentShareBuffer: false,\n maskFilterValue: attributes.maskFilterValue,\n maskType,\n scale: attributes.scale,\n broadcastResPosBias,\n passPastInKv,\n qkvFormat,\n };\n};\n\nexport const parseMultiHeadAttentionAttributes = (attributes: AttentionAttrs): AttentionAttrs =>\n createAttributeWithCacheKey({...attributes});\n\nconst weightTransposeAttribute: TransposeAttributes = createAttributeWithCacheKey({perm: [0, 2, 1, 3]});\n\nconst addBiasTranspose =\n (context: ComputeContext, qkv: TensorView, bias: TensorView, batchSize: number, sequenceLength: number,\n hiddenSize: number, biasOffset: number) => {\n const outputShape = [batchSize, sequenceLength, hiddenSize];\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: biasOffset},\n {type: DataType.uint32, data: hiddenSize}\n ];\n\n const getShaderSource = (shaderHelper: 
ShaderHelper) => {\n const output = outputVariable('qkv_with_bias', qkv.dataType, outputShape);\n const qkvInput = inputVariable('qkv', qkv.dataType, outputShape);\n const biasInput = inputVariable('bias', bias.dataType, outputShape);\n\n const uniforms: UniformsArrayType = [\n {name: 'output_size', type: 'u32'}, {name: 'bias_offset', type: 'u32'}, {name: 'hidden_size', type: 'u32'}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(qkvInput, biasInput, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let bias_offset_idx = (global_idx % uniforms.hidden_size) + uniforms.bias_offset;\n\n qkv_with_bias[global_idx] = qkv[global_idx] + bias[bias_offset_idx];\n }`;\n };\n\n return context.compute(\n {\n name: 'MultiHeadAttentionAddBias',\n shaderCache: {inputDependencies: ['type', 'type']},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: qkv.dataType, gpuDataType: GpuDataType.default}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n },\n {inputs: [qkv, bias], outputs: [-1]})[0];\n };\n\nconst maybeTransposeToBNSHAndAddBias =\n (context: ComputeContext, batchSize: number, numHeads: number, sequenceLength: number, headSize: number,\n input: TensorView, bias?: TensorView, biasOffset?: number) => {\n // const newDims = [];\n\n let reshapedInput = input;\n if (!bias) {\n if (input.dims.length === 3) {\n reshapedInput = input.reshape([batchSize, sequenceLength, numHeads, headSize]);\n }\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n } else {\n if (sequenceLength === 1) {\n throw new Error('AddBiasReshape is not implemented. 
Please export your model with packed QKV or KV');\n } else {\n reshapedInput =\n addBiasTranspose(context, input, bias, batchSize, sequenceLength, numHeads * headSize, biasOffset!);\n reshapedInput = reshapedInput.reshape([batchSize, sequenceLength, numHeads, headSize]);\n return context.compute(\n createTransposeProgramInfo(reshapedInput, weightTransposeAttribute.perm),\n {inputs: [reshapedInput], outputs: [-1]})[0];\n }\n }\n };\n\nexport const multiHeadAttention = (context: ComputeContext, attributes: AttentionAttrs): void => {\n const params = validateInputs(context.inputs, attributes);\n const query = context.inputs[0];\n const key = getInput(context.inputs, 1);\n const value = getInput(context.inputs, 2);\n const bias = getInput(context.inputs, 3);\n const keyPaddingMask = getInput(context.inputs, 4);\n const relativePositionBias = getInput(context.inputs, 5);\n const pastKey = getInput(context.inputs, 6);\n const pastValue = getInput(context.inputs, 7);\n if (query.dims.length === 5) {\n throw new Error('Packed QKV is not implemented');\n }\n\n if (key?.dims.length === 5) {\n throw new Error('Packed KV is not implemented');\n }\n\n // applyAttention expects BNSH inputs\n const kvBNSH = key && value && key.dims.length === 4 && value.dims.length === 4;\n\n const Q = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.sequenceLength, params.headSize, query, bias, 0);\n\n if (kvBNSH) {\n return applyAttention(\n context, Q, key, value, keyPaddingMask, undefined, pastKey, pastValue, relativePositionBias, params,\n attributes);\n }\n if (!key || !value) {\n throw new Error('key and value must be provided');\n }\n const K = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.kvSequenceLength, params.headSize, key, bias,\n params.hiddenSize);\n\n const V = maybeTransposeToBNSHAndAddBias(\n context, params.batchSize, params.numHeads, params.kvSequenceLength, params.vHeadSize, value, bias,\n 2 * params.hiddenSize);\n\n applyAttention(\n context, Q, K, V, keyPaddingMask, undefined, pastKey, pastValue, relativePositionBias, params, attributes);\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformDataElementType, UniformsArrayType} from './common';\n\ninterface PadAttributes {\n // 0-constant, 1-reflect, 2-edge, 3-wrap\n readonly mode: number;\n readonly value: number;\n readonly pads: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('Too few inputs');\n }\n if (inputs[0].dataType !== DataType.float && inputs[0].dataType !== DataType.float16) {\n throw new Error('Input type must be float or float16.');\n }\n\n if (inputs.length >= 2) {\n let validPads = inputs[0].dims.length * 2 === inputs[1].dims[0];\n if (inputs.length === 4) {\n validPads = inputs[3].dims[0] * 2 === inputs[1].dims[0];\n }\n if (!validPads) {\n throw new Error('The pads should be a 1D tensor of shape [2 * input_rank] or [2 * num_axes].');\n }\n }\n};\n\nconst getPadConstant = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n break;\n }\n if (k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n break;\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n value = ${output.type.value}(uniforms.constant_value);\n for (var i = 0; i < 1; i++) {\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n }\n `;\n};\n\nconst getPadReflect = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k = -k;\n }\n {\n let _2n_1 = 2 * (i32(${getElementAt('uniforms.x_shape', i, inputRank)}) - 1);\n k = k % _2n_1;\n if(k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k = _2n_1 - k;\n }\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadEdge = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k = 0;\n }\n if (k >= i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k = i32(${getElementAt('uniforms.x_shape', i, inputRank)}) - 1;\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadWrap = (output: IndicesHelper, inputRank: number, padsLength: number): string => {\n let block = '';\n for (let i = inputRank - 1; i >= 0; --i) {\n block += `\n k = i32(${output.indicesGet('indices', i)}) - ${getElementAt('uniforms.pads', i, padsLength)};\n if (k < 0) {\n k += i32(${getElementAt('uniforms.x_shape', i, inputRank)}]);\n }\n if (k >= 
i32(${getElementAt('uniforms.x_shape', i, inputRank)})) {\n k -= i32(${getElementAt('uniforms.x_shape', i, inputRank)});\n }\n offset += k * i32(${getElementAt('uniforms.x_strides', i, inputRank)});\n `;\n }\n\n return `\n var offset = 0;\n var k = 0;\n ${block}\n value = x[offset];\n `;\n};\n\nconst getPadSnippet = (output: IndicesHelper, inputRank: number, attributes: PadAttributes): string => {\n switch (attributes.mode) {\n case 0:\n return getPadConstant(output, inputRank, attributes.pads.length);\n case 1:\n return getPadReflect(output, inputRank, attributes.pads.length);\n case 2:\n return getPadEdge(output, inputRank, attributes.pads.length);\n case 3:\n return getPadWrap(output, inputRank, attributes.pads.length);\n default:\n throw new Error('Invalid mode');\n }\n};\n\nconst createPadProgramInfo = (inputs: readonly TensorView[], attributes: PadAttributes): ProgramInfo => {\n const outputShape = ShapeUtil.padShape(inputs[0].dims.slice(), attributes.pads);\n const inputDims = inputs[0].dims;\n const outputSize = ShapeUtil.size(outputShape);\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, {type: DataType.int32, data: attributes.pads}];\n if (attributes.mode === 0) {\n programUniforms.push({type: inputs[0].dataType, data: attributes.value});\n }\n\n programUniforms.push(...createTensorShapeVariables(inputs[0].dims, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const input = inputVariable('x', inputs[0].dataType, inputDims.length);\n const dataType = input.type.value;\n const padSnippet = getPadSnippet(output, inputDims.length, attributes);\n const uniforms: UniformsArrayType =\n [{name: 'output_size', type: 'u32'}, {name: 'pads', type: 'i32', length: attributes.pads.length}];\n if (attributes.mode === 0) {\n uniforms.push({name: 'constant_value', type: dataType as UniformDataElementType});\n }\n\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n\n var value = ${dataType}(0);\n ${padSnippet}\n output[global_idx] = value;\n }`;\n };\n\n return {\n name: 'Pad',\n shaderCache: {hint: `${attributes.mode}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource,\n };\n};\n\nconst createPadAttributesFromInputs = (inputs: readonly TensorView[], attributes: PadAttributes): PadAttributes => {\n if (inputs.length > 1) {\n const bigInt64Pads = inputs[1].getBigInt64Array();\n const value = (inputs.length >= 3 && inputs[2].data) ? 
inputs[2].getFloat32Array()[0] : 0.0;\n\n const inputRank = inputs[0].dims.length;\n const updatePads = new Int32Array(2 * inputRank).fill(0);\n if (inputs.length >= 4) {\n const axes = inputs[3].getBigInt64Array();\n for (let i = 0; i < axes.length; i++) {\n updatePads[Number(axes[i])] = Number(bigInt64Pads[i]);\n updatePads[Number(axes[i]) + inputRank] = Number(bigInt64Pads[i + axes.length]);\n }\n } else {\n bigInt64Pads.forEach((v, i) => updatePads[Number(i)] = (Number(v)));\n }\n\n const pads: number[] = [];\n updatePads.forEach(v => pads.push(v));\n\n return {mode: attributes.mode, value, pads};\n } else {\n return attributes;\n }\n};\n\nexport const pad = (context: ComputeContext, attributes: PadAttributes): void => {\n validateInputs(context.inputs);\n const updatedAttributes = createPadAttributesFromInputs(context.inputs, attributes);\n context.compute(createPadProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {PoolConvUtil, ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramInputTensorInfoDependency, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\n// TODO: support:\n// - ceil_mode \"test_maxpool_2d_ceil\"\n// - storage_order \"test_maxpool_with_argmax_2d_precomputed_strides\"\n// - [MaxPool] dilations \"test_maxpool_2d_dilations\"\n// - [MaxPool] output[1] \"test_maxpool_with_argmax_2d_precomputed_pads\"\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (env.webgpu.validateInputContent && (!inputs || inputs.length !== 1)) {\n throw new Error('Pool ops requires 1 input.');\n }\n};\n\nconst getAdjustedPoolAttributesAndOutputShape = (\n input: TensorView, attributes: AttributeType, isGlobalOperator: boolean): [AttributeType, number[]] => {\n const isChannelsLast = attributes.format === 'NHWC';\n const inputShapeAsChannelFirst = input.dims.slice();\n if (isChannelsLast) {\n inputShapeAsChannelFirst.splice(1, 0, inputShapeAsChannelFirst.pop()!); // Move channel to the second position.\n }\n const hasDilations = Object.hasOwnProperty.call(attributes, 'dilations');\n const kernelShape = attributes.kernelShape.slice();\n const strides = attributes.strides.slice();\n const dilations: number[] = hasDilations ? (attributes as MaxPoolAttributes).dilations.slice() : [];\n const pads = attributes.pads.slice();\n PoolConvUtil.adjustPoolAttributes(isGlobalOperator, inputShapeAsChannelFirst, kernelShape, strides, dilations, pads);\n\n const outputShapeAsChannelFirst = PoolConvUtil.computePoolOutputShape(\n isGlobalOperator, inputShapeAsChannelFirst, strides, dilations, kernelShape, pads, attributes.autoPad);\n\n const newAttributes = Object.assign({}, attributes);\n if (hasDilations) {\n Object.assign(newAttributes, {kernelShape, strides, pads, dilations, cacheKey: attributes.cacheKey});\n } else {\n Object.assign(newAttributes, {kernelShape, strides, pads, cacheKey: attributes.cacheKey});\n }\n const outputShapeAsChannelLast = outputShapeAsChannelFirst.slice();\n outputShapeAsChannelLast.push(outputShapeAsChannelLast.splice(1, 1)[0]);\n return [newAttributes, isChannelsLast ? 
outputShapeAsChannelLast : outputShapeAsChannelFirst];\n};\n\nconst getUniformAndPadInfo = (\n outputShape: readonly number[],\n attributes: AttributeType): [ProgramUniform[], UniformsArrayType, boolean, boolean, boolean] => {\n const isChannelsLast = attributes.format === 'NHWC';\n const outputSize = ShapeUtil.size(outputShape);\n const kernelSize = ShapeUtil.size(attributes.kernelShape);\n const programUniforms: ProgramUniform[] =\n [{type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: kernelSize}];\n const uniforms: UniformsArrayType = [{name: 'outputSize', type: 'u32'}, {name: 'kernelSize', type: 'u32'}];\n if (attributes.kernelShape.length <= 2) {\n const kw = attributes.kernelShape[attributes.kernelShape.length - 1];\n const sw = attributes.strides[attributes.strides.length - 1];\n const pwStart = attributes.pads[attributes.pads.length / 2 - 1];\n const pwEnd = attributes.pads[attributes.pads.length - 1];\n const pwStartEndNotZero = !!(pwStart + pwEnd);\n programUniforms.push(\n {type: DataType.uint32, data: kw},\n {type: DataType.uint32, data: sw},\n {type: DataType.uint32, data: pwStart},\n {type: DataType.uint32, data: pwEnd},\n );\n uniforms.push(\n {name: 'kw', type: 'u32'}, {name: 'sw', type: 'u32'}, {name: 'pwStart', type: 'u32'},\n {name: 'pwEnd', type: 'u32'});\n\n let phStartEndNotZero = false;\n if (attributes.kernelShape.length === 2) {\n const kh = attributes.kernelShape[attributes.kernelShape.length - 2];\n const sh = attributes.strides[attributes.strides.length - 2];\n const phStart = attributes.pads[attributes.pads.length / 2 - 2];\n const phEnd = attributes.pads[attributes.pads.length - 2];\n phStartEndNotZero = !!(phStart + phEnd);\n programUniforms.push(\n {type: DataType.uint32, data: kh}, {type: DataType.uint32, data: sh}, {type: DataType.uint32, data: phStart},\n {type: DataType.uint32, data: phEnd});\n\n uniforms.push(\n {name: 'kh', type: 'u32'}, {name: 'sh', type: 'u32'}, {name: 'phStart', type: 'u32'},\n {name: 'phEnd', type: 'u32'});\n }\n return [programUniforms, uniforms, true, pwStartEndNotZero, phStartEndNotZero];\n } else {\n if (isChannelsLast) {\n throw new Error('Pooling with kernelShape.length > 2 is not supported for NHWC format.');\n }\n const kernelStrides = ShapeUtil.computeStrides(attributes.kernelShape);\n programUniforms.push(\n {type: DataType.uint32, data: kernelStrides}, {type: DataType.uint32, data: attributes.pads},\n {type: DataType.uint32, data: attributes.strides});\n uniforms.push(\n {name: 'kernelStrides', type: 'u32', length: kernelStrides.length},\n {name: 'pads', type: 'u32', length: attributes.pads.length},\n {name: 'strides', type: 'u32', length: attributes.strides.length});\n\n const hasPads = attributes.pads.reduce((sum, cur) => sum + cur);\n return [programUniforms, uniforms, !!hasPads, false, false];\n }\n};\n\nconst generatePoolingCode = (\n shaderHelper: ShaderHelper, x: IndicesHelper, rank: number, outputShapeRank: number, attributes: AttributeType,\n op1: string, op2: string, start: number, uniforms: UniformsArrayType, hasPads: boolean, pwStartEndNotZero: boolean,\n phStartEndNotZero: boolean): string => {\n const isChannelsLast = attributes.format === 'NHWC';\n const dataType = x.type.value;\n const output = outputVariable('output', x.type.tensor, outputShapeRank);\n\n if (attributes.kernelShape.length <= 2) {\n let codeW = '';\n let codeH = '';\n let codeHEnd = '';\n const dimIdxW = rank - (isChannelsLast ? 
2 : 1);\n if (pwStartEndNotZero) {\n codeW = `\n for (var i: u32 = 0u; i < uniforms.kw; i++) {\n xIndices[${dimIdxW}] = indices[${dimIdxW}] * uniforms.sw - uniforms.pwStart + i;\n if (xIndices[${dimIdxW}] < 0 || xIndices[${dimIdxW}]\n >= uniforms.x_shape[${dimIdxW}]) {\n pad++;\n continue;\n }\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n } else {\n codeW = `\n for (var i: u32 = 0u; i < uniforms.kw; i++) {\n xIndices[${dimIdxW}] = indices[${dimIdxW}] * uniforms.sw - uniforms.pwStart + i;\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n }\n\n if (attributes.kernelShape.length === 2) {\n const dimIdxH = rank - (isChannelsLast ? 3 : 2);\n if (phStartEndNotZero) {\n codeH = `\n for (var j: u32 = 0u; j < uniforms.kh; j++) {\n xIndices[${dimIdxH}] = indices[${dimIdxH}] * uniforms.sh - uniforms.phStart + j;\n if (xIndices[${dimIdxH}] < 0 || xIndices[${dimIdxH}] >= uniforms.x_shape[${dimIdxH}]) {\n pad += i32(uniforms.kw);\n continue;\n }\n `;\n } else {\n codeH = `\n for (var j: u32 = 0u; j < uniforms.kh; j++) {\n xIndices[${dimIdxH}] = indices[${dimIdxH}] * uniforms.sh - uniforms.phStart + j;\n `;\n }\n codeHEnd = `\n }\n `;\n }\n\n const poolingCode = `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(x, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n\n let indices = ${output.offsetToIndices('global_idx')};\n var xIndices = ${output.offsetToIndices('global_idx')};\n\n var value = ${dataType}(${start});\n var pad = 0;\n ${codeH}\n ${codeW}\n ${codeHEnd}\n ${op2}\n\n output[global_idx] = value;\n }`;\n return poolingCode;\n } else {\n if (isChannelsLast) {\n throw new Error('Pooling with kernelShape.length > 2 is not supported for NHWC format.');\n }\n const stridesRank = attributes.kernelShape.length;\n const padsRank = attributes.pads.length;\n let padCode = '';\n if (hasPads) {\n padCode = `\n if (xIndices[j] >= uniforms.x_shape[j]) {\n pad++;\n isPad = true;\n break;\n }\n }\n if (!isPad) {\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n }`;\n } else {\n padCode = `\n }\n let x_val = x[${x.indicesToOffset('xIndices')}];\n ${op1}\n `;\n }\n const poolingCode = `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(x, output)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n let indices = ${output.offsetToIndices('global_idx')};\n var xIndices = ${output.offsetToIndices('global_idx')};\n\n var offsets: array;\n\n var value = ${dataType}(${start});\n var pad = 0;\n var isPad = false;\n\n for (var i: u32 = 0u; i < uniforms.kernelSize; i++) {\n var offset = i;\n for (var j = 0u; j < ${stridesRank - 1}u; j++) {\n offsets[j] = offset / ${getElementAt('uniforms.kernelStrides', 'j', stridesRank)};\n offset -= offsets[j] * ${getElementAt('uniforms.kernelStrides', 'j', stridesRank)};\n }\n offsets[${stridesRank - 1}] = offset;\n\n isPad = false;\n for (var j = ${rank - stridesRank}u; j < ${rank}u; j++) {\n xIndices[j] = indices[j] * ${\n getElementAt('uniforms.strides', `j - ${rank - stridesRank}u`, stridesRank)}\n + offsets[j - ${rank - stridesRank}u] - ${getElementAt('uniforms.pads', 'j - 2u', padsRank)};\n ${padCode}\n }\n ${op2}\n\n output[global_idx] = value;\n }`;\n return poolingCode;\n }\n};\n\nexport interface FormatAttributes {\n readonly format: 'NHWC'|'NCHW';\n}\n\nexport interface PoolCommonAttributes extends FormatAttributes {\n readonly autoPad: string;\n readonly ceilMode: 
number;\n readonly kernelShape: readonly number[];\n readonly strides: readonly number[];\n readonly pads: readonly number[];\n}\n\nconst createShaderKeyFromAttributes = (attributes: PoolCommonAttributes): string =>\n (`${attributes.format};${attributes.ceilMode};${attributes.autoPad};${attributes.kernelShape.length}`);\n\nconst createAveragePoolShaderKeyFromAttributes = (attributes: AveragePoolAttributes): string =>\n (`${createShaderKeyFromAttributes(attributes)};${attributes.countIncludePad}`);\n\nconst createMaxPoolShaderKeyFromAttributes = (attributes: MaxPoolAttributes): string =>\n (`${createShaderKeyFromAttributes(attributes)};${attributes.storageOrder};${attributes.dilations}`);\n\nconst parsePoolCommonAttributes = (attributes: Record): PoolCommonAttributes => ({\n format: attributes.format as FormatAttributes['format'],\n autoPad: ['NOTSET', 'VALID', 'SAME_UPPER', 'SAME_LOWER'][attributes.auto_pad as number],\n ceilMode: attributes.ceil_mode as number,\n kernelShape: attributes.kernel_shape as [number, number],\n strides: attributes.strides as [number, number],\n pads: attributes.pads as [number, number, number, number]\n});\n\nexport interface AveragePoolAttributes extends PoolCommonAttributes, AttributeWithCacheKey {\n readonly countIncludePad: boolean;\n}\n\nconst createAveragePoolProgramInfo =\n (name: string, input: TensorView, isGlobalOperator: boolean, attributes: AveragePoolAttributes): ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(input, attributes, isGlobalOperator);\n const x = inputVariable('x', input.dataType, input.dims.length);\n const dataType = x.type.value;\n\n const op1 = 'value += x_val;';\n let op2 = '';\n if (adjustedAttributes.countIncludePad) {\n op2 += `value /= ${dataType}(uniforms.kernelSize);`;\n } else {\n op2 += `value /= ${dataType}(i32(uniforms.kernelSize) - pad);`;\n }\n const [programUniforms, uniforms, hasPads, pwStartEndNotZero, phStartEndNotZero] =\n getUniformAndPadInfo(outputShape, adjustedAttributes);\n programUniforms.push(...createTensorShapeVariables(input.dims, outputShape));\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n return {\n name,\n shaderCache:\n {hint: `${attributes.cacheKey};${hasPads};${pwStartEndNotZero};${phStartEndNotZero}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: input.dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource: shaderHelper => generatePoolingCode(\n shaderHelper, x, input.dims.length, outputShape.length, adjustedAttributes, op1, op2, 0.0, uniforms,\n hasPads, pwStartEndNotZero, phStartEndNotZero),\n };\n };\n\nexport const parseAveragePoolAttributes = (attributes: Record): AveragePoolAttributes => {\n const countIncludePad = (attributes.count_include_pad as number) === 0 ? 
false : true;\n\n const attr = parsePoolCommonAttributes(attributes);\n // TODO: support attribute 'ceil_mode'\n if (attr.ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for AveragePool');\n }\n const averagePoolAttributes = {countIncludePad, ...attr, cacheKey: ''};\n return {...averagePoolAttributes, cacheKey: createAveragePoolShaderKeyFromAttributes(averagePoolAttributes)};\n};\n\nexport const averagePool = (context: ComputeContext, attributes: AveragePoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createAveragePoolProgramInfo('AveragePool', context.inputs[0], false, attributes));\n};\n\nconst globalPoolAttributes = {\n autoPad: '',\n ceilMode: 0,\n countIncludePad: false,\n kernelShape: [],\n strides: [],\n pads: [],\n storageOrder: 0,\n dilations: []\n};\n\nexport const parseGlobalAveragePoolAttributes = (attributes: Record): AveragePoolAttributes => {\n const format = attributes.format as FormatAttributes['format'];\n return {format, ...globalPoolAttributes, cacheKey: format};\n};\n\nexport const globalAveragePool = (context: ComputeContext, attributes: AveragePoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createAveragePoolProgramInfo('GlobalAveragePool', context.inputs[0], true, attributes));\n};\n\nexport interface MaxPoolAttributes extends PoolCommonAttributes, AttributeWithCacheKey {\n readonly storageOrder: number;\n readonly dilations: number[];\n}\n\nconst createMaxPoolProgramInfo =\n (name: string, input: TensorView, isGlobalOperator: boolean, attributes: MaxPoolAttributes): ProgramInfo => {\n const [adjustedAttributes, outputShape] =\n getAdjustedPoolAttributesAndOutputShape(input, attributes, isGlobalOperator);\n const op1 = `\n value = max(x_val, value);\n `;\n const op2 = '';\n const x = inputVariable('x', input.dataType, input.dims.length);\n const inputDependencies: ProgramInputTensorInfoDependency[] = ['rank'];\n const [programUniforms, uniforms, hasPads, pwStartEndNotZero, phStartEndNotZero] =\n getUniformAndPadInfo(outputShape, adjustedAttributes);\n programUniforms.push(...createTensorShapeVariables(input.dims, outputShape));\n return {\n name,\n shaderCache:\n {hint: `${attributes.cacheKey};${hasPads};${pwStartEndNotZero};${phStartEndNotZero}`, inputDependencies},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: input.dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(outputShape) / 64 /* workgroup size */)},\n programUniforms\n }),\n getShaderSource: shaderHelper => generatePoolingCode(\n shaderHelper, x, input.dims.length, outputShape.length, adjustedAttributes, op1, op2,\n (input.dataType === DataType.float16) ? 
-65504 : -1e5, uniforms, hasPads, pwStartEndNotZero,\n phStartEndNotZero),\n };\n };\n\nexport const maxPool = (context: ComputeContext, attributes: MaxPoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createMaxPoolProgramInfo('MaxPool', context.inputs[0], false, attributes));\n};\n\nexport const parseMaxPoolAttributes = (attributes: Record): MaxPoolAttributes => {\n const storageOrder = attributes.storage_order as number;\n const dilations = attributes.dilations as [number, number];\n\n const attr = parsePoolCommonAttributes(attributes);\n // TODO: support attribute 'ceil_mode' and 'storage_order'\n if (storageOrder !== 0) {\n throw new Error('column major storage order is not yet supported for MaxPool');\n }\n if (attr.ceilMode !== 0) {\n throw new Error('using ceil() in shape computation is not yet supported for MaxPool');\n }\n const maxPoolAttributes = {storageOrder, dilations, ...attr, cacheKey: ''};\n return {...maxPoolAttributes, cacheKey: createMaxPoolShaderKeyFromAttributes(maxPoolAttributes)};\n};\n\nexport const parseGlobalMaxPoolAttributes = (attributes: Record): MaxPoolAttributes => {\n const format = attributes.format as FormatAttributes['format'];\n return {format, ...globalPoolAttributes, cacheKey: format};\n};\n\nexport const globalMaxPool = (context: ComputeContext, attributes: MaxPoolAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createMaxPoolProgramInfo('GlobalMaxPool', context.inputs[0], true, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env} from 'onnxruntime-common';\n\nimport {DataType} from '../../../wasm-common';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, outputVariable, ShaderHelper, UniformDataElementType, UniformsArrayType} from './common';\n\nconst validateInputsContent = (start: number, limit: number, delta: number): void => {\n const sameStartLimit = start === limit;\n const increasingRangeNegativeStep = start < limit && delta < 0;\n const decreasingRangePositiveStep = start > limit && delta > 0;\n\n if (sameStartLimit || increasingRangeNegativeStep || decreasingRangePositiveStep) {\n throw new Error('Range these inputs\\' contents are invalid.');\n }\n};\n\nconst createRangeProgramInfo = (start: number, limit: number, delta: number, dataType: DataType): ProgramInfo => {\n const numElements = Math.abs(Math.ceil((limit - start) / delta));\n const outputShape: number[] = [numElements];\n const outputSize = numElements;\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: dataType, data: start}, {type: dataType, data: delta},\n ...createTensorShapeVariables(outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const output = outputVariable('output', dataType, outputShape.length);\n const wgslType = output.type.value;\n const uniforms: UniformsArrayType = [\n {name: 'outputSize', type: 'u32'}, {name: 'start', type: wgslType as UniformDataElementType},\n {name: 'delta', type: wgslType as UniformDataElementType}\n ];\n return `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n output[global_idx] = uniforms.start + ${wgslType}(global_idx) * uniforms.delta;\n }`;\n };\n\n return {\n name: 'Range',\n shaderCache: {hint: `${dataType}`},\n getShaderSource,\n 
getRunData: () => ({\n outputs: [{dims: outputShape, dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const range = (context: ComputeContext): void => {\n let start = 0;\n let limit = 0;\n let delta = 0;\n if (context.inputs[0].dataType === DataType.int32) {\n start = context.inputs[0].getInt32Array()[0];\n limit = context.inputs[1].getInt32Array()[0];\n delta = context.inputs[2].getInt32Array()[0];\n } else if (context.inputs[0].dataType === DataType.float) {\n start = context.inputs[0].getFloat32Array()[0];\n limit = context.inputs[1].getFloat32Array()[0];\n delta = context.inputs[2].getFloat32Array()[0];\n }\n if (env.webgpu.validateInputContent) {\n validateInputsContent(start, limit, delta);\n }\n\n context.compute(createRangeProgramInfo(start, limit, delta, context.inputs[0].dataType), {inputs: []});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\ntype CoordinateTransformMode = 'half_pixel'|'asymmetric'|'pytorch_half_pixel'|'tf_half_pixel_for_nn'|'align_corners'|\n 'tf_crop_and_resize'|'half_pixel_symmetric';\n\ntype KeepAspectRatioPolicy = 'stretch'|'not_smaller'|'not_larger';\n\ntype Mode = 'nearest'|'linear'|'cubic';\n\ntype NearestMode = 'round_prefer_floor'|'round_prefer_ceil'|'floor'|'ceil'|'simple';\n\nexport interface ResizeAttributes extends AttributeWithCacheKey {\n antialias: number;\n axes: number[];\n coordinateTransformMode: CoordinateTransformMode;\n cubicCoeffA: number;\n excludeOutside: boolean;\n extrapolationValue: number;\n keepAspectRatioPolicy: KeepAspectRatioPolicy;\n mode: Mode;\n nearestMode: NearestMode;\n}\n\nconst validateScales = (scales: number[], attributes: ResizeAttributes): void => {\n scales.every((value) => value > 0 || (() => {\n throw new Error('Resize requires scales input values to be positive');\n }));\n // Check scales dims based on mode: LINEAR, CUBIC\n if (scales.length > 0) {\n if (attributes.mode === 'linear') {\n if (!(scales.length === 2 || scales.length === 3 || (scales.length === 4 && scales[0] === 1 && scales[1] === 1) ||\n (scales.length === 4 && scales[0] === 1 && scales[3] === 1) ||\n (scales.length === 5 && scales[0] === 1 && scales[1] === 1))) {\n throw new Error(\n `For linear mode, Resize requires scales to be 2D, 3D, 4D with either two outermost or one innermost and\n one outermost scale values equal to 1, or 5D with two outermost scale values equal to 1`);\n }\n } else if (attributes.mode === 'cubic') {\n if (!(scales.length === 2 || (scales.length === 4 && scales[0] === 1 && scales[1] === 1) ||\n (scales.length === 4 && scales[0] === 1 && scales[3] === 1))) {\n throw new Error('Resize requires scales input size to be 2 or 4 for cubic mode');\n }\n }\n }\n};\n\nconst updateScales = (scales: readonly number[], axes: readonly number[], rank: number): number[] => {\n axes.every((value) => value >= 0 && value < rank || (() => {\n throw new Error('Resize requires axes input values to be positive and less than rank');\n }));\n const newScales = new Array(rank).fill(1.0);\n 
axes.forEach((value, index) => newScales[value] = scales[index]);\n return newScales;\n};\n\nconst validateInputs =\n (inputs: readonly TensorView[], attributes: ResizeAttributes, opsetVersion: number, scales: number[],\n sizes: number[], roi: number[]): void => {\n const [roiInputIndex, scalesInputIndex, sizesInputIndex] =\n (opsetVersion > 10) ? [1, 2, 3] : [-1, (inputs.length > 1) ? 1 : -1, -1];\n const rank = inputs[0].dims.length;\n if (roiInputIndex > 0 && inputs.length > roiInputIndex && inputs[roiInputIndex].dims.length > 0) {\n inputs[roiInputIndex].getFloat32Array().forEach((value) => roi.push(value));\n } else if (attributes.coordinateTransformMode === 'tf_crop_and_resize') {\n throw new Error('Resize requires RoI input to be specified when coordinateTransformMode is tf_crop_and_resize');\n }\n\n if (scalesInputIndex > 0 && inputs.length > scalesInputIndex && inputs[scalesInputIndex].dims.length > 0) {\n inputs[scalesInputIndex].getFloat32Array().forEach((value) => scales.push(value));\n if (scales.length !== 0 &&\n (scales.length !== rank && (opsetVersion >= 18 && scales.length !== attributes.axes.length))) {\n throw new Error(\n 'Resize requires scales input size to be same as input rank or axes size for opset 18 and up');\n }\n validateScales(scales, attributes);\n if (attributes.axes.length > 0) {\n updateScales(scales, attributes.axes, rank).forEach((value, index) => scales[index] = value);\n }\n }\n if (sizesInputIndex > 0 && inputs.length > sizesInputIndex) {\n inputs[sizesInputIndex].getBigInt64Array().forEach((value) => sizes.push(Number(value)));\n if (sizes.length !== rank || (opsetVersion >= 18 && sizes.length === attributes.axes.length)) {\n throw new Error('Resize requires sizes input size to be same as input rank or axes size for opset 18 and up');\n }\n }\n\n if (attributes.axes.length > 0) {\n if (scales.length !== attributes.axes.length) {\n throw new Error('Resize requires \"scales\" input size to be equal to the axes rank when the axes attribute is specified');\n }\n if (sizes.length !== attributes.axes.length) {\n throw new Error(\n 'Resize requires \"sizes\" input size to be equal to the axes rank when the axes attribute is specified');\n }\n }\n if (typeof scales !== 'undefined' && typeof sizes !== 'undefined' && scales.length > 0 && sizes.length > rank) {\n throw new Error('Resize requires only one of scales or sizes to be specified');\n }\n };\n\nconst getOriginalCoordinateFromResizedCoordinate =\n (coordinateTransferMode: CoordinateTransformMode, dType: string): string =>\n `fn getOriginalCoordinateFromResizedCoordinate(xResized: u32, xScale: f32, lengthResized: u32,\n lengthOriginal: u32, roiStart: f32, roiEnd: f32) -> ${dType} { ` +\n (() => {\n switch (coordinateTransferMode) {\n case 'asymmetric':\n return `return ${dType}(xResized) / ${dType}(xScale);`;\n case 'pytorch_half_pixel':\n return `if (lengthResized > 1) {\n return (${dType}(xResized) + 0.5) / ${dType}(xScale) - 0.5;\n } else {\n return 0.0;\n }`;\n case 'tf_half_pixel_for_nn':\n return `return (${dType}(xResized) + 0.5) / ${dType}(xScale);`;\n case 'align_corners':\n return `if (lengthResized == 1) {\n return 0.0;\n } else {\n // The whole part and the fractional part are calculated separately due to inaccuracy of floating\n // point division. As an example, f32(21) / f32(7) may evaluate to 2.99... 
instead of 3, causing an\n // offset-by-one error later in floor().\n let whole = ${dType}(xResized * (lengthOriginal - 1) / (lengthResized - 1));\n let fract =\n ${dType}(xResized * (lengthOriginal - 1) % (lengthResized - 1)) / ${dType}(lengthResized - 1);\n return whole + fract;\n }`;\n case 'tf_crop_and_resize':\n return `if (lengthResized > 1) {\n return ${dType}(roiStart) * ${dType}(lengthOriginal - 1) +\n (${dType}(xResized) * ${dType}(roiEnd - roiStart) * ${dType}(lengthOriginal - 1)) /\n ${dType}(lengthResized - 1);\n } else {\n return 0.5 * ${dType}(roiStart + roiEnd) * ${dType}(lengthOriginal - 1);\n }`;\n case 'half_pixel_symmetric':\n return `const outputWidth = ${dType}(xScale) * ${dType}(lengthResized);\n const adjustment = ${dType}(lengthResized) / outputWidth;\n const center = ${dType}(lengthOriginal) / 2;\n const offset = center * (1 - adjustment);\n return offset + ((${dType}(xResized) + 0.5) / ${dType}(xScale)) - 0.5;`;\n case 'half_pixel':\n return `return ((${dType}(xResized) + 0.5) / ${dType}(xScale)) - 0.5;`;\n default:\n throw new Error(`Coordinate transform mode ${coordinateTransferMode} is not supported`);\n }\n })() +\n '}';\n\nconst getNearestPixelFromOriginal = (nearestMode: NearestMode, opsetVersion: number, dType: string): string =>\n `fn getNearestPixelFromOriginal(xOriginal: ${dType}, isDownSample: bool) -> ${dType} {` + (() => {\n switch (nearestMode) {\n case 'round_prefer_ceil':\n return 'if (fract(xOriginal) == 0.5) { \\\n return ceil(xOriginal); \\\n } else { \\\n return round(xOriginal); \\\n }';\n case 'floor':\n return 'return floor(xOriginal);';\n case 'ceil':\n return 'return ceil(xOriginal);';\n case 'round_prefer_floor':\n return 'if (fract(xOriginal) == 0.5) { \\\n return floor(xOriginal); \\\n } else { \\\n return round(xOriginal); \\\n }';\n case 'simple':\n default:\n if (opsetVersion < 11) {\n return 'if (isDownSample) \\\n { \\\n return ceil(xOriginal); \\\n } else { \\\n return xOriginal; \\\n }';\n }\n throw new Error(`Nearest mode ${nearestMode} is not supported`);\n }\n })() +\n '}';\n\nconst updateRoI = (roi: readonly number[], axes: readonly number[], rank: number): number[] => {\n const roiTmp = new Array(rank).fill(0).concat(new Array(rank).fill(1));\n const roiLocal = roi.length === 0 ? roiTmp : roi.slice();\n if (axes.length > 0) {\n axes.forEach((v, i) => {\n roiTmp[v] = roiLocal[i];\n roiTmp[i + rank] = roiLocal[axes.length + i];\n });\n return roiTmp;\n }\n return roiLocal;\n};\n\nconst initOutputShape =\n (inputShape: readonly number[], scales: readonly number[], sizes: readonly number[], axes: readonly number[]):\n number[] => {\n let outputShape: number[] = [];\n if (sizes.length > 0) {\n if (axes.length > 0) {\n inputShape.forEach((v) => outputShape.push(v));\n if (Math.max(...axes) > inputShape.length) {\n throw new Error('axes is out of bounds');\n }\n axes.forEach((v, i) => outputShape[v] = sizes[i]);\n } else {\n sizes.forEach((v) => outputShape.push(v));\n }\n } else {\n if (scales.length === 0) {\n throw new Error('Resize requires either scales or sizes.');\n } else {\n outputShape = inputShape.map((value, index) => Math.round(value * scales[index]));\n }\n }\n return outputShape;\n };\n\nconst adjustOutputShape = (inputShape: readonly number[], scales: number[], attributes: ResizeAttributes) => {\n const scaleInPolicy = (() => {\n switch (attributes.keepAspectRatioPolicy) {\n case 'not_larger':\n return attributes.axes.length > 0 ? 
Math.min(...attributes.axes.map(i => scales[i]), Number.MAX_VALUE) :\n Math.min(...scales, Number.MAX_VALUE);\n case 'not_smaller':\n return attributes.axes.length > 0 ? Math.max(...attributes.axes.map(i => scales[i]), Number.MIN_VALUE) :\n Math.max(...scales, Number.MIN_VALUE);\n default:\n throw new Error(`Keep aspect ratio policy ${attributes.keepAspectRatioPolicy} is not supported`);\n }\n })();\n scales.fill(1.0, 0, scales.length);\n const adjustedOutputShape = inputShape.slice();\n if (attributes.axes.length > 0) {\n attributes.axes.forEach((v) => scales[v] = scaleInPolicy);\n attributes.axes.forEach((v) => adjustedOutputShape[v] = Math.round(inputShape[v] * scales[v]));\n } else {\n scales.fill(scaleInPolicy, 0, scales.length);\n adjustedOutputShape.forEach((v, i) => adjustedOutputShape[i] = Math.round(v * scales[i]));\n }\n return adjustedOutputShape;\n};\n\nconst calculateOriginalIndicesFromOutputIndices =\n (output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[], scalesLength: number,\n roiLength: number): string => `\n fn calculateOriginalIndicesFromOutputIndices(output_indices: ${output.type.indices}) -> array<${\n output.type.value}, ${outputShape.length}> {\n var original_indices: array<${output.type.value}, ${outputShape.length}>;\n for (var i:u32 = 0; i < ${outputShape.length}; i++) {\n var output_index = ${output.indicesGet('output_indices', 'i')};\n var scale = ${getElementAt('uniforms.scales', 'i', scalesLength)};\n var roi_low = ${getElementAt('uniforms.roi', 'i', roiLength)};\n var roi_hi = ${getElementAt('uniforms.roi', `i + ${inputShape.length}`, roiLength)};\n if (scale == 1.0) {\n original_indices[i] = ${output.type.value}(output_index);\n } else {\n var input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n var output_shape_i = ${getElementAt('uniforms.output_shape', 'i', outputShape.length)};\n original_indices[i] = getOriginalCoordinateFromResizedCoordinate(output_index, scale, output_shape_i,\n input_shape_i, roi_low, roi_hi);\n }\n }\n return original_indices;\n }`;\n\nconst calculateInputIndicesFromOutputIndices =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[],\n scalesLength: number, roiLength: number, useExtrapolation: boolean): string => `\n fn calculateInputIndicesFromOutputIndices(output_indices: ${output.type.indices}) -> ${input.type.indices} {\n var input_indices: ${input.type.indices};\n for (var i:u32 = 0; i < ${outputShape.length}; i++) {\n var output_index = ${output.indicesGet('output_indices', 'i')};\n var input_index: u32;\n var scale = ${getElementAt('uniforms.scales', 'i', scalesLength)};\n if (scale == 1.0) {\n input_index = output_index;\n } else {\n var roi_low = ${getElementAt('uniforms.roi', 'i', roiLength)};\n var roi_hi = ${getElementAt('uniforms.roi', `i + ${inputShape.length}`, roiLength)};\n var input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n var output_shape_i = ${getElementAt('uniforms.output_shape', 'i', outputShape.length)};\n var original_idx = getOriginalCoordinateFromResizedCoordinate(output_index, scale, output_shape_i,\n input_shape_i, roi_low, roi_hi);\n if (!${useExtrapolation} || (original_idx >= 0 && original_idx < ${output.type.value}(input_shape_i))) {\n if (original_idx < 0) {\n input_index = 0;\n } else if (original_idx > ${output.type.value}(input_shape_i - 1)) {\n input_index = input_shape_i - 1;\n } else {\n input_index = 
u32(getNearestPixelFromOriginal(original_idx, scale < 1));\n }\n } else {\n input_index = u32(original_idx);\n }\n }\n ${input.indicesSet('input_indices', 'i', ' input_index')}\n }\n return input_indices;\n }`;\nconst checkInputIndices = (input: IndicesHelper, inputShape: readonly number[]): string => `\n fn checkInputIndices(input_indices: ${input.type.indices}) -> bool {\n for (var i:u32 = 0; i < ${inputShape.length}; i++) {\n var input_index = ${input.indicesGet('input_indices', 'i')};\n if (input_index < 0 || input_index >= ${getElementAt('uniforms.input_shape', 'i', inputShape.length)}) {\n return false;\n }\n }\n return true;\n }`;\n\nconst setChannelAndBatchIndices =\n (input: IndicesHelper, channelIdx: number, batchIdx: number, spacialDims: number): string =>\n input.rank > spacialDims ? `\n ${input.indicesSet('input_indices', channelIdx, 'channel')};\n ${input.indicesSet('input_indices', batchIdx, 'batch')};\n` :\n '';\n\nconst bilinearInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], useExtrapolation: boolean,\n extrapolationValue: number): string => {\n const isNchw = true;\n const [batchIdx, heightIdx, widthIdx, channelIdx] =\n inputShape.length === 2 ? [-1, 0, 1, -1] : (isNchw ? [0, 2, 3, 1] : [0, 1, 2, 3]);\n const dType = input.type.value;\n return `\n fn getInputValue(batch: u32, channel: u32, row: u32, col: u32) -> ${dType} {\n var input_indices: ${input.type.indices};\n ${input.indicesSet('input_indices', heightIdx, `max(0, min(row, ${inputShape[heightIdx]} - 1))`)};\n ${input.indicesSet('input_indices', widthIdx, `max(0, min(col, ${inputShape[widthIdx]} - 1))`)};\n ${setChannelAndBatchIndices(input, channelIdx, batchIdx, 2)}\n return ${input.getByIndices('input_indices')};\n }\n\n fn bilinearInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var originalIndices = calculateOriginalIndicesFromOutputIndices(output_indices);\n var row:${dType} = originalIndices[${heightIdx}];\n var col:${dType} = originalIndices[${widthIdx}];\n ${\n useExtrapolation ?\n `if (row < 0 || row > (${inputShape[heightIdx]} - 1) || col < 0 || col > (${inputShape[widthIdx]} - 1)) {\n return ${extrapolationValue};\n }` :\n ''};\n row = max(0, min(row, ${inputShape[heightIdx]} - 1));\n col = max(0, min(col, ${inputShape[widthIdx]} - 1));\n var row1: u32 = u32(row);\n var col1: u32 = u32(col);\n var row2: u32 = u32(row + 1);\n var col2: u32 = u32(col + 1);\n var channel: u32 = ${inputShape.length > 2 ? `u32(originalIndices[${channelIdx}])` : '0'};\n var batch: u32 = ${inputShape.length > 2 ? 
`u32(originalIndices[${batchIdx}])` : '0'};\n var x11: ${dType} = getInputValue(batch, channel, row1, col1);\n var x12: ${dType} = getInputValue(batch, channel, row1, col2);\n var x21: ${dType} = getInputValue(batch, channel, row2, col1);\n var x22: ${dType} = getInputValue(batch, channel, row2, col2);\n var dx1: ${dType} = abs(row - ${dType}(row1));\n var dx2: ${dType} = abs(${dType}(row2) - row);\n var dy1: ${dType} = abs(col - ${dType}(col1));\n var dy2: ${dType} = abs(${dType}(col2) - col);\n if (row1 == row2) {\n dx1 = 0.5;\n dx2 = 0.5;\n }\n if (col1 == col2) {\n dy1 = 0.5;\n dy2 = 0.5;\n }\n return (x11 * dx2 * dy2 + x12 * dx2 * dy1 + x21 * dx1 * dy2 + x22 * dx1 * dy1);\n }`;\n };\n\nconst bicubicInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], outputShape: readonly number[],\n scales: readonly number[], roi: readonly number[], cubicCoeffA: number, useExtrapolation: boolean,\n extrapolationValue: number, excludeOutside: boolean): string => {\n const is2D = inputShape.length === 2;\n const isNchw = true;\n const [heightIdx, widthIdx] = is2D ? [0, 1] : isNchw ? [2, 3] : [1, 2];\n const dType = input.type.value;\n const createCubicInterpolationFunction = (idx: number): string => {\n const direction = idx === heightIdx ? 'row' : 'col';\n return `\n fn ${direction}CubicInterpolation(input_indices: ${input.type.indices}, output_indices: ${\n output.type.indices}) -> ${dType} {\n var output_index = ${output.indicesGet('output_indices', idx)};\n var originalIdx: ${dType} = getOriginalCoordinateFromResizedCoordinate(output_index, ${scales[idx]},\n ${outputShape[idx]}, ${inputShape[idx]}, ${roi[idx]}, ${roi[idx]} + ${inputShape.length});\n var fractOriginalIdx: ${dType} = originalIdx - floor(originalIdx);\n var coefs = getCubicInterpolationCoefs(fractOriginalIdx);\n\n if (${useExtrapolation} && (originalIdx < 0 || originalIdx > (${inputShape[idx]} - 1))) {\n return ${extrapolationValue};\n }\n var data: array<${dType}, 4> = array<${dType}, 4>(0.0, 0.0, 0.0, 0.0);\n for (var i: i32 = -1; i < 3; i++) {\n var ${direction}: ${dType} = originalIdx + ${dType}(i);\n if (${direction} < 0 || ${direction} >= ${inputShape[idx]}) {\n ${(() => {\n if (excludeOutside) {\n return `coefs[i + 1] = 0.0;\n continue;`;\n } else if (useExtrapolation) {\n return `return ${extrapolationValue};`;\n } else {\n return `${direction} = max(0, min(${direction}, ${inputShape[idx]} - 1));`;\n }\n })()};\n }\n var input_indices_copy: ${input.type.indices} = input_indices;\n ${input.indicesSet('input_indices_copy', idx, `u32(${direction})`)};\n data[i + 1] = ${\n idx === heightIdx ? 
input.getByIndices('input_indices_copy') :\n 'rowCubicInterpolation(input_indices_copy, output_indices)'};\n }\n return cubicInterpolation1D(data, coefs);\n }`;\n };\n\n return `\n ${createCubicInterpolationFunction(heightIdx)};\n ${createCubicInterpolationFunction(widthIdx)};\n fn getCubicInterpolationCoefs(s: ${dType}) -> array<${dType}, 4> {\n var absS = abs(s);\n var coeffs: array<${dType}, 4> = array<${dType}, 4>(0.0, 0.0, 0.0, 0.0);\n var oneMinusAbsS: ${dType} = 1.0 - absS;\n var twoMinusAbsS: ${dType} = 2.0 - absS;\n var onePlusAbsS: ${dType} = 1.0 + absS;\n coeffs[0] = ((${cubicCoeffA} * onePlusAbsS - 5 * ${cubicCoeffA}) * onePlusAbsS + 8 * ${\n cubicCoeffA}) * onePlusAbsS - 4 * ${cubicCoeffA};\n coeffs[1] = ((${cubicCoeffA} + 2) * absS - (${cubicCoeffA} + 3)) * absS * absS + 1;\n coeffs[2] = ((${cubicCoeffA} + 2) * oneMinusAbsS - (${cubicCoeffA} + 3)) * oneMinusAbsS * oneMinusAbsS + 1;\n coeffs[3] = ((${cubicCoeffA} * twoMinusAbsS - 5 * ${cubicCoeffA}) * twoMinusAbsS + 8 * ${\n cubicCoeffA}) * twoMinusAbsS - 4 * ${cubicCoeffA};\n return coeffs;\n }\n\n fn cubicInterpolation1D(x: array<${dType}, 4>, coefs: array<${dType}, 4>) -> ${dType} {\n var coefsSum: ${dType} = coefs[0] + coefs[1] + coefs[2] + coefs[3];\n return (x[0] * coefs[0] + x[1] * coefs[1]+ x[2] * coefs[2]+ x[3] * coefs[3]) / coefsSum;\n }\n\n fn bicubicInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var input_indices: ${input.type.indices} = output_indices;\n return colCubicInterpolation(input_indices, output_indices);\n }\n `;\n };\n\nconst trilinearInterpolation =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[], useExtrapolation: boolean,\n extrapolationValue: number): string => {\n const isNchw = true;\n const [batchIdx, depthIdx, heightIdx, widthIdx, channelIdx] =\n inputShape.length === 3 ? [-1, 0, 1, 2, -1] : (isNchw ? [0, 2, 3, 4, 1] : [0, 1, 2, 3, 4]);\n const dType = input.type.value;\n return `\n fn getInputValue(batch: u32, channel: u32, depth:u32, height: u32, width: u32) -> ${dType} {\n var input_indices: ${input.type.indices};\n ${input.indicesSet('input_indices', depthIdx, `max(0, min(depth, ${inputShape[depthIdx]} - 1))`)};\n ${input.indicesSet('input_indices', heightIdx, `max(0, min(height, ${inputShape[heightIdx]} - 1))`)};\n ${input.indicesSet('input_indices', widthIdx, `max(0, min(width, ${inputShape[widthIdx]} - 1))`)};\n ${setChannelAndBatchIndices(input, channelIdx, batchIdx, 3)}\n return ${input.getByIndices('input_indices')};\n }\n\n fn trilinearInterpolation(output_indices: ${output.type.indices}) -> ${dType} {\n var originalIndices = calculateOriginalIndicesFromOutputIndices(output_indices);\n var depth:${dType} = originalIndices[${depthIdx}];\n var height:${dType} = originalIndices[${heightIdx}];\n var width:${dType} = originalIndices[${widthIdx}];\n ${\n useExtrapolation ? `if (depth < 0 || depth > (${inputShape[depthIdx]} - 1) || height < 0 || height > (${\n inputShape[heightIdx]} - 1) || width < 0 || (width > ${inputShape[widthIdx]} - 1)) {\n return ${extrapolationValue};\n }` :\n ''};\n\n depth = max(0, min(depth, ${inputShape[depthIdx]} - 1));\n height = max(0, min(height, ${inputShape[heightIdx]} - 1));\n width = max(0, min(width, ${inputShape[widthIdx]} - 1));\n var depth1: u32 = u32(depth);\n var height1: u32 = u32(height);\n var width1: u32 = u32(width);\n var depth2: u32 = u32(depth + 1);\n var height2: u32 = u32(height + 1);\n var width2: u32 = u32(width + 1);\n var channel: u32 = ${inputShape.length > 3 ? 
`u32(originalIndices[${channelIdx}])` : '0'};\n var batch: u32 = ${inputShape.length > 3 ? `u32(originalIndices[${batchIdx}])` : '0'};\n\n var x111: ${dType} = getInputValue(batch, channel, depth1, height1, width1);\n var x112: ${dType} = getInputValue(batch, channel, depth1, height1, width2);\n var x121: ${dType} = getInputValue(batch, channel, depth1, height2, width1);\n var x122: ${dType} = getInputValue(batch, channel, depth1, height2, width2);\n var x211: ${dType} = getInputValue(batch, channel, depth2, height1, width1);\n var x212: ${dType} = getInputValue(batch, channel, depth2, height1, width2);\n var x221: ${dType} = getInputValue(batch, channel, depth2, height2, width1);\n var x222: ${dType} = getInputValue(batch, channel, depth2, height2, width2);\n var dx1: ${dType} = abs(depth - ${dType}(depth1));\n var dx2: ${dType} = abs(${dType}(depth2) - depth);\n var dy1: ${dType} = abs(height - ${dType}(height1));\n var dy2: ${dType} = abs(${dType}(height2) - height);\n var dz1: ${dType} = abs(width - ${dType}(width1));\n var dz2: ${dType} = abs(${dType}(width2) - width);\n if (depth1 == depth2) {\n dx1 = 0.5;\n dx2 = 0.5;\n }\n if (height1 == height2) {\n dy1 = 0.5;\n dy2 = 0.5;\n }\n if (width1 == width2) {\n dz1 = 0.5;\n dz2 = 0.5;\n }\n return (x111 * dx2 * dy2 * dz2 + x112 * dx2 * dy2 * dz1 + x121 * dx2 * dy1 *dz2 + x122 * dx2 * dy1 * dz1 +\n x211 * dx1 * dy2 * dz2 + x212 * dx1 * dy2 * dz1 + x221 * dx1 * dy1 *dz2 + x222 * dx1 * dy1 * dz1);\n }`;\n };\n\nconst createResizeProgramInfo =\n (inputTensor: TensorView, attributes: ResizeAttributes, opsetVersion: number, scalesInput: readonly number[],\n sizes: readonly number[], roiInput: readonly number[]): ProgramInfo => {\n const inputShape = inputTensor.dims;\n const roi = updateRoI(roiInput, attributes.axes, inputShape.length);\n\n let outputShape = initOutputShape(inputShape, scalesInput, sizes, attributes.axes);\n let scales = scalesInput.slice();\n if (scalesInput.length === 0) {\n scales = inputShape.map((value, index) => value === 0 ? 1.0 : outputShape[index] / value);\n if (attributes.keepAspectRatioPolicy !== 'stretch') {\n outputShape = adjustOutputShape(inputShape, scales, attributes);\n }\n }\n const output = outputVariable('output', inputTensor.dataType, outputShape.length);\n const input = inputVariable('input', inputTensor.dataType, inputShape.length);\n const outputSize = ShapeUtil.size(outputShape);\n const noScale = inputShape.length === outputShape.length && inputShape.every((d, i) => d === outputShape[i]);\n const useExtrapolation = attributes.coordinateTransformMode === 'tf_crop_and_resize';\n const extrapolationValue = attributes.extrapolationValue;\n const dataType = input.type.value;\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${noScale ? 
'' : `\n ${getOriginalCoordinateFromResizedCoordinate(attributes.coordinateTransformMode, dataType)};\n ${(() => {\n switch (attributes.mode) {\n case 'nearest':\n return `\n ${checkInputIndices(input, inputShape)};\n ${getNearestPixelFromOriginal(attributes.nearestMode, opsetVersion, dataType)};\n ${\n calculateInputIndicesFromOutputIndices(\n input, output, inputShape, outputShape, scales.length, roi.length, useExtrapolation)};\n `;\n case 'linear':\n return `\n ${calculateOriginalIndicesFromOutputIndices(output, inputShape, outputShape, scales.length, roi.length)};\n ${(() => {\n if (inputShape.length === 2 || inputShape.length === 4) {\n return `${bilinearInterpolation(input, output, inputShape, useExtrapolation, extrapolationValue)}`;\n } else if (inputShape.length === 3 || inputShape.length === 5) {\n return `${trilinearInterpolation(input, output, inputShape, useExtrapolation, extrapolationValue)}`;\n } else {\n throw Error('Linear mode only supports input dims 2, 3, 4 and 5.');\n }\n })()};\n `;\n case 'cubic':\n return `\n ${(() => {\n if (inputShape.length === 2 || inputShape.length === 4) {\n return `${\n bicubicInterpolation(\n input, output, inputShape, outputShape, scales, roi, attributes.cubicCoeffA, useExtrapolation,\n attributes.extrapolationValue, attributes.excludeOutside)}`;\n } else {\n throw Error('Cubic mode only supports input dims 2 and 4.');\n }\n })()};\n `;\n default:\n throw Error('Invalid resize mode');\n }\n })()};\n `}\n ${\n shaderHelper.registerUniform('output_size', 'u32')\n .registerUniform('scales', 'f32', scales.length)\n .registerUniform('roi', 'f32', roi.length)\n .declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n ${noScale ? 'output[global_idx] = input[global_idx];' : `\n let output_indices = ${output.offsetToIndices('global_idx')};\n var input_indices: ${input.type.indices};\n ${(() => {\n switch (attributes.mode) {\n case 'nearest':\n return `input_indices = calculateInputIndicesFromOutputIndices(output_indices);\n if (checkInputIndices(input_indices)) {\n output[global_idx] = ${input.getByIndices('input_indices')};\n } else {\n output[global_idx] = ${attributes.extrapolationValue};\n }`;\n case 'linear':\n return `output[global_idx] = ${\n (inputShape.length === 2 || inputShape.length === 4) ? 'bilinearInterpolation' :\n 'trilinearInterpolation'}(output_indices);`;\n case 'cubic':\n return 'output[global_idx] = bicubicInterpolation(output_indices);';\n default:\n throw Error(`Unsupported resize mode: ${attributes.mode}`);\n }\n })()};\n`}\n }`;\n\n return {\n name: 'Resize',\n shaderCache: {\n hint: `${attributes.cacheKey}|${opsetVersion}|${scales.length > 0 ? scales : ''}|${\n sizes.length > 0 ? sizes : ''}|${roi.length > 0 ? 
roi : ''}|${noScale}|${inputShape}`,\n inputDependencies: ['rank']\n },\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputTensor.dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms: [\n {type: DataType.uint32, data: outputSize}, {type: DataType.float, data: scales},\n {type: DataType.float, data: roi}, ...createTensorShapeVariables(inputShape, outputShape)\n ]\n })\n };\n };\n\nconst getOpsetVersionFromCustomDataBuffer = (context: ComputeContext): number => {\n const customDataBuffer = context.customDataBuffer;\n const customDataBuffer32 = new Uint32Array(customDataBuffer, customDataBuffer.byteOffset, 1);\n const opsetVersion = customDataBuffer32[0];\n return opsetVersion;\n};\n\nexport const resize = (context: ComputeContext, attributes: ResizeAttributes): void => {\n const scales: number[] = [];\n const sizes: number[] = [];\n const roi: number[] = [];\n\n // Note that scales in resize are always f32. roi can be f32 or f16.\n // TODO: Currently this code does not support f16 for roi when passed as optional input.\n\n const opsetVersion = getOpsetVersionFromCustomDataBuffer(context);\n if (attributes.antialias !== 0) {\n throw Error('Only default value (0) for Antialias attribute is supported');\n }\n validateInputs(context.inputs, attributes, opsetVersion, scales, sizes, roi);\n context.compute(\n createResizeProgramInfo(context.inputs[0], attributes, opsetVersion, scales, sizes, roi), {inputs: [0]});\n};\n\nexport const parseResizeAttributes = (attributes: Record): ResizeAttributes => {\n const antialias = attributes.antialias as number;\n const axes = attributes.axes as number[];\n const coordinateTransformMode: CoordinateTransformMode =\n attributes.coordinateTransformMode as CoordinateTransformMode;\n const cubicCoeffA = attributes.cubicCoeffA as number;\n const excludeOutside = attributes.excludeOutside as number !== 0;\n const extrapolationValue = attributes.extrapolationValue as number;\n const keepAspectRatioPolicy: KeepAspectRatioPolicy = attributes.keepAspectRatioPolicy as KeepAspectRatioPolicy;\n const mode: Mode = attributes.mode as Mode;\n // If nearestMode is not specified, use simple mode.\n const nearestMode: NearestMode = (attributes.nearestMode === '' ? 'simple' : attributes.nearestMode) as NearestMode;\n return createAttributeWithCacheKey({\n antialias,\n axes,\n coordinateTransformMode,\n cubicCoeffA,\n excludeOutside,\n extrapolationValue,\n keepAspectRatioPolicy,\n mode,\n nearestMode\n });\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper, WORKGROUP_SIZE} from './common';\n\nexport interface RotaryEmbeddingAttributes {\n readonly interleaved: boolean;\n readonly numHeads: number;\n readonly rotaryEmbeddingDim: number;\n readonly scale: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: RotaryEmbeddingAttributes): void => {\n const [input, positionIds, cosCache, sinCache] = inputs;\n const {numHeads, rotaryEmbeddingDim} = attributes;\n\n if (input.dims.length !== 3 && input.dims.length !== 4) {\n throw new Error(`Input 'x' is expected to have 3 or 4 dimensions, got ${input.dims.length}`);\n }\n if (!ShapeUtil.areEqual(positionIds.dims, []) && !ShapeUtil.areEqual(positionIds.dims, [1]) &&\n positionIds.dims.length !== 2) {\n throw new Error(`Input 'position_ids' is expected to have 0, 1, or 2 dimensions, got ${positionIds.dims.length}`);\n }\n if (cosCache.dims.length !== 2) {\n throw new Error(`Input 'cos_cache' is expected to have 2 dimensions, got ${cosCache.dims.length}`);\n }\n if (sinCache.dims.length !== 2) {\n throw new Error(`Input 'sin_cache' is expected to have 2 dimensions, got ${sinCache.dims.length}`);\n }\n if (!ShapeUtil.areEqual(cosCache.dims, sinCache.dims)) {\n throw new Error('Inputs \\'cos_cache\\' and \\'sin_cache\\' are expected to have the same shape');\n }\n\n if (rotaryEmbeddingDim > 0 && numHeads === 0) {\n throw new Error('num_heads must be provided if rotary_embedding_dim is specified');\n }\n\n const batchSize = input.dims[0];\n const sequenceLength = input.dims[input.dims.length - 2];\n const maxSequenceLength = cosCache.dims[0];\n const hiddenSize = ShapeUtil.sizeFromDimension(input.dims, 1) / sequenceLength;\n const headSize = rotaryEmbeddingDim === 0 ? 
cosCache.dims[1] * 2 : hiddenSize / numHeads;\n if (rotaryEmbeddingDim > headSize) {\n throw new Error('rotary_embedding_dim must be less than or equal to head_size');\n }\n\n if (positionIds.dims.length === 2) {\n if (batchSize !== positionIds.dims[0]) {\n throw new Error(`Input 'position_ids' dimension 0 should be of size batch_size, got ${positionIds.dims[0]}`);\n }\n if (sequenceLength !== positionIds.dims[1]) {\n throw new Error(`Input 'position_ids' dimension 1 should be of size sequence_length, got ${positionIds.dims[1]}`);\n }\n }\n\n if (headSize / 2 !== cosCache.dims[1] && rotaryEmbeddingDim / 2 !== cosCache.dims[1]) {\n throw new Error(`Input 'cos_cache' dimension 1 should be same as head_size / 2 or rotary_embedding_dim / 2, got ${\n cosCache.dims[1]}`);\n }\n\n if (sequenceLength > maxSequenceLength) {\n throw new Error('Updating cos_cache and sin_cache in RotaryEmbedding is not currently supported');\n }\n};\n\nconst createRotaryEmbeddingProgramInfo =\n (inputs: readonly TensorView[], attributes: RotaryEmbeddingAttributes): ProgramInfo => {\n const {interleaved, numHeads, rotaryEmbeddingDim, scale} = attributes;\n const batchSize = inputs[0].dims[0];\n const batchStride = ShapeUtil.sizeFromDimension(inputs[0].dims, 1);\n const sequenceLength = inputs[0].dims[inputs[0].dims.length - 2];\n const hiddenSize = batchStride / sequenceLength;\n const halfRotaryEmbeddingDim = inputs[2].dims[1];\n const headSize = rotaryEmbeddingDim === 0 ? halfRotaryEmbeddingDim * 2 : hiddenSize / numHeads;\n\n // Rotary embeddings will be calculated in a pair-wise fashion. In accordance, use the shape\n // [batch size, sequence length, num of heads, num of pairs to rotate + num of dims to copy]\n // to unfold the global index in shader.\n const globalShape =\n new Array(batchSize, sequenceLength, hiddenSize / headSize, headSize - halfRotaryEmbeddingDim);\n const globalStrides = ShapeUtil.computeStrides(globalShape);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.float, data: scale},\n {type: DataType.uint32, data: globalShape},\n {type: DataType.uint32, data: globalStrides},\n\n // strides for addressing the input/output tensor, in permutated order to align with the unfolded global index,\n // i.e. 
BSNH\n ...(inputs[0].dims.length === 3 ?\n new Array({type: DataType.uint32, data: [batchStride, hiddenSize, headSize, 1]}) :\n []),\n ...(inputs[0].dims.length === 4 ?\n new Array(\n {type: DataType.uint32, data: [batchStride, headSize, sequenceLength * headSize, 1]}) :\n []),\n\n ...createTensorShapeVariables(inputs[0].dims, inputs[1].dims, inputs[2].dims, inputs[3].dims, inputs[0].dims),\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims.length);\n const positionIds = inputVariable('position_ids', inputs[1].dataType, inputs[1].dims.length);\n const cosCache = inputVariable('cos_cache', inputs[2].dataType, inputs[2].dims.length);\n const sinCache = inputVariable('sin_cache', inputs[3].dataType, inputs[3].dims.length);\n const output = outputVariable('output', inputs[0].dataType, inputs[0].dims.length);\n\n shaderHelper.registerUniforms([\n {name: 'scale', type: 'f32'},\n {name: 'global_shape', type: 'u32', length: globalShape.length},\n {name: 'global_strides', type: 'u32', length: globalStrides.length},\n {name: 'input_output_strides', type: 'u32', length: globalStrides.length},\n ]);\n\n return `\n ${shaderHelper.declareVariables(input, positionIds, cosCache, sinCache, output)}\n\n ${shaderHelper.mainStart(WORKGROUP_SIZE)}\n let half_rotary_emb_dim = uniforms.${cosCache.name}_shape[1];\n let bsnh = global_idx / uniforms.global_strides % uniforms.global_shape;\n let size = uniforms.global_shape[0] * uniforms.global_strides[0];\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('size')}\n\n if (bsnh[3] < half_rotary_emb_dim) {\n let position_ids_idx =\n ${positionIds.broadcastedIndicesToOffset('bsnh.xy', outputVariable('', positionIds.type.tensor, 2))};\n let position_id =\n u32(${positionIds.getByOffset('position_ids_idx')}) + select(0, bsnh[1], position_ids_idx == 0);\n let i = dot(bsnh, uniforms.input_output_strides) + select(0, bsnh[3], ${interleaved});\n let j = i + select(half_rotary_emb_dim, 1, ${interleaved});\n let re = ${input.getByOffset('i')} * ${cosCache.get('position_id', 'bsnh[3]')} -\n ${input.getByOffset('j')} * ${sinCache.get('position_id', 'bsnh[3]')};\n ${output.setByOffset('i', 're')}\n let im = ${input.getByOffset('i')} * ${sinCache.get('position_id', 'bsnh[3]')} +\n ${input.getByOffset('j')} * ${cosCache.get('position_id', 'bsnh[3]')};\n ${output.setByOffset('j', 'im')}\n } else {\n let k = dot(bsnh, uniforms.input_output_strides) + half_rotary_emb_dim;\n ${output.setByOffset('k', input.getByOffset('k'))}\n }\n }`;\n };\n\n return {\n name: 'RotaryEmbedding',\n shaderCache: {\n hint: createAttributeWithCacheKey({\n interleaved,\n }).cacheKey,\n inputDependencies: ['rank', 'rank', 'rank', 'rank'],\n },\n getShaderSource,\n getRunData: () => ({\n outputs: [{dims: inputs[0].dims, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(ShapeUtil.size(globalShape) / WORKGROUP_SIZE)},\n programUniforms,\n }),\n };\n };\n\nexport const rotaryEmbedding = (context: ComputeContext, attributes: RotaryEmbeddingAttributes): void => {\n validateInputs(context.inputs, attributes);\n context.compute(createRotaryEmbeddingProgramInfo(context.inputs, attributes));\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo, ProgramUniform} from '../types';\n\nimport {castToF32, fillVector, getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType, UniformsArrayType} from './common';\n\nexport interface SkipLayerNormAttributes {\n simplified: boolean;\n epsilon: number;\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 3) {\n throw new Error('layerNorm requires at least 3 inputs.');\n }\n\n const input: TensorView = inputs[0];\n const skip: TensorView = inputs[1];\n const gamma: TensorView = inputs[2];\n\n if (input.dataType !== skip.dataType || input.dataType !== gamma.dataType) {\n throw new Error('All inputs must have the same data type');\n }\n\n if (input.dims.length !== 3 && input.dims.length !== 2) {\n throw new Error('Input must be 2D or 3D');\n }\n\n if (skip.dims.length !== 3 && skip.dims.length !== 2) {\n throw new Error('Skip must be 2D or 3D');\n }\n\n const hiddenSize = input.dims[input.dims.length - 1];\n const sequenceLength = input.dims[input.dims.length - 2];\n if (skip.dims[skip.dims.length - 1] !== hiddenSize) {\n throw new Error('Skip must have the same hidden size as input');\n }\n if (skip.dims[skip.dims.length - 2] !== sequenceLength) {\n throw new Error('Skip must have the same sequence length as input');\n }\n\n if (gamma.dims.length !== 1) {\n throw new Error('Gamma must be 1D');\n }\n if (gamma.dims[gamma.dims.length - 1] !== hiddenSize) {\n throw new Error('Gamma must have the same hidden size as input');\n }\n if (inputs.length > 3) {\n const beta: TensorView = inputs[3];\n if (beta.dims.length !== 1) {\n throw new Error('Beta must be 1D');\n }\n if (beta.dims[beta.dims.length - 1] !== hiddenSize) {\n throw new Error('Beta must have the same hidden size as input');\n }\n }\n\n if (inputs.length > 4) {\n const bias: TensorView = inputs[4];\n if (bias.dims.length !== 1) {\n throw new Error('Bias must be 1D');\n }\n if (bias.dims[bias.dims.length - 1] !== hiddenSize) {\n throw new Error('Bias must have the same hidden size as input');\n }\n }\n};\n\nconst createSkipLayerNormProgramInfo =\n (inputs: readonly TensorView[], attributes: SkipLayerNormAttributes, outputCount: number, isTraining: boolean):\n ProgramInfo => {\n const simplified = attributes.simplified;\n\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const outputShape = inputShape;\n const outputSize = inputSize;\n const hiddenSize = inputShape.slice(-1)[0];\n const meanInvStdDevDim = isTraining ? 
inputShape.slice(0, -1).concat(1) : [];\n const hasBetaInput = !simplified && inputs.length > 3;\n const hasBiasInput = inputs.length > 4;\n const hasMeanOutput = isTraining && outputCount > 1;\n const hasInvStdDevOutput = isTraining && outputCount > 2;\n const hasInputSkipBiasSumOutput = outputCount > 3;\n\n const components = getMaxComponents(hiddenSize);\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize},\n {type: DataType.uint32, data: components},\n {type: DataType.uint32, data: hiddenSize},\n {type: DataType.float, data: attributes.epsilon},\n ];\n const getShaderSource = (shaderHelper: ShaderHelper) => {\n const uniformsArray: UniformsArrayType = [\n {name: 'output_size', type: 'u32'},\n {name: 'components', type: 'u32'},\n {name: 'hidden_size', type: 'u32'},\n {name: 'epsilon', type: 'f32'},\n ];\n const variables = [\n inputVariable('x', inputs[0].dataType, inputs[0].dims, components),\n inputVariable('skip', inputs[1].dataType, inputs[1].dims, components),\n inputVariable('gamma', inputs[2].dataType, inputs[2].dims, components),\n ];\n if (hasBetaInput) {\n variables.push(inputVariable('beta', inputs[3].dataType, inputs[3].dims, components));\n }\n if (hasBiasInput) {\n variables.push(inputVariable('bias', inputs[4].dataType, inputs[4].dims, components));\n }\n variables.push(outputVariable('output', inputs[0].dataType, outputShape, components));\n if (hasMeanOutput) {\n variables.push(outputVariable('mean_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInvStdDevOutput) {\n variables.push(outputVariable('inv_std_output', DataType.float, meanInvStdDevDim));\n }\n if (hasInputSkipBiasSumOutput) {\n variables.push(outputVariable('input_skip_bias_sum', inputs[0].dataType, outputShape, components));\n }\n const dataType = tensorTypeToWsglStorageType(inputs[0].dataType);\n return `\n\n ${shaderHelper.registerUniforms(uniformsArray).declareVariables(...variables)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size / uniforms.hidden_size')}\n let hidden_size_vectorized: u32 = uniforms.hidden_size / uniforms.components;\n let offset = global_idx * hidden_size_vectorized;\n var sum = ${fillVector('f32', components)};\n var squareSum = ${fillVector('f32', components)};\n for (var i: u32 = 0; i < hidden_size_vectorized; i++) {\n let skip_value = skip[offset + i];\n let bias_value = ${hasBiasInput ? 'bias[i]' : dataType + '(0.0)'};\n let input_value = x[offset + i];\n let value = input_value + skip_value + bias_value;\n ${hasInputSkipBiasSumOutput ? 'input_skip_bias_sum[offset + i] = value;' : ''}\n output[offset + i] = value;\n let f32_value = ${castToF32(dataType, components, 'value')};\n sum += f32_value;\n squareSum += f32_value * f32_value;\n }\n let mean = ${sumVector('sum', components)} / f32(uniforms.hidden_size);\n let inv_std_dev = inverseSqrt(${sumVector('squareSum', components)} / f32(uniforms.hidden_size) ${\n simplified ? '' : '- mean * mean'} + uniforms.epsilon);\n ${hasMeanOutput ? 'mean_output[global_idx] = mean;' : ''}\n ${hasInvStdDevOutput ? 'inv_std_output[global_idx] = inv_std_dev;' : ''}\n for (var i: u32 = 0; i < hidden_size_vectorized; i++) {\n output[offset + i] = (output[offset + i] ${simplified ? '' : `- ${dataType}(mean)`}) * ${\n dataType}(inv_std_dev) * gamma[i] ${hasBetaInput ? 
'+ beta[i]' : ''};\n }\n }`;\n };\n const outputs = [{dims: outputShape, dataType: inputs[0].dataType}];\n if (outputCount > 1) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (outputCount > 2) {\n outputs.push({dims: meanInvStdDevDim, dataType: DataType.float});\n }\n if (outputCount > 3) {\n outputs.push({dims: inputShape, dataType: inputs[0].dataType});\n }\n return {\n name: 'SkipLayerNormalization',\n shaderCache: {\n hint: `${components};${hasMeanOutput};${hasInvStdDevOutput};${hasInputSkipBiasSumOutput}`,\n inputDependencies: inputs.map((_input, _index) => 'type')\n },\n getShaderSource,\n getRunData: () => ({outputs, dispatchGroup: {x: Math.ceil(outputSize / hiddenSize / 64)}, programUniforms}),\n };\n };\n\nexport const skipLayerNorm = (context: ComputeContext, attributes: SkipLayerNormAttributes): void => {\n // TODO: initialize isTraining from ComputeContext\n const isTraining = false;\n validateInputs(context.inputs);\n // Mean and InvStdDev are only used in training mode and are not required for inference.\n // They are added here for completeness only.\n const outputs = [0];\n if (context.outputCount > 1) {\n outputs.push(isTraining ? 1 : -3);\n }\n if (context.outputCount > 2) {\n outputs.push(isTraining ? 2 : -3);\n }\n if (context.outputCount > 3) {\n outputs.push(3);\n }\n context.compute(\n createSkipLayerNormProgramInfo(context.inputs, attributes, context.outputCount, isTraining), {outputs});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform, TensorInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper, UniformsArrayType} from './common';\n\nexport interface SliceAttributes extends AttributeWithCacheKey {\n readonly starts: number[];\n readonly ends: number[];\n readonly axes: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[], attributes: SliceAttributes): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n if (attributes.axes.length !== 0) {\n if (attributes.axes.length !== attributes.starts.length || attributes.axes.length !== attributes.ends.length) {\n throw new Error('axes, starts and ends must have the same length');\n }\n } else if (attributes.starts.length !== attributes.ends.length) {\n throw new Error('starts and ends must have the same length');\n }\n inputs.slice(1).forEach((_, idx) => {\n if (inputs[idx + 1].dataType !== DataType.int32 && inputs[idx + 1].dataType !== DataType.int64) {\n throw new Error(`Input ${idx} must be an array of int32 or int64`);\n }\n });\n};\n\nconst readInput = (inputs: readonly TensorView[], idx: number): number[] => {\n const input: number[] = [];\n if (inputs.length > idx) {\n if (inputs[idx].dataType === DataType.int64) {\n inputs[idx].getBigInt64Array().forEach(v => input.push(Number(v)));\n } else if (inputs[idx].dataType === DataType.int32) {\n inputs[idx].getInt32Array().forEach(v => input.push(Number(v)));\n } else {\n throw new Error(`Input ${idx} must be an array of int32 or int64`);\n }\n }\n return input;\n};\n\nconst createSliceAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: SliceAttributes): 
SliceAttributes => {\n if (inputs.length > 1) {\n const starts: number[] = readInput(inputs, 1);\n const ends: number[] = readInput(inputs, 2);\n let axes: number[] = readInput(inputs, 3);\n if (axes.length === 0) {\n axes = [...Array(inputs[0].dims.length).keys()];\n }\n return createAttributeWithCacheKey({starts, ends, axes});\n } else {\n return attributes;\n }\n };\n\nconst fixStartEndValues =\n (value: number, index: number, inputShape: readonly number[], axes: readonly number[], steps: readonly number[]):\n number => {\n let newValue = value;\n if (value < 0) {\n newValue += inputShape[axes[index]];\n }\n if (steps[index] < 0) {\n return Math.max(0, Math.min(newValue, inputShape[axes[index]] - 1));\n } else {\n return Math.max(0, Math.min(newValue, inputShape[axes[index]]));\n }\n };\n\nconst calculateInputIndicesImpl =\n (input: IndicesHelper, output: IndicesHelper, inputShape: readonly number[]): string =>\n `fn calculateInputIndices(output_indices: ${output.type.indices}) -> ${input.type.indices} {\n var input_indices: ${input.type.indices};\n var carry = 0u;\n for (var i = ${inputShape.length}; i >= 0; i--) {\n let input_shape_i = ${getElementAt('uniforms.input_shape', 'i', inputShape.length)};\n let steps_i = ${getElementAt('uniforms.steps', 'i', inputShape.length)};\n let signs_i = ${getElementAt('uniforms.signs', 'i', inputShape.length)};\n let starts_i = ${getElementAt('uniforms.starts', 'i', inputShape.length)};\n var output_index = ${output.indicesGet('output_indices', 'i')};\n var input_index = output_index * steps_i + starts_i + carry;\n carry = input_index / input_shape_i;\n input_index = input_index % input_shape_i;\n if (signs_i < 0) {\n input_index = input_shape_i - input_index - 1u + starts_i;\n }\n ${input.indicesSet('input_indices', 'i', 'input_index')};\n }\n return input_indices;\n }`;\n\nconst createSliceProgramInfo = (inputs: readonly TensorView[], attributes: SliceAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const axes = (attributes.axes.length > 0) ? 
ShapeUtil.normalizeAxes(attributes.axes, inputShape.length) :\n [...Array(inputShape.length).keys()];\n let steps = readInput(inputs, 4);\n steps.forEach((step) => step !== 0 || (() => {\n throw new Error('step cannot be 0');\n }));\n if (steps.length === 0) {\n steps = Array(axes.length).fill(1);\n }\n const starts = attributes.starts.map((start, i) => fixStartEndValues(start, i, inputShape, axes, steps));\n\n const ends = attributes.ends.map((end, i) => fixStartEndValues(end, i, inputShape, axes, steps));\n\n if (axes.length !== starts.length || axes.length !== ends.length) {\n throw new Error('start, ends and axes should have the same number of elements');\n }\n\n if (axes.length !== inputShape.length) {\n for (let i = 0; i < inputShape.length; ++i) {\n if (!axes.includes(i)) {\n starts.splice(i, 0, 0);\n ends.splice(i, 0, inputShape[i]);\n steps.splice(i, 0, 1);\n }\n }\n }\n const signs = steps.map(step => Math.sign(step));\n // Convert negative steps to positive steps and reverse starts and ends\n steps.forEach((step, i, array) => {\n if (step < 0) {\n const numSteps = (ends[i] - starts[i]) / step;\n const newEnd = starts[i];\n const newStart = newEnd + numSteps * steps[i];\n starts[i] = newStart;\n ends[i] = newEnd;\n array[i] = -step;\n }\n });\n // Output rank is expected to be less than or equal to the input rank.\n const outputShape = inputShape.slice(0);\n axes.forEach((axis, _) => {\n outputShape[axis] = Math.ceil((ends[axis] - starts[axis]) / steps[axis]);\n });\n const outputTensorInfo: TensorInfo = {dims: outputShape, dataType: inputs[0].dataType};\n\n const output = outputVariable('output', inputs[0].dataType, outputShape.length);\n const input = inputVariable('input', inputs[0].dataType, inputs[0].dims.length);\n const outputSize = ShapeUtil.size(outputShape);\n const uniforms: UniformsArrayType = [\n {name: 'outputSize', type: 'u32'}, {name: 'starts', type: 'u32', length: starts.length},\n {name: 'signs', type: 'i32', length: signs.length}, {name: 'steps', type: 'u32', length: steps.length}\n ];\n\n const programUniforms: ProgramUniform[] = [\n {type: DataType.uint32, data: outputSize}, {type: DataType.uint32, data: starts},\n {type: DataType.int32, data: signs}, {type: DataType.uint32, data: steps},\n ...createTensorShapeVariables(inputs[0].dims, outputShape)\n ];\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${shaderHelper.registerUniforms(uniforms).declareVariables(input, output)}\n ${calculateInputIndicesImpl(input, output, inputShape)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.outputSize')}\n let output_indices = ${output.offsetToIndices('global_idx')};\n let input_indices = calculateInputIndices(output_indices);\n ${output.setByOffset('global_idx', input.getByIndices('input_indices'))}\n }`;\n return {\n name: 'Slice',\n shaderCache: {hint: `${signs.length}_${starts.length}_${steps.length}`, inputDependencies: ['rank']},\n getShaderSource,\n getRunData: () => ({\n outputs: [outputTensorInfo],\n dispatchGroup: {x: Math.ceil(inputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const slice = (context: ComputeContext, attributes: SliceAttributes): void => {\n validateInputs(context.inputs, attributes);\n const updatedAttributes = createSliceAttributesFromInputs(context.inputs, attributes);\n context.compute(createSliceProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n // if (ShapeUtil.size(program.outputs[0].dims) > 0) {\n // 
context.compute(programInfoLoader, {inputs: [0]});\n // } else {\n // // TODO: support empty output\n // throw new Error('slice: output size is 0');\n // }\n};\n\nexport const parseSliceAttributes = (attributes: Record): SliceAttributes => {\n const starts = attributes.starts as number[];\n const ends = attributes.ends as number[];\n const axes = attributes.axes as number[];\n return createAttributeWithCacheKey({starts, ends, axes});\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// TODO: this is the same naive implementation we use for reduce that has\n// performance limitations when the reduced axis is long. Need to add\n// a optimized codepath for this.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {getMaxComponents, inputVariable, outputVariable, ShaderHelper, sumVector, tensorTypeToWsglStorageType} from './common';\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 1) {\n throw new Error('Softmax op requires 1 input.');\n }\n};\n\nexport interface SoftmaxAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n}\n\nconst createSoftmaxProgramInfo = (input: TensorView, attributes: SoftmaxAttributes): ProgramInfo => {\n const shape = input.dims;\n const outputSize = ShapeUtil.size(shape);\n const WG = 64;\n let axis = attributes.axis;\n if (axis < 0) {\n axis = shape.length + axis;\n }\n if (axis < shape.length - 1) {\n throw new Error('softmax only supports last axis for now.');\n }\n\n const cols = shape[axis];\n const rows = outputSize / cols;\n const components = getMaxComponents(cols);\n const packedCols = cols / components;\n\n const maxVector = (name: string, components: number) => {\n if (components === 4) {\n return `max(max(${name}.x, ${name}.y), max(${name}.z, ${name}.w))`;\n } else if (components === 2) {\n return `max(${name}.x, ${name}.y)`;\n } else if (components === 3) {\n return `max(max(${name}.x, ${name}.y), ${name}.z)`;\n }\n\n return name;\n };\n const x = inputVariable('x', input.dataType, input.dims, components);\n const output = outputVariable('result', input.dataType, input.dims, components);\n const valueType = x.type.value;\n // 6.2.4 in wgsl spec\n const threadMaxDecl = tensorTypeToWsglStorageType(input.dataType) === 'f32' ?\n `var threadMax = ${valueType}(-3.402823e+38f);` :\n `var threadMax = ${valueType}(-65504.0h);`;\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n var rowMaxShared : ${valueType};\n var rowSumShared : ${valueType};\n var threadShared : array<${valueType}, ${WG}>;\n\n fn getValue(row: i32, col: i32, row_stride: i32) -> ${valueType} {\n let index = row * row_stride + col;\n return x[index];\n }\n\n fn setValue(row: i32, col: i32, row_stride: i32, value: ${valueType}) {\n let index = row * row_stride + col;\n result[index] = value;\n }\n ${shaderHelper.registerUniform('packedCols', 'i32').declareVariables(x, output)}\n ${shaderHelper.mainStart()}\n let gindex = i32(global_idx);\n let lindex = i32(local_idx);\n const wg = ${WG};\n let row = gindex / wg;\n let cols = uniforms.packedCols;\n let row_stride : i32 = uniforms.packedCols;\n\n // find the rows max\n ${threadMaxDecl}\n for (var col = lindex; col < cols; col += wg) {\n let value = getValue(row, col, 
row_stride);\n threadMax = max(threadMax, value);\n }\n if (lindex < cols) {\n threadShared[lindex] = threadMax;\n }\n workgroupBarrier();\n\n var reduceSize = min(cols, wg);\n for (var currSize = reduceSize >> 1; currSize > 0; currSize = reduceSize >> 1) {\n reduceSize = currSize + (reduceSize & 1);\n if (lindex < currSize) {\n threadShared[lindex] = max(threadShared[lindex], threadShared[lindex + reduceSize]);\n }\n workgroupBarrier();\n }\n if (lindex == 0) {\n rowMaxShared = ${valueType}(${maxVector('threadShared[0]', components)});\n }\n workgroupBarrier();\n\n // find the rows sum\n var threadSum = ${valueType}(0.0);\n for (var col = lindex; col < cols; col += wg) {\n let subExp = exp(getValue(row, col, row_stride) - rowMaxShared);\n threadSum += subExp;\n }\n threadShared[lindex] = threadSum;\n workgroupBarrier();\n\n for (var currSize = wg >> 1; currSize > 0; currSize = currSize >> 1) {\n if (lindex < currSize) {\n threadShared[lindex] = threadShared[lindex] + threadShared[lindex + currSize];\n }\n workgroupBarrier();\n }\n if (lindex == 0) {\n rowSumShared = ${valueType}(${sumVector('threadShared[0]', components)});\n }\n workgroupBarrier();\n\n // calculate final value for each element in the row\n for (var col = lindex; col < cols; col += wg) {\n let value = exp(getValue(row, col, row_stride) - rowMaxShared) / rowSumShared;\n setValue(row, col, row_stride, value);\n }\n }`;\n return {\n name: 'Softmax',\n shaderCache: {hint: `${components}`, inputDependencies: ['type']},\n getRunData: () => ({\n outputs: [{dims: shape, dataType: input.dataType}],\n dispatchGroup: {x: rows},\n programUniforms: [{type: DataType.int32, data: packedCols}]\n }),\n getShaderSource,\n };\n};\n\nexport const softmax = (context: ComputeContext, attributes: SoftmaxAttributes): void => {\n validateInputs(context.inputs);\n context.compute(createSoftmaxProgramInfo(context.inputs[0], attributes));\n};\n\nexport const parseSoftmaxAttributes = (attributes: Record): SoftmaxAttributes =>\n createAttributeWithCacheKey({axis: attributes.axis as number});\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {AttributeWithCacheKey, createAttributeWithCacheKey} from '../attribute-with-cache-key';\nimport {ComputeContext, ProgramInfo, ProgramUniform, TensorInfo} from '../types';\n\nimport {createTensorShapeVariables, getElementAt, IndicesHelper, inputVariable, outputVariable, ShaderHelper} from './common';\n\nexport interface SplitAttributes extends AttributeWithCacheKey {\n readonly axis: number;\n readonly numOutputs: number;\n readonly splitSizes: number[];\n}\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length < 1) {\n throw new Error('too few inputs');\n }\n};\n\nconst createSplitAttributesFromInputs =\n (inputs: readonly TensorView[], attributes: SplitAttributes): SplitAttributes => {\n const splitSizes: number[] = [];\n let numOutputs: number = attributes.numOutputs;\n if (inputs[1].dims[0] > 0) {\n inputs[1].getBigInt64Array().forEach(v => splitSizes.push(Number(v)));\n numOutputs = splitSizes.length;\n }\n return createAttributeWithCacheKey({numOutputs, axis: attributes.axis, splitSizes});\n };\n\nconst calculateOutputIndexImpl = (numberOfTensors: number): string => `\nfn calculateOutputIndex(index: u32) -> u32 {\n for (var i: u32 = 0u; i < ${numberOfTensors}u; i += 1u ) {\n if (index < ${getElementAt('uniforms.size_in_split_axis', 'i', numberOfTensors)}) {\n return i;\n }\n }\n return ${numberOfTensors}u;\n}`;\nconst writeBufferDataImpl = (outputs: readonly IndicesHelper[]) => {\n const numberOfTensors = outputs.length;\n const codeLines: string[] = [];\n for (let i = 0; i < numberOfTensors; ++i) {\n const returnSnippet = outputs[i].setByIndices('indices', 'input[global_idx]');\n if (numberOfTensors === 1) {\n codeLines.push(returnSnippet);\n } else if (i === 0) {\n codeLines.push(`if (output_number == ${i}u) { ${returnSnippet} }`);\n } else if (i === numberOfTensors - 1) {\n codeLines.push(`else { ${returnSnippet} }`);\n } else {\n codeLines.push(`else if (output_number == ${i}) { ${returnSnippet} }`);\n }\n }\n return `\n fn writeBufferData(output_number: u32, indices: ${outputs[0].type.indices}, global_idx: u32) {\n ${codeLines.join('\\n')}\n }`;\n};\n\nconst createSplitProgramInfo = (inputs: readonly TensorView[], attributes: SplitAttributes): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const inputSize = ShapeUtil.size(inputShape);\n const dataType = inputs[0].dataType;\n const axis = ShapeUtil.normalizeAxis(attributes.axis, inputShape.length);\n const outputs = new Array(attributes.numOutputs);\n const input = inputVariable('input', dataType, inputShape.length);\n const sizeInSplitAxis = new Array(attributes.numOutputs);\n const outputsTensorInfo: TensorInfo[] = [];\n const outputShapes: number[][] = [];\n let previousSum = 0;\n const programUniforms: ProgramUniform[] = [{type: DataType.uint32, data: inputSize}];\n for (let i = 0; i < attributes.numOutputs; i++) {\n previousSum += attributes.splitSizes[i];\n sizeInSplitAxis[i] = previousSum;\n const outputShape = inputShape.slice();\n outputShape[attributes.axis] = attributes.splitSizes[i];\n outputShapes.push(outputShape);\n outputs[i] = outputVariable(`output${i}`, dataType, outputShape.length);\n outputsTensorInfo.push({dims: outputShapes[i], dataType: inputs[0].dataType});\n }\n programUniforms.push(\n {type: DataType.uint32, data: sizeInSplitAxis}, 
...createTensorShapeVariables(inputShape, ...outputShapes));\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n ${\n shaderHelper.registerUniform('input_size', 'u32')\n .registerUniform('size_in_split_axis', 'u32', sizeInSplitAxis.length)\n .declareVariables(input, ...outputs)}\n ${calculateOutputIndexImpl(sizeInSplitAxis.length)}\n ${writeBufferDataImpl(outputs)}\n\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.input_size')}\n\n var indices = ${input.offsetToIndices('global_idx')};\n var index = ${input.indicesGet('indices', axis)};\n let output_number = calculateOutputIndex(index);\n if (output_number != 0) {\n index -= ${getElementAt('uniforms.size_in_split_axis', 'output_number - 1u', sizeInSplitAxis.length)};\n ${input.indicesSet('indices', axis, 'index')};\n }\n writeBufferData(output_number, indices, global_idx);\n }`;\n return {\n name: 'Split',\n shaderCache: {hint: attributes.cacheKey, inputDependencies: ['rank']},\n getShaderSource,\n getRunData: () => ({\n outputs: outputsTensorInfo,\n dispatchGroup: {x: Math.ceil(inputSize / 64 /* workgroup size */)},\n programUniforms\n })\n };\n};\n\nexport const split = (context: ComputeContext, attributes: SplitAttributes): void => {\n validateInputs(context.inputs);\n const updatedAttributes =\n context.inputs.length === 1 ? attributes : createSplitAttributesFromInputs(context.inputs, attributes);\n context.compute(createSplitProgramInfo(context.inputs, updatedAttributes), {inputs: [0]});\n};\n\nexport const parseSplitAttributes = (attributes: Record): SplitAttributes => {\n const axis = attributes.axis as number;\n const splitSizes: number[] = attributes.splitSizes as number[];\n const numOutputs = attributes.numOutputs as number < 0 ? splitSizes.length : attributes.numOutputs as number;\n if (numOutputs !== splitSizes.length) {\n throw new Error('numOutputs and splitSizes lengh must be equal');\n }\n return createAttributeWithCacheKey({axis, numOutputs, splitSizes});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst getRepeats = (repeatsTensorView: TensorView): readonly number[] =>\n Array.from(repeatsTensorView.getBigInt64Array(), Number);\n\n\nconst validateInputs = (inputs: readonly TensorView[]): void => {\n if (!inputs || inputs.length !== 2) {\n throw new Error('Tile requires 2 inputs.');\n }\n\n if (inputs[0].dataType !== DataType.float && inputs[0].dataType !== DataType.int32 &&\n inputs[0].dataType !== DataType.uint32) {\n throw new Error('Tile only support float, int32, and uint32 data types');\n }\n\n if (inputs[1].dataType !== DataType.int64) {\n throw new Error('Tile `repeats` input should be of int64 data type');\n }\n\n if (inputs[1].dims.length !== 1) {\n throw new Error('Tile `repeats` input should be 1-D');\n }\n\n const repeats: readonly number[] = getRepeats(inputs[1]);\n\n if (repeats.length !== inputs[0].dims.length) {\n throw new Error('Tile `repeats` input should have same number of elements as rank of input data tensor');\n }\n};\n\nconst getOutputShape = (inputShape: readonly number[], repeats: readonly number[]): readonly number[] => {\n const outputShape: number[] = [];\n\n for (let i = 0; i < inputShape.length; ++i) {\n outputShape.push(inputShape[i] * repeats[i]);\n }\n\n return outputShape;\n};\n\nexport const createTileProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const inputShape = inputs[0].dims;\n const repeats: readonly number[] = getRepeats(inputs[1]);\n const outputShape = getOutputShape(inputShape, repeats);\n const outputSize = ShapeUtil.size(outputShape);\n\n const dataType = inputs[0].dataType;\n const input = inputVariable('input', dataType, inputShape.length);\n const output = outputVariable('output', dataType, outputShape.length);\n\n const getShaderSource = (shaderHelper: ShaderHelper) => `\n const inputShape = ${input.indices(...inputShape)};\n ${shaderHelper.registerUniform('output_size', 'u32').declareVariables(input, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.output_size')}\n let output_indices = ${output.offsetToIndices('global_idx')};\n var input_indices: ${input.type.indices};\n for (var i = 0; i < ${inputShape.length}; i++) {\n let input_dim_i = ${input.indicesGet('uniforms.input_shape', 'i')};\n let input_dim_value = ${output.indicesGet('output_indices', 'i')} % input_dim_i;\n\n ${input.indicesSet('input_indices', 'i', 'input_dim_value')}\n }\n ${output.setByOffset('global_idx', input.getByIndices('input_indices'))}\n }`;\n\n return {\n name: 'Tile',\n shaderCache: {hint: `${repeats}`, inputDependencies: ['rank']},\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: inputs[0].dataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */)},\n programUniforms:\n [{type: DataType.uint32, data: outputSize}, ...createTensorShapeVariables(inputs[0].dims, outputShape)],\n }),\n getShaderSource,\n };\n};\n\nexport const tile = (context: ComputeContext): void => {\n validateInputs(context.inputs);\n context.compute(createTileProgramInfo(context.inputs), {inputs: [0]});\n};\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {DataType} from '../../../wasm-common';\nimport {TensorView} from '../../tensor-view';\nimport {BroadcastUtil, ShapeUtil} from '../../util';\nimport {ComputeContext, ProgramInfo} from '../types';\n\nimport {createTensorShapeVariables, inputVariable, outputVariable, ShaderHelper} from './common';\n\nconst createWhereOpProgramShader =\n (shaderHelper: ShaderHelper, inputs: readonly TensorView[], dimsOutput: readonly number[], isBroadcast: boolean,\n typeOutput: number) => {\n const output = outputVariable('output_data', typeOutput, dimsOutput.length, 4);\n const a = inputVariable('a_data', inputs[1].dataType, inputs[1].dims.length, 4);\n const b = inputVariable('b_data', inputs[2].dataType, inputs[2].dims.length, 4);\n const c = inputVariable('c_data', inputs[0].dataType, inputs[0].dims.length, 4);\n\n let assignment: string;\n const expression = (a: string, b: string, c: string) => `select(${b}, ${a}, ${c})`;\n if (!isBroadcast) {\n assignment = output.setByOffset(\n 'global_idx',\n expression(a.getByOffset('global_idx'), b.getByOffset('global_idx'), c.getByOffset('global_idx')));\n } else {\n const singleAssignment = (resStr: string, x: number, typeCast = '') => {\n const expressionA = `a_data[index_a${x}][component_a${x}]`;\n const expressionB = `b_data[index_b${x}][component_b${x}]`;\n // eslint-disable-next-line no-bitwise\n const expressionC = `bool(c_data[index_c${x}] & (0xffu << (component_c${x} * 8)))`;\n return `\n let output_indices${x} = ${output.offsetToIndices(`global_idx * 4u + ${x}u`)};\n let offset_a${x} = ${a.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let offset_b${x} = ${b.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let offset_c${x} = ${c.broadcastedIndicesToOffset(`output_indices${x}`, output)};\n let index_a${x} = offset_a${x} / 4u;\n let index_b${x} = offset_b${x} / 4u;\n let index_c${x} = offset_c${x} / 4u;\n let component_a${x} = offset_a${x} % 4u;\n let component_b${x} = offset_b${x} % 4u;\n let component_c${x} = offset_c${x} % 4u;\n ${resStr}[${x}] = ${typeCast}(${expression(expressionA, expressionB, expressionC)});\n `;\n };\n if (typeOutput === DataType.bool) {\n assignment = `\n var data = vec4(0);\n ${singleAssignment('data', 0, 'u32')}\n ${singleAssignment('data', 1, 'u32')}\n ${singleAssignment('data', 2, 'u32')}\n ${singleAssignment('data', 3, 'u32')}\n output_data[global_idx] = dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(data));`;\n } else {\n assignment = `\n ${singleAssignment('output_data[global_idx]', 0)}\n ${singleAssignment('output_data[global_idx]', 1)}\n ${singleAssignment('output_data[global_idx]', 2)}\n ${singleAssignment('output_data[global_idx]', 3)}\n `;\n }\n }\n\n return `\n ${shaderHelper.registerUniform('vec_size', 'u32').declareVariables(c, a, b, output)}\n ${shaderHelper.mainStart()}\n ${shaderHelper.guardAgainstOutOfBoundsWorkgroupSizes('uniforms.vec_size')}\n ${assignment}\n }`;\n };\n\nconst createWhereOpProgramInfo = (inputs: readonly TensorView[]): ProgramInfo => {\n const dimsA = inputs[1].dims;\n const dimsB = inputs[2].dims;\n const dimsC = inputs[0].dims;\n const outputDataType = inputs[1].dataType;\n\n const isBroadcast = !(ShapeUtil.areEqual(dimsA, dimsB) && ShapeUtil.areEqual(dimsB, dimsC));\n let outputShape = dimsA;\n let outputSize = ShapeUtil.size(dimsA);\n // TODO: deal with zero-sized tensors (eg. 
dims=[1,0])\n\n if (isBroadcast) {\n const calculatedShape = BroadcastUtil.calcShape(BroadcastUtil.calcShape(dimsA, dimsB, false)!, dimsC, false);\n if (!calculatedShape) {\n throw new Error('Can\\'t perform where op on the given tensors');\n }\n outputShape = calculatedShape;\n outputSize = ShapeUtil.size(outputShape);\n }\n\n const vecSize = Math.ceil(outputSize / 4);\n\n return {\n name: 'Where',\n shaderCache: {inputDependencies: ['rank', 'rank', 'rank']},\n getShaderSource: (shaderHelper) =>\n createWhereOpProgramShader(shaderHelper, inputs, outputShape, isBroadcast, outputDataType),\n getRunData: () => ({\n outputs: [{dims: outputShape, dataType: outputDataType}],\n dispatchGroup: {x: Math.ceil(outputSize / 64 /* workgroup size */ / 4 /* vec size */)},\n programUniforms:\n [{type: DataType.uint32, data: vecSize}, ...createTensorShapeVariables(dimsC, dimsA, dimsB, outputShape)],\n }),\n };\n};\n\nexport const where = (context: ComputeContext): void => {\n context.compute(createWhereOpProgramInfo(context.inputs));\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {argMax, argMin, parseArgMinMaxAttributes} from './ops/argminmax';\nimport {attention} from './ops/attention';\nimport {batchNorm} from './ops/batch-norm';\nimport {biasAdd} from './ops/bias-add';\nimport {biasSplitGelu} from './ops/bias-split-gelu';\nimport * as binaryOps from './ops/binary-op';\nimport {concat, parseConcatAttributes} from './ops/concat';\nimport {conv, parseConvAttributes} from './ops/conv';\nimport {convTranspose, parseConvTransposeAttributes} from './ops/conv-transpose';\nimport {cumsum, parseCumSumAttributes} from './ops/cumsum';\nimport {depthToSpace, parseDepthToSpaceAttributes} from './ops/depth-to-space';\nimport {einsum, parseEinsumAttributes} from './ops/einsum';\nimport {expand} from './ops/expand';\nimport {fastGelu} from './ops/fast-gelu';\nimport {gather, parseGatherAttributes} from './ops/gather';\nimport {gatherElements, parseGatherElementsAttributes} from './ops/gather-elements';\nimport {gemm, parseGemmAttributes} from './ops/gemm';\nimport {instanceNorm} from './ops/instance-norm';\nimport {layerNorm} from './ops/layer-norm';\nimport {matMul} from './ops/matmul';\nimport {matMulNBits, parseMatMulNBitsAttributes} from './ops/matmulnbits';\nimport {multiHeadAttention, parseMultiHeadAttentionAttributes} from './ops/multihead-attentiion';\nimport {pad} from './ops/pad';\nimport * as pool from './ops/pool';\nimport {range} from './ops/range';\nimport {reduceL1, reduceL2, reduceLogSum, reduceLogSumExp, reduceMax, reduceMean, reduceMin, reduceProd, reduceSum, reduceSumSquare} from './ops/reduce';\nimport {parseResizeAttributes, resize} from './ops/resize';\nimport {rotaryEmbedding} from './ops/rotary-embedding';\nimport {skipLayerNorm} from './ops/skip-layer-norm';\nimport {parseSliceAttributes, slice} from './ops/slice';\nimport {parseSoftmaxAttributes, softmax} from './ops/softmax';\nimport {parseSplitAttributes, split} from './ops/split';\nimport {tile} from './ops/tile';\nimport {parseTransposeAttributes, transpose} from './ops/transpose';\nimport * as unaryOps from './ops/unary-op';\nimport {where} from './ops/where';\nimport {ComputeContext} from './types';\n\nexport type RunFunction = (context: ComputeContext, attribute?: unknown) => void;\nexport type ParseAttributeFunction = (attributeRaw: unknown) => unknown;\nexport type OperatorImplementation = [RunFunction]|[RunFunction, ParseAttributeFunction];\n\nexport const 
WEBGPU_OP_RESOLVE_RULES: Map = new Map([\n ['Abs', [unaryOps.abs]],\n ['Acos', [unaryOps.acos]],\n ['Acosh', [unaryOps.acosh]],\n ['Add', [binaryOps.add]],\n ['ArgMax', [argMax, parseArgMinMaxAttributes]],\n ['ArgMin', [argMin, parseArgMinMaxAttributes]],\n ['Asin', [unaryOps.asin]],\n ['Asinh', [unaryOps.asinh]],\n ['Atan', [unaryOps.atan]],\n ['Atanh', [unaryOps.atanh]],\n ['Attention', [attention]],\n // TODO: support new attributes for AveragePool-10\n ['AveragePool', [pool.averagePool, pool.parseAveragePoolAttributes]],\n ['BatchNormalization', [batchNorm]],\n ['BiasAdd', [biasAdd]],\n ['BiasSplitGelu', [biasSplitGelu]],\n ['Cast', [unaryOps.cast, unaryOps.parseCastAttributes]],\n ['Ceil', [unaryOps.ceil]],\n ['Clip', [unaryOps.clip]],\n ['Concat', [concat, parseConcatAttributes]],\n ['Conv', [conv, parseConvAttributes]],\n ['ConvTranspose', [convTranspose, parseConvTransposeAttributes]],\n ['Cos', [unaryOps.cos]],\n ['Cosh', [unaryOps.cosh]],\n ['CumSum', [cumsum, parseCumSumAttributes]],\n ['DepthToSpace', [depthToSpace, parseDepthToSpaceAttributes]],\n ['Div', [binaryOps.div]],\n ['Einsum', [einsum, parseEinsumAttributes]],\n ['Elu', [unaryOps.elu, unaryOps.parseAlphaAttributes]],\n ['Equal', [binaryOps.equal]],\n ['Erf', [unaryOps.erf]],\n ['Exp', [unaryOps.exp]],\n ['Expand', [expand]],\n ['FastGelu', [fastGelu]],\n ['Floor', [unaryOps.floor]],\n ['FusedConv', [conv, parseConvAttributes]],\n ['Gather', [gather, parseGatherAttributes]],\n ['GatherElements', [gatherElements, parseGatherElementsAttributes]],\n ['Gelu', [unaryOps.gelu]],\n ['Gemm', [gemm, parseGemmAttributes]],\n ['GlobalAveragePool', [pool.globalAveragePool, pool.parseGlobalAveragePoolAttributes]],\n ['GlobalMaxPool', [pool.globalMaxPool, pool.parseGlobalMaxPoolAttributes]],\n ['Greater', [binaryOps.greater]],\n ['GreaterOrEqual', [binaryOps.greaterOrEqual]],\n ['HardSigmoid', [unaryOps.hardSigmoid, unaryOps.parseHardSigmoidAttributes]],\n ['InstanceNormalization', [instanceNorm]],\n ['LayerNormalization', [layerNorm]],\n ['LeakyRelu', [unaryOps.leakyRelu, unaryOps.parseAlphaAttributes]],\n ['Less', [binaryOps.less]],\n ['LessOrEqual', [binaryOps.lessOrEqual]],\n ['Log', [unaryOps.log]],\n ['MatMul', [matMul]],\n ['MatMulNBits', [matMulNBits, parseMatMulNBitsAttributes]],\n // TODO: support new attributes for MaxPool-8 and MaxPool-10\n ['MaxPool', [pool.maxPool, pool.parseMaxPoolAttributes]],\n ['Mul', [binaryOps.mul]],\n ['MultiHeadAttention', [multiHeadAttention, parseMultiHeadAttentionAttributes]],\n ['Neg', [unaryOps.neg]],\n ['Not', [unaryOps.not]],\n ['Pad', [pad]],\n ['Pow', [binaryOps.pow]],\n ['Range', [range]],\n ['Reciprocal', [unaryOps.reciprocal]],\n ['ReduceMin', [reduceMin]],\n ['ReduceMean', [reduceMean]],\n ['ReduceMax', [reduceMax]],\n ['ReduceSum', [reduceSum]],\n ['ReduceProd', [reduceProd]],\n ['ReduceL1', [reduceL1]],\n ['ReduceL2', [reduceL2]],\n ['ReduceLogSum', [reduceLogSum]],\n ['ReduceLogSumExp', [reduceLogSumExp]],\n ['ReduceSumSquare', [reduceSumSquare]],\n ['Relu', [unaryOps.relu]],\n ['Resize', [resize, parseResizeAttributes]],\n ['RotaryEmbedding', [rotaryEmbedding]],\n ['Sigmoid', [unaryOps.sigmoid]],\n ['Sin', [unaryOps.sin]],\n ['Sinh', [unaryOps.sinh]],\n ['Slice', [slice, parseSliceAttributes]],\n ['SkipLayerNormalization', [skipLayerNorm]],\n ['Split', [split, parseSplitAttributes]],\n ['Sqrt', [unaryOps.sqrt]],\n ['Softmax', [softmax, parseSoftmaxAttributes]],\n ['Sub', [binaryOps.sub]],\n ['Tan', [unaryOps.tan]],\n ['Tanh', [unaryOps.tanh]],\n ['ThresholdedRelu', 
[unaryOps.thresholdedRelu, unaryOps.parseAlphaAttributes]],\n ['Tile', [tile]],\n ['Transpose', [transpose, parseTransposeAttributes]],\n ['Where', [where]],\n]);\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {WebGpuBackend} from '../backend-webgpu';\nimport {LOG_DEBUG} from '../log';\n\nimport {createShaderHelper} from './ops/common';\nimport {Artifact, GpuData, ProgramInfo} from './types';\n\n/**\n * ProgramManager is the main class behind running computations\n * It builds ProgramInfo's into Artifacts\n * It compiles given ProgramInfo's into WebGL Prorams (cached as Artifacts)\n * Uses the artifact to run the computation by calling Draw on\n * the WebGL drawing buffer\n * ProgramManager automatically maps (binds) input variables to their\n * corresponding Location's in the binary program\n */\nexport class ProgramManager {\n repo: Map; // this should be per-session object\n attributesBound: boolean;\n\n constructor(private backend: WebGpuBackend) {\n this.repo = new Map();\n this.attributesBound = false;\n }\n getArtifact(key: unknown): Artifact|undefined {\n return this.repo.get(key);\n }\n setArtifact(key: unknown, artifact: Artifact): void {\n this.repo.set(key, artifact);\n }\n run(buildArtifact: Artifact, inputs: GpuData[], outputs: GpuData[], dispatchGroup: [number, number, number],\n uniformBufferBinding: GPUBindingResource|undefined): void {\n TRACE_FUNC_BEGIN(buildArtifact.programInfo.name);\n const device = this.backend.device;\n const computePassEncoder = this.backend.getComputePassEncoder();\n this.backend.writeTimestamp(this.backend.pendingDispatchNumber * 2);\n const entries = [];\n for (const input of inputs) {\n entries.push({binding: entries.length, resource: {buffer: input.buffer}});\n }\n for (const output of outputs) {\n entries.push({binding: entries.length, resource: {buffer: output.buffer}});\n }\n if (uniformBufferBinding) {\n entries.push({binding: entries.length, resource: uniformBufferBinding});\n }\n const bindGroup = device.createBindGroup(\n {layout: buildArtifact.computePipeline.getBindGroupLayout(0), entries, label: buildArtifact.programInfo.name});\n\n if (this.backend.sessionStatus === 'capturing') {\n const commandInfo = {\n kernelId: this.backend.currentKernelId!,\n computePipeline: buildArtifact.computePipeline,\n bindGroup,\n dispatchGroup\n };\n const sessionCommandList = this.backend.capturedCommandList.get(this.backend.currentSessionId!);\n sessionCommandList!.push(commandInfo);\n }\n\n computePassEncoder.setPipeline(buildArtifact.computePipeline);\n computePassEncoder.setBindGroup(0, bindGroup);\n computePassEncoder.dispatchWorkgroups(...dispatchGroup);\n this.backend.writeTimestamp(this.backend.pendingDispatchNumber * 2 + 1);\n this.backend.pendingDispatchNumber++;\n\n if (this.backend.pendingDispatchNumber >= this.backend.maxDispatchNumber ||\n this.backend.queryType === 'at-passes') {\n this.backend.endComputePass();\n }\n if (this.backend.pendingDispatchNumber >= this.backend.maxDispatchNumber) {\n this.backend.flush();\n }\n TRACE_FUNC_END(buildArtifact.programInfo.name);\n }\n dispose(): void {\n // this.repo.forEach(a => this.glContext.deleteProgram(a.program));\n }\n build(programInfo: ProgramInfo, normalizedDispatchGroupSize: [number, number, number]): Artifact {\n TRACE_FUNC_BEGIN(programInfo.name);\n const device = this.backend.device;\n const extensions: string[] = [];\n if 
(device.features.has('shader-f16')) {\n extensions.push('enable f16;');\n }\n const shaderHelper = createShaderHelper(normalizedDispatchGroupSize, this.backend.device.limits);\n const userCode = programInfo.getShaderSource(shaderHelper);\n const code = `${extensions.join('\\n')}\\n${shaderHelper.additionalImplementations}\\n${userCode}`;\n const shaderModule = device.createShaderModule({code, label: programInfo.name});\n LOG_DEBUG('verbose', () => `[WebGPU] ${programInfo.name} shader code: ${code}`);\n\n const computePipeline = device.createComputePipeline(\n {compute: {module: shaderModule, entryPoint: 'main'}, layout: 'auto', label: programInfo.name});\n\n TRACE_FUNC_END(programInfo.name);\n return {programInfo, computePipeline, uniformVariablesInfo: shaderHelper.variablesInfo};\n }\n\n normalizeDispatchGroupSize(dispatchGroup: ReturnType['dispatchGroup']):\n [number, number, number] {\n const x = typeof dispatchGroup === 'number' ? dispatchGroup : dispatchGroup.x;\n const y = typeof dispatchGroup === 'number' ? 1 : (dispatchGroup.y || 1);\n const z = typeof dispatchGroup === 'number' ? 1 : (dispatchGroup.z || 1);\n const limitPerDimension = this.backend.device.limits.maxComputeWorkgroupsPerDimension;\n if (x <= limitPerDimension && y <= limitPerDimension && z <= limitPerDimension) {\n return [x, y, z];\n }\n const size = x * y * z;\n let dispatchAverage = Math.ceil(Math.sqrt(size));\n if (dispatchAverage > limitPerDimension) {\n dispatchAverage = Math.ceil(Math.cbrt(size));\n if (dispatchAverage > limitPerDimension) {\n throw new Error('Total dispatch size exceeds WebGPU maximum.');\n }\n return [dispatchAverage, dispatchAverage, dispatchAverage];\n } else {\n return [dispatchAverage, dispatchAverage, 1];\n }\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env, Tensor, TRACE, TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {DataType, tensorDataTypeEnumToString} from '../wasm-common';\n\nimport {configureLogger, LOG_DEBUG} from './log';\nimport {createView, TensorView} from './tensor-view';\nimport {createGpuDataManager, downloadGpuData, GpuDataManager} from './webgpu/gpu-data-manager';\nimport {RunFunction, WEBGPU_OP_RESOLVE_RULES} from './webgpu/op-resolve-rules';\nimport {ProgramManager} from './webgpu/program-manager';\nimport {AdapterInfo, ComputeContext, GpuArchitecture, GpuData, GpuVendor, ProgramInfo, ProgramInputTensorInfoDependency, SessionState, TimestampQuery} from './webgpu/types';\n\ninterface CommandInfo {\n readonly kernelId: number;\n readonly computePipeline: GPUComputePipeline;\n readonly bindGroup: GPUBindGroup;\n readonly dispatchGroup: [number, number, number];\n}\n\ninterface KernelInfo {\n readonly kernelType: string;\n readonly kernelName: string;\n readonly kernelEntry: RunFunction;\n readonly attributes: [((attribute: unknown) => unknown)|undefined, unknown];\n}\n\ninterface PendingKernelInfo {\n readonly kernelId: number;\n readonly programName: string;\n readonly inputTensorViews: readonly TensorView[];\n readonly outputTensorViews: readonly TensorView[];\n}\n\nconst getProgramInputTensorInfoDependencyKey =\n (inputTensors: readonly TensorView[], inputDependencies: readonly ProgramInputTensorInfoDependency[]): string => {\n if (inputDependencies.length !== inputTensors.length) {\n throw new Error(`inputDependencies length ${inputDependencies.length} is not equal to inputTensors length ${\n inputTensors.length}.`);\n }\n\n const inputInfos: string[] = [];\n for (let i = 0; i < inputTensors.length; ++i) {\n const type = inputTensors[i].dataType;\n switch (inputDependencies[i]) {\n case 'none': {\n inputInfos.push('');\n break;\n }\n case 'type': {\n inputInfos.push(`${type}`);\n break;\n }\n case 'rank': {\n const rank = inputTensors[i].dims.length;\n inputInfos.push(`${type};${rank}`);\n break;\n }\n case 'dims': {\n const dims = inputTensors[i].dims.join(',');\n inputInfos.push(`${type};${dims}`);\n break;\n }\n default:\n throw new Error(`unsupported input dependency: ${inputDependencies[i]}`);\n }\n }\n\n return inputInfos.join('|');\n };\n\n/**\n * get a unique key representing the program from the program info, input shapes and types.\n *\n * @returns a unique key is a shorter string than the shader source, which contains all the information to identify a\n * program. 
if the key is the same, the program shader source should be the same, so we can reuse the program.\n *\n */\nconst getProgramInfoUniqueKey =\n (programInfo: ProgramInfo, inputTensors: readonly TensorView[], is1DimensionDispatch: boolean): string => {\n // final key format:\n // []:is1DimensionDispatch:||...\n let key = programInfo.name;\n if (programInfo.shaderCache?.hint) {\n key += '[' + programInfo.shaderCache.hint + ']';\n }\n key += ':' + is1DimensionDispatch +\n `:${\n getProgramInputTensorInfoDependencyKey(\n inputTensors,\n programInfo.shaderCache?.inputDependencies ??\n new Array(inputTensors.length).fill('dims'))}`;\n return key;\n };\n\nclass AdapterInfoImpl implements AdapterInfo {\n readonly architecture?: string;\n readonly vendor?: string;\n\n constructor(adapterInfo: GPUAdapterInfo) {\n if (adapterInfo) {\n this.architecture = adapterInfo.architecture;\n this.vendor = adapterInfo.vendor;\n }\n }\n\n isArchitecture(architecture: GpuArchitecture): boolean {\n return this.architecture === architecture;\n }\n\n isVendor(vendor: GpuVendor): boolean {\n return this.vendor === vendor;\n }\n}\n\n/**\n * this class is designed to store status and being used as a singleton for JSEP. It will be passed to jsepInit() as\n * the first parameter so that it is stored for future use.\n */\nexport class WebGpuBackend {\n adapterInfo: AdapterInfoImpl;\n device: GPUDevice;\n /**\n * an instance of GpuDataManager to manage a GpuDataId -> GpuBuffer mapping\n */\n gpuDataManager: GpuDataManager;\n /**\n * an instance of ProgramManager to build and run WebGPU compute shader program, and manage a ProgramKey -> Program\n * artifacts mapping\n */\n programManager: ProgramManager;\n\n /**\n * representing the session ID of which is currently being run.\n * `null` means no session is being run.\n * only valid when session.run is executed.\n */\n currentSessionId: number|null = null;\n\n /**\n * representing the kernel ID of which is currently being computed (CPU code perspective).\n * `null` means no kernel is being computed.\n * only one kernel can be computed at a moment.\n */\n currentKernelId: number|null = null;\n /**\n * a list of temporary GPU data for the current kernel. should release when the kernel done computation.\n */\n private temporaryData: GpuData[];\n /**\n * a KernelID -> a GPU data list, which stores persistent GPU data owned by the specific kernel.\n */\n private kernelPersistentData: Map;\n /**\n * a KernelID -> a custom data, which stores custom data owned by the specific kernel.\n */\n private kernelCustomData: Map;\n /**\n * get the custom data of the current kernel\n */\n get currentKernelCustomData(): {[key: string]: unknown} {\n if (this.currentKernelId === null) {\n throw new Error('currentKernelCustomData(): currentKernelId is null. 
(should not happen)');\n }\n\n let data = this.kernelCustomData.get(this.currentKernelId);\n if (!data) {\n data = {};\n this.kernelCustomData.set(this.currentKernelId, data);\n }\n\n return data;\n }\n\n // KernelID -> kernelInfo mapping\n kernels: Map;\n private commandEncoder: GPUCommandEncoder|null = null;\n private computePassEncoder: GPUComputePassEncoder|null = null;\n maxDispatchNumber = 16;\n pendingDispatchNumber = 0;\n\n // info of kernels pending submission for a single batch\n private pendingKernels: PendingKernelInfo[] = [];\n // queryReadBuffer -> pendingKernels mapping for all the batches\n private pendingQueries: Map = new Map();\n private queryResolveBuffer?: GPUBuffer;\n private querySet?: GPUQuerySet;\n private queryTimeBase?: bigint;\n queryType: TimestampQuery;\n\n env: Env;\n sessionStatus: SessionState = 'default';\n /**\n * a SessionID -> CommandInfo[] mapping. It's used to record all GPU commands for corresponding session.\n */\n capturedCommandList: Map = new Map();\n\n /**\n * a SessionID -> PendingKernelInfo[] mapping for profiling.\n */\n private capturedPendingKernels: Map = new Map();\n\n /**\n * a SessionID -> a Map of (InputOutputIndex -> [ID, GPUBuffer]) mapping.\n */\n sessionExternalDataMapping: Map> = new Map();\n\n async initialize(env: Env, adapter: GPUAdapter): Promise {\n this.env = env;\n const requiredFeatures: GPUFeatureName[] = [];\n const deviceDescriptor: GPUDeviceDescriptor = {\n requiredLimits: {\n maxComputeWorkgroupStorageSize: adapter.limits.maxComputeWorkgroupStorageSize,\n maxComputeWorkgroupsPerDimension: adapter.limits.maxComputeWorkgroupsPerDimension,\n maxStorageBufferBindingSize: adapter.limits.maxStorageBufferBindingSize,\n maxBufferSize: adapter.limits.maxBufferSize,\n maxComputeInvocationsPerWorkgroup: adapter.limits.maxComputeInvocationsPerWorkgroup,\n maxComputeWorkgroupSizeX: adapter.limits.maxComputeWorkgroupSizeX,\n maxComputeWorkgroupSizeY: adapter.limits.maxComputeWorkgroupSizeY,\n maxComputeWorkgroupSizeZ: adapter.limits.maxComputeWorkgroupSizeZ,\n },\n requiredFeatures,\n };\n\n if (adapter.features.has('chromium-experimental-timestamp-query-inside-passes')) {\n requiredFeatures.push('chromium-experimental-timestamp-query-inside-passes' as GPUFeatureName);\n } else if (adapter.features.has('timestamp-query')) {\n requiredFeatures.push('timestamp-query');\n }\n if (adapter.features.has('shader-f16')) {\n requiredFeatures.push('shader-f16');\n }\n\n this.device = await adapter.requestDevice(deviceDescriptor);\n this.adapterInfo = new AdapterInfoImpl(await adapter.requestAdapterInfo());\n this.gpuDataManager = createGpuDataManager(this);\n this.programManager = new ProgramManager(this);\n this.kernels = new Map();\n this.kernelPersistentData = new Map();\n this.kernelCustomData = new Map();\n\n // set up flags for logger\n configureLogger(env.logLevel!, !!env.debug);\n\n // TODO: set up flags\n\n this.device.onuncapturederror = ev => {\n if (ev.error instanceof GPUValidationError) {\n // eslint-disable-next-line no-console\n console.error(`An uncaught WebGPU validation error was raised: ${ev.error.message}`);\n }\n };\n\n Object.defineProperty(\n this.env.webgpu, 'device', {value: this.device, writable: false, enumerable: true, configurable: false});\n Object.defineProperty(\n this.env.webgpu, 'adapter', {value: adapter, writable: false, enumerable: true, configurable: false});\n\n // init queryType, which is necessary for InferenceSession.create\n this.setQueryType();\n }\n\n dispose(): void {\n if (typeof this.querySet 
!== 'undefined') {\n this.querySet.destroy();\n }\n this.gpuDataManager.dispose();\n }\n\n getCommandEncoder(): GPUCommandEncoder {\n if (!this.commandEncoder) {\n this.commandEncoder = this.device.createCommandEncoder();\n }\n return this.commandEncoder;\n }\n\n getComputePassEncoder(): GPUComputePassEncoder {\n if (!this.computePassEncoder) {\n const commandEncoder = this.getCommandEncoder();\n const computePassDescriptor: GPUComputePassDescriptor = {};\n\n if (this.queryType === 'at-passes') {\n computePassDescriptor.timestampWrites = {\n querySet: this.querySet!,\n beginningOfPassWriteIndex: this.pendingDispatchNumber * 2,\n endOfPassWriteIndex: this.pendingDispatchNumber * 2 + 1,\n };\n }\n\n this.computePassEncoder = commandEncoder.beginComputePass(computePassDescriptor);\n }\n return this.computePassEncoder;\n }\n\n endComputePass(): void {\n if (this.computePassEncoder) {\n this.computePassEncoder.end();\n this.computePassEncoder = null;\n }\n }\n\n flush(): void {\n if (!this.commandEncoder) {\n return;\n }\n\n TRACE_FUNC_BEGIN();\n\n this.endComputePass();\n let queryReadBuffer: GPUBuffer;\n if (this.queryType !== 'none') {\n this.commandEncoder.resolveQuerySet(\n this.querySet!, 0, this.pendingDispatchNumber * 2, this.queryResolveBuffer!, 0);\n\n queryReadBuffer = this.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: this.pendingDispatchNumber * 2 * 8, usage: GPUBufferUsage.MAP_READ | GPUBufferUsage.COPY_DST});\n\n this.pendingQueries.set(queryReadBuffer, this.pendingKernels);\n this.pendingKernels = [];\n this.commandEncoder.copyBufferToBuffer(\n this.queryResolveBuffer!, 0, queryReadBuffer, 0, this.pendingDispatchNumber * 2 * 8);\n }\n\n this.device.queue.submit([this.commandEncoder.finish()]);\n this.gpuDataManager.refreshPendingBuffers();\n this.commandEncoder = null;\n this.pendingDispatchNumber = 0;\n\n if (this.queryType !== 'none') {\n void queryReadBuffer!.mapAsync(GPUMapMode.READ).then(() => {\n const mappedData = new BigUint64Array(queryReadBuffer.getMappedRange());\n const pendingKernels = this.pendingQueries.get(queryReadBuffer)!;\n for (let i = 0; i < mappedData.length / 2; i++) {\n const pendingKernelInfo = pendingKernels[i];\n const kernelId = pendingKernelInfo.kernelId;\n const kernelInfo = this.kernels.get(kernelId)!;\n const kernelType = kernelInfo.kernelType;\n const kernelName = kernelInfo.kernelName;\n const programName = pendingKernelInfo.programName;\n const inputTensorViews = pendingKernelInfo.inputTensorViews;\n const outputTensorViews = pendingKernelInfo.outputTensorViews;\n const startTimeU64 = mappedData[i * 2];\n const endTimeU64 = mappedData[i * 2 + 1];\n\n if (typeof this.queryTimeBase === 'undefined') {\n this.queryTimeBase = startTimeU64;\n }\n\n const startTime = Number(startTimeU64 - this.queryTimeBase);\n const endTime = Number(endTimeU64 - this.queryTimeBase);\n\n if (!Number.isSafeInteger(startTime) || !Number.isSafeInteger(endTime)) {\n throw new RangeError('incorrect timestamp range');\n }\n\n if (this.env.webgpu.profiling?.ondata) {\n this.env.webgpu.profiling.ondata({\n version: 1,\n inputsMetadata: inputTensorViews.map(\n value => ({dims: value.dims, dataType: tensorDataTypeEnumToString(value.dataType)})),\n outputsMetadata: outputTensorViews.map(\n value => ({dims: value.dims, dataType: tensorDataTypeEnumToString(value.dataType)})),\n kernelId,\n kernelType,\n kernelName,\n programName,\n startTime,\n endTime,\n });\n } else {\n // if no callback is provided, print the profiling message to console\n let 
inputShapes = '';\n inputTensorViews.forEach((value, i) => {\n inputShapes += `input[${i}]: [${value.dims}] | ${tensorDataTypeEnumToString(value.dataType)}, `;\n });\n let outputShapes = '';\n outputTensorViews.forEach((value, i) => {\n outputShapes += `output[${i}]: [${value.dims}] | ${tensorDataTypeEnumToString(value.dataType)}, `;\n });\n // eslint-disable-next-line no-console\n console.log(`[profiling] kernel \"${kernelId}|${kernelType}|${kernelName}|${programName}\" ${inputShapes}${\n outputShapes}execution time: ${endTime - startTime} ns`);\n }\n TRACE('GPU', `${programName}::${startTimeU64}::${endTimeU64}`);\n }\n queryReadBuffer.unmap();\n this.pendingQueries.delete(queryReadBuffer);\n });\n }\n TRACE_FUNC_END();\n }\n\n /**\n * run a WebGPU program.\n * @param program a ProgramInfo instance\n * @param inputTensorViews a TensorView array. each element represents a value already exists in GPU.\n * @param outputIndices an indices array. each element can be either -1 (temporary data), -2 (persistent data) or an\n * index to the kernel's output.\n * @param createKernelOutput a callback function that create a value to kernel's output with the given index\n * @param createIntermediateOutput a callback function that create a value as a intermediate value, either temporary\n * or persistent (owned by the current kernel)\n * @returns a TensorView array representing the result.\n */\n run(program: ProgramInfo, inputTensorViews: readonly TensorView[], outputIndices: readonly number[],\n createKernelOutput: (index: number, dataType: number, dims: readonly number[]) => TensorView,\n createIntermediateOutput: (dataType: number, dims: readonly number[]) => TensorView,\n outputCount: number): TensorView[] {\n TRACE_FUNC_BEGIN(program.name);\n // create info for inputs\n const inputDatas: GpuData[] = [];\n for (let i = 0; i < inputTensorViews.length; ++i) {\n const data = inputTensorViews[i].data;\n // if tensor view data is 0, it means the output is zero-sized tensor, and there is no GPU data for it.\n if (data === 0) {\n continue;\n }\n const gpuData = this.gpuDataManager.get(data);\n if (!gpuData) {\n throw new Error(`no GPU data for input: ${data}`);\n }\n inputDatas.push(gpuData);\n }\n\n const {outputs, dispatchGroup, programUniforms} = program.getRunData(inputTensorViews);\n\n // check output indices\n const validatedOutputIndices = outputIndices.length === 0 ? outputs.map((_, i) => i) : outputIndices;\n if (validatedOutputIndices.length !== outputs.length) {\n throw new Error(`Output size ${validatedOutputIndices.length} must be equal to ${outputs.length}.`);\n }\n\n // create info for outputs\n const outputTensorViews: TensorView[] = [];\n const outputDatas: GpuData[] = [];\n for (let i = 0; i < outputs.length; ++i) {\n // value -1 and -2 are used for creating temporary and persistent outputs.\n // value -3 is used for placeholder output. So -3, -2, -1 and 0, 1, 2, ... are valid\n // output indices. 
see type definition of ComputeContextInputsOutputsMapping for more details.\n if (!Number.isInteger(validatedOutputIndices[i]) || validatedOutputIndices[i] < -3 ||\n validatedOutputIndices[i] >= outputCount) {\n throw new Error(`Invalid output index: ${validatedOutputIndices[i]}`);\n }\n if (validatedOutputIndices[i] === -3) {\n continue;\n }\n const isTemporary = validatedOutputIndices[i] === -1;\n const isPersistent = validatedOutputIndices[i] === -2;\n const tensorView = (isTemporary || isPersistent) ?\n createIntermediateOutput(outputs[i].dataType, outputs[i].dims) :\n createKernelOutput(validatedOutputIndices[i], outputs[i].dataType, outputs[i].dims);\n outputTensorViews.push(tensorView);\n // if tensor view data is 0, it means the output is zero-sized tensor, and there is no GPU data for it.\n if (tensorView.data === 0) {\n continue;\n }\n const gpuData = this.gpuDataManager.get(tensorView.data);\n if (!gpuData) {\n throw new Error(`no GPU data for output: ${tensorView.data}`);\n }\n if (isTemporary) {\n this.temporaryData.push(gpuData);\n }\n if (isPersistent) {\n let persistentData = this.kernelPersistentData.get(this.currentKernelId!);\n if (!persistentData) {\n persistentData = [];\n this.kernelPersistentData.set(this.currentKernelId!, persistentData);\n }\n persistentData.push(gpuData);\n }\n outputDatas.push(gpuData);\n }\n\n // when there are any zero-sized tensor in the inputs or outputs, we should report error unless all outputs are\n // zero-sized tensors.\n if (inputDatas.length !== inputTensorViews.length || outputDatas.length !== outputTensorViews.length) {\n // if all outputs are zero-sized tensors, there is no need to run the program.\n if (outputDatas.length === 0) {\n TRACE_FUNC_END(program.name);\n return outputTensorViews;\n }\n // if some outputs are zero-sized tensors, report an error.\n //\n // TODO: so far we don't see any use case that outputs include both zero-sized tensors and non-zero-sized tensors.\n // If we see such use case, we need to make a change here to support it.\n throw new Error(\n `Program ${program.name} has zero-sized tensor(s) in inputs or outputs. This is not supported now.`);\n }\n\n // load uniforms\n // TODO: add cache for uniform (is it necessary?)\n //\n let uniformBufferBinding: GPUBindingResource|undefined;\n if (programUniforms) {\n let currentOffset = 0;\n const offsets: number[] = [];\n\n programUniforms.forEach(v => {\n const data = typeof v.data === 'number' ? [v.data] : v.data;\n if (data.length === 0) {\n return;\n }\n // https://www.w3.org/TR/WGSL/#alignof\n const sizeOfElement = v.type === DataType.float16 ? 2 : 4;\n let sizeOfVecOrMat;\n let baseAlignment;\n if (v.type === DataType.float16) {\n baseAlignment = data.length > 4 ? 16 : (data.length > 2 ? 8 : data.length * sizeOfElement);\n sizeOfVecOrMat = data.length > 4 ? 16 : sizeOfElement * data.length;\n } else {\n baseAlignment = data.length <= 2 ? data.length * sizeOfElement : 16;\n sizeOfVecOrMat = 16;\n }\n currentOffset = Math.ceil(currentOffset / baseAlignment) * baseAlignment;\n offsets.push(currentOffset);\n // For non-float16 type, when data.length > 4, the uniform variable is of type array,N>, where\n // N = Math.ceil(data.length / 4) and SizeOf(vec4) = 16. The total byte length is N *\n // SizeOf(vec4). For float16 type, when data.length > 4, the uniform variable is of type\n // array,N>, where N = Math.ceil(data.length / 8) and SizeOf(mat2x4) = 16. The total byte\n // length is N * SizeOf(mat2x4).\n const elementPerVecOrMat = v.type === DataType.float16 ? 
8 : 4;\n currentOffset += data.length > 4 ? Math.ceil(data.length / elementPerVecOrMat) * sizeOfVecOrMat :\n data.length * sizeOfElement;\n });\n\n // Meet alignment of struct here: https://www.w3.org/TR/WGSL/#alignment-and-size. For simplicity, set\n // maxAlignmentOfField to 16 since the underlying buffer has been rounded up to 16.\n const maxAlignmentOfField = 16;\n currentOffset = Math.ceil(currentOffset / maxAlignmentOfField) * maxAlignmentOfField;\n const arrayBuffer = new ArrayBuffer(currentOffset);\n programUniforms.forEach((v, i) => {\n const offset = offsets[i];\n const data = typeof v.data === 'number' ? [v.data] : v.data;\n if (v.type === DataType.int32) {\n new Int32Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.uint32) {\n new Uint32Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.float16) {\n // TODO: use Float16Array.\n new Uint16Array(arrayBuffer, offset, data.length).set(data);\n } else if (v.type === DataType.float) {\n new Float32Array(arrayBuffer, offset, data.length).set(data);\n } else {\n throw new Error(`Unsupported uniform type: ${tensorDataTypeEnumToString(v.type)}`);\n }\n });\n\n const uniformBufferData =\n // eslint-disable-next-line no-bitwise\n this.gpuDataManager.create(currentOffset, GPUBufferUsage.COPY_DST | GPUBufferUsage.UNIFORM);\n this.device.queue.writeBuffer(uniformBufferData.buffer, 0, arrayBuffer, 0, currentOffset);\n this.gpuDataManager.release(uniformBufferData.id);\n uniformBufferBinding = {offset: 0, size: currentOffset, buffer: uniformBufferData.buffer};\n }\n\n const normalizedDispatchGroup = this.programManager.normalizeDispatchGroupSize(dispatchGroup);\n const is1DimensionDispatch = normalizedDispatchGroup[1] === 1 && normalizedDispatchGroup[2] === 1;\n // get program info\n const key = getProgramInfoUniqueKey(program, inputTensorViews, is1DimensionDispatch);\n let artifact = this.programManager.getArtifact(key);\n if (!artifact) {\n artifact = this.programManager.build(program, normalizedDispatchGroup);\n this.programManager.setArtifact(key, artifact);\n LOG_DEBUG('info', () => `[artifact] key: ${key}, programName: ${program.name}`);\n }\n\n // validate uniform variables\n if (programUniforms && artifact.uniformVariablesInfo) {\n if (programUniforms.length !== artifact.uniformVariablesInfo.length) {\n throw new Error(`Uniform variables count mismatch: expect ${artifact.uniformVariablesInfo.length}, got ${\n programUniforms.length} in program \"${artifact.programInfo.name}\".`);\n }\n for (let i = 0; i < programUniforms.length; i++) {\n const uniform = programUniforms[i];\n const actualType = uniform.type;\n const actualLength = typeof uniform.data === 'number' ? 
1 : uniform.data.length;\n const [type, length] = artifact.uniformVariablesInfo[i];\n if (actualType !== type || actualLength !== length) {\n throw new Error(`Uniform variable ${i} mismatch: expect type ${type} with size ${length}, got type ${\n actualType} with size ${actualLength} in program \"${artifact.programInfo.name}\".`);\n }\n }\n }\n\n LOG_DEBUG(\n 'info',\n () => `[ProgramManager] run \"${program.name}\" (key=${key}) with ${normalizedDispatchGroup[0]}x${\n normalizedDispatchGroup[1]}x${normalizedDispatchGroup[2]}`);\n\n if (this.queryType !== 'none' || this.sessionStatus === 'capturing') {\n const pendingKernelInfo: PendingKernelInfo = {\n kernelId: this.currentKernelId!,\n programName: artifact.programInfo.name,\n inputTensorViews,\n outputTensorViews,\n };\n this.pendingKernels.push(pendingKernelInfo);\n\n if (this.sessionStatus === 'capturing') {\n const sessionPendingKernels = this.capturedPendingKernels.get(this.currentSessionId!);\n sessionPendingKernels!.push(pendingKernelInfo);\n }\n }\n\n this.programManager.run(artifact, inputDatas, outputDatas, normalizedDispatchGroup, uniformBufferBinding);\n\n TRACE_FUNC_END(program.name);\n return outputTensorViews;\n }\n\n upload(gpuDataId: number, data: Uint8Array): void {\n this.gpuDataManager.upload(gpuDataId, data);\n }\n\n memcpy(src: number, dst: number): void {\n this.gpuDataManager.memcpy(src, dst);\n }\n\n async download(gpuDataId: number, getTargetBuffer: () => Uint8Array): Promise {\n // the underlying buffer may be changed after the async function is called. so we use a getter function to make sure\n // the buffer is up-to-date.\n await this.gpuDataManager.download(gpuDataId, getTargetBuffer);\n }\n\n alloc(size: number): number {\n return this.gpuDataManager.create(size).id;\n }\n\n free(ptr: number): number {\n return this.gpuDataManager.release(ptr);\n }\n\n createKernel(kernelType: string, kernelId: number, attribute: unknown, kernelName: string): void {\n const op = WEBGPU_OP_RESOLVE_RULES.get(kernelType);\n if (!op) {\n throw new Error(`kernel not implemented: ${kernelType}`);\n }\n\n const kernelInfo: KernelInfo = {\n kernelType,\n kernelName,\n kernelEntry: op[0],\n attributes: [op[1], attribute],\n };\n this.kernels.set(kernelId, kernelInfo);\n }\n\n releaseKernel(kernelId: number): void {\n const persistentData = this.kernelPersistentData.get(kernelId);\n if (persistentData) {\n for (const data of persistentData) {\n this.gpuDataManager.release(data.id);\n }\n this.kernelPersistentData.delete(kernelId);\n }\n\n this.kernelCustomData.delete(kernelId);\n this.kernels.delete(kernelId);\n }\n\n computeKernel(kernelId: number, context: ComputeContext, errors: Array>): number {\n const kernel = this.kernels.get(kernelId);\n if (!kernel) {\n throw new Error(`kernel not created: ${kernelId}`);\n }\n const kernelType = kernel.kernelType;\n const kernelName = kernel.kernelName;\n const kernelEntry = kernel.kernelEntry;\n const attributes = kernel.attributes;\n if (this.currentKernelId !== null) {\n throw new Error(`kernel \"[${kernelType}] ${kernelName}\" is not allowed to be called recursively`);\n }\n this.currentKernelId = kernelId;\n\n // parse attributes if necessary\n if (attributes[0]) {\n attributes[1] = attributes[0](attributes[1]);\n attributes[0] = undefined;\n }\n\n LOG_DEBUG('info', () => `[WebGPU] Start to run kernel \"[${kernelType}] ${kernelName}\"...`);\n\n const useErrorScope = this.env.debug;\n\n this.temporaryData = [];\n try {\n if (useErrorScope) {\n this.device.pushErrorScope('validation');\n 
}\n\n kernelEntry(context, attributes[1]);\n return 0; // ORT_OK\n } catch (e) {\n errors.push(Promise.resolve(`[WebGPU] Kernel \"[${kernelType}] ${kernelName}\" failed. ${e}`));\n return 1; // ORT_FAIL\n } finally {\n if (useErrorScope) {\n errors.push(this.device.popErrorScope().then(\n err => err ? `GPU validation error for kernel \"[${kernelType}] ${kernelName}\": ${err.message}` : null));\n }\n\n for (const data of this.temporaryData) {\n this.gpuDataManager.release(data.id);\n }\n this.temporaryData = [];\n this.currentKernelId = null;\n }\n }\n\n // #region external buffer\n registerBuffer(sessionId: number, index: number, buffer: GPUBuffer, size: number): number {\n let sessionInputOutputMapping = this.sessionExternalDataMapping.get(sessionId);\n if (!sessionInputOutputMapping) {\n sessionInputOutputMapping = new Map();\n this.sessionExternalDataMapping.set(sessionId, sessionInputOutputMapping);\n }\n\n const previousBuffer = sessionInputOutputMapping.get(index);\n const id = this.gpuDataManager.registerExternalBuffer(buffer, size, previousBuffer?.[1]);\n sessionInputOutputMapping.set(index, [id, buffer]);\n return id;\n }\n unregisterBuffers(sessionId: number): void {\n const sessionInputOutputMapping = this.sessionExternalDataMapping.get(sessionId);\n if (sessionInputOutputMapping) {\n sessionInputOutputMapping.forEach(bufferInfo => this.gpuDataManager.unregisterExternalBuffer(bufferInfo[1]));\n this.sessionExternalDataMapping.delete(sessionId);\n }\n }\n getBuffer(gpuDataId: number): GPUBuffer {\n const gpuData = this.gpuDataManager.get(gpuDataId);\n if (!gpuData) {\n throw new Error(`no GPU data for buffer: ${gpuDataId}`);\n }\n return gpuData.buffer;\n }\n createDownloader(gpuBuffer: GPUBuffer, size: number, type: Tensor.GpuBufferDataTypes):\n () => Promise {\n return async () => {\n const data = await downloadGpuData(this, gpuBuffer, size);\n return createView(data.buffer, type);\n };\n }\n // #endregion\n writeTimestamp(index: number): void {\n if (this.queryType !== 'inside-passes') {\n return;\n }\n\n // eslint-disable-next-line @typescript-eslint/no-explicit-any\n (this.computePassEncoder as any).writeTimestamp(this.querySet, index);\n }\n setQueryType(): void {\n this.queryType = 'none';\n if (this.env.webgpu.profiling?.mode === 'default' ||\n (typeof this.env.trace === 'undefined' ? 
this.env.wasm.trace : this.env.trace)) {\n if (this.device.features.has('chromium-experimental-timestamp-query-inside-passes')) {\n this.queryType = 'inside-passes';\n } else if (this.device.features.has('timestamp-query')) {\n this.queryType = 'at-passes';\n }\n\n if (this.queryType !== 'none' && typeof this.querySet === 'undefined') {\n this.querySet = this.device.createQuerySet({\n type: 'timestamp',\n count: this.maxDispatchNumber * 2,\n });\n this.queryResolveBuffer = this.device.createBuffer(\n // eslint-disable-next-line no-bitwise\n {size: this.maxDispatchNumber * 2 * 8, usage: GPUBufferUsage.COPY_SRC | GPUBufferUsage.QUERY_RESOLVE});\n }\n }\n }\n\n captureBegin(): void {\n LOG_DEBUG('info', 'captureBegin');\n if (!this.capturedCommandList.get(this.currentSessionId!)) {\n this.capturedCommandList.set(this.currentSessionId!, []);\n }\n if (!this.capturedPendingKernels.get(this.currentSessionId!)) {\n this.capturedPendingKernels.set(this.currentSessionId!, []);\n }\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'capturing';\n }\n captureEnd(): void {\n LOG_DEBUG('info', 'captureEnd');\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'default';\n }\n replay(): void {\n LOG_DEBUG('info', 'replay');\n this.sessionStatus = 'replaying';\n const sessionCommandList = this.capturedCommandList.get(this.currentSessionId!);\n const sessionPendingKernels = this.capturedPendingKernels.get(this.currentSessionId!);\n const length = sessionCommandList!.length;\n this.pendingKernels = [];\n for (let i = 0; i < length; i++) {\n const computePassEncoder = this.getComputePassEncoder();\n const command = sessionCommandList![i];\n this.writeTimestamp(this.pendingDispatchNumber * 2);\n computePassEncoder.setPipeline(command.computePipeline);\n computePassEncoder.setBindGroup(0, command.bindGroup);\n computePassEncoder.dispatchWorkgroups(...command.dispatchGroup);\n this.writeTimestamp(this.pendingDispatchNumber * 2 + 1);\n this.pendingDispatchNumber++;\n if (this.queryType !== 'none') {\n this.pendingKernels.push(sessionPendingKernels![i]);\n }\n if (this.pendingDispatchNumber >= this.maxDispatchNumber || this.queryType === 'at-passes') {\n this.endComputePass();\n }\n if (this.pendingDispatchNumber >= this.maxDispatchNumber) {\n this.flush();\n }\n }\n // flush the left commands before we change the status.\n this.flush();\n this.sessionStatus = 'default';\n }\n\n onReleaseSession(sessionId: number): void {\n this.unregisterBuffers(sessionId);\n if (this.capturedCommandList.has(sessionId)) {\n this.capturedCommandList.delete(sessionId);\n }\n if (this.capturedPendingKernels.has(sessionId)) {\n this.capturedPendingKernels.delete(sessionId);\n }\n this.gpuDataManager.onReleaseSession(sessionId);\n }\n\n onRunStart(sessionId: number): void {\n this.currentSessionId = sessionId;\n this.setQueryType();\n }\n}\n", "// Copyright (c) Microsoft Corporation. 
All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env} from 'onnxruntime-common';\n\nimport {OrtWasmModule} from '../binding/ort-wasm';\nimport {DataType, getTensorElementSize} from '../wasm-common';\n\nimport {WebGpuBackend} from './backend-webgpu';\nimport {LOG_DEBUG} from './log';\nimport {TensorView} from './tensor-view';\nimport {ShapeUtil} from './util';\nimport {AdapterInfo, ComputeContext, ComputeContextInputsOutputsMapping, ProgramInfo} from './webgpu/types';\n\n/* eslint-disable no-bitwise */\n\nclass TensorViewImpl implements TensorView {\n constructor(\n private module: OrtWasmModule, public readonly dataType: number, public readonly data: number,\n public readonly dims: readonly number[]) {}\n\n getFloat32Array(): Float32Array {\n if (this.dataType !== DataType.float) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new Float32Array() :\n new Float32Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n getBigInt64Array(): BigInt64Array {\n if (this.dataType !== DataType.int64) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new BigInt64Array() :\n new BigInt64Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n getInt32Array(): Int32Array {\n if (this.dataType !== DataType.int32) {\n throw new Error('Invalid data type');\n }\n const elementCount = ShapeUtil.size(this.dims);\n return elementCount === 0 ? new Int32Array() : new Int32Array(this.module.HEAP8.buffer, this.data, elementCount);\n }\n\n reshape(newDims: readonly number[]): TensorView {\n if (ShapeUtil.size(newDims) !== ShapeUtil.size(this.dims)) {\n throw new Error('Invalid new shape');\n }\n return new TensorViewImpl(this.module, this.dataType, this.data, newDims);\n }\n}\n\nclass ComputeContextImpl implements ComputeContext {\n readonly adapterInfo: AdapterInfo;\n readonly opKernelContext: number;\n readonly inputs: readonly TensorView[];\n readonly outputCount: number;\n get kernelCustomData(): {[key: string]: unknown} {\n return this.backend.currentKernelCustomData;\n }\n get customDataBuffer(): Uint8Array {\n return this.module.HEAPU8.subarray(this.customDataOffset, this.customDataOffset + this.customDataSize);\n }\n private customDataOffset = 0;\n private customDataSize = 0;\n constructor(private module: OrtWasmModule, private backend: WebGpuBackend, contextDataOffset: number) {\n this.adapterInfo = backend.adapterInfo;\n const heapU32 = module.HEAPU32;\n\n // extract context data\n let dataIndex = (contextDataOffset >>> 2);\n this.opKernelContext = heapU32[dataIndex++];\n const inputCount = heapU32[dataIndex++];\n this.outputCount = heapU32[dataIndex++];\n this.customDataOffset = heapU32[dataIndex++];\n this.customDataSize = heapU32[dataIndex++];\n\n const inputs: TensorView[] = [];\n for (let i = 0; i < inputCount; i++) {\n const dataType = heapU32[dataIndex++];\n const data = heapU32[dataIndex++];\n const dim = heapU32[dataIndex++];\n const dims: number[] = [];\n for (let d = 0; d < dim; d++) {\n dims.push(heapU32[dataIndex++]);\n }\n inputs.push(new TensorViewImpl(module, dataType, data, dims));\n }\n this.inputs = inputs;\n }\n\n getMaxComputeWorkgroupSizes(): [number, number, number] {\n return [\n this.backend.device.limits.maxComputeWorkgroupSizeX, this.backend.device.limits.maxComputeWorkgroupSizeY,\n this.backend.device.limits.maxComputeWorkgroupSizeZ\n ];\n }\n\n getMaxComputeWorkgroupStoragesize(): number 
{\n return this.backend.device.limits.maxComputeWorkgroupStorageSize;\n }\n\n compute(program: ProgramInfo, inputsOutputsMapping?: ComputeContextInputsOutputsMapping): TensorView[] {\n // prepare inputs. inputs should always be valid data.\n const mappedInputs =\n inputsOutputsMapping?.inputs?.map(i => typeof i === 'number' ? this.inputs[i] : i) ?? this.inputs;\n // prepare outputs.\n const outputIndices = inputsOutputsMapping?.outputs ?? [];\n const createKernelOutput = (index: number, dataType: number, dims: readonly number[]): TensorView =>\n new TensorViewImpl(this.module, dataType, this.output(index, dims), dims);\n const createTemporaryOutput = (dataType: number, dims: readonly number[]): TensorView => {\n const elementSize = getTensorElementSize(dataType);\n if (!elementSize) {\n throw new Error(`Unsupported data type: ${dataType}`);\n }\n const bufferSize = elementSize * ShapeUtil.size(dims);\n const gpuDataId = bufferSize > 0 ? this.backend.gpuDataManager.create(bufferSize).id : 0;\n return new TensorViewImpl(this.module, dataType, gpuDataId, dims);\n };\n return this.backend.run(\n program, mappedInputs, outputIndices, createKernelOutput, createTemporaryOutput, this.outputCount);\n }\n\n output(index: number, dims: readonly number[]): number {\n const stack = this.module.stackSave();\n try {\n const data = this.module.stackAlloc((1 + dims.length) * 4 /* sizeof(size_t) */);\n let offset = data >> 2;\n this.module.HEAPU32[offset++] = dims.length;\n for (let i = 0; i < dims.length; i++) {\n this.module.HEAPU32[offset++] = dims[i];\n }\n return this.module._JsepOutput!(this.opKernelContext, index, data);\n } catch (e) {\n throw new Error(\n `Failed to generate kernel's output[${index}] with dims [${dims}]. ` +\n 'If you are running with pre-allocated output, please make sure the output type/dims are correct. ' +\n `Error: ${e}`);\n } finally {\n this.module.stackRestore(stack);\n }\n }\n}\n\n/**\n * Initialize JSEP with WebGPU backend.\n *\n * This function will be called after the WebAssembly module is loaded and initialized (\"_OrtInit\" is called), once for\n * each of the following EPs if they are specified:\n * - \"webgpu\"\n * - \"webnn\"\n *\n * For WebGPU, this function expects:\n * - WebGPU is enabled in build (BUILD_DEFS.DISABLE_WEBGPU === false).\n * - WebGPU is available in current environment. (a valid GPUAdapter is passed in)\n *\n * For WebNN, this function expects:\n * - WebNN is enabled in build (BUILD_DEFS.DISABLE_WEBGPU === false).\n * - WebNN is available in current environment. (navigator.ml is not undefined)\n *\n * If the WebAssembly module is not built with JSEP support, this function will throw an error. This will invalidate\n * 'webgpu'/'webnn' backend.\n *\n * @param name - the name of the EP, either \"webgpu\" or \"webnn\"\n * @param module - the ORT WebAssembly module\n * @param env - the ORT environment variable (ort.env)\n * @param gpuAdapter - the pre-created GPU adapter\n */\nexport const init =\n async(name: 'webgpu'|'webnn', module: OrtWasmModule, env: Env, gpuAdapter?: GPUAdapter): Promise => {\n const jsepInit = module.jsepInit;\n if (!jsepInit) {\n throw new Error('Failed to initialize JSEP. 
The WebAssembly module is not built with JSEP support.');\n }\n\n if (name === 'webgpu') {\n const backend = new WebGpuBackend();\n await backend.initialize(env, gpuAdapter!);\n\n jsepInit('webgpu', [\n // backend\n backend,\n\n // jsepAlloc()\n (size: number) => backend.alloc(size),\n\n // jsepFree()\n (ptr: number) => backend.free(ptr),\n\n // jsepCopy(src, dst, size, isSourceGpu)\n (src: number, dst: number, size: number, isSourceGpu = false) => {\n if (isSourceGpu) {\n LOG_DEBUG('verbose', () => `[WebGPU] jsepCopyGpuToGpu: src=${src}, dst=${dst}, size=${size}`);\n backend.memcpy(src, dst);\n } else {\n LOG_DEBUG('verbose', () => `[WebGPU] jsepCopyCpuToGpu: dataOffset=${src}, gpuDataId=${dst}, size=${size}`);\n const data = module.HEAPU8.subarray(src >>> 0, (src >>> 0) + size);\n backend.upload(dst, data);\n }\n },\n\n // jsepCopyAsync(src, dst, size)\n async(gpuDataId: number, dataOffset: number, size: number):\n Promise => {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] jsepCopyGpuToCpu: gpuDataId=${gpuDataId}, dataOffset=${dataOffset}, size=${size}`);\n\n await backend.download(\n gpuDataId, () => module.HEAPU8.subarray(dataOffset >>> 0, (dataOffset >>> 0) + size));\n },\n\n // jsepCreateKernel\n (kernelType: string, kernelId: number, attribute: unknown) => backend.createKernel(\n kernelType, kernelId, attribute, module.UTF8ToString(module._JsepGetNodeName!(kernelId))),\n\n // jsepReleaseKernel\n (kernel: number) => backend.releaseKernel(kernel),\n\n // jsepRun\n (kernel: number, contextDataOffset: number, sessionHandle: number, errors: Array>) => {\n LOG_DEBUG(\n 'verbose',\n () => `[WebGPU] jsepRun: sessionHandle=${sessionHandle}, kernel=${kernel}, contextDataOffset=${\n contextDataOffset}`);\n const context = new ComputeContextImpl(module, backend, contextDataOffset);\n return backend.computeKernel(kernel, context, errors);\n },\n // jsepCaptureBegin\n () => backend.captureBegin(),\n // jsepCaptureEnd\n () => backend.captureEnd(),\n // jsepReplay\n () => backend.replay()\n ]);\n } else {\n jsepInit('webnn');\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {Env, InferenceSession, Tensor} from 'onnxruntime-common';\n\nimport {SerializableInternalBuffer, SerializableSessionMetadata, SerializableTensorMetadata, TensorMetadata} from './proxy-messages';\nimport {setRunOptions} from './run-options';\nimport {setSessionOptions} from './session-options';\nimport {dataLocationStringToEnum, getTensorElementSize, isGpuBufferSupportedType, logLevelStringToEnum, tensorDataTypeEnumToString, tensorDataTypeStringToEnum, tensorTypeToTypedArrayConstructor} from './wasm-common';\nimport {getInstance} from './wasm-factory';\nimport {allocWasmString, checkLastError} from './wasm-utils';\nimport {loadFile} from './wasm-utils-load-file';\n\n// #region Initializations\n\n/**\n * There are 4 different \"initialization\" steps for ORT. They happen in different places and different time.\n *\n * 1. JavaScript initialization for onnxruntime-common and onnxruntime-web.\n * This is the first initialization step. In this step, onnxruntime-web calls onnxruntime-common's registerBackend()\n * function multiple times to register all the available backends. The backend registration is very fast. It only\n * registers the backend name with the uninitialized backend object. No heavy initialization is done in this step.\n * Refer to web/lib/index.ts for the backend registration.\n *\n * 2. 
WebAssembly artifact initialization.\n * This happens when any registered wasm backend is used for the first time (ie. `ort.InferenceSession.create()` or\n * `ort.TrainingSession.create()` is called). In this step, onnxruntime-web does the followings:\n * - create a proxy worker and make sure the proxy worker is ready to receive messages, if proxy is enabled.\n * - perform feature detection, locate correct WebAssembly artifact path and call the Emscripten generated\n * JavaScript code to initialize the WebAssembly runtime.\n * - if proxy is enabled, this step happens in the proxy worker using message 'init-wasm'.\n * - downloading the 'ort-wasm{...}.wasm' file is done in this step.\n * - if multi-thread is enabled, one or more webworker will be created to initialize the PThread threadpool.\n *\n * 3. ORT environment initialization.\n * This happens after step 2. In this step, onnxruntime-web performs ONNX Runtime environment initialization.\n * Function `_OrtInit()` is called in this step.\n * - if proxy is enabled, this step happens in the proxy worker using message 'init-ort'.\n * - logging level (ort.env.logLevel) and thread number (ort.env.wasm.numThreads) are set in this step.\n *\n * 4. Session initialization.\n * This happens when `ort.InferenceSession.create()` or `ort.TrainingSession.create()` is called. Unlike the first 3\n * steps (they only called once), this step will be done for each session. In this step, onnxruntime-web does the\n * followings:\n * If the parameter is a URL:\n * - download the model data from the URL.\n * - copy the model data to the WASM heap. (proxy: 'copy-from')\n * - dereference the model buffer. This step allows the original ArrayBuffer to be garbage collected.\n * - call `_OrtCreateSession()` to create the session. (proxy: 'create')\n *\n * If the parameter is a Uint8Array object:\n * - copy the model data to the WASM heap. (proxy: 'copy-from')\n * - call `_OrtCreateSession()` to create the session. 
(proxy: 'create')\n *\n *\n */\n\n/**\n * initialize ORT environment.\n *\n * @param numThreads SetGlobalIntraOpNumThreads(numThreads)\n * @param loggingLevel CreateEnv(static_cast(logging_level))\n */\nconst initOrt = (numThreads: number, loggingLevel: number): void => {\n const errorCode = getInstance()._OrtInit(numThreads, loggingLevel);\n if (errorCode !== 0) {\n checkLastError('Can\\'t initialize onnxruntime.');\n }\n};\n\n/**\n * intialize runtime environment.\n * @param env passed in the environment config object.\n */\nexport const initRuntime = async(env: Env): Promise => {\n // init ORT\n initOrt(env.wasm.numThreads!, logLevelStringToEnum(env.logLevel));\n};\n\n/**\n * perform EP specific initialization.\n *\n * @param env\n * @param epName\n */\nexport const initEp = async(env: Env, epName: string): Promise => {\n if (!BUILD_DEFS.DISABLE_WEBGPU) {\n // eslint-disable-next-line @typescript-eslint/no-require-imports, @typescript-eslint/no-var-requires\n const initJsep = require('./jsep/init').init;\n\n if (epName === 'webgpu') {\n // perform WebGPU availability check\n if (typeof navigator === 'undefined' || !navigator.gpu) {\n throw new Error('WebGPU is not supported in current environment');\n }\n\n let adapter = env.webgpu.adapter as GPUAdapter | null;\n if (!adapter) {\n // if adapter is not set, request a new adapter.\n const powerPreference = env.webgpu.powerPreference;\n if (powerPreference !== undefined && powerPreference !== 'low-power' &&\n powerPreference !== 'high-performance') {\n throw new Error(`Invalid powerPreference setting: \"${powerPreference}\"`);\n }\n const forceFallbackAdapter = env.webgpu.forceFallbackAdapter;\n if (forceFallbackAdapter !== undefined && typeof forceFallbackAdapter !== 'boolean') {\n throw new Error(`Invalid forceFallbackAdapter setting: \"${forceFallbackAdapter}\"`);\n }\n adapter = await navigator.gpu.requestAdapter({powerPreference, forceFallbackAdapter});\n if (!adapter) {\n throw new Error(\n 'Failed to get GPU adapter. ' +\n 'You may need to enable flag \"--enable-unsafe-webgpu\" if you are using Chrome.');\n }\n } else {\n // if adapter is set, validate it.\n if (typeof adapter.limits !== 'object' || typeof adapter.features !== 'object' ||\n typeof adapter.requestDevice !== 'function') {\n throw new Error('Invalid GPU adapter set in `env.webgpu.adapter`. It must be a GPUAdapter object.');\n }\n }\n\n if (!env.wasm.simd) {\n throw new Error(\n 'Not supported for WebGPU=ON and SIMD=OFF. 
Please set `env.wasm.simd` to true when using `webgpu` EP');\n }\n\n await initJsep('webgpu', getInstance(), env, adapter);\n }\n if (epName === 'webnn') {\n // perform WebNN availability check\n if (typeof navigator === 'undefined' || !(navigator as unknown as {ml: unknown}).ml) {\n throw new Error('WebNN is not supported in current environment');\n }\n\n await initJsep('webnn', getInstance(), env);\n }\n }\n};\n\n// #endregion Initializations\n\n/**\n * valid data locations for input/output tensors.\n */\ntype SupportedTensorDataLocationForInputOutput = 'cpu'|'cpu-pinned'|'gpu-buffer';\n\ntype IOBindingState = {\n /**\n * the handle of IO binding.\n */\n readonly handle: number;\n\n /**\n * the preferred location for each output tensor.\n *\n * value is one of 'cpu', 'cpu-pinned', 'gpu-buffer'.\n */\n readonly outputPreferredLocations: readonly SupportedTensorDataLocationForInputOutput[];\n\n /**\n * enum value of the preferred location for each output tensor.\n */\n readonly outputPreferredLocationsEncoded: readonly number[];\n};\n\n/**\n * tuple elements are: InferenceSession ID; inputNamesUTF8Encoded; outputNamesUTF8Encoded; bindingState\n */\ntype SessionMetadata = [\n inferenceSessionId: number, inputNamesUTF8Encoded: number[], outputNamesUTF8Encoded: number[],\n bindingState: IOBindingState|null, enableGraphCapture: boolean, inputOutputBound: boolean\n];\n\nconst activeSessions = new Map();\n\n/**\n * get the input/output count of the session.\n * @param sessionHandle the handle representing the session. should be non-zero.\n * @returns a tuple including 2 numbers, representing the input count and output count.\n */\nconst getSessionInputOutputCount = (sessionHandle: number): [number, number] => {\n const wasm = getInstance();\n const stack = wasm.stackSave();\n try {\n const dataOffset = wasm.stackAlloc(8);\n const errorCode = wasm._OrtGetInputOutputCount(sessionHandle, dataOffset, dataOffset + 4);\n if (errorCode !== 0) {\n checkLastError('Can\\'t get session input/output count.');\n }\n return [wasm.HEAP32[dataOffset / 4], wasm.HEAP32[dataOffset / 4 + 1]];\n } finally {\n wasm.stackRestore(stack);\n }\n};\n\n/**\n * allocate the memory and memcpy the external buffer.\n *\n * @param model - the external buffer containing the model data. Must not be the same buffer as the WASM heap.\n * @returns a 2-elements tuple - the pointer and size of the allocated buffer\n */\nexport const copyFromExternalBuffer = (model: Uint8Array): [number, number] => {\n const wasm = getInstance();\n const modelDataOffset = wasm._malloc(model.byteLength);\n if (modelDataOffset === 0) {\n throw new Error(`Can't create a session. 
failed to allocate a buffer of size ${model.byteLength}.`);\n }\n wasm.HEAPU8.set(model, modelDataOffset);\n return [modelDataOffset, model.byteLength];\n};\n\n/**\n * create an inference session from a model data buffer.\n *\n * @param modelData - either a Uint8Array object representing the model data, or a 2-elements tuple containing the\n * pointer and size of the model data buffer.\n * @param options an optional session options object.\n * @returns a 3-elements tuple containing [session handle, input names, output names]\n */\nexport const createSession = async(\n modelData: Uint8Array|SerializableInternalBuffer,\n options?: InferenceSession.SessionOptions): Promise => {\n let modelDataOffset: number, modelDataLength: number;\n const wasm = getInstance();\n\n if (Array.isArray(modelData)) {\n // if model data is an array, it must be a 2-elements tuple containing the pointer and size of the model data\n [modelDataOffset, modelDataLength] = modelData;\n } else if (modelData.buffer === wasm.HEAPU8.buffer) {\n // if model data uses the same buffer as the WASM heap, we don't need to copy it.\n [modelDataOffset, modelDataLength] = [modelData.byteOffset, modelData.byteLength];\n } else {\n // otherwise, copy the model data to the WASM heap.\n [modelDataOffset, modelDataLength] = copyFromExternalBuffer(modelData);\n }\n\n let sessionHandle = 0;\n let sessionOptionsHandle = 0;\n let ioBindingHandle = 0;\n let allocs: number[] = [];\n const inputNamesUTF8Encoded = [];\n const outputNamesUTF8Encoded = [];\n\n try {\n [sessionOptionsHandle, allocs] = setSessionOptions(options);\n\n if (options?.externalData && wasm.mountExternalData) {\n const loadingPromises = [];\n for (const file of options.externalData) {\n const path = typeof file === 'string' ? file : file.path;\n loadingPromises.push(loadFile(typeof file === 'string' ? file : file.data).then(data => {\n wasm.mountExternalData!(path, data);\n }));\n }\n\n // wait for all external data files to be loaded\n await Promise.all(loadingPromises);\n }\n\n sessionHandle = await wasm._OrtCreateSession(modelDataOffset, modelDataLength, sessionOptionsHandle);\n if (sessionHandle === 0) {\n checkLastError('Can\\'t create a session.');\n }\n\n const [inputCount, outputCount] = getSessionInputOutputCount(sessionHandle);\n\n const enableGraphCapture = !!options?.enableGraphCapture;\n\n const inputNames = [];\n const outputNames = [];\n const outputPreferredLocations: SupportedTensorDataLocationForInputOutput[] = [];\n for (let i = 0; i < inputCount; i++) {\n const name = wasm._OrtGetInputName(sessionHandle, i);\n if (name === 0) {\n checkLastError('Can\\'t get an input name.');\n }\n inputNamesUTF8Encoded.push(name);\n inputNames.push(wasm.UTF8ToString(name));\n }\n for (let i = 0; i < outputCount; i++) {\n const name = wasm._OrtGetOutputName(sessionHandle, i);\n if (name === 0) {\n checkLastError('Can\\'t get an output name.');\n }\n outputNamesUTF8Encoded.push(name);\n const nameString = wasm.UTF8ToString(name);\n outputNames.push(nameString);\n\n if (!BUILD_DEFS.DISABLE_WEBGPU) {\n if (enableGraphCapture && options?.preferredOutputLocation === undefined) {\n outputPreferredLocations.push('gpu-buffer');\n continue;\n }\n const location = typeof options?.preferredOutputLocation === 'string' ?\n options.preferredOutputLocation :\n options?.preferredOutputLocation?.[nameString] ?? 
'cpu';\n if (location !== 'cpu' && location !== 'cpu-pinned' && location !== 'gpu-buffer') {\n throw new Error(`Not supported preferred output location: ${location}.`);\n }\n if (enableGraphCapture && location !== 'gpu-buffer') {\n throw new Error(`Not supported preferred output location: ${\n location}. Only 'gpu-buffer' location is supported when enableGraphCapture is true.`);\n }\n outputPreferredLocations.push(location);\n }\n }\n\n // use IO binding only when at least one output is preffered to be on GPU.\n let bindingState: IOBindingState|null = null;\n if (!BUILD_DEFS.DISABLE_WEBGPU && outputPreferredLocations.some(l => l === 'gpu-buffer')) {\n ioBindingHandle = wasm._OrtCreateBinding(sessionHandle);\n if (ioBindingHandle === 0) {\n checkLastError('Can\\'t create IO binding.');\n }\n\n bindingState = {\n handle: ioBindingHandle,\n outputPreferredLocations,\n outputPreferredLocationsEncoded: outputPreferredLocations.map(l => dataLocationStringToEnum(l)),\n };\n }\n\n activeSessions.set(\n sessionHandle,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, bindingState, enableGraphCapture, false]);\n return [sessionHandle, inputNames, outputNames];\n } catch (e) {\n inputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n outputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n\n if (ioBindingHandle !== 0) {\n wasm._OrtReleaseBinding(ioBindingHandle);\n }\n\n if (sessionHandle !== 0) {\n wasm._OrtReleaseSession(sessionHandle);\n }\n throw e;\n } finally {\n wasm._free(modelDataOffset);\n if (sessionOptionsHandle !== 0) {\n wasm._OrtReleaseSessionOptions(sessionOptionsHandle);\n }\n allocs.forEach(alloc => wasm._free(alloc));\n\n // unmount external data if necessary\n wasm.unmountExternalData?.();\n }\n};\n\nexport const releaseSession = (sessionId: number): void => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error(`cannot release session. 
invalid session id: ${sessionId}`);\n }\n const [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture] = session;\n\n if (ioBindingState) {\n if (enableGraphCapture) {\n wasm._OrtClearBoundOutputs(ioBindingState.handle);\n }\n wasm._OrtReleaseBinding(ioBindingState.handle);\n }\n\n wasm.jsepOnReleaseSession?.(sessionId);\n\n inputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n outputNamesUTF8Encoded.forEach(buf => wasm._OrtFree(buf));\n wasm._OrtReleaseSession(sessionHandle);\n activeSessions.delete(sessionId);\n};\n\nexport const prepareInputOutputTensor =\n (tensor: TensorMetadata|null, tensorHandles: number[], allocs: number[], sessionId: number, index: number,\n enableGraphCapture = false): void => {\n if (!tensor) {\n tensorHandles.push(0);\n return;\n }\n\n const wasm = getInstance();\n\n const dataType = tensor[0];\n const dims = tensor[1];\n const location = tensor[3];\n\n let rawData: number;\n let dataByteLength: number;\n\n if (dataType === 'string' && location === 'gpu-buffer') {\n throw new Error('String tensor is not supported on GPU.');\n }\n\n if (enableGraphCapture && location !== 'gpu-buffer') {\n throw new Error(\n `External buffer must be provided for input/output index ${index} when enableGraphCapture is true.`);\n }\n\n if (location === 'gpu-buffer') {\n const gpuBuffer = tensor[2].gpuBuffer as GPUBuffer;\n const elementSizeInBytes = getTensorElementSize(tensorDataTypeStringToEnum(dataType))!;\n dataByteLength = dims.reduce((a, b) => a * b, 1) * elementSizeInBytes;\n\n const registerBuffer = wasm.jsepRegisterBuffer;\n if (!registerBuffer) {\n throw new Error('Tensor location \"gpu-buffer\" is not supported without using WebGPU.');\n }\n rawData = registerBuffer(sessionId, index, gpuBuffer, dataByteLength);\n } else {\n const data = tensor[2];\n\n if (Array.isArray(data)) {\n // string tensor\n dataByteLength = 4 * data.length;\n rawData = wasm._malloc(dataByteLength);\n allocs.push(rawData);\n let dataIndex = rawData / 4;\n for (let i = 0; i < data.length; i++) {\n if (typeof data[i] !== 'string') {\n throw new TypeError(`tensor data at index ${i} is not a string`);\n }\n wasm.HEAPU32[dataIndex++] = allocWasmString(data[i], allocs);\n }\n } else {\n dataByteLength = data.byteLength;\n rawData = wasm._malloc(dataByteLength);\n allocs.push(rawData);\n wasm.HEAPU8.set(new Uint8Array(data.buffer, data.byteOffset, dataByteLength), rawData);\n }\n }\n\n const stack = wasm.stackSave();\n const dimsOffset = wasm.stackAlloc(4 * dims.length);\n try {\n let dimIndex = dimsOffset / 4;\n dims.forEach(d => wasm.HEAP32[dimIndex++] = d);\n const tensor = wasm._OrtCreateTensor(\n tensorDataTypeStringToEnum(dataType), rawData, dataByteLength, dimsOffset, dims.length,\n dataLocationStringToEnum(location));\n if (tensor === 0) {\n checkLastError(`Can't create tensor for input/output. session=${sessionId}, index=${index}.`);\n }\n tensorHandles.push(tensor);\n } finally {\n wasm.stackRestore(stack);\n }\n };\n\n/**\n * perform inference run\n */\nexport const run = async(\n sessionId: number, inputIndices: number[], inputTensors: TensorMetadata[], outputIndices: number[],\n outputTensors: Array, options: InferenceSession.RunOptions): Promise => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error(`cannot run inference. 
invalid session id: ${sessionId}`);\n }\n const sessionHandle = session[0];\n const inputNamesUTF8Encoded = session[1];\n const outputNamesUTF8Encoded = session[2];\n const ioBindingState = session[3];\n const enableGraphCapture = session[4];\n const inputOutputBound = session[5];\n\n const inputCount = inputIndices.length;\n const outputCount = outputIndices.length;\n\n let runOptionsHandle = 0;\n let runOptionsAllocs: number[] = [];\n\n const inputTensorHandles: number[] = [];\n const outputTensorHandles: number[] = [];\n const inputOutputAllocs: number[] = [];\n\n const beforeRunStack = wasm.stackSave();\n const inputValuesOffset = wasm.stackAlloc(inputCount * 4);\n const inputNamesOffset = wasm.stackAlloc(inputCount * 4);\n const outputValuesOffset = wasm.stackAlloc(outputCount * 4);\n const outputNamesOffset = wasm.stackAlloc(outputCount * 4);\n\n try {\n [runOptionsHandle, runOptionsAllocs] = setRunOptions(options);\n\n // create input tensors\n for (let i = 0; i < inputCount; i++) {\n prepareInputOutputTensor(\n inputTensors[i], inputTensorHandles, inputOutputAllocs, sessionId, inputIndices[i], enableGraphCapture);\n }\n\n // create output tensors\n for (let i = 0; i < outputCount; i++) {\n prepareInputOutputTensor(\n outputTensors[i], outputTensorHandles, inputOutputAllocs, sessionId, inputCount + outputIndices[i],\n enableGraphCapture);\n }\n\n let inputValuesIndex = inputValuesOffset / 4;\n let inputNamesIndex = inputNamesOffset / 4;\n let outputValuesIndex = outputValuesOffset / 4;\n let outputNamesIndex = outputNamesOffset / 4;\n for (let i = 0; i < inputCount; i++) {\n wasm.HEAPU32[inputValuesIndex++] = inputTensorHandles[i];\n wasm.HEAPU32[inputNamesIndex++] = inputNamesUTF8Encoded[inputIndices[i]];\n }\n for (let i = 0; i < outputCount; i++) {\n wasm.HEAPU32[outputValuesIndex++] = outputTensorHandles[i];\n wasm.HEAPU32[outputNamesIndex++] = outputNamesUTF8Encoded[outputIndices[i]];\n }\n\n if (!BUILD_DEFS.DISABLE_WEBGPU && ioBindingState && !inputOutputBound) {\n const {handle, outputPreferredLocations, outputPreferredLocationsEncoded} = ioBindingState;\n\n if (inputNamesUTF8Encoded.length !== inputCount) {\n throw new Error(`input count from feeds (${\n inputCount}) is expected to be always equal to model's input count (${inputNamesUTF8Encoded.length}).`);\n }\n\n // process inputs\n for (let i = 0; i < inputCount; i++) {\n const index = inputIndices[i];\n const errorCode = await wasm._OrtBindInput(handle, inputNamesUTF8Encoded[index], inputTensorHandles[i]);\n if (errorCode !== 0) {\n checkLastError(`Can't bind input[${i}] for session=${sessionId}.`);\n }\n }\n\n // process pre-allocated outputs\n for (let i = 0; i < outputCount; i++) {\n const index = outputIndices[i];\n const location = outputTensors[i]?.[3]; // undefined means output is not pre-allocated.\n\n if (location) {\n // output is pre-allocated. bind the tensor.\n const errorCode = wasm._OrtBindOutput(handle, outputNamesUTF8Encoded[index], outputTensorHandles[i], 0);\n if (errorCode !== 0) {\n checkLastError(`Can't bind pre-allocated output[${i}] for session=${sessionId}.`);\n }\n } else {\n // output is not pre-allocated. 
reset preferred location.\n const errorCode =\n wasm._OrtBindOutput(handle, outputNamesUTF8Encoded[index], 0, outputPreferredLocationsEncoded[index]);\n if (errorCode !== 0) {\n checkLastError(`Can't bind output[${i}] to ${outputPreferredLocations[i]} for session=${sessionId}.`);\n }\n }\n }\n activeSessions.set(\n sessionId,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture, true]);\n }\n\n wasm.jsepOnRunStart?.(sessionHandle);\n let errorCode: number;\n if (!BUILD_DEFS.DISABLE_WEBGPU && ioBindingState) {\n errorCode = await wasm._OrtRunWithBinding(\n sessionHandle, ioBindingState.handle, outputCount, outputValuesOffset, runOptionsHandle);\n } else {\n errorCode = await wasm._OrtRun(\n sessionHandle, inputNamesOffset, inputValuesOffset, inputCount, outputNamesOffset, outputCount,\n outputValuesOffset, runOptionsHandle);\n }\n\n if (errorCode !== 0) {\n checkLastError('failed to call OrtRun().');\n }\n\n const output: TensorMetadata[] = [];\n\n for (let i = 0; i < outputCount; i++) {\n const tensor = wasm.HEAPU32[outputValuesOffset / 4 + i];\n if (tensor === outputTensorHandles[i]) {\n // output tensor is pre-allocated. no need to copy data.\n output.push(outputTensors[i]!);\n continue;\n }\n\n const beforeGetTensorDataStack = wasm.stackSave();\n // stack allocate 4 pointer value\n const tensorDataOffset = wasm.stackAlloc(4 * 4);\n\n let keepOutputTensor = false;\n let type: Tensor.Type|undefined, dataOffset = 0;\n try {\n const errorCode = wasm._OrtGetTensorData(\n tensor, tensorDataOffset, tensorDataOffset + 4, tensorDataOffset + 8, tensorDataOffset + 12);\n if (errorCode !== 0) {\n checkLastError(`Can't access output tensor data on index ${i}.`);\n }\n let tensorDataIndex = tensorDataOffset / 4;\n const dataType = wasm.HEAPU32[tensorDataIndex++];\n dataOffset = wasm.HEAPU32[tensorDataIndex++];\n const dimsOffset = wasm.HEAPU32[tensorDataIndex++];\n const dimsLength = wasm.HEAPU32[tensorDataIndex++];\n const dims = [];\n for (let i = 0; i < dimsLength; i++) {\n dims.push(wasm.HEAPU32[dimsOffset / 4 + i]);\n }\n wasm._OrtFree(dimsOffset);\n\n const size = dims.reduce((a, b) => a * b, 1);\n type = tensorDataTypeEnumToString(dataType);\n\n const preferredLocation = ioBindingState?.outputPreferredLocations[outputIndices[i]];\n\n if (type === 'string') {\n if (preferredLocation === 'gpu-buffer') {\n throw new Error('String tensor is not supported on GPU.');\n }\n const stringData: string[] = [];\n let dataIndex = dataOffset / 4;\n for (let i = 0; i < size; i++) {\n const offset = wasm.HEAPU32[dataIndex++];\n const maxBytesToRead = i === size - 1 ? undefined : wasm.HEAPU32[dataIndex] - offset;\n stringData.push(wasm.UTF8ToString(offset, maxBytesToRead));\n }\n output.push([type, dims, stringData, 'cpu']);\n } else {\n // If a certain output's preferred location is GPU but the tensor is empty, we still need to create a CPU\n // tensor for it. There is no mapping GPU buffer for an empty tensor.\n if (preferredLocation === 'gpu-buffer' && size > 0) {\n const getBuffer = wasm.jsepGetBuffer;\n if (!getBuffer) {\n throw new Error('preferredLocation \"gpu-buffer\" is not supported without using WebGPU.');\n }\n const gpuBuffer = getBuffer(dataOffset);\n const elementSize = getTensorElementSize(dataType);\n if (elementSize === undefined || !isGpuBufferSupportedType(type)) {\n throw new Error(`Unsupported data type: ${type}`);\n }\n\n // do not release the tensor right now. 
it will be released when user calls tensor.dispose().\n keepOutputTensor = true;\n\n output.push([\n type, dims, {\n gpuBuffer,\n download: wasm.jsepCreateDownloader!(gpuBuffer, size * elementSize, type),\n dispose: () => {\n wasm._OrtReleaseTensor(tensor);\n }\n },\n 'gpu-buffer'\n ]);\n } else {\n const typedArrayConstructor = tensorTypeToTypedArrayConstructor(type);\n const data = new typedArrayConstructor(size);\n new Uint8Array(data.buffer, data.byteOffset, data.byteLength)\n .set(wasm.HEAPU8.subarray(dataOffset, dataOffset + data.byteLength));\n output.push([type, dims, data, 'cpu']);\n }\n }\n } finally {\n wasm.stackRestore(beforeGetTensorDataStack);\n if (type === 'string' && dataOffset) {\n wasm._free(dataOffset);\n }\n if (!keepOutputTensor) {\n wasm._OrtReleaseTensor(tensor);\n }\n }\n }\n\n if (ioBindingState && !enableGraphCapture) {\n wasm._OrtClearBoundOutputs(ioBindingState.handle);\n activeSessions.set(\n sessionId,\n [sessionHandle, inputNamesUTF8Encoded, outputNamesUTF8Encoded, ioBindingState, enableGraphCapture, false]);\n }\n return output;\n } finally {\n wasm.stackRestore(beforeRunStack);\n\n inputTensorHandles.forEach(v => wasm._OrtReleaseTensor(v));\n outputTensorHandles.forEach(v => wasm._OrtReleaseTensor(v));\n inputOutputAllocs.forEach(p => wasm._free(p));\n\n if (runOptionsHandle !== 0) {\n wasm._OrtReleaseRunOptions(runOptionsHandle);\n }\n runOptionsAllocs.forEach(p => wasm._free(p));\n }\n};\n\n/**\n * end profiling\n */\nexport const endProfiling = (sessionId: number): void => {\n const wasm = getInstance();\n const session = activeSessions.get(sessionId);\n if (!session) {\n throw new Error('invalid session id');\n }\n const sessionHandle = session[0];\n\n // profile file name is not used yet, but it must be freed.\n const profileFileName = wasm._OrtEndProfiling(sessionHandle);\n if (profileFileName === 0) {\n checkLastError('Can\\'t get an profile file name.');\n }\n wasm._OrtFree(profileFileName);\n};\n\nexport const extractTransferableBuffers = (tensors: readonly SerializableTensorMetadata[]): ArrayBufferLike[] => {\n const buffers: ArrayBufferLike[] = [];\n for (const tensor of tensors) {\n const data = tensor[2];\n if (!Array.isArray(data) && 'buffer' in data) {\n buffers.push(data.buffer);\n }\n }\n return buffers;\n};\n", "/*!\n * ONNX Runtime Web v1.18.0\n * Copyright (c) Microsoft Corporation. 
All rights reserved.\n * Licensed under the MIT License.\n */\n\"use strict\";(()=>{var Ai=Object.defineProperty;var kh=Object.getOwnPropertyDescriptor;var Oh=Object.getOwnPropertyNames;var Rh=Object.prototype.hasOwnProperty;var ae=(e,r)=>()=>(e&&(r=e(e=0)),r);var un=(e,r)=>()=>(r||e((r={exports:{}}).exports,r),r.exports),Hn=(e,r)=>{for(var t in r)Ai(e,t,{get:r[t],enumerable:!0})},Bh=(e,r,t,u)=>{if(r&&typeof r==\"object\"||typeof r==\"function\")for(let s of Oh(r))!Rh.call(e,s)&&s!==t&&Ai(e,s,{get:()=>r[s],enumerable:!(u=kh(r,s))||u.enumerable});return e};var ir=e=>Bh(Ai({},\"__esModule\",{value:!0}),e);var Ln={};Hn(Ln,{createReadStream:()=>qs,readFile:()=>Dh,readFileSync:()=>zh});var Dh,zh,qs,Fn=ae(()=>{Dh=void 0,zh=void 0,qs=void 0});var Ti={};Hn(Ti,{join:()=>Mh});var Mh,Ei=ae(()=>{Mh=void 0});var Zs=un((Ys,Pi)=>{\"use strict\";var Ks=(()=>{var e=typeof document<\"u\"?document.currentScript?.src:void 0;return typeof __filename<\"u\"&&(e||=__filename),function(r={}){var t=r,u,s,c=new Promise((n,i)=>{u=n,s=i});t.mountExternalData=(n,i)=>{(t.Ph||(t.Ph=new Map)).set(n,i)},t.unmountExternalData=()=>{delete t.Ph};let f=()=>{let n=(o,a,l)=>(...p)=>{let h=yt,y=a?.();p=o(...p);let b=a?.();return y!==b&&(o=b,l(y),a=l=null),yt!=h?Tn():p},i=o=>async(...a)=>{try{if(t.Oh)throw Error(\"Session already started\");let l=t.Oh={gi:a[0],errors:[]},p=await o(...a);if(t.Oh!==l)throw Error(\"Session mismatch\");t.Uh?.flush();let h=l.errors;if(0b),0t._OrtCreateSession,o=>t._OrtCreateSession=o),t._OrtRun=i(n(t._OrtRun,()=>t._OrtRun,o=>t._OrtRun=o)),t._OrtRunWithBinding=i(n(t._OrtRunWithBinding,()=>t._OrtRunWithBinding,o=>t._OrtRunWithBinding=o)),t._OrtBindInput=n(t._OrtBindInput,()=>t._OrtBindInput,o=>t._OrtBindInput=o),f=void 0};t.jsepInit=(n,i)=>{if(f?.(),n===\"webgpu\"){[t.Uh,t.Zh,t.ci,t.Vh,t.bi,t.je,t.di,t.fi,t.$h,t.ai,t.ei]=i;let o=t.Uh;t.jsepRegisterBuffer=(a,l,p,h)=>o.registerBuffer(a,l,p,h),t.jsepGetBuffer=a=>o.getBuffer(a),t.jsepCreateDownloader=(a,l,p)=>o.createDownloader(a,l,p),t.jsepOnReleaseSession=a=>{o.onReleaseSession(a)},t.jsepOnRunStart=a=>o.onRunStart(a)}};var d=Object.assign({},t),g=\"./this.program\",w=(n,i)=>{throw i},C=typeof window==\"object\",$=typeof importScripts==\"function\",A=typeof process==\"object\"&&typeof process.versions==\"object\"&&typeof process.versions.node==\"string\",P=\"\",x,E,O;if(A){var B=(Fn(),ir(Ln)),R=(Ei(),ir(Ti));P=$?R.dirname(P)+\"/\":__dirname+\"/\",x=(n,i)=>(n=tt(n)?new URL(n):R.normalize(n),B.readFileSync(n,i?void 0:\"utf8\")),O=n=>(n=x(n,!0),n.buffer||(n=new Uint8Array(n)),n),E=(n,i,o,a=!0)=>{n=tt(n)?new URL(n):R.normalize(n),B.readFile(n,a?void 0:\"utf8\",(l,p)=>{l?o(l):i(a?p.buffer:p)})},!t.thisProgram&&1{throw process.exitCode=n,i}}else(C||$)&&($?P=self.location.href:typeof document<\"u\"&&document.currentScript&&(P=document.currentScript.src),e&&(P=e),P.startsWith(\"blob:\")?P=\"\":P=P.substr(0,P.replace(/[?#].*/,\"\").lastIndexOf(\"/\")+1),x=n=>{var i=new XMLHttpRequest;return i.open(\"GET\",n,!1),i.send(null),i.responseText},$&&(O=n=>{var i=new XMLHttpRequest;return i.open(\"GET\",n,!1),i.responseType=\"arraybuffer\",i.send(null),new Uint8Array(i.response)}),E=(n,i,o)=>{var a=new XMLHttpRequest;a.open(\"GET\",n,!0),a.responseType=\"arraybuffer\",a.onload=()=>{a.status==200||a.status==0&&a.response?i(a.response):o()},a.onerror=o,a.send(null)});var j=console.log.bind(console),U=console.error.bind(console);Object.assign(t,d),d=null;var L,F=!1,te,J,oe,le,ge,X,pe,we,ue,me,Ee;function Pe(){var n=L.buffer;t.HEAP8=J=new Int8Array(n),t.HEAP16=le=new 
Int16Array(n),t.HEAPU8=oe=new Uint8Array(n),t.HEAPU16=ge=new Uint16Array(n),t.HEAP32=X=new Int32Array(n),t.HEAPU32=pe=new Uint32Array(n),t.HEAPF32=we=new Float32Array(n),t.HEAPF64=Ee=new Float64Array(n),t.HEAP64=ue=new BigInt64Array(n),t.HEAPU64=me=new BigUint64Array(n)}var Ce=[],be=[],Ae=[],_e=0,Je=null,ee=null;function ce(n){throw n=\"Aborted(\"+n+\")\",U(n),F=!0,te=1,n=new WebAssembly.RuntimeError(n+\". Build with -sASSERTIONS for more info.\"),s(n),n}var Be=n=>n.startsWith(\"data:application/octet-stream;base64,\"),tt=n=>n.startsWith(\"file://\"),Ye;if(Ye=\"ort-wasm-simd.wasm\",!Be(Ye)){var Ke=Ye;Ye=t.locateFile?t.locateFile(Ke,P):P+Ke}function Pt(n){if(O)return O(n);throw\"both async and sync fetching of the wasm failed\"}function Ze(n){if(C||$){if(typeof fetch==\"function\"&&!tt(n))return fetch(n,{credentials:\"same-origin\"}).then(i=>{if(!i.ok)throw`failed to load wasm binary file at '${n}'`;return i.arrayBuffer()}).catch(()=>Pt(n));if(E)return new Promise((i,o)=>{E(n,a=>i(new Uint8Array(a)),o)})}return Promise.resolve().then(()=>Pt(n))}function rt(n,i,o){return Ze(n).then(a=>WebAssembly.instantiate(a,i)).then(o,a=>{U(`failed to asynchronously prepare wasm: ${a}`),ce(a)})}function ct(n,i){var o=Ye;return typeof WebAssembly.instantiateStreaming!=\"function\"||Be(o)||tt(o)||A||typeof fetch!=\"function\"?rt(o,n,i):fetch(o,{credentials:\"same-origin\"}).then(a=>WebAssembly.instantiateStreaming(a,n).then(i,function(l){return U(`wasm streaming compile failed: ${l}`),U(\"falling back to ArrayBuffer instantiation\"),rt(o,n,i)}))}var Ut={1261504:(n,i,o,a)=>{if(typeof t>\"u\"||!t.Ph)return 1;if(n=qe(n>>>0),n.startsWith(\"./\")&&(n=n.substring(2)),n=t.Ph.get(n),!n)return 2;if(i>>>=0,o>>>=0,i+o>n.byteLength)return 3;try{return oe.set(n.subarray(i,i+o),a>>>0>>>0),0}catch{return 4}},1262005:(n,i,o)=>{o=qe(o);let a=new Uint8Array(i);a.set(oe.subarray(n>>>0,n+i>>>0)),typeof process==\"object\"&&typeof process.versions==\"object\"&&typeof process.versions.node==\"string\"?(Fn(),ir(Ln)).writeFileSync(o,a):(n=new File([a],o,{type:\"application/octet-stream\"}),n=URL.createObjectURL(n),window.open(n,\"_blank\"))},1262513:()=>{t.$h()},1262544:()=>{t.ai()},1262573:()=>{t.ei()},1262598:n=>t.Zh(n),1262631:n=>t.ci(n),1262663:(n,i,o)=>{t.Vh(n,i,o,!0)},1262702:(n,i,o)=>{t.Vh(n,i,o)},1262735:n=>{t.je(\"Abs\",n,void 0)},1262786:n=>{t.je(\"Neg\",n,void 0)},1262837:n=>{t.je(\"Floor\",n,void 0)},1262890:n=>{t.je(\"Ceil\",n,void 0)},1262942:n=>{t.je(\"Reciprocal\",n,void 0)},1263e3:n=>{t.je(\"Sqrt\",n,void 0)},1263052:n=>{t.je(\"Exp\",n,void 0)},1263103:n=>{t.je(\"Erf\",n,void 0)},1263154:n=>{t.je(\"Sigmoid\",n,void 0)},1263209:(n,i,o)=>{t.je(\"HardSigmoid\",n,{alpha:i,beta:o})},1263288:n=>{t.je(\"Log\",n,void 0)},1263339:n=>{t.je(\"Sin\",n,void 0)},1263390:n=>{t.je(\"Cos\",n,void 0)},1263441:n=>{t.je(\"Tan\",n,void 0)},1263492:n=>{t.je(\"Asin\",n,void 0)},1263544:n=>{t.je(\"Acos\",n,void 0)},1263596:n=>{t.je(\"Atan\",n,void 0)},1263648:n=>{t.je(\"Sinh\",n,void 0)},1263700:n=>{t.je(\"Cosh\",n,void 0)},1263752:n=>{t.je(\"Asinh\",n,void 0)},1263805:n=>{t.je(\"Acosh\",n,void 0)},1263858:n=>{t.je(\"Atanh\",n,void 0)},1263911:n=>{t.je(\"Tanh\",n,void 0)},1263963:n=>{t.je(\"Not\",n,void 0)},1264014:(n,i,o)=>{t.je(\"Clip\",n,{min:i,max:o})},1264083:n=>{t.je(\"Clip\",n,void 0)},1264135:(n,i)=>{t.je(\"Elu\",n,{alpha:i})},1264193:n=>{t.je(\"Relu\",n,void 
0)},1264245:(n,i)=>{t.je(\"LeakyRelu\",n,{alpha:i})},1264309:(n,i)=>{t.je(\"ThresholdedRelu\",n,{alpha:i})},1264379:(n,i)=>{t.je(\"Cast\",n,{to:i})},1264437:n=>{t.je(\"Add\",n,void 0)},1264488:n=>{t.je(\"Sub\",n,void 0)},1264539:n=>{t.je(\"Mul\",n,void 0)},1264590:n=>{t.je(\"Div\",n,void 0)},1264641:n=>{t.je(\"Pow\",n,void 0)},1264692:n=>{t.je(\"Equal\",n,void 0)},1264745:n=>{t.je(\"Greater\",n,void 0)},1264800:n=>{t.je(\"GreaterOrEqual\",n,void 0)},1264862:n=>{t.je(\"Less\",n,void 0)},1264914:n=>{t.je(\"LessOrEqual\",n,void 0)},1264973:(n,i,o,a,l)=>{t.je(\"ReduceMean\",n,{keepDims:!!i,noopWithEmptyAxes:!!o,axes:a?Array.from(X.subarray(a>>>0,l>>>0)):[]})},1265132:(n,i,o,a,l)=>{t.je(\"ReduceMax\",n,{keepDims:!!i,noopWithEmptyAxes:!!o,axes:a?Array.from(X.subarray(a>>>0,l>>>0)):[]})},1265290:(n,i,o,a,l)=>{t.je(\"ReduceMin\",n,{keepDims:!!i,noopWithEmptyAxes:!!o,axes:a?Array.from(X.subarray(a>>>0,l>>>0)):[]})},1265448:(n,i,o,a,l)=>{t.je(\"ReduceProd\",n,{keepDims:!!i,noopWithEmptyAxes:!!o,axes:a?Array.from(X.subarray(a>>>0,l>>>0)):[]})},1265607:(n,i,o,a,l)=>{t.je(\"ReduceSum\",n,{keepDims:!!i,noopWithEmptyAxes:!!o,axes:a?Array.from(X.subarray(a>>>0,l>>>0)):[]})},1265765:(n,i,o,a,l)=>{t.je(\"ReduceL1\",n,{keepDims:!!i,noopWithEmptyAxes:!!o,axes:a?Array.from(X.subarray(a>>>0,l>>>0)):[]})},1265922:(n,i,o,a,l)=>{t.je(\"ReduceL2\",n,{keepDims:!!i,noopWithEmptyAxes:!!o,axes:a?Array.from(X.subarray(a>>>0,l>>>0)):[]})},1266079:(n,i,o,a,l)=>{t.je(\"ReduceLogSum\",n,{keepDims:!!i,noopWithEmptyAxes:!!o,axes:a?Array.from(X.subarray(a>>>0,l>>>0)):[]})},1266240:(n,i,o,a,l)=>{t.je(\"ReduceSumSquare\",n,{keepDims:!!i,noopWithEmptyAxes:!!o,axes:a?Array.from(X.subarray(a>>>0,l>>>0)):[]})},1266404:(n,i,o,a,l)=>{t.je(\"ReduceLogSumExp\",n,{keepDims:!!i,noopWithEmptyAxes:!!o,axes:a?Array.from(X.subarray(a>>>0,l>>>0)):[]})},1266568:n=>{t.je(\"Where\",n,void 
0)},1266621:(n,i,o)=>{t.je(\"Transpose\",n,{perm:i?Array.from(X.subarray(i>>>0,o>>>0)):[]})},1266729:(n,i,o,a)=>{t.je(\"DepthToSpace\",n,{blocksize:i,mode:qe(o),format:a?\"NHWC\":\"NCHW\"})},1266862:(n,i,o,a)=>{t.je(\"DepthToSpace\",n,{blocksize:i,mode:qe(o),format:a?\"NHWC\":\"NCHW\"})},1266995:(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V)=>{t.je(\"ConvTranspose\",n,{format:b?\"NHWC\":\"NCHW\",autoPad:i,dilations:[o],group:a,kernelShape:[l],pads:[p,h],strides:[y],wIsConst:()=>!!J[_>>>0],outputPadding:S?Array.from(X.subarray(S>>>0,k>>>0)):[],outputShape:z?Array.from(X.subarray(z>>>0,T>>>0)):[],activation:qe(V)})},1267396:(n,i,o,a,l,p,h,y,b,_,S,k,z,T)=>{t.je(\"ConvTranspose\",n,{format:y?\"NHWC\":\"NCHW\",autoPad:i,dilations:Array.from(X.subarray(o>>>0,(o>>>0)+2>>>0)),group:a,kernelShape:Array.from(X.subarray(l>>>0,(l>>>0)+2>>>0)),pads:Array.from(X.subarray(p>>>0,(p>>>0)+4>>>0)),strides:Array.from(X.subarray(h>>>0,(h>>>0)+2>>>0)),wIsConst:()=>!!J[b>>>0],outputPadding:_?Array.from(X.subarray(_>>>0,S>>>0)):[],outputShape:k?Array.from(X.subarray(k>>>0,z>>>0)):[],activation:qe(T)})},1267961:(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V)=>{t.je(\"ConvTranspose\",n,{format:b?\"NHWC\":\"NCHW\",autoPad:i,dilations:[o],group:a,kernelShape:[l],pads:[p,h],strides:[y],wIsConst:()=>!!J[_>>>0],outputPadding:S?Array.from(X.subarray(S>>>0,k>>>0)):[],outputShape:z?Array.from(X.subarray(z>>>0,T>>>0)):[],activation:qe(V)})},1268362:(n,i,o,a,l,p,h,y,b,_,S,k,z,T)=>{t.je(\"ConvTranspose\",n,{format:y?\"NHWC\":\"NCHW\",autoPad:i,dilations:Array.from(X.subarray(o>>>0,(o>>>0)+2>>>0)),group:a,kernelShape:Array.from(X.subarray(l>>>0,(l>>>0)+2>>>0)),pads:Array.from(X.subarray(p>>>0,(p>>>0)+4>>>0)),strides:Array.from(X.subarray(h>>>0,(h>>>0)+2>>>0)),wIsConst:()=>!!J[b>>>0],outputPadding:_?Array.from(X.subarray(_>>>0,S>>>0)):[],outputShape:k?Array.from(X.subarray(k>>>0,z>>>0)):[],activation:qe(T)})},1268927:(n,i)=>{t.je(\"GlobalAveragePool\",n,{format:i?\"NHWC\":\"NCHW\"})},1269018:(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y)=>{t.je(\"AveragePool\",n,{format:Y?\"NHWC\":\"NCHW\",auto_pad:i,ceil_mode:o,count_include_pad:a,storage_order:l,dilations:[p,h],kernel_shape:[y,b],pads:[_,S,k,z],strides:[T,V]})},1269302:(n,i)=>{t.je(\"GlobalAveragePool\",n,{format:i?\"NHWC\":\"NCHW\"})},1269393:(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y)=>{t.je(\"AveragePool\",n,{format:Y?\"NHWC\":\"NCHW\",auto_pad:i,ceil_mode:o,count_include_pad:a,storage_order:l,dilations:[p,h],kernel_shape:[y,b],pads:[_,S,k,z],strides:[T,V]})},1269677:(n,i)=>{t.je(\"GlobalMaxPool\",n,{format:i?\"NHWC\":\"NCHW\"})},1269764:(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y)=>{t.je(\"MaxPool\",n,{format:Y?\"NHWC\":\"NCHW\",auto_pad:i,ceil_mode:o,count_include_pad:a,storage_order:l,dilations:[p,h],kernel_shape:[y,b],pads:[_,S,k,z],strides:[T,V]})},1270044:(n,i)=>{t.je(\"GlobalMaxPool\",n,{format:i?\"NHWC\":\"NCHW\"})},1270131:(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y)=>{t.je(\"MaxPool\",n,{format:Y?\"NHWC\":\"NCHW\",auto_pad:i,ceil_mode:o,count_include_pad:a,storage_order:l,dilations:[p,h],kernel_shape:[y,b],pads:[_,S,k,z],strides:[T,V]})},1270411:(n,i,o,a,l)=>{t.je(\"Gemm\",n,{alpha:i,beta:o,transA:a,transB:l})},1270515:n=>{t.je(\"MatMul\",n,void 
0)},1270569:(n,i,o,a)=>{t.je(\"ArgMax\",n,{keepDims:!!i,selectLastIndex:!!o,axis:a})},1270677:(n,i,o,a)=>{t.je(\"ArgMin\",n,{keepDims:!!i,selectLastIndex:!!o,axis:a})},1270785:(n,i)=>{t.je(\"Softmax\",n,{axis:i})},1270848:(n,i)=>{t.je(\"Concat\",n,{axis:i})},1270908:(n,i,o,a,l)=>{t.je(\"Split\",n,{axis:i,numOutputs:o,splitSizes:a?Array.from(X.subarray(a>>>0,l>>>0)):[]})},1271048:n=>{t.je(\"Expand\",n,void 0)},1271102:(n,i)=>{t.je(\"Gather\",n,{axis:Number(i)})},1271173:(n,i)=>{t.je(\"GatherElements\",n,{axis:Number(i)})},1271252:(n,i,o,a,l,p,h,y,b,_,S)=>{t.je(\"Resize\",n,{antialias:i,axes:o?Array.from(X.subarray(o>>>0,a>>>0)):[],coordinateTransformMode:qe(l),cubicCoeffA:p,excludeOutside:h,extrapolationValue:y,keepAspectRatioPolicy:qe(b),mode:qe(_),nearestMode:qe(S)})},1271598:(n,i,o,a,l,p,h)=>{t.je(\"Slice\",n,{starts:i?Array.from(X.subarray(i>>>0,o>>>0)):[],ends:a?Array.from(X.subarray(a>>>0,l>>>0)):[],axes:p?Array.from(X.subarray(p>>>0,h>>>0)):[]})},1271814:n=>{t.je(\"Tile\",n,void 0)},1271866:(n,i,o,a)=>{t.je(\"LayerNormalization\",n,{axis:i,epsilon:o,simplified:!!a})},1271977:(n,i,o)=>{t.je(\"InstanceNormalization\",n,{epsilon:i,format:o?\"NHWC\":\"NCHW\"})},1272091:(n,i,o)=>{t.je(\"InstanceNormalization\",n,{epsilon:i,format:o?\"NHWC\":\"NCHW\"})},1272205:n=>{t.je(\"Range\",n,void 0)},1272258:(n,i)=>{t.je(\"Einsum\",n,{equation:qe(i)})},1272339:(n,i,o,a,l)=>{t.je(\"Pad\",n,{mode:i,value:o,pads:a?Array.from(X.subarray(a>>>0,l>>>0)):[]})},1272466:(n,i,o,a,l,p)=>{t.je(\"BatchNormalization\",n,{epsilon:i,momentum:o,spatial:!!l,trainingMode:!!a,format:p?\"NHWC\":\"NCHW\"})},1272635:(n,i,o,a,l,p)=>{t.je(\"BatchNormalization\",n,{epsilon:i,momentum:o,spatial:!!l,trainingMode:!!a,format:p?\"NHWC\":\"NCHW\"})},1272804:(n,i,o)=>{t.je(\"CumSum\",n,{exclusive:Number(i),reverse:Number(o)})},1272901:(n,i,o,a,l,p,h,y,b)=>{t.je(\"Attention\",n,{numHeads:i,isUnidirectional:o,maskFilterValue:a,scale:l,doRotary:p,qkvHiddenSizes:h?Array.from(X.subarray(Number(y)>>>0,Number(y)+h>>>0)):[],pastPresentShareBuffer:!!b})},1273173:n=>{t.je(\"BiasAdd\",n,void 0)},1273228:n=>{t.je(\"BiasSplitGelu\",n,void 0)},1273289:n=>{t.je(\"FastGelu\",n,void 0)},1273345:(n,i,o,a,l,p,h,y,b,_,S,k,z)=>{t.je(\"Conv\",n,{format:b?\"NHWC\":\"NCHW\",auto_pad:i,dilations:[o],group:a,kernel_shape:[l],pads:p?Array.from(X.subarray(p>>>0,h>>>0)):[],strides:[y],w_is_const:()=>!!J[_>>>0],activation:qe(S),activation_params:k?Array.from(we.subarray(k>>>0,z>>>0)):[]})},1273715:(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y)=>{t.je(\"Conv\",n,{format:k?\"NHWC\":\"NCHW\",auto_pad:i,dilations:[o,a],group:l,kernel_shape:[p,h],pads:y?Array.from(X.subarray(y>>>0,b>>>0)):[],strides:[_,S],w_is_const:()=>!!J[z>>>0],activation:qe(T),activation_params:V?Array.from(we.subarray(V>>>0,Y>>>0)):[]})},1274106:n=>{t.je(\"Gelu\",n,void 0)},1274158:(n,i,o,a,l,p)=>{t.je(\"MatMulNBits\",n,{k:i,n:o,accuracyLevel:a,bits:l,blockSize:p})},1274285:(n,i,o,a,l,p)=>{t.je(\"MultiHeadAttention\",n,{numHeads:i,isUnidirectional:o,maskFilterValue:a,scale:l,doRotary:p})},1274444:(n,i,o,a,l)=>{t.je(\"RotaryEmbedding\",n,{interleaved:!!i,numHeads:o,rotaryEmbeddingDim:a,scale:l})},1274583:(n,i,o)=>{t.je(\"SkipLayerNormalization\",n,{epsilon:i,simplified:!!o})},1274685:(n,i,o)=>{t.je(\"SkipLayerNormalization\",n,{epsilon:i,simplified:!!o})},1274787:(n,i,o,a)=>{t.je(\"LayerNormalization\",n,{axis:i,epsilon:o,simplified:!!a})},1274898:n=>{t.di(n)},1274932:(n,i)=>t.fi(n,i,t.Oh.gi,t.Oh.errors)};function Vt(n){this.name=\"ExitStatus\",this.message=`Program terminated with 
exit(${n})`,this.status=n}var pt=[],qt=0,ut=0;class cr{constructor(i){this.Nh=i,this.Ih=i-24}}var fr=n=>{var i=ut;if(!i)return er(0),0;var o=new cr(i);pe[o.Ih+16>>>2>>>0]=i;var a=pe[o.Ih+4>>>2>>>0];if(!a)return er(0),i;for(var l in n){var p=n[l];if(p===0||p===a)break;if(Vn(p,a,o.Ih+16))return er(p),i}return er(a),i},zr=typeof TextDecoder<\"u\"?new TextDecoder(\"utf8\"):void 0,hn=(n,i,o)=>{i>>>=0;var a=i+o;for(o=i;n[o]&&!(o>=a);)++o;if(16l?a+=String.fromCharCode(l):(l-=65536,a+=String.fromCharCode(55296|l>>10,56320|l&1023))}}else a+=String.fromCharCode(l)}return a},qe=(n,i)=>(n>>>=0)?hn(oe,n,i):\"\",Kt=n=>{for(var i=0,o=0;o=a?i++:2047>=a?i+=2:55296<=a&&57343>=a?(i+=4,++o):i+=3}return i},vt=(n,i,o,a)=>{if(o>>>=0,!(0=h){var y=n.charCodeAt(++p);h=65536+((h&1023)<<10)|y&1023}if(127>=h){if(o>=a)break;i[o++>>>0]=h}else{if(2047>=h){if(o+1>=a)break;i[o++>>>0]=192|h>>6}else{if(65535>=h){if(o+2>=a)break;i[o++>>>0]=224|h>>12}else{if(o+3>=a)break;i[o++>>>0]=240|h>>18,i[o++>>>0]=128|h>>12&63}i[o++>>>0]=128|h>>6&63}i[o++>>>0]=128|h&63}}return i[o>>>0]=0,o-l},pr,mt=n=>{for(var i=\"\";oe[n>>>0];)i+=pr[oe[n++>>>0]];return i},mr={},hr={},gn={},it;function yn(n,i,o={}){var a=i.name;if(!n)throw new it(`type \"${a}\" must have a positive integer typeid pointer`);if(hr.hasOwnProperty(n)){if(o.Xh)return;throw new it(`Cannot register type '${a}' twice`)}hr[n]=i,delete gn[n],mr.hasOwnProperty(n)&&(i=mr[n],delete mr[n],i.forEach(l=>l()))}function ht(n,i,o={}){if(!(\"argPackAdvance\"in i))throw new TypeError(\"registerType registeredInstance requires argPackAdvance\");return yn(n,i,o)}var Se=(n,i,o)=>{switch(i){case 1:return o?a=>J[a>>>0]:a=>oe[a>>>0];case 2:return o?a=>le[a>>>1>>>0]:a=>ge[a>>>1>>>0];case 4:return o?a=>X[a>>>2>>>0]:a=>pe[a>>>2>>>0];case 8:return o?a=>ue[a>>>3]:a=>me[a>>>3];default:throw new TypeError(`invalid integer width (${i}): ${n}`)}},Wt=[],wt=[];function Mr(n){n>>>=0,9{if(!n)throw new it(\"Cannot use deleted val. 
handle = \"+n);return wt[n]},gt=n=>{switch(n){case void 0:return 2;case null:return 4;case!0:return 6;case!1:return 8;default:let i=Wt.pop()||wt.length;return wt[i]=n,wt[i+1]=1,i}};function gr(n){return this.fromWireType(pe[n>>>2>>>0])}var bn={name:\"emscripten::val\",fromWireType:n=>{var i=ot(n);return Mr(n),i},toWireType:(n,i)=>gt(i),argPackAdvance:8,readValueFromPointer:gr,Mh:null},vn=(n,i)=>{switch(i){case 4:return function(o){return this.fromWireType(we[o>>>2>>>0])};case 8:return function(o){return this.fromWireType(Ee[o>>>3>>>0])};default:throw new TypeError(`invalid float width (${i}): ${n}`)}},jr=typeof TextDecoder<\"u\"?new TextDecoder(\"utf-16le\"):void 0,Ur=(n,i)=>{for(var o=n>>1,a=o+i/2;!(o>=a)&&ge[o>>>0];)++o;if(o<<=1,32>>0,o>>>0));for(o=\"\",a=0;!(a>=i/2);++a){var l=le[n+2*a>>>1>>>0];if(l==0)break;o+=String.fromCharCode(l)}return o},wn=(n,i,o)=>{if(o??=2147483647,2>o)return 0;o-=2;var a=i;o=o<2*n.length?o/2:n.length;for(var l=0;l>>1>>>0]=n.charCodeAt(l),i+=2;return le[i>>>1>>>0]=0,i-a},_n=n=>2*n.length,$n=(n,i)=>{for(var o=0,a=\"\";!(o>=i/4);){var l=X[n+4*o>>>2>>>0];if(l==0)break;++o,65536<=l?(l-=65536,a+=String.fromCharCode(55296|l>>10,56320|l&1023)):a+=String.fromCharCode(l)}return a},Cn=(n,i,o)=>{if(i>>>=0,o??=2147483647,4>o)return 0;var a=i;o=a+o-4;for(var l=0;l=p){var h=n.charCodeAt(++l);p=65536+((p&1023)<<10)|h&1023}if(X[i>>>2>>>0]=p,i+=4,i+4>o)break}return X[i>>>2>>>0]=0,i-a},Sn=n=>{for(var i=0,o=0;o=a&&++o,i+=4}return i},yr=(n,i)=>{var o=hr[n];if(o===void 0)throw n=qr(n),o=mt(n),_t(n),new it(`${i} has unknown type ${o}`);return o},Vr=(n,i,o)=>{var a=[];return n=n.toWireType(a,o),a.length&&(pe[i>>>2>>>0]=gt(a)),n},Yt=n=>{try{n()}catch(i){ce(i)}},xn=n=>{if(!F)try{n();try{te=te=n=te,t.onExit?.(n),F=!0,w(n,new Vt(n))}catch(i){i instanceof Vt||i==\"unwind\"||w(1,i)}}catch(i){i instanceof Vt||i==\"unwind\"||w(1,i)}};function In(){var n=M,i={};for(let[o,a]of Object.entries(n))i[o]=typeof a==\"function\"?(...l)=>{Zt.push(o);try{return a(...l)}finally{F||(Zt.pop(),yt&&Ct===1&&Zt.length===0&&(Ct=0,Yt(Gs),typeof Fibers<\"u\"&&Fibers.mi()))}}:a;return i}var Ct=0,yt=null,Wr=0,Zt=[],br={},Qt={},Nt=0,vr=null,An=[];function Tn(){return new Promise((n,i)=>{vr={resolve:n,reject:i}})}function En(){var n=Jt(65548),i=n+12;pe[n>>>2>>>0]=i,pe[n+4>>>2>>>0]=i+65536,i=Zt[0];var o=br[i];return o===void 0&&(o=Nt++,br[i]=o,Qt[o]=i),X[n+8>>>2>>>0]=o,n}function Pn(n){if(!F){if(Ct===0){var i=!1,o=!1;n((a=0)=>{if(!F&&(Wr=a,i=!0,o)){Ct=2,Yt(()=>Hs(yt)),typeof Browser<\"u\"&&Browser.Sh.Wh&&Browser.Sh.resume(),a=!1;try{var l=(0,M[Qt[X[yt+8>>>2>>>0]]])()}catch(y){l=y,a=!0}var p=!1;if(!yt){var h=vr;h&&(vr=null,(a?h.reject:h.resolve)(l),p=!0)}if(a&&!p)throw l}}),o=!0,i||(Ct=1,yt=En(),typeof Browser<\"u\"&&Browser.Sh.Wh&&Browser.Sh.pause(),Yt(()=>Ns(yt)))}else Ct===2?(Ct=0,Yt(Ls),_t(yt),yt=null,An.forEach(xn)):ce(`invalid state: ${Ct}`);return Wr}}function wr(n){return Pn(i=>{n().then(i)})}var Xt=[],_i={},Ge=n=>{var i=_i[n];return i===void 0?mt(n):i},_r=()=>typeof globalThis==\"object\"?globalThis:Function(\"return this\")(),Nr=n=>{var i=Xt.length;return Xt.push(n),i},kn=(n,i)=>{for(var o=Array(n),a=0;a>>2>>>0],\"parameter \"+a);return o},$r=(n,i)=>Object.defineProperty(i,\"name\",{value:n});function On(n){var i=Function;if(!(i instanceof Function))throw new TypeError(`new_ called with constructor type ${typeof i} which is not a function`);var o=$r(i.name||\"unknownFunctionName\",function(){});return o.prototype=i.prototype,o=new o,n=i.apply(o,n),n instanceof Object?n:o}var 
jt=n=>n%4===0&&(n%100!==0||n%400===0),Gr=[0,31,60,91,121,152,182,213,244,274,305,335],Hr=[0,31,59,90,120,151,181,212,243,273,304,334],Cr=[],Rn=(n,i)=>{Cr.length=0;for(var o;o=oe[n++>>>0];){var a=o!=105;a&=o!=112,i+=a&&i%8?4:0,Cr.push(o==112?pe[i>>>2>>>0]:o==106?ue[i>>>3]:o==105?X[i>>>2>>>0]:Ee[i>>>3>>>0]),i+=a?8:4}return Cr},Sr={},Lr=()=>{if(!xr){var n={USER:\"web_user\",LOGNAME:\"web_user\",PATH:\"/\",PWD:\"/\",HOME:\"/home/web_user\",LANG:(typeof navigator==\"object\"&&navigator.languages&&navigator.languages[0]||\"C\").replace(\"-\",\"_\")+\".UTF-8\",_:g||\"./this.program\"},i;for(i in Sr)Sr[i]===void 0?delete n[i]:n[i]=Sr[i];var o=[];for(i in n)o.push(`${i}=${n[i]}`);xr=o}return xr},xr,$i=[null,[],[]],Bn=[31,29,31,30,31,30,31,31,30,31,30,31],Fr=[31,28,31,30,31,30,31,31,30,31,30,31];function Ir(n){var i=Array(Kt(n)+1);return vt(n,i,0,i.length),i}function Dn(n,i,o,a){function l(T,V,Y){for(T=typeof T==\"number\"?T.toString():T||\"\";T.lengthde?-1:0ie-T.getDate())V-=ie-T.getDate()+1,T.setDate(1),11>Y?T.setMonth(Y+1):(T.setMonth(0),T.setFullYear(T.getFullYear()+1));else{T.setDate(T.getDate()+V);break}}return Y=new Date(T.getFullYear()+1,0,4),V=y(new Date(T.getFullYear(),0,4)),Y=y(Y),0>=h(V,T)?0>=h(Y,T)?T.getFullYear()+1:T.getFullYear():T.getFullYear()-1}n>>>=0,i>>>=0,o>>>=0,a>>>=0;var _=pe[a+40>>>2>>>0];a={ji:X[a>>>2>>>0],ii:X[a+4>>>2>>>0],Qh:X[a+8>>>2>>>0],Th:X[a+12>>>2>>>0],Rh:X[a+16>>>2>>>0],Lh:X[a+20>>>2>>>0],Jh:X[a+24>>>2>>>0],Kh:X[a+28>>>2>>>0],li:X[a+32>>>2>>>0],hi:X[a+36>>>2>>>0],ki:_?qe(_):\"\"},o=qe(o),_={\"%c\":\"%a %b %d %H:%M:%S %Y\",\"%D\":\"%m/%d/%y\",\"%F\":\"%Y-%m-%d\",\"%h\":\"%b\",\"%r\":\"%I:%M:%S %p\",\"%R\":\"%H:%M\",\"%T\":\"%H:%M:%S\",\"%x\":\"%m/%d/%y\",\"%X\":\"%H:%M:%S\",\"%Ec\":\"%c\",\"%EC\":\"%C\",\"%Ex\":\"%m/%d/%y\",\"%EX\":\"%H:%M:%S\",\"%Ey\":\"%y\",\"%EY\":\"%Y\",\"%Od\":\"%d\",\"%Oe\":\"%e\",\"%OH\":\"%H\",\"%OI\":\"%I\",\"%Om\":\"%m\",\"%OM\":\"%M\",\"%OS\":\"%S\",\"%Ou\":\"%u\",\"%OU\":\"%U\",\"%OV\":\"%V\",\"%Ow\":\"%w\",\"%OW\":\"%W\",\"%Oy\":\"%y\"};for(var S in _)o=o.replace(new RegExp(S,\"g\"),_[S]);var k=\"Sunday Monday Tuesday Wednesday Thursday Friday Saturday\".split(\" \"),z=\"January February March April May June July August September October November December\".split(\" \");_={\"%a\":T=>k[T.Jh].substring(0,3),\"%A\":T=>k[T.Jh],\"%b\":T=>z[T.Rh].substring(0,3),\"%B\":T=>z[T.Rh],\"%C\":T=>p((T.Lh+1900)/100|0,2),\"%d\":T=>p(T.Th,2),\"%e\":T=>l(T.Th,2,\" \"),\"%g\":T=>b(T).toString().substring(2),\"%G\":b,\"%H\":T=>p(T.Qh,2),\"%I\":T=>(T=T.Qh,T==0?T=12:12{for(var V=0,Y=0;Y<=T.Rh-1;V+=(jt(T.Lh+1900)?Bn:Fr)[Y++]);return p(T.Th+V,3)},\"%m\":T=>p(T.Rh+1,2),\"%M\":T=>p(T.ii,2),\"%n\":()=>`\n`,\"%p\":T=>0<=T.Qh&&12>T.Qh?\"AM\":\"PM\",\"%S\":T=>p(T.ji,2),\"%t\":()=>\"\t\",\"%u\":T=>T.Jh||7,\"%U\":T=>p(Math.floor((T.Kh+7-T.Jh)/7),2),\"%V\":T=>{var V=Math.floor((T.Kh+7-(T.Jh+6)%7)/7);if(2>=(T.Jh+371-T.Kh-2)%7&&V++,V)V==53&&(Y=(T.Jh+371-T.Kh)%7,Y==4||Y==3&&jt(T.Lh)||(V=1));else{V=52;var Y=(T.Jh+7-T.Kh-1)%7;(Y==4||Y==5&&jt(T.Lh%400-1))&&V++}return p(V,2)},\"%w\":T=>T.Jh,\"%W\":T=>p(Math.floor((T.Kh+7-(T.Jh+6)%7)/7),2),\"%y\":T=>(T.Lh+1900).toString().substring(2),\"%Y\":T=>T.Lh+1900,\"%z\":T=>{T=T.hi;var V=0<=T;return T=Math.abs(T)/60,(V?\"+\":\"-\")+(\"0000\"+(T/60*100+T%60)).slice(-4)},\"%Z\":T=>T.ki,\"%%\":()=>\"%\"},o=o.replace(/%%/g,\"\\0\\0\");for(S in _)o.includes(S)&&(o=o.replace(new RegExp(S,\"g\"),_[S](a)));return o=o.replace(/\\0\\0/g,\"%\"),S=Ir(o),S.length>i?0:(J.set(S,n>>>0),S.length-1)}for(var 
St=Array(256),lt=0;256>lt;++lt)St[lt]=String.fromCharCode(lt);pr=St,it=t.BindingError=class extends Error{constructor(n){super(n),this.name=\"BindingError\"}},t.InternalError=class extends Error{constructor(n){super(n),this.name=\"InternalError\"}},wt.push(0,1,void 0,1,null,1,!0,1,!1,1),t.count_emval_handles=()=>wt.length/2-5-Wt.length;var zn={bd:function(n,i,o){return wr(async()=>{await t.bi(n,i,o)})},v:function(n){if(n=new cr(n>>>0),J[n.Ih+12>>>0]==0&&(J[n.Ih+12>>>0]=1,qt--),J[n.Ih+13>>>0]=0,pt.push(n),Un(n.Nh),Wn(pe[n.Ih+4>>>2>>>0]))n=pe[n.Nh>>>2>>>0];else{var i=pe[n.Ih+16>>>2>>>0];n=i!==0?i:n.Nh}return n},N:()=>{W(0,0);var n=pt.pop();jn(n.Nh),ut=0},a:function(){return fr([])},m:function(n){return fr([n>>>0])},x:function(n,i){return fr([n>>>0,i>>>0])},q:function(n,i,o){return fr([n>>>0,i>>>0,o>>>0])},Bb:()=>{var n=pt.pop();n||ce(\"no exception to throw\");var i=n.Nh;throw J[n.Ih+13>>>0]==0&&(pt.push(n),J[n.Ih+13>>>0]=1,J[n.Ih+12>>>0]=0,qt++),ut=i,ut},s:function(n,i,o){n>>>=0;var a=new cr(n);throw pe[a.Ih+16>>>2>>>0]=0,pe[a.Ih+4>>>2>>>0]=i>>>0,pe[a.Ih+8>>>2>>>0]=o>>>0,ut=n,qt++,ut},fb:()=>qt,g:function(n){throw ut||=n>>>0,ut},Cb:function(){return 0},$c:function(){},Mc:function(){},Oc:function(){},Gc:function(){return 0},Zc:function(){},Uc:function(){},Yc:function(){},_b:function(){},Nc:function(){},Kc:function(){},_c:function(){},Lc:function(){},Wb:function(n,i,o){i=mt(i>>>0),ht(n>>>0,{name:i,fromWireType:a=>a,toWireType:function(a,l){if(typeof l!=\"bigint\"&&typeof l!=\"number\")throw l===null?l=\"null\":(a=typeof l,l=a===\"object\"||a===\"array\"||a===\"function\"?l.toString():\"\"+l),new TypeError(`Cannot convert \"${l}\" to ${this.name}`);return typeof l==\"number\"&&(l=BigInt(l)),l},argPackAdvance:8,readValueFromPointer:Se(i,o>>>0,i.indexOf(\"u\")==-1),Mh:null})},Ec:function(n,i,o,a){i=mt(i>>>0),ht(n>>>0,{name:i,fromWireType:function(l){return!!l},toWireType:function(l,p){return p?o:a},argPackAdvance:8,readValueFromPointer:function(l){return this.fromWireType(oe[l>>>0])},Mh:null})},Cc:function(n){return ht(n>>>0,bn)},Vb:function(n,i,o){i=mt(i>>>0),ht(n>>>0,{name:i,fromWireType:a=>a,toWireType:(a,l)=>l,argPackAdvance:8,readValueFromPointer:vn(i,o>>>0),Mh:null})},Aa:function(n,i,o,a,l){if(n>>>=0,o>>>=0,i=mt(i>>>0),l===-1&&(l=4294967295),l=y=>y,a===0){var p=32-8*o;l=y=>y<
<p>
>>p}var h=i.includes(\"unsigned\")?function(y,b){return b>>>0}:function(y,b){return b};ht(n,{name:i,fromWireType:l,toWireType:h,argPackAdvance:8,readValueFromPointer:Se(i,o,a!==0),Mh:null})},_:function(n,i,o){function a(p){return new l(J.buffer,pe[p+4>>>2>>>0],pe[p>>>2>>>0])}var l=[Int8Array,Uint8Array,Int16Array,Uint16Array,Int32Array,Uint32Array,Float32Array,Float64Array,BigInt64Array,BigUint64Array][i];o=mt(o>>>0),ht(n>>>0,{name:o,fromWireType:a,argPackAdvance:8,readValueFromPointer:a},{Xh:!0})},Xb:function(n,i){i=mt(i>>>0);var o=i===\"std::string\";ht(n>>>0,{name:i,fromWireType:function(a){var l=pe[a>>>2>>>0],p=a+4;if(o)for(var h=p,y=0;y<=l;++y){var b=p+y;if(y==l||oe[b>>>0]==0){if(h=qe(h,b-h),_===void 0)var _=h;else _+=String.fromCharCode(0),_+=h;h=b+1}}else{for(_=Array(l),y=0;y>>0]);_=_.join(\"\")}return _t(a),_},toWireType:function(a,l){l instanceof ArrayBuffer&&(l=new Uint8Array(l));var p=typeof l==\"string\";if(!(p||l instanceof Uint8Array||l instanceof Uint8ClampedArray||l instanceof Int8Array))throw new it(\"Cannot pass non-string to std::string\");var h=o&&p?Kt(l):l.length,y=Jt(4+h+1),b=y+4;if(pe[y>>>2>>>0]=h,o&&p)vt(l,oe,b,h+1);else if(p)for(p=0;p>>0]=_}else for(p=0;p>>0]=l[p];return a!==null&&a.push(_t,y),y},argPackAdvance:8,readValueFromPointer:gr,Mh(a){_t(a)}})},Ab:function(n,i,o){if(i>>>=0,o>>>=0,o=mt(o),i===2)var a=Ur,l=wn,p=_n,h=y=>ge[y>>>1>>>0];else i===4&&(a=$n,l=Cn,p=Sn,h=y=>pe[y>>>2>>>0]);ht(n>>>0,{name:o,fromWireType:y=>{for(var b=pe[y>>>2>>>0],_,S=y+4,k=0;k<=b;++k){var z=y+4+k*i;(k==b||h(z)==0)&&(S=a(S,z-S),_===void 0?_=S:(_+=String.fromCharCode(0),_+=S),S=z+i)}return _t(y),_},toWireType:(y,b)=>{if(typeof b!=\"string\")throw new it(`Cannot pass non-string to C++ string type ${o}`);var _=p(b),S=Jt(4+_+i);return pe[S>>>2>>>0]=_/i,l(b,S+4,_+i),y!==null&&y.push(_t,S),S},argPackAdvance:8,readValueFromPointer:gr,Mh(y){_t(y)}})},Fc:function(n,i){i=mt(i>>>0),ht(n>>>0,{Yh:!0,name:i,argPackAdvance:0,fromWireType:()=>{},toWireType:()=>{}})},ad:()=>1,kd:function(n,i,o){return i>>>=0,o>>>=0,n=ot(n>>>0),i=yr(i,\"emval::as\"),Vr(i,o,n)},Cd:function(n){return n>>>=0,wr(()=>(n=ot(n),n.then(gt)))},ud:function(n,i,o,a){return o>>>=0,a>>>=0,n=Xt[n>>>0],i=ot(i>>>0),n(null,i,o,a)},ia:function(n,i,o,a,l){return o>>>=0,a>>>=0,l>>>=0,n=Xt[n>>>0],i=ot(i>>>0),o=Ge(o),n(i,i[o],a,l)},Bc:Mr,qd:function(n,i){return i>>>=0,n=ot(n>>>0),i=ot(i),n==i},zd:function(n){return n>>>=0,n===0?gt(_r()):(n=Ge(n),gt(_r()[n]))},ha:function(n,i,o){i=kn(n,i>>>0);var a=i.shift();n--;var l=`return function (obj, func, destructorsRef, args) {\n`,p=0,h=[];o===0&&h.push(\"obj\");for(var y=[\"retType\"],b=[a],_=0;_S.name).join(\", \")}) => ${a.name}>`,Nr($r(o,n))},yd:function(n,i){return i>>>=0,n=ot(n>>>0),i=ot(i),gt(n[i])},ba:function(n){n>>>=0,9>>0);for(var i=Array(n.length),o=0;o>>0))},Xa:function(){return gt({})},vd:function(n){n>>>=0;for(var i=ot(n);i.length;){var o=i.pop();i.pop()(o)}Mr(n)},sd:function(n,i,o){i>>>=0,o>>>=0,n=ot(n>>>0),i=ot(i),o=ot(o),n[i]=o},zb:function(n,i){return i>>>=0,n=yr(n>>>0,\"_emval_take_value\"),n=n.readValueFromPointer(i),gt(n)},Rc:function(n,i){n=-9007199254740992>n||9007199254740992>>=0,n=new Date(1e3*n),X[i>>>2>>>0]=n.getUTCSeconds(),X[i+4>>>2>>>0]=n.getUTCMinutes(),X[i+8>>>2>>>0]=n.getUTCHours(),X[i+12>>>2>>>0]=n.getUTCDate(),X[i+16>>>2>>>0]=n.getUTCMonth(),X[i+20>>>2>>>0]=n.getUTCFullYear()-1900,X[i+24>>>2>>>0]=n.getUTCDay(),X[i+28>>>2>>>0]=(n.getTime()-Date.UTC(n.getUTCFullYear(),0,1,0,0,0,0))/864e5|0},Sc:function(n,i){n=-9007199254740992>n||9007199254740992>>=0,n=new 
Date(1e3*n),X[i>>>2>>>0]=n.getSeconds(),X[i+4>>>2>>>0]=n.getMinutes(),X[i+8>>>2>>>0]=n.getHours(),X[i+12>>>2>>>0]=n.getDate(),X[i+16>>>2>>>0]=n.getMonth(),X[i+20>>>2>>>0]=n.getFullYear()-1900,X[i+24>>>2>>>0]=n.getDay(),X[i+28>>>2>>>0]=(jt(n.getFullYear())?Gr:Hr)[n.getMonth()]+n.getDate()-1|0,X[i+36>>>2>>>0]=-(60*n.getTimezoneOffset());var o=new Date(n.getFullYear(),6,1).getTimezoneOffset(),a=new Date(n.getFullYear(),0,1).getTimezoneOffset();X[i+32>>>2>>>0]=(o!=a&&n.getTimezoneOffset()==Math.min(a,o))|0},Tc:function(n){n>>>=0;var i=new Date(X[n+20>>>2>>>0]+1900,X[n+16>>>2>>>0],X[n+12>>>2>>>0],X[n+8>>>2>>>0],X[n+4>>>2>>>0],X[n>>>2>>>0],0),o=X[n+32>>>2>>>0],a=i.getTimezoneOffset(),l=new Date(i.getFullYear(),6,1).getTimezoneOffset(),p=new Date(i.getFullYear(),0,1).getTimezoneOffset(),h=Math.min(p,l);return 0>o?X[n+32>>>2>>>0]=+(l!=p&&h==a):0>>2>>>0]=i.getDay(),X[n+28>>>2>>>0]=(jt(i.getFullYear())?Gr:Hr)[i.getMonth()]+i.getDate()-1|0,X[n>>>2>>>0]=i.getSeconds(),X[n+4>>>2>>>0]=i.getMinutes(),X[n+8>>>2>>>0]=i.getHours(),X[n+12>>>2>>>0]=i.getDate(),X[n+16>>>2>>>0]=i.getMonth(),X[n+20>>>2>>>0]=i.getYear(),n=i.getTime(),BigInt(isNaN(n)?-1:n/1e3)},Pc:function(){return-52},Qc:function(){},Ic:function(n,i,o,a){o>>>=0,a>>>=0;var l=new Date().getFullYear(),p=new Date(l,0,1),h=new Date(l,6,1);l=p.getTimezoneOffset();var y=h.getTimezoneOffset();pe[n>>>0>>>2>>>0]=60*Math.max(l,y),X[i>>>0>>>2>>>0]=+(l!=y),n=b=>b.toLocaleTimeString(void 0,{hour12:!1,timeZoneName:\"short\"}).split(\" \")[1],p=n(p),h=n(h),y{ce(\"\")},A:function(n,i,o){return n>>>=0,i=Rn(i>>>0,o>>>0),Ut[n](...i)},bc:function(n,i,o){return n>>>=0,i=Rn(i>>>0,o>>>0),Ut[n](...i)},$b:()=>Date.now(),Jc:function(){return 4294901760},ga:()=>performance.now(),Hc:function(n){n>>>=0;var i=oe.length;if(4294901760=o;o*=2){var a=i*(1+.2/o);a=Math.min(a,n+100663296);var l=Math;a=Math.max(n,a);e:{l=(l.min.call(l,4294901760,a+(65536-a%65536)%65536)-L.buffer.byteLength+65535)/65536;try{L.grow(l),Pe();var p=1;break e}catch{}p=void 0}if(p)return!0}return!1},Wc:function(n,i){n>>>=0,i>>>=0;var o=0;return Lr().forEach((a,l)=>{var p=i+o;for(l=pe[n+4*l>>>2>>>0]=p,p=0;p>>0]=a.charCodeAt(p);J[l>>>0]=0,o+=a.length+1}),0},Xc:function(n,i){n>>>=0,i>>>=0;var o=Lr();pe[n>>>2>>>0]=o.length;var a=0;return o.forEach(l=>a+=l.length+1),pe[i>>>2>>>0]=a,0},Db:()=>52,Zb:function(){return 52},Vc:function(){return 70},Yb:function(n,i,o,a){i>>>=0,o>>>=0,a>>>=0;for(var l=0,p=0;p>>2>>>0],y=pe[i+4>>>2>>>0];i+=8;for(var b=0;b>>0],S=$i[n];_===0||_===10?((n===1?j:U)(hn(S,0)),S.length=0):S.push(_)}l+=y}return 
pe[a>>>2>>>0]=l,0},xb:rp,cd:Th,ua:Tm,W:yh,$:bh,ra:nm,ta:ep,dd:Ah,ob:Am,P:Dp,z:Uf,b:$f,Ub:Bf,ya:Df,e:wf,kb:hh,h:vf,X:nh,i:Tf,ed:xh,j:Af,t:If,r:Lf,o:Qf,Wa:ap,Ca:Nm,ma:qf,Qb:Op,db:zp,Ib:Gm,mb:Om,kc:oh,xc:lp,hc:lh,ic:sh,ac:Sh,oa:um,yb:Zf,Ba:rh,Eb:$h,ea:lm,jc:ah,Ta:jp,F:Sf,G:_p,Gb:ih,jd:uh,qa:$m,O:xp,V:$p,T:Vp,y:zf,Fb:ph,gc:dh,D:Ym,Hb:Xm,id:ch,Ua:Bp,wa:hm,lc:Jm,cc:Ch,Nb:om,aa:tp,I:Mf,C:mp,_a:Zm,fc:fh,Q:Sp,d:xf,ab:Ap,n:Cf,Ya:mh,va:Im,wb:dp,f:_f,yc:up,da:th,gb:Ep,Da:im,lb:Rm,hb:fp,c:Ef,vc:gp,od:Hm,k:Pf,tc:Tp,l:kf,wc:pp,sc:Pp,rd:Dm,p:Of,Ra:Lp,tb:Hp,Qa:Fp,Kb:Em,B:Ff,K:jf,S:Kf,$a:Lm,pc:_m,ub:Ip,za:Up,ka:Rf,xa:Rp,Sb:Yf,La:Cm,jb:Ih,Ga:jm,nc:Um,Ha:Mm,Ia:Bm,fd:_h,xd:kp,Z:Jp,pa:xm,pd:Wm,wd:Np,Mb:am,Ma:vm,Ka:Sm,Tb:Nf,rc:dm,Ja:Pm,Na:bm,pb:sm,la:gm,Ea:Wp,mc:Qm,qc:pm,Jb:Vm,Fa:qm,ja:vp,Ad:ip,nd:Fm,R:fm,eb:Hf,Za:eh,ec:vh,ib:cp,E:Jf,M:Wf,Va:sp,ld:Km,ca:op,nb:km,na:mm,dc:wh,Ac:Gf,u:Vf,L:yp,td:wm,Pb:Qp,oc:zm,Bd:np,Ob:Xp,Lb:cm,cb:Mp,zc:Xf,Rb:wp,Oa:ym,Y:em,uc:bp,J:Cp,gd:gh,vb:hp,sa:rm,H:tm,rb:Yp,Pa:Kp,Sa:Gp,sb:qp,qb:Zp,w:function(n){return n>>>0},Dc:Dn,fa:function(n,i,o,a){return Dn(n>>>0,i>>>0,o>>>0,a>>>0)}},M=function(){function n(o){return M=o.exports,M=In(),M=Eh(),L=M.Dd,Pe(),be.unshift(M.Ed),_e--,_e==0&&(Je!==null&&(clearInterval(Je),Je=null),ee&&(o=ee,ee=null,o())),M}var i={a:zn};if(_e++,t.instantiateWasm)try{return t.instantiateWasm(i,n)}catch(o){U(`Module.instantiateWasm callback failed with error: ${o}`),s(o)}return ct(i,function(o){n(o.instance)}).catch(s),{}}(),qr=n=>(qr=M.Fd)(n);t._OrtInit=(n,i)=>(t._OrtInit=M.Gd)(n,i),t._OrtGetLastError=(n,i)=>(t._OrtGetLastError=M.Hd)(n,i),t._OrtCreateSessionOptions=(n,i,o,a,l,p,h,y,b,_)=>(t._OrtCreateSessionOptions=M.Id)(n,i,o,a,l,p,h,y,b,_),t._OrtAppendExecutionProvider=(n,i)=>(t._OrtAppendExecutionProvider=M.Jd)(n,i),t._OrtAddFreeDimensionOverride=(n,i,o)=>(t._OrtAddFreeDimensionOverride=M.Kd)(n,i,o),t._OrtAddSessionConfigEntry=(n,i,o)=>(t._OrtAddSessionConfigEntry=M.Ld)(n,i,o),t._OrtReleaseSessionOptions=n=>(t._OrtReleaseSessionOptions=M.Md)(n),t._OrtCreateSession=(n,i,o)=>(t._OrtCreateSession=M.Nd)(n,i,o),t._OrtReleaseSession=n=>(t._OrtReleaseSession=M.Od)(n),t._OrtGetInputOutputCount=(n,i,o)=>(t._OrtGetInputOutputCount=M.Pd)(n,i,o),t._OrtGetInputName=(n,i)=>(t._OrtGetInputName=M.Qd)(n,i),t._OrtGetOutputName=(n,i)=>(t._OrtGetOutputName=M.Rd)(n,i),t._OrtFree=n=>(t._OrtFree=M.Sd)(n),t._OrtCreateTensor=(n,i,o,a,l,p)=>(t._OrtCreateTensor=M.Td)(n,i,o,a,l,p),t._OrtGetTensorData=(n,i,o,a,l)=>(t._OrtGetTensorData=M.Ud)(n,i,o,a,l),t._OrtReleaseTensor=n=>(t._OrtReleaseTensor=M.Vd)(n),t._OrtCreateRunOptions=(n,i,o,a)=>(t._OrtCreateRunOptions=M.Wd)(n,i,o,a),t._OrtAddRunConfigEntry=(n,i,o)=>(t._OrtAddRunConfigEntry=M.Xd)(n,i,o),t._OrtReleaseRunOptions=n=>(t._OrtReleaseRunOptions=M.Yd)(n),t._OrtCreateBinding=n=>(t._OrtCreateBinding=M.Zd)(n),t._OrtBindInput=(n,i,o)=>(t._OrtBindInput=M._d)(n,i,o),t._OrtBindOutput=(n,i,o,a)=>(t._OrtBindOutput=M.$d)(n,i,o,a),t._OrtClearBoundOutputs=n=>(t._OrtClearBoundOutputs=M.ae)(n),t._OrtReleaseBinding=n=>(t._OrtReleaseBinding=M.be)(n),t._OrtRunWithBinding=(n,i,o,a,l)=>(t._OrtRunWithBinding=M.ce)(n,i,o,a,l),t._OrtRun=(n,i,o,a,l,p,h,y)=>(t._OrtRun=M.de)(n,i,o,a,l,p,h,y),t._OrtEndProfiling=n=>(t._OrtEndProfiling=M.ee)(n),t._JsepOutput=(n,i,o)=>(t._JsepOutput=M.fe)(n,i,o),t._JsepGetNodeName=n=>(t._JsepGetNodeName=M.ge)(n);var 
Jt=t._malloc=n=>(Jt=t._malloc=M.he)(n),_t=t._free=n=>(_t=t._free=M.ie)(n),W=(n,i)=>(W=M.ke)(n,i),er=n=>(er=M.le)(n),N=n=>(N=M.me)(n),Mn=n=>(Mn=M.ne)(n),G=()=>(G=M.oe)(),jn=n=>(jn=M.pe)(n),Un=n=>(Un=M.qe)(n),Vn=(n,i,o)=>(Vn=M.re)(n,i,o),Wn=n=>(Wn=M.se)(n),tr=t.dynCall_vii=(n,i,o)=>(tr=t.dynCall_vii=M.te)(n,i,o),Ar=t.dynCall_iiii=(n,i,o,a)=>(Ar=t.dynCall_iiii=M.ue)(n,i,o,a),Kr=t.dynCall_iii=(n,i,o)=>(Kr=t.dynCall_iii=M.ve)(n,i,o),Yr=t.dynCall_ii=(n,i)=>(Yr=t.dynCall_ii=M.we)(n,i),Zr=t.dynCall_iiiiiii=(n,i,o,a,l,p,h)=>(Zr=t.dynCall_iiiiiii=M.xe)(n,i,o,a,l,p,h),Tr=t.dynCall_vi=(n,i)=>(Tr=t.dynCall_vi=M.ye)(n,i),Er=t.dynCall_v=n=>(Er=t.dynCall_v=M.ze)(n),Qr=t.dynCall_iiiiii=(n,i,o,a,l,p)=>(Qr=t.dynCall_iiiiii=M.Ae)(n,i,o,a,l,p),Xr=t.dynCall_iiij=(n,i,o,a)=>(Xr=t.dynCall_iiij=M.Be)(n,i,o,a),Jr=t.dynCall_iiiii=(n,i,o,a,l)=>(Jr=t.dynCall_iiiii=M.Ce)(n,i,o,a,l),Pr=t.dynCall_viii=(n,i,o,a)=>(Pr=t.dynCall_viii=M.De)(n,i,o,a),rr=t.dynCall_viiiii=(n,i,o,a,l,p)=>(rr=t.dynCall_viiiii=M.Ee)(n,i,o,a,l,p),kr=t.dynCall_viiii=(n,i,o,a,l)=>(kr=t.dynCall_viiii=M.Fe)(n,i,o,a,l),en=t.dynCall_viiiiii=(n,i,o,a,l,p,h)=>(en=t.dynCall_viiiiii=M.Ge)(n,i,o,a,l,p,h),tn=t.dynCall_viiji=(n,i,o,a,l)=>(tn=t.dynCall_viiji=M.He)(n,i,o,a,l),rn=t.dynCall_viiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k)=>(rn=t.dynCall_viiiiiiiiiii=M.Ie)(n,i,o,a,l,p,h,y,b,_,S,k),nn=t.dynCall_viiijjjii=(n,i,o,a,l,p,h,y,b)=>(nn=t.dynCall_viiijjjii=M.Je)(n,i,o,a,l,p,h,y,b),on=t.dynCall_iid=(n,i,o)=>(on=t.dynCall_iid=M.Ke)(n,i,o),Nn=t.dynCall_iif=(n,i,o)=>(Nn=t.dynCall_iif=M.Le)(n,i,o),nr=t.dynCall_iij=(n,i,o)=>(nr=t.dynCall_iij=M.Me)(n,i,o),an=t.dynCall_jii=(n,i,o)=>(an=t.dynCall_jii=M.Ne)(n,i,o),m=t.dynCall_i=n=>(m=t.dynCall_i=M.Oe)(n),v=t.dynCall_viiiiiiii=(n,i,o,a,l,p,h,y,b)=>(v=t.dynCall_viiiiiiii=M.Pe)(n,i,o,a,l,p,h,y,b),I=t.dynCall_viiiiij=(n,i,o,a,l,p,h)=>(I=t.dynCall_viiiiij=M.Qe)(n,i,o,a,l,p,h),D=t.dynCall_ji=(n,i)=>(D=t.dynCall_ji=M.Re)(n,i),H=t.dynCall_viij=(n,i,o,a)=>(H=t.dynCall_viij=M.Se)(n,i,o,a),Q=t.dynCall_iiiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k)=>(Q=t.dynCall_iiiiiiiiiiii=M.Te)(n,i,o,a,l,p,h,y,b,_,S,k),re=t.dynCall_viiiiiiiii=(n,i,o,a,l,p,h,y,b,_)=>(re=t.dynCall_viiiiiiiii=M.Ue)(n,i,o,a,l,p,h,y,b,_),ye=t.dynCall_ij=(n,i)=>(ye=t.dynCall_ij=M.Ve)(n,i),he=t.dynCall_iiiiij=(n,i,o,a,l,p)=>(he=t.dynCall_iiiiij=M.We)(n,i,o,a,l,p),fe=t.dynCall_j=n=>(fe=t.dynCall_j=M.Xe)(n),ve=t.dynCall_vij=(n,i,o)=>(ve=t.dynCall_vij=M.Ye)(n,i,o),Me=t.dynCall_viijjjiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k)=>(Me=t.dynCall_viijjjiiiiii=M.Ze)(n,i,o,a,l,p,h,y,b,_,S,k),je=t.dynCall_viiijiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k)=>(je=t.dynCall_viiijiiiiiii=M._e)(n,i,o,a,l,p,h,y,b,_,S,k),q=t.dynCall_iiiiiiii=(n,i,o,a,l,p,h,y)=>(q=t.dynCall_iiiiiiii=M.$e)(n,i,o,a,l,p,h,y),Ie=t.dynCall_viiiiiii=(n,i,o,a,l,p,h,y)=>(Ie=t.dynCall_viiiiiii=M.af)(n,i,o,a,l,p,h,y),Ue=t.dynCall_iiiiiiiij=(n,i,o,a,l,p,h,y,b)=>(Ue=t.dynCall_iiiiiiiij=M.bf)(n,i,o,a,l,p,h,y,b),kt=t.dynCall_viiiiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T)=>(kt=t.dynCall_viiiiiiiiiiiii=M.cf)(n,i,o,a,l,p,h,y,b,_,S,k,z,T),sn=t.dynCall_iiiiiiiii=(n,i,o,a,l,p,h,y,b)=>(sn=t.dynCall_iiiiiiiii=M.df)(n,i,o,a,l,p,h,y,b),yo=t.dynCall_iiiiijiiiii=(n,i,o,a,l,p,h,y,b,_,S)=>(yo=t.dynCall_iiiiijiiiii=M.ef)(n,i,o,a,l,p,h,y,b,_,S),bo=t.dynCall_vijjjiiij=(n,i,o,a,l,p,h,y,b)=>(bo=t.dynCall_vijjjiiij=M.ff)(n,i,o,a,l,p,h,y,b),vo=t.dynCall_fi=(n,i)=>(vo=t.dynCall_fi=M.gf)(n,i),wo=t.dynCall_fii=(n,i,o)=>(wo=t.dynCall_fii=M.hf)(n,i,o),_o=t.dynCall_di=(n,i)=>(_o=t.dynCall_di=M.jf)(n,i),$o=t.dynCall_dii=(n,i,o)=>($o=t.dynCall_dii=M.kf)(n,i,o),Co=t.dynCall_vijj=(n,i,o,a)=>
(Co=t.dynCall_vijj=M.lf)(n,i,o,a),So=t.dynCall_iiiiiiiiii=(n,i,o,a,l,p,h,y,b,_)=>(So=t.dynCall_iiiiiiiiii=M.mf)(n,i,o,a,l,p,h,y,b,_),xo=t.dynCall_viijiii=(n,i,o,a,l,p,h)=>(xo=t.dynCall_viijiii=M.nf)(n,i,o,a,l,p,h),Io=t.dynCall_viid=(n,i,o,a)=>(Io=t.dynCall_viid=M.of)(n,i,o,a),Ao=t.dynCall_viffiii=(n,i,o,a,l,p,h)=>(Ao=t.dynCall_viffiii=M.pf)(n,i,o,a,l,p,h),To=t.dynCall_viifiii=(n,i,o,a,l,p,h)=>(To=t.dynCall_viifiii=M.qf)(n,i,o,a,l,p,h),Eo=t.dynCall_viiiiidiidi=(n,i,o,a,l,p,h,y,b,_,S)=>(Eo=t.dynCall_viiiiidiidi=M.rf)(n,i,o,a,l,p,h,y,b,_,S),Po=t.dynCall_viiiiiiiiidi=(n,i,o,a,l,p,h,y,b,_,S,k)=>(Po=t.dynCall_viiiiiiiiidi=M.sf)(n,i,o,a,l,p,h,y,b,_,S,k),ko=t.dynCall_jiii=(n,i,o,a)=>(ko=t.dynCall_jiii=M.tf)(n,i,o,a),Oo=t.dynCall_vjiiiiii=(n,i,o,a,l,p,h,y)=>(Oo=t.dynCall_vjiiiiii=M.uf)(n,i,o,a,l,p,h,y),Ro=t.dynCall_viiid=(n,i,o,a,l)=>(Ro=t.dynCall_viiid=M.vf)(n,i,o,a,l),Bo=t.dynCall_viiiiiiiiiji=(n,i,o,a,l,p,h,y,b,_,S,k)=>(Bo=t.dynCall_viiiiiiiiiji=M.wf)(n,i,o,a,l,p,h,y,b,_,S,k),Do=t.dynCall_viji=(n,i,o,a)=>(Do=t.dynCall_viji=M.xf)(n,i,o,a),zo=t.dynCall_vijjjjjjjjjjjjji=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y)=>(zo=t.dynCall_vijjjjjjjjjjjjji=M.yf)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y),Mo=t.dynCall_viiiji=(n,i,o,a,l,p)=>(Mo=t.dynCall_viiiji=M.zf)(n,i,o,a,l,p),jo=t.dynCall_vijjjiiji=(n,i,o,a,l,p,h,y,b)=>(jo=t.dynCall_vijjjiiji=M.Af)(n,i,o,a,l,p,h,y,b),Uo=t.dynCall_iiiji=(n,i,o,a,l)=>(Uo=t.dynCall_iiiji=M.Bf)(n,i,o,a,l),Vo=t.dynCall_iiijiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T)=>(Vo=t.dynCall_iiijiiiiiiiiii=M.Cf)(n,i,o,a,l,p,h,y,b,_,S,k,z,T),Wo=t.dynCall_vj=(n,i)=>(Wo=t.dynCall_vj=M.Df)(n,i),No=t.dynCall_jjj=(n,i,o)=>(No=t.dynCall_jjj=M.Ef)(n,i,o),Go=t.dynCall_iiijiiiiii=(n,i,o,a,l,p,h,y,b,_)=>(Go=t.dynCall_iiijiiiiii=M.Ff)(n,i,o,a,l,p,h,y,b,_),Ho=t.dynCall_vfiii=(n,i,o,a,l)=>(Ho=t.dynCall_vfiii=M.Gf)(n,i,o,a,l),Lo=t.dynCall_viiiiff=(n,i,o,a,l,p,h)=>(Lo=t.dynCall_viiiiff=M.Hf)(n,i,o,a,l,p,h),Fo=t.dynCall_viiiiiff=(n,i,o,a,l,p,h,y)=>(Fo=t.dynCall_viiiiiff=M.If)(n,i,o,a,l,p,h,y),qo=t.dynCall_viiff=(n,i,o,a,l)=>(qo=t.dynCall_viiff=M.Jf)(n,i,o,a,l),Ko=t.dynCall_viiiiiiiiifiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T)=>(Ko=t.dynCall_viiiiiiiiifiii=M.Kf)(n,i,o,a,l,p,h,y,b,_,S,k,z,T),Yo=t.dynCall_viiiiiiiijj=(n,i,o,a,l,p,h,y,b,_,S)=>(Yo=t.dynCall_viiiiiiiijj=M.Lf)(n,i,o,a,l,p,h,y,b,_,S),Zo=t.dynCall_iiiiiiiiiiiiiifii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie)=>(Zo=t.dynCall_iiiiiiiiiiiiiifii=M.Mf)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie),Qo=t.dynCall_viiiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z)=>(Qo=t.dynCall_viiiiiiiiiiii=M.Nf)(n,i,o,a,l,p,h,y,b,_,S,k,z),Xo=t.dynCall_iiiiiiiiiiiiiiiiifii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze)=>(Xo=t.dynCall_iiiiiiiiiiiiiiiiifii=M.Of)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze),Jo=t.dynCall_vijjiiiiii=(n,i,o,a,l,p,h,y,b,_)=>(Jo=t.dynCall_vijjiiiiii=M.Pf)(n,i,o,a,l,p,h,y,b,_),ea=t.dynCall_iiiijjj=(n,i,o,a,l,p,h)=>(ea=t.dynCall_iiiijjj=M.Qf)(n,i,o,a,l,p,h),ta=t.dynCall_viiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S)=>(ta=t.dynCall_viiiiiiiiii=M.Rf)(n,i,o,a,l,p,h,y,b,_,S),ra=t.dynCall_iiijjj=(n,i,o,a,l,p)=>(ra=t.dynCall_iiijjj=M.Sf)(n,i,o,a,l,p),na=t.dynCall_fffffff=(n,i,o,a,l,p,h)=>(na=t.dynCall_fffffff=M.Tf)(n,i,o,a,l,p,h),ia=t.dynCall_viiiij=(n,i,o,a,l,p)=>(ia=t.dynCall_viiiij=M.Uf)(n,i,o,a,l,p),oa=t.dynCall_viiiiiijiifiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T)=>(oa=t.dynCall_viiiiiijiifiii=M.Vf)(n,i,o,a,l,p,h,y,b,_,S,k,z,T),aa=t.dynCall_vjjjjjjffjifiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de)=>(aa=t.dynCall_vjjjjjjffjifiiiiii=M.Wf)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de),sa=t.dynCall_viiiiiiffjifiiiii=(n,i,o,a,l,p,h,y,b,_,S,
k,z,T,V,Y,ie)=>(sa=t.dynCall_viiiiiiffjifiiiii=M.Xf)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie),ua=t.dynCall_viiiiiiffjfiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y)=>(ua=t.dynCall_viiiiiiffjfiiiii=M.Yf)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y),la=t.dynCall_viiiiiiffjiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V)=>(la=t.dynCall_viiiiiiffjiiiii=M.Zf)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V),da=t.dynCall_vjjjjjjjjfffiiifiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We)=>(da=t.dynCall_vjjjjjjjjfffiiifiiiii=M._f)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We),ca=t.dynCall_vjjjjjjfffifijiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e)=>(ca=t.dynCall_vjjjjjjfffifijiiiii=M.$f)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e),fa=t.dynCall_vjjjjjjfffifiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de)=>(fa=t.dynCall_vjjjjjjfffifiiiiii=M.ag)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de),pa=t.dynCall_vjjjjjjjjfffjifiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We)=>(pa=t.dynCall_vjjjjjjjjfffjifiiiiii=M.bg)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We),ma=t.dynCall_vijiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z)=>(ma=t.dynCall_vijiiiiiiiiii=M.cg)(n,i,o,a,l,p,h,y,b,_,S,k,z),ha=t.dynCall_vijjfffiii=(n,i,o,a,l,p,h,y,b,_)=>(ha=t.dynCall_vijjfffiii=M.dg)(n,i,o,a,l,p,h,y,b,_),ga=t.dynCall_viiiiiiijiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z)=>(ga=t.dynCall_viiiiiiijiiii=M.eg)(n,i,o,a,l,p,h,y,b,_,S,k,z),ya=t.dynCall_vijjjjjjifiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V)=>(ya=t.dynCall_vijjjjjjifiiiii=M.fg)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V),ba=t.dynCall_viifi=(n,i,o,a,l)=>(ba=t.dynCall_viifi=M.gg)(n,i,o,a,l),va=t.dynCall_vjjjjjiiii=(n,i,o,a,l,p,h,y,b,_)=>(va=t.dynCall_vjjjjjiiii=M.hg)(n,i,o,a,l,p,h,y,b,_),wa=t.dynCall_vjjjjfiii=(n,i,o,a,l,p,h,y,b)=>(wa=t.dynCall_vjjjjfiii=M.ig)(n,i,o,a,l,p,h,y,b),_a=t.dynCall_viiiiiijiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T)=>(_a=t.dynCall_viiiiiijiiiiii=M.jg)(n,i,o,a,l,p,h,y,b,_,S,k,z,T),$a=t.dynCall_vijjii=(n,i,o,a,l,p)=>($a=t.dynCall_vijjii=M.kg)(n,i,o,a,l,p),Ca=t.dynCall_viiiiijjiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z)=>(Ca=t.dynCall_viiiiijjiiiii=M.lg)(n,i,o,a,l,p,h,y,b,_,S,k,z),Sa=t.dynCall_iiiiiji=(n,i,o,a,l,p,h)=>(Sa=t.dynCall_iiiiiji=M.mg)(n,i,o,a,l,p,h),xa=t.dynCall_iiiiji=(n,i,o,a,l,p)=>(xa=t.dynCall_iiiiji=M.ng)(n,i,o,a,l,p),Ia=t.dynCall_viiiiijiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z)=>(Ia=t.dynCall_viiiiijiiiiii=M.og)(n,i,o,a,l,p,h,y,b,_,S,k,z),Aa=t.dynCall_viiijiiiiii=(n,i,o,a,l,p,h,y,b,_,S)=>(Aa=t.dynCall_viiijiiiiii=M.pg)(n,i,o,a,l,p,h,y,b,_,S),Ta=t.dynCall_viijj=(n,i,o,a,l)=>(Ta=t.dynCall_viijj=M.qg)(n,i,o,a,l),Ea=t.dynCall_viiiijii=(n,i,o,a,l,p,h,y)=>(Ea=t.dynCall_viiiijii=M.rg)(n,i,o,a,l,p,h,y),Pa=t.dynCall_viijjiii=(n,i,o,a,l,p,h,y)=>(Pa=t.dynCall_viijjiii=M.sg)(n,i,o,a,l,p,h,y),ka=t.dynCall_ijii=(n,i,o,a)=>(ka=t.dynCall_ijii=M.tg)(n,i,o,a),Oa=t.dynCall_viiiiijjji=(n,i,o,a,l,p,h,y,b,_)=>(Oa=t.dynCall_viiiiijjji=M.ug)(n,i,o,a,l,p,h,y,b,_),Ra=t.dynCall_vijjjjiij=(n,i,o,a,l,p,h,y,b)=>(Ra=t.dynCall_vijjjjiij=M.vg)(n,i,o,a,l,p,h,y,b),Ba=t.dynCall_viiiiijij=(n,i,o,a,l,p,h,y,b)=>(Ba=t.dynCall_viiiiijij=M.wg)(n,i,o,a,l,p,h,y,b),Da=t.dynCall_viiiiiijij=(n,i,o,a,l,p,h,y,b,_)=>(Da=t.dynCall_viiiiiijij=M.xg)(n,i,o,a,l,p,h,y,b,_),za=t.dynCall_vijiii=(n,i,o,a,l,p)=>(za=t.dynCall_vijiii=M.yg)(n,i,o,a,l,p),Ma=t.dynCall_viiiiiiiiifi=(n,i,o,a,l,p,h,y,b,_,S,k)=>(Ma=t.dynCall_viiiiiiiiifi=M.zg)(n,i,o,a,l,p,h,y,b,_,S,k),ja=t.dynCall_iiijiiii=(n,i,o,a,l,p,h,y)=>(ja=t.dynCall_iiijiiii=M.Ag)(n,i,o,a,l,p,h,y),Ua=t.dynCall_viiiiiijjiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T)=>(Ua=t.dynCall_viiiiiijjiiiii=M.Bg)(n,i,o,a,l,p,h,y,b,_,S,k,z,T),Va=t.dynCall_viiiiiiijiiiiii=(n,i,o,a,l,
p,h,y,b,_,S,k,z,T,V)=>(Va=t.dynCall_viiiiiiijiiiiii=M.Cg)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V),Wa=t.dynCall_vif=(n,i,o)=>(Wa=t.dynCall_vif=M.Dg)(n,i,o),Na=t.dynCall_viif=(n,i,o,a)=>(Na=t.dynCall_viif=M.Eg)(n,i,o,a),Ga=t.dynCall_fiii=(n,i,o,a)=>(Ga=t.dynCall_fiii=M.Fg)(n,i,o,a),Ha=t.dynCall_diii=(n,i,o,a)=>(Ha=t.dynCall_diii=M.Gg)(n,i,o,a),La=t.dynCall_viiiiiifii=(n,i,o,a,l,p,h,y,b,_)=>(La=t.dynCall_viiiiiifii=M.Hg)(n,i,o,a,l,p,h,y,b,_),Fa=t.dynCall_viiiiijiiiiiiiiiiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We,et,dt,Ci,Si,xi)=>(Fa=t.dynCall_viiiiijiiiiiiiiiiiiiiiiiii=M.Ig)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We,et,dt,Ci,Si,xi),qa=t.dynCall_viijji=(n,i,o,a,l,p)=>(qa=t.dynCall_viijji=M.Jg)(n,i,o,a,l,p),Ka=t.dynCall_iiiiiiiiiiiji=(n,i,o,a,l,p,h,y,b,_,S,k,z)=>(Ka=t.dynCall_iiiiiiiiiiiji=M.Kg)(n,i,o,a,l,p,h,y,b,_,S,k,z),Ya=t.dynCall_viifiifijjjii=(n,i,o,a,l,p,h,y,b,_,S,k,z)=>(Ya=t.dynCall_viifiifijjjii=M.Lg)(n,i,o,a,l,p,h,y,b,_,S,k,z),Za=t.dynCall_viiiiiiiiiiiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We)=>(Za=t.dynCall_viiiiiiiiiiiiiiiiiiii=M.Mg)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We),Qa=t.dynCall_viiiiifiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z)=>(Qa=t.dynCall_viiiiifiiiiii=M.Ng)(n,i,o,a,l,p,h,y,b,_,S,k,z),Xa=t.dynCall_vijiiiiiiijjii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T)=>(Xa=t.dynCall_vijiiiiiiijjii=M.Og)(n,i,o,a,l,p,h,y,b,_,S,k,z,T),Ja=t.dynCall_viiiiiiiiiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e)=>(Ja=t.dynCall_viiiiiiiiiiiiiiiiii=M.Pg)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e),es=t.dynCall_viiiiiiiiiiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze)=>(es=t.dynCall_viiiiiiiiiiiiiiiiiii=M.Qg)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze),ts=t.dynCall_viiiiiiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y)=>(ts=t.dynCall_viiiiiiiiiiiiiii=M.Rg)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y),rs=t.dynCall_viiiiiiiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie)=>(rs=t.dynCall_viiiiiiiiiiiiiiii=M.Sg)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie),ns=t.dynCall_viiiijjj=(n,i,o,a,l,p,h,y)=>(ns=t.dynCall_viiiijjj=M.Tg)(n,i,o,a,l,p,h,y),is=t.dynCall_iiiiid=(n,i,o,a,l,p)=>(is=t.dynCall_iiiiid=M.Ug)(n,i,o,a,l,p),os=t.dynCall_viiiiiiijjj=(n,i,o,a,l,p,h,y,b,_,S)=>(os=t.dynCall_viiiiiiijjj=M.Vg)(n,i,o,a,l,p,h,y,b,_,S),as=t.dynCall_iiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S)=>(as=t.dynCall_iiiiiiiiiii=M.Wg)(n,i,o,a,l,p,h,y,b,_,S),ss=t.dynCall_iiiiiiiiiiiiiiiiiifi=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze)=>(ss=t.dynCall_iiiiiiiiiiiiiiiiiifi=M.Xg)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze),us=t.dynCall_viiif=(n,i,o,a,l)=>(us=t.dynCall_viiif=M.Yg)(n,i,o,a,l),ls=t.dynCall_viiijiiiii=(n,i,o,a,l,p,h,y,b,_)=>(ls=t.dynCall_viiijiiiii=M.Zg)(n,i,o,a,l,p,h,y,b,_),ds=t.dynCall_viiij=(n,i,o,a,l)=>(ds=t.dynCall_viiij=M._g)(n,i,o,a,l),cs=t.dynCall_iijjj=(n,i,o,a,l)=>(cs=t.dynCall_iijjj=M.$g)(n,i,o,a,l),fs=t.dynCall_viiiiji=(n,i,o,a,l,p,h)=>(fs=t.dynCall_viiiiji=M.ah)(n,i,o,a,l,p,h),ps=t.dynCall_iijjji=(n,i,o,a,l,p)=>(ps=t.dynCall_iijjji=M.bh)(n,i,o,a,l,p),ms=t.dynCall_ijijji=(n,i,o,a,l,p)=>(ms=t.dynCall_ijijji=M.ch)(n,i,o,a,l,p),hs=t.dynCall_viiijjiii=(n,i,o,a,l,p,h,y,b)=>(hs=t.dynCall_viiijjiii=M.dh)(n,i,o,a,l,p,h,y,b),gs=t.dynCall_iiiiijji=(n,i,o,a,l,p,h,y)=>(gs=t.dynCall_iiiiijji=M.eh)(n,i,o,a,l,p,h,y),ys=t.dynCall_iiiifi=(n,i,o,a,l,p)=>(ys=t.dynCall_iiiifi=M.fh)(n,i,o,a,l,p),bs=t.dynCall_iiijii=(n,i,o,a,l,p)=>(bs=t.dynCall_iiijii=M.gh)(n,i,o,a,l,p),vs=t.dynCall_iiiiiiiiijii=(n,i,o,a,l,p,h,y,b,_,S,k)=>(vs=t.dynCall_iiiiiiiiijii=M.hh)(n,i,o,a,l,p,h,y,b,_,S,k),ws=t.dynCall_iiiijjii=(n,i,o,a,l,p,h,y)=>(ws=t.dynCall_iiiijj
ii=M.ih)(n,i,o,a,l,p,h,y),_s=t.dynCall_iiiiiijjjii=(n,i,o,a,l,p,h,y,b,_,S)=>(_s=t.dynCall_iiiiiijjjii=M.jh)(n,i,o,a,l,p,h,y,b,_,S),$s=t.dynCall_iiijiii=(n,i,o,a,l,p,h)=>($s=t.dynCall_iiijiii=M.kh)(n,i,o,a,l,p,h),Cs=t.dynCall_iiiiiiiijjjfi=(n,i,o,a,l,p,h,y,b,_,S,k,z)=>(Cs=t.dynCall_iiiiiiiijjjfi=M.lh)(n,i,o,a,l,p,h,y,b,_,S,k,z),Ss=t.dynCall_iijiiii=(n,i,o,a,l,p,h)=>(Ss=t.dynCall_iijiiii=M.mh)(n,i,o,a,l,p,h),xs=t.dynCall_iijjjii=(n,i,o,a,l,p,h)=>(xs=t.dynCall_iijjjii=M.nh)(n,i,o,a,l,p,h),Is=t.dynCall_jij=(n,i,o)=>(Is=t.dynCall_jij=M.oh)(n,i,o),As=t.dynCall_iiji=(n,i,o,a)=>(As=t.dynCall_iiji=M.ph)(n,i,o,a),Ts=t.dynCall_iiif=(n,i,o,a)=>(Ts=t.dynCall_iiif=M.qh)(n,i,o,a),Es=t.dynCall_vidi=(n,i,o,a)=>(Es=t.dynCall_vidi=M.rh)(n,i,o,a),Ps=t.dynCall_vjiii=(n,i,o,a,l)=>(Ps=t.dynCall_vjiii=M.sh)(n,i,o,a,l),ks=t.dynCall_diiii=(n,i,o,a,l)=>(ks=t.dynCall_diiii=M.th)(n,i,o,a,l),Os=t.dynCall_diiiii=(n,i,o,a,l,p)=>(Os=t.dynCall_diiiii=M.uh)(n,i,o,a,l,p),Rs=t.dynCall_viiijjiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k)=>(Rs=t.dynCall_viiijjiiiiii=M.vh)(n,i,o,a,l,p,h,y,b,_,S,k),Bs=t.dynCall_viijjijjjjiii=(n,i,o,a,l,p,h,y,b,_,S,k,z)=>(Bs=t.dynCall_viijjijjjjiii=M.wh)(n,i,o,a,l,p,h,y,b,_,S,k,z),Ds=t.dynCall_iiiij=(n,i,o,a,l)=>(Ds=t.dynCall_iiiij=M.xh)(n,i,o,a,l),zs=t.dynCall_viiijii=(n,i,o,a,l,p,h)=>(zs=t.dynCall_viiijii=M.yh)(n,i,o,a,l,p,h),Ms=t.dynCall_viijiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z)=>(Ms=t.dynCall_viijiiiiiiiii=M.zh)(n,i,o,a,l,p,h,y,b,_,S,k,z),js=t.dynCall_fiiii=(n,i,o,a,l)=>(js=t.dynCall_fiiii=M.Ah)(n,i,o,a,l),Us=t.dynCall_jfi=(n,i,o)=>(Us=t.dynCall_jfi=M.Bh)(n,i,o),Vs=t.dynCall_viiiiiiiiiiiiii=(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V)=>(Vs=t.dynCall_viiiiiiiiiiiiii=M.Ch)(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V),Ws=t.dynCall_jiij=(n,i,o,a)=>(Ws=t.dynCall_jiij=M.Dh)(n,i,o,a),Ns=n=>(Ns=M.Eh)(n),Gs=()=>(Gs=M.Fh)(),Hs=n=>(Hs=M.Gh)(n),Ls=()=>(Ls=M.Hh)();t.___start_em_js=1275044,t.___stop_em_js=1275205;function vf(n,i,o,a){var l=G();try{return Ar(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function wf(n,i,o){var a=G();try{return Kr(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;W(1,0)}}function _f(n,i,o){var a=G();try{tr(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;W(1,0)}}function $f(n,i){var o=G();try{return Yr(n,i)}catch(a){if(N(o),a!==a+0)throw a;W(1,0)}}function Cf(n,i){var o=G();try{Tr(n,i)}catch(a){if(N(o),a!==a+0)throw a;W(1,0)}}function Sf(n,i,o,a){var l=G();try{return Xr(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function xf(n){var i=G();try{Er(n)}catch(o){if(N(i),o!==o+0)throw o;W(1,0)}}function If(n,i,o,a,l,p,h){var y=G();try{return Zr(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function Af(n,i,o,a,l,p){var h=G();try{return Qr(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function Tf(n,i,o,a,l){var p=G();try{return Jr(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function Ef(n,i,o,a){var l=G();try{Pr(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function Pf(n,i,o,a,l){var p=G();try{kr(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function kf(n,i,o,a,l,p){var h=G();try{rr(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function Of(n,i,o,a,l,p,h){var y=G();try{en(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function Rf(n,i,o,a,l,p,h,y,b,_,S,k){var z=G();try{rn(n,i,o,a,l,p,h,y,b,_,S,k)}catch(T){if(N(z),T!==T+0)throw T;W(1,0)}}function Bf(n,i,o){var a=G();try{return on(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;W(1,0)}}function Df(n,i,o){var a=G();try{return Nn(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;W(1,0)}}function zf(n,i,o){var a=G();try{return 
nr(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;W(1,0)}}function Mf(n,i,o){var a=G();try{return an(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;return W(1,0),0n}}function jf(n,i,o,a,l,p,h,y,b){var _=G();try{v(n,i,o,a,l,p,h,y,b)}catch(S){if(N(_),S!==S+0)throw S;W(1,0)}}function Uf(n){var i=G();try{return m(n)}catch(o){if(N(i),o!==o+0)throw o;W(1,0)}}function Vf(n,i,o){var a=G();try{ve(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;W(1,0)}}function Wf(n,i,o,a,l){var p=G();try{tn(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function Nf(n,i,o,a,l,p,h){var y=G();try{I(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function Gf(n,i,o,a,l,p,h,y,b,_,S,k){var z=G();try{Me(n,i,o,a,l,p,h,y,b,_,S,k)}catch(T){if(N(z),T!==T+0)throw T;W(1,0)}}function Hf(n,i,o,a,l,p,h,y,b,_,S,k){var z=G();try{je(n,i,o,a,l,p,h,y,b,_,S,k)}catch(T){if(N(z),T!==T+0)throw T;W(1,0)}}function Lf(n,i,o,a,l,p,h,y){var b=G();try{return q(n,i,o,a,l,p,h,y)}catch(_){if(N(b),_!==_+0)throw _;W(1,0)}}function Ff(n,i,o,a,l,p,h,y){var b=G();try{Ie(n,i,o,a,l,p,h,y)}catch(_){if(N(b),_!==_+0)throw _;W(1,0)}}function qf(n,i,o,a,l,p,h,y,b,_,S,k){var z=G();try{return Q(n,i,o,a,l,p,h,y,b,_,S,k)}catch(T){if(N(z),T!==T+0)throw T;W(1,0)}}function Kf(n,i,o,a,l,p,h,y,b,_){var S=G();try{re(n,i,o,a,l,p,h,y,b,_)}catch(k){if(N(S),k!==k+0)throw k;W(1,0)}}function Yf(n,i,o,a,l,p,h,y,b,_,S,k,z,T){var V=G();try{kt(n,i,o,a,l,p,h,y,b,_,S,k,z,T)}catch(Y){if(N(V),Y!==Y+0)throw Y;W(1,0)}}function Zf(n,i,o,a,l,p,h,y,b,_,S){var k=G();try{return yo(n,i,o,a,l,p,h,y,b,_,S)}catch(z){if(N(k),z!==z+0)throw z;W(1,0)}}function Qf(n,i,o,a,l,p,h,y,b){var _=G();try{return sn(n,i,o,a,l,p,h,y,b)}catch(S){if(N(_),S!==S+0)throw S;W(1,0)}}function Xf(n,i,o,a,l,p,h,y,b){var _=G();try{bo(n,i,o,a,l,p,h,y,b)}catch(S){if(N(_),S!==S+0)throw S;W(1,0)}}function Jf(n,i,o,a){var l=G();try{H(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function ep(n,i){var o=G();try{return vo(n,i)}catch(a){if(N(o),a!==a+0)throw a;W(1,0)}}function tp(n,i){var o=G();try{return D(n,i)}catch(a){if(N(o),a!==a+0)throw a;return W(1,0),0n}}function rp(n,i){var o=G();try{return _o(n,i)}catch(a){if(N(o),a!==a+0)throw a;W(1,0)}}function np(n,i,o,a){var l=G();try{Co(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function ip(n,i,o,a,l,p,h){var y=G();try{zs(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function op(n,i,o,a,l){var p=G();try{Ta(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function ap(n,i,o,a,l,p,h,y,b,_){var S=G();try{return So(n,i,o,a,l,p,h,y,b,_)}catch(k){if(N(S),k!==k+0)throw k;W(1,0)}}function sp(n,i,o,a,l,p,h){var y=G();try{xo(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function up(n,i,o,a){var l=G();try{Io(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function lp(n,i,o,a,l,p,h,y,b){var _=G();try{return Ue(n,i,o,a,l,p,h,y,b)}catch(S){if(N(_),S!==S+0)throw S;W(1,0)}}function dp(n,i,o,a,l,p,h){var y=G();try{Ao(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function cp(n,i,o,a,l,p,h,y,b){var _=G();try{nn(n,i,o,a,l,p,h,y,b)}catch(S){if(N(_),S!==S+0)throw S;W(1,0)}}function fp(n,i,o,a,l,p,h){var y=G();try{To(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function pp(n,i,o,a,l,p,h,y,b,_,S){var k=G();try{Eo(n,i,o,a,l,p,h,y,b,_,S)}catch(z){if(N(k),z!==z+0)throw z;W(1,0)}}function mp(n,i,o,a){var l=G();try{return ko(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;return W(1,0),0n}}function hp(n,i,o,a,l,p,h,y){var b=G();try{Oo(n,i,o,a,l,p,h,y)}catch(_){if(N(b),_!==_+0)throw _;W(1,0)}}function gp(n,i,o,a,l){var 
p=G();try{Ro(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function yp(n,i,o,a){var l=G();try{Do(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function bp(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y){var ie=G();try{zo(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y)}catch(de){if(N(ie),de!==de+0)throw de;W(1,0)}}function vp(n,i,o,a,l,p){var h=G();try{Mo(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function wp(n,i,o,a,l,p,h,y,b){var _=G();try{jo(n,i,o,a,l,p,h,y,b)}catch(S){if(N(_),S!==S+0)throw S;W(1,0)}}function _p(n,i,o,a,l){var p=G();try{return Uo(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function $p(n,i,o,a,l,p,h,y,b,_,S,k,z,T){var V=G();try{return Vo(n,i,o,a,l,p,h,y,b,_,S,k,z,T)}catch(Y){if(N(V),Y!==Y+0)throw Y;W(1,0)}}function Cp(n,i){var o=G();try{Wo(n,i)}catch(a){if(N(o),a!==a+0)throw a;W(1,0)}}function Sp(n,i,o){var a=G();try{return No(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;return W(1,0),0n}}function xp(n,i,o,a,l,p,h,y,b,_){var S=G();try{return Go(n,i,o,a,l,p,h,y,b,_)}catch(k){if(N(S),k!==k+0)throw k;W(1,0)}}function Ip(n,i,o,a,l,p,h,y,b,_,S,k,z,T){var V=G();try{Ko(n,i,o,a,l,p,h,y,b,_,S,k,z,T)}catch(Y){if(N(V),Y!==Y+0)throw Y;W(1,0)}}function Ap(n,i,o,a,l){var p=G();try{Ho(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function Tp(n,i,o,a,l,p,h){var y=G();try{Lo(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function Ep(n,i,o,a,l){var p=G();try{qo(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function Pp(n,i,o,a,l,p,h,y){var b=G();try{Fo(n,i,o,a,l,p,h,y)}catch(_){if(N(b),_!==_+0)throw _;W(1,0)}}function kp(n,i,o,a,l,p,h,y,b,_,S){var k=G();try{Yo(n,i,o,a,l,p,h,y,b,_,S)}catch(z){if(N(k),z!==z+0)throw z;W(1,0)}}function Op(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie){var de=G();try{return Zo(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie)}catch($e){if(N(de),$e!==$e+0)throw $e;W(1,0)}}function Rp(n,i,o,a,l,p,h,y,b,_,S,k,z){var T=G();try{Qo(n,i,o,a,l,p,h,y,b,_,S,k,z)}catch(V){if(N(T),V!==V+0)throw V;W(1,0)}}function Bp(n,i){var o=G();try{return ye(n,i)}catch(a){if(N(o),a!==a+0)throw a;W(1,0)}}function Dp(n,i,o,a,l){var p=G();try{return js(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function zp(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze){var We=G();try{return Xo(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze)}catch(et){if(N(We),et!==et+0)throw et;W(1,0)}}function Mp(n,i,o,a,l,p,h,y,b,_){var S=G();try{Jo(n,i,o,a,l,p,h,y,b,_)}catch(k){if(N(S),k!==k+0)throw k;W(1,0)}}function jp(n,i,o,a,l,p,h){var y=G();try{return ea(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function Up(n,i,o,a,l,p,h,y,b,_,S){var k=G();try{ta(n,i,o,a,l,p,h,y,b,_,S)}catch(z){if(N(k),z!==z+0)throw z;W(1,0)}}function Vp(n,i,o,a,l,p){var h=G();try{return ra(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function Wp(n,i,o,a,l,p){var h=G();try{ia(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function Np(n,i,o,a,l,p,h,y,b,_,S,k,z,T){var V=G();try{oa(n,i,o,a,l,p,h,y,b,_,S,k,z,T)}catch(Y){if(N(V),Y!==Y+0)throw Y;W(1,0)}}function Gp(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de){var $e=G();try{aa(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de)}catch(ze){if(N($e),ze!==ze+0)throw ze;W(1,0)}}function Hp(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie){var de=G();try{sa(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie)}catch($e){if(N(de),$e!==$e+0)throw $e;W(1,0)}}function Lp(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y){var ie=G();try{ua(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y)}catch(de){if(N(ie),de!==de+0)throw de;W(1,0)}}function Fp(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V){var Y=G();try{la(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V)}catch(ie){if(N(Y),ie!==ie+0)throw ie;W(1,0)}}function 
qp(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We){var et=G();try{da(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We)}catch(dt){if(N(et),dt!==dt+0)throw dt;W(1,0)}}function Kp(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e){var ze=G();try{ca(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e)}catch(We){if(N(ze),We!==We+0)throw We;W(1,0)}}function Yp(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de){var $e=G();try{fa(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de)}catch(ze){if(N($e),ze!==ze+0)throw ze;W(1,0)}}function Zp(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We){var et=G();try{pa(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We)}catch(dt){if(N(et),dt!==dt+0)throw dt;W(1,0)}}function Qp(n,i,o,a,l,p,h,y,b,_,S,k,z){var T=G();try{ma(n,i,o,a,l,p,h,y,b,_,S,k,z)}catch(V){if(N(T),V!==V+0)throw V;W(1,0)}}function Xp(n,i,o,a,l,p,h,y,b,_){var S=G();try{ha(n,i,o,a,l,p,h,y,b,_)}catch(k){if(N(S),k!==k+0)throw k;W(1,0)}}function Jp(n,i,o,a,l,p,h,y,b,_,S,k,z){var T=G();try{ga(n,i,o,a,l,p,h,y,b,_,S,k,z)}catch(V){if(N(T),V!==V+0)throw V;W(1,0)}}function em(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V){var Y=G();try{ya(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V)}catch(ie){if(N(Y),ie!==ie+0)throw ie;W(1,0)}}function tm(n,i,o,a,l,p,h,y,b,_){var S=G();try{va(n,i,o,a,l,p,h,y,b,_)}catch(k){if(N(S),k!==k+0)throw k;W(1,0)}}function rm(n,i,o,a,l,p,h,y,b){var _=G();try{wa(n,i,o,a,l,p,h,y,b)}catch(S){if(N(_),S!==S+0)throw S;W(1,0)}}function nm(n,i,o,a,l,p,h){var y=G();try{return na(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function im(n,i,o,a,l){var p=G();try{ba(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function om(n,i,o){var a=G();try{return Us(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;return W(1,0),0n}}function am(n,i,o,a,l,p,h,y,b,_,S,k,z,T){var V=G();try{_a(n,i,o,a,l,p,h,y,b,_,S,k,z,T)}catch(Y){if(N(V),Y!==Y+0)throw Y;W(1,0)}}function sm(n,i,o,a,l,p,h,y,b,_,S,k,z){var T=G();try{Ca(n,i,o,a,l,p,h,y,b,_,S,k,z)}catch(V){if(N(T),V!==V+0)throw V;W(1,0)}}function um(n,i,o,a,l,p,h){var y=G();try{return Sa(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function lm(n,i,o,a,l,p){var h=G();try{return xa(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function dm(n,i,o,a,l,p,h,y,b,_,S,k,z){var T=G();try{Ia(n,i,o,a,l,p,h,y,b,_,S,k,z)}catch(V){if(N(T),V!==V+0)throw V;W(1,0)}}function cm(n,i,o,a,l,p){var h=G();try{$a(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function fm(n,i,o,a,l,p,h,y,b,_,S){var k=G();try{Aa(n,i,o,a,l,p,h,y,b,_,S)}catch(z){if(N(k),z!==z+0)throw z;W(1,0)}}function pm(n,i,o,a,l,p,h,y){var b=G();try{Ea(n,i,o,a,l,p,h,y)}catch(_){if(N(b),_!==_+0)throw _;W(1,0)}}function mm(n,i,o,a,l,p,h,y){var b=G();try{Pa(n,i,o,a,l,p,h,y)}catch(_){if(N(b),_!==_+0)throw _;W(1,0)}}function hm(n,i,o,a){var l=G();try{return ka(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function gm(n,i,o,a,l,p,h,y,b,_){var S=G();try{Oa(n,i,o,a,l,p,h,y,b,_)}catch(k){if(N(S),k!==k+0)throw k;W(1,0)}}function ym(n,i,o,a,l,p,h,y,b){var _=G();try{Ra(n,i,o,a,l,p,h,y,b)}catch(S){if(N(_),S!==S+0)throw S;W(1,0)}}function bm(n,i,o,a,l,p,h,y,b){var _=G();try{Ba(n,i,o,a,l,p,h,y,b)}catch(S){if(N(_),S!==S+0)throw S;W(1,0)}}function vm(n,i,o,a,l,p,h,y,b,_){var S=G();try{Da(n,i,o,a,l,p,h,y,b,_)}catch(k){if(N(S),k!==k+0)throw k;W(1,0)}}function wm(n,i,o,a,l,p){var h=G();try{za(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function _m(n,i,o,a,l,p,h,y,b,_,S,k){var z=G();try{Ma(n,i,o,a,l,p,h,y,b,_,S,k)}catch(T){if(N(z),T!==T+0)throw T;W(1,0)}}function $m(n,i,o,a,l,p,h,y){var b=G();try{return ja(n,i,o,a,l,p,h,y)}catch(_){if(N(b),_!==_+0)throw _;W(1,0)}}function 
Cm(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V){var Y=G();try{Vs(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V)}catch(ie){if(N(Y),ie!==ie+0)throw ie;W(1,0)}}function Sm(n,i,o,a,l,p,h,y,b,_,S,k,z,T){var V=G();try{Ua(n,i,o,a,l,p,h,y,b,_,S,k,z,T)}catch(Y){if(N(V),Y!==Y+0)throw Y;W(1,0)}}function xm(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V){var Y=G();try{Va(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V)}catch(ie){if(N(Y),ie!==ie+0)throw ie;W(1,0)}}function Im(n,i,o){var a=G();try{Wa(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;W(1,0)}}function Am(n,i,o,a){var l=G();try{return Ga(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function Tm(n,i,o,a){var l=G();try{return Ha(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function Em(n,i,o,a,l,p,h,y,b,_){var S=G();try{La(n,i,o,a,l,p,h,y,b,_)}catch(k){if(N(S),k!==k+0)throw k;W(1,0)}}function Pm(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We,et,dt,Ci,Si,xi){var Ph=G();try{Fa(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We,et,dt,Ci,Si,xi)}catch(Ii){if(N(Ph),Ii!==Ii+0)throw Ii;W(1,0)}}function km(n,i,o,a,l,p){var h=G();try{qa(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function Om(n,i,o,a,l,p,h,y,b,_,S,k,z){var T=G();try{return Ka(n,i,o,a,l,p,h,y,b,_,S,k,z)}catch(V){if(N(T),V!==V+0)throw V;W(1,0)}}function Rm(n,i,o,a,l,p,h,y,b,_,S,k,z){var T=G();try{Ya(n,i,o,a,l,p,h,y,b,_,S,k,z)}catch(V){if(N(T),V!==V+0)throw V;W(1,0)}}function Bm(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We){var et=G();try{Za(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze,We)}catch(dt){if(N(et),dt!==dt+0)throw dt;W(1,0)}}function Dm(n,i,o,a,l,p,h,y,b,_,S,k,z){var T=G();try{Qa(n,i,o,a,l,p,h,y,b,_,S,k,z)}catch(V){if(N(T),V!==V+0)throw V;W(1,0)}}function zm(n,i,o,a,l,p,h,y,b,_,S,k,z,T){var V=G();try{Xa(n,i,o,a,l,p,h,y,b,_,S,k,z,T)}catch(Y){if(N(V),Y!==Y+0)throw Y;W(1,0)}}function Mm(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze){var We=G();try{es(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze)}catch(et){if(N(We),et!==et+0)throw et;W(1,0)}}function jm(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie){var de=G();try{rs(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie)}catch($e){if(N(de),$e!==$e+0)throw $e;W(1,0)}}function Um(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e){var ze=G();try{Ja(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e)}catch(We){if(N(ze),We!==We+0)throw We;W(1,0)}}function Vm(n,i,o,a,l,p,h,y){var b=G();try{ns(n,i,o,a,l,p,h,y)}catch(_){if(N(b),_!==_+0)throw _;W(1,0)}}function Wm(n,i,o,a,l,p,h,y,b,_,S){var k=G();try{os(n,i,o,a,l,p,h,y,b,_,S)}catch(z){if(N(k),z!==z+0)throw z;W(1,0)}}function Nm(n,i,o,a,l,p,h,y,b,_,S){var k=G();try{return as(n,i,o,a,l,p,h,y,b,_,S)}catch(z){if(N(k),z!==z+0)throw z;W(1,0)}}function Gm(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze){var We=G();try{return ss(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y,ie,de,$e,ze)}catch(et){if(N(We),et!==et+0)throw et;W(1,0)}}function Hm(n,i,o,a,l){var p=G();try{us(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function Lm(n,i,o,a,l,p,h,y,b,_,S,k){var z=G();try{Po(n,i,o,a,l,p,h,y,b,_,S,k)}catch(T){if(N(z),T!==T+0)throw T;W(1,0)}}function Fm(n,i,o,a,l,p,h,y,b,_){var S=G();try{ls(n,i,o,a,l,p,h,y,b,_)}catch(k){if(N(S),k!==k+0)throw k;W(1,0)}}function qm(n,i,o,a,l){var p=G();try{ds(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function Km(n,i,o,a,l,p,h,y,b,_,S,k,z){var T=G();try{Ms(n,i,o,a,l,p,h,y,b,_,S,k,z)}catch(V){if(N(T),V!==V+0)throw V;W(1,0)}}function Ym(n,i,o,a,l){var p=G();try{return cs(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function Zm(n,i,o,a){var l=G();try{return Ws(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;return W(1,0),0n}}function Qm(n,i,o,a,l,p,h){var 
y=G();try{fs(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function Xm(n,i,o,a,l,p){var h=G();try{return ps(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function Jm(n,i,o,a,l,p){var h=G();try{return ms(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function eh(n,i,o,a,l,p,h,y,b){var _=G();try{hs(n,i,o,a,l,p,h,y,b)}catch(S){if(N(_),S!==S+0)throw S;W(1,0)}}function th(n,i,o,a){var l=G();try{Na(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function rh(n,i,o,a,l,p,h,y){var b=G();try{return gs(n,i,o,a,l,p,h,y)}catch(_){if(N(b),_!==_+0)throw _;W(1,0)}}function nh(n,i,o,a,l,p){var h=G();try{return ys(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function ih(n,i,o,a,l,p){var h=G();try{return bs(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function oh(n,i,o,a,l,p,h,y,b,_,S,k){var z=G();try{return vs(n,i,o,a,l,p,h,y,b,_,S,k)}catch(T){if(N(z),T!==T+0)throw T;W(1,0)}}function ah(n,i,o,a,l,p,h,y){var b=G();try{return ws(n,i,o,a,l,p,h,y)}catch(_){if(N(b),_!==_+0)throw _;W(1,0)}}function sh(n,i,o,a,l,p,h,y,b,_,S){var k=G();try{return _s(n,i,o,a,l,p,h,y,b,_,S)}catch(z){if(N(k),z!==z+0)throw z;W(1,0)}}function uh(n,i,o,a,l,p,h){var y=G();try{return $s(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function lh(n,i,o,a,l,p,h,y,b,_,S,k,z){var T=G();try{return Cs(n,i,o,a,l,p,h,y,b,_,S,k,z)}catch(V){if(N(T),V!==V+0)throw V;W(1,0)}}function dh(n,i,o,a,l,p,h){var y=G();try{return Ss(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function ch(n,i,o,a,l,p,h){var y=G();try{return xs(n,i,o,a,l,p,h)}catch(b){if(N(y),b!==b+0)throw b;W(1,0)}}function fh(n,i,o){var a=G();try{return Is(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;return W(1,0),0n}}function ph(n,i,o,a){var l=G();try{return As(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function mh(n,i,o,a){var l=G();try{Es(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function hh(n,i,o,a){var l=G();try{return Ts(n,i,o,a)}catch(p){if(N(l),p!==p+0)throw p;W(1,0)}}function gh(n,i,o,a,l){var p=G();try{Ps(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function yh(n,i,o,a,l){var p=G();try{return ks(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function bh(n,i,o,a,l,p){var h=G();try{return Os(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function vh(n,i,o,a,l,p,h,y,b,_,S,k){var z=G();try{Rs(n,i,o,a,l,p,h,y,b,_,S,k)}catch(T){if(N(z),T!==T+0)throw T;W(1,0)}}function wh(n,i,o,a,l,p,h,y,b,_,S,k,z){var T=G();try{Bs(n,i,o,a,l,p,h,y,b,_,S,k,z)}catch(V){if(N(T),V!==V+0)throw V;W(1,0)}}function _h(n,i,o,a,l,p,h,y,b,_,S,k){var z=G();try{Bo(n,i,o,a,l,p,h,y,b,_,S,k)}catch(T){if(N(z),T!==T+0)throw T;W(1,0)}}function $h(n,i,o,a,l){var p=G();try{return Ds(n,i,o,a,l)}catch(h){if(N(p),h!==h+0)throw h;W(1,0)}}function Ch(n){var i=G();try{return fe(n)}catch(o){if(N(i),o!==o+0)throw o;return W(1,0),0n}}function Sh(n,i,o,a,l,p){var h=G();try{return he(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function xh(n,i,o,a,l,p){var h=G();try{return is(n,i,o,a,l,p)}catch(y){if(N(h),y!==y+0)throw y;W(1,0)}}function Ih(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y){var ie=G();try{ts(n,i,o,a,l,p,h,y,b,_,S,k,z,T,V,Y)}catch(de){if(N(ie),de!==de+0)throw de;W(1,0)}}function Ah(n,i,o){var a=G();try{return wo(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;W(1,0)}}function Th(n,i,o){var a=G();try{return $o(n,i,o)}catch(l){if(N(a),l!==l+0)throw l;W(1,0)}}function Eh(){var n=M;n=Object.assign({},n);var i=o=>a=>o(a)>>>0;return 
n.Fd=i(n.Fd),n.he=i(n.he),n.ne=i(n.ne),n.oe=(o=>()=>o()>>>0)(n.oe),n}t.stackSave=()=>G(),t.stackRestore=n=>N(n),t.stackAlloc=n=>Mn(n),t.UTF8ToString=qe,t.stringToUTF8=(n,i,o)=>vt(n,oe,i,o),t.lengthBytesUTF8=Kt;var Gn;ee=function n(){Gn||Fs(),Gn||(ee=n)};function Fs(){if(!(0<_e)){if(t.preRun)for(typeof t.preRun==\"function\"&&(t.preRun=[t.preRun]);t.preRun.length;){var n=t.preRun.shift();Ce.unshift(n)}for(;0Ks)});var Qs=un(()=>{});var Xs=un(()=>{});var Js={};Hn(Js,{cpus:()=>jh});var jh,eu=ae(()=>{jh=void 0});var nu=un((ru,ki)=>{\"use strict\";var tu=(()=>{var e=typeof document<\"u\"&&document.currentScript?document.currentScript.src:void 0;return typeof __filename<\"u\"&&(e=e||__filename),function(r={}){function t(){return we.buffer!=Ce.buffer&&ee(),Ce}function u(){return we.buffer!=Ce.buffer&&ee(),be}function s(){return we.buffer!=Ce.buffer&&ee(),Ae}function c(){return we.buffer!=Ce.buffer&&ee(),_e}function f(){return we.buffer!=Ce.buffer&&ee(),Je}var d=r,g,w;d.ready=new Promise((m,v)=>{g=m,w=v}),d.jsepInit=(m,v,I,D,H,Q,re,ye)=>{d.Qb=m,d.wb=v,d.yb=I,d.jb=D,d.xb=H,d.Ea=Q,d.zb=re,d.Ab=ye,v=(he,fe,ve)=>(...Me)=>{let je=lt,q=fe?.();Me=he(...Me);let Ie=fe?.();return q!==Ie&&(he=Ie,ve(q),fe=ve=null),lt!=je?N():Me},I=he=>async(...fe)=>{try{if(d.bb)throw Error(\"Session already started\");let ve=d.bb={Fb:fe[0],errors:[]},Me=await he(...fe);if(d.bb!==ve)throw Error(\"Session mismatch\");m.flush();let je=ve.errors;if(0Ie),0d._OrtRun,he=>d._OrtRun=he)),d._OrtRunWithBinding=I(v(d._OrtRunWithBinding,()=>d._OrtRunWithBinding,he=>d._OrtRunWithBinding=he)),d._OrtBindInput=v(d._OrtBindInput,()=>d._OrtBindInput,he=>d._OrtBindInput=he),d.jsepRegisterBuffer=(he,fe,ve,Me)=>m.registerBuffer(he,fe,ve,Me),d.jsepUnregisterBuffers=he=>{m.unregisterBuffers(he)},d.jsepGetBuffer=he=>m.getBuffer(he),d.jsepCreateDownloader=(he,fe,ve)=>m.createDownloader(he,fe,ve)};var C=Object.assign({},d),$=\"./this.program\",A=(m,v)=>{throw v},P=typeof window==\"object\",x=typeof importScripts==\"function\",E=typeof process==\"object\"&&typeof process.versions==\"object\"&&typeof process.versions.node==\"string\",O=d.ENVIRONMENT_IS_PTHREAD||!1,B=\"\";function R(m){return d.locateFile?d.locateFile(m,B):B+m}var j,U,L;if(E){var F=(Fn(),ir(Ln)),te=(Ei(),ir(Ti));B=x?te.dirname(B)+\"/\":__dirname+\"/\",j=(v,I)=>(v=v.startsWith(\"file://\")?new URL(v):te.normalize(v),F.readFileSync(v,I?void 0:\"utf8\")),L=v=>(v=j(v,!0),v.buffer||(v=new Uint8Array(v)),v),U=(v,I,D,H=!0)=>{v=v.startsWith(\"file://\")?new URL(v):te.normalize(v),F.readFile(v,H?void 0:\"utf8\",(Q,re)=>{Q?D(Q):I(H?re.buffer:re)})},!d.thisProgram&&1{throw process.exitCode=v,I},d.inspect=()=>\"[Emscripten Module object]\";let m;try{m=Qs()}catch(v){throw console.error('The \"worker_threads\" module is not supported in this node.js build - perhaps a newer version is needed?'),v}global.Worker=m.Worker}else(P||x)&&(x?B=self.location.href:typeof document<\"u\"&&document.currentScript&&(B=document.currentScript.src),typeof e<\"u\"&&e&&(B=e),B.indexOf(\"blob:\")!==0?B=B.substr(0,B.replace(/[?#].*/,\"\").lastIndexOf(\"/\")+1):B=\"\",E||(j=m=>{var v=new XMLHttpRequest;return v.open(\"GET\",m,!1),v.send(null),v.responseText},x&&(L=m=>{var v=new XMLHttpRequest;return v.open(\"GET\",m,!1),v.responseType=\"arraybuffer\",v.send(null),new Uint8Array(v.response)}),U=(m,v,I)=>{var D=new XMLHttpRequest;D.open(\"GET\",m,!0),D.responseType=\"arraybuffer\",D.onload=()=>{D.status==200||D.status==0&&D.response?v(D.response):I()},D.onerror=I,D.send(null)}));E&&typeof 
performance>\"u\"&&(global.performance=Xs().performance);var J=console.log.bind(console),oe=console.error.bind(console);E&&(J=(...m)=>F.writeSync(1,m.join(\" \")+`\n`),oe=(...m)=>F.writeSync(2,m.join(\" \")+`\n`));var le=d.print||J,ge=d.printErr||oe;Object.assign(d,C),C=null,d.thisProgram&&($=d.thisProgram),d.quit&&(A=d.quit);var X;d.wasmBinary&&(X=d.wasmBinary);var pe=d.noExitRuntime||!0;typeof WebAssembly!=\"object\"&&pt(\"no native wasm support detected\");var we,ue,me,Ee=!1,Pe,Ce,be,Ae,_e,Je;function ee(){var m=we.buffer;d.HEAP8=Ce=new Int8Array(m),d.HEAP16=new Int16Array(m),d.HEAP32=Ae=new Int32Array(m),d.HEAPU8=be=new Uint8Array(m),d.HEAPU16=new Uint16Array(m),d.HEAPU32=_e=new Uint32Array(m),d.HEAPF32=new Float32Array(m),d.HEAPF64=Je=new Float64Array(m)}var ce=d.INITIAL_MEMORY||16777216;if(5242880<=ce||pt(\"INITIAL_MEMORY should be larger than STACK_SIZE, was \"+ce+\"! (STACK_SIZE=5242880)\"),O)we=d.wasmMemory;else if(d.wasmMemory)we=d.wasmMemory;else if(we=new WebAssembly.Memory({initial:ce/65536,maximum:65536,shared:!0}),!(we.buffer instanceof SharedArrayBuffer))throw ge(\"requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag\"),E&&ge(\"(on node you may need: --experimental-wasm-threads --experimental-wasm-bulk-memory and/or recent version)\"),Error(\"bad memory\");ee(),ce=we.buffer.byteLength;var Be=[],tt=[],Ye=[],Ke=0;function Pt(){return pe||0{if(!v.ok)throw\"failed to load wasm binary file at '\"+m+\"'\";return v.arrayBuffer()}).catch(()=>cr(m));if(U)return new Promise((v,I)=>{U(m,D=>v(new Uint8Array(D)),I)})}return Promise.resolve().then(()=>cr(m))}function zr(m,v,I){return fr(m).then(D=>WebAssembly.instantiate(D,v)).then(D=>D).then(I,D=>{ge(\"failed to asynchronously prepare wasm: \"+D),pt(D)})}function hn(m,v){var I=ut;return X||typeof WebAssembly.instantiateStreaming!=\"function\"||qt(I)||I.startsWith(\"file://\")||E||typeof fetch!=\"function\"?zr(I,m,v):fetch(I,{credentials:\"same-origin\"}).then(D=>WebAssembly.instantiateStreaming(D,m).then(v,function(H){return ge(\"wasm streaming compile failed: \"+H),ge(\"falling back to ArrayBuffer instantiation\"),zr(I,m,v)}))}var qe,Kt={906828:m=>{d.Ea(\"Abs\",m,void 0)},906879:m=>{d.Ea(\"Neg\",m,void 0)},906930:m=>{d.Ea(\"Floor\",m,void 0)},906983:m=>{d.Ea(\"Ceil\",m,void 0)},907035:m=>{d.Ea(\"Reciprocal\",m,void 0)},907093:m=>{d.Ea(\"Sqrt\",m,void 0)},907145:m=>{d.Ea(\"Exp\",m,void 0)},907196:m=>{d.Ea(\"Erf\",m,void 0)},907247:m=>{d.Ea(\"Sigmoid\",m,void 0)},907302:m=>{d.Ea(\"Log\",m,void 0)},907353:m=>{d.Ea(\"Sin\",m,void 0)},907404:m=>{d.Ea(\"Cos\",m,void 0)},907455:m=>{d.Ea(\"Tan\",m,void 0)},907506:m=>{d.Ea(\"Asin\",m,void 0)},907558:m=>{d.Ea(\"Acos\",m,void 0)},907610:m=>{d.Ea(\"Atan\",m,void 0)},907662:m=>{d.Ea(\"Sinh\",m,void 0)},907714:m=>{d.Ea(\"Cosh\",m,void 0)},907766:m=>{d.Ea(\"Asinh\",m,void 0)},907819:m=>{d.Ea(\"Acosh\",m,void 0)},907872:m=>{d.Ea(\"Atanh\",m,void 0)},907925:m=>{d.Ea(\"Tanh\",m,void 0)},907977:m=>{d.Ea(\"Not\",m,void 0)},908028:(m,v,I)=>{d.Ea(\"ClipV10\",m,{min:v,max:I})},908100:m=>{d.Ea(\"Clip\",m,void 0)},908152:(m,v)=>{d.Ea(\"Elu\",m,{alpha:v})},908210:m=>{d.Ea(\"Relu\",m,void 0)},908262:(m,v)=>{d.Ea(\"LeakyRelu\",m,{alpha:v})},908326:(m,v)=>{d.Ea(\"ThresholdedRelu\",m,{alpha:v})},908396:(m,v)=>{d.Ea(\"Cast\",m,{to:v})},908454:m=>{d.Ea(\"Add\",m,void 0)},908505:m=>{d.Ea(\"Sub\",m,void 0)},908556:m=>{d.Ea(\"Mul\",m,void 
0)},908607:m=>{d.Ea(\"Div\",m,void 0)},908658:m=>{d.Ea(\"Pow\",m,void 0)},908709:m=>{d.Ea(\"Equal\",m,void 0)},908762:m=>{d.Ea(\"Greater\",m,void 0)},908817:m=>{d.Ea(\"GreaterOrEqual\",m,void 0)},908879:m=>{d.Ea(\"Less\",m,void 0)},908931:m=>{d.Ea(\"LessOrEqual\",m,void 0)},908990:(m,v,I,D,H)=>{d.Ea(\"ReduceMean\",m,{keepDims:!!v,noopWithEmptyAxes:!!I,axes:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[]})},909154:(m,v,I,D,H)=>{d.Ea(\"ReduceMax\",m,{keepDims:!!v,noopWithEmptyAxes:!!I,axes:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[]})},909317:(m,v,I,D,H)=>{d.Ea(\"ReduceMin\",m,{keepDims:!!v,noopWithEmptyAxes:!!I,axes:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[]})},909480:(m,v,I,D,H)=>{d.Ea(\"ReduceProd\",m,{keepDims:!!v,noopWithEmptyAxes:!!I,axes:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[]})},909644:(m,v,I,D,H)=>{d.Ea(\"ReduceSum\",m,{keepDims:!!v,noopWithEmptyAxes:!!I,axes:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[]})},909807:(m,v,I,D,H)=>{d.Ea(\"ReduceL1\",m,{keepDims:!!v,noopWithEmptyAxes:!!I,axes:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[]})},909969:(m,v,I,D,H)=>{d.Ea(\"ReduceL2\",m,{keepDims:!!v,noopWithEmptyAxes:!!I,axes:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[]})},910131:(m,v,I,D,H)=>{d.Ea(\"ReduceLogSum\",m,{keepDims:!!v,noopWithEmptyAxes:!!I,axes:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[]})},910297:(m,v,I,D,H)=>{d.Ea(\"ReduceSumSquare\",m,{keepDims:!!v,noopWithEmptyAxes:!!I,axes:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[]})},910466:(m,v,I,D,H)=>{d.Ea(\"ReduceLogSumExp\",m,{keepDims:!!v,noopWithEmptyAxes:!!I,axes:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[]})},910635:m=>{d.Ea(\"Where\",m,void 0)},910688:(m,v,I)=>{d.Ea(\"Transpose\",m,{perm:v?Array.from(s().subarray(I>>>0,I+v>>>0)):[]})},910801:(m,v,I,D,H,Q,re,ye,he,fe)=>{d.Ea(\"Conv\",m,{format:he?\"NHWC\":\"NCHW\",auto_pad:v,dilations:[I],group:D,kernel_shape:[H],pads:[Q,re],strides:[ye],w_is_const:()=>!!t()[fe>>>0]})},911029:(m,v,I,D,H,Q,re,ye,he,fe,ve,Me,je,q,Ie)=>{d.Ea(\"Conv\",m,{format:q?\"NHWC\":\"NCHW\",auto_pad:v,dilations:[I,D],group:H,kernel_shape:[Q,re],pads:[ye,he,fe,ve],strides:[Me,je],w_is_const:()=>!!t()[Ie>>>0]})},911288:(m,v,I,D,H,Q,re,ye,he,fe)=>{d.Ea(\"Conv\",m,{format:he?\"NHWC\":\"NCHW\",auto_pad:v,dilations:[I],group:D,kernel_shape:[H],pads:[Q,re],strides:[ye],w_is_const:()=>!!t()[fe>>>0]})},911516:(m,v,I,D,H,Q,re,ye,he,fe,ve,Me,je,q,Ie)=>{d.Ea(\"Conv\",m,{format:q?\"NHWC\":\"NCHW\",auto_pad:v,dilations:[I,D],group:H,kernel_shape:[Q,re],pads:[ye,he,fe,ve],strides:[Me,je],w_is_const:()=>!!t()[Ie>>>0]})},911775:(m,v,I,D,H,Q,re,ye,he,fe,ve,Me,je,q)=>{d.Ea(\"ConvTranspose\",m,{format:he?\"NHWC\":\"NCHW\",autoPad:v,dilations:[I],group:D,kernel_shape:[H],pads:[Q,re],strides:[ye],wIsConst:()=>!!t()[fe>>>0],outputPadding:ve?Array.from(s().subarray(Me>>>0,Me+ve>>>0)):[],outputShape:je?Array.from(s().subarray(q>>>0,q+je>>>0)):[]})},912155:(m,v,I,D,H,Q,re,ye,he,fe,ve,Me,je)=>{d.Ea(\"ConvTranspose\",m,{format:ye?\"NHWC\":\"NCHW\",autoPad:v,dilations:Array.from(s().subarray(I>>>0,I+2>>>0)),group:D,kernelShape:Array.from(s().subarray(H>>>0,H+2>>>0)),pads:Array.from(s().subarray(Q>>>0,Q+4>>>0)),strides:Array.from(s().subarray(re>>>0,re+2>>>0)),wIsConst:()=>!!t()[he>>>0],outputPadding:0>>0,ve+fe>>>0)):[],outputShape:0>>0,je+Me>>>0)):[]})},912678:(m,v,I,D,H,Q,re,ye,he,fe,ve,Me,je,q)=>{d.Ea(\"ConvTranspose\",m,{format:he?\"NHWC\":\"NCHW\",autoPad:v,dilations:[I],group:D,kernel_shape:[H],pads:[Q,re],strides:[ye],wIsConst:()=>!!t()[fe>>>0],outputPadding:ve?Array.from(s().subarray(Me>>>0,Me+ve>>>0)):[],outputShape:je?Array.fr
om(s().subarray(q>>>0,q+je>>>0)):[]})},913058:(m,v,I,D,H,Q,re,ye,he,fe,ve,Me,je)=>{d.Ea(\"ConvTranspose\",m,{format:ye?\"NHWC\":\"NCHW\",autoPad:v,dilations:Array.from(s().subarray(I>>>0,I+2>>>0)),group:D,kernelShape:Array.from(s().subarray(H>>>0,H+2>>>0)),pads:Array.from(s().subarray(Q>>>0,Q+4>>>0)),strides:Array.from(s().subarray(re>>>0,re+2>>>0)),wIsConst:()=>!!t()[he>>>0],outputPadding:0>>0,ve+fe>>>0)):[],outputShape:0>>0,je+Me>>>0)):[]})},913581:(m,v)=>{d.Ea(\"GlobalAveragePool\",m,{format:v?\"NHWC\":\"NCHW\"})},913672:(m,v,I,D,H,Q,re,ye,he,fe,ve,Me,je,q,Ie,Ue)=>{d.Ea(\"AveragePool\",m,{format:Ue?\"NHWC\":\"NCHW\",auto_pad:v,ceil_mode:I,count_include_pad:D,storage_order:H,dilations:[Q,re],kernel_shape:[ye,he],pads:[fe,ve,Me,je],strides:[q,Ie]})},913956:(m,v)=>{d.Ea(\"GlobalAveragePool\",m,{format:v?\"NHWC\":\"NCHW\"})},914047:(m,v,I,D,H,Q,re,ye,he,fe,ve,Me,je,q,Ie,Ue)=>{d.Ea(\"AveragePool\",m,{format:Ue?\"NHWC\":\"NCHW\",auto_pad:v,ceil_mode:I,count_include_pad:D,storage_order:H,dilations:[Q,re],kernel_shape:[ye,he],pads:[fe,ve,Me,je],strides:[q,Ie]})},914331:(m,v)=>{d.Ea(\"GlobalMaxPool\",m,{format:v?\"NHWC\":\"NCHW\"})},914418:(m,v,I,D,H,Q,re,ye,he,fe,ve,Me,je,q,Ie,Ue)=>{d.Ea(\"MaxPool\",m,{format:Ue?\"NHWC\":\"NCHW\",auto_pad:v,ceil_mode:I,count_include_pad:D,storage_order:H,dilations:[Q,re],kernel_shape:[ye,he],pads:[fe,ve,Me,je],strides:[q,Ie]})},914698:(m,v)=>{d.Ea(\"GlobalMaxPool\",m,{format:v?\"NHWC\":\"NCHW\"})},914785:(m,v,I,D,H,Q,re,ye,he,fe,ve,Me,je,q,Ie,Ue)=>{d.Ea(\"MaxPool\",m,{format:Ue?\"NHWC\":\"NCHW\",auto_pad:v,ceil_mode:I,count_include_pad:D,storage_order:H,dilations:[Q,re],kernel_shape:[ye,he],pads:[fe,ve,Me,je],strides:[q,Ie]})},915065:(m,v,I,D,H)=>{d.Ea(\"Gemm\",m,{alpha:v,beta:I,transA:D,transB:H})},915169:m=>{d.Ea(\"MatMul\",m,void 0)},915223:(m,v,I,D)=>{d.Ea(\"ArgMax\",m,{keepDims:!!v,selectLastIndex:!!I,axis:D})},915331:(m,v,I,D)=>{d.Ea(\"ArgMin\",m,{keepDims:!!v,selectLastIndex:!!I,axis:D})},915439:(m,v)=>{d.Ea(\"Softmax\",m,{axis:v})},915502:(m,v)=>{d.Ea(\"Concat\",m,{axis:v})},915562:(m,v,I,D,H)=>{d.Ea(\"Split\",m,{axis:v,numOutputs:I,splitSizes:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[]})},915707:m=>{d.Ea(\"Expand\",m,void 0)},915761:(m,v)=>{d.Ea(\"Gather\",m,{axis:Number(v)})},915832:(m,v)=>{d.Ea(\"GatherElements\",m,{axis:Number(v)})},915911:(m,v,I,D,H,Q,re,ye,he,fe,ve)=>{d.Ea(\"Resize\",m,{antialias:v,axes:I?Array.from(s().subarray(D>>>0,D+I>>>0)):[],coordinateTransformMode:it(H),cubicCoeffA:Q,excludeOutside:re,extrapolationValue:ye,keepAspectRatioPolicy:it(he),mode:it(fe),nearestMode:it(ve)})},916262:(m,v,I,D,H,Q,re)=>{d.Ea(\"Slice\",m,{starts:v?Array.from(s().subarray(I>>>0,I+v>>>0)):[],ends:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[],axes:Q?Array.from(s().subarray(re>>>0,re+Q>>>0)):[]})},916493:m=>{d.Ea(\"Tile\",m,void 0)},916545:(m,v,I)=>{d.Ea(\"LayerNormalization\",m,{axis:Number(v),epsilon:Number(I)})},916652:(m,v,I)=>{d.Ea(\"InstanceNormalization\",m,{epsilon:v,format:I?\"NHWC\":\"NCHW\"})},916766:(m,v,I)=>{d.Ea(\"InstanceNormalization\",m,{epsilon:v,format:I?\"NHWC\":\"NCHW\"})},916880:m=>{d.Ea(\"Range\",m,void 0)},916933:(m,v)=>{d.Ea(\"Einsum\",m,{equation:it(v)})},917014:(m,v,I,D,H)=>{d.Ea(\"Pad\",m,{mode:v,value:I,pads:D?Array.from(s().subarray(H>>>0,H+D>>>0)):[]})},917146:m=>{d.Ea(\"Gelu\",m,void 0)},917198:m=>{d.Ea(\"BiasAdd\",m,void 0)},917253:m=>{d.Ea(\"BiasSplitGelu\",m,void 
0)},917314:(m,v)=>{d.Ea(\"SkipLayerNormalization\",m,{epsilon:v})},917395:m=>{d.zb(m)},917429:(m,v)=>d.Ab(m,v,d.bb.Fb,d.bb.errors),917541:m=>d.wb(m),917574:m=>d.yb(m),917606:(m,v,I)=>{d.jb(m,v,I,!0)},917645:(m,v,I)=>{d.jb(m,v,I)}};function vt(m){this.name=\"ExitStatus\",this.message=`Program terminated with exit(${m})`,this.status=m}function pr(m){m.terminate(),m.onmessage=()=>{}}function mt(m){(m=Se.Qa[m])||pt(),Se.Eb(m)}function mr(m){var v=Se.tb();if(!v)return 6;Se.Ya.push(v),Se.Qa[m.Xa]=v,v.Xa=m.Xa;var I={cmd:\"run\",start_routine:m.Gb,arg:m.rb,pthread_ptr:m.Xa};return E&&v.unref(),v.postMessage(I,m.Mb),0}var hr=typeof TextDecoder<\"u\"?new TextDecoder(\"utf8\"):void 0,gn=(m,v,I)=>{v>>>=0;var D=v+I;for(I=v;m[I]&&!(I>=D);)++I;if(16H?D+=String.fromCharCode(H):(H-=65536,D+=String.fromCharCode(55296|H>>10,56320|H&1023))}}else D+=String.fromCharCode(H)}return D},it=(m,v)=>(m>>>=0)?gn(u(),m,v):\"\";function yn(m){if(O)return Ge(1,1,m);Pe=m,Pt()||(Se.Hb(),d.onExit&&d.onExit(m),Ee=!0),A(m,new vt(m))}var ht=m=>{if(Pe=m,O)throw wt(m),\"unwind\";yn(m)},Se={ab:[],Ya:[],mb:[],Qa:{},gb:function(){O?Se.vb():Se.ub()},ub:function(){Be.unshift(()=>{Ut(),Se.Bb(()=>Vt())})},vb:function(){Se.receiveObjectTransfer=Se.Db,Se.threadInitTLS=Se.lb,Se.setExitStatus=Se.kb,pe=!1},kb:function(m){Pe=m},Sb:[\"$terminateWorker\"],Hb:function(){for(var m of Se.Ya)pr(m);for(m of Se.ab)pr(m);Se.ab=[],Se.Ya=[],Se.Qa=[]},Eb:function(m){var v=m.Xa;delete Se.Qa[v],Se.ab.push(m),Se.Ya.splice(Se.Ya.indexOf(m),1),m.Xa=0,Tr(v)},Db:function(){},lb:function(){Se.mb.forEach(m=>m())},Cb:m=>new Promise(v=>{m.onmessage=Q=>{Q=Q.data;var re=Q.cmd;if(Q.targetThread&&Q.targetThread!=tr()){var ye=Se.Qa[Q.Rb];ye?ye.postMessage(Q,Q.transferList):ge('Internal error! Worker sent a message \"'+re+'\" to target pthread '+Q.targetThread+\", but that thread no longer exists!\")}else re===\"checkMailbox\"?Qt():re===\"spawnThread\"?mr(Q):re===\"cleanupThread\"?mt(Q.thread):re===\"killThread\"?(Q=Q.thread,re=Se.Qa[Q],delete Se.Qa[Q],pr(re),Tr(Q),Se.Ya.splice(Se.Ya.indexOf(re),1),re.Xa=0):re===\"cancelThread\"?Se.Qa[Q.thread].postMessage({cmd:\"cancel\"}):re===\"loaded\"?(m.loaded=!0,v(m)):re===\"alert\"?alert(\"Thread \"+Q.threadId+\": \"+Q.text):Q.target===\"setimmediate\"?m.postMessage(Q):re===\"callHandler\"?d[Q.handler](...Q.args):re&&ge(\"worker sent an unknown command \"+re)},m.onerror=Q=>{throw ge(\"worker sent an error! 
\"+Q.filename+\":\"+Q.lineno+\": \"+Q.message),Q},E&&(m.on(\"message\",function(Q){m.onmessage({data:Q})}),m.on(\"error\",function(Q){m.onerror(Q)}));var I=[],D=[\"onExit\",\"onAbort\",\"print\",\"printErr\"],H;for(H of D)d.hasOwnProperty(H)&&I.push(H);m.postMessage({cmd:\"load\",handlers:I,urlOrBlob:d.mainScriptUrlOrBlob||e,wasmMemory:we,wasmModule:me})}),Bb:function(m){m()},qb:function(){var m=R(\"ort-wasm-simd-threaded.worker.js\");m=new Worker(m),Se.ab.push(m)},tb:function(){return Se.ab.length==0&&(Se.qb(),Se.Cb(Se.ab[0])),Se.ab.pop()}};d.PThread=Se;var Wt=m=>{for(;0>2>>>0];m=s()[m+56>>2>>>0],Jr(v,v-m),rr(v)};function wt(m){if(O)return Ge(2,0,m);ht(m)}d.invokeEntryPoint=function(m,v){m=en.apply(null,[m,v]),Pt()?Se.kb(m):Er(m)};function Mr(m){this.fb=m-24,this.pb=function(v){c()[this.fb+4>>2>>>0]=v},this.ob=function(v){c()[this.fb+8>>2>>>0]=v},this.gb=function(v,I){this.nb(),this.pb(v),this.ob(I)},this.nb=function(){c()[this.fb+16>>2>>>0]=0}}var ot=0,gt=0;function gr(m,v,I,D){return O?Ge(3,1,m,v,I,D):bn(m,v,I,D)}function bn(m,v,I,D){if(m>>>=0,v>>>=0,I>>>=0,D>>>=0,typeof SharedArrayBuffer>\"u\")return ge(\"Current environment does not support SharedArrayBuffer, pthreads are not available!\"),6;var H=[];return O&&H.length===0?gr(m,v,I,D):(m={Gb:I,Xa:m,rb:D,Mb:H},O?(m.Ob=\"spawnThread\",postMessage(m,H),0):mr(m))}function vn(m,v,I){return O?Ge(4,1,m,v,I):0}function jr(m,v){if(O)return Ge(5,1,m,v)}var Ur=m=>{for(var v=0,I=0;I=D?v++:2047>=D?v+=2:55296<=D&&57343>=D?(v+=4,++I):v+=3}return v},wn=(m,v,I,D)=>{if(I>>>=0,!(0=re){var ye=m.charCodeAt(++Q);re=65536+((re&1023)<<10)|ye&1023}if(127>=re){if(I>=D)break;v[I++>>>0]=re}else{if(2047>=re){if(I+1>=D)break;v[I++>>>0]=192|re>>6}else{if(65535>=re){if(I+2>=D)break;v[I++>>>0]=224|re>>12}else{if(I+3>=D)break;v[I++>>>0]=240|re>>18,v[I++>>>0]=128|re>>12&63}v[I++>>>0]=128|re>>6&63}v[I++>>>0]=128|re&63}}return v[I>>>0]=0,I-H},_n=(m,v,I)=>wn(m,u(),v,I);function $n(m,v){if(O)return Ge(6,1,m,v)}function Cn(m,v,I){if(O)return Ge(7,1,m,v,I)}function Sn(m,v,I){return O?Ge(8,1,m,v,I):0}function yr(m,v){if(O)return Ge(9,1,m,v)}function Vr(m,v,I){if(O)return Ge(10,1,m,v,I)}function Yt(m,v,I,D){if(O)return Ge(11,1,m,v,I,D)}function xn(m,v,I,D){if(O)return Ge(12,1,m,v,I,D)}function In(m,v,I,D){if(O)return Ge(13,1,m,v,I,D)}function Ct(m){if(O)return Ge(14,1,m)}function yt(m,v){if(O)return Ge(15,1,m,v)}function Wr(m,v,I){if(O)return Ge(16,1,m,v,I)}var Zt=m=>{if(!Ee)try{if(m(),!Pt())try{O?Er(Pe):ht(Pe)}catch(v){v instanceof vt||v==\"unwind\"||A(1,v)}}catch(v){v instanceof vt||v==\"unwind\"||A(1,v)}};function br(m){m>>>=0,typeof Atomics.Nb==\"function\"&&(Atomics.Nb(s(),m>>2,m).value.then(Qt),m+=128,Atomics.store(s(),m>>2,1))}d.__emscripten_thread_mailbox_await=br;function Qt(){var m=tr();m&&(br(m),Zt(()=>Qr()))}d.checkMailbox=Qt;var Nt=m=>m%4===0&&(m%100!==0||m%400===0),vr=[0,31,60,91,121,152,182,213,244,274,305,335],An=[0,31,59,90,120,151,181,212,243,273,304,334];function Tn(m,v,I,D,H,Q,re,ye){return O?Ge(17,1,m,v,I,D,H,Q,re,ye):-52}function En(m,v,I,D,H,Q,re){if(O)return Ge(18,1,m,v,I,D,H,Q,re)}var Pn=m=>{var v=Ur(m)+1,I=Ar(v);return I&&_n(m,I,v),I},wr=[],Xt=(m,v)=>{wr.length=0;var I;for(v>>=2;I=u()[m++>>>0];)v+=I!=105&v,wr.push(I==105?s()[v>>>0]:f()[v++>>>1]),++v;return wr},_i=m=>{var v=Pr();return m=m(),rr(v),m};function Ge(m,v){var I=arguments.length-2,D=arguments;return _i(()=>{for(var H=kr(8*I),Q=H>>3,re=0;re>>0]=ye}return Zr(m,I,H,v)})}var _r=[],Nr={},kn=()=>{if(!$r){var 
m={USER:\"web_user\",LOGNAME:\"web_user\",PATH:\"/\",PWD:\"/\",HOME:\"/home/web_user\",LANG:(typeof navigator==\"object\"&&navigator.languages&&navigator.languages[0]||\"C\").replace(\"-\",\"_\")+\".UTF-8\",_:$||\"./this.program\"},v;for(v in Nr)Nr[v]===void 0?delete m[v]:m[v]=Nr[v];var I=[];for(v in m)I.push(`${v}=${m[v]}`);$r=I}return $r},$r;function On(m,v){if(O)return Ge(19,1,m,v);m>>>=0,v>>>=0;var I=0;return kn().forEach(function(D,H){var Q=v+I;for(H=c()[m+4*H>>2>>>0]=Q,Q=0;Q>0>>>0]=D.charCodeAt(Q);t()[H>>0>>>0]=0,I+=D.length+1}),0}function jt(m,v){if(O)return Ge(20,1,m,v);m>>>=0,v>>>=0;var I=kn();c()[m>>2>>>0]=I.length;var D=0;return I.forEach(function(H){D+=H.length+1}),c()[v>>2>>>0]=D,0}function Gr(m){return O?Ge(21,1,m):52}function Hr(m,v,I,D){return O?Ge(22,1,m,v,I,D):52}function Cr(m,v,I,D,H){return O?Ge(23,1,m,v,I,D,H):70}var Rn=[null,[],[]];function Sr(m,v,I,D){if(O)return Ge(24,1,m,v,I,D);v>>>=0,I>>>=0,D>>>=0;for(var H=0,Q=0;Q>2>>>0],ye=c()[v+4>>2>>>0];v+=8;for(var he=0;he>>0],ve=Rn[m];fe===0||fe===10?((m===1?le:ge)(gn(ve,0)),ve.length=0):ve.push(fe)}H+=ye}return c()[D>>2>>>0]=H,0}var Lr=[31,29,31,30,31,30,31,31,30,31,30,31],xr=[31,28,31,30,31,30,31,31,30,31,30,31];function $i(m){var v=Array(Ur(m)+1);return wn(m,v,0,v.length),v}var Bn=(m,v)=>{t().set(m,v>>>0)};function Fr(m,v,I,D){function H(q,Ie,Ue){for(q=typeof q==\"number\"?q.toString():q||\"\";q.lengthsn?-1:0kt-q.getDate())Ie-=kt-q.getDate()+1,q.setDate(1),11>Ue?q.setMonth(Ue+1):(q.setMonth(0),q.setFullYear(q.getFullYear()+1));else{q.setDate(q.getDate()+Ie);break}}return Ue=new Date(q.getFullYear()+1,0,4),Ie=ye(new Date(q.getFullYear(),0,4)),Ue=ye(Ue),0>=re(Ie,q)?0>=re(Ue,q)?q.getFullYear()+1:q.getFullYear():q.getFullYear()-1}m>>>=0,v>>>=0,I>>>=0,D>>>=0;var fe=s()[D+40>>2>>>0];D={Kb:s()[D>>2>>>0],Jb:s()[D+4>>2>>>0],cb:s()[D+8>>2>>>0],ib:s()[D+12>>2>>>0],eb:s()[D+16>>2>>>0],$a:s()[D+20>>2>>>0],Wa:s()[D+24>>2>>>0],Za:s()[D+28>>2>>>0],Tb:s()[D+32>>2>>>0],Ib:s()[D+36>>2>>>0],Lb:fe?it(fe):\"\"},I=it(I),fe={\"%c\":\"%a %b %d %H:%M:%S %Y\",\"%D\":\"%m/%d/%y\",\"%F\":\"%Y-%m-%d\",\"%h\":\"%b\",\"%r\":\"%I:%M:%S %p\",\"%R\":\"%H:%M\",\"%T\":\"%H:%M:%S\",\"%x\":\"%m/%d/%y\",\"%X\":\"%H:%M:%S\",\"%Ec\":\"%c\",\"%EC\":\"%C\",\"%Ex\":\"%m/%d/%y\",\"%EX\":\"%H:%M:%S\",\"%Ey\":\"%y\",\"%EY\":\"%Y\",\"%Od\":\"%d\",\"%Oe\":\"%e\",\"%OH\":\"%H\",\"%OI\":\"%I\",\"%Om\":\"%m\",\"%OM\":\"%M\",\"%OS\":\"%S\",\"%Ou\":\"%u\",\"%OU\":\"%U\",\"%OV\":\"%V\",\"%Ow\":\"%w\",\"%OW\":\"%W\",\"%Oy\":\"%y\"};for(var ve in fe)I=I.replace(new RegExp(ve,\"g\"),fe[ve]);var Me=\"Sunday Monday Tuesday Wednesday Thursday Friday Saturday\".split(\" \"),je=\"January February March April May June July August September October November December\".split(\" \");fe={\"%a\":q=>Me[q.Wa].substring(0,3),\"%A\":q=>Me[q.Wa],\"%b\":q=>je[q.eb].substring(0,3),\"%B\":q=>je[q.eb],\"%C\":q=>Q((q.$a+1900)/100|0,2),\"%d\":q=>Q(q.ib,2),\"%e\":q=>H(q.ib,2,\" \"),\"%g\":q=>he(q).toString().substring(2),\"%G\":q=>he(q),\"%H\":q=>Q(q.cb,2),\"%I\":q=>(q=q.cb,q==0?q=12:12{for(var Ie=0,Ue=0;Ue<=q.eb-1;Ie+=(Nt(q.$a+1900)?Lr:xr)[Ue++]);return Q(q.ib+Ie,3)},\"%m\":q=>Q(q.eb+1,2),\"%M\":q=>Q(q.Jb,2),\"%n\":()=>`\n`,\"%p\":q=>0<=q.cb&&12>q.cb?\"AM\":\"PM\",\"%S\":q=>Q(q.Kb,2),\"%t\":()=>\"\t\",\"%u\":q=>q.Wa||7,\"%U\":q=>Q(Math.floor((q.Za+7-q.Wa)/7),2),\"%V\":q=>{var Ie=Math.floor((q.Za+7-(q.Wa+6)%7)/7);if(2>=(q.Wa+371-q.Za-2)%7&&Ie++,Ie)Ie==53&&(Ue=(q.Wa+371-q.Za)%7,Ue==4||Ue==3&&Nt(q.$a)||(Ie=1));else{Ie=52;var Ue=(q.Wa+7-q.Za-1)%7;(Ue==4||Ue==5&&Nt(q.$a%400-1))&&Ie++}return 
Q(Ie,2)},\"%w\":q=>q.Wa,\"%W\":q=>Q(Math.floor((q.Za+7-(q.Wa+6)%7)/7),2),\"%y\":q=>(q.$a+1900).toString().substring(2),\"%Y\":q=>q.$a+1900,\"%z\":q=>{q=q.Ib;var Ie=0<=q;return q=Math.abs(q)/60,(Ie?\"+\":\"-\")+(\"0000\"+(q/60*100+q%60)).slice(-4)},\"%Z\":q=>q.Lb,\"%%\":()=>\"%\"},I=I.replace(/%%/g,\"\\0\\0\");for(ve in fe)I.includes(ve)&&(I=I.replace(new RegExp(ve,\"g\"),fe[ve](D)));return I=I.replace(/\\0\\0/g,\"%\"),ve=$i(I),ve.length>v?0:(Bn(ve,m),ve.length-1)}function Ir(m){try{m()}catch(v){pt(v)}}function Dn(m){var v={},I;for(I in m)(function(D){var H=m[D];v[D]=typeof H==\"function\"?function(){M.push(D);try{return H.apply(null,arguments)}finally{Ee||(M.pop()===D||pt(),lt&&St===1&&M.length===0&&(St=0,Ke+=1,Ir(rn),typeof Fibers<\"u\"&&Fibers.Ub()))}}:H})(I);return v}var St=0,lt=null,zn=0,M=[],qr={},Jt={},_t=0,W=null,er=[];function N(){return new Promise((m,v)=>{W={resolve:m,reject:v}})}function Mn(){var m=Ar(65548),v=m+12;c()[m>>2>>>0]=v,c()[m+4>>2>>>0]=v+65536,v=M[0];var I=qr[v];return I===void 0&&(I=_t++,qr[v]=I,Jt[I]=v),v=I,s()[m+8>>2>>>0]=v,m}function G(){var m=s()[lt+8>>2>>>0];return m=ue[Jt[m]],--Ke,m()}function jn(m){if(!Ee){if(St===0){var v=!1,I=!1;m((D=0)=>{if(!Ee&&(zn=D,v=!0,I)){St=2,Ir(()=>nn(lt)),typeof Browser<\"u\"&&Browser.hb.sb&&Browser.hb.resume(),D=!1;try{var H=G()}catch(ye){H=ye,D=!0}var Q=!1;if(!lt){var re=W;re&&(W=null,(D?re.reject:re.resolve)(H),Q=!0)}if(D&&!Q)throw H}}),I=!0,v||(St=1,lt=Mn(),typeof Browser<\"u\"&&Browser.hb.sb&&Browser.hb.pause(),Ir(()=>tn(lt)))}else St===2?(St=0,Ir(on),Kr(lt),lt=null,er.forEach(D=>Zt(D))):pt(`invalid state: ${St}`);return zn}}function Un(m){return jn(v=>{m().then(v)})}Se.gb();var Vn=[null,yn,wt,gr,vn,jr,$n,Cn,Sn,yr,Vr,Yt,xn,In,Ct,yt,Wr,Tn,En,On,jt,Gr,Hr,Cr,Sr],Wn={r:function(m,v,I){return Un(async()=>{await d.xb(m,v,I)})},b:function(m,v,I){throw m>>>=0,new Mr(m).gb(v>>>0,I>>>0),ot=m,gt++,ot},O:function(m){Yr(m>>>0,!x,1,!P,131072,!1),Se.lb()},l:function(m){m>>>=0,O?postMessage({cmd:\"cleanupThread\",thread:m}):mt(m)},I:bn,i:vn,U:jr,E:$n,G:Cn,V:Sn,S:yr,K:Vr,R:Yt,p:xn,F:In,C:Ct,T:yt,D:Wr,q:()=>!0,A:function(m,v){m>>>=0,m==v>>>0?setTimeout(()=>Qt()):O?postMessage({targetThread:m,cmd:\"checkMailbox\"}):(m=Se.Qa[m])&&m.postMessage({cmd:\"checkMailbox\"})},M:function(){return-1},N:br,X:function(m){E&&Se.Qa[m>>>0].ref()},u:function(m,v,I){m=v+2097152>>>0<4194305-!!m?(m>>>0)+4294967296*v:NaN,I>>>=0,m=new Date(1e3*m),s()[I>>2>>>0]=m.getUTCSeconds(),s()[I+4>>2>>>0]=m.getUTCMinutes(),s()[I+8>>2>>>0]=m.getUTCHours(),s()[I+12>>2>>>0]=m.getUTCDate(),s()[I+16>>2>>>0]=m.getUTCMonth(),s()[I+20>>2>>>0]=m.getUTCFullYear()-1900,s()[I+24>>2>>>0]=m.getUTCDay(),m=(m.getTime()-Date.UTC(m.getUTCFullYear(),0,1,0,0,0,0))/864e5|0,s()[I+28>>2>>>0]=m},v:function(m,v,I){m=v+2097152>>>0<4194305-!!m?(m>>>0)+4294967296*v:NaN,I>>>=0,m=new Date(1e3*m),s()[I>>2>>>0]=m.getSeconds(),s()[I+4>>2>>>0]=m.getMinutes(),s()[I+8>>2>>>0]=m.getHours(),s()[I+12>>2>>>0]=m.getDate(),s()[I+16>>2>>>0]=m.getMonth(),s()[I+20>>2>>>0]=m.getFullYear()-1900,s()[I+24>>2>>>0]=m.getDay(),v=(Nt(m.getFullYear())?vr:An)[m.getMonth()]+m.getDate()-1|0,s()[I+28>>2>>>0]=v,s()[I+36>>2>>>0]=-(60*m.getTimezoneOffset()),v=new Date(m.getFullYear(),6,1).getTimezoneOffset();var D=new Date(m.getFullYear(),0,1).getTimezoneOffset();m=(v!=D&&m.getTimezoneOffset()==Math.min(D,v))|0,s()[I+32>>2>>>0]=m},w:function(m){m>>>=0;var v=new Date(s()[m+20>>2>>>0]+1900,s()[m+16>>2>>>0],s()[m+12>>2>>>0],s()[m+8>>2>>>0],s()[m+4>>2>>>0],s()[m>>2>>>0],0),I=s()[m+32>>2>>>0],D=v.getTimezoneOffset(),H=new 
Date(v.getFullYear(),6,1).getTimezoneOffset(),Q=new Date(v.getFullYear(),0,1).getTimezoneOffset(),re=Math.min(Q,H);return 0>I?s()[m+32>>2>>>0]=+(H!=Q&&re==D):0>2>>>0]=v.getDay(),I=(Nt(v.getFullYear())?vr:An)[v.getMonth()]+v.getDate()-1|0,s()[m+28>>2>>>0]=I,s()[m>>2>>>0]=v.getSeconds(),s()[m+4>>2>>>0]=v.getMinutes(),s()[m+8>>2>>>0]=v.getHours(),s()[m+12>>2>>>0]=v.getDate(),s()[m+16>>2>>>0]=v.getMonth(),s()[m+20>>2>>>0]=v.getYear(),m=v.getTime()/1e3,Xr((qe=m,1<=+Math.abs(qe)?0>>0:~~+Math.ceil((qe-+(~~qe>>>0))/4294967296)>>>0:0)),m>>>0},s:Tn,t:En,z:function(m,v,I){function D(fe){return(fe=fe.toTimeString().match(/\\(([A-Za-z ]+)\\)$/))?fe[1]:\"GMT\"}m>>>=0,v>>>=0,I>>>=0;var H=new Date().getFullYear(),Q=new Date(H,0,1),re=new Date(H,6,1);H=Q.getTimezoneOffset();var ye=re.getTimezoneOffset(),he=Math.max(H,ye);c()[m>>2>>>0]=60*he,s()[v>>2>>>0]=+(H!=ye),m=D(Q),v=D(re),m=Pn(m),v=Pn(v),ye>2>>>0]=m,c()[I+4>>2>>>0]=v):(c()[I>>2>>>0]=v,c()[I+4>>2>>>0]=m)},d:()=>{pt(\"\")},c:function(m,v,I){return m>>>=0,v=Xt(v>>>0,I>>>0),Kt[m].apply(null,v)},k:function(m,v,I){return m>>>=0,v=Xt(v>>>0,I>>>0),Kt[m].apply(null,v)},m:function(){},j:function(){return Date.now()},W:()=>{throw Ke+=1,\"unwind\"},B:function(){return 4294901760},f:()=>performance.timeOrigin+performance.now(),g:function(){return E?(eu(),ir(Js)).cpus().length:navigator.hardwareConcurrency},L:function(m,v,I,D){for(Se.Pb=v>>>0,_r.length=I,v=D>>>0>>3,D=0;D>>0];return(0>m?Kt[-m-1]:Vn[m]).apply(null,_r)},y:function(m){m>>>=0;var v=u().length;if(m<=v||4294901760=I;I*=2){var D=v*(1+.2/I);D=Math.min(D,m+100663296);var H=Math;D=Math.max(m,D);e:{H=H.min.call(H,4294901760,D+(65536-D%65536)%65536)-we.buffer.byteLength+65535>>>16;try{we.grow(H),ee();var Q=1;break e}catch{}Q=void 0}if(Q)return!0}return!1},P:On,Q:jt,H:ht,h:Gr,o:Hr,x:Cr,n:Sr,a:we||d.wasmMemory,J:Fr,e:function(m,v,I,D){return Fr(m>>>0,v>>>0,I>>>0,D>>>0)}};(function(){function m(I,D){return I=I.exports,I=Dn(I),ue=I=Nn(I),Se.mb.push(ue.Da),tt.unshift(ue.Y),me=D,Vt(),I}var v={a:Wn};if(Ut(),d.instantiateWasm)try{return d.instantiateWasm(v,m)}catch(I){ge(\"Module.instantiateWasm callback failed with error: \"+I),w(I)}return 
hn(v,function(I){m(I.instance,I.module)}).catch(w),{}})(),d._OrtInit=(m,v)=>(d._OrtInit=ue.Z)(m,v),d._OrtGetLastError=(m,v)=>(d._OrtGetLastError=ue._)(m,v),d._OrtCreateSessionOptions=(m,v,I,D,H,Q,re,ye,he,fe)=>(d._OrtCreateSessionOptions=ue.$)(m,v,I,D,H,Q,re,ye,he,fe),d._OrtAppendExecutionProvider=(m,v)=>(d._OrtAppendExecutionProvider=ue.aa)(m,v),d._OrtAddFreeDimensionOverride=(m,v,I)=>(d._OrtAddFreeDimensionOverride=ue.ba)(m,v,I),d._OrtAddSessionConfigEntry=(m,v,I)=>(d._OrtAddSessionConfigEntry=ue.ca)(m,v,I),d._OrtReleaseSessionOptions=m=>(d._OrtReleaseSessionOptions=ue.da)(m),d._OrtCreateSession=(m,v,I)=>(d._OrtCreateSession=ue.ea)(m,v,I),d._OrtReleaseSession=m=>(d._OrtReleaseSession=ue.fa)(m),d._OrtGetInputOutputCount=(m,v,I)=>(d._OrtGetInputOutputCount=ue.ga)(m,v,I),d._OrtGetInputName=(m,v)=>(d._OrtGetInputName=ue.ha)(m,v),d._OrtGetOutputName=(m,v)=>(d._OrtGetOutputName=ue.ia)(m,v),d._OrtFree=m=>(d._OrtFree=ue.ja)(m),d._OrtCreateTensor=(m,v,I,D,H,Q)=>(d._OrtCreateTensor=ue.ka)(m,v,I,D,H,Q),d._OrtGetTensorData=(m,v,I,D,H)=>(d._OrtGetTensorData=ue.la)(m,v,I,D,H),d._OrtReleaseTensor=m=>(d._OrtReleaseTensor=ue.ma)(m),d._OrtCreateRunOptions=(m,v,I,D)=>(d._OrtCreateRunOptions=ue.na)(m,v,I,D),d._OrtAddRunConfigEntry=(m,v,I)=>(d._OrtAddRunConfigEntry=ue.oa)(m,v,I),d._OrtReleaseRunOptions=m=>(d._OrtReleaseRunOptions=ue.pa)(m),d._OrtCreateBinding=m=>(d._OrtCreateBinding=ue.qa)(m),d._OrtBindInput=(m,v,I)=>(d._OrtBindInput=ue.ra)(m,v,I),d._OrtBindOutput=(m,v,I,D)=>(d._OrtBindOutput=ue.sa)(m,v,I,D),d._OrtClearBoundOutputs=m=>(d._OrtClearBoundOutputs=ue.ta)(m),d._OrtReleaseBinding=m=>(d._OrtReleaseBinding=ue.ua)(m),d._OrtRunWithBinding=(m,v,I,D,H)=>(d._OrtRunWithBinding=ue.va)(m,v,I,D,H),d._OrtRun=(m,v,I,D,H,Q,re,ye)=>(d._OrtRun=ue.wa)(m,v,I,D,H,Q,re,ye),d._OrtEndProfiling=m=>(d._OrtEndProfiling=ue.xa)(m),d._JsepOutput=(m,v,I)=>(d._JsepOutput=ue.ya)(m,v,I),d._JsepGetNodeName=m=>(d._JsepGetNodeName=ue.za)(m);var tr=d._pthread_self=()=>(tr=d._pthread_self=ue.Aa)(),Ar=d._malloc=m=>(Ar=d._malloc=ue.Ba)(m),Kr=d._free=m=>(Kr=d._free=ue.Ca)(m);d.__emscripten_tls_init=()=>(d.__emscripten_tls_init=ue.Da)();var Yr=d.__emscripten_thread_init=(m,v,I,D,H,Q)=>(Yr=d.__emscripten_thread_init=ue.Fa)(m,v,I,D,H,Q);d.__emscripten_thread_crashed=()=>(d.__emscripten_thread_crashed=ue.Ga)();var Zr=(m,v,I,D)=>(Zr=ue.Ha)(m,v,I,D),Tr=m=>(Tr=ue.Ia)(m),Er=d.__emscripten_thread_exit=m=>(Er=d.__emscripten_thread_exit=ue.Ja)(m),Qr=d.__emscripten_check_mailbox=()=>(Qr=d.__emscripten_check_mailbox=ue.Ka)(),Xr=m=>(Xr=ue.La)(m),Jr=(m,v)=>(Jr=ue.Ma)(m,v),Pr=()=>(Pr=ue.Na)(),rr=m=>(rr=ue.Oa)(m),kr=m=>(kr=ue.Pa)(m),en=d.dynCall_ii=(m,v)=>(en=d.dynCall_ii=ue.Ra)(m,v),tn=m=>(tn=ue.Sa)(m),rn=()=>(rn=ue.Ta)(),nn=m=>(nn=ue.Ua)(m),on=()=>(on=ue.Va)();d.___start_em_js=917678,d.___stop_em_js=917839;function Nn(m){m=Object.assign({},m);var v=D=>()=>D()>>>0,I=D=>H=>D(H)>>>0;return m.__errno_location=v(m.__errno_location),m.pthread_self=v(m.pthread_self),m.malloc=I(m.malloc),m.stackSave=v(m.stackSave),m.stackAlloc=I(m.stackAlloc),m}d.keepRuntimeAlive=Pt,d.wasmMemory=we,d.stackAlloc=kr,d.stackSave=Pr,d.stackRestore=rr,d.UTF8ToString=it,d.stringToUTF8=_n,d.lengthBytesUTF8=Ur,d.ExitStatus=vt,d.PThread=Se;var nr;ct=function m(){nr||an(),nr||(ct=m)};function an(){function m(){if(!nr&&(nr=!0,d.calledRun=!0,!Ee)&&(O||Wt(tt),g(d),d.onRuntimeInitialized&&d.onRuntimeInitialized(),!O)){if(d.postRun)for(typeof d.postRun==\"function\"&&(d.postRun=[d.postRun]);d.postRun.length;){var v=d.postRun.shift();Ye.unshift(v)}Wt(Ye)}}if(!(0tu)});var 
iu=un((N0,Uh)=>{Uh.exports='\"use strict\";var Module={},ENVIRONMENT_IS_NODE=typeof process==\"object\"&&typeof process.versions==\"object\"&&typeof process.versions.node==\"string\";if(ENVIRONMENT_IS_NODE){var nodeWorkerThreads=require(\"worker_threads\"),parentPort=nodeWorkerThreads.parentPort;parentPort.on(\"message\",e=>onmessage({data:e}));var fs=require(\"fs\");Object.assign(global,{self:global,require,Module,location:{href:__filename},Worker:nodeWorkerThreads.Worker,importScripts:e=>(0,eval)(fs.readFileSync(e,\"utf8\")+\"//# sourceURL=\"+e),postMessage:e=>parentPort.postMessage(e),performance:global.performance||{now:Date.now}})}var initializedJS=!1;function threadPrintErr(){var e=Array.prototype.slice.call(arguments).join(\" \");if(ENVIRONMENT_IS_NODE){fs.writeSync(2,e+`\\n`);return}console.error(e)}function threadAlert(){var e=Array.prototype.slice.call(arguments).join(\" \");postMessage({cmd:\"alert\",text:e,threadId:Module._pthread_self()})}var err=threadPrintErr;self.alert=threadAlert,Module.instantiateWasm=(e,t)=>{var a=Module.wasmModule;Module.wasmModule=null;var r=new WebAssembly.Instance(a,e);return t(r)},self.onunhandledrejection=e=>{throw e.reason??e};function handleMessage(e){try{if(e.data.cmd===\"load\"){let a=[];self.onmessage=r=>a.push(r),self.startWorker=r=>{Module=r,postMessage({cmd:\"loaded\"});for(let s of a)handleMessage(s);self.onmessage=handleMessage},Module.wasmModule=e.data.wasmModule;for(const r of e.data.handlers)Module[r]=(...s)=>{postMessage({cmd:\"callHandler\",handler:r,args:s})};if(Module.wasmMemory=e.data.wasmMemory,Module.buffer=Module.wasmMemory.buffer,Module.ENVIRONMENT_IS_PTHREAD=!0,typeof e.data.urlOrBlob==\"string\")importScripts(e.data.urlOrBlob);else{var t=URL.createObjectURL(e.data.urlOrBlob);importScripts(t),URL.revokeObjectURL(t)}ortWasmThreaded(Module)}else if(e.data.cmd===\"run\"){Module.__emscripten_thread_init(e.data.pthread_ptr,0,0,1),Module.__emscripten_thread_mailbox_await(e.data.pthread_ptr),Module.establishStackSpace(),Module.PThread.receiveObjectTransfer(e.data),Module.PThread.threadInitTLS(),initializedJS||(initializedJS=!0);try{Module.invokeEntryPoint(e.data.start_routine,e.data.arg)}catch(a){if(a!=\"unwind\")throw a}}else e.data.cmd===\"cancel\"?Module._pthread_self()&&Module.__emscripten_thread_exit(-1):e.data.target===\"setimmediate\"||(e.data.cmd===\"checkMailbox\"?initializedJS&&Module.checkMailbox():e.data.cmd&&(err(\"worker.js received unknown command \"+e.data.cmd),err(e.data)))}catch(a){throw Module.__emscripten_thread_crashed&&Module.__emscripten_thread_crashed(),a}}self.onmessage=handleMessage;\\n'});var Bi,Gt,or,Kn,dn,du,Di,Te=ae(()=>{\"use strict\";Bi=e=>{switch(e){case\"int8\":return 3;case\"uint8\":return 2;case\"bool\":return 9;case\"int16\":return 5;case\"uint16\":return 4;case\"int32\":return 6;case\"uint32\":return 12;case\"float16\":return 10;case\"float32\":return 1;case\"float64\":return 11;case\"string\":return 8;case\"int64\":return 7;case\"uint64\":return 13;default:throw new Error(`unsupported data type: ${e}`)}},Gt=e=>{switch(e){case 3:return\"int8\";case 2:return\"uint8\";case 9:return\"bool\";case 5:return\"int16\";case 4:return\"uint16\";case 6:return\"int32\";case 12:return\"uint32\";case 10:return\"float16\";case 1:return\"float32\";case 11:return\"float64\";case 8:return\"string\";case 7:return\"int64\";case 13:return\"uint64\";default:throw new Error(`unsupported data type: ${e}`)}},or=e=>[void 0,4,1,1,2,2,4,8,void 0,1,2,8,4,8,void 0,void 0,void 0][e],Kn=e=>{switch(e){case\"float16\":return 
typeof Float16Array<\"u\"&&Float16Array.from?Float16Array:Uint16Array;case\"float32\":return Float32Array;case\"uint8\":return Uint8Array;case\"int8\":return Int8Array;case\"uint16\":return Uint16Array;case\"int16\":return Int16Array;case\"int32\":return Int32Array;case\"bool\":return Uint8Array;case\"float64\":return Float64Array;case\"uint32\":return Uint32Array;case\"int64\":return BigInt64Array;case\"uint64\":return BigUint64Array;default:throw new Error(`unsupported type: ${e}`)}},dn=e=>{switch(e){case\"verbose\":return 0;case\"info\":return 1;case\"warning\":return 2;case\"error\":return 3;case\"fatal\":return 4;default:throw new Error(`unsupported logging level: ${e}`)}},du=e=>e===\"float32\"||e===\"float16\"||e===\"int32\"||e===\"int64\"||e===\"uint32\"||e===\"uint8\"||e===\"bool\",Di=e=>{switch(e){case\"none\":return 0;case\"cpu\":return 1;case\"cpu-pinned\":return 2;case\"texture\":return 3;case\"gpu-buffer\":return 4;default:throw new Error(`unsupported data location: ${e}`)}}});var Yn=ae(()=>{\"use strict\"});var fu=ae(()=>{\"use strict\";Yn()});var pu,mu=ae(()=>{\"use strict\";pu=\"1.18.0\"});var hu,bt,zi=ae(()=>{\"use strict\";mu();hu=\"warning\",bt={wasm:{},webgl:{},webgpu:{},versions:{common:pu},set logLevel(e){if(e!==void 0){if(typeof e!=\"string\"||[\"verbose\",\"info\",\"warning\",\"error\",\"fatal\"].indexOf(e)===-1)throw new Error(`Unsupported logging level: ${e}`);hu=e}},get logLevel(){return hu}};Object.defineProperty(bt,\"logLevel\",{enumerable:!0})});var Or,gu=ae(()=>{\"use strict\";zi();Or=bt});var yu=ae(()=>{\"use strict\"});var bu=ae(()=>{\"use strict\";Zn()});var wu=ae(()=>{\"use strict\"});var _u=ae(()=>{\"use strict\";Zn()});var Zn=ae(()=>{\"use strict\";yu();bu();wu();_u()});var Qn=ae(()=>{\"use strict\";Zn()});var Mi,$u,ar,Ht,ji=ae(()=>{\"use strict\";zi();Mi=(e,r)=>{(typeof bt.trace>\"u\"?!bt.wasm.trace:!bt.trace)||console.timeStamp(`${e}::ORT::${r}`)},$u=(e,r)=>{let t=new Error().stack?.split(/\\r\\n|\\r|\\n/g)||[],u=!1;for(let s=0;s{(typeof bt.trace>\"u\"?!bt.wasm.trace:!bt.trace)||$u(\"BEGIN\",e)},Ht=e=>{(typeof bt.trace>\"u\"?!bt.wasm.trace:!bt.trace)||$u(\"END\",e)}});var Cu=ae(()=>{\"use strict\";Yn();Qn();ji()});var Su=ae(()=>{\"use strict\";Cu()});var xu=ae(()=>{\"use strict\"});var Iu=ae(()=>{\"use strict\"});var Au=ae(()=>{\"use strict\"});var Tu=ae(()=>{\"use strict\"});var Eu=ae(()=>{\"use strict\";Yn();Qn()});var Pu=ae(()=>{\"use strict\";Eu()});var Rr=ae(()=>{\"use strict\";fu();gu();Su();Qn();xu();Iu();ji();Au();Tu();Pu()});var Zh,Qh,ku,Ou,Ru,Xh,Fe,Lt=ae(()=>{\"use strict\";Te();Zh=[\"V\",\"I\",\"W\",\"E\",\"F\"],Qh=(e,r)=>{console.log(`[${Zh[e]},${new Date().toISOString()}]${r}`)},Ru=(e,r)=>{ku=e,Ou=r},Xh=(e,r)=>{let t=dn(e),u=dn(ku);t>=u&&Qh(t,typeof r==\"function\"?r():r)},Fe=(...e)=>{Ou&&Xh(...e)}});var Bu,Du=ae(()=>{\"use strict\";Te();Bu=(e,r)=>new(Kn(r))(e)});var Xn=ae(()=>{\"use strict\"});var zu,Ui,Vi,Jh,eg,Mu,Ni,Wi,Uu,Vu=ae(()=>{\"use strict\";Lt();Xn();zu=new Map([[64,250],[128,200],[256,200],[512,200],[2048,230],[4096,200],[8192,50],[16384,50],[32768,50],[65536,50],[131072,50],[262144,50],[524288,50],[1048576,50],[2097152,30],[4194304,20],[8388608,10],[12582912,10],[16777216,10],[26214400,15],[33554432,22],[44236800,2],[58982400,6],[67108864,6],[134217728,6],[167772160,6]]),Ui=[],Vi=e=>Math.ceil(e/16)*16,Jh=e=>{for(let r=0;reg++,Ni=async(e,r,t,u)=>{let s=Vi(t),c=e.device.createBuffer({size:s,usage:GPUBufferUsage.COPY_DST|GPUBufferUsage.MAP_READ});try{let 
f=e.getCommandEncoder();e.endComputePass(),f.copyBufferToBuffer(r,0,c,0,s),e.flush(),await c.mapAsync(GPUMapMode.READ);let d=c.getMappedRange();if(u){let g=u();return g.set(new Uint8Array(d,0,t)),g}else return new Uint8Array(d.slice(0,t))}finally{c.destroy()}},Wi=class{constructor(r){this.backend=r;this.storageCache=new Map,this.freeBuffers=new Map,this.freeUniformBuffers=new Map,this.buffersForUploadingPending=[],this.buffersPending=[],this.externalBuffers=new Map,this.capturedPendingBuffers=new Map;for(let[t]of zu)Ui.push(t),this.freeBuffers.set(t,[]),this.freeUniformBuffers.set(t,[])}upload(r,t){let u=t.buffer,s=t.byteOffset,c=t.byteLength,f=Vi(c),d=this.storageCache.get(r);if(!d)throw new Error(\"gpu data for uploading does not exist\");if(d.originalSize!==c)throw new Error(`inconsistent data size. gpu data size=${d.originalSize}, data size=${c}`);let g=this.backend.device.createBuffer({mappedAtCreation:!0,size:f,usage:GPUBufferUsage.MAP_WRITE|GPUBufferUsage.COPY_SRC}),w=g.getMappedRange();new Uint8Array(w).set(new Uint8Array(u,s,c)),g.unmap();let C=this.backend.getCommandEncoder();this.backend.endComputePass(),C.copyBufferToBuffer(g,0,d.gpuData.buffer,0,f),Fe(\"verbose\",()=>`[WebGPU] GpuDataManager.upload(id=${r})`),this.buffersForUploadingPending.push(g)}memcpy(r,t){let u=this.storageCache.get(r);if(!u)throw new Error(\"source gpu data for memcpy does not exist\");let s=this.storageCache.get(t);if(!s)throw new Error(\"destination gpu data for memcpy does not exist\");if(u.originalSize!==s.originalSize)throw new Error(\"inconsistent source and destination gpu data size\");let c=Vi(u.originalSize),f=this.backend.getCommandEncoder();this.backend.endComputePass(),f.copyBufferToBuffer(u.gpuData.buffer,0,s.gpuData.buffer,0,c)}registerExternalBuffer(r,t,u){let s;if(u){if(s=this.externalBuffers.get(u),s===void 0)throw new Error(\"previous buffer is not registered\");if(r===u)return Fe(\"verbose\",()=>`[WebGPU] GpuDataManager.registerExternalBuffer(size=${t}) => id=${s}, buffer is the same, skip.`),s;if(this.backend.capturedCommandList.has(this.backend.currentSessionId))throw new Error(`Registering a different external buffer under graph capture mode is not supported yet.\n Please use the previous external buffer!`);this.externalBuffers.delete(u)}else s=Mu();return this.storageCache.set(s,{gpuData:{id:s,type:0,buffer:r},originalSize:t}),this.externalBuffers.set(r,s),Fe(\"verbose\",()=>`[WebGPU] GpuDataManager.registerExternalBuffer(size=${t}) => id=${s}, registered.`),s}unregisterExternalBuffer(r){let t=this.externalBuffers.get(r);t!==void 0&&(this.storageCache.delete(t),this.externalBuffers.delete(r),Fe(\"verbose\",()=>`[WebGPU] GpuDataManager.unregisterExternalBuffer() => id=${t}`))}create(r,t=GPUBufferUsage.STORAGE|GPUBufferUsage.COPY_SRC|GPUBufferUsage.COPY_DST){let u=Jh(r),s,c=(t&GPUBufferUsage.STORAGE)===GPUBufferUsage.STORAGE,f=(t&GPUBufferUsage.UNIFORM)===GPUBufferUsage.UNIFORM;if(c||f){let w=(c?this.freeBuffers:this.freeUniformBuffers).get(u);w?w.length>0?s=w.pop():s=this.backend.device.createBuffer({size:u,usage:t}):s=this.backend.device.createBuffer({size:u,usage:t})}else s=this.backend.device.createBuffer({size:u,usage:t});let d={id:Mu(),type:0,buffer:s};return this.storageCache.set(d.id,{gpuData:d,originalSize:r}),Fe(\"verbose\",()=>`[WebGPU] GpuDataManager.create(size=${r}) => id=${d.id}`),d}get(r){return this.storageCache.get(r)?.gpuData}release(r){let t=this.storageCache.get(r);if(!t)throw new Error(\"releasing data does not exist\");return Fe(\"verbose\",()=>`[WebGPU] 
GpuDataManager.release(id=${r}), gpuDataId=${t.gpuData.id}`),this.storageCache.delete(r),this.buffersPending.push(t.gpuData.buffer),t.originalSize}async download(r,t){let u=this.storageCache.get(r);if(!u)throw new Error(\"data does not exist\");await Ni(this.backend,u.gpuData.buffer,u.originalSize,t)}refreshPendingBuffers(){for(let r of this.buffersForUploadingPending)r.destroy();if(this.buffersForUploadingPending=[],this.buffersPending.length!==0)if(this.backend.sessionStatus===\"default\"){for(let r of this.buffersPending){let t=zu.get(r.size);if((r.usage&GPUBufferUsage.STORAGE)===GPUBufferUsage.STORAGE){let u=this.freeBuffers.get(r.size)||[];t===void 0||u.length>=t?r.destroy():u.push(r)}else if((r.usage&GPUBufferUsage.UNIFORM)===GPUBufferUsage.UNIFORM){let u=this.freeUniformBuffers.get(r.size)||[];t===void 0||u.length>=t?r.destroy():u.push(r)}else r.destroy()}this.buffersPending=[]}else{let r=this.capturedPendingBuffers.get(this.backend.currentSessionId);r||(r=[],this.capturedPendingBuffers.set(this.backend.currentSessionId,r));for(let t of this.buffersPending)r.push(t);this.buffersPending=[]}}dispose(){this.freeBuffers.forEach(r=>{r.forEach(t=>{t.destroy()})}),this.freeUniformBuffers.forEach(r=>{r.forEach(t=>{t.destroy()})}),this.storageCache.forEach(r=>{r.gpuData.buffer.destroy()}),this.capturedPendingBuffers.forEach(r=>{r.forEach(t=>{t.destroy()})}),this.storageCache=new Map,this.freeBuffers=new Map,this.freeUniformBuffers=new Map,this.capturedPendingBuffers=new Map}onReleaseSession(r){let t=this.capturedPendingBuffers.get(r);t&&(t.forEach(u=>{u.destroy()}),this.capturedPendingBuffers.delete(r))}},Uu=(...e)=>new Wi(...e)});var Gi,ke,nt=ae(()=>{\"use strict\";Gi=class{constructor(r){Object.assign(this,r)}get cacheKey(){return this.key||(this.key=Object.getOwnPropertyNames(this).sort().map(r=>`${this[r]}`).join(\";\")),this.key}},ke=e=>new Gi(e)});var Hi,Ot,K,sr,Jn,ei,ti,De=ae(()=>{\"use strict\";Hi=class{static calcMatMulShape(r,t){return r[1]!==t[0]?void 0:[r[0],t[1]]}},Ot=class{static calcShape(r,t,u=!1){let s=r.length,c=t.length;if(s===0)return t;if(c===0)return r;let f=Math.max(r.length,t.length),d=new Array(f);if(u){if(s<2||c<2)return;let g=Hi.calcMatMulShape([r[s-2],r[s-1]],[t[c-2],t[c-1]]);if(g===void 0)return;[d[f-2],d[f-1]]=g}for(let g=u?3:1;g<=f;g++){let w=s-g<0?1:r[s-g],C=c-g<0?1:t[c-g];if(w!==C&&w>1&&C>1)return;let $=Math.max(w,C);if(w&&C)d[f-g]=Math.max(w,C);else{if($>1)return;d[f-g]=0}}return d}static isValidBroadcast(r,t){let u=r.length,s=t.length;if(u>s)return!1;for(let c=1;c<=u;c++)if(r[u-c]!==1&&r[u-c]!==t[s-c])return!1;return!0}},K=class e{static size(r){return e.getSizeFromDimensionRange(r,0,r.length)}static convertShape(r,t=4){let u=r.length;if(u===0)return[];let s=new Array(u),c=u-1;for(;c>=0;){if(r[c]%t===0){s[c]=r[c]/t;break}if(t%r[c]!==0)throw new Error(\"cannot convert shape\");s[c]=1,t/=r[c],c--}for(c--;c>=0;c--)s[c]=r[c];return s}static sizeFromDimension(r,t){if(t<0||t>r.length)throw new Error(`invalid dimension of ${t} for sizeFromDimension as Tensor has ${r.length} dimensions.`);return e.getSizeFromDimensionRange(r,t,r.length)}static sizeToDimension(r,t){if(t<0||t>r.length)throw new Error(`invalid dimension of ${t} for sizeToDimension as Tensor has ${r.length} dimensions.`);return e.getSizeFromDimensionRange(r,0,t)}static getSizeFromDimensionRange(r,t,u){let s=1;for(let c=t;c=0;--s)u[s]=u[s+1]*r[s+1];return u}static normalizeAxis(r,t){if(r<-t&&r>=t)throw new Error(\"unsupported axis for this operation.\");return r<0?r+t:r}static normalizeAxes(r,t){return 
r.map(u=>this.normalizeAxis(u,t??r.length))}static sortBasedOnPerm(r,t){return t?t.map(u=>r[u]):r.slice().reverse()}static padShape(r,t){let u=r.length;return r.map((s,c)=>s+t[c]+t[c+u])}static areEqual(r,t){return r.length!==t.length?!1:r.every((u,s)=>u===t[s])}},sr=class e{static adjustPoolAttributes(r,t,u,s,c,f){if(!r&&u.length!==t.length-2)throw new Error(\"length of specified kernel shapes should be 2 less than length of input dimensions\");if(r)for(let d=0;d=u.length?u.push(t[d+2]):u[d]=t[d+2];for(let d=0;d=u[d]||f[d+u.length]>=u[d])throw new Error(\"pads should be smaller than kernel\")}}static adjustPadsBasedOnAutoPad(r,t,u,s,c,f,d){if(d){if(c.length!==2*(r.length-2))throw new Error(\"length of pads should be twice the length of data dimensions\");if(t.length!==r.length-2)throw new Error(\"length of strides should be the length of data dimensions\");if(s.length!==r.length-2)throw new Error(\"length of kernel shapes should be the length of data dimensions\");for(let g=0;g{\"use strict\";Te();De();ur=64,Fi=(e,r)=>{if(r===3)throw new Error(\"vec3 has same alignment as vec4, use vec4 instead\");switch(e){case 10:return r>1?`vec${r}`:\"f16\";case 1:return r>1?`vec${r}`:\"f32\";case 6:return r>1?`vec${r}`:\"i32\";case 12:return r>1?`vec${r}`:\"u32\";case 7:if(r>1)throw new Error(\"currently not supported vecX of uint64 yet\");return[\"vec2\",\"i32\"];case 13:if(r>1)throw new Error(\"currently not supported vecX of uint64 yet\");return[\"vec2\",\"u32\"];case 9:if(r!==4)throw new Error(\"bool must be vec4\");return[\"u32\",\"vec4\"];default:throw new Error(`Unknown data type: ${e}`)}},Ne=(e,r=1)=>{let t=Fi(e,r);return typeof t==\"string\"?t:t[0]},at=(e,r=1)=>{let t=Fi(e,r);return typeof t==\"string\"?t:t[1]},se=(...e)=>{let r=[];return e.forEach(t=>{t.length!==0&&r.push({type:12,data:t},{type:12,data:K.computeStrides(t)})}),r},He=e=>e%4===0?4:e%2===0?2:1,xt=(e=\"f32\",r,t=\"0\")=>!r||r===1?`${e}(${t})`:`vec${r}<${e}>(${t})`,lr=(e,r,t)=>e===\"f32\"?t:r===1?`f32(${t})`:`vec${r}(${t})`,It=(e,r)=>r===4?`(${e}.x + ${e}.y + ${e}.z + ${e}.w)`:r===2?`(${e}.x + ${e}.y)`:r===3?`(${e}.x + ${e}.y + ${e}.z)`:e,xe=(e,r,t,u)=>e.startsWith(\"uniforms.\")&&t>4?typeof r==\"string\"?u===\"f16\"?`${e}[(${r}) / 8][(${r}) % 8 / 4][(${r}) % 8 % 4]`:`${e}[(${r}) / 4][(${r}) % 4]`:u===\"f16\"?`${e}[${Math.floor(r/8)}][${Math.floor(r%8/4)}][${r%8%4}]`:`${e}[${Math.floor(r/4)}][${r%4}]`:t>1?`${e}[${r}]`:e,qi=(e,r,t,u,s)=>{let c=typeof t==\"number\",f=c?t:t.length,d=[...new Array(f).keys()],g=f<2?\"u32\":f<=4?`vec${f}`:`array`,w=Fi(r,s),C=typeof w==\"string\"?w:w[1],$=typeof w==\"string\"?w:w[0],A={indices:g,value:C,storage:$,tensor:r},P=ee=>typeof ee==\"string\"?ee:`${ee}u`,x={offsetToIndices:!1,indicesToOffset:!1,broadcastedIndicesToOffset:!1,set:!1,setByIndices:!1,get:!1,getByIndices:!1},E=c?\"uniforms.\":\"\",O=`${E}${e}_shape`,B=`${E}${e}_strides`,R=\"\";for(let ee=0;ee ${A.indices} {\n var indices: ${A.indices};\n var current = offset;\n ${R}\n return indices;\n }`,U=ee=>(x.offsetToIndices=!0,f<2?ee:`o2i_${e}(${ee})`),L=[];if(f>=2)for(let ee=f-1;ee>=0;ee--)L.push(`${xe(B,ee,f)} * (indices[${ee}])`);let F=f<2?\"\":`\n fn i2o_${e}(indices: ${A.indices}) -> u32 {\n return ${L.join(\"+\")};\n }`,te=ee=>(x.indicesToOffset=!0,f<2?ee:`i2o_${e}(${ee})`),J=(...ee)=>f===0?\"0u\":`${A.indices}(${ee.map(P).join(\",\")})`,oe=(ee,ce)=>f<2?`${ee}`:`${xe(ee,ce,f)}`,le=(ee,ce,Be)=>f<2?`${ee}=${Be};`:`${xe(ee,ce,f)}=${Be};`,ge={},X=(ee,ce)=>{x.broadcastedIndicesToOffset=!0;let Be=`${ce.name}broadcastedIndicesTo${e}Offset`;if(Be 
in ge)return`${Be}(${ee})`;let tt=[];for(let Ye=f-1;Ye>=0;Ye--){let Ke=ce.indicesGet(\"outputIndices\",Ye+ce.rank-f);tt.push(`${oe(B,Ye)} * (${Ke} % ${oe(O,Ye)})`)}return ge[Be]=`fn ${Be}(outputIndices: ${ce.type.indices}) -> u32 {\n return ${tt.length>0?tt.join(\"+\"):\"0u\"};\n }`,`${Be}(${ee})`},pe=(ee,ce)=>(()=>{if(A.storage===A.value)return`${e}[${ee}]=${ce};`;if(A.storage===\"vec2\"&&A.value===\"i32\")return`${e}[${ee}]=vec2(u32(${ce}), select(0u, 0xFFFFFFFFu, ${ce} < 0));`;if(A.storage===\"vec2\"&&A.value===\"u32\")return`${e}[${ee}]=vec2(u32(${ce}), 0u);`;if(A.storage===\"u32\"&&A.value===\"vec4\")return`${e}[${ee}]=dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(${ce}));`;throw new Error(`not supported combination of storage type ${A.storage} and value type ${A.value} yet`)})(),we=ee=>(()=>{if(A.storage===A.value)return`${e}[${ee}]`;if(A.storage===\"vec2\"&&A.value===\"i32\")return`i32(${e}[${ee}].x)`;if(A.storage===\"vec2\"&&A.value===\"u32\")return`u32(${e}[${ee}].x)`;if(A.storage===\"u32\"&&A.value===\"vec4\")return`vec4(bool(${e}[${ee}] & 0xFFu), bool(${e}[${ee}] & 0xFF00u), bool(${e}[${ee}] & 0xFF0000u), bool(${e}[${ee}] & 0xFF000000u))`;throw new Error(`not supported combination of storage type ${A.storage} and value type ${A.value} yet`)})(),ue=f<2?\"\":`\n fn get_${e}ByIndices(indices: ${A.indices}) -> ${C} {\n return ${we(`i2o_${e}(indices)`)};\n }`,me=f<2?\"\":(()=>{let ee=d.map(Be=>`d${Be}: u32`).join(\", \"),ce=d.map(Be=>`d${Be}`).join(\", \");return`\n fn get_${e}(${ee}) -> ${C} {\n return get_${e}ByIndices(${J(ce)});\n }`})(),Ee=(...ee)=>{if(ee.length!==f)throw new Error(`indices length must be ${f}`);let ce=ee.map(P).join(\",\");return f===0?we(\"0u\"):f===1?we(ce[0]):(x.get=!0,x.getByIndices=!0,x.indicesToOffset=!0,`get_${e}(${ce})`)},Pe=ee=>f<2?we(ee):(x.getByIndices=!0,x.indicesToOffset=!0,`get_${e}ByIndices(${ee})`),Ce=f<2?\"\":`\n fn set_${e}ByIndices(indices: ${A.indices}, value: ${C}) {\n ${pe(`i2o_${e}(indices)`,\"value\")}\n }`,be=f<2?\"\":(()=>{let ee=d.map(Be=>`d${Be}: u32`).join(\", \"),ce=d.map(Be=>`d${Be}`).join(\", \");return`\n fn set_${e}(${ee}, value: ${C}) {\n set_${e}ByIndices(${J(ce)}, value);\n }`})();return{impl:()=>{let ee=[],ce=!1;return x.offsetToIndices&&(ee.push(j),ce=!0),x.indicesToOffset&&(ee.push(F),ce=!0),x.broadcastedIndicesToOffset&&(Object.values(ge).forEach(Be=>ee.push(Be)),ce=!0),x.set&&(ee.push(be),ce=!0),x.setByIndices&&(ee.push(Ce),ce=!0),x.get&&(ee.push(me),ce=!0),x.getByIndices&&(ee.push(ue),ce=!0),!c&&ce&&ee.unshift(`const ${O} = ${A.indices}(${t.join(\",\")});`,`const ${B} = ${A.indices}(${K.computeStrides(t).join(\",\")});`),ee.join(`\n`)},type:A,offsetToIndices:U,indicesToOffset:te,broadcastedIndicesToOffset:X,indices:J,indicesGet:oe,indicesSet:le,set:(...ee)=>{if(ee.length!==f+1)throw new Error(`indices length must be ${f}`);let ce=ee[f];if(typeof ce!=\"string\")throw new Error(\"value must be string\");let Be=ee.slice(0,f).map(P).join(\",\");return f===0?pe(\"0u\",ce):f===1?pe(Be[0],ce):(x.set=!0,x.setByIndices=!0,x.indicesToOffset=!0,`set_${e}(${Be}, ${ce})`)},setByOffset:pe,setByIndices:(ee,ce)=>f<2?pe(ee,ce):(x.setByIndices=!0,x.indicesToOffset=!0,`set_${e}ByIndices(${ee}, 
${ce});`),get:Ee,getByOffset:we,getByIndices:Pe,usage:u,name:e,strides:B,shape:O,rank:f}},Z=(e,r,t,u=1)=>qi(e,r,t,\"input\",u),ne=(e,r,t,u=1)=>qi(e,r,t,\"output\",u),ri=(e,r,t,u=1)=>qi(e,r,t,\"internal\",u),Li=class{constructor(r,t){this.normalizedDispatchGroup=r;this.limits=t;this.internalVariables=[];this.variables=[];this.uniforms=[];this.variableIndex=0}guardAgainstOutOfBoundsWorkgroupSizes(r){return`if (global_idx >= ${typeof r==\"number\"?`${r}u`:r}) { return; }`}mainStart(r=ur){let t=typeof r==\"number\"?r:r[0],u=typeof r==\"number\"?1:r[1],s=typeof r==\"number\"?1:r[2];if(t>this.limits.maxComputeWorkgroupSizeX||u>this.limits.maxComputeWorkgroupSizeY||s>this.limits.maxComputeWorkgroupSizeZ)throw new Error(`workgroup size [${t}, ${u}, ${s}] exceeds the maximum workgroup size [${this.limits.maxComputeWorkgroupSizeX}, ${this.limits.maxComputeWorkgroupSizeY}, ${this.limits.maxComputeWorkgroupSizeZ}].`);if(t*u*s>this.limits.maxComputeInvocationsPerWorkgroup)throw new Error(`workgroup size [${t}, ${u}, ${s}] exceeds the maximum workgroup invocations ${this.limits.maxComputeInvocationsPerWorkgroup}.`);let c=this.normalizedDispatchGroup[1]===1&&this.normalizedDispatchGroup[2]===1,f=c?`@builtin(global_invocation_id) global_id : vec3,\n @builtin(workgroup_id) workgroup_id : vec3,\n @builtin(local_invocation_id) local_id : vec3`:`@builtin(global_invocation_id) global_id : vec3,\n @builtin(local_invocation_id) local_id : vec3,\n @builtin(local_invocation_index) local_idx : u32,\n @builtin(workgroup_id) workgroup_id : vec3,\n @builtin(num_workgroups) num_workgroups : vec3`,d=c?\"let global_idx = global_id.x; let local_idx = local_id.x;\":`let global_idx = (workgroup_id.z * num_workgroups[0] * num_workgroups[1] +\n workgroup_id.y * num_workgroups[0] + workgroup_id.x) * ${t*u*s}u + local_idx;`;return`@compute @workgroup_size(${t}, ${u}, ${s})\n fn main(${f}) {\n ${d}\n `}appendVariableUniforms(r){r.rank!==0&&(r.shape.startsWith(\"uniforms.\")&&this.uniforms.push({name:r.shape.replace(\"uniforms.\",\"\"),type:\"u32\",length:r.rank}),r.strides.startsWith(\"uniforms.\")&&this.uniforms.push({name:r.strides.replace(\"uniforms.\",\"\"),type:\"u32\",length:r.rank}))}declareVariable(r,t){if(r.usage===\"internal\")throw new Error(\"cannot use internal variable with declareVariable(). use registerInternalVariables() instead.\");this.variables.push(r),this.appendVariableUniforms(r);let u=r.usage===\"input\"?\"read\":\"read_write\",s=r.type.storage;return`@group(0) @binding(${t}) var ${r.name}: array<${s}>;`}declareVariables(...r){return r.map(t=>this.declareVariable(t,this.variableIndex++)).join(`\n`)}registerInternalVariable(r){if(r.usage!==\"internal\")throw new Error(\"cannot use input or output variable with registerInternalVariable(). 
use declareVariables() instead.\");this.internalVariables.push(r),this.appendVariableUniforms(r)}registerInternalVariables(...r){return r.forEach(t=>this.registerInternalVariable(t)),this}registerUniform(r,t,u=1){return this.uniforms.push({name:r,type:t,length:u}),this}registerUniforms(r){return this.uniforms=this.uniforms.concat(r),this}uniformDeclaration(){if(this.uniforms.length===0)return\"\";let r=[];for(let{name:t,type:u,length:s}of this.uniforms)if(s&&s>4)u===\"f16\"?r.push(`@align(16) ${t}:array, ${Math.ceil(s/8)}>`):r.push(`${t}:array, ${Math.ceil(s/4)}>`);else{let c=s==null||s===1?u:`vec${s}<${u}>`;r.push(`${t}:${c}`)}return`\n struct Uniforms { ${r.join(\", \")} };\n @group(0) @binding(${this.variableIndex}) var uniforms: Uniforms;`}get additionalImplementations(){return this.uniformDeclaration()+this.variables.map(r=>r.impl()).join(`\n`)+this.internalVariables.map(r=>r.impl()).join(`\n`)}get variablesInfo(){if(this.uniforms.length===0)return;let r=t=>[12,10,1,6][[\"u32\",\"f16\",\"f32\",\"i32\"].indexOf(t)];return this.uniforms.map(t=>[r(t.type),t.length??1])}},Wu=(e,r)=>new Li(e,r),Br=(e,r)=>{let t=e.length,u=[];for(let s=0;s1&&f===1&&u.unshift(c)}return u}});var tg,Nu,rg,ng,$t,Gu,Hu,Dr=ae(()=>{\"use strict\";Te();De();nt();Re();tg=e=>{if(!e||e.length!==1)throw new Error(\"Transpose requires 1 input.\")},Nu=(e,r)=>r&&r.length!==e?[...new Array(e).keys()].reverse():r,rg=(e,r)=>K.sortBasedOnPerm(e,Nu(e.length,r)),ng=(e,r,t,u)=>{let s=[];s.push(`fn perm(i: ${u.type.indices}) -> ${t.type.indices} {\n var a: ${t.type.indices};`);for(let c=0;c{let t=e.dataType,u=e.dims.length,s=Nu(u,r),c=rg(e.dims,s),f=ne(\"output\",t,c.length),d=Z(\"a\",t,u),g=w=>`\n ${w.registerUniform(\"output_size\",\"u32\").declareVariables(d,f)}\n\n ${ng(s,u,d,f)}\n\n ${w.mainStart()}\n ${w.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size\")}\n\n let indices = ${f.offsetToIndices(\"global_idx\")};\n let aIndices = perm(indices);\n\n ${f.setByOffset(\"global_idx\",d.getByIndices(\"aIndices\"))}\n }`;return{name:\"Transpose\",shaderCache:{hint:`${r}`,inputDependencies:[\"rank\"]},getRunData:w=>{let C=K.size(c);return{outputs:[{dims:c,dataType:w[0].dataType}],dispatchGroup:{x:Math.ceil(C/64)},programUniforms:[{type:12,data:C},...se(w[0].dims,c)]}},getShaderSource:g}},Gu=(e,r)=>{tg(e.inputs),e.compute($t(e.inputs[0],r.perm))},Hu=e=>ke({perm:e.perm})});var ig,og,ag,sg,ug,lg,dg,cg,fg,pg,Rt,Lu,Fu,qu,Ku,Yu,Zu,Qu,Xu,Ju,el,tl=ae(()=>{\"use strict\";Te();De();Re();ni();Dr();ig={max:\"select(bestValue, candidate, candidate > bestValue)\",min:\"select(bestValue, candidate, candidate < bestValue)\",mean:\"bestValue + candidate\",sum:\"bestValue + candidate\",prod:\"bestValue * candidate\",sumSquare:\"bestValue + candidate * candidate\",logSumExp:\"bestValue + exp(candidate)\",l1:\"bestValue + abs(candidate)\",l2:\"bestValue + candidate * candidate\",logSum:\"bestValue + candidate\"},og={max:\"select(bestValue, candidate, candidate > bestValue)\",min:\"select(bestValue, candidate, candidate < bestValue)\",mean:\"bestValue + candidate\",sum:\"bestValue + candidate\",prod:\"bestValue * candidate\",sumSquare:\"bestValue + candidate\",logSumExp:\"bestValue + candidate\",l1:\"bestValue + candidate\",l2:\"bestValue + candidate\",logSum:\"bestValue + 
candidate\"},ag={max:\"_A[offset]\",min:\"_A[offset]\",mean:\"0\",sum:\"0\",prod:\"1\",sumSquare:\"0\",logSumExp:\"0\",l1:\"0\",l2:\"0\",logSum:\"0\"},sg={max:\"bestValue\",min:\"bestValue\",sum:\"bestValue\",prod:\"bestValue\",sumSquare:\"bestValue\",logSumExp:\"log(bestValue)\",l1:\"bestValue\",l2:\"sqrt(bestValue)\",logSum:\"log(bestValue)\"},ug=(e,r)=>{let t=[];for(let u=r-e;u{let t=[],u=e.length;for(let c=0;ce[c]);return[t,s]},dg=(e,r)=>{let t=e.length+r.length,u=[],s=0;for(let c=0;c{for(let t=0;t{let t=[];if(!cg(e,r)){for(let u=0;ut.push(u))}return t},pg=(e,r,t,u,s,c,f)=>{let d=t[0].dims,g=K.size(c),w=K.size(f),C=Z(\"_A\",t[0].dataType,d),$=ne(\"output\",s,c),A=32,P=`\n var aBestValues : array;\n `;return{name:e,shaderCache:r,getShaderSource:E=>`\n ${E.registerUniform(\"reduceSize\",\"u32\").declareVariables(C,$)}\n ${P}\n fn DIV_CEIL(a : u32, b : u32) -> u32 {\n return ((a - 1u) / b + 1u);\n }\n ${E.mainStart(A)}\n\n let outputIndex = global_idx / ${A};\n let offset = outputIndex * uniforms.reduceSize;\n\n var bestValue = f32(${ag[u]});\n let Length = uniforms.reduceSize;\n for (var k = local_idx; k < Length; k = k + ${A}) {\n let candidate = f32(${C.getByOffset(\"offset + k\")});\n bestValue = ${ig[u]};\n }\n aBestValues[local_idx] = bestValue;\n workgroupBarrier();\n\n var reduceSize = min(Length, ${A}u);\n for (var currentSize = reduceSize / 2u; reduceSize > 1u;\n currentSize = reduceSize / 2u) {\n let interval = DIV_CEIL(reduceSize, 2u);\n if (local_idx < currentSize) {\n let candidate = aBestValues[local_idx + interval];\n bestValue = ${og[u]};\n aBestValues[local_idx] = bestValue;\n }\n reduceSize = interval;\n workgroupBarrier();\n }\n\n if (local_idx == 0u) {\n ${$.setByOffset(\"outputIndex\",`${u===\"mean\"?`${$.type.storage}(bestValue / f32(uniforms.reduceSize))`:`${$.type.storage}(${sg[u]})`}`)};\n }\n }`,getRunData:()=>({outputs:[{dims:c,dataType:s}],dispatchGroup:{x:g},programUniforms:[{type:12,data:w}]})}},Rt=(e,r,t,u)=>{let s=e.inputs.length===1?t:Ki(e.inputs,t),c=s.axes;c.length===0&&!s.noopWithEmptyAxes&&(c=e.inputs[0].dims.map((P,x)=>x));let f=K.normalizeAxes(c,e.inputs[0].dims.length),d=f,g=e.inputs[0],w=fg(d,e.inputs[0].dims.length);w.length>0&&(g=e.compute($t(e.inputs[0],w),{inputs:[0],outputs:[-1]})[0],d=ug(d.length,g.dims.length));let[C,$]=lg(g.dims,d),A=C;s.keepDims&&(A=dg(C,f)),e.compute(pg(r,{hint:s.cacheKey,inputDependencies:[\"type\"]},[g],u,e.inputs[0].dataType,A,$),{inputs:[g]})},Lu=(e,r)=>{Rt(e,\"ReduceMeanShared\",r,\"mean\")},Fu=(e,r)=>{Rt(e,\"ReduceL1Shared\",r,\"l1\")},qu=(e,r)=>{Rt(e,\"ReduceL2Shared\",r,\"l2\")},Ku=(e,r)=>{Rt(e,\"ReduceLogSumExpShared\",r,\"logSumExp\")},Yu=(e,r)=>{Rt(e,\"ReduceMaxShared\",r,\"max\")},Zu=(e,r)=>{Rt(e,\"ReduceMinShared\",r,\"min\")},Qu=(e,r)=>{Rt(e,\"ReduceProdShared\",r,\"prod\")},Xu=(e,r)=>{Rt(e,\"ReduceSumShared\",r,\"sum\")},Ju=(e,r)=>{Rt(e,\"ReduceSumSquareShared\",r,\"sumSquare\")},el=(e,r)=>{Rt(e,\"ReduceLogSumShared\",r,\"logSum\")}});var Bt,mg,ii,Ki,Dt,hg,gg,yg,bg,vg,wg,_g,$g,Cg,Sg,zt,rl,nl,il,ol,al,sl,ul,ll,dl,cl,ni=ae(()=>{\"use strict\";Te();De();nt();Re();tl();Bt=e=>{if(!e||e.length===0||e.length>2)throw new Error(\"Reduce op requires 1 or 2 inputs.\");if(e.length===2&&e[1].dims.length!==1)throw new Error(\"Invalid axes input dims.\")},mg=e=>[\"\",\"\",`var value = ${e.getByIndices(\"input_indices\")};`,\"\"],ii=(e,r,t,u,s,c,f=!1,d=!1)=>{let g=[],w=t[0].dims,C=w.length,$=K.normalizeAxes(s,C),A=!d&&$.length===0;w.forEach((O,B)=>{A||$.indexOf(B)>=0?f&&g.push(1):g.push(O)});let 
P=g.length,x=K.size(g);return{name:e,shaderCache:r,getShaderSource:O=>{let B=[],R=Z(\"_A\",t[0].dataType,C),j=ne(\"output\",c,P),U=u(R,j,$),L=U[2];for(let F=0,te=0;F=0?(f&&te++,L=`for(var j${F}: u32 = 0; j${F} < ${w[F]}; j${F}++) {\n ${U[2].includes(\"last_index\")?`let last_index = j${F};`:\"\"}\n ${R.indicesSet(\"input_indices\",F,`j${F}`)}\n ${L}\n }`):(B.push(`${R.indicesSet(\"input_indices\",F,j.indicesGet(\"output_indices\",te))};`),te++);return`\n\n ${O.registerUniform(\"output_size\",\"u32\").declareVariables(R,j)}\n\n ${O.mainStart()}\n ${O.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size\")}\n var input_indices: ${R.type.indices};\n let output_indices = ${j.offsetToIndices(\"global_idx\")};\n\n ${B.join(`\n`)}\n ${U[0]} // init ops for reduce max/min\n ${U[1]}\n ${L}\n ${U[3]}\n ${U.length===4?j.setByOffset(\"global_idx\",\"value\"):U.slice(4).join(`\n`)}\n }`},getRunData:()=>({outputs:[{dims:g,dataType:c}],dispatchGroup:{x:Math.ceil(x/64)},programUniforms:[{type:12,data:x},...se(w,g)]})}},Ki=(e,r)=>{let t=[];return e[1].dims[0]>0&&e[1].getBigInt64Array().forEach(u=>t.push(Number(u))),ke({axes:t,keepDims:r.keepDims,noopWithEmptyAxes:r.noopWithEmptyAxes})},Dt=(e,r,t,u)=>{let s=e.inputs,c=s.length===1?t:Ki(s,t);e.compute(ii(r,{hint:c.cacheKey,inputDependencies:[\"rank\"]},[s[0]],c.noopWithEmptyAxes&&c.axes.length===0?mg:u,c.axes,s[0].dataType,c.keepDims,c.noopWithEmptyAxes),{inputs:[0]})},hg=(e,r)=>{Bt(e.inputs),Dt(e,\"ReduceLogSum\",r,(u,s)=>[`var value = ${s.type.storage}(0);`,\"\",`value += ${u.getByIndices(\"input_indices\")};`,\"value = log(value);\"])},gg=(e,r)=>{Bt(e.inputs),Dt(e,\"ReduceL1\",r,(u,s)=>[`var value = ${s.type.storage}(0);`,\"\",`value += abs(${u.getByIndices(\"input_indices\")});`,\"\"])},yg=(e,r)=>{Bt(e.inputs),Dt(e,\"ReduceL2\",r,(u,s)=>[`var t = ${s.type.value}(0); var value = ${s.type.value}(0);`,\"\",`t = ${u.getByIndices(\"input_indices\")}; value += (t * t);`,\"value = sqrt(value);\"])},bg=(e,r)=>{Bt(e.inputs),Dt(e,\"ReduceLogSumExp\",r,(u,s)=>[`var value = ${s.type.storage}(0);`,\"\",`value += exp(${u.getByIndices(\"input_indices\")});`,\"value = log(value);\"])},vg=(e,r)=>{Bt(e.inputs),Dt(e,\"ReduceMax\",r,(u,s,c)=>{let f=[];for(let d=0;d=0||c.length===0)&&f.push(u.indicesSet(\"input_indices\",d,0));return[`${f.join(`\n`)}`,`var value = ${u.getByIndices(\"input_indices\")};`,`value = max(value, ${u.getByIndices(\"input_indices\")});`,\"\"]})},wg=(e,r)=>{Bt(e.inputs),Dt(e,\"ReduceMean\",r,(u,s,c)=>{let f=1;for(let d=0;d=0||c.length===0)&&(f*=e.inputs[0].dims[d]);return[\"var sum = f32(0);\",\"\",`sum += f32(${u.getByIndices(\"input_indices\")});`,`let value = ${s.type.value}(sum / ${f});`]})},_g=(e,r)=>{Bt(e.inputs),Dt(e,\"ReduceMin\",r,(u,s,c)=>{let f=[];for(let d=0;d=0||c.length===0)&&f.push(`input_indices[${d}] = 0;`);return[`${f.join(`\n`)}`,`var value = ${u.getByIndices(\"input_indices\")};`,`value = min(value, ${u.getByIndices(\"input_indices\")});`,\"\"]})},$g=(e,r)=>{Bt(e.inputs),Dt(e,\"ReduceProd\",r,(u,s)=>[`var value = ${s.type.storage}(1);`,\"\",`value *= ${u.getByIndices(\"input_indices\")};`,\"\"])},Cg=(e,r)=>{Bt(e.inputs),Dt(e,\"ReduceSum\",r,(u,s)=>[`var value = ${s.type.storage}(0);`,\"\",`value += ${u.getByIndices(\"input_indices\")};`,\"\"])},Sg=(e,r)=>{Bt(e.inputs),Dt(e,\"ReduceSumSquare\",r,(u,s)=>[`var t = ${s.type.value}(0); var value = ${s.type.value}(0);`,\"\",`t = ${u.getByIndices(\"input_indices\")}; value += t * t;`,\"\"])},zt=(e,r,t)=>{if(r.length===0)return t;let u=1,s=1;for(let 
c=0;c1024},rl=(e,r)=>{zt(e.inputs[0].dims,r.axes,r.noopWithEmptyAxes)?wg(e,r):Lu(e,r)},nl=(e,r)=>{zt(e.inputs[0].dims,r.axes,r.noopWithEmptyAxes)?gg(e,r):Fu(e,r)},il=(e,r)=>{zt(e.inputs[0].dims,r.axes,r.noopWithEmptyAxes)?yg(e,r):qu(e,r)},ol=(e,r)=>{zt(e.inputs[0].dims,r.axes,r.noopWithEmptyAxes)?bg(e,r):Ku(e,r)},al=(e,r)=>{zt(e.inputs[0].dims,r.axes,r.noopWithEmptyAxes)?vg(e,r):Yu(e,r)},sl=(e,r)=>{zt(e.inputs[0].dims,r.axes,r.noopWithEmptyAxes)?_g(e,r):Zu(e,r)},ul=(e,r)=>{zt(e.inputs[0].dims,r.axes,r.noopWithEmptyAxes)?$g(e,r):Qu(e,r)},ll=(e,r)=>{zt(e.inputs[0].dims,r.axes,r.noopWithEmptyAxes)?Cg(e,r):Xu(e,r)},dl=(e,r)=>{zt(e.inputs[0].dims,r.axes,r.noopWithEmptyAxes)?Sg(e,r):Ju(e,r)},cl=(e,r)=>{zt(e.inputs[0].dims,r.axes,r.noopWithEmptyAxes)?hg(e,r):el(e,r)}});var fl,pl,ml,Yi,hl=ae(()=>{\"use strict\";Te();nt();ni();fl=e=>{if(!e||e.length===0||e.length>2)throw new Error(\"ArgMinMaxOp op requires 1 or 2 inputs.\");if(e[0].dataType!==1)throw new Error(\"Invalid input type.\")},pl=(e,r)=>{fl(e.inputs);let t=(u,s,c)=>{let f=[];for(let d=0;d=0||c.length===0)&&f.push(`input_indices[${d}] = 0;`);return[`${f.join(`\n`)}`,`var value = ${u.getByIndices(\"input_indices\")};\nvar best_index : i32 = 0;`,`if (${u.getByIndices(\"input_indices\")} ${r.selectLastIndex>0?\"<=\":\"<\"} value) {\n value = ${u.getByIndices(\"input_indices\")};\n best_index = i32(last_index);\n }`,\"\",s.setByOffset(\"global_idx\",\"best_index\")]};e.compute(ii(\"ArgMin\",{hint:r.cacheKey,inputDependencies:[\"rank\"]},[e.inputs[0]],t,[r.axis],7,r.keepDims),{inputs:[0]})},ml=(e,r)=>{fl(e.inputs);let t=(u,s,c)=>{let f=[];for(let d=0;d=0||c.length===0)&&f.push(`input_indices[${d}] = 0;`);return[`${f.join(`\n`)}`,`var value = ${u.getByIndices(\"input_indices\")};\nvar best_index : i32 = 0;`,`if (${u.getByIndices(\"input_indices\")} ${r.selectLastIndex>0?\">=\":\">\"} value) {\n value = ${u.getByIndices(\"input_indices\")};\n best_index = i32(last_index);\n }`,\"\",s.setByOffset(\"global_idx\",\"best_index\")]};e.compute(ii(\"argMax\",{hint:r.cacheKey,inputDependencies:[\"rank\"]},[e.inputs[0]],t,[r.axis],7,r.keepDims),{inputs:[0]})},Yi=e=>ke(e)});var xg,Ig,Ag,oi,gl,yl,Zi=ae(()=>{\"use strict\";Te();De();nt();Re();xg=(e,r)=>{if(!e||e.length<1)throw new Error(\"too few inputs\");let t=0,u=e[t],s=u.dataType,c=u.dims.length;e.forEach((f,d)=>{if(d!==t){if(f.dataType!==s)throw new Error(\"input tensors should be one type\");if(f.dims.length!==c)throw new Error(\"input tensors should have the same shape\");f.dims.forEach((g,w)=>{if(w!==r&&g!==u.dims[w])throw new Error(\"non concat dimensions must match\")})}})},Ig=(e,r)=>`\n fn calculateInputIndex(index: u32) -> u32 {\n let sizeInConcatAxis = array(${r});\n for (var i: u32 = 0u; i < ${e}; i += 1u ) {\n if (index < sizeInConcatAxis[i]) {\n return i;\n }\n }\n return ${e}u;\n }`,Ag=(e,r)=>{let t=e.length,u=[];for(let s=0;s{let s=K.size(t),c=new Array(e.length),f=new Array(e.length),d=0,g=[],w=[],C=[{type:12,data:s}];for(let E=0;E`uniforms.sizeInConcatAxis${E}`).join(\",\"),x=E=>`\n\n ${(()=>{E.registerUniform(\"outputSize\",\"u32\");for(let O=0;O(${P});\n ${A} -= sizeInConcatAxis[inputIndex - 1u];\n }\n\n ${Ag(f,$)}\n }`;return{name:\"Concat\",shaderCache:{hint:`${r}`,inputDependencies:g},getRunData:()=>({outputs:[{dims:t,dataType:u}],dispatchGroup:{x:Math.ceil(s/64)},programUniforms:C}),getShaderSource:x}},gl=(e,r)=>{let t=e.inputs,u=t[0].dims,s=K.normalizeAxis(r.axis,u.length);xg(t,s);let c=u.slice();c[s]=t.reduce((d,g)=>d+(g.dims.length>s?g.dims[s]:0),0);let 
f=t.filter(d=>K.size(d.dims)>0);e.compute(oi(f,s,c,t[0].dataType),{inputs:f})},yl=e=>ke({axis:e.axis})});var Tg,Eg,Pg,kg,ai,Og,bl,Qi=ae(()=>{\"use strict\";Te();Xn();Re();Zi();Tg=(e,r)=>{let t=e[0],u=e[1],s=e[2],c=e[3],f=e[4],d=e[5];if(f&&d)throw new Error(\"Attention cannot have both past and relative_position_bias\");if(t.dims.length!==3)throw new Error('Input \"input\" must have 3 dimensions');let g=t.dims[0],w=t.dims[1],C=t.dims[2];if(s.dims.length!==1)throw new Error('Input \"bias\" is expected to have 1 dimensions');if(u.dims.length!==2)throw new Error('Input \"weights\" is expected to have 2 dimensions');if(u.dims[0]!==C)throw new Error(\"Input 1 dimension 0 should have same length as dimension 2 of input 0\");if(s.dims[0]!==u.dims[1])throw new Error('Input \"bias\" dimension 0 should have same length as dimension 1 of input \"weights\"');let $=s.dims[0]/3,A=$,P=A;if(r.qkvHiddenSizes.length>0){if(r.qkvHiddenSizes.length!==3)throw new Error(\"qkv_hidden_sizes attribute should have 3 elements\");for(let j of r.qkvHiddenSizes)if(j%r.numHeads!==0)throw new Error(\"qkv_hidden_sizes should be divisible by num_heads\");$=r.qkvHiddenSizes[0],A=r.qkvHiddenSizes[1],P=r.qkvHiddenSizes[2]}let x=w;if($!==A)throw new Error(\"qkv_hidden_sizes first element should be same as the second\");if(s.dims[0]!==$+A+P)throw new Error('Input \"bias\" dimension 0 should have same length as sum of Q/K/V hidden sizes');let E=0;if(f){if(A!==P)throw new Error('Input \"past\" expect k_hidden_size == v_hidden_size');if(f.dims.length!==5)throw new Error('Input \"past\" must have 5 dimensions');if(f.dims[0]!==2)throw new Error('Input \"past\" first dimension must be 2');if(f.dims[1]!==g)throw new Error('Input \"past\" second dimension must be batch_size');if(f.dims[2]!==r.numHeads)throw new Error('Input \"past\" third dimension must be num_heads');if(f.dims[4]!==A/r.numHeads)throw new Error('Input \"past\" fifth dimension must be k_hidden_size / num_heads');r.pastPresentShareBuffer||(E=f.dims[3])}let O=x+E,B=-1,R=0;if(c)throw new Error(\"Mask not supported\");if(f)throw new Error(\"past is not supported\");return{batchSize:g,sequenceLength:w,pastSequenceLength:E,kvSequenceLength:x,totalSequenceLength:O,maxSequenceLength:B,inputHiddenSize:C,hiddenSize:$,vHiddenSize:P,headSize:Math.floor($/r.numHeads),vHeadSize:Math.floor(P/r.numHeads),numHeads:r.numHeads,isUnidirectional:!1,pastPresentShareBuffer:!1,maskFilterValue:r.maskFilterValue,maskType:R,scale:r.scale,broadcastResPosBias:!1,passPastInKv:!1,qkvFormat:1}},Eg=(e,r,t,u)=>{let s=He(u),c=64,f=u/s;f{let P=ne(\"x\",r.dataType,r.dims,s),E=[{name:\"d_inv\",type:at(r.dataType)},{name:\"d_comp\",type:\"u32\"},{name:\"elements_per_thread\",type:\"u32\"}];return`\n var thread_max: array;\n var thread_sum: array;\n ${A.registerUniforms(E).declareVariables(P)}\n ${A.mainStart([c,1,1])}\n let local_offset = local_idx * uniforms.elements_per_thread;\n let offset = workgroup_id.x * uniforms.d_comp + local_offset;\n\n var thread_max_vector = ${C}(-3.402823e+38f);\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n thread_max_vector = max(${C}(x[offset + i]), thread_max_vector);\n }\n thread_max[local_idx] = ${(()=>{switch(s){case 1:return\"thread_max_vector\";case 2:return\"max(thread_max_vector.x, thread_max_vector.y)\";case 4:return\"max(max(thread_max_vector.x, thread_max_vector.y), max(thread_max_vector.z, thread_max_vector.w))\";default:throw new Error(`Unsupported components: ${s}`)}})()};\n workgroupBarrier();\n\n var 
max_value = f32(-3.402823e+38f);\n for (var i = 0u; i < ${c}; i++) {\n max_value = max(thread_max[i], max_value);\n }\n\n var sum_vector = ${C}(0);\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n sum_vector += exp(${C}(x[offset + i]) - max_value);\n }\n thread_sum[local_idx] = ${(()=>{switch(s){case 1:return\"sum_vector\";case 2:return\"sum_vector.x + sum_vector.y\";case 4:return\"sum_vector.x + sum_vector.y + sum_vector.z + sum_vector.w\";default:throw new Error(`Unsupported components: ${s}`)}})()};\n workgroupBarrier();\n\n var sum: f32 = 0;\n for (var i = 0u; i < ${c}; i++) {\n sum += thread_sum[i];\n }\n\n if (sum == 0) {\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n x[offset + i] = ${P.type.value}(uniforms.d_inv);\n }\n } else {\n for (var i: u32 = 0; i < uniforms.elements_per_thread && i + local_offset < uniforms.d_comp; i++) {\n var f32input = ${C}(x[offset + i]);\n x[offset + i] = ${P.type.value}(exp(f32input - max_value) / sum);\n }\n }\n }`};return{name:\"AttentionProbsSoftmax\",shaderCache:{hint:`${c};${w};${s}`},getShaderSource:$,getRunData:()=>({outputs:[],dispatchGroup:{x:t},programUniforms:g})}},Pg=(e,r,t,u,s,c,f)=>{let d=f+s.kvSequenceLength,g=[s.batchSize,s.numHeads,s.sequenceLength,d],w=c.scale===0?1/Math.sqrt(s.headSize):c.scale,C=He(s.headSize),$=s.headSize/C,A=12,P={x:Math.ceil(d/A),y:Math.ceil(s.sequenceLength/A),z:s.batchSize*s.numHeads},x=[{type:12,data:s.sequenceLength},{type:12,data:$},{type:12,data:d},{type:12,data:s.numHeads},{type:1,data:w}],E=[\"type\",\"type\"];u&&(E.push(\"rank\"),x.push(...se(u.dims)));let O=B=>{let R=Z(\"q\",r.dataType,r.dims,C),j=Z(\"key\",t.dataType,t.dims,C),U=[R,j],L=u?Z(\"relative_position_bias\",u.dataType,u.dims.length):void 0;L&&U.push(L);let F=ne(\"output\",r.dataType,g),te=at(1,C),J=[{name:\"M\",type:\"u32\"},{name:\"K\",type:\"u32\"},{name:\"N\",type:\"u32\"},{name:\"num_heads\",type:\"u32\"},{name:\"alpha\",type:\"f32\"}];return`\n const TILE_SIZE = ${A}u;\n\n var tileQ: array<${R.type.storage}, ${A*A}>;\n var tileK: array<${R.type.storage}, ${A*A}>;\n ${B.registerUniforms(J).declareVariables(...U,F)}\n ${B.mainStart([A,A,1])}\n // x holds the N and y holds the M\n let headIdx = workgroup_id.z;\n let m = workgroup_id.y * TILE_SIZE;\n let n = workgroup_id.x * TILE_SIZE;\n let qOffset = uniforms.M * uniforms.K * headIdx + m * uniforms.K;\n let kOffset = uniforms.N * uniforms.K * headIdx + n * uniforms.K;\n\n var value = ${te}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (global_id.y < uniforms.M && w + local_id.x < uniforms.K) {\n tileQ[TILE_SIZE * local_id.y + local_id.x] = q[qOffset + local_id.y * uniforms.K + w + local_id.x];\n }\n if (n + local_id.y < uniforms.N && w + local_id.x < uniforms.K) {\n tileK[TILE_SIZE * local_id.y + local_id.x] = key[kOffset + local_id.y * uniforms.K + w + local_id.x];\n }\n workgroupBarrier();\n\n for (var k: u32 = 0u; k < TILE_SIZE && w+k < uniforms.K; k++) {\n value += ${te}(tileQ[TILE_SIZE * local_id.y + k] * tileK[TILE_SIZE * local_id.x + k]);\n }\n\n workgroupBarrier();\n }\n\n let headOffset = headIdx * uniforms.M * uniforms.N;\n if (global_id.y < uniforms.M && global_id.x < uniforms.N) {\n let outputIdx = headOffset + global_id.y * uniforms.N + global_id.x;\n var sum: f32 = ${(()=>{switch(C){case 1:return\"value\";case 2:return\"value.x + value.y\";case 4:return\"value.x + value.y + value.z + value.w\";default:throw new Error(`Unsupported components: 
${C}`)}})()};\n\n ${(()=>L?`\n let batch = workgroup_id.z / uniforms.num_heads;\n let head = workgroup_id.z % uniforms.num_heads;\n var indices = ${L.type.indices}(batch, head, global_id.y, global_id.x);\n output[outputIdx] = ${F.type.value}(sum * uniforms.alpha) + ${L.getByIndices(\"indices\")};`:`output[outputIdx] = ${F.type.value} (sum * uniforms.alpha);`)()}\n }\n }`};return{name:\"AttentionProbs\",shaderCache:{hint:`${C}`,inputDependencies:E},getRunData:()=>({outputs:[{dims:g,dataType:r.dataType,gpuDataType:0}],dispatchGroup:P,programUniforms:x}),getShaderSource:O}},kg=(e,r,t,u,s)=>{let c=s+u.kvSequenceLength,f=[u.batchSize,u.sequenceLength,u.vHiddenSize],d=12,g={x:Math.ceil(u.vHeadSize/d),y:Math.ceil(u.sequenceLength/d),z:u.batchSize*u.numHeads},w=[{type:12,data:u.sequenceLength},{type:12,data:c},{type:12,data:u.vHeadSize},{type:12,data:u.numHeads},{type:12,data:u.vHiddenSize}];return{name:\"AttentionScore\",shaderCache:{inputDependencies:[\"type\",\"type\"]},getRunData:()=>({outputs:[{dims:f,dataType:r.dataType,gpuDataType:0}],dispatchGroup:g,programUniforms:w}),getShaderSource:A=>{let P=Z(\"probs\",r.dataType,r.dims),x=Z(\"v\",t.dataType,t.dims),E=ne(\"output\",r.dataType,f),O=[{name:\"M\",type:\"u32\"},{name:\"K\",type:\"u32\"},{name:\"N\",type:\"u32\"},{name:\"num_heads\",type:\"u32\"},{name:\"v_hidden_size\",type:\"u32\"}];return`\n const TILE_SIZE = ${d}u;\n var tileQ: array<${P.type.value}, ${d*d}>;\n var tileK: array<${P.type.value}, ${d*d}>;\n ${A.registerUniforms(O).declareVariables(P,x,E)}\n ${A.mainStart([d,d,1])}\n let headIdx = workgroup_id.z;\n let m = global_id.y;\n let n = global_id.x;\n\n let offsetA = headIdx * (uniforms.M * uniforms.K) + m * uniforms.K;\n let offsetB = headIdx * (uniforms.N * uniforms.K) + n;\n\n var value = ${P.type.storage}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (m < uniforms.M && w + local_id.x < uniforms.K) {\n tileQ[TILE_SIZE * local_id.y + local_id.x] = probs[offsetA + w + local_id.x];\n }\n if (n < uniforms.N && w + local_id.y < uniforms.K) {\n tileK[TILE_SIZE * local_id.y + local_id.x] = v[offsetB + (w + local_id.y) * uniforms.N];\n }\n workgroupBarrier();\n for (var k: u32 = 0u; k < TILE_SIZE && w+k < uniforms.K; k++) {\n value += tileQ[TILE_SIZE * local_id.y + k] * tileK[TILE_SIZE * k + local_id.x];\n }\n workgroupBarrier();\n }\n\n // we need to transpose output from BNSH_v to BSND_v\n let batchIdx = workgroup_id.z / uniforms.num_heads;\n let currentBatchHeadNumber = workgroup_id.z % uniforms.num_heads;\n if (m < uniforms.M && n < uniforms.N) {\n let outputIdx = batchIdx * uniforms.M * uniforms.v_hidden_size + m * uniforms.v_hidden_size\n + currentBatchHeadNumber * uniforms.N + n;\n output[outputIdx] = value;\n }\n }`}}},ai=(e,r,t,u,s,c,f,d,g,w,C)=>{let $=e.outputCount>1,A=e.outputCount>2,P=$&&A?w.pastSequenceLength:0,x=P+w.kvSequenceLength,E=[w.batchSize,w.numHeads,x,w.headSize],O=f?[f,t]:[t],B=$?e.compute(oi(O,2,E,t.dataType),{inputs:O,outputs:[1]})[0]:t,R=[w.batchSize,w.numHeads,x,w.headSize],j=d?[d,u]:[u],U=A?e.compute(oi(j,2,R,u.dataType),{inputs:j,outputs:[2]})[0]:u,L=[r,B];g&&L.push(g);let F=e.compute(Pg(e,r,B,g,w,C,P),{inputs:L,outputs:[-1]})[0];e.compute(Eg(e,F,w.batchSize*w.numHeads*w.sequenceLength,x),{inputs:[F],outputs:[]});let te=[F,U];e.compute(kg(e,F,U,w,P),{inputs:te,outputs:[0]})},Og=(e,r)=>{let 
t=[r.batchSize,r.numHeads,r.sequenceLength,r.headSize],u=r.sequenceLength,s=r.inputHiddenSize,c=r.headSize,f=12,d={x:Math.ceil(r.headSize/f),y:Math.ceil(r.sequenceLength/f),z:r.batchSize*r.numHeads},g=[e.inputs[0],e.inputs[1],e.inputs[2]],w=[{type:12,data:u},{type:12,data:s},{type:12,data:c},{type:12,data:r.numHeads},{type:12,data:r.headSize},{type:12,data:r.hiddenSize},{type:12,data:r.hiddenSize+r.hiddenSize+r.vHiddenSize}],C=$=>{let A=ne(\"output_q\",g[0].dataType,t),P=ne(\"output_k\",g[0].dataType,t),x=ne(\"output_v\",g[0].dataType,t),E=Z(\"input\",g[0].dataType,g[0].dims),O=Z(\"weight\",g[1].dataType,g[1].dims),B=Z(\"bias\",g[2].dataType,g[2].dims),R=E.type.storage,j=[{name:\"M\",type:\"u32\"},{name:\"K\",type:\"u32\"},{name:\"N\",type:\"u32\"},{name:\"num_heads\",type:\"u32\"},{name:\"head_size\",type:\"u32\"},{name:\"hidden_size\",type:\"u32\"},{name:\"ldb\",type:\"u32\"}];return`\n const TILE_SIZE = ${f}u;\n var tileInput: array<${R}, ${f*f}>;\n var tileWeightQ: array<${R}, ${f*f}>;\n var tileWeightK: array<${R}, ${f*f}>;\n var tileWeightV: array<${R}, ${f*f}>;\n ${$.registerUniforms(j).declareVariables(E,O,B,A,P,x)}\n ${$.mainStart([f,f,1])}\n let batchIndex = workgroup_id.z / uniforms.num_heads;\n let headNumber = workgroup_id.z % uniforms.num_heads;\n let m = global_id.y;\n let n = global_id.x;\n\n let inputOffset = batchIndex * (uniforms.M * uniforms.K) + m * uniforms.K;\n let biasOffsetQ = headNumber * uniforms.head_size;\n let biasOffsetK = uniforms.hidden_size + biasOffsetQ;\n let biasOffsetV = uniforms.hidden_size + biasOffsetK;\n\n var valueQ = ${R}(0);\n var valueK = ${R}(0);\n var valueV = ${R}(0);\n for (var w: u32 = 0u; w < uniforms.K; w += TILE_SIZE) {\n if (m < uniforms.M && w + local_id.x < uniforms.K) {\n tileInput[TILE_SIZE * local_id.y + local_id.x] = input[inputOffset + w + local_id.x];\n }\n if (n < uniforms.N && w + local_id.y < uniforms.K) {\n let offset = n + (w + local_id.y) * uniforms.ldb;\n tileWeightQ[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetQ + offset];\n tileWeightK[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetK + offset];\n tileWeightV[TILE_SIZE * local_id.y + local_id.x] = weight[biasOffsetV + offset];\n }\n workgroupBarrier();\n for (var k: u32 = 0u; k({outputs:[{dims:t,dataType:e.inputs[0].dataType,gpuDataType:0},{dims:t,dataType:e.inputs[0].dataType,gpuDataType:0},{dims:t,dataType:e.inputs[0].dataType,gpuDataType:0}],dispatchGroup:d,programUniforms:w}),getShaderSource:C},{inputs:g,outputs:[-1,-1,-1]})},bl=(e,r)=>{let t=Tg(e.inputs,r),[u,s,c]=Og(e,t);return ai(e,u,s,c,e.inputs[4],void 0,void 0,void 0,e.inputs[5],t,r)}});var Rg,Bg,Dg,vl,wl=ae(()=>{\"use strict\";Rr();Te();De();nt();Re();Rg=(e,r)=>{if(!e||e.length!==5)throw new Error(\"BatchNormalization requires 5 inputs\");let t=(u,s,c)=>{let f=s.length;if(f!==u.length)throw new Error(`${c}: num dimensions != ${f}`);s.forEach((d,g)=>{if(d!==u[g])throw new Error(`${c}: dim[${g}] do not match`)})};if(e[0].dims.length>1){let u=r.format===\"NHWC\"?r.spatial?e[0].dims.slice(-1):e[0].dims.slice(-1).concat(e[0].dims.slice(1,e[0].dims.length-1)):e[0].dims.slice(1,r.spatial?2:void 0);t(e[1].dims,u,\"Invalid input scale\"),t(e[2].dims,u,\"Invalid input B\"),t(e[3].dims,u,\"Invalid input mean\"),t(e[4].dims,u,\"Invalid input var\")}else t(e[1].dims,[1],\"Invalid input scale\"),t(e[2].dims,[1],\"Invalid input B\"),t(e[3].dims,[1],\"Invalid input mean\"),t(e[4].dims,[1],\"Invalid input 
var\")},Bg=(e,r)=>{let{epsilon:t,spatial:u,format:s}=r,c=e[0].dims,f=u?He(c[c.length-1]):1,d=s===\"NHWC\"&&c.length>1?f:1,g=K.size(c)/f,w=u,C=w?c.length:c,$=Z(\"x\",e[0].dataType,e[0].dims,f),A=Z(\"scale\",e[1].dataType,e[1].dims,d),P=Z(\"bias\",e[2].dataType,e[2].dims,d),x=Z(\"inputMean\",e[3].dataType,e[3].dims,d),E=Z(\"inputVar\",e[4].dataType,e[4].dims,d),O=ne(\"y\",e[0].dataType,C,f),B=()=>{let j=\"\";if(u)j=`let cOffset = ${c.length===1?\"0u\":s===\"NHWC\"?`outputIndices[${c.length-1}] / ${f}`:\"outputIndices[1]\"};`;else if(s===\"NCHW\")j=`\n ${O.indicesSet(\"outputIndices\",\"0\",\"0\")}\n let cOffset = ${O.indicesToOffset(\"outputIndices\")};`;else{j=`var cIndices = ${A.type.indices}(0);\n cIndices[0] = outputIndices[${c.length-1}];`;for(let U=1;U`\n const epsilon = ${t};\n ${j.registerUniform(\"outputSize\",\"u32\").declareVariables($,A,P,x,E,O)}\n ${j.mainStart()}\n ${j.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.outputSize\")}\n var outputIndices = ${O.offsetToIndices(`global_idx * ${f}`)};\n ${B()}\n let scale = ${A.getByOffset(\"cOffset\")};\n let bias = ${P.getByOffset(\"cOffset\")};\n let inputMean = ${x.getByOffset(\"cOffset\")};\n let inputVar = ${E.getByOffset(\"cOffset\")};\n let x = ${$.getByOffset(\"global_idx\")};\n let value = (x - inputMean) * inverseSqrt(inputVar + epsilon) * scale + bias;\n ${O.setByOffset(\"global_idx\",\"value\")}\n }`;return{name:\"BatchNormalization\",shaderCache:{hint:`${r.epsilon}_${r.format}_${u}_${f}`,inputDependencies:w?[\"rank\",\"type\",\"type\",\"type\",\"type\"]:void 0},getShaderSource:R,getRunData:()=>({outputs:[{dims:e[0].dims,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(g/64)},programUniforms:w?[{type:12,data:g},...se(c)]:[{type:12,data:g}]})}},Dg=e=>ke(e),vl=(e,r)=>{let{inputs:t,outputCount:u}=e,s=Dg({...r,outputCount:u});if(Or.webgpu.validateInputContent&&Rg(t,s),r.trainingMode)throw new Error(\"BatchNormalization trainingMode is not supported yet.\");e.compute(Bg(t,s))}});var zg,Mg,_l,$l=ae(()=>{\"use strict\";De();Re();zg=e=>{if(e[0].dims.length!==3)throw new Error(\"input should have 3 dimensions\");if(![320,640,1280].includes(e[0].dims[2]))throw new Error(\"number of channels should be 320, 640 or 1280\");if(e[1].dims.length!==1)throw new Error(\"bias is expected to have 1 dimensions\");if(e[0].dims[2]!==e[1].dims[0])throw new Error(\"last dimension of input and bias are not the same\")},Mg=e=>{let r=e[0].dims,t=e[0].dims[2],u=K.size(r)/4,s=e[0].dataType,c=Z(\"input\",s,r,4),f=Z(\"bias\",s,[t],4),d=Z(\"residual\",s,r,4),g=ne(\"output\",s,r,4);return{name:\"BiasAdd\",getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(u/64)}}),getShaderSource:C=>`\n const channels = ${t}u / 4;\n ${C.declareVariables(c,f,d,g)}\n\n ${C.mainStart()}\n ${C.guardAgainstOutOfBoundsWorkgroupSizes(u)}\n let value = ${c.getByOffset(\"global_idx\")}\n + ${f.getByOffset(\"global_idx % channels\")} + ${d.getByOffset(\"global_idx\")};\n ${g.setByOffset(\"global_idx\",\"value\")}\n }`}},_l=e=>{zg(e.inputs),e.compute(Mg(e.inputs))}});var jg,Ve,Cl,Sl,xl,Il,Al,Tl,El,Pl,kl,Ug,Ol,Rl,Bl,Dl,si,zl,ui,Ml,jl,Ul,Vl,Wl,Nl,Gl,Hl,Ll,Fl,ql,Kl,Yl,Zl,Ql,Xl,Jl,ed,Xi,Ji,td,rd,nd,li=ae(()=>{\"use strict\";Te();De();nt();Re();jg=(e,r,t,u,s,c)=>{let f=Math.ceil(r/4),d=\"\";typeof s==\"string\"?d=`${s}(a)`:d=s(\"a\");let g=Z(\"inputData\",t,[f],4),w=ne(\"outputData\",u,[f],4);return`\n ${e.registerUniform(\"vec_size\",\"u32\").declareVariables(g,w)}\n\n ${c??\"\"}\n\n ${e.mainStart()}\n 
${e.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.vec_size\")}\n\n let a = ${g.getByOffset(\"global_idx\")};\n ${w.setByOffset(\"global_idx\",d)}\n }`},Ve=(e,r,t,u,s,c=e.dataType)=>({name:r,shaderCache:{hint:s,inputDependencies:[\"type\"]},getShaderSource:f=>jg(f,K.size(e.dims),e.dataType,c,t,u),getRunData:f=>({outputs:[{dims:e.dims,dataType:c}],dispatchGroup:{x:Math.ceil(K.size(f[0].dims)/64/4)},programUniforms:[{type:12,data:Math.ceil(K.size(e.dims)/4)}]})}),Cl=e=>{e.compute(Ve(e.inputs[0],\"Abs\",\"abs\"))},Sl=e=>{e.compute(Ve(e.inputs[0],\"Acos\",\"acos\"))},xl=e=>{e.compute(Ve(e.inputs[0],\"Acosh\",\"acosh\"))},Il=e=>{e.compute(Ve(e.inputs[0],\"Asin\",\"asin\"))},Al=e=>{e.compute(Ve(e.inputs[0],\"Asinh\",\"asinh\"))},Tl=e=>{e.compute(Ve(e.inputs[0],\"Atan\",\"atan\"))},El=e=>{e.compute(Ve(e.inputs[0],\"Atanh\",\"atanh\"))},Pl=e=>ke(e),kl=(e,r)=>{let t;switch(r.to){case 10:t=\"vec4\";break;case 1:t=\"vec4\";break;case 12:t=\"vec4\";break;case 6:t=\"vec4\";break;case 9:t=\"vec4\";break;default:throw new RangeError(`not supported type (specified in attribute 'to' from 'Cast' operator): ${r.to}`)}e.compute(Ve(e.inputs[0],\"Cast\",t,void 0,r.cacheKey,r.to))},Ug=e=>{let r=e.length>=2&&e[1].data!==0?e[1].getFloat32Array()[0]:ei,t=e.length>=3&&e[2].data!==0?e[2].getFloat32Array()[0]:ti;return ke({min:r,max:t})},Ol=(e,r)=>{let t=e.inputs.length===1?r:Ug(e.inputs),u=at(e.inputs[0].dataType);e.compute(Ve(e.inputs[0],\"Clip\",s=>`clamp(${s}, clip_min_, clip_max_)`,`\n const clip_min_: vec4<${u}> = vec4(${u}(${t.min}));\n const clip_max_: vec4<${u}> = vec4(${u}(${t.max}));\n`,t.cacheKey),{inputs:[0]})},Rl=e=>{e.compute(Ve(e.inputs[0],\"Ceil\",\"ceil\"))},Bl=e=>{e.compute(Ve(e.inputs[0],\"Cos\",\"cos\"))},Dl=e=>{e.compute(Ve(e.inputs[0],\"Cosh\",\"cosh\"))},si=e=>ke(e),zl=(e,r)=>{let t=at(e.inputs[0].dataType);e.compute(Ve(e.inputs[0],\"Elu\",u=>`elu_vf32(${u})`,`\n const elu_alpha_ = ${t}(${r.alpha});\n\n fn elu_f32(a: ${t}) -> ${t} {\n return select((exp(a) - 1.0) * elu_alpha_, a, a >= 0.0);\n }\n\n fn elu_vf32(v: vec4<${t}>) -> vec4<${t}> {\n return vec4(elu_f32(v.x), elu_f32(v.y), elu_f32(v.z), elu_f32(v.w));\n }`,r.cacheKey))},ui=(e=\"f32\")=>`\nconst r0: ${e} = 0.3275911;\nconst r1: ${e} = 0.254829592;\nconst r2: ${e} = -0.284496736;\nconst r3: ${e} = 1.421413741;\nconst r4: ${e} = -1.453152027;\nconst r5: ${e} = 1.061405429;\n\nfn erf_vf32(v: vec4<${e}>) -> vec4<${e}> {\n let absv = abs(v);\n let x = 1.0 / (1.0 + r0 * absv);\n return sign(v) * (1.0 - ((((r5 * x + r4) * x + r3) * x + r2) * x + r1) * x * exp(-absv * absv));\n}`,Ml=e=>{let r=at(e.inputs[0].dataType);e.compute(Ve(e.inputs[0],\"Erf\",t=>`erf_vf32(${t})`,ui(r)))},jl=e=>{e.compute(Ve(e.inputs[0],\"Exp\",\"exp\"))},Ul=e=>{e.compute(Ve(e.inputs[0],\"Floor\",\"floor\"))},Vl=e=>{let r=at(e.inputs[0].dataType);e.compute(Ve(e.inputs[0],\"Gelu\",t=>`0.5 * ${t} * (1.0 + erf_vf32(${t} * 0.7071067811865475))`,ui(r)))},Wl=(e,r)=>{let t=at(e.inputs[0].dataType);e.compute(Ve(e.inputs[0],\"LeakyRelu\",u=>`select(leaky_relu_alpha_ * ${u}, ${u}, ${u} >= vec4<${t}>(0.0))`,`const leaky_relu_alpha_ = ${t}(${r.alpha});`,r.cacheKey))},Nl=e=>{e.compute(Ve(e.inputs[0],\"Not\",r=>`!${r}`))},Gl=e=>{e.compute(Ve(e.inputs[0],\"Neg\",r=>`-${r}`))},Hl=e=>{e.compute(Ve(e.inputs[0],\"Reciprocal\",r=>`1.0/${r}`))},Ll=e=>{let r=at(e.inputs[0].dataType);e.compute(Ve(e.inputs[0],\"Relu\",t=>`select(vec4<${r}>(0.0), ${t}, ${t} > vec4<${r}>(0.0))`))},Fl=e=>{e.compute(Ve(e.inputs[0],\"Sigmoid\",r=>`(1.0 / (1.0 + exp(-${r})))`))},ql=e=>ke(e),Kl=(e,r)=>{let 
t=at(e.inputs[0].dataType);e.compute(Ve(e.inputs[0],\"HardSigmoid\",u=>`max(vec4<${t}>(0.0), min(vec4<${t}>(1.0), ${r.alpha} * ${u} + vec4<${t}>(${r.beta})))`,void 0,r.cacheKey))},Yl=e=>{e.compute(Ve(e.inputs[0],\"Sin\",\"sin\"))},Zl=e=>{e.compute(Ve(e.inputs[0],\"Sinh\",\"sinh\"))},Ql=e=>{e.compute(Ve(e.inputs[0],\"Sqrt\",\"sqrt\"))},Xl=e=>{e.compute(Ve(e.inputs[0],\"Tan\",\"tan\"))},Jl=e=>`sign(${e}) * (1 - exp(-2 * abs(${e}))) / (1 + exp(-2 * abs(${e})))`,ed=e=>{e.compute(Ve(e.inputs[0],\"Tanh\",Jl))},Xi=(e=\"f32\")=>`\nconst fast_gelu_a: ${e} = 0.5;\nconst fast_gelu_b: ${e} = 0.7978845608028654;\nconst fast_gelu_c: ${e} = 0.035677408136300125;\n\nfn tanh_v(v: vec4<${e}>) -> vec4<${e}> {\n return ${Jl(\"v\")};\n}\n`,Ji=e=>`(fast_gelu_a + fast_gelu_a * tanh_v(${e} * (fast_gelu_c * ${e} * ${e} + fast_gelu_b))) * ${e}`,td=e=>{let r=at(e.inputs[0].dataType);e.compute(Ve(e.inputs[0],\"FastGelu\",Ji,Xi(r),void 0,e.inputs[0].dataType))},rd=(e,r)=>{let t=at(e.inputs[0].dataType);return e.compute(Ve(e.inputs[0],\"ThresholdedRelu\",u=>`select(vec4<${t}>(0.0), ${u}, ${u} > thresholded_relu_alpha_)`,`const thresholded_relu_alpha_ = vec4<${t}>(${r.alpha});`,r.cacheKey)),0},nd=e=>{e.compute(Ve(e.inputs[0],\"Log\",\"log\"))}});var Vg,Wg,od,ad=ae(()=>{\"use strict\";De();Re();li();Vg=e=>{if(e[0].dims.length!==3)throw new Error(\"input should have 3 dimensions\");if(![2560,5120,10240].includes(e[0].dims[2]))throw new Error(\"hidden state should be 2560, 5120 or 10240\");if(e[1].dims.length!==1)throw new Error(\"bias is expected to have 1 dimensions\");if(e[0].dims[2]!==e[1].dims[0])throw new Error(\"last dimension of input and bias are not the same\")},Wg=e=>{let r=e[0].dims.slice();r[2]=r[2]/2;let t=Z(\"input\",e[0].dataType,e[0].dims,4),u=Z(\"bias\",e[0].dataType,[e[0].dims[2]],4),s=ne(\"output\",e[0].dataType,r,4),c=K.size(r)/4,f=Ne(e[0].dataType);return{name:\"BiasSplitGelu\",getRunData:()=>({outputs:[{dims:r,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(c/64)}}),getShaderSource:g=>`\n const M_SQRT2 = sqrt(2.0);\n const halfChannels = ${e[0].dims[2]/4/2}u;\n\n ${g.declareVariables(t,u,s)}\n\n ${ui(f)}\n\n ${g.mainStart()}\n ${g.guardAgainstOutOfBoundsWorkgroupSizes(c)}\n let biasIdx = global_idx % halfChannels;\n let batchIndex = global_idx / halfChannels;\n let inputOffset = biasIdx + batchIndex * halfChannels * 2;\n let valueLeft = input[inputOffset] + bias[biasIdx];\n let valueRight = input[inputOffset + halfChannels] + bias[biasIdx + halfChannels];\n let geluRight = valueRight * 0.5 * (erf_vf32(valueRight / M_SQRT2) + 1);\n\n ${s.setByOffset(\"global_idx\",\"valueLeft * geluRight\")}\n }`}},od=e=>{Vg(e.inputs),e.compute(Wg(e.inputs))}});var Ng,Gg,Mt,sd,ud,ld,dd,cd,fd,pd,md,hd,gd,yd=ae(()=>{\"use strict\";Te();De();Re();Ng=(e,r,t,u,s,c,f,d,g,w,C,$)=>{let A,P;typeof d==\"string\"?A=P=(R,j)=>`${d}((${R}),(${j}))`:typeof d==\"function\"?A=P=d:(A=d.scalar,P=d.vector);let x=ne(\"outputData\",C,u.length,4),E=Z(\"aData\",g,r.length,4),O=Z(\"bData\",w,t.length,4),B;if(s)if(c){let R=K.size(r)===1,j=K.size(t)===1,U=r.length>0&&r[r.length-1]%4===0,L=t.length>0&&t[t.length-1]%4===0;R||j?B=x.setByOffset(\"global_idx\",P(R?`${E.type.value}(${E.getByOffset(\"0\")}.x)`:E.getByOffset(\"global_idx\"),j?`${O.type.value}(${O.getByOffset(\"0\")}.x)`:O.getByOffset(\"global_idx\"))):B=`\n let outputIndices = ${x.offsetToIndices(\"global_idx * 4u\")};\n let offsetA = ${E.broadcastedIndicesToOffset(\"outputIndices\",x)};\n let offsetB = ${O.broadcastedIndicesToOffset(\"outputIndices\",x)};\n 
${x.setByOffset(\"global_idx\",P(f||U?E.getByOffset(\"offsetA / 4u\"):`${E.type.value}(${E.getByOffset(\"offsetA / 4u\")}[offsetA % 4u])`,f||L?O.getByOffset(\"offsetB / 4u\"):`${O.type.value}(${O.getByOffset(\"offsetB / 4u\")}[offsetB % 4u])`))}\n `}else B=x.setByOffset(\"global_idx\",P(E.getByOffset(\"global_idx\"),O.getByOffset(\"global_idx\")));else{if(!c)throw new Error(\"no necessary to use scalar implementation for element-wise binary op implementation.\");let R=(j,U,L=\"\")=>{let F=`aData[indexA${U}][componentA${U}]`,te=`bData[indexB${U}][componentB${U}]`;return`\n let outputIndices${U} = ${x.offsetToIndices(`global_idx * 4u + ${U}u`)};\n let offsetA${U} = ${E.broadcastedIndicesToOffset(`outputIndices${U}`,x)};\n let offsetB${U} = ${O.broadcastedIndicesToOffset(`outputIndices${U}`,x)};\n let indexA${U} = offsetA${U} / 4u;\n let indexB${U} = offsetB${U} / 4u;\n let componentA${U} = offsetA${U} % 4u;\n let componentB${U} = offsetB${U} % 4u;\n ${j}[${U}] = ${L}(${A(F,te)});\n `};C===9?B=`\n var data = vec4(0);\n ${R(\"data\",0,\"u32\")}\n ${R(\"data\",1,\"u32\")}\n ${R(\"data\",2,\"u32\")}\n ${R(\"data\",3,\"u32\")}\n outputData[global_idx] = dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(data));`:B=`\n ${R(\"outputData[global_idx]\",0)}\n ${R(\"outputData[global_idx]\",1)}\n ${R(\"outputData[global_idx]\",2)}\n ${R(\"outputData[global_idx]\",3)}\n `}return`\n ${e.registerUniform(\"vec_size\",\"u32\").declareVariables(E,O,x)}\n\n ${$??\"\"}\n\n ${e.mainStart()}\n ${e.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.vec_size\")}\n ${B}\n }`},Gg=(e,r,t,u,s,c,f=t.dataType)=>{let d=!K.areEqual(t.dims,u.dims),g=t.dims,w=K.size(t.dims),C=!1,$=!1,A=[d];if(d){let P=Ot.calcShape(t.dims,u.dims,!1);if(!P)throw new Error(\"Can't perform binary op on the given tensors\");g=P,w=K.size(g);let x=K.size(t.dims)===1,E=K.size(u.dims)===1,O=t.dims.length>0&&t.dims[t.dims.length-1]%4===0,B=u.dims.length>0&&u.dims[u.dims.length-1]%4===0;A.push(x),A.push(E),A.push(O),A.push(B);let R=1;for(let j=1;jP.toString()).join(\"_\"),inputDependencies:[\"rank\",\"rank\"]},getShaderSource:P=>Ng(P,t.dims,u.dims,g,C,d,$,s,t.dataType,u.dataType,f,c),getRunData:()=>({outputs:[{dims:g,dataType:f}],dispatchGroup:{x:Math.ceil(w/64/4)},programUniforms:[{type:12,data:Math.ceil(K.size(g)/4)},...se(t.dims,u.dims,g)]})}},Mt=(e,r,t,u,s,c)=>{e.compute(Gg(r,s??\"\",e.inputs[0],e.inputs[1],t,u,c))},sd=e=>{Mt(e,\"Add\",(r,t)=>`${r}+${t}`)},ud=e=>{Mt(e,\"Div\",(r,t)=>`${r}/${t}`)},ld=e=>{Mt(e,\"Equal\",{scalar:(r,t)=>`u32(${r}==${t})`,vector:(r,t)=>`vec4(${r}==${t})`},void 0,void 0,9)},dd=e=>{Mt(e,\"Mul\",(r,t)=>`${r}*${t}`)},cd=e=>{let r=Z(\"input\",e.inputs[0].dataType,e.inputs[0].dims).type.value;Mt(e,\"Pow\",{scalar:(u,s)=>`pow_custom(${u},${s})`,vector:(u,s)=>`pow_vector_custom(${u},${s})`},`\n fn pow_custom(a : ${r}, b : ${r}) -> ${r} {\n if (b == ${r}(0.0)) {\n return ${r}(1.0);\n } else if (a < ${r}(0.0) && f32(b) != floor(f32(b))) {\n return ${r}(pow(f32(a), f32(b))); // NaN\n }\n return select(sign(a), ${r}(1.0), round(f32(abs(b) % ${r}(2.0))) != 1.0) * ${r}(${r===\"i32\"?\"round\":\"\"}(pow(f32(abs(a)), f32(b))));\n }\n fn pow_vector_custom(a : vec4<${r}>, b : vec4<${r}>) -> vec4<${r}> {\n // TODO: implement vectorized pow\n return vec4<${r}>(pow_custom(a.x, b.x), pow_custom(a.y, b.y), pow_custom(a.z, b.z), pow_custom(a.w, b.w));\n }\n `)},fd=e=>{Mt(e,\"Sub\",(r,t)=>`${r}-${t}`)},pd=e=>{Mt(e,\"Greater\",{scalar:(r,t)=>`u32(${r}>${t})`,vector:(r,t)=>`vec4(${r}>${t})`},void 0,void 
0,9)},md=e=>{Mt(e,\"Less\",{scalar:(r,t)=>`u32(${r}<${t})`,vector:(r,t)=>`vec4(${r}<${t})`},void 0,void 0,9)},hd=e=>{Mt(e,\"GreaterOrEqual\",{scalar:(r,t)=>`u32(${r}>=${t})`,vector:(r,t)=>`vec4(${r}>=${t})`},void 0,void 0,9)},gd=e=>{Mt(e,\"LessOrEqual\",{scalar:(r,t)=>`u32(${r}<=${t})`,vector:(r,t)=>`vec4(${r}<=${t})`},void 0,void 0,9)}});var At,Tt,Et,di,Ft=ae(()=>{\"use strict\";Te();De();At=(e,r,t=\"f32\")=>{switch(e.activation){case\"Relu\":return`value = max(value, ${r}(0.0));`;case\"Sigmoid\":return`value = (${r}(1.0) / (${r}(1.0) + exp(-value)));`;case\"Clip\":return`value = clamp(value, ${r}(${t}(uniforms.clip_min)), ${r}(${t}(uniforms.clip_max)));`;case\"HardSigmoid\":return`value = max(${r}(0.0), min(${r}(1.0), ${t}(uniforms.alpha) * value + ${t}(uniforms.beta)));`;case\"LeakyRelu\":return`value = select(${t}(uniforms.alpha) * value, value, value >= ${r}(0.0));`;case\"\":return\"\";default:throw new Error(`Unsupported activation ${e.activation}`)}},Tt=(e,r)=>{e.activation===\"Clip\"?r.push({type:1,data:e.clipMax},{type:1,data:e.clipMin}):e.activation===\"HardSigmoid\"?r.push({type:1,data:e.alpha},{type:1,data:e.beta}):e.activation===\"LeakyRelu\"&&r.push({type:1,data:e.alpha})},Et=(e,r)=>{e.activation===\"Clip\"?r.push({name:\"clip_max\",type:\"f32\"},{name:\"clip_min\",type:\"f32\"}):e.activation===\"HardSigmoid\"?r.push({name:\"alpha\",type:\"f32\"},{name:\"beta\",type:\"f32\"}):e.activation===\"LeakyRelu\"&&r.push({name:\"alpha\",type:\"f32\"})},di=e=>{let r=e?.activation||\"\";if(r===\"HardSigmoid\"){let[t,u]=e?.activation_params||[.2,.5];return{activation:r,alpha:t,beta:u}}else if(r===\"Clip\"){let[t,u]=e?.activation_params||[ei,ti];return{activation:r,clipMax:u,clipMin:t}}else if(r===\"LeakyRelu\"){let[t]=e?.activation_params||[.01];return{activation:r,alpha:t}}return{activation:r}}});var st,ci,fi=ae(()=>{\"use strict\";st=(e,r)=>{switch(e){case 1:return r;case 2:return`vec2<${r}>`;case 3:return`vec3<${r}>`;case 4:return`vec4<${r}>`;default:throw new Error(`${e}-component is not supported.`)}},ci=e=>`\n ${e?\"value = value + getBiasByOutputCoords(coords);\":\"\"}\n `});var pi,eo=ae(()=>{\"use strict\";pi=e=>`\nfn getIndexFromCoords4D(coords : vec4, shape : vec4) -> i32 {\n return dot(coords, vec4(\n shape.y * shape.z * shape.w, shape.z * shape.w, shape.w, 1));\n}\nfn getOutputIndexFromCoords(coords : vec4) -> i32 {\n return dot(coords, vec4(\n i32(${e}.x), i32(${e}.y), i32(${e}.z), 1));\n}\n`});var Lg,Fg,cn,bd,qg,fn,Kg,mi,pn=ae(()=>{\"use strict\";Te();De();Re();Ft();fi();Lg=(e,r)=>e?`\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n kStart + inputRow,\n globalRowStart / innerElementSize + inputCol${r?\", batchIndices\":\"\"});\n `:`\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n globalRow + innerRow,\n kStart / innerElementSize + inputCol${r?\", batchIndices\":\"\"});\n `,Fg=(e,r)=>e?`\n let ACached0 = mm_Asub[k * innerElementSize][localRow];\n let ACached1 = mm_Asub[k * innerElementSize + 1][localRow];\n let ACached2 = mm_Asub[k * innerElementSize + 2][localRow];\n ${r===3?\"\":\"let ACached3 = mm_Asub[k * innerElementSize + 3][localRow];\"}\n for (var i = 0; i < rowPerThread; i = i + 1) {\n acc[i] = BCached0 * ACached0[i] + acc[i];\n acc[i] = BCached1 * ACached1[i] + acc[i];\n acc[i] = BCached2 * ACached2[i] + acc[i];\n ${r===3?\"\":\"acc[i] = BCached3 * ACached3[i] + acc[i];\"}\n }`:`\n for (var i = 0; i < rowPerThread; i = i + 1) {\n let ACached = mm_Asub[tileRow + i][k];\n acc[i] = BCached0 * ACached.x + acc[i];\n acc[i] = BCached1 * ACached.y + acc[i];\n acc[i] = 
BCached2 * ACached.z + acc[i];\n ${r===3?\"\":\"acc[i] = BCached3 * ACached.w + acc[i];\"}\n }`,cn=(e,r,t=\"f32\",u,s=!1,c=32,f=!1,d=32)=>{let g=r[1]*e[1],w=r[0]*e[0],C=s?g:c,$=s?c:g,A=C/r[0],P=c/r[1];if(!((s&&A===4&&e[1]===4||!s&&(A===3||A===4))&&C%r[0]===0&&c%r[1]===0&&e[0]===4))throw new Error(`If transposeA ${s} is true, innerElementSize ${A} and workPerThread[1] ${e[1]} must be 4.\n Otherwise, innerElementSize ${A} must be 3 or 4.\n tileAWidth ${C} must be divisible by workgroupSize[0]${r[0]}. tileInner ${c} must be divisible by workgroupSize[1] ${r[1]}. colPerThread ${e[0]} must be 4.`);return`\nvar mm_Asub: array, ${C/A}>, ${$}>;\nvar mm_Bsub: array, ${w/e[0]}>, ${c}>;\n\nconst rowPerThread = ${e[1]};\nconst colPerThread = ${e[0]};\nconst innerElementSize = ${A};\nconst tileInner = ${c};\n\n@compute @workgroup_size(${r[0]}, ${r[1]}, ${r[2]})\nfn main(@builtin(local_invocation_id) localId : vec3,\n @builtin(global_invocation_id) globalId : vec3,\n @builtin(workgroup_id) workgroupId : vec3) {\n let localRow = i32(localId.y);\n let tileRow = localRow * rowPerThread;\n let tileCol = i32(localId.x);\n\n let globalRow =i32(globalId.y) * rowPerThread;\n let globalCol = i32(globalId.x);\n let batch = ${f?\"0\":\"i32(globalId.z)\"};\n ${u?`let batchIndices = ${u.offsetToIndices(\"u32(batch)\")};`:\"\"}\n let globalRowStart = i32(workgroupId.y) * ${g};\n\n let num_tiles = ${f?`${Math.ceil(d/c)}`:\"(uniforms.dim_inner - 1) / tileInner + 1\"};\n var kStart = ${f?`i32(globalId.z) * ${d}`:\"0\"};\n\n var acc: array, rowPerThread>;\n\n // Loop over shared dimension.\n let tileRowB = localRow * ${P};\n for (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let inputRow = tileRow + innerRow;\n let inputCol = tileCol;\n ${Lg(s,u)}\n }\n\n // Load one tile of B into local memory.\n for (var innerRow = 0; innerRow < ${P}; innerRow = innerRow + 1) {\n let inputRow = tileRowB + innerRow;\n let inputCol = tileCol;\n mm_Bsub[inputRow][inputCol] = mm_readB(batch, kStart + inputRow, globalCol${u?\", batchIndices\":\"\"});\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n for (var k = 0; k < tileInner / innerElementSize; k = k + 1) {\n let BCached0 = mm_Bsub[k * innerElementSize][tileCol];\n let BCached1 = mm_Bsub[k * innerElementSize + 1][tileCol];\n let BCached2 = mm_Bsub[k * innerElementSize + 2][tileCol];\n ${A===3?\"\":\"let BCached3 = mm_Bsub[k * innerElementSize + 3][tileCol];\"}\n\n ${Fg(s,A)}\n }\n\n workgroupBarrier();\n }\n\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n mm_write(batch, globalRow + innerRow, globalCol, acc[innerRow]);\n }\n}`},bd=(e,r)=>e?`\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n kStart + inputRow,\n globalRowStart + inputCol${r?\", batchIndices\":\"\"});\n `:`\n mm_Asub[inputRow][inputCol] = mm_readA(batch,\n globalRowStart + inputRow,\n kStart + inputCol${r?\", batchIndices\":\"\"});\n `,qg=e=>e?\"let ACached = mm_Asub[k][tileRow + innerRow];\":\"let ACached = mm_Asub[tileRow + innerRow][k];\",fn=(e,r,t=\"f32\",u,s=!1,c=32,f=!1,d=32,g=!1)=>{let w=e[1]*r[1],C=e[0]*r[0],$=s?w:c,A=s?c:w;if(!(A%r[1]===0&&$%r[0]===0&&c%r[1]===0))throw new Error(`tileAHight ${A} must be divisible by workgroupSize[1]${r[1]}, tileAWidth ${$} must be divisible by workgroupSize[0]${r[0]}, tileInner ${c} must be divisible by workgroupSize[1]${r[1]}`);let P=A/r[1],x=$/r[0],E=c/r[1],O=g?`\n let localRow = 
i32(localId.y);\n let localCol = i32(localId.x);\n let globalRowStart = i32(workgroupId.y) * ${w};\n let globalColStart = i32(workgroupId.x) * ${C};\n\n // Loop over shared dimension.\n for (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var inputRow = localRow; inputRow < ${A}; inputRow = inputRow + ${r[1]}) {\n for (var inputCol = localCol; inputCol < ${$}; inputCol = inputCol + ${r[0]}) {\n ${bd(s,u)}\n }\n }\n // Load one tile of B into local memory.\n for (var inputRow = localRow; inputRow < ${c}; inputRow = inputRow + ${r[1]}) {\n for (var inputCol = localCol; inputCol < ${C}; inputCol = inputCol + ${r[0]}) {\n mm_Bsub[inputRow][inputCol] = mm_readB(batch,\n kStart + inputRow,\n globalColStart + inputCol${u?\", batchIndices\":\"\"});\n }\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n var BCached : array<${t}, colPerThread>;\n for (var k = 0; k < tileInner; k = k + 1) {\n for (var inner = 0; inner < colPerThread; inner = inner + 1) {\n BCached[inner] = mm_Bsub[k][localCol + inner * ${r[0]}];\n }\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let ACached = ${s?`mm_Asub[k][localRow + innerRow * ${r[1]}];`:`mm_Asub[localRow + innerRow * ${r[1]}][k];`}\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = acc[innerRow][innerCol] +\n ACached * BCached[innerCol];\n }\n }\n }\n workgroupBarrier();\n }\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n let gRow = globalRowStart + localRow + innerRow * ${r[1]};\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n let gCol = globalColStart + localCol + innerCol * ${r[0]};\n mm_write(batch, gRow, gCol, acc[innerRow][innerCol]);\n }\n }\n `:`\nlet tileRow = i32(localId.y) * rowPerThread;\nlet tileCol = i32(localId.x) * colPerThread;\n\nlet globalRow = i32(globalId.y) * rowPerThread;\nlet globalCol = i32(globalId.x) * colPerThread;\nlet globalRowStart = i32(workgroupId.y) * ${w};\n\nlet tileRowA = i32(localId.y) * ${P};\nlet tileColA = i32(localId.x) * ${x};\nlet tileRowB = i32(localId.y) * ${E};\n// Loop over shared dimension.\nfor (var t = 0; t < num_tiles; t = t + 1) {\n // Load one tile of A into local memory.\n for (var innerRow = 0; innerRow < ${P}; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < ${x}; innerCol = innerCol + 1) {\n let inputRow = tileRowA + innerRow;\n let inputCol = tileColA + innerCol;\n ${bd(s,u)}\n }\n }\n\n // Load one tile of B into local memory.\n for (var innerRow = 0; innerRow < ${E}; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n let inputRow = tileRowB + innerRow;\n let inputCol = tileCol + innerCol;\n mm_Bsub[inputRow][inputCol] = mm_readB(batch,\n kStart + inputRow,\n globalCol + innerCol${u?\", batchIndices\":\"\"});\n }\n }\n kStart = kStart + tileInner;\n workgroupBarrier();\n\n // Compute acc values for a single thread.\n var BCached : array<${t}, colPerThread>;\n for (var k = 0; k < tileInner; k = k + 1) {\n for (var inner = 0; inner < colPerThread; inner = inner + 1) {\n BCached[inner] = mm_Bsub[k][tileCol + inner];\n }\n\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n ${qg(s)}\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = acc[innerRow][innerCol] + ACached * BCached[innerCol];\n }\n }\n }\n\n 
workgroupBarrier();\n}\n\nfor (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n mm_write(batch, globalRow + innerRow, globalCol + innerCol,\n acc[innerRow][innerCol]);\n }\n}\n`;return`\n var mm_Asub : array, ${A}>;\n var mm_Bsub : array, ${c}>;\n const rowPerThread = ${e[1]};\n const colPerThread = ${e[0]};\n const tileInner = ${c};\n\n@compute @workgroup_size(${r[0]}, ${r[1]}, ${r[2]})\nfn main(@builtin(local_invocation_id) localId : vec3,\n @builtin(global_invocation_id) globalId : vec3,\n @builtin(workgroup_id) workgroupId : vec3) {\n let batch = ${f?\"0\":\"i32(globalId.z)\"};\n ${u?`let batchIndices = ${u.offsetToIndices(\"u32(batch)\")};`:\"\"}\n let num_tiles = ${f?`${Math.ceil(d/c)}`:\"(uniforms.dim_inner - 1) / tileInner + 1\"};\n var kStart = ${f?`i32(globalId.z) * ${d}`:\"0\"};\n\n var acc : array, rowPerThread>;\n\n // Without this initialization strange values show up in acc.\n for (var innerRow = 0; innerRow < rowPerThread; innerRow = innerRow + 1) {\n for (var innerCol = 0; innerCol < colPerThread; innerCol = innerCol + 1) {\n acc[innerRow][innerCol] = 0.0;\n }\n }\n ${O}\n }\n`},Kg=(e,r,t,u,s,c=!1)=>{let[f,d,g]=s,[w,C,$,A]=u,P=Br(f,g),x=Br(d,g),E=Ne(u[0].type.tensor),O=()=>{let j=C.rank,U=w.rank,L=`var aIndices: ${C.type.indices};`;for(let F=j-2-1,te=U-1;F>=0;F--,te--)L+=`\naIndices[${F}] = ${U>1?`batchIndices[${te}]`:\"batchIndices\"};`;return P.forEach(F=>{L+=`\naIndices[${F}] = 0;`}),L+=`\naIndices[${j-2}] = u32(row);\n aIndices[${j-1}] = u32(colIn);`,L},B=()=>{let j=$.rank,U=w.rank,L=`var bIndices: ${$.type.indices};`;for(let F=j-2-1,te=U-1;F>=0;F--,te--)L+=`\nbIndices[${F}] = ${U>1?`batchIndices[${te}]`:\"batchIndices\"};`;return x.forEach(F=>{L+=`\nbIndices[${F}] = 0;`}),L+=`\nbIndices[${j-2}] = u32(row);\n bIndices[${j-1}] = u32(colIn);`,L};return`\n fn mm_readA(batch: i32, row: i32, colIn: i32, batchIndices: ${w.type.indices}) -> ${st(e,E)} {\n var value = ${st(e,E)}(0.0);\n let col = colIn * ${e};\n if(row < uniforms.dim_a_outer && col < uniforms.dim_inner)\n {\n ${O()}\n value = ${C.getByIndices(\"aIndices\")};\n }\n return value;\n }\n\n fn mm_readB(batch: i32, row: i32, colIn: i32, batchIndices: ${w.type.indices}) -> ${st(e,E)} {\n var value = ${st(e,E)}(0.0);\n let col = colIn * ${e};\n if(row < uniforms.dim_inner && col < uniforms.dim_b_outer)\n {\n ${B()}\n value = ${$.getByIndices(\"bIndices\")};\n }\n return value;\n }\n\n fn mm_write(batch: i32, row: i32, colIn: i32, valueIn: ${st(e,E)}) {\n let col = colIn * ${e};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) {\n var value = valueIn;\n let coords = vec3(batch, row, colIn);\n ${r?`value = value + ${c?\"bias[colIn]\":`${st(e,E)}(bias[row])`};`:\"\"}\n ${t}\n ${A.setByIndices(\"vec3(coords)\",\"value\")}\n }\n }\n `},mi=(e,r,t,u,s=!1)=>{let c=e[0].dims,f=e[1].dims,d=c.slice(0,-2),g=f.slice(0,-2),w=u?u.slice(0,-2):t.slice(0,-2),C=K.size(w),$=c[c.length-2],A=c[c.length-1],P=f[f.length-1],x=A%4===0&&P%4===0,E=$<=8?[4,1,1]:[4,4,1],O=[8,8,1],B=[Math.ceil(P/O[0]/E[0]),Math.ceil($/O[1]/E[1]),Math.ceil(C/O[2]/E[2])],R=x?4:1,j=[...d,$,A/R],U=j.length,L=[...g,A,P/R],F=L.length,te=[C,$,P/R],J=[{type:6,data:$},{type:6,data:P},{type:6,data:A}];Tt(r,J),J.push(...se(w,j,L));let oe=[\"rank\",\"rank\"],le=e.length>2;le&&(J.push(...se(e[2].dims)),oe.push(\"rank\")),J.push(...se(te));let ge=X=>{let 
pe=w.length,we=ri(\"batchDims\",e[0].dataType,pe,1),ue=Ne(e[0].dataType),me=Z(\"a\",e[0].dataType,U,R),Ee=Z(\"b\",e[1].dataType,F,R),Pe=ne(\"result\",e[0].dataType,te.length,R),Ce=[me,Ee];if(le){let ee=s?R:1;Ce.push(Z(\"bias\",e[2].dataType,e[2].dims.length,ee))}let be=[{name:\"dim_a_outer\",type:\"i32\"},{name:\"dim_b_outer\",type:\"i32\"},{name:\"dim_inner\",type:\"i32\"}];Et(r,be);let Ae=Ne(Pe.type.tensor),_e=At(r,Pe.type.value,Ae),Je=Kg(R,le,_e,[we,me,Ee,Pe],[d,g,w],s);return`\n ${X.registerUniforms(be).registerInternalVariables(we).declareVariables(...Ce,Pe)}\n ${Je}\n ${x?cn(E,O,ue,we):fn(E,O,ue,we)}\n `};return{name:\"MatMul\",shaderCache:{hint:`${E};${r.activation};${x};${s}`,inputDependencies:oe},getRunData:()=>({outputs:[{dims:t,dataType:e[0].dataType}],dispatchGroup:{x:B[0],y:B[1],z:B[2]},programUniforms:J}),getShaderSource:ge}}});var Yg,vd,wd=ae(()=>{\"use strict\";Te();Lt();Re();Ft();fi();eo();pn();Yg=(e,r,t,u,s=!1,c,f=4,d=4,g=4,w=\"f32\")=>{let C=le=>{switch(le){case 1:return\"resData = x[xIndex];\";case 3:return`resData = vec3<${w}>(x[xIndex], x[xIndex + 1], x[xIndex + 2]);`;case 4:return\"resData = x[xIndex / 4];\";default:throw new Error(`innerElementSize ${le} is not supported.`)}},$=le=>{switch(le){case 1:return\"return w[row * i32(uniforms.w_shape[3]) + colIn];\";case 4:return\"return w[row * i32(uniforms.w_shape[3]) / 4 + colIn];\";default:throw new Error(`innerElementSize ${le} is not supported.`)}},A=e?`\n let coord = vec4(batch, xRow, xCol, xCh);\n `:`\n let coord = vec4(batch, xCh, xRow, xCol);\n `,P=e?`\n let coords = vec4(\n batch,\n row / outWidth,\n row % outWidth,\n col);\n `:`\n let coords = vec4(\n batch,\n row,\n col / outWidth,\n col % outWidth);\n `,x=e?\"i32(uniforms.x_shape[1])\":\"i32(uniforms.x_shape[2])\",E=e?\"i32(uniforms.x_shape[2])\":\"i32(uniforms.x_shape[3])\",O=e?\"row\":\"col\",B=e?\"col\":\"row\",R=`\n let inChannels = i32(uniforms.w_shape[2]);\n let outWidth = ${e?\"i32(uniforms.result_shape[2])\":\"i32(uniforms.result_shape[3])\"};\n let outRow = ${O} / outWidth;\n let outCol = ${O} % outWidth;\n\n let WRow = ${B} / (i32(uniforms.w_shape[1]) * inChannels);\n let WCol = ${B} / inChannels % i32(uniforms.w_shape[1]);\n let xRow = outRow * uniforms.stride[0] + uniforms.dilation[0] * WRow - uniforms.pad[0];\n let xCol = outCol * uniforms.stride[1] + uniforms.dilation[1] * WCol - uniforms.pad[1];\n let xCh = ${B} % inChannels;\n var resData = ${st(f,w)}(0.0);\n // The bounds checking is always needed since we use it to pad zero for\n // the 'same' padding type.\n if (xRow >= 0 && xRow < ${x} && xCol >= 0 && xCol < ${E}) {\n ${A}\n let xIndex = getIndexFromCoords4D(coord, vec4(uniforms.x_shape));\n ${C(f)}\n }\n return resData;`,j=e?r&&u?`\n let col = colIn * ${f};\n ${R}`:`\n let col = colIn * ${f};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) {\n ${R}\n }\n return ${st(f,w)}(0.0);`:u&&t?`\n let col = colIn * ${f};\n ${R}`:`\n let col = colIn * ${f};\n if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) {\n ${R}\n }\n return ${st(f,w)}(0.0);`,U=`${$(d)}`,L=st(g,w),F=e?st(f,w):st(d,w),te=e?st(d,w):st(f,w),J=At(c,L,w);return`\n fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${F} {\n ${e?j:U}\n }\n\n fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${te} {\n ${e?U:j}\n }\n\n fn mm_write(batch: i32, row : i32, colIn : i32, valueIn : ${L}) {\n let col = colIn * ${g};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer)\n {\n var value = valueIn;\n let outWidth = 
${e?\"i32(uniforms.result_shape[2])\":\"i32(uniforms.result_shape[3])\"};\n ${P}\n ${ci(s)}\n ${J}\n setOutputAtCoords(coords[0], coords[1], coords[2], coords[3], value);\n }\n }`},vd=(e,r,t,u,s,c,f,d)=>{let g=r.format===\"NHWC\",w=g?e[0].dims[3]:e[0].dims[1],C=t[0],$=g?t[2]:t[3],A=g?t[1]:t[2],P=g?t[3]:t[1],x=g&&(w%4===0||w%3===0)&&P%4===0,E=g?P:$*A,O=g?$*A:P,B=[8,8,1],R=u<=8?[4,1,1]:[4,4,1],j=[Math.ceil(E/B[0]/R[0]),Math.ceil(O/B[1]/R[1]),Math.ceil(C/B[2]/R[2])];Fe(\"verbose\",()=>`[conv2d_mm_webgpu] dispatch = ${j}`);let U=x?g&&w%4!==0?3:4:1,L=B[1]*R[1],F=B[0]*R[0],te=Math.max(B[0]*U,B[1]),J=u%L===0,oe=s%F===0,le=c%te===0,ge=x?[U,4,4]:[1,1,1],X=[{type:6,data:u},{type:6,data:s},{type:6,data:c},{type:6,data:[r.pads[0],r.pads[1]]},{type:6,data:r.strides},{type:6,data:r.dilations}];Tt(r,X),X.push(...se(e[0].dims,e[1].dims));let pe=[\"rank\",\"rank\"];f&&(X.push(...se(e[2].dims)),pe.push(\"rank\")),X.push(...se(t));let we=ue=>{let me=[{name:\"dim_a_outer\",type:\"i32\"},{name:\"dim_b_outer\",type:\"i32\"},{name:\"dim_inner\",type:\"i32\"},{name:\"pad\",type:\"i32\",length:2},{name:\"stride\",type:\"i32\",length:2},{name:\"dilation\",type:\"i32\",length:2}];Et(r,me);let Ee=x?4:1,Pe=Ne(e[0].dataType),Ce=`\n fn setOutputAtIndex(flatIndex : i32, value : ${x?`vec4<${Pe}>`:Pe}) {\n result[flatIndex] = ${x?`vec4<${Pe}>`:Pe}(value);\n }\n fn setOutputAtCoords(d0 : i32, d1 : i32, d2 : i32, d3 : i32, value : ${x?`vec4<${Pe}>`:Pe}) {\n let flatIndex = getOutputIndexFromCoords(vec4(d0, d1, d2, d3));\n setOutputAtIndex(flatIndex ${x?\"/ 4\":\"\"}, value);\n }`,be=Z(\"x\",e[0].dataType,e[0].dims.length,U===3?1:U),Ae=Z(\"w\",e[1].dataType,e[1].dims.length,Ee),_e=[be,Ae],Je=ne(\"result\",e[0].dataType,t.length,Ee);if(f){let ee=Z(\"bias\",e[2].dataType,e[2].dims.length,Ee);_e.push(ee),Ce+=`\n fn getBiasByOutputCoords(coords : vec4) -> ${x?`vec4<${Pe}>`:Pe} {\n return bias[coords.${g?\"w\":\"y\"}${x?\"/ 4\":\"\"}];\n }`}return`\n ${pi(\"uniforms.result_strides\")}\n //struct Uniforms { xShape : vec4, wShape : vec4, outShape : vec4,\n // outShapeStrides: vec3, filterDims : vec2, pad : vec2, stride : vec2,\n // dilation : vec2, dimAOuter : i32, dimBOuter : i32, dimInner : i32 };\n ${ue.registerUniforms(me).declareVariables(..._e,Je)}\n ${Ce}\n ${Yg(g,J,oe,le,f,r,ge[0],ge[1],ge[2],Pe)}\n ${x?cn(R,B,Pe,void 0,!g,te):fn(R,B,Pe,void 0,!g,te,!1,void 0,d)}`};return{name:\"Conv2DMatMul\",shaderCache:{hint:`${r.cacheKey};${U};${x};${J};${oe};${le};${L};${F};${te}`,inputDependencies:pe},getRunData:()=>({outputs:[{dims:t,dataType:e[0].dataType}],dispatchGroup:{x:j[0],y:j[1],z:j[2]},programUniforms:X}),getShaderSource:we}}});var to,_d,$d=ae(()=>{\"use strict\";Te();De();Re();ro();Ft();to=(e,r,t)=>{let u=e.length>2,s=u?\"value += b[output_channel];\":\"\",c=e[0].dims,f=e[1].dims,d=f[0]/r.group,g=r.format===\"NHWC\",w=hi(c,f,r.dilations,r.pads,r.strides,g),C=K.size(w),$=[{type:12,data:C},{type:12,data:r.dilations},{type:12,data:[r.strides[0],r.strides[1]]},{type:12,data:[r.pads[0],r.pads[1]]},{type:12,data:d}];Tt(r,$),$.push(...se(c,f));let A=[\"rank\",\"rank\"];u&&($.push(...se(e[2].dims)),A.push(\"rank\")),$.push(...se(w));let P=x=>{let E=ne(\"output\",e[0].dataType,w.length),O=Ne(E.type.tensor),B=At(r,E.type.value,O),R=Z(\"x\",e[0].dataType,c.length),j=Z(\"w\",e[1].dataType,f.length),U=[R,j];u&&U.push(Z(\"b\",e[2].dataType,e[2].dims.length));let 
L=[{name:\"output_size\",type:\"u32\"},{name:\"dilations\",type:\"u32\",length:r.dilations.length},{name:\"strides\",type:\"u32\",length:2},{name:\"pads\",type:\"u32\",length:2},{name:\"output_channels_per_group\",type:\"u32\"}];return Et(r,L),`\n ${x.registerUniforms(L).declareVariables(...U,E)}\n\n ${x.mainStart()}\n ${x.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size\")}\n\n let outputIndices = ${E.offsetToIndices(\"global_idx\")};\n let batch: u32 = outputIndices[0];\n let output_channel: u32 = outputIndices[${g?3:1}];\n let xRCCorner: vec2 = vec2(outputIndices[${g?1:2}], outputIndices[${g?2:3}]) * uniforms.strides - uniforms.pads;\n let group_id: u32 = output_channel / uniforms.output_channels_per_group;\n\n var value: ${E.type.value} = ${E.type.value}(0);\n for (var wInChannel: u32 = 0u; wInChannel < uniforms.w_shape[1]; wInChannel++) {\n let input_channel = group_id * uniforms.w_shape[1] + wInChannel;\n for (var wHeight: u32 = 0u; wHeight < uniforms.w_shape[2]; wHeight++) {\n let xHeight = xRCCorner.x + wHeight * uniforms.dilations[0];\n\n if (xHeight < 0u || xHeight >= uniforms.x_shape[${g?1:2}]) {\n continue;\n }\n\n for (var wWidth: u32 = 0u; wWidth < uniforms.w_shape[3]; wWidth++) {\n let xWidth = xRCCorner.y + wWidth * uniforms.dilations[1];\n if (xWidth < 0u || xWidth >= uniforms.x_shape[${g?2:3}]) {\n continue;\n }\n\n let xVal = ${g?R.get(\"batch\",\"xHeight\",\"xWidth\",\"input_channel\"):R.get(\"batch\",\"input_channel\",\"xHeight\",\"xWidth\")};\n let wVal = ${j.get(\"output_channel\",\"wInChannel\",\"wHeight\",\"wWidth\")};\n value += xVal*wVal;\n }\n }\n }\n ${s}\n ${B}\n ${E.setByOffset(\"global_idx\",\"value\")}\n }`};return{name:\"GroupedConv\",shaderCache:{hint:r.cacheKey,inputDependencies:A},getRunData:()=>({outputs:[{dims:t?t(w):w,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(C/64)},programUniforms:$}),getShaderSource:P}},_d=(e,r,t)=>{let u=e.length>2,s=He(t[3]),c=He(t[2]),f=K.size(t)/s/c,d=[e[0].dims[0],e[0].dims[1],e[0].dims[2],e[0].dims[3]/s],g=[e[1].dims[0],e[1].dims[1],e[1].dims[2],e[1].dims[3]/s],w=[t[0],t[1],t[2],t[3]/s],C=[{type:12,data:f},{type:6,data:[r.strides[0],r.strides[1]]},{type:6,data:[r.pads[0],r.pads[1]]}];Tt(r,C),C.push(...se(d,g,w));let $=(c-1)*r.strides[1]+g[1],A=P=>{let x=ne(\"output\",e[0].dataType,w.length,s),E=Ne(x.type.tensor),O=At(r,x.type.value,E),B=Z(\"x\",e[0].dataType,d.length,s),R=Z(\"w\",e[1].dataType,g.length,s),j=[B,R];u&&j.push(Z(\"b\",e[2].dataType,e[2].dims,s));let U=u?\"value += b[output_channel];\":\"\",L=[{name:\"output_size\",type:\"u32\"},{name:\"strides\",type:\"i32\",length:2},{name:\"pads\",type:\"i32\",length:2}];return Et(r,L),`\n ${P.registerUniforms(L).declareVariables(...j,x)}\n ${P.mainStart()}\n ${P.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size\")}\n let width0 = uniforms.output_shape[3];\n let output_channel = global_idx % width0;\n var index1 = global_idx / width0;\n let width1 = uniforms.output_shape[2] / ${c}u;\n let col = (index1 % width1) * ${c}u;\n index1 = index1 / width1;\n let row = index1 % uniforms.output_shape[1];\n let batch = index1 / uniforms.output_shape[1];\n\n let x_corner = vec2(i32(row), i32(col)) * uniforms.strides - uniforms.pads;\n\n var x_vals: array<${B.type.value}, ${$}>;\n var values: array<${x.type.value}, ${c}>;\n let input_channel = output_channel;\n // Use constant instead of uniform can give better performance for w's height/width.\n for (var w_height: u32 = 0u; w_height < ${g[0]}; w_height++) {\n let x_height = x_corner.x + i32(w_height);\n if 
(x_height >= 0 && u32(x_height) < uniforms.x_shape[1]) {\n for (var i = 0; i < ${$}; i++) {\n let x_width = x_corner.y + i;\n if (x_width >= 0 && u32(x_width) < uniforms.x_shape[2]) {\n x_vals[i] = ${B.get(\"batch\",\"u32(x_height)\",\"u32(x_width)\",\"input_channel\")};\n } else {\n x_vals[i] = ${B.type.value}(0);\n }\n }\n for (var w_width: u32 = 0u; w_width < ${g[1]}; w_width++) {\n let w_val = ${R.get(\"w_height\",\"w_width\",\"0\",\"output_channel\")};\n for (var i = 0u; i < ${c}u; i++) {\n values[i] = fma(x_vals[i * u32(uniforms.strides[1]) + w_width], w_val, values[i]);\n }\n }\n }\n }\n\n for (var i = 0u; i < ${c}u; i++) {\n var value = values[i];\n ${U}\n ${O}\n ${x.set(\"batch\",\"row\",\"col + i\",\"output_channel\",\"value\")};\n }\n }`};return{name:\"GroupedConv-Vectorize\",shaderCache:{hint:`${r.cacheKey};${s};${c};${$};${g[0]};${g[1]}`,inputDependencies:u?[\"rank\",\"rank\",\"type\"]:[\"rank\",\"rank\"]},getRunData:()=>({outputs:[{dims:t,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(f/64)},programUniforms:C}),getShaderSource:A}}});var no,Zg,Cd,io=ae(()=>{\"use strict\";Te();De();pn();Re();Ft();no=(e,r,t,u,s=!1)=>{let c=e[0].dims,f=e[1].dims,d=c[c.length-2],g=f[f.length-1],w=c[c.length-1],C=He(g),$=He(w),A=He(d),P=K.size(t)/C/A,x=e.length>2,E=u?u.slice(0,-2):t.slice(0,-2),B=[K.size(E),d,g],R=[{type:12,data:P},{type:12,data:d},{type:12,data:g},{type:12,data:w}];Tt(r,R),R.push(...se(E,c,f)),x&&R.push(...se(e[2].dims)),R.push(...se(B));let j=U=>{let L=ri(\"batch_dims\",e[0].dataType,E.length),F=Z(\"a\",e[0].dataType,c.length,$),te=Z(\"b\",e[1].dataType,f.length,C),J=ne(\"output\",e[0].dataType,B.length,C),oe=Ne(J.type.tensor),le=At(r,J.type.value,oe),ge=[F,te],X=\"\";if(x){let be=s?C:1;ge.push(Z(\"bias\",e[2].dataType,e[2].dims.length,be)),X=`${s?`value += bias[col / ${be}];`:`value += ${J.type.value}(bias[row + i]);`}`}let pe=c.slice(0,-2),we=f.slice(0,-2),ue=Br(pe,E),me=Br(we,E),Ee=[{name:\"output_size\",type:\"u32\"},{name:\"M\",type:\"u32\"},{name:\"N\",type:\"u32\"},{name:\"K\",type:\"u32\"}];Et(r,Ee);let Pe=(be,Ae)=>{let _e=be.rank,Je=be.name;if(_e===2)return`var ${Je}_indices = ${be.type.indices}(0u, 0u);`;let ee=L.rank,ce=`var ${Je}_indices: ${be.type.indices};`;for(let Be=_e-2-1,tt=ee-1;Be>=0;Be--,tt--)ce+=`\n${Je}_indices[${Be}] = ${ee>1?`batch_indices[${tt}]`:\"batch_indices\"};`;return Ae.forEach(Be=>{ce+=`\n${Je}_indices[${Be}] = 0;`}),ce+=`${Je}_indices[${_e-2}] = 0u;\n ${Je}_indices[${_e-1}] = 0u;`,ce},Ce=()=>{let be=`var a_data: ${F.type.value};`;for(let Ae=0;Ae<$;Ae++)be+=`\n let b_data${Ae} = b[(b_offset + (k + ${Ae}) * uniforms.N + col) / ${C}];`;for(let Ae=0;Ae;\n for (var k: u32 = 0u; k < uniforms.K; k = k + ${$}) {\n ${Ce()}\n }\n for (var i = 0u; i < ${A}u; i++) {\n var value = values[i];\n ${X}\n ${le}\n let cur_indices = ${J.type.indices}(batch, row + i, col);\n let offset = ${J.indicesToOffset(\"cur_indices\")};\n ${J.setByOffset(`offset / ${C}`,\"value\")};\n }\n }\n `};return{name:\"MatMulNaive\",shaderCache:{hint:`${r.activation};${C};${$};${A};${s}`,inputDependencies:x?[\"rank\",\"rank\",\"rank\"]:[\"rank\",\"rank\"]},getRunData:()=>({outputs:[{dims:t,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(P/64)},programUniforms:R}),getShaderSource:j}},Zg=e=>{if(!e||e.length!==2)throw new Error(\"MatMul requires 2 inputs.\");if(e[0].dims[e[0].dims.length-1]!==e[1].dims[e[1].dims.length-2])throw new Error(\"shared dimension does not match.\")},Cd=e=>{Zg(e.inputs);let r=Ot.calcShape(e.inputs[0].dims,e.inputs[1].dims,!0);if(!r)throw new Error(\"Can't 
use matmul on the given tensors\");let t=r[r.length-1],u=e.inputs[0].dims[e.inputs[0].dims.length-1];t<8&&u<8?e.compute(no(e.inputs,{activation:\"\"},r)):e.compute(mi(e.inputs,{activation:\"\"},r))}});var hi,oo,Qg,Sd,ao,Xg,Jg,so,ro=ae(()=>{\"use strict\";De();wd();pn();$d();Ft();io();Dr();hi=(e,r,t,u,s,c)=>{let f=e[0],d=e.slice(c?1:2,c?3:4),g=d.length,w=r[0],$=r.slice(2).map((x,E)=>x+(x-1)*(t[E]-1)),P=d.map((x,E)=>x+u[E]+u[E+g]).map((x,E)=>Math.floor((x-$[E]+s[E])/s[E]));return P.splice(0,0,f),P.splice(c?3:1,0,w),P},oo=[2,3,1,0],Qg=(e,r)=>{if(!e||e.length!==2&&e.length!==3)throw new Error(\"Conv requires 2 or 3 inputs\");if(e[0].dims.length!==4&&e[0].dims.length!==3)throw new Error(\"currently only support conv 1D and 2D\");if(e[0].dims.length!==e[1].dims.length)throw new Error(\"filter does not have same dimension as input\");let t=e[0].dims[r.format===\"NHWC\"?e[0].dims.length-1:1],u=e[1].dims[1]*r.group;if(t!==u)throw new Error(\"FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\");if(e.length===3&&(e[2].dims.length!==1||e[1].dims[0]!==e[2].dims[0]))throw new Error(\"invalid bias\");let s=e[0].dims.length-2;if(r.dilations.length!==s)throw new Error(`dilations should be ${s}D`);if(r.strides.length!==s)throw new Error(`strides should be ${s}D`);if(r.pads.length!==s*2)throw new Error(`pads should be ${s*2}D`);if(r.kernelShape.length!==0&&r.kernelShape.length!==e[1].dims.length-2)throw new Error(\"invalid kernel shape\")},Sd=(e,r)=>{let t=e.kernelShape.slice();for(let c=2;c{let r=di(e),t=e.format,u=[\"NOTSET\",\"VALID\",\"SAME_UPPER\",\"SAME_LOWER\"][e.auto_pad],s=e.dilations,c=e.group,f=e.kernel_shape,d=e.pads,g=e.strides,w=e.w_is_const();return{autoPad:u,format:t,dilations:s,group:c,kernelShape:f,pads:d,strides:g,wIsConst:w,...r,cacheKey:`${e.format};${r.activation};`}},Xg=(e,r,t)=>{let u=Sd(t,r),s=t.format===\"NHWC\";if(t.group!==1){if(!e.adapterInfo.isArchitecture(\"ampere\")&&s&&r[1].dims[0]===t.group&&r[1].dims[1]===1&&t.dilations[0]===1&&t.dilations[1]===1){let te=hi(r[0].dims,r[1].dims,t.dilations,u.pads,t.strides,s),J=e.kernelCustomData.wT??e.compute($t(r[1],oo),{inputs:[1],outputs:[t.wIsConst?-2:-1]})[0];t.wIsConst&&!e.kernelCustomData.wT&&(e.kernelCustomData.wT=J);let oe=[r[0],J];r.length===3&&oe.push(r[2]),e.compute(_d(oe,u,te),{inputs:oe})}else e.compute(to(r,u));return}let c=r.length===3,f=r[0].dims[s?1:2],d=r[0].dims[s?2:3],g=r[0].dims[s?3:1],w=r[1].dims[2],C=r[1].dims[3],$=hi(r[0].dims,r[1].dims,t.dilations,u.pads,t.strides,s),A=$[s?1:2],P=$[s?2:3],x=$[s?3:1],E=s&&w===f&&C===d&&t.pads[0]===0&&t.pads[1]===0;if(E||w===1&&C===1&&t.dilations[0]===1&&t.dilations[1]===1&&t.strides[0]===1&&t.strides[1]===1&&t.pads[0]===0&&t.pads[1]===0){let F=$[0],te,J,oe,le=[];if(s){let pe=e.kernelCustomData.wT??e.compute($t(r[1],oo),{inputs:[1],outputs:[t.wIsConst?-2:-1]})[0];if(t.wIsConst&&!e.kernelCustomData.wT&&(e.kernelCustomData.wT=pe),E){let we=f*d*g;te=r[0].reshape([1,F,we]),J=pe.reshape([1,we,x]),oe=[1,F,x]}else te=r[0].reshape([F,f*d,g]),J=pe.reshape([1,g,x]),oe=[F,A*P,x];le.push(te),le.push(J)}else te=r[0].reshape([F,g,f*d]),J=r[1].reshape([1,x,g]),oe=[F,x,A*P],le.push(J),le.push(te);c&&le.push(r[2]);let ge=oe[2],X=le[0].dims[le[0].dims.length-1];ge<8&&X<8?e.compute(no(le,u,$,oe,s),{inputs:le}):e.compute(mi(le,u,$,oe,s),{inputs:le});return}let O=!0,B=e.kernelCustomData.wT??e.compute($t(r[1],oo),{inputs:[1],outputs:[t.wIsConst?-2:-1]})[0];t.wIsConst&&!e.kernelCustomData.wT&&(e.kernelCustomData.wT=B);let R=[r[0],B];c&&R.push(r[2]);let 
j=s?A*P:x,U=s?x:A*P,L=w*C*g;e.compute(vd(R,u,$,j,U,L,c,O),{inputs:R})},Jg=(e,r)=>{let t=r.format===\"NHWC\",u=[e.inputs[0].reshape(t?[e.inputs[0].dims[0],1,e.inputs[0].dims[1],e.inputs[0].dims[2]]:[e.inputs[0].dims[0],e.inputs[0].dims[1],1,e.inputs[0].dims[2]]),e.inputs[1].reshape([e.inputs[1].dims[0],e.inputs[1].dims[1],1,e.inputs[1].dims[2]])];e.inputs.length===3&&u.push(e.inputs[2]);let s=[0,r.pads[0],0,r.pads[1]],c=[1].concat(r.strides),f=[1].concat(r.dilations),d=[1].concat(r.kernelShape),g=Sd({...r,pads:s,strides:c,dilations:f,kernelShape:d},u);e.compute(to(u,g,w=>t?[w[0],w[2],w[3]]:[]))},so=(e,r)=>{Qg(e.inputs,r),e.inputs[0].dims.length===3?Jg(e,r):Xg(e,e.inputs,r)}});var ey,xd,Id=ae(()=>{\"use strict\";Te();Lt();Re();Ft();fi();eo();pn();ey=(e,r=!1,t,u,s=4)=>{let c=B=>{switch(B){case 1:return\"return w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];\";case 4:return`\n let coord1 = vec4(coordX, coordY, col + 1, rowInner);\n let coord2 = vec4(coordX, coordY, col + 2, rowInner);\n let coord3 = vec4(coordX, coordY, col + 3, rowInner);\n let v0 = w[getIndexFromCoords4D(coord, vec4(uniforms.w_shape))];\n let v1 = w[getIndexFromCoords4D(coord1, vec4(uniforms.w_shape))];\n let v2 = w[getIndexFromCoords4D(coord2, vec4(uniforms.w_shape))];\n let v3 = w[getIndexFromCoords4D(coord3, vec4(uniforms.w_shape))];\n return ${u}(v0, v1, v2, v3);\n `;default:throw new Error(`innerElementSize ${B} is not supported.`)}},f=e?`\n let coord = vec4(batch, iXR, iXC, xCh);\n `:`\n let coord = vec4(batch, xCh, iXR, iXC);\n `,d=e?`\n let coords = vec4(\n batch,\n row / outWidth,\n row % outWidth,\n col);\n `:`\n let coords = vec4(\n batch,\n row,\n col / outWidth,\n col % outWidth);\n `,g=e?\"i32(uniforms.x_shape[1])\":\"i32(uniforms.x_shape[2])\",w=e?\"i32(uniforms.x_shape[2])\":\"i32(uniforms.x_shape[3])\",C=e?\"row\":\"col\",$=e?\"col\":\"row\",A=`\n let inChannels = ${e?\"i32(uniforms.x_shape[3])\":\"i32(uniforms.x_shape[1])\"};\n let outWidth = ${e?\"i32(uniforms.result_shape[2])\":\"i32(uniforms.result_shape[3])\"};\n let outRow = ${C} / outWidth;\n let outCol = ${C} % outWidth;\n\n let WRow = ${$} / (uniforms.filter_dims[1] * inChannels);\n let WCol = ${$} / inChannels % uniforms.filter_dims[1];\n let xR = f32(outRow - uniforms.pads[0] + uniforms.dilations[0] * WRow) / f32(uniforms.strides[0]);\n let xC = f32(outCol - uniforms.pads[1] + uniforms.dilations[1] * WCol) / f32(uniforms.strides[1]);\n if (xR < 0.0 || xR >= f32(${g}) || fract(xR) > 0.0) {\n return ${u}(0.0);\n }\n if (xC < 0.0 || xC >= f32(${w}) || fract(xC) > 0.0) {\n return ${u}(0.0);\n }\n let iXR = i32(xR);\n let iXC = i32(xC);\n let xCh = ${$} % inChannels;\n ${f}\n return x[getIndexFromCoords4D(coord, vec4(uniforms.x_shape))/${s}];`,P=e?`\n let col = colIn * ${s};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_inner) {\n ${A}\n }\n return ${u}(0.0);`:`\n let col = colIn * ${s};\n if (row < uniforms.dim_inner && col < uniforms.dim_b_outer) {\n ${A}\n }\n return ${u}(0.0);`,x=`\n let col = colIn * ${s};\n let inChannels = ${e?\"i32(uniforms.x_shape[3])\":\"i32(uniforms.x_shape[1])\"};\n let coordX = uniforms.filter_dims[0] - 1 - row / (uniforms.filter_dims[1] * inChannels);\n let coordY = uniforms.filter_dims[1] - 1 - (row / inChannels) % uniforms.filter_dims[1];\n if (${e?\"row < uniforms.dim_inner && col < uniforms.dim_b_outer\":\"row < uniforms.dim_inner && col < uniforms.dim_a_outer\"} && coordX >= 0 && coordY >= 0) {\n let rowInner = row % inChannels;\n let coord = vec4(coordX, coordY, col, rowInner);\n ${c(s)}\n }\n 
return ${u}(0.0);\n `,E=At(t,u);return`\n fn mm_readA(batch: i32, row : i32, colIn : i32) -> ${u} {\n ${e?P:x}\n }\n\n fn mm_readB(batch: i32, row : i32, colIn : i32) -> ${u} {\n ${e?x:P}\n }\n\n fn mm_write(batch: i32, row : i32, colIn : i32, valueInput : ${u}) {\n let col = colIn * ${s};\n if (row < uniforms.dim_a_outer && col < uniforms.dim_b_outer) {\n var value = valueInput;\n let outWidth = ${e?\"i32(uniforms.result_shape[2])\":\"i32(uniforms.result_shape[3])\"};\n ${d}\n ${ci(r)}\n ${E}\n result[getIndexFromCoords4D(coords, vec4(uniforms.result_shape))/${s}] = value;\n }\n }`},xd=(e,r,t,u,s,c,f,d)=>{let g=r.format===\"NHWC\",w=g?e[0].dims[3]:e[0].dims[1],C=t[0],$=g?t[2]:t[3],A=g?t[1]:t[2],P=g?t[3]:t[1],x=g&&w%4===0&&w%3&&P%4===0,E=g?P:$*A,O=g?$*A:P,B=[8,8,1],R=u<=8?[4,1,1]:[4,4,1],j=[Math.ceil(E/B[0]/R[0]),Math.ceil(O/B[1]/R[1]),Math.ceil(C/B[2]/R[2])];Fe(\"verbose\",()=>`[conv_backprop_mm_webgpu] dispatch = ${j}`);let U=x?4:1,L=Math.max(B[0]*U,B[1]),F=x?4:1,te=[r.kernelShape[g?1:2],r.kernelShape[g?2:3]],J=[te[0]+(r.dilations[0]<=1?0:(te[0]-1)*(r.dilations[0]-1)),te[1]+(r.dilations[1]<=1?0:(te[1]-1)*(r.dilations[1]-1))],oe=[J[0]-1-Math.floor((r.pads[0]+r.pads[2])/2),J[1]-1-Math.floor((r.pads[1]+r.pads[3])/2)],le=[{type:6,data:u},{type:6,data:s},{type:6,data:c},{type:6,data:r.strides},{type:6,data:r.dilations},{type:6,data:te},{type:6,data:oe}];Tt(r,le),le.push(...se(e[0].dims,e[1].dims));let ge=[\"rank\",\"rank\"];f&&(le.push(...se(e[2].dims)),ge.push(\"rank\")),le.push(...se(t));let X=pe=>{let we=Z(\"x\",e[0].dataType,e[0].dims.length,F),ue=Z(\"w\",e[1].dataType,e[1].dims.length,1),me=ne(\"result\",e[0].dataType,t.length,F),Ee=[we,ue],Pe=\"\";if(f){let Ae=Z(\"bias\",e[2].dataType,e[2].dims.length,F);Ee.push(Ae),Pe+=`\n fn getBiasByOutputCoords(coords : vec4) -> ${Ae.type.value} {\n return bias[coords.${g?\"w\":\"y\"}${x?\"/ 4\":\"\"}];\n }`}let Ce=[{name:\"dim_a_outer\",type:\"i32\"},{name:\"dim_b_outer\",type:\"i32\"},{name:\"dim_inner\",type:\"i32\"},{name:\"strides\",type:\"i32\",length:2},{name:\"dilations\",type:\"i32\",length:2},{name:\"filter_dims\",type:\"i32\",length:te.length},{name:\"pads\",type:\"i32\",length:oe.length}];Et(r,Ce);let be=Ne(e[0].dataType,1);if(be!==\"f16\"&&be!==\"f32\")throw new Error(`elemType ${be} is not supported.`);return`\n ${pi(\"uniforms.result_strides\")}\n ${pe.registerUniforms(Ce).declareVariables(...Ee,me)};\n ${Pe}\n ${ey(g,f,r,we.type.value,U)}\n ${x?cn(R,B,be,void 0,!g,L):fn(R,B,be,void 0,!g,L,!1,void 0,d)}`};return{name:\"Conv2DTransposeMatMul\",shaderCache:{hint:`${r.cacheKey};${R};${B};${x}`,inputDependencies:ge},getRunData:()=>({outputs:[{dims:t,dataType:e[0].dataType}],dispatchGroup:{x:j[0],y:j[1],z:j[2]},programUniforms:le}),getShaderSource:X}}});var ty,uo,Ad=ae(()=>{\"use strict\";Te();Lt();De();Re();ty=(e,r,t,u,s,c=!1,f,d,g=!1)=>{let w=g?1:2,C=g?2:3,$=g?3:1,A=c?2:1,P=`\n fn setOutputAtIndex(flatIndex : u32, value : ${c?`vec4<${f}>`:f}) {\n result[flatIndex] = ${c?`vec4<${f}>`:f}(value);\n }`;u&&(P+=`\n fn getBiasByOutputCoords(coords : vec4) -> ${c?`vec4<${f}>`:f} {\n return bias[coords.${g?\"w\":\"y\"}${c?\"/ 4\":\"\"}];\n }`);let x=c?4:1,E=Z(\"W\",r[1].dataType,r[1].dims.length,x),O=Z(\"Dy\",r[0].dataType,r[0].dims.length,x),B=[O,E];u&&B.push(Z(\"bias\",r[2].dataType,[t[$]].length,x));let R=ne(\"result\",r[0].dataType,t.length,x),j=`{\n let batch: u32 = ${s?\"global_id.z\":\"workgroup_id.z\"} / uniforms.result_shape[1];\n let r = ${s?\"global_id.z\":\"workgroup_id.z\"} % uniforms.result_shape[1];\n let c = 
${s?\"global_id.y\":\"workgroup_id.y\"} * ${A};\n let d1: u32 = ${s?\"global_id.x\":\"workgroup_id.x\"} * 4;\n\n let dyCorner = vec2(i32(r), i32(c)) - vec2(uniforms.pads);\n\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. : = across all values in that axis.\n var dotProd: array, ${A}>;\n for (var i = 0; i < ${A}; i++) {\n dotProd[i] = vec4<${f}>(0.0);\n }\n for (var wR: u32 = 0; wR < uniforms.filter_dims[0]; wR = wR + 1) {\n var dyR = (${f}(dyCorner.x) + ${f}(wR)) / ${f}(uniforms.strides.x);\n let wRPerm = uniforms.filter_dims[0] - 1 - wR;\n if (dyR < 0.0 || dyR >= ${f}(uniforms.Dy_shape[1]) ||\n fract(dyR) > 0.0 || wRPerm < 0) {\n continue;\n }\n let idyR: u32 = u32(dyR);\n\n for (var wC: u32 = 0; wC < uniforms.filter_dims[1]; wC = wC + 1) {\n let dyC = (${f}(dyCorner.y) + ${f}(wC)) / ${f}(uniforms.strides.y);\n let dyC2 = (${f}(dyCorner.y) + 1.0 + ${f}(wC)) / ${f}(uniforms.strides.y);\n let wCPerm = uniforms.filter_dims[1] - 1 - wC;\n if (wCPerm < 0) {\n continue;\n }\n var bDyCVal = true;\n var bDyCVal2 = true;\n if (dyC < 0.0 || dyC >= ${f}(uniforms.Dy_shape[2]) ||\n fract(dyC) > 0.0) {\n bDyCVal = false;\n }\n if (dyC2 < 0.0 || dyC2 >= ${f}(uniforms.Dy_shape[2]) ||\n fract(dyC2) > 0.0) {\n bDyCVal2 = false;\n }\n\n let idyC: u32 = u32(dyC);\n let idyC2: u32 = u32(dyC2);\n if (bDyCVal && bDyCVal2) {\n let d2Length = uniforms.Dy_shape[3];\n for (var d2 :u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${E.get(\"u32(wRPerm)\",\"u32(wCPerm)\",\"d1\",\"d2\")};\n let wValue1 = ${E.get(\"u32(wRPerm)\",\"u32(wCPerm)\",\"d1 + 1\",\"d2\")};\n let wValue2 = ${E.get(\"u32(wRPerm)\",\"u32(wCPerm)\",\"d1 + 2\",\"d2\")};\n let wValue3 = ${E.get(\"u32(wRPerm)\",\"u32(wCPerm)\",\"d1 + 3\",\"d2\")};\n\n var xValue = ${O.get(\"batch\",\"idyR\",\"idyC\",\"d2\")};\n let tmpval = vec4<${f}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[0] = dotProd[0] + tmpval;\n\n xValue = ${O.get(\"batch\",\"idyR\",\"idyC2\",\"d2\")};\n\n dotProd[1] = dotProd[1] + vec4<${f}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n }\n } else if (bDyCVal) {\n let d2Length = uniforms.Dy_shape[${$}];\n for (var d2: u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${E.get(\"u32(wRPerm)\",\"u32(wCPerm)\",\"d1\",\"d2\")};\n let wValue1 = ${E.get(\"u32(wRPerm)\",\"u32(wCPerm)\",\"d1 + 1\",\"d2\")};\n let wValue2 = ${E.get(\"u32(wRPerm)\",\"u32(wCPerm)\",\"d1 + 2\",\"d2\")};\n let wValue3 = ${E.get(\"u32(wRPerm)\",\"u32(wCPerm)\",\"d1 + 3\",\"d2\")};\n\n var xValue = ${O.get(\"batch\",\"idyR\",\"idyC\",\"d2\")};\n let tmpval = vec4<${f}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[0] = dotProd[0] + tmpval;\n }\n } else if (bDyCVal2) {\n let d2Length = uniforms.Dy_shape[3];\n for (var d2: u32 = 0; d2 < d2Length; d2 = d2 + 4) {\n let wValue0 = ${E.get(\"u32(wRPerm)\",\"u32(wCPerm)\",\"d1\",\"d2\")};\n let wValue1 = ${E.get(\"u32(wRPerm)\",\"u32(wCPerm)\",\"d1 + 1\",\"d2\")};\n let wValue2 = ${E.get(\"u32(wRPerm)\",\"u32(wCPerm)\",\"d1 + 2\",\"d2\")};\n let wValue3 = ${E.get(\"u32(wRPerm)\",\"u32(wCPerm)\",\"d1 + 3\",\"d2\")};\n\n var xValue = ${O.get(\"batch\",\"idyR\",\"idyC2\",\"d2\")};\n let tmpval = vec4<${f}>(dot(xValue, wValue0),\n dot(xValue, wValue1),\n dot(xValue, wValue2),\n dot(xValue, wValue3));\n dotProd[1] = dotProd[1] + tmpval;\n }\n }\n }\n }\n\n for (var i: u32 = 0; i < ${A}; i = i + 1) {\n 
let value = dotProd[i] + ${u?\"bias[c+i]\":`vec4<${f}>(0.0)`};\n ${R.set(\"batch\",\"r\",\"c + i\",\"d1\",\"value\")};\n }\n }`,U=`\n let outputIndices = ${R.offsetToIndices(\"global_idx\")};\n let batch = ${R.indicesGet(\"outputIndices\",0)};\n let d1 = ${R.indicesGet(\"outputIndices\",$)};\n let r = ${R.indicesGet(\"outputIndices\",w)};\n let c = ${R.indicesGet(\"outputIndices\",C)};\n let dyCorner = vec2(i32(r), i32(c)) - uniforms.pads;\n let dyRCorner = dyCorner.x;\n let dyCCorner = dyCorner.y;\n let groupId = d1 / uniforms.output_channels_per_group;\n let wOutChannel = d1 - groupId * uniforms.output_channels_per_group;\n // Convolve dy(?, ?, d2) with w(:, :, d1, d2) to compute dx(xR, xC, d1).\n // ? = to be determined. : = across all values in that axis.\n var dotProd = ${f}(0.0);\n for (var wR: u32 = 0; wR < uniforms.effective_filter_dims.x; wR = wR + 1) {\n if (wR % uniforms.dilations.x != 0) {\n continue;\n }\n let dyR = (${f}(dyRCorner) + ${f}(wR)) / ${f}(uniforms.strides[0]);\n let wRPerm = uniforms.filter_dims.x - 1 - wR / uniforms.dilations.x;\n if (dyR < 0.0 || dyR >= ${f}(uniforms.Dy_shape[${w}]) || fract(dyR) > 0.0 ||\n wRPerm < 0) {\n continue;\n }\n let idyR: u32 = u32(dyR);\n\n for (var wC: u32 = 0; wC < uniforms.effective_filter_dims.y; wC = wC + 1) {\n if (wC % uniforms.dilations.y != 0) {\n continue;\n }\n let dyC = (${f}(dyCCorner) + ${f}(wC)) / ${f}(uniforms.strides.y);\n let wCPerm = uniforms.filter_dims.y - 1 - wC / uniforms.dilations.y;\n if (dyC < 0.0 || dyC >= ${f}(uniforms.Dy_shape[${C}]) ||\n fract(dyC) > 0.0 || wCPerm < 0) {\n continue;\n }\n let idyC: u32 = u32(dyC);\n var inputChannel = groupId * uniforms.input_channels_per_group;\n for (var d2: u32 = 0; d2 < uniforms.input_channels_per_group; d2 = d2 + 1) {\n let xValue = ${g?O.get(\"batch\",\"idyR\",\"idyC\",\"inputChannel\"):O.get(\"batch\",\"inputChannel\",\"idyR\",\"idyC\")};\n let wValue = ${E.get(\"inputChannel\",\"wOutChannel\",\"u32(wRPerm)\",\"u32(wCPerm)\")};\n dotProd = dotProd + xValue * wValue;\n inputChannel = inputChannel + 1;\n }\n }\n }\n let value = dotProd + ${u?\"bias[d1]\":`${f}(0.0)`};\n ${R.setByOffset(\"global_idx\",\"value\")};\n `;return`\n ${e.registerUniforms(d).declareVariables(...B,R)}\n ${P}\n\n ${e.mainStart()}\n ${e.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size\")};\n ${c?j:U}}`},uo=(e,r,t)=>{let u=e.length>2,s=r.outputShape,c=K.size(s),f=[Math.ceil(c/64),1,1];Fe(\"verbose\",()=>`[conv2d_backprop_webgpu] dispatch = ${f}`);let d=r.format===\"NHWC\",g=[\"rank\",\"rank\"],w=[r.strides[0],r.strides[1]],C=[r.kernelShape[d?1:2],r.kernelShape[d?2:3]],$=[r.dilations[0],r.dilations[1]],A=[C[0]+(r.dilations[0]<=1?0:(r.kernelShape[d?1:2]-1)*(r.dilations[0]-1)),C[1]+(r.dilations[1]<=1?0:(r.kernelShape[d?2:3]-1)*(r.dilations[1]-1))],P=[A[0]-1-Math.floor((r.pads[0]+r.pads[2])/2),A[1]-1-Math.floor(r.pads[1]+r.pads[3])/2],x=!1,E=r.group,O=e[1].dims,B=O[0]/E,R=O[1],j=[{type:12,data:c},{type:12,data:w},{type:12,data:C},{type:12,data:$},{type:12,data:A},{type:6,data:P},{type:12,data:B},{type:12,data:R},...se(e[0].dims,e[1].dims)];u&&(j.push(...se(e[2].dims)),g.push(\"rank\")),j.push(...se(s));let U=f[1]===1&&f[2]===1,L=F=>{let 
te=[{name:\"output_size\",type:\"u32\"},{name:\"strides\",type:\"u32\",length:w.length},{name:\"filter_dims\",type:\"u32\",length:C.length},{name:\"dilations\",type:\"u32\",length:C.length},{name:\"effective_filter_dims\",type:\"u32\",length:A.length},{name:\"pads\",type:\"i32\",length:P.length},{name:\"input_channels_per_group\",type:\"u32\"},{name:\"output_channels_per_group\",type:\"u32\"}],J=Ne(e[0].dataType);return`${ty(F,e,s,u,U,x,J,te,d)}`};return{name:\"ConvTranspose2D\",shaderCache:{hint:`${r.cacheKey};`,inputDependencies:g},getRunData:()=>({dispatchGroup:{x:f[0],y:f[1],z:f[2]},outputs:[{dims:t?t(s):s,dataType:e[0].dataType}],programUniforms:j}),getShaderSource:L}}});var ry,ny,iy,Td,Ed,oy,ay,sy,uy,Pd,kd=ae(()=>{\"use strict\";Id();Ad();Ft();Dr();ry=(e,r,t,u,s,c)=>(e-1)*r+t+(u-1)*s+1-c,ny=(e,r,t,u,s)=>{let c=Math.floor(e/2);r===\"SAME_UPPER\"?(t[u]=c,t[s]=e-c):r===\"SAME_LOWER\"&&(t[u]=e-c,t[s]=c)},iy=(e,r,t,u,s,c,f,d,g,w)=>{let C=e.length-2,$=w.length===0;if(g.length===0)for(let x=0;x{let t=e.kernelShape.slice();if(e.kernelShape.length===0||e.kernelShape.reduce(($,A)=>$*A,1)===0){t.length=0;for(let $=2;$$+A,0)===0){let $=r[0].dims.length-2;g=new Array($).fill(1)}let w=e.strides.slice();if(w.reduce(($,A)=>$+A,0)===0){let $=r[0].dims.length-2;w=new Array($).fill(1)}iy(d,t,g,e.autoPad,e.group,s,w,u,f,c);let C=Object.assign({},e);return Object.assign(C,{kernelShape:t,pads:s,outputPadding:f,outputShape:c,dilations:g,strides:w}),C},Ed=e=>{let r=di(e),t=e.format,u=[\"NOTSET\",\"VALID\",\"SAME_UPPER\",\"SAME_LOWER\"][typeof e.autoPad>\"u\"?0:e.autoPad],s=e.dilations,c=e.group,f=e.kernelShape,d=e.pads,g=e.strides,w=e.wIsConst(),C=e.outputPadding,$=e.outputShape;return{autoPad:u,format:t,dilations:s,group:c,kernelShape:f,outputPadding:C,outputShape:$,pads:d,strides:g,wIsConst:w,...r,cacheKey:`${e.format};${r.activation};`}},oy=(e,r)=>{if(!e||e.length!==2&&e.length!==3)throw new Error(\"Conv requires 2 or 3 inputs\");if(e[0].dims.length!==4&&e[0].dims.length!==3)throw new Error(\"currently only support 2-dimensional conv\");if(e[0].dims.length!==e[1].dims.length)throw new Error(\"filter does not have same dimension as input\");let t=e[0].dims[r.format===\"NHWC\"?e[0].dims.length-1:1],u=e[1].dims[0];if(t!==u)throw new Error(\"FILTER_IN_CHANNEL should be equal to DATA_CHANNEL\");let s=e[1].dims[1]*r.group;if(e.length===3&&(e[2].dims.length!==1||e[2].dims[0]!==s))throw new Error(\"invalid bias\");let c=e[0].dims.length-2;if(r.dilations.reduce((C,$)=>C+$,0)>0&&r.dilations.length!==c)throw new Error(`dilations should be ${c}D`);if(r.strides.reduce((C,$)=>C+$,0)>0&&r.strides.length!==c)throw new Error(`strides should be ${c}D`);if(r.pads.reduce((C,$)=>C+$,0)>0&&r.pads.length!==c*2)throw new Error(`pads should be ${c*2}D`);if(r.outputPadding.length!==c&&r.outputPadding.length!==0)throw new Error(`output_padding should be ${c}D`);if(r.kernelShape.reduce((C,$)=>C+$,0)>0&&r.kernelShape.length!==0&&r.kernelShape.length!==e[1].dims.length-2)throw new Error(\"invalid kernel shape\");if(r.outputShape.length!==0&&r.outputShape.length!==e[0].dims.length-2)throw new Error(\"invalid output shape\")},ay=[2,3,1,0],sy=(e,r,t)=>{let u=Td(t,r),s=t.format===\"NHWC\",c=u.outputShape,f=c[s?3:1],d=r[0].dims[s?3:1];if(u.group!==1||f===1&&d===1){e.compute(uo(r,u));return}let g=c[s?1:2],w=c[s?2:3],C=r[1].dims[2],$=r[1].dims[3],A=s?g*w:f,P=s?f:g*w,x=C*$*d,E=!0,O=e.kernelCustomData.wT??e.compute($t(r[1],ay),{inputs:[1],outputs:[t.wIsConst?-2:-1]})[0];t.wIsConst&&!e.kernelCustomData.wT&&(e.kernelCustomData.wT=O);let 
B=[r[0],O],R=r.length===3;R&&(!s&&r[2].dims.length===1?B.push(r[2].reshape([r[2].dims[0],1,1])):B.push(r[2])),e.compute(xd(B,u,c,A,P,x,R,E),{inputs:B})},uy=(e,r)=>{let t=r.format===\"NHWC\",u=[e.inputs[0].reshape(t?[e.inputs[0].dims[0],1,e.inputs[0].dims[1],e.inputs[0].dims[2]]:[e.inputs[0].dims[0],e.inputs[0].dims[1],1,e.inputs[0].dims[2]]),e.inputs[1].reshape([e.inputs[1].dims[0],e.inputs[1].dims[1],1,e.inputs[1].dims[2]])];e.inputs.length===3&&u.push(e.inputs[2]);let s=r.kernelShape;(s.length===0||s[0]===0)&&(s=[e.inputs[1].dims[2]]);let c=r.dilations;(c.length===0||c[0]===0)&&(c=[1]);let f=r.strides;(f.length===0||f[0]===0)&&(f=[1]);let d=r.pads;d.length===0&&(d=[0,0]),d=[0,d[0],0,d[1]],f=[1].concat(f),c=[1].concat(c),s=[1].concat(s);let g=Td({...r,pads:d,strides:f,dilations:c,kernelShape:s},u);e.compute(uo(u,g,w=>t?[w[0],w[2],w[3]]:[w[0],w[1],w[3]]))},Pd=(e,r)=>{oy(e.inputs,r),e.inputs[0].dims.length===3?uy(e,r):sy(e,e.inputs,r)}});var ly,Od,Rd,Bd=ae(()=>{\"use strict\";Te();De();nt();Re();ly=(e,r,t,u)=>{let s=K.size(r),c=r.length,f=Z(\"input\",e,c),d=ne(\"output\",e,c),g=t.dataType===6?t.getInt32Array()[0]:Number(t.getBigInt64Array()[0]),w=K.normalizeAxis(g,c),C=$=>{let A=` i32(${f.indicesGet(\"inputIndices\",\"uniforms.axis\")}) `,P=xe(\"uniforms.input_shape\",\"uniforms.axis\",c),x=u.reverse?A+(u.exclusive?\" + 1\":\"\"):\"0\",E=u.reverse?P:A+(u.exclusive?\"\":\" + 1\");return`\n ${$.registerUniform(\"outputSize\",\"u32\").registerUniform(\"axis\",\"u32\").declareVariables(f,d)}\n ${$.mainStart()}\n ${$.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.outputSize\")}\n var inputIndices = ${d.offsetToIndices(\"global_idx\")};\n var sum = ${d.type.value}(0);\n let first : i32 = ${x};\n let last : i32 = ${E};\n for (var i : i32 = first; i < last; i++) {\n ${f.indicesSet(\"inputIndices\",\"uniforms.axis\",\"u32(i)\")};\n sum = sum + ${f.getByIndices(\"inputIndices\")};\n }\n ${d.setByOffset(\"global_idx\",\"sum\")};\n }`};return{name:\"CumSum\",shaderCache:{hint:u.cacheKey,inputDependencies:[\"rank\"]},getRunData:()=>({outputs:[{dims:r,dataType:e}],dispatchGroup:{x:Math.ceil(s/64)},programUniforms:[{type:12,data:s},{type:12,data:w},...se(r,r)]}),getShaderSource:C}},Od=(e,r)=>{let t=e.inputs[0].dims,u=e.inputs[0].dataType,s=e.inputs[1];e.compute(ly(u,t,s,r),{inputs:[0]})},Rd=e=>{let r=e.exclusive===1,t=e.reverse===1;return ke({exclusive:r,reverse:t})}});var dy,cy,fy,Dd,zd,Md=ae(()=>{\"use strict\";Te();De();nt();Re();dy=e=>{if(!e||e.length!==1)throw new Error(\"DepthToSpace requires 1 input.\");if(e[0].dims.length!==4)throw new Error(\"DepthToSpace requires 4D input.\")},cy=(e,r,t,u)=>{let s=[];s.push(`fn perm(i: ${u.type.indices}) -> ${t.type.indices} {\n var a: ${t.type.indices};`);for(let c=0;c{let t,u,s,c,f,d,g=r.format===\"NHWC\",w=r.blocksize,C=r.mode===\"DCR\";g?([t,u,s,c]=e.dims,f=C?[t,u,s,w,w,c/w**2]:[t,u,s,c/w**2,w,w],d=C?[0,1,3,2,4,5]:[0,1,4,2,5,3]):([t,u,s,c]=[e.dims[0],e.dims[2],e.dims[3],e.dims[1]],f=C?[t,w,w,c/w**2,u,s]:[t,c/w**2,w,w,u,s],d=C?[0,3,4,1,5,2]:[0,1,4,2,5,3]);let $=e.reshape(f),A=$.dims.length,P=e.dataType,x=Z(\"a\",P,A),E=ne(\"output\",P,A),O=B=>`\n ${B.registerUniform(\"output_size\",\"u32\").declareVariables(x,E)}\n\n ${cy(d,A,x,E)}\n\n ${B.mainStart()}\n ${B.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size\")}\n\n let indices = ${E.offsetToIndices(\"global_idx\")};\n let aIndices = perm(indices);\n\n ${E.setByOffset(\"global_idx\",x.getByIndices(\"aIndices\"))}\n 
}`;return{name:\"DepthToSpace\",shaderCache:{hint:`${e.dims};${r.blocksize};${r.mode}`,inputDependencies:[\"rank\"]},getRunData:B=>{let R=g?[t,u*w,s*w,c/w**2]:[t,c/w**2,u*w,s*w],j=K.size(R),U=$.dims,L=K.sortBasedOnPerm(U,d);return{outputs:[{dims:R,dataType:B[0].dataType}],dispatchGroup:{x:Math.ceil(j/64)},programUniforms:[{type:12,data:j},...se(U,L)]}},getShaderSource:O}},Dd=(e,r)=>{dy(e.inputs),e.compute(fy(e.inputs[0],r))},zd=e=>ke({blocksize:e.blocksize,mode:e.mode,format:e.format})});var lo,gi,jd,py,my,co,fo,Ud,hy,Vd,Wd,Nd=ae(()=>{\"use strict\";Te();De();nt();Re();lo=\"[a-zA-Z]|\\\\.\\\\.\\\\.\",gi=\"(\"+lo+\")+\",jd=\"^\"+gi+\"$\",py=\"(\"+gi+\",)*\"+gi,my=\"^\"+py+\"$\",co=class{constructor(r=-1){this.symbolToIndices=new Map,this.inputIndex=r}addSymbol(r,t){let u=this.symbolToIndices.get(r);u===void 0?u=[t]:u.push(t),this.symbolToIndices.set(r,u)}},fo=class{constructor(r,t){this.equation=t;this.hasEllipsis=!1,this.symbolToInfo=new Map,this.lhs=new Array,this.outputDims=[];let[u,s]=t.includes(\"->\")?t.split(\"->\",2):[t,\"\"];if(!u.match(RegExp(my)))throw new Error(\"Invalid LHS term\");if(u.split(\",\").forEach((d,g)=>{let w=r[g].dims.slice();if(!d.match(RegExp(jd)))throw new Error(\"Invalid LHS term\");let C=this.processTerm(d,!0,w,g);this.lhs.push(C)}),s===\"\")s+=[...this.symbolToInfo.entries()].filter(([d,g])=>g.count===1||d===\"...\").map(([d])=>d).join(\"\");else if(!s.match(RegExp(gi)))throw new Error(\"Invalid RHS\");s.match(RegExp(lo,\"g\"))?.forEach(d=>{if(d===\"...\")this.outputDims=this.outputDims.concat(this.ellipsisDims);else{let g=this.symbolToInfo.get(d);if(g===void 0)throw new Error(\"Invalid RHS symbol\");this.outputDims.push(g.dimValue)}}),this.rhs=this.processTerm(s,!1,this.outputDims)}addSymbol(r,t,u){let s=this.symbolToInfo.get(r);if(s!==void 0){if(s.dimValue!==t&&s.count!==1)throw new Error(\"Dimension mismatch\");s.count++,s.inputIndices.push(u)}else s={count:1,dimValue:t,inputIndices:[u]};this.symbolToInfo.set(r,s)}processTerm(r,t,u,s=-1){let c=u.length,f=!1,d=[],g=0;if(!r.match(RegExp(jd))&&!t&&r!==\"\")throw new Error(\"Invalid LHS term\");let w=r.match(RegExp(lo,\"g\")),C=new co(s);return w?.forEach(($,A)=>{if($===\"...\"){if(f)throw new Error(\"Only one ellipsis is allowed per input term\");f=!0;let P=c-w.length+1;if(P<0)throw new Error(\"Ellipsis out of bounds\");if(d=u.slice(g,g+P),this.hasEllipsis){if(this.ellipsisDims.length!==d.length||this.ellipsisDims.toString()!==d.toString())throw new Error(\"Ellipsis dimensions mismatch\")}else if(t)this.hasEllipsis=!0,this.ellipsisDims=d;else throw new Error(\"Ellipsis must be specified in the LHS\");for(let x=0;xe+\"_max\",hy=(e,r,t,u)=>{let c=e.map(C=>C.length).map((C,$)=>Z(`input${$}`,r,C)),f=K.size(u),d=ne(\"output\",r,u.length),g=[...t.symbolToInfo.keys()].filter(C=>!t.rhs.symbolToIndices.has(C)),w=C=>{let $=[],A=\"var prod = 1.0;\",P=\"var sum = 0.0;\",x=\"sum += prod;\",E=[],O=[],B=[],R=[],j=t.symbolToInfo.size===t.rhs.symbolToIndices.size;t.symbolToInfo.forEach((L,F)=>{if(t.rhs.symbolToIndices.has(F)){let te=t.rhs.symbolToIndices.get(F)?.[0];te!==void 0&&t.lhs.forEach((J,oe)=>{if(L.inputIndices.includes(oe)){let le=J.symbolToIndices.get(F);if(le===void 0)throw new Error(\"Invalid symbol error\");le.forEach(ge=>{$.push(`${c[oe].indicesSet(`input${oe}Indices`,ge,d.indicesGet(\"outputIndices\",te))}`)})}})}else t.lhs.forEach((te,J)=>{if(L.inputIndices.includes(J)){let oe=te.symbolToIndices.get(F);if(oe===void 0)throw new Error(\"Invalid symbol 
error\");oe.forEach(le=>{E.push(`${c[J].indicesSet(`input${J}Indices`,le,`${F}`)}`)}),R.push(`prod *= ${c[J].getByIndices(`input${J}Indices`)};`)}}),O.push(`for(var ${F}: u32 = 0; ${F} < uniforms.${Ud(F)}; ${F}++) {`),B.push(\"}\")});let U=j?[...$,`let sum = ${c.map((L,F)=>L.getByIndices(`input${F}Indices`)).join(\" * \")};`]:[...$,P,...O,...E,A,...R,x,...B];return`\n ${C.registerUniforms(g.map(L=>({name:`${Ud(L)}`,type:\"u32\"}))).registerUniform(\"outputSize\",\"u32\").declareVariables(...c,d)}\n\n ${C.mainStart()}\n ${C.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.outputSize\")}\n var outputIndices = ${d.offsetToIndices(\"global_idx\")};\n ${c.map((L,F)=>`var input${F}Indices: ${c[F].type.indices};`).join(`\n`)}\n ${U.join(`\n`)};\n ${d.setByOffset(\"global_idx\",\"sum\")};\n }`};return{name:\"Einsum\",shaderCache:{hint:t.equation,inputDependencies:e.map(()=>\"rank\")},getRunData:()=>{let C=g.filter(A=>t.symbolToInfo.has(A)).map(A=>({type:12,data:t.symbolToInfo.get(A)?.dimValue||0}));C.push({type:12,data:f});let $=e.map((A,P)=>[...se(A)]).reduce((A,P)=>A.concat(P),C);return $.push(...se(u)),{outputs:[{dims:u,dataType:r}],dispatchGroup:{x:Math.ceil(f/64)},programUniforms:$}},getShaderSource:w}},Vd=(e,r)=>{let t=new fo(e.inputs,r.equation),u=t.outputDims,s=e.inputs.map((c,f)=>c.dims);e.compute(hy(s,e.inputs[0].dataType,t,u))},Wd=e=>{let r=e.equation.replace(/\\s+/g,\"\");return ke({equation:r})}});var gy,Gd,yy,by,Hd,Ld=ae(()=>{\"use strict\";Te();De();Re();gy=e=>{if(!e||e.length!==2)throw new Error(\"Expand requires 2 input.\");let r=e[0].dims,t=Array.from(e[1].getBigInt64Array(),Number),u=t.length{let t=e.length-r.length,u=[];for(let s=0;se.length>r.length?Gd(e,r):Gd(r,e),by=e=>{let r=e[0].dims,t=Array.from(e[1].getBigInt64Array(),Number),u=yy(r,t),s=e[0].dataType,c=s===9?4:1,f=Math.ceil(K.size(u)/c),d=w=>{let C=Z(\"input\",s,r.length,c),$=ne(\"output\",s,u.length,c),A;if(s===9){let P=(x,E,O=\"\")=>`\n let outputIndices${E} = ${$.offsetToIndices(`outputOffset + ${E}u`)};\n let offset${E} = ${C.broadcastedIndicesToOffset(`outputIndices${E}`,$)};\n let index${E} = offset${E} / 4u;\n let component${E} = offset${E} % 4u;\n ${x}[${E}] = ${O}(${C.getByOffset(`index${E}`)}[component${E}]);\n `;A=`\n let outputOffset = global_idx * ${c};\n var data = vec4(0);\n ${P(\"data\",0,\"u32\")}\n ${P(\"data\",1,\"u32\")}\n ${P(\"data\",2,\"u32\")}\n ${P(\"data\",3,\"u32\")}\n ${$.setByOffset(\"global_idx\",\"data\")}\n }`}else A=`\n let outputIndices = ${$.offsetToIndices(\"global_idx\")};\n let inputOffset = ${C.broadcastedIndicesToOffset(\"outputIndices\",$)};\n ${$.setByOffset(\"global_idx\",C.getByOffset(\"inputOffset\"))}\n }`;return`\n ${w.registerUniform(\"vec_size\",\"u32\").declareVariables(C,$)}\n ${w.mainStart()}\n ${w.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.vec_size\")}\n ${A}`},g=[{type:12,data:f},...se(r,u)];return{name:\"Expand\",shaderCache:{hint:`${u.length}`,inputDependencies:[\"rank\"]},getShaderSource:d,getRunData:()=>({outputs:[{dims:u,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(f/64)},programUniforms:g})}},Hd=e=>{gy(e.inputs),e.compute(by(e.inputs),{inputs:[0]})}});var vy,Fd,qd=ae(()=>{\"use strict\";Te();De();Re();li();vy=e=>{let r=e[0].dataType,t=K.size(e[0].dims),u=K.size(e[1].dims),s=u%4===0,c=f=>{let d=Z(\"x\",r,[1],4),g=Z(\"bias\",r,[1],4),w=ne(\"y\",r,[1],4),C=[{name:\"output_vec_size\",type:\"u32\"},{name:\"bias_size\",type:\"u32\"}],$=P=>`\n let bias${P}_offset: u32 = (global_idx * 4 + ${P}) % uniforms.bias_size;\n let bias${P} = 
${g.getByOffset(`bias${P}_offset / 4`)}[bias${P}_offset % 4];`,A=s?`\n let bias = ${g.getByOffset(\"global_idx % (uniforms.bias_size / 4)\")};`:`${$(0)}${$(1)}${$(2)}${$(3)}\n let bias = ${d.type.value}(bias0, bias1, bias2, bias3);`;return`${f.registerUniforms(C).declareVariables(d,g,w)}\n\n ${Xi(at(r))}\n\n ${f.mainStart(ur)}\n ${f.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_vec_size\")}\n\n let x = ${d.getByOffset(\"global_idx\")};\n ${A}\n let x_in = x + bias;\n ${w.setByOffset(\"global_idx\",Ji(\"x_in\"))}\n }`};return{name:\"FastGeluWithBias\",shaderCache:{hint:`${s}`,inputDependencies:[\"type\",\"type\"]},getShaderSource:c,getRunData:f=>({outputs:[{dims:f[0].dims,dataType:f[0].dataType}],programUniforms:[{type:12,data:Math.ceil(t/4)},{type:12,data:u}],dispatchGroup:{x:Math.ceil(t/ur/4)}})}},Fd=e=>{e.inputs.length<2||K.size(e.inputs[1].dims)===0?td(e):e.compute(vy(e.inputs))}});var wy,_y,Kd,Yd,Zd=ae(()=>{\"use strict\";Te();De();nt();Re();wy=e=>{if(!e||e.length!==2)throw new Error(\"Gather requires 2 inputs.\")},_y=(e,r)=>{let t=e[0].dims,u=e[1].dims,s=t.length,c=K.normalizeAxis(r.axis,s),f=t.slice(0);f.splice(c,1,...u);let d=t[c],g=e[0].dataType===9?4:1,w=Math.ceil(K.size(f)/g),C=[{type:12,data:w},{type:6,data:d},{type:12,data:c},...se(e[0].dims,e[1].dims,f)],$=A=>{let P=Z(\"data\",e[0].dataType,e[0].dims.length,g),x=Z(\"inputIndices\",e[1].dataType,e[1].dims.length),E=ne(\"output\",e[0].dataType,f.length,g),O=R=>{let j=u.length,U=`var indicesIndices${R} = ${x.type.indices}(0);`;for(let L=0;L1?`indicesIndices${R}[${L}]`:`indicesIndices${R}`} = ${f.length>1?`outputIndices${R}[uniforms.axis + ${L}]`:`outputIndices${R}`};`;U+=`\n var idx${R} = ${x.getByIndices(`indicesIndices${R}`)};\n if (idx${R} < 0) {\n idx${R} = idx${R} + uniforms.axisDimLimit;\n }\n var dataIndices${R} : ${P.type.indices};\n `;for(let L=0,F=0;L1?`dataIndices${R}[${L}]`:`dataIndices${R}`} = u32(idx${R});`,F+=j):(U+=`${s>1?`dataIndices${R}[${L}]`:`dataIndices${R}`} = ${f.length>1?`outputIndices${R}[${F}]`:`outputIndices${R}`};`,F++);return U},B;if(e[0].dataType===9){let R=(j,U,L=\"\")=>`\n let outputIndices${U} = ${E.offsetToIndices(`outputOffset + ${U}u`)};\n ${O(U)};\n let offset${U} = ${P.indicesToOffset(`dataIndices${U}`)};\n let index${U} = offset${U} / 4u;\n let component${U} = offset${U} % 4u;\n ${j}[${U}] = ${L}(${P.getByOffset(`index${U}`)}[component${U}]);\n `;B=`\n let outputOffset = global_idx * ${g};\n var value = vec4(0);\n ${R(\"value\",0,\"u32\")}\n ${R(\"value\",1,\"u32\")}\n ${R(\"value\",2,\"u32\")}\n ${R(\"value\",3,\"u32\")}\n ${E.setByOffset(\"global_idx\",\"value\")}\n `}else B=`\n let outputIndices = ${E.offsetToIndices(\"global_idx\")};\n ${O(\"\")};\n let value = ${P.getByIndices(\"dataIndices\")};\n ${E.setByOffset(\"global_idx\",\"value\")};\n `;return`\n ${A.registerUniform(\"outputSize\",\"u32\").registerUniform(\"axisDimLimit\",\"i32\").registerUniform(\"axis\",\"u32\").declareVariables(P,x,E)}\n ${A.mainStart()}\n ${A.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.outputSize\")}\n ${B}\n }`};return{name:\"Gather\",shaderCache:{hint:r.cacheKey,inputDependencies:[\"rank\",\"rank\"]},getRunData:()=>({outputs:[{dims:f,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(w/64)},programUniforms:C}),getShaderSource:$}},Kd=e=>ke({axis:e.axis}),Yd=(e,r)=>{let t=e.inputs;wy(t),e.compute(_y(e.inputs,r))}});var $y,Cy,Qd,Xd,Jd=ae(()=>{\"use strict\";Te();De();nt();Re();$y=e=>{if(!e||e.length!==2)throw new Error(\"GatherElements requires 2 inputs.\");if(e[0].dims.length<1)throw new 
Error(\"GatherElements requires that the data input be rank >= 1.\");if(e[0].dims.length!==e[1].dims.length)throw new Error(`GatherElements requires that the data input and\n indices input tensors be of same rank.`)},Cy=(e,r)=>{let t=e[0].dims,u=e[0].dataType,s=t.length,c=e[1].dims,f=e[1].dataType,d=K.normalizeAxis(r.axis,s),g=t[d],w=c.slice(0),C=K.size(w),$=Z(\"input\",u,s),A=Z(\"indicesInput\",f,c.length),P=ne(\"output\",u,w.length),x=[{type:12,data:C},{type:6,data:g},{type:12,data:d}];return x.push(...se(t,c,w)),{name:\"GatherElements\",shaderCache:{inputDependencies:[\"rank\",\"rank\"]},getRunData:()=>({outputs:[{dims:w,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(C/64)},programUniforms:x}),getShaderSource:B=>`\n ${B.registerUniform(\"outputSize\",\"u32\").registerUniform(\"axisDimLimit\",\"i32\").registerUniform(\"axis\",\"u32\").declareVariables($,A,P)}\n ${B.mainStart()}\n ${B.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.outputSize\")}\n\n let outputIndices = ${P.offsetToIndices(\"global_idx\")};\n\n var idx = ${A.getByOffset(\"global_idx\")};\n if (idx < 0) {\n idx = idx + uniforms.axisDimLimit;\n }\n var inputIndices = ${$.type.indices}(outputIndices);\n ${$.indicesSet(\"inputIndices\",\"uniforms.axis\",\"u32(idx)\")};\n let value = ${$.getByIndices(\"inputIndices\")};\n\n ${P.setByOffset(\"global_idx\",\"value\")};\n }`}},Qd=e=>ke({axis:e.axis}),Xd=(e,r)=>{let t=e.inputs;$y(t),e.compute(Cy(e.inputs,r))}});var Sy,xy,ec,tc,rc=ae(()=>{\"use strict\";Te();De();Re();Sy=e=>{if(!e)throw new Error(\"Input is missing\");if(e.length<2||e.length>3)throw new Error(\"Invaid input number.\");if(e.length===3&&e[2].dims.length>2)throw new Error(\"Invalid input shape of C\");if(e[0].dataType!==e[1].dataType||e.length===3&&e[0].dataType!==e[2].dataType)throw new Error(\"Input types are mismatched\")},xy=(e,r)=>{let t=e[0].dims.slice(),u=e[1].dims.slice(),[s,c,f]=Jn.getShapeOfGemmResult(t,r.transA,u,r.transB,e.length===3?e[2].dims:void 0),d=[s,c];if(!d)throw new Error(\"Can't use gemm on the given tensors\");let g=K.size(d),w=[{type:12,data:g},{type:12,data:s},{type:12,data:c},{type:12,data:f},{type:1,data:r.alpha},{type:1,data:r.beta}],C=[\"type\",\"type\"];e.length===3&&(w.push(...se(e[2].dims)),C.push(\"rank\")),w.push(...se(d));let $=A=>{let P=\"\";r.transA&&r.transB?P=\"value += a[k * uniforms.M + m] * b[n * uniforms.K + k];\":r.transA&&!r.transB?P=\"value += a[k * uniforms.M + m] * b[k * uniforms.N + n];\":!r.transA&&r.transB?P=\"value += a[m * uniforms.K + k] * b[n * uniforms.K + k];\":!r.transA&&!r.transB&&(P=\"value += a[m * uniforms.K + k] * b[k * uniforms.N + n];\");let x=r.alpha===1?\"\":\"value *= uniforms.alpha;\",E=Z(\"a\",e[0].dataType,e[0].dims),O=Z(\"b\",e[1].dataType,e[1].dims),B=E.type.value,R=null,j=[E,O];e.length===3&&(R=Z(\"c\",e[2].dataType,e[2].dims.length),j.push(R));let U=ne(\"output\",e[0].dataType,d.length);j.push(U);let L=[{name:\"output_size\",type:\"u32\"},{name:\"M\",type:\"u32\"},{name:\"N\",type:\"u32\"},{name:\"K\",type:\"u32\"},{name:\"alpha\",type:\"f32\"},{name:\"beta\",type:\"f32\"}];return`\n ${A.registerUniforms(L).declareVariables(...j)}\n\n ${A.mainStart()}\n ${A.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size\")}\n\n let m = global_idx / uniforms.N;\n let n = global_idx % uniforms.N;\n\n var value = ${B}(0);\n for (var k: u32 = 0u; k < uniforms.K; k++) {\n ${P}\n }\n\n ${x}\n ${(()=>R!=null?`let cOffset = ${R.broadcastedIndicesToOffset(\"vec2(m, n)\",U)}; value += ${B}(uniforms.beta) * ${R.getByOffset(\"cOffset\")};`:\"\")()}\n 
output[global_idx] = value;\n }`};return{name:\"Gemm\",shaderCache:{hint:`${r.cacheKey}`,inputDependencies:C},getRunData:()=>({outputs:[{dims:d,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(g/64)},programUniforms:w}),getShaderSource:$}},ec=e=>{let r=e.transA,t=e.transB,u=e.alpha,s=e.beta;return{transA:r,transB:t,alpha:u,beta:s,cacheKey:`${e.transA};${e.transB};${e.alpha===1}`}},tc=(e,r)=>{Sy(e.inputs),e.compute(xy(e.inputs,r))}});var Iy,Ay,Ty,nc,ic=ae(()=>{\"use strict\";Te();De();Re();Iy=(e,r)=>{let t=e[0].dims,u=t,s=2,c=K.sizeToDimension(t,s),f=K.sizeFromDimension(t,s),d=He(f),g=f/d,w=[t[0],t[1],g],C=[\"rank\",\"type\",\"type\"],$=[{type:12,data:f},{type:12,data:g}];$.push(...se(w,w));let A=P=>{let x=Z(\"x\",e[0].dataType,w.length,d),E=Z(\"scale\",e[1].dataType,e[1].dims),O=Z(\"bias\",e[2].dataType,e[2].dims),B=ne(\"output\",e[0].dataType,w.length,d),R=[x,E,O,B],j=x.type.value,U=d===1?\"f32\":`vec${d}`,L=64,F=[{name:\"normSize\",type:\"u32\"},{name:\"normPackedSize\",type:\"u32\"}];return`\n var meanShared : f32;\n var squaredNormShared : f32;\n var workgroupShared : array<${U}, ${L}>;\n const workgroupSize = ${L}u;\n ${P.registerUniforms(F).declareVariables(...R)}\n ${P.mainStart(L)}\n let norm = global_idx / workgroupSize;\n let batch = norm / uniforms.x_shape[1];\n let channel = norm % uniforms.x_shape[1];\n let localIndex = local_id.x;\n\n // initialize workgroup memory\n var initial = ${U}(0);\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n initial = initial + ${U}(${x.get(\"batch\",\"channel\",\"h\")});\n }\n workgroupShared[localIndex] = initial;\n workgroupBarrier();\n\n // Calculate the mean of current channel data.\n for (var currSize = workgroupSize >> 1; currSize > 0; currSize = currSize >> 1) {\n if (localIndex < currSize) {\n workgroupShared[localIndex] = workgroupShared[localIndex] + workgroupShared[localIndex + currSize];\n }\n workgroupBarrier();\n }\n if (localIndex == 0) {\n meanShared = ${It(\"workgroupShared[0]\",d)} / f32(uniforms.normSize);\n }\n workgroupBarrier();\n\n // reinitialize workgroup memory.\n initial = ${U}(0);\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n let deviation = ${U}(${x.get(\"batch\",\"channel\",\"h\")}) - ${U}(meanShared);\n initial = initial + deviation * deviation;\n }\n workgroupShared[localIndex] = initial;\n workgroupBarrier();\n\n // Calculate the sum of square of deviation of current channel data.\n for (var currSize = workgroupSize >> 1; currSize > 0; currSize = currSize >> 1) {\n if (localIndex < currSize) {\n workgroupShared[localIndex] = workgroupShared[localIndex] + workgroupShared[localIndex + currSize];\n }\n workgroupBarrier();\n }\n if (localIndex == 0) {\n squaredNormShared = ${It(\"workgroupShared[0]\",d)};\n }\n workgroupBarrier();\n\n let invStdDev = inverseSqrt(squaredNormShared / f32(uniforms.normSize) + f32(${r.epsilon}));\n let channelScale = invStdDev * f32(${E.getByOffset(\"channel\")});\n let channelShift = f32(${O.getByOffset(\"channel\")}) - meanShared * channelScale;\n for (var h = localIndex; h < uniforms.normPackedSize; h += workgroupSize) {\n let value = ${x.get(\"batch\",\"channel\",\"h\")} * ${j}(${U}(channelScale)) + ${j}(${U}(channelShift));\n ${B.set(\"batch\",\"channel\",\"h\",\"value\")};\n }\n 
}`};return{name:\"InstanceNormalization\",shaderCache:{hint:`${r.epsilon};${d}`,inputDependencies:C},getRunData:()=>({outputs:[{dims:u,dataType:e[0].dataType}],dispatchGroup:{x:c},programUniforms:$}),getShaderSource:A}},Ay=(e,r,t,u,s,c,f,d)=>{let g=He(f),w=64,C=g===1?\"vec2f\":`mat2x${g}f`,$=g===1?\"f32\":`vec${g}f`,A=(F,te)=>`${C}(${F}, ${te})`,P=s*f/g,x=Math.ceil(c/w),E=[\"type\"],O=[{type:12,data:x},{type:12,data:c},{type:12,data:Math.floor(f/g)},{type:12,data:Math.floor(c*f/g)}],B=F=>{let te=Z(\"input\",r.dataType,r.dims,g);return`\n ${F.declareVariables(te)}\n @group(0) @binding(1) var output : array<${C}>;\n struct Uniforms {wg_size:u32, H:u32, C:u32, image_size:u32};\n @group(0) @binding(2) var uniforms: Uniforms;\n\n ${F.mainStart(w)}\n let currentImageNumber = global_idx / ${w} / uniforms.C;\n let currentChannelNumber = (global_idx / ${w}) % uniforms.C;\n let wgOffset = local_id.x * uniforms.wg_size;\n if (wgOffset >= uniforms.H) {\n return;\n }\n let wgMax = min(wgOffset + uniforms.wg_size, uniforms.H);\n\n let offset = currentImageNumber * uniforms.image_size + currentChannelNumber;\n var sum = ${xt(\"f32\",g)};\n var squaredSum = ${xt(\"f32\",g)};\n for (var i: u32 = wgOffset; i < wgMax; i++) {\n let value = ${$}(input[offset + i * uniforms.C]);\n sum += value;\n squaredSum += value * value;\n }\n output[global_idx] = ${A(\"sum\",\"squaredSum\")};\n }`},R=e.compute({name:\"InstanceNormComputeMean\",shaderCache:{hint:`${g}`,inputDependencies:E},getRunData:()=>({outputs:[{dims:[s,f,w,2],dataType:1}],dispatchGroup:{x:s*f/g},programUniforms:O}),getShaderSource:B},{inputs:[r],outputs:[-1]})[0],j=[{type:12,data:P},{type:12,data:c},{type:12,data:Math.floor(f/g)},{type:12,data:Math.floor(w*f/g)}],U=[\"type\",\"type\",\"type\"],L=F=>{let te=Z(\"scale\",t.dataType,t.dims,g),J=Z(\"bias\",u.dataType,u.dims,g);return`\n @group(0) @binding(0) var input : array<${C}>;\n @group(0) @binding(1) var scale : array<${te.type.storage}>;\n @group(0) @binding(2) var bias : array<${J.type.storage}>;\n @group(0) @binding(3) var output : array<${C}>;\n struct Uniforms {units_of_work : u32, H: u32, C : u32, image_size : u32};\n @group(0) @binding(4) var uniforms: Uniforms;\n\n ${F.mainStart()}\n ${F.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.units_of_work\")}\n let currentImageNumber = global_idx / uniforms.C;\n let currentChannelNumber = global_idx % uniforms.C;\n\n let offset = currentImageNumber * uniforms.image_size;\n var sum = ${xt(\"f32\",g)};\n var squaredSum = ${xt(\"f32\",g)};\n for (var i: u32 = 0; i < min(${w}, uniforms.H); i++) {\n let value = input[offset + i + currentChannelNumber * ${w}];\n sum += value[0];\n squaredSum += value[1];\n }\n sum = sum / f32(uniforms.H);\n squaredSum = squaredSum / f32(uniforms.H);\n let invStdDev = inverseSqrt(squaredSum - sum * sum + f32(${d}));\n let channelScale = invStdDev * ${$}(scale[currentChannelNumber]);\n let channelShift = ${$}(bias[currentChannelNumber]) - sum * channelScale;\n\n output[global_idx] = ${A(\"channelScale\",\"channelShift\")};\n }`};return e.compute({name:\"InstanceNormComputeChannelScaleShift\",shaderCache:{hint:`${g};${d}`,inputDependencies:U},getRunData:()=>({outputs:[{dims:[s,f,2],dataType:1}],dispatchGroup:{x:Math.ceil(P/64)},programUniforms:j}),getShaderSource:L},{inputs:[R,t,u],outputs:[-1]})[0]},Ty=(e,r,t)=>{let 
u=r[0].dims,s=u,c=u[0],f=u[u.length-1],d=K.sizeFromDimension(u,1)/f,g=He(f),w=K.size(s)/g,C=[{type:12,data:d},{type:12,data:Math.floor(f/g)}],$=[\"type\",\"type\"],A=Ay(e,r[0],r[1],r[2],c,d,f,t.epsilon),P=x=>{let E=Ne(r[0].dataType),O=g===1?\"vec2f\":`mat2x${g}f`,B=g===1?E:`vec${g}<${E}>`,R=Z(\"input\",r[0].dataType,r[0].dims,g),j=ne(\"output\",r[0].dataType,s,g);return`\n @group(0) @binding(0) var input : array<${R.type.storage}>;\n @group(0) @binding(1) var scaleInput : array<${O}>;\n @group(0) @binding(2) var output : array<${j.type.storage}>;\n struct Uniforms {H: u32, C : u32};\n @group(0) @binding(3) var uniforms: Uniforms;\n\n ${x.mainStart()}\n let currentImageNumber = global_idx / (uniforms.C * uniforms.H);\n let currentChannelNumber = global_idx % uniforms.C;\n\n let scaleOffset = currentImageNumber * uniforms.C + currentChannelNumber;\n let scale = scaleInput[scaleOffset];\n output[global_idx] = fma(input[global_idx], ${B}(scale[0]), ${B}(scale[1]));\n }`};e.compute({name:\"InstanceNormalizationNHWC\",shaderCache:{hint:`${g}`,inputDependencies:$},getRunData:()=>({outputs:[{dims:s,dataType:r[0].dataType}],dispatchGroup:{x:Math.ceil(w/64)},programUniforms:C}),getShaderSource:P},{inputs:[r[0],A]})},nc=(e,r)=>{r.format===\"NHWC\"?Ty(e,e.inputs,r):e.compute(Iy(e.inputs,r))}});var Ey,Py,oc,ac=ae(()=>{\"use strict\";Te();De();Re();Ey=e=>{if(!e||e.length<2)throw new Error(\"layerNorm requires at least 2 inputs.\")},Py=(e,r,t)=>{let u=r.simplified,s=e[0].dims,c=e[1],f=!u&&e[2],d=s,g=K.normalizeAxis(r.axis,s.length),w=K.sizeToDimension(s,g),C=K.sizeFromDimension(s,g),$=K.size(c.dims),A=f?K.size(f.dims):0;if($!==C||f&&A!==C)throw new Error(`Size of X.shape()[axis:] == ${C}.\n Size of scale and bias (if provided) must match this.\n Got scale size of ${$} and bias size of ${A}`);let P=[];for(let L=0;L1,R=t>2,j=L=>{let F=Ne(e[0].dataType),te=[Z(\"x\",e[0].dataType,e[0].dims,x),Z(\"scale\",c.dataType,c.dims,x)];f&&te.push(Z(\"bias\",f.dataType,f.dims,x)),te.push(ne(\"output\",e[0].dataType,d,x)),B&&te.push(ne(\"mean_data_output\",1,P)),R&&te.push(ne(\"inv_std_output\",1,P));let J=[{name:\"norm_count\",type:\"u32\"},{name:\"norm_size\",type:\"f32\"},{name:\"norm_size_vectorized\",type:\"u32\"},{name:\"epsilon\",type:\"f32\"}];return`\n ${L.registerUniforms(J).declareVariables(...te)}\n ${L.mainStart()}\n ${L.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.norm_count\")}\n let offset = global_idx * uniforms.norm_size_vectorized;\n var mean_vector = ${xt(\"f32\",x)};\n var mean_square_vector = ${xt(\"f32\",x)};\n\n for (var h: u32 = 0u; h < uniforms.norm_size_vectorized; h++) {\n let value = ${lr(F,x,\"x[h + offset]\")};\n mean_vector += value;\n mean_square_vector += value * value;\n }\n let mean = ${It(\"mean_vector\",x)} / uniforms.norm_size;\n let inv_std_dev = inverseSqrt(${It(\"mean_square_vector\",x)} / uniforms.norm_size ${u?\"\":\"- mean * mean\"} + uniforms.epsilon);\n\n for (var j: u32 = 0; j < uniforms.norm_size_vectorized; j++) {\n let f32input = ${lr(F,x,\"x[j + offset]\")};\n let f32scale = ${lr(F,x,\"scale[j]\")};\n output[j + offset] = ${te[0].type.value}((f32input ${u?\"\":\"- mean\"}) * inv_std_dev * f32scale\n ${f?`+ ${lr(F,x,\"bias[j]\")}`:\"\"}\n );\n }\n\n ${B?\"mean_data_output[global_idx] = mean\":\"\"};\n ${R?\"inv_std_output[global_idx] = inv_std_dev\":\"\"};\n }`},U=[{dims:d,dataType:e[0].dataType}];return 
B&&U.push({dims:P,dataType:1}),R&&U.push({dims:P,dataType:1}),{name:\"LayerNormalization\",shaderCache:{hint:`${x};${t};${u}`,inputDependencies:E},getRunData:()=>({outputs:U,dispatchGroup:{x:Math.ceil(w/64)},programUniforms:O}),getShaderSource:j}},oc=(e,r)=>{Ey(e.inputs),e.compute(Py(e.inputs,r,e.outputCount))}});var ky,Oy,sc,uc,lc=ae(()=>{\"use strict\";Te();De();nt();Re();ky=(e,r)=>{if(e.length<3||e.length>4)throw new Error(\"MatMulNBits requires 3 or 4 inputs\");let t=e[0],u=t.dims.length;if(t.dims[u-1]!==r.k)throw new Error(\"The last dim of input shape does not match the k value\");let s=Math.floor((r.k+r.blockSize-1)/r.blockSize),c=r.blockSize/8*r.bits,f=e[1];if(!K.areEqual(f.dims,[r.n,s,c]))throw new Error(\"The second inputs must be 3D tensor with shape N X nBlocksPerCol X blobSize\");let g=e[2].dims;if(K.size(g)!==r.n*s)throw new Error(\"scales input size error.\");if(e.length===4){let C=e[3].dims,$=r.bits>4?r.n*s:r.n*Math.floor((s+1)/2);if(K.size(C)!==$)throw new Error(\"zeroPoints input size error.\")}},Oy=(e,r,t,u)=>{let s=e[0].dims,c=s.length,f=Math.floor((r.k+r.blockSize-1)/r.blockSize),d=s[c-2],g=r.k,w=r.n,C=s.slice(0,c-2),$=K.size(C),P=r.blockSize/8*r.bits/4,x=e[0].dataType,E=He(d),O=He(r.k),B=He(P),R=or(x),j=d*f*R,U=Math.floor(u/j),L=f<=t[0]&&U>0,F=!L||U>=4?He(w):U>=2&&He(w)>=2?2:1,te=C.concat([d,w]),J=K.size(te)/F/E,oe=L?[]:[{type:12,data:J},{type:12,data:r.blockSize}],le=[$,d,g/O],ge=K.convertShape(e[1].dims).slice();ge.splice(-1,1,P/B),oe.push(...se(le)),oe.push(...se(ge)),oe.push(...se(e[2].dims)),e.length===4&&oe.push(...se(K.convertShape(e[3].dims)));let X=[$,d,w/F];oe.push(...se(X));let pe=we=>{let ue=le.length,me=Z(\"a\",e[0].dataType,ue,O),Ee=Z(\"b\",12,ge.length,B),Pe=Z(\"scales\",e[2].dataType,e[2].dims.length),Ce=[me,Ee,Pe],be=e.length===4?Z(\"zero_points\",12,e[3].dims.length):void 0;be&&Ce.push(be);let Ae=X.length,_e=ne(\"output\",e[0].dataType,Ae,F),Je=[{name:\"output_size\",type:\"u32\"},{name:\"block_size\",type:\"u32\"}],ee=Ne(e[0].dataType),ce=(()=>{switch(O){case 1:return`array<${ee}, 8>`;case 2:return`mat4x2<${ee}>`;case 4:return`mat2x4<${ee}>`;default:throw new Error(`${O}-component is not supported.`)}})(),Be=`\n for (var word: u32 = 0; word < ${P}; word += ${B}) {\n ${Ee.indicesSet(\"b_indices\",\"2\",\"word\")};\n let b_data = ${Ee.getByIndices(\"b_indices\")};\n for (var i: u32 = 0; i < ${B}; i++) {\n let b_value: u32 = ${B===1?\"b_data\":\"b_data[word + i]\"};\n let b_mask: u32 = 0x0F0F0F0Fu;\n let b_value_lower: vec4 = unpack4xU8(b_value & b_mask);\n let b_value_upper: vec4 = unpack4xU8((b_value >> 4) & b_mask);\n let b_quantized_values = ${ce}(${Array.from({length:4},(Ye,Ke)=>`${ee}(b_value_lower[${Ke}]), ${ee}(b_value_upper[${Ke}])`).join(\", \")});\n let b_dequantized_values = ${(()=>O===1?`${ce}(${Array.from({length:8},(Ye,Ke)=>`(b_quantized_values[${Ke}] - zero_point) * scale`).join(\", \")});`:`(b_quantized_values - ${ce}(${Array(8).fill(\"zero_point\").join(\",\")})) * scale;`)()};\n // Number of B elements per 32-bit word is 32/bits = 32/4 = 8\n for (var m: u32 = 0; m < ${L?d:E}u; m++) {\n ${me.indicesSet(\"a_indices\",ue-2,L?\"m\":`row * ${E} + m`)};\n ${me.indicesSet(\"a_indices\",ue-1,\"word_offset\")};\n var input_offset = ${me.indicesToOffset(\"a_indices\")};\n var a_data: ${ce};\n for (var j: u32 = 0; j < ${8/O}; j++) {\n a_data[j] = ${me.getByOffset(\"input_offset\")};\n input_offset++;\n }\n ${L?\"workgroup_shared[workgroup_shared_offset + m]\":\"output_values[m]\"}${F>1?\"[c]\":\"\"} += 
${Array.from({length:8/O},(Ye,Ke)=>`${O===1?`a_data[${Ke}] * b_dequantized_values[${Ke}]`:`dot(a_data[${Ke}], b_dequantized_values[${Ke}])`}`).join(\" + \")};\n }\n word_offset += ${8/O};\n }\n }`,tt=be?`\n zero_point_offset += 4;\n if (zero_point_offset == 32) {\n zero_point_offset = 0;\n zero_point_index++;\n zero_point_word = ${be.getByOffset(\"zero_point_index\")};\n }`:\"\";return L?`\n var workgroup_shared: array<${_e.type.value}, ${d*f}>;\n ${we.declareVariables(...Ce,_e)}\n ${we.mainStart([f,1,1])}\n var a_indices: ${me.type.indices};\n var block = local_id.x;\n var col = workgroup_id.y;\n var batch = workgroup_id.z;\n ${me.indicesSet(\"a_indices\",\"0\",\"batch\")};\n // Two zero points are packed into one byte when uniforms.bits is 4.\n for (var c: u32 = 0; c < ${F}; c++) {\n let col_times_components_plus_c = col * ${F} + c;\n ${be?`\n var zero_point_bytes_per_col: u32 = (${f} + 1) / 2;\n var zero_point_byte_count: u32 = col_times_components_plus_c * zero_point_bytes_per_col + (block >> 0x1u);\n var zero_point_word_index: u32 = zero_point_byte_count >> 0x2u;\n var zero_point_byte_offset: u32 = zero_point_byte_count & 0x3u;\n var zero_point_nibble_offset: u32 = block & 0x1u;\n var zero_point_bits_offset: u32 = (zero_point_byte_offset << 3) + (zero_point_nibble_offset << 2);\n var zero_point_word: u32 = ${be.getByOffset(\"zero_point_word_index\")} >> zero_point_bits_offset;`:\"\"}\n var b_indices: ${Ee.type.indices};\n ${Ee.indicesSet(\"b_indices\",\"0\",\"col_times_components_plus_c\")};\n // The scale and zero points are computed per block.\n var scales_index = col_times_components_plus_c * ${f} + block;\n let scale = ${Pe.getByOffset(\"scales_index\")};\n // The default zero point is 8 for unsigned 4-bit quantization.\n let zero_point = ${ee}(${be?\"(zero_point_word) & 0xFu\":8});\n ${Ee.indicesSet(\"b_indices\",\"1\",\"block\")};\n var word_offset: u32 = block * ${r.blockSize/O};\n var workgroup_shared_offset: u32 = block * ${d};\n ${Be}\n }\n workgroupBarrier();\n if (local_id.x == 0u) {\n var output_indices: ${_e.type.indices};\n ${_e.indicesSet(\"output_indices\",\"0\",\"batch\")};\n ${_e.indicesSet(\"output_indices\",Ae-1,\"col\")};\n ${_e.indicesSet(\"output_indices\",Ae-2,\"0\")};\n var output_offset = ${_e.indicesToOffset(\"output_indices\")};\n for (var m: u32 = 0u; m < ${d}u; m++) {\n var output_value: ${_e.type.value} = ${_e.type.value}(0);\n var workgroup_shared_offset: u32 = m;\n for (var b: u32 = 0u; b < ${f}u; b++) {\n output_value += workgroup_shared[workgroup_shared_offset];\n workgroup_shared_offset += ${d};\n }\n ${_e.setByOffset(\"output_offset\",\"output_value\")};\n output_offset += ${w/F};\n }\n }\n }`:`\n ${we.registerUniforms(Je).declareVariables(...Ce,_e)}\n ${we.mainStart()}\n ${we.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size\")}\n var output_values: array<${_e.type.value}, ${E}>;\n var output_indices = ${_e.offsetToIndices(\"global_idx\")};\n var col = ${_e.indicesGet(\"output_indices\",Ae-1)};\n var row = ${_e.indicesGet(\"output_indices\",Ae-2)};\n var a_indices: ${me.type.indices} = output_indices;\n // Two zero points are packed into one byte because uniforms.bits <= 4.\n // zero_point_offset is either 0 or 4. 
It is bit offset within one byte.\n // TODO support zero_point_offset for bits > 4\n ${be?`\n var zero_point_abs_offset = col * ${F} * ((${f} + 1) / 2);\n var zero_point_index: u32 = zero_point_abs_offset / 4;\n var zero_point_word: u32 = ${be.getByOffset(\"zero_point_index\")};\n var zero_point_offset: u32 = (zero_point_abs_offset % 4) * 8;`:\"\"}\n var scale_index = col * ${f*F};\n var b_indices: ${Ee.type.indices};\n for (var c: u32 = 0; c < ${F}; c++) {\n ${Ee.indicesSet(\"b_indices\",\"0\",`col * ${F} + c`)};\n var block_offset: u32 = 0;\n for (var block: u32 = 0; block < ${f}; block++) {\n // The scale and zero points are computed per block.\n let scale = ${Pe.getByOffset(\"scale_index\")};\n // The default zero point is 8 for unsigned 4-bit quantization.\n let zero_point = ${ee}(${be?\"extractBits(zero_point_word, zero_point_offset, 4)\":8});\n ${Ee.indicesSet(\"b_indices\",\"1\",\"block\")};\n var word_offset: u32 = block_offset;\n ${Be}\n scale_index++;\n ${tt}\n block_offset += uniforms.block_size / ${O};\n }\n // Drop the trailing 4 bits if the zero_poit_offset is not a byte boundary to align with the next byte.\n ${be?`if (zero_point_offset % 8 > 0) {\n ${tt}\n }`:\"\"}\n }\n for (var k: u32 = 0u; k < ${E}u; k++) {\n ${_e.indicesSet(\"output_indices\",Ae-2,`${E} * row + k`)};\n ${_e.setByIndices(\"output_indices\",\"output_values[k]\")}\n }\n }`};return{name:L?\"BlockwiseMatMulNBits\":\"MatMulNBits\",shaderCache:{hint:`${r.cacheKey};${d};${x};${e.length}`,inputDependencies:Array(e.length).fill(\"rank\")},getRunData:()=>({outputs:[{dims:te,dataType:x}],name:L?\"BlockwiseMatMulNBits\":\"MatMulNBits\",dispatchGroup:L?{x:1,y:Math.ceil(w/F),z:$}:{x:Math.ceil(J/64)},programUniforms:oe}),getShaderSource:pe}},sc=(e,r)=>{ky(e.inputs,r);let t=e.getMaxComputeWorkgroupSizes(),u=e.getMaxComputeWorkgroupStoragesize();e.compute(Oy(e.inputs,r,t,u))},uc=e=>ke(e)});var ft,Ry,cc,dc,By,po,fc,pc=ae(()=>{\"use strict\";Te();De();nt();Xn();Qi();Re();Dr();ft=(e,r)=>e.length>r&&e[r].dims.length>0&&K.size(e[r].dims)>0?e[r]:void 0,Ry=(e,r)=>{let t=e[0],u=ft(e,1),s=ft(e,2),c=ft(e,3),f=ft(e,4),d=ft(e,5),g=ft(e,6),w=ft(e,7);if(t.dims.length!==3&&t.dims.length!==5)throw new Error(\"Input query is expected to have 3 or 5 dimensions\");let C=!1,$=t.dims[0],A=t.dims[1],P=t.dims.length===3?C?t.dims[2]/3:t.dims[2]:r.numHeads*t.dims[4],x=A,E=0,O=0,B=Math.floor(P/r.numHeads);if(g&&w){if(g.dims.length!==4)throw new Error('Input \"past_key\" is expected to have 4 dimensions');if(g.dims[0]!==$||g.dims[1]!==r.numHeads||g.dims[3]!==B)throw new Error('Input \"past_key\" shape (batch_size, num_heads, past_sequence_length, head_size)');if(w.dims[0]!==$||w.dims[1]!==r.numHeads||w.dims[3]!==B)throw new Error('Input \"past_value\" shape (batch_size, num_heads, past_sequence_length, head_size)');if(g.dims[2]!==w.dims[2])throw new Error('Input \"past_key\" and \"past_value\" shall have same dim 2 (past_sequence_length)');if(w.dims.length!==4)throw new Error('Input \"past_value\" is expected to have 4 dimensions');E=g.dims[2],O=g.dims[2]}else if(g||w)throw new Error('Input \"past_key\" and \"past_value\" shall be both present or both absent');let R;if(u){if(t.dims.length!==3)throw new Error('Input \"query\" is expected to have 3 dimensions when key is given');if(u.dims.length<3||u.dims.length>5)throw new Error('Input \"key\" is expected to have 3, 4, or 5 dimensions');if(t.dims[0]!==u.dims[0])throw new Error('Input \"query\" and \"key\" shall have same dim 0 (batch size)');if(u.dims.length===3){if(u.dims[2]!==t.dims[2])throw new 
Error('Input \"query\" and \"key\" shall have same dim 2 (hidden_size)');R=2,x=u.dims[1]}else if(u.dims.length===5){if(u.dims[2]!==r.numHeads||u.dims[3]!==2||u.dims[4]!==B)throw new Error('Expect \"key\" shape (batch_size, kv_sequence_length, num_heads, 2, head_size) for packed kv');if(s)throw new Error('Expect \"value\" be none when \"key\" has packed kv format.');R=5,x=u.dims[1]}else{if(u.dims[1]!==r.numHeads||u.dims[3]!==B)throw new Error('Expect \"key\" shape (batch_size, num_heads, kv_sequence_length, head_size) for past_key');R=0,x=u.dims[2]}}else{if(t.dims.length!==3&&t.dims.length!==5)throw new Error('Input \"query\" is expected to have 3 or 5 dimensions when key is empty');if(t.dims.length===5&&(t.dims[2]!==r.numHeads||t.dims[3]!==3))throw new Error('Expect \"query\" shape (batch_size, kv_sequence_length, num_heads, 3, head_size) for packed kv');R=3}if(c){if(c.dims.length!==1)throw new Error('Input \"bias\" is expected to have 1 dimension');if(s&&t.dims.length===5&&t.dims[3]===2)throw new Error(\"bias is not allowed for packed kv.\")}let j=0;if(f){j=8;let J=f.dims;throw J.length===1?J[0]===$?j=1:J[0]===3*$+2&&(j=3):J.length===2&&J[0]===$&&J[1]===x&&(j=5),j===8?new Error('Input \"key_padding_mask\" shape shall be (batch_size) or (batch_size, kv_sequence_length)'):new Error(\"Mask not supported\")}let U=!1,L=P;if(s){if(s.dims.length!==3&&s.dims.length!==4)throw new Error('Input \"value\" is expected to have 3 or 4 dimensions');if(t.dims[0]!==s.dims[0])throw new Error('Input \"query\" and \"value\" shall have same dim 0 (batch_size)');if(s.dims.length===3){if(x!==s.dims[1])throw new Error('Input \"key\" and \"value\" shall have the same dim 1 (kv_sequence_length)');L=s.dims[2]}else{if(x!==s.dims[2])throw new Error('Input \"past_key\" and \"past_value\" shall have the same dim 2 (kv_sequence_length)');L=s.dims[1]*s.dims[3],U=!0}}let F=E+x,te=!1;if(f)throw new Error(\"Key padding mask is not supported\");if(d){if(d.dims.length!==4)throw new Error('Input \"relative_position_bias\" is expected to have 4 dimensions');if(d.dims[0]!==$&&d.dims[0]!==1||d.dims[1]!==r.numHeads||d.dims[2]!==A||d.dims[3]!==F)throw new Error('Input \"relative_position_bias\" shape (batch_size, 1, sequence_length, kv_sequence_length)')}return{batchSize:$,sequenceLength:A,pastSequenceLength:E,kvSequenceLength:x,totalSequenceLength:F,maxSequenceLength:O,inputHiddenSize:0,hiddenSize:P,vHiddenSize:L,headSize:B,vHeadSize:Math.floor(L/r.numHeads),numHeads:r.numHeads,isUnidirectional:!1,pastPresentShareBuffer:!1,maskFilterValue:r.maskFilterValue,maskType:j,scale:r.scale,broadcastResPosBias:te,passPastInKv:U,qkvFormat:R}},cc=e=>ke({...e}),dc=ke({perm:[0,2,1,3]}),By=(e,r,t,u,s,c,f)=>{let d=[u,s,c],g=K.size(d),w=[{type:12,data:g},{type:12,data:f},{type:12,data:c}],C=$=>{let A=ne(\"qkv_with_bias\",r.dataType,d),P=Z(\"qkv\",r.dataType,d),x=Z(\"bias\",t.dataType,d),E=[{name:\"output_size\",type:\"u32\"},{name:\"bias_offset\",type:\"u32\"},{name:\"hidden_size\",type:\"u32\"}];return`\n ${$.registerUniforms(E).declareVariables(P,x,A)}\n ${$.mainStart()}\n ${$.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size\")}\n let bias_offset_idx = (global_idx % uniforms.hidden_size) + uniforms.bias_offset;\n\n qkv_with_bias[global_idx] = qkv[global_idx] + bias[bias_offset_idx];\n }`};return 
e.compute({name:\"MultiHeadAttentionAddBias\",shaderCache:{inputDependencies:[\"type\",\"type\"]},getRunData:()=>({outputs:[{dims:d,dataType:r.dataType,gpuDataType:0}],dispatchGroup:{x:Math.ceil(g/64)},programUniforms:w}),getShaderSource:C},{inputs:[r,t],outputs:[-1]})[0]},po=(e,r,t,u,s,c,f,d)=>{let g=c;if(f){if(u===1)throw new Error(\"AddBiasReshape is not implemented. Please export your model with packed QKV or KV\");return g=By(e,c,f,r,u,t*s,d),g=g.reshape([r,u,t,s]),e.compute($t(g,dc.perm),{inputs:[g],outputs:[-1]})[0]}else return c.dims.length===3&&(g=c.reshape([r,u,t,s])),e.compute($t(g,dc.perm),{inputs:[g],outputs:[-1]})[0]},fc=(e,r)=>{let t=Ry(e.inputs,r),u=e.inputs[0],s=ft(e.inputs,1),c=ft(e.inputs,2),f=ft(e.inputs,3),d=ft(e.inputs,4),g=ft(e.inputs,5),w=ft(e.inputs,6),C=ft(e.inputs,7);if(u.dims.length===5)throw new Error(\"Packed QKV is not implemented\");if(s?.dims.length===5)throw new Error(\"Packed KV is not implemented\");let $=s&&c&&s.dims.length===4&&c.dims.length===4,A=po(e,t.batchSize,t.numHeads,t.sequenceLength,t.headSize,u,f,0);if($)return ai(e,A,s,c,d,void 0,w,C,g,t,r);if(!s||!c)throw new Error(\"key and value must be provided\");let P=po(e,t.batchSize,t.numHeads,t.kvSequenceLength,t.headSize,s,f,t.hiddenSize),x=po(e,t.batchSize,t.numHeads,t.kvSequenceLength,t.vHeadSize,c,f,2*t.hiddenSize);ai(e,A,P,x,d,void 0,w,C,g,t,r)}});var Dy,zy,My,jy,Uy,Vy,Wy,Ny,mc,hc=ae(()=>{\"use strict\";Te();De();Re();Dy=e=>{if(!e||e.length<1)throw new Error(\"Too few inputs\");if(e[0].dataType!==1&&e[0].dataType!==10)throw new Error(\"Input type must be float or float16.\");if(e.length>=2){let r=e[0].dims.length*2===e[1].dims[0];if(e.length===4&&(r=e[3].dims[0]*2===e[1].dims[0]),!r)throw new Error(\"The pads should be a 1D tensor of shape [2 * input_rank] or [2 * num_axes].\")}},zy=(e,r,t)=>{let u=\"\";for(let s=r-1;s>=0;--s)u+=`\n k = i32(${e.indicesGet(\"indices\",s)}) - ${xe(\"uniforms.pads\",s,t)};\n if (k < 0) {\n break;\n }\n if (k >= i32(${xe(\"uniforms.x_shape\",s,r)})) {\n break;\n }\n offset += k * i32(${xe(\"uniforms.x_strides\",s,r)});\n `;return`\n value = ${e.type.value}(uniforms.constant_value);\n for (var i = 0; i < 1; i++) {\n var offset = 0;\n var k = 0;\n ${u}\n value = x[offset];\n }\n `},My=(e,r,t)=>{let u=\"\";for(let s=r-1;s>=0;--s)u+=`\n k = i32(${e.indicesGet(\"indices\",s)}) - ${xe(\"uniforms.pads\",s,t)};\n if (k < 0) {\n k = -k;\n }\n {\n let _2n_1 = 2 * (i32(${xe(\"uniforms.x_shape\",s,r)}) - 1);\n k = k % _2n_1;\n if(k >= i32(${xe(\"uniforms.x_shape\",s,r)})) {\n k = _2n_1 - k;\n }\n }\n offset += k * i32(${xe(\"uniforms.x_strides\",s,r)});\n `;return`\n var offset = 0;\n var k = 0;\n ${u}\n value = x[offset];\n `},jy=(e,r,t)=>{let u=\"\";for(let s=r-1;s>=0;--s)u+=`\n k = i32(${e.indicesGet(\"indices\",s)}) - ${xe(\"uniforms.pads\",s,t)};\n if (k < 0) {\n k = 0;\n }\n if (k >= i32(${xe(\"uniforms.x_shape\",s,r)})) {\n k = i32(${xe(\"uniforms.x_shape\",s,r)}) - 1;\n }\n offset += k * i32(${xe(\"uniforms.x_strides\",s,r)});\n `;return`\n var offset = 0;\n var k = 0;\n ${u}\n value = x[offset];\n `},Uy=(e,r,t)=>{let u=\"\";for(let s=r-1;s>=0;--s)u+=`\n k = i32(${e.indicesGet(\"indices\",s)}) - ${xe(\"uniforms.pads\",s,t)};\n if (k < 0) {\n k += i32(${xe(\"uniforms.x_shape\",s,r)}]);\n }\n if (k >= i32(${xe(\"uniforms.x_shape\",s,r)})) {\n k -= i32(${xe(\"uniforms.x_shape\",s,r)});\n }\n offset += k * i32(${xe(\"uniforms.x_strides\",s,r)});\n `;return`\n var offset = 0;\n var k = 0;\n ${u}\n value = x[offset];\n `},Vy=(e,r,t)=>{switch(t.mode){case 0:return 
zy(e,r,t.pads.length);case 1:return My(e,r,t.pads.length);case 2:return jy(e,r,t.pads.length);case 3:return Uy(e,r,t.pads.length);default:throw new Error(\"Invalid mode\")}},Wy=(e,r)=>{let t=K.padShape(e[0].dims.slice(),r.pads),u=e[0].dims,s=K.size(t),c=[{type:12,data:s},{type:6,data:r.pads}];r.mode===0&&c.push({type:e[0].dataType,data:r.value}),c.push(...se(e[0].dims,t));let f=[\"rank\"],d=g=>{let w=ne(\"output\",e[0].dataType,t.length),C=Z(\"x\",e[0].dataType,u.length),$=C.type.value,A=Vy(w,u.length,r),P=[{name:\"output_size\",type:\"u32\"},{name:\"pads\",type:\"i32\",length:r.pads.length}];return r.mode===0&&P.push({name:\"constant_value\",type:$}),`\n ${g.registerUniforms(P).declareVariables(C,w)}\n ${g.mainStart()}\n ${g.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size\")}\n\n let indices = ${w.offsetToIndices(\"global_idx\")};\n\n var value = ${$}(0);\n ${A}\n output[global_idx] = value;\n }`};return{name:\"Pad\",shaderCache:{hint:`${r.mode}`,inputDependencies:f},getRunData:()=>({outputs:[{dims:t,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(K.size(t)/64)},programUniforms:c}),getShaderSource:d}},Ny=(e,r)=>{if(e.length>1){let t=e[1].getBigInt64Array(),u=e.length>=3&&e[2].data?e[2].getFloat32Array()[0]:0,s=e[0].dims.length,c=new Int32Array(2*s).fill(0);if(e.length>=4){let d=e[3].getBigInt64Array();for(let g=0;gc[Number(g)]=Number(d));let f=[];return c.forEach(d=>f.push(d)),{mode:r.mode,value:u,pads:f}}else return r},mc=(e,r)=>{Dy(e.inputs);let t=Ny(e.inputs,r);e.compute(Wy(e.inputs,t),{inputs:[0]})}});var yi,gc,yc,bc,vc,Gy,Hy,wc,_c,$c,Cc,Sc,xc,Ic,Ac,Tc,Ec,Pc,kc,Oc=ae(()=>{\"use strict\";Rr();Te();De();Re();yi=e=>{if(Or.webgpu.validateInputContent&&(!e||e.length!==1))throw new Error(\"Pool ops requires 1 input.\")},gc=(e,r,t)=>{let u=r.format===\"NHWC\",s=e.dims.slice();u&&s.splice(1,0,s.pop());let c=Object.hasOwnProperty.call(r,\"dilations\"),f=r.kernelShape.slice(),d=r.strides.slice(),g=c?r.dilations.slice():[],w=r.pads.slice();sr.adjustPoolAttributes(t,s,f,d,g,w);let C=sr.computePoolOutputShape(t,s,d,g,f,w,r.autoPad),$=Object.assign({},r);c?Object.assign($,{kernelShape:f,strides:d,pads:w,dilations:g,cacheKey:r.cacheKey}):Object.assign($,{kernelShape:f,strides:d,pads:w,cacheKey:r.cacheKey});let A=C.slice();return A.push(A.splice(1,1)[0]),[$,u?A:C]},yc=(e,r)=>{let t=r.format===\"NHWC\",u=K.size(e),s=K.size(r.kernelShape),c=[{type:12,data:u},{type:12,data:s}],f=[{name:\"outputSize\",type:\"u32\"},{name:\"kernelSize\",type:\"u32\"}];if(r.kernelShape.length<=2){let d=r.kernelShape[r.kernelShape.length-1],g=r.strides[r.strides.length-1],w=r.pads[r.pads.length/2-1],C=r.pads[r.pads.length-1],$=!!(w+C);c.push({type:12,data:d},{type:12,data:g},{type:12,data:w},{type:12,data:C}),f.push({name:\"kw\",type:\"u32\"},{name:\"sw\",type:\"u32\"},{name:\"pwStart\",type:\"u32\"},{name:\"pwEnd\",type:\"u32\"});let A=!1;if(r.kernelShape.length===2){let P=r.kernelShape[r.kernelShape.length-2],x=r.strides[r.strides.length-2],E=r.pads[r.pads.length/2-2],O=r.pads[r.pads.length-2];A=!!(E+O),c.push({type:12,data:P},{type:12,data:x},{type:12,data:E},{type:12,data:O}),f.push({name:\"kh\",type:\"u32\"},{name:\"sh\",type:\"u32\"},{name:\"phStart\",type:\"u32\"},{name:\"phEnd\",type:\"u32\"})}return[c,f,!0,$,A]}else{if(t)throw new Error(\"Pooling with kernelShape.length > 2 is not supported for NHWC format.\");let 
d=K.computeStrides(r.kernelShape);c.push({type:12,data:d},{type:12,data:r.pads},{type:12,data:r.strides}),f.push({name:\"kernelStrides\",type:\"u32\",length:d.length},{name:\"pads\",type:\"u32\",length:r.pads.length},{name:\"strides\",type:\"u32\",length:r.strides.length});let g=r.pads.reduce((w,C)=>w+C);return[c,f,!!g,!1,!1]}},bc=(e,r,t,u,s,c,f,d,g,w,C,$)=>{let A=s.format===\"NHWC\",P=r.type.value,x=ne(\"output\",r.type.tensor,u);if(s.kernelShape.length<=2){let E=\"\",O=\"\",B=\"\",R=t-(A?2:1);if(C?E=`\n for (var i: u32 = 0u; i < uniforms.kw; i++) {\n xIndices[${R}] = indices[${R}] * uniforms.sw - uniforms.pwStart + i;\n if (xIndices[${R}] < 0 || xIndices[${R}]\n >= uniforms.x_shape[${R}]) {\n pad++;\n continue;\n }\n let x_val = x[${r.indicesToOffset(\"xIndices\")}];\n ${c}\n }`:E=`\n for (var i: u32 = 0u; i < uniforms.kw; i++) {\n xIndices[${R}] = indices[${R}] * uniforms.sw - uniforms.pwStart + i;\n let x_val = x[${r.indicesToOffset(\"xIndices\")}];\n ${c}\n }`,s.kernelShape.length===2){let U=t-(A?3:2);$?O=`\n for (var j: u32 = 0u; j < uniforms.kh; j++) {\n xIndices[${U}] = indices[${U}] * uniforms.sh - uniforms.phStart + j;\n if (xIndices[${U}] < 0 || xIndices[${U}] >= uniforms.x_shape[${U}]) {\n pad += i32(uniforms.kw);\n continue;\n }\n `:O=`\n for (var j: u32 = 0u; j < uniforms.kh; j++) {\n xIndices[${U}] = indices[${U}] * uniforms.sh - uniforms.phStart + j;\n `,B=`\n }\n `}return`\n ${e.registerUniforms(g).declareVariables(r,x)}\n\n ${e.mainStart()}\n ${e.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.outputSize\")}\n\n let indices = ${x.offsetToIndices(\"global_idx\")};\n var xIndices = ${x.offsetToIndices(\"global_idx\")};\n\n var value = ${P}(${d});\n var pad = 0;\n ${O}\n ${E}\n ${B}\n ${f}\n\n output[global_idx] = value;\n }`}else{if(A)throw new Error(\"Pooling with kernelShape.length > 2 is not supported for NHWC format.\");let E=s.kernelShape.length,O=s.pads.length,B=\"\";return w?B=`\n if (xIndices[j] >= uniforms.x_shape[j]) {\n pad++;\n isPad = true;\n break;\n }\n }\n if (!isPad) {\n let x_val = x[${r.indicesToOffset(\"xIndices\")}];\n ${c}\n }`:B=`\n }\n let x_val = x[${r.indicesToOffset(\"xIndices\")}];\n ${c}\n `,`\n ${e.registerUniforms(g).declareVariables(r,x)}\n\n ${e.mainStart()}\n ${e.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.outputSize\")}\n let indices = ${x.offsetToIndices(\"global_idx\")};\n var xIndices = ${x.offsetToIndices(\"global_idx\")};\n\n var offsets: array;\n\n var value = ${P}(${d});\n var pad = 0;\n var isPad = false;\n\n for (var i: u32 = 0u; i < uniforms.kernelSize; i++) {\n var offset = i;\n for (var j = 0u; j < ${E-1}u; j++) {\n offsets[j] = offset / ${xe(\"uniforms.kernelStrides\",\"j\",E)};\n offset -= offsets[j] * ${xe(\"uniforms.kernelStrides\",\"j\",E)};\n }\n offsets[${E-1}] = offset;\n\n isPad = false;\n for (var j = ${t-E}u; j < ${t}u; j++) {\n xIndices[j] = indices[j] * ${xe(\"uniforms.strides\",`j - ${t-E}u`,E)}\n + offsets[j - ${t-E}u] - ${xe(\"uniforms.pads\",\"j - 2u\",O)};\n ${B}\n }\n ${f}\n\n output[global_idx] = value;\n }`}},vc=e=>`${e.format};${e.ceilMode};${e.autoPad};${e.kernelShape.length}`,Gy=e=>`${vc(e)};${e.countIncludePad}`,Hy=e=>`${vc(e)};${e.storageOrder};${e.dilations}`,wc=e=>({format:e.format,autoPad:[\"NOTSET\",\"VALID\",\"SAME_UPPER\",\"SAME_LOWER\"][e.auto_pad],ceilMode:e.ceil_mode,kernelShape:e.kernel_shape,strides:e.strides,pads:e.pads}),_c=(e,r,t,u)=>{let[s,c]=gc(r,u,t),f=Z(\"x\",r.dataType,r.dims.length),d=f.type.value,g=\"value += x_val;\",w=\"\";s.countIncludePad?w+=`value /= 
${d}(uniforms.kernelSize);`:w+=`value /= ${d}(i32(uniforms.kernelSize) - pad);`;let[C,$,A,P,x]=yc(c,s);C.push(...se(r.dims,c));let E=[\"rank\"];return{name:e,shaderCache:{hint:`${u.cacheKey};${A};${P};${x}`,inputDependencies:E},getRunData:()=>({outputs:[{dims:c,dataType:r.dataType}],dispatchGroup:{x:Math.ceil(K.size(c)/64)},programUniforms:C}),getShaderSource:O=>bc(O,f,r.dims.length,c.length,s,g,w,0,$,A,P,x)}},$c=e=>{let r=e.count_include_pad!==0,t=wc(e);if(t.ceilMode!==0)throw new Error(\"using ceil() in shape computation is not yet supported for AveragePool\");let u={countIncludePad:r,...t,cacheKey:\"\"};return{...u,cacheKey:Gy(u)}},Cc=(e,r)=>{yi(e.inputs),e.compute(_c(\"AveragePool\",e.inputs[0],!1,r))},Sc={autoPad:\"\",ceilMode:0,countIncludePad:!1,kernelShape:[],strides:[],pads:[],storageOrder:0,dilations:[]},xc=e=>{let r=e.format;return{format:r,...Sc,cacheKey:r}},Ic=(e,r)=>{yi(e.inputs),e.compute(_c(\"GlobalAveragePool\",e.inputs[0],!0,r))},Ac=(e,r,t,u)=>{let[s,c]=gc(r,u,t),f=`\n value = max(x_val, value);\n `,d=\"\",g=Z(\"x\",r.dataType,r.dims.length),w=[\"rank\"],[C,$,A,P,x]=yc(c,s);return C.push(...se(r.dims,c)),{name:e,shaderCache:{hint:`${u.cacheKey};${A};${P};${x}`,inputDependencies:w},getRunData:()=>({outputs:[{dims:c,dataType:r.dataType}],dispatchGroup:{x:Math.ceil(K.size(c)/64)},programUniforms:C}),getShaderSource:E=>bc(E,g,r.dims.length,c.length,s,f,d,r.dataType===10?-65504:-1e5,$,A,P,x)}},Tc=(e,r)=>{yi(e.inputs),e.compute(Ac(\"MaxPool\",e.inputs[0],!1,r))},Ec=e=>{let r=e.storage_order,t=e.dilations,u=wc(e);if(r!==0)throw new Error(\"column major storage order is not yet supported for MaxPool\");if(u.ceilMode!==0)throw new Error(\"using ceil() in shape computation is not yet supported for MaxPool\");let s={storageOrder:r,dilations:t,...u,cacheKey:\"\"};return{...s,cacheKey:Hy(s)}},Pc=e=>{let r=e.format;return{format:r,...Sc,cacheKey:r}},kc=(e,r)=>{yi(e.inputs),e.compute(Ac(\"GlobalMaxPool\",e.inputs[0],!0,r))}});var Fy,qy,Rc,Bc=ae(()=>{\"use strict\";Rr();Te();Re();Fy=(e,r,t)=>{let u=e===r,s=er&&t>0;if(u||s||c)throw new Error(\"Range these inputs' contents are invalid.\")},qy=(e,r,t,u)=>{let s=Math.abs(Math.ceil((r-e)/t)),c=[s],f=s,d=[{type:12,data:f},{type:u,data:e},{type:u,data:t},...se(c)],g=w=>{let C=ne(\"output\",u,c.length),$=C.type.value,A=[{name:\"outputSize\",type:\"u32\"},{name:\"start\",type:$},{name:\"delta\",type:$}];return`\n ${w.registerUniforms(A).declareVariables(C)}\n ${w.mainStart()}\n ${w.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.outputSize\")}\n output[global_idx] = uniforms.start + ${$}(global_idx) * uniforms.delta;\n }`};return{name:\"Range\",shaderCache:{hint:`${u}`},getShaderSource:g,getRunData:()=>({outputs:[{dims:c,dataType:u}],dispatchGroup:{x:Math.ceil(f/64)},programUniforms:d})}},Rc=e=>{let r=0,t=0,u=0;e.inputs[0].dataType===6?(r=e.inputs[0].getInt32Array()[0],t=e.inputs[1].getInt32Array()[0],u=e.inputs[2].getInt32Array()[0]):e.inputs[0].dataType===1&&(r=e.inputs[0].getFloat32Array()[0],t=e.inputs[1].getFloat32Array()[0],u=e.inputs[2].getFloat32Array()[0]),Or.webgpu.validateInputContent&&Fy(r,t,u),e.compute(qy(r,t,u,e.inputs[0].dataType),{inputs:[]})}});var Ky,Yy,Zy,Qy,Xy,Jy,e0,t0,r0,n0,i0,Dc,o0,a0,s0,u0,l0,zc,Mc,jc=ae(()=>{\"use strict\";Te();De();nt();Re();Ky=(e,r)=>{if(e.every(t=>t>0||(()=>{throw new Error(\"Resize requires scales input values to be 
positive\")})),e.length>0){if(r.mode===\"linear\"){if(!(e.length===2||e.length===3||e.length===4&&e[0]===1&&e[1]===1||e.length===4&&e[0]===1&&e[3]===1||e.length===5&&e[0]===1&&e[1]===1))throw new Error(`For linear mode, Resize requires scales to be 2D, 3D, 4D with either two outermost or one innermost and\n one outermost scale values equal to 1, or 5D with two outermost scale values equal to 1`)}else if(r.mode===\"cubic\"&&!(e.length===2||e.length===4&&e[0]===1&&e[1]===1||e.length===4&&e[0]===1&&e[3]===1))throw new Error(\"Resize requires scales input size to be 2 or 4 for cubic mode\")}},Yy=(e,r,t)=>{r.every(s=>s>=0&&s{throw new Error(\"Resize requires axes input values to be positive and less than rank\")}));let u=new Array(t).fill(1);return r.forEach((s,c)=>u[s]=e[c]),u},Zy=(e,r,t,u,s,c)=>{let[f,d,g]=t>10?[1,2,3]:[-1,e.length>1?1:-1,-1],w=e[0].dims.length;if(f>0&&e.length>f&&e[f].dims.length>0)e[f].getFloat32Array().forEach(C=>c.push(C));else if(r.coordinateTransformMode===\"tf_crop_and_resize\")throw new Error(\"Resize requires RoI input to be specified when coordinateTransformMode is tfCropAndResize\");if(d>0&&e.length>d&&e[d].dims.length>0){if(e[d].getFloat32Array().forEach(C=>u.push(C)),u.length!==0&&u.length!==w&&t>=18&&u.length!==r.axes.length)throw new Error(\"Resize requires scales input size to be same as input rank or axes size for opset 18 and up\");Ky(u,r),r.axes.length>0&&Yy(u,r.axes,w).forEach((C,$)=>u[$]=C)}if(g>0&&e.length>g&&(e[g].getBigInt64Array().forEach(C=>s.push(Number(C))),s.length!==w||t>=18&&s.length===r.axes.length))throw new Error(\"Resize requires sizes input size to be same as input rank or axes size for opset 18 and up\");if(r.axes.length>0){if(u.length!==r.axes.length)throw new Error('Resize requires \"scales\" input size to be of axes rank when axes attributes is specified');if(s.length!==r.axes.length)throw new Error('Resize requires \"sizes\" input size to be of rank axes rank when axes attributes is specified')}if(typeof u<\"u\"&&typeof s<\"u\"&&u.length>0&&s.length>w)throw new Error(\"Resize requires only of scales or sizes to be specified\")},Qy=(e,r)=>`fn getOriginalCoordinateFromResizedCoordinate(xResized: u32, xScale: f32, lengthResized: u32,\n lengthOriginal: u32, roiStart: f32, roiEnd: f32) -> ${r} { `+(()=>{switch(e){case\"asymmetric\":return`return ${r}(xResized) / ${r}(xScale);`;case\"pytorch_half_pixel\":return`if (lengthResized > 1) {\n return (${r}(xResized) + 0.5) / ${r}(xScale) - 0.5;\n } else {\n return 0.0;\n }`;case\"tf_half_pixel_for_nn\":return`return (${r}(xResized) + 0.5) / ${r}(xScale);`;case\"align_corners\":return`if (lengthResized == 1) {\n return 0.0;\n } else {\n // The whole part and the fractional part are calculated separately due to inaccuracy of floating\n // point division. As an example, f32(21) / f32(7) may evaluate to 2.99... 
instead of 3, causing an\n // offset-by-one error later in floor().\n let whole = ${r}(xResized * (lengthOriginal - 1) / (lengthResized - 1));\n let fract =\n ${r}(xResized * (lengthOriginal - 1) % (lengthResized - 1)) / ${r}(lengthResized - 1);\n return whole + fract;\n }`;case\"tf_crop_and_resize\":return`if (lengthResized > 1) {\n return ${r}(roiStart) * ${r}(lengthOriginal - 1) +\n (${r}(xResized) * ${r}(roiEnd - roiStart) * ${r}(lengthOriginal - 1)) /\n ${r}(lengthResized - 1);\n } else {\n return 0.5 * ${r}(roiStart + roiEnd) * ${r}(lengthOriginal - 1);\n }`;case\"half_pixel_symmetric\":return`const outputWidth = ${r}xScale * ${r}(lengthResized);\n const adjustment = ${r}(lengthResized) / outputWidth;\n const center = ${r}(lengthOriginal) / 2;\n const offset = center * (1 - adjustment);\n return offset + ((${r}(xResized) + 0.5) / ${r}(xScale)) - 0.5;`;case\"half_pixel\":return`return ((${r}(xResized) + 0.5) / ${r}(xScale)) - 0.5;`;default:throw new Error(`Coordinate transform mode ${e} is not supported`)}})()+\"}\",Xy=(e,r,t)=>`fn getNearestPixelFromOriginal(xOriginal: ${t}, isDownSample: bool) -> ${t} {`+(()=>{switch(e){case\"round_prefer_ceil\":return\"if (fract(xOriginal) == 0.5) { return ceil(xOriginal); } else { return round(xOriginal); }\";case\"floor\":return\"return floor(xOriginal);\";case\"ceil\":return\"return ceil(xOriginal);\";case\"round_prefer_floor\":return\"if (fract(xOriginal) == 0.5) { return floor(xOriginal); } else { return round(xOriginal); }\";case\"simple\":default:if(r<11)return\"if (isDownSample) { return ceil(xOriginal); } else { return xOriginal; }\";throw new Error(`Nearest mode ${e} is not supported`)}})()+\"}\",Jy=(e,r,t)=>{let u=new Array(t).fill(0).concat(new Array(t).fill(1)),s=e.length===0?u:e.slice();return r.length>0?(r.forEach((c,f)=>{u[c]=s[f],u[f+t]=s[r.length+f]}),u):s},e0=(e,r,t,u)=>{let s=[];if(t.length>0)if(u.length>0){if(e.forEach(c=>s.push(c)),Math.max(...u)>e.length)throw new Error(\"axes is out of bound\");u.forEach((c,f)=>s[c]=t[f])}else t.forEach(c=>s.push(c));else{if(r.length===0)throw new Error(\"Resize requires either scales or sizes.\");s=e.map((c,f)=>Math.round(c*r[f]))}return s},t0=(e,r,t)=>{let u=(()=>{switch(t.keepAspectRatioPolicy){case\"not_larger\":return t.axes.length>0?Math.min(...t.axes.map(c=>r[c]),Number.MAX_VALUE):Math.min(...r,Number.MAX_VALUE);case\"not_smaller\":return t.axes.length>0?Math.max(...t.axes.map(c=>r[c]),Number.MIN_VALUE):Math.max(...r,Number.MIN_VALUE);default:throw new Error(`Keep aspect ratio policy ${t.keepAspectRatioPolicy} is not supported`)}})();r.fill(1,0,r.length);let s=e.slice();return t.axes.length>0?(t.axes.forEach(c=>r[c]=u),t.axes.forEach(c=>s[c]=Math.round(e[c]*r[c]))):(r.fill(u,0,r.length),s.forEach((c,f)=>s[f]=Math.round(c*r[f]))),s},r0=(e,r,t,u,s)=>`\n fn calculateOriginalIndicesFromOutputIndices(output_indices: ${e.type.indices}) -> array<${e.type.value}, ${t.length}> {\n var original_indices: array<${e.type.value}, ${t.length}>;\n for (var i:u32 = 0; i < ${t.length}; i++) {\n var output_index = ${e.indicesGet(\"output_indices\",\"i\")};\n var scale = ${xe(\"uniforms.scales\",\"i\",u)};\n var roi_low = ${xe(\"uniforms.roi\",\"i\",s)};\n var roi_hi = ${xe(\"uniforms.roi\",`i + ${r.length}`,s)};\n if (scale == 1.0) {\n original_indices[i] = ${e.type.value}(output_index);\n } else {\n var input_shape_i = ${xe(\"uniforms.input_shape\",\"i\",r.length)};\n var output_shape_i = ${xe(\"uniforms.output_shape\",\"i\",t.length)};\n original_indices[i] = 
getOriginalCoordinateFromResizedCoordinate(output_index, scale, output_shape_i,\n input_shape_i, roi_low, roi_hi);\n }\n }\n return original_indices;\n }`,n0=(e,r,t,u,s,c,f)=>`\n fn calculateInputIndicesFromOutputIndices(output_indices: ${r.type.indices}) -> ${e.type.indices} {\n var input_indices: ${e.type.indices};\n for (var i:u32 = 0; i < ${u.length}; i++) {\n var output_index = ${r.indicesGet(\"output_indices\",\"i\")};\n var input_index: u32;\n var scale = ${xe(\"uniforms.scales\",\"i\",s)};\n if (scale == 1.0) {\n input_index = output_index;\n } else {\n var roi_low = ${xe(\"uniforms.roi\",\"i\",c)};\n var roi_hi = ${xe(\"uniforms.roi\",`i + ${t.length}`,c)};\n var input_shape_i = ${xe(\"uniforms.input_shape\",\"i\",t.length)};\n var output_shape_i = ${xe(\"uniforms.output_shape\",\"i\",u.length)};\n var original_idx = getOriginalCoordinateFromResizedCoordinate(output_index, scale, output_shape_i,\n input_shape_i, roi_low, roi_hi);\n if (!${f} || (original_idx >= 0 && original_idx < ${r.type.value}(input_shape_i))) {\n if (original_idx < 0) {\n input_index = 0;\n } else if (original_idx > ${r.type.value}(input_shape_i - 1)) {\n input_index = input_shape_i - 1;\n } else {\n input_index = u32(getNearestPixelFromOriginal(original_idx, scale < 1));\n }\n } else {\n input_index = u32(original_idx);\n }\n }\n ${e.indicesSet(\"input_indices\",\"i\",\" input_index\")}\n }\n return input_indices;\n }`,i0=(e,r)=>`\n fn checkInputIndices(input_indices: ${e.type.indices}) -> bool {\n for (var i:u32 = 0; i < ${r.length}; i++) {\n var input_index = ${e.indicesGet(\"input_indices\",\"i\")};\n if (input_index < 0 || input_index >= ${xe(\"uniforms.input_shape\",\"i\",r.length)}) {\n return false;\n }\n }\n return true;\n }`,Dc=(e,r,t,u)=>e.rank>u?`\n ${e.indicesSet(\"input_indices\",r,\"channel\")};\n ${e.indicesSet(\"input_indices\",t,\"batch\")};\n`:\"\",o0=(e,r,t,u,s)=>{let[f,d,g,w]=t.length===2?[-1,0,1,-1]:[0,2,3,1],C=e.type.value;return`\n fn getInputValue(batch: u32, channel: u32, row: u32, col: u32) -> ${C} {\n var input_indices: ${e.type.indices};\n ${e.indicesSet(\"input_indices\",d,`max(0, min(row, ${t[d]} - 1))`)};\n ${e.indicesSet(\"input_indices\",g,`max(0, min(col, ${t[g]} - 1))`)};\n ${Dc(e,w,f,2)}\n return ${e.getByIndices(\"input_indices\")};\n }\n\n fn bilinearInterpolation(output_indices: ${r.type.indices}) -> ${C} {\n var originalIndices = calculateOriginalIndicesFromOutputIndices(output_indices);\n var row:${C} = originalIndices[${d}];\n var col:${C} = originalIndices[${g}];\n ${u?`if (row < 0 || row > (${t[d]} - 1) || col < 0 || col > (${t[g]} - 1)) {\n return ${s};\n }`:\"\"};\n row = max(0, min(row, ${t[d]} - 1));\n col = max(0, min(col, ${t[g]} - 1));\n var row1: u32 = u32(row);\n var col1: u32 = u32(col);\n var row2: u32 = u32(row + 1);\n var col2: u32 = u32(col + 1);\n var channel: u32 = ${t.length>2?`u32(originalIndices[${w}])`:\"0\"};\n var batch: u32 = ${t.length>2?`u32(originalIndices[${f}])`:\"0\"};\n var x11: ${C} = getInputValue(batch, channel, row1, col1);\n var x12: ${C} = getInputValue(batch, channel, row1, col2);\n var x21: ${C} = getInputValue(batch, channel, row2, col1);\n var x22: ${C} = getInputValue(batch, channel, row2, col2);\n var dx1: ${C} = abs(row - ${C}(row1));\n var dx2: ${C} = abs(${C}(row2) - row);\n var dy1: ${C} = abs(col - ${C}(col1));\n var dy2: ${C} = abs(${C}(col2) - col);\n if (row1 == row2) {\n dx1 = 0.5;\n dx2 = 0.5;\n }\n if (col1 == col2) {\n dy1 = 0.5;\n dy2 = 0.5;\n }\n return (x11 * dx2 * dy2 + x12 * dx2 * dy1 + x21 * dx1 * dy2 + x22 
* dx1 * dy1);\n }`},a0=(e,r,t,u,s,c,f,d,g,w)=>{let C=t.length===2,$=!0,[A,P]=C?[0,1]:$?[2,3]:[1,2],x=e.type.value,E=O=>{let B=O===A?\"row\":\"col\";return`\n fn ${B}CubicInterpolation(input_indices: ${e.type.indices}, output_indices: ${r.type.indices}) -> ${x} {\n var output_index = ${r.indicesGet(\"output_indices\",O)};\n var originalIdx: ${x} = getOriginalCoordinateFromResizedCoordinate(output_index, ${s[O]},\n ${u[O]}, ${t[O]}, ${c[O]}, ${c[O]} + ${t.length});\n var fractOriginalIdx: ${x} = originalIdx - floor(originalIdx);\n var coefs = getCubicInterpolationCoefs(fractOriginalIdx);\n\n if (${d} && (originalIdx < 0 || originalIdx > (${t[O]} - 1))) {\n return ${g};\n }\n var data: array<${x}, 4> = array<${x}, 4>(0.0, 0.0, 0.0, 0.0);\n for (var i: i32 = -1; i < 3; i++) {\n var ${B}: ${x} = originalIdx + ${x}(i);\n if (${B} < 0 || ${B} >= ${t[O]}) {\n ${(()=>w?`coefs[i + 1] = 0.0;\n continue;`:d?`return ${g};`:`${B} = max(0, min(${B}, ${t[O]} - 1));`)()};\n }\n var input_indices_copy: ${e.type.indices} = input_indices;\n ${e.indicesSet(\"input_indices_copy\",O,`u32(${B})`)};\n data[i + 1] = ${O===A?e.getByIndices(\"input_indices_copy\"):\"rowCubicInterpolation(input_indices_copy, output_indices)\"};\n }\n return cubicInterpolation1D(data, coefs);\n }`};return`\n ${E(A)};\n ${E(P)};\n fn getCubicInterpolationCoefs(s: ${x}) -> array<${x}, 4> {\n var absS = abs(s);\n var coeffs: array<${x}, 4> = array<${x}, 4>(0.0, 0.0, 0.0, 0.0);\n var oneMinusAbsS: ${x} = 1.0 - absS;\n var twoMinusAbsS: ${x} = 2.0 - absS;\n var onePlusAbsS: ${x} = 1.0 + absS;\n coeffs[0] = ((${f} * onePlusAbsS - 5 * ${f}) * onePlusAbsS + 8 * ${f}) * onePlusAbsS - 4 * ${f};\n coeffs[1] = ((${f} + 2) * absS - (${f} + 3)) * absS * absS + 1;\n coeffs[2] = ((${f} + 2) * oneMinusAbsS - (${f} + 3)) * oneMinusAbsS * oneMinusAbsS + 1;\n coeffs[3] = ((${f} * twoMinusAbsS - 5 * ${f}) * twoMinusAbsS + 8 * ${f}) * twoMinusAbsS - 4 * ${f};\n return coeffs;\n }\n\n fn cubicInterpolation1D(x: array<${x}, 4>, coefs: array<${x}, 4>) -> ${x} {\n var coefsSum: ${x} = coefs[0] + coefs[1] + coefs[2] + coefs[3];\n return (x[0] * coefs[0] + x[1] * coefs[1]+ x[2] * coefs[2]+ x[3] * coefs[3]) / coefsSum;\n }\n\n fn bicubicInterpolation(output_indices: ${r.type.indices}) -> ${x} {\n var input_indices: ${e.type.indices} = output_indices;\n return colCubicInterpolation(input_indices, output_indices);\n }\n `},s0=(e,r,t,u,s)=>{let[f,d,g,w,C]=t.length===3?[-1,0,1,2,-1]:[0,2,3,4,1],$=e.type.value;return`\n fn getInputValue(batch: u32, channel: u32, depth:u32, height: u32, width: u32) -> ${$} {\n var input_indices: ${e.type.indices};\n ${e.indicesSet(\"input_indices\",d,`max(0, min(depth, ${t[d]} - 1))`)};\n ${e.indicesSet(\"input_indices\",g,`max(0, min(height, ${t[g]} - 1))`)};\n ${e.indicesSet(\"input_indices\",w,`max(0, min(width, ${t[w]} - 1))`)};\n ${Dc(e,C,f,3)}\n return ${e.getByIndices(\"input_indices\")};\n }\n\n fn trilinearInterpolation(output_indices: ${r.type.indices}) -> ${$} {\n var originalIndices = calculateOriginalIndicesFromOutputIndices(output_indices);\n var depth:${$} = originalIndices[${d}];\n var height:${$} = originalIndices[${g}];\n var width:${$} = originalIndices[${w}];\n ${u?`if (depth < 0 || depth > (${t[d]} - 1) || height < 0 || height > (${t[g]} - 1) || width < 0 || (width > ${t[w]} - 1)) {\n return ${s};\n }`:\"\"};\n\n depth = max(0, min(depth, ${t[d]} - 1));\n height = max(0, min(height, ${t[g]} - 1));\n width = max(0, min(width, ${t[w]} - 1));\n var depth1: u32 = u32(depth);\n var height1: u32 = u32(height);\n var 
width1: u32 = u32(width);\n var depth2: u32 = u32(depth + 1);\n var height2: u32 = u32(height + 1);\n var width2: u32 = u32(width + 1);\n var channel: u32 = ${t.length>3?`u32(originalIndices[${C}])`:\"0\"};\n var batch: u32 = ${t.length>3?`u32(originalIndices[${f}])`:\"0\"};\n\n var x111: ${$} = getInputValue(batch, channel, depth1, height1, width1);\n var x112: ${$} = getInputValue(batch, channel, depth1, height1, width2);\n var x121: ${$} = getInputValue(batch, channel, depth1, height2, width1);\n var x122: ${$} = getInputValue(batch, channel, depth1, height2, width2);\n var x211: ${$} = getInputValue(batch, channel, depth2, height1, width1);\n var x212: ${$} = getInputValue(batch, channel, depth2, height1, width2);\n var x221: ${$} = getInputValue(batch, channel, depth2, height2, width1);\n var x222: ${$} = getInputValue(batch, channel, depth2, height2, width2);\n var dx1: ${$} = abs(depth - ${$}(depth1));\n var dx2: ${$} = abs(${$}(depth2) - depth);\n var dy1: ${$} = abs(height - ${$}(height1));\n var dy2: ${$} = abs(${$}(height2) - height);\n var dz1: ${$} = abs(width - ${$}(width1));\n var dz2: ${$} = abs(${$}(width2) - width);\n if (depth1 == depth2) {\n dx1 = 0.5;\n dx2 = 0.5;\n }\n if (height1 == height2) {\n dy1 = 0.5;\n dy2 = 0.5;\n }\n if (width1 == width2) {\n dz1 = 0.5;\n dz2 = 0.5;\n }\n return (x111 * dx2 * dy2 * dz2 + x112 * dx2 * dy2 * dz1 + x121 * dx2 * dy1 *dz2 + x122 * dx2 * dy1 * dz1 +\n x211 * dx1 * dy2 * dz2 + x212 * dx1 * dy2 * dz1 + x221 * dx1 * dy1 *dz2 + x222 * dx1 * dy1 * dz1);\n }`},u0=(e,r,t,u,s,c)=>{let f=e.dims,d=Jy(c,r.axes,f.length),g=e0(f,u,s,r.axes),w=u.slice();u.length===0&&(w=f.map((R,j)=>R===0?1:g[j]/R),r.keepAspectRatioPolicy!==\"stretch\"&&(g=t0(f,w,r)));let C=ne(\"output\",e.dataType,g.length),$=Z(\"input\",e.dataType,f.length),A=K.size(g),P=f.length===g.length&&f.every((R,j)=>R===g[j]),x=r.coordinateTransformMode===\"tf_crop_and_resize\",E=r.extrapolationValue,O=$.type.value,B=R=>`\n ${P?\"\":`\n ${Qy(r.coordinateTransformMode,O)};\n ${(()=>{switch(r.mode){case\"nearest\":return`\n ${i0($,f)};\n ${Xy(r.nearestMode,t,O)};\n ${n0($,C,f,g,w.length,d.length,x)};\n `;case\"linear\":return`\n ${r0(C,f,g,w.length,d.length)};\n ${(()=>{if(f.length===2||f.length===4)return`${o0($,C,f,x,E)}`;if(f.length===3||f.length===5)return`${s0($,C,f,x,E)}`;throw Error(\"Linear mode only supports input dims 2, 3, 4 and 5 are supported in linear mode.\")})()};\n `;case\"cubic\":return`\n ${(()=>{if(f.length===2||f.length===4)return`${a0($,C,f,g,w,d,r.cubicCoeffA,x,r.extrapolationValue,r.excludeOutside)}`;throw Error(\"Cubic mode only supports input dims 2 and 4 are supported in linear mode.\")})()};\n `;default:throw Error(\"Invalid resize mode\")}})()};\n `}\n ${R.registerUniform(\"output_size\",\"u32\").registerUniform(\"scales\",\"f32\",w.length).registerUniform(\"roi\",\"f32\",d.length).declareVariables($,C)}\n ${R.mainStart()}\n ${R.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size\")}\n ${P?\"output[global_idx] = input[global_idx];\":`\n let output_indices = ${C.offsetToIndices(\"global_idx\")};\n var input_indices: ${$.type.indices};\n ${(()=>{switch(r.mode){case\"nearest\":return`input_indices = calculateInputIndicesFromOutputIndices(output_indices);\n if (checkInputIndices(input_indices)) {\n output[global_idx] = ${$.getByIndices(\"input_indices\")};\n } else {\n output[global_idx] = ${r.extrapolationValue};\n }`;case\"linear\":return`output[global_idx] = 
${f.length===2||f.length===4?\"bilinearInterpolation\":\"trilinearInterpolation\"}(output_indices);`;case\"cubic\":return\"output[global_idx] = bicubicInterpolation(output_indices);\";default:throw Error(`Unsupported resize mode: ${r.mode}`)}})()};\n`}\n }`;return{name:\"Resize\",shaderCache:{hint:`${r.cacheKey}|${t}|${w.length>0?w:\"\"}|${s.length>0?s:\"\"}|${d.length>0?d:\"\"}|${P}|${f}`,inputDependencies:[\"rank\"]},getShaderSource:B,getRunData:()=>({outputs:[{dims:g,dataType:e.dataType}],dispatchGroup:{x:Math.ceil(A/64)},programUniforms:[{type:12,data:A},{type:1,data:w},{type:1,data:d},...se(f,g)]})}},l0=e=>{let r=e.customDataBuffer;return new Uint32Array(r,r.byteOffset,1)[0]},zc=(e,r)=>{let t=[],u=[],s=[],c=l0(e);if(r.antialias!==0)throw Error(\"Only default value (0) for Antialias attribute is supported\");Zy(e.inputs,r,c,t,u,s),e.compute(u0(e.inputs[0],r,c,t,u,s),{inputs:[0]})},Mc=e=>{let r=e.antialias,t=e.axes,u=e.coordinateTransformMode,s=e.cubicCoeffA,c=e.excludeOutside!==0,f=e.extrapolationValue,d=e.keepAspectRatioPolicy,g=e.mode,w=e.nearestMode===\"\"?\"simple\":e.nearestMode;return ke({antialias:r,axes:t,coordinateTransformMode:u,cubicCoeffA:s,excludeOutside:c,extrapolationValue:f,keepAspectRatioPolicy:d,mode:g,nearestMode:w})}});var d0,c0,Uc,Vc=ae(()=>{\"use strict\";Te();De();nt();Re();d0=(e,r)=>{let[t,u,s,c]=e,{numHeads:f,rotaryEmbeddingDim:d}=r;if(t.dims.length!==3&&t.dims.length!==4)throw new Error(`Input 'x' is expected to have 3 or 4 dimensions, got ${t.dims.length}`);if(!K.areEqual(u.dims,[])&&!K.areEqual(u.dims,[1])&&u.dims.length!==2)throw new Error(`Input 'position_ids' is expected to have 0, 1, or 2 dimensions, got ${u.dims.length}`);if(s.dims.length!==2)throw new Error(`Input 'cos_cache' is expected to have 2 dimensions, got ${s.dims.length}`);if(c.dims.length!==2)throw new Error(`Input 'sin_cache' is expected to have 2 dimensions, got ${c.dims.length}`);if(!K.areEqual(s.dims,c.dims))throw new Error(\"Inputs 'cos_cache' and 'sin_cache' are expected to have the same shape\");if(d>0&&f===0)throw new Error(\"num_heads must be provided if rotary_embedding_dim is specified\");let g=t.dims[0],w=t.dims[t.dims.length-2],C=s.dims[0],$=K.sizeFromDimension(t.dims,1)/w,A=d===0?s.dims[1]*2:$/f;if(d>A)throw new Error(\"rotary_embedding_dim must be less than or equal to head_size\");if(u.dims.length===2){if(g!==u.dims[0])throw new Error(`Input 'position_ids' dimension 0 should be of size batch_size, got ${u.dims[0]}`);if(w!==u.dims[1])throw new Error(`Input 'position_ids' dimension 1 should be of size sequence_length, got ${u.dims[1]}`)}if(A/2!==s.dims[1]&&d/2!==s.dims[1])throw new Error(`Input 'cos_cache' dimension 1 should be same as head_size / 2 or rotary_embedding_dim / 2, got ${s.dims[1]}`);if(w>C)throw new Error(\"Updating cos_cache and sin_cache in RotaryEmbedding is not currently supported\")},c0=(e,r)=>{let{interleaved:t,numHeads:u,rotaryEmbeddingDim:s,scale:c}=r,f=e[0].dims[0],d=K.sizeFromDimension(e[0].dims,1),g=e[0].dims[e[0].dims.length-2],w=d/g,C=e[2].dims[1],$=s===0?C*2:w/u,A=new Array(f,g,w/$,$-C),P=K.computeStrides(A),x=[{type:1,data:c},{type:12,data:A},{type:12,data:P},...e[0].dims.length===3?new Array({type:12,data:[d,w,$,1]}):[],...e[0].dims.length===4?new Array({type:12,data:[d,$,g*$,1]}):[],...se(e[0].dims,e[1].dims,e[2].dims,e[3].dims,e[0].dims)],E=O=>{let 
B=Z(\"input\",e[0].dataType,e[0].dims.length),R=Z(\"position_ids\",e[1].dataType,e[1].dims.length),j=Z(\"cos_cache\",e[2].dataType,e[2].dims.length),U=Z(\"sin_cache\",e[3].dataType,e[3].dims.length),L=ne(\"output\",e[0].dataType,e[0].dims.length);return O.registerUniforms([{name:\"scale\",type:\"f32\"},{name:\"global_shape\",type:\"u32\",length:A.length},{name:\"global_strides\",type:\"u32\",length:P.length},{name:\"input_output_strides\",type:\"u32\",length:P.length}]),`\n ${O.declareVariables(B,R,j,U,L)}\n\n ${O.mainStart(ur)}\n let half_rotary_emb_dim = uniforms.${j.name}_shape[1];\n let bsnh = global_idx / uniforms.global_strides % uniforms.global_shape;\n let size = uniforms.global_shape[0] * uniforms.global_strides[0];\n ${O.guardAgainstOutOfBoundsWorkgroupSizes(\"size\")}\n\n if (bsnh[3] < half_rotary_emb_dim) {\n let position_ids_idx =\n ${R.broadcastedIndicesToOffset(\"bsnh.xy\",ne(\"\",R.type.tensor,2))};\n let position_id =\n u32(${R.getByOffset(\"position_ids_idx\")}) + select(0, bsnh[1], position_ids_idx == 0);\n let i = dot(bsnh, uniforms.input_output_strides) + select(0, bsnh[3], ${t});\n let j = i + select(half_rotary_emb_dim, 1, ${t});\n let re = ${B.getByOffset(\"i\")} * ${j.get(\"position_id\",\"bsnh[3]\")} -\n ${B.getByOffset(\"j\")} * ${U.get(\"position_id\",\"bsnh[3]\")};\n ${L.setByOffset(\"i\",\"re\")}\n let im = ${B.getByOffset(\"i\")} * ${U.get(\"position_id\",\"bsnh[3]\")} +\n ${B.getByOffset(\"j\")} * ${j.get(\"position_id\",\"bsnh[3]\")};\n ${L.setByOffset(\"j\",\"im\")}\n } else {\n let k = dot(bsnh, uniforms.input_output_strides) + half_rotary_emb_dim;\n ${L.setByOffset(\"k\",B.getByOffset(\"k\"))}\n }\n }`};return{name:\"RotaryEmbedding\",shaderCache:{hint:ke({interleaved:t}).cacheKey,inputDependencies:[\"rank\",\"rank\",\"rank\",\"rank\"]},getShaderSource:E,getRunData:()=>({outputs:[{dims:e[0].dims,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(K.size(A)/ur)},programUniforms:x})}},Uc=(e,r)=>{d0(e.inputs,r),e.compute(c0(e.inputs,r))}});var f0,p0,Wc,Nc=ae(()=>{\"use strict\";Te();De();Re();f0=e=>{if(!e||e.length<3)throw new Error(\"layerNorm requires at least 3 inputs.\");let r=e[0],t=e[1],u=e[2];if(r.dataType!==t.dataType||r.dataType!==u.dataType)throw new Error(\"All inputs must have the same data type\");if(r.dims.length!==3&&r.dims.length!==2)throw new Error(\"Input must be 2D or 3D\");if(t.dims.length!==3&&t.dims.length!==2)throw new Error(\"Skip must be 2D or 3D\");let s=r.dims[r.dims.length-1],c=r.dims[r.dims.length-2];if(t.dims[t.dims.length-1]!==s)throw new Error(\"Skip must have the same hidden size as input\");if(t.dims[t.dims.length-2]!==c)throw new Error(\"Skip must have the same sequence length as input\");if(u.dims.length!==1)throw new Error(\"Gamma must be 1D\");if(u.dims[u.dims.length-1]!==s)throw new Error(\"Gamma must have the same hidden size as input\");if(e.length>3){let f=e[3];if(f.dims.length!==1)throw new Error(\"Beta must be 1D\");if(f.dims[f.dims.length-1]!==s)throw new Error(\"Beta must have the same hidden size as input\")}if(e.length>4){let f=e[4];if(f.dims.length!==1)throw new Error(\"Bias must be 1D\");if(f.dims[f.dims.length-1]!==s)throw new Error(\"Bias must have the same hidden size as input\")}},p0=(e,r,t,u)=>{let s=r.simplified,c=e[0].dims,f=K.size(c),d=c,g=f,w=c.slice(-1)[0],C=u?c.slice(0,-1).concat(1):[],$=!s&&e.length>3,A=e.length>4,P=u&&t>1,x=u&&t>2,E=t>3,O=He(w),B=[{type:12,data:g},{type:12,data:O},{type:12,data:w},{type:1,data:r.epsilon}],R=U=>{let 
L=[{name:\"output_size\",type:\"u32\"},{name:\"components\",type:\"u32\"},{name:\"hidden_size\",type:\"u32\"},{name:\"epsilon\",type:\"f32\"}],F=[Z(\"x\",e[0].dataType,e[0].dims,O),Z(\"skip\",e[1].dataType,e[1].dims,O),Z(\"gamma\",e[2].dataType,e[2].dims,O)];$&&F.push(Z(\"beta\",e[3].dataType,e[3].dims,O)),A&&F.push(Z(\"bias\",e[4].dataType,e[4].dims,O)),F.push(ne(\"output\",e[0].dataType,d,O)),P&&F.push(ne(\"mean_output\",1,C)),x&&F.push(ne(\"inv_std_output\",1,C)),E&&F.push(ne(\"input_skip_bias_sum\",e[0].dataType,d,O));let te=Ne(e[0].dataType);return`\n\n ${U.registerUniforms(L).declareVariables(...F)}\n\n ${U.mainStart()}\n ${U.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size / uniforms.hidden_size\")}\n let hidden_size_vectorized: u32 = uniforms.hidden_size / uniforms.components;\n let offset = global_idx * hidden_size_vectorized;\n var sum = ${xt(\"f32\",O)};\n var squareSum = ${xt(\"f32\",O)};\n for (var i: u32 = 0; i < hidden_size_vectorized; i++) {\n let skip_value = skip[offset + i];\n let bias_value = ${A?\"bias[i]\":te+\"(0.0)\"};\n let input_value = x[offset + i];\n let value = input_value + skip_value + bias_value;\n ${E?\"input_skip_bias_sum[offset + i] = value;\":\"\"}\n output[offset + i] = value;\n let f32_value = ${lr(te,O,\"value\")};\n sum += f32_value;\n squareSum += f32_value * f32_value;\n }\n let mean = ${It(\"sum\",O)} / f32(uniforms.hidden_size);\n let inv_std_dev = inverseSqrt(${It(\"squareSum\",O)} / f32(uniforms.hidden_size) ${s?\"\":\"- mean * mean\"} + uniforms.epsilon);\n ${P?\"mean_output[global_idx] = mean;\":\"\"}\n ${x?\"inv_std_output[global_idx] = inv_std_dev;\":\"\"}\n for (var i: u32 = 0; i < hidden_size_vectorized; i++) {\n output[offset + i] = (output[offset + i] ${s?\"\":`- ${te}(mean)`}) * ${te}(inv_std_dev) * gamma[i] ${$?\"+ beta[i]\":\"\"};\n }\n }`},j=[{dims:d,dataType:e[0].dataType}];return t>1&&j.push({dims:C,dataType:1}),t>2&&j.push({dims:C,dataType:1}),t>3&&j.push({dims:c,dataType:e[0].dataType}),{name:\"SkipLayerNormalization\",shaderCache:{hint:`${O};${P};${x};${E}`,inputDependencies:e.map((U,L)=>\"type\")},getShaderSource:R,getRunData:()=>({outputs:j,dispatchGroup:{x:Math.ceil(g/w/64)},programUniforms:B})}},Wc=(e,r)=>{f0(e.inputs);let u=[0];e.outputCount>1&&u.push(-3),e.outputCount>2&&u.push(-3),e.outputCount>3&&u.push(3),e.compute(p0(e.inputs,r,e.outputCount,!1),{outputs:u})}});var m0,bi,h0,Gc,g0,y0,Hc,Lc,Fc=ae(()=>{\"use strict\";Te();De();nt();Re();m0=(e,r)=>{if(!e||e.length<1)throw new Error(\"too few inputs\");if(r.axes.length!==0){if(r.axes.length!==r.starts.length||r.axes.length!==r.ends.length)throw new Error(\"axes, starts and ends must have the same length\")}else if(r.starts.length!==r.ends.length)throw new Error(\"starts and ends must have the same length\");e.slice(1).forEach((t,u)=>{if(e[u+1].dataType!==6&&e[u+1].dataType!==7)throw new Error(`Input ${u} must be an array of int32 or int64`)})},bi=(e,r)=>{let t=[];if(e.length>r)if(e[r].dataType===7)e[r].getBigInt64Array().forEach(u=>t.push(Number(u)));else if(e[r].dataType===6)e[r].getInt32Array().forEach(u=>t.push(Number(u)));else throw new Error(`Input ${r} must be an array of int32 or int64`);return t},h0=(e,r)=>{if(e.length>1){let t=bi(e,1),u=bi(e,2),s=bi(e,3);return s.length===0&&(s=[...Array(e[0].dims.length).keys()]),ke({starts:t,ends:u,axes:s})}else return r},Gc=(e,r,t,u,s)=>{let c=e;return e<0&&(c+=t[u[r]]),s[r]<0?Math.max(0,Math.min(c,t[u[r]]-1)):Math.max(0,Math.min(c,t[u[r]]))},g0=(e,r,t)=>`fn calculateInputIndices(output_indices: ${r.type.indices}) 
-> ${e.type.indices} {\n var input_indices: ${e.type.indices};\n var carry = 0u;\n for (var i = ${t.length}; i >= 0; i--) {\n let input_shape_i = ${xe(\"uniforms.input_shape\",\"i\",t.length)};\n let steps_i = ${xe(\"uniforms.steps\",\"i\",t.length)};\n let signs_i = ${xe(\"uniforms.signs\",\"i\",t.length)};\n let starts_i = ${xe(\"uniforms.starts\",\"i\",t.length)};\n var output_index = ${r.indicesGet(\"output_indices\",\"i\")};\n var input_index = output_index * steps_i + starts_i + carry;\n carry = input_index / input_shape_i;\n input_index = input_index % input_shape_i;\n if (signs_i < 0) {\n input_index = input_shape_i - input_index - 1u + starts_i;\n }\n ${e.indicesSet(\"input_indices\",\"i\",\"input_index\")};\n }\n return input_indices;\n }`,y0=(e,r)=>{let t=e[0].dims,u=K.size(t),s=r.axes.length>0?K.normalizeAxes(r.axes,t.length):[...Array(t.length).keys()],c=bi(e,4);c.forEach(B=>B!==0||(()=>{throw new Error(\"step cannot be 0\")})),c.length===0&&(c=Array(s.length).fill(1));let f=r.starts.map((B,R)=>Gc(B,R,t,s,c)),d=r.ends.map((B,R)=>Gc(B,R,t,s,c));if(s.length!==f.length||s.length!==d.length)throw new Error(\"start, ends and axes should have the same number of elements\");if(s.length!==t.length)for(let B=0;BMath.sign(B));c.forEach((B,R,j)=>{if(B<0){let U=(d[R]-f[R])/B,L=f[R],F=L+U*c[R];f[R]=F,d[R]=L,j[R]=-B}});let w=t.slice(0);s.forEach((B,R)=>{w[B]=Math.ceil((d[B]-f[B])/c[B])});let C={dims:w,dataType:e[0].dataType},$=ne(\"output\",e[0].dataType,w.length),A=Z(\"input\",e[0].dataType,e[0].dims.length),P=K.size(w),x=[{name:\"outputSize\",type:\"u32\"},{name:\"starts\",type:\"u32\",length:f.length},{name:\"signs\",type:\"i32\",length:g.length},{name:\"steps\",type:\"u32\",length:c.length}],E=[{type:12,data:P},{type:12,data:f},{type:6,data:g},{type:12,data:c},...se(e[0].dims,w)],O=B=>`\n ${B.registerUniforms(x).declareVariables(A,$)}\n ${g0(A,$,t)}\n ${B.mainStart()}\n ${B.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.outputSize\")}\n let output_indices = ${$.offsetToIndices(\"global_idx\")};\n let input_indices = calculateInputIndices(output_indices);\n ${$.setByOffset(\"global_idx\",A.getByIndices(\"input_indices\"))}\n }`;return{name:\"Slice\",shaderCache:{hint:`${g.length}_${f.length}_${c.length}`,inputDependencies:[\"rank\"]},getShaderSource:O,getRunData:()=>({outputs:[C],dispatchGroup:{x:Math.ceil(u/64)},programUniforms:E})}},Hc=(e,r)=>{m0(e.inputs,r);let t=h0(e.inputs,r);e.compute(y0(e.inputs,t),{inputs:[0]})},Lc=e=>{let r=e.starts,t=e.ends,u=e.axes;return ke({starts:r,ends:t,axes:u})}});var b0,v0,qc,Kc,Yc=ae(()=>{\"use strict\";Te();De();nt();Re();b0=e=>{if(!e||e.length!==1)throw new Error(\"Softmax op requires 1 input.\")},v0=(e,r)=>{let t=e.dims,u=K.size(t),s=64,c=r.axis;if(c<0&&(c=t.length+c),cB===4?`max(max(${O}.x, ${O}.y), max(${O}.z, ${O}.w))`:B===2?`max(${O}.x, ${O}.y)`:B===3?`max(max(${O}.x, ${O}.y), ${O}.z)`:O,$=Z(\"x\",e.dataType,e.dims,g),A=ne(\"result\",e.dataType,e.dims,g),P=$.type.value,x=Ne(e.dataType)===\"f32\"?`var threadMax = ${P}(-3.402823e+38f);`:`var threadMax = ${P}(-65504.0h);`,E=O=>`\n var rowMaxShared : ${P};\n var rowSumShared : ${P};\n var threadShared : array<${P}, ${s}>;\n\n fn getValue(row: i32, col: i32, row_stride: i32) -> ${P} {\n let index = row * row_stride + col;\n return x[index];\n }\n\n fn setValue(row: i32, col: i32, row_stride: i32, value: ${P}) {\n let index = row * row_stride + col;\n result[index] = value;\n }\n ${O.registerUniform(\"packedCols\",\"i32\").declareVariables($,A)}\n ${O.mainStart()}\n let gindex = i32(global_idx);\n 
let lindex = i32(local_idx);\n const wg = ${s};\n let row = gindex / wg;\n let cols = uniforms.packedCols;\n let row_stride : i32 = uniforms.packedCols;\n\n // find the rows max\n ${x}\n for (var col = lindex; col < cols; col += wg) {\n let value = getValue(row, col, row_stride);\n threadMax = max(threadMax, value);\n }\n if (lindex < cols) {\n threadShared[lindex] = threadMax;\n }\n workgroupBarrier();\n\n var reduceSize = min(cols, wg);\n for (var currSize = reduceSize >> 1; currSize > 0; currSize = reduceSize >> 1) {\n reduceSize = currSize + (reduceSize & 1);\n if (lindex < currSize) {\n threadShared[lindex] = max(threadShared[lindex], threadShared[lindex + reduceSize]);\n }\n workgroupBarrier();\n }\n if (lindex == 0) {\n rowMaxShared = ${P}(${C(\"threadShared[0]\",g)});\n }\n workgroupBarrier();\n\n // find the rows sum\n var threadSum = ${P}(0.0);\n for (var col = lindex; col < cols; col += wg) {\n let subExp = exp(getValue(row, col, row_stride) - rowMaxShared);\n threadSum += subExp;\n }\n threadShared[lindex] = threadSum;\n workgroupBarrier();\n\n for (var currSize = wg >> 1; currSize > 0; currSize = currSize >> 1) {\n if (lindex < currSize) {\n threadShared[lindex] = threadShared[lindex] + threadShared[lindex + currSize];\n }\n workgroupBarrier();\n }\n if (lindex == 0) {\n rowSumShared = ${P}(${It(\"threadShared[0]\",g)});\n }\n workgroupBarrier();\n\n // calculate final value for each element in the row\n for (var col = lindex; col < cols; col += wg) {\n let value = exp(getValue(row, col, row_stride) - rowMaxShared) / rowSumShared;\n setValue(row, col, row_stride, value);\n }\n }`;return{name:\"Softmax\",shaderCache:{hint:`${g}`,inputDependencies:[\"type\"]},getRunData:()=>({outputs:[{dims:t,dataType:e.dataType}],dispatchGroup:{x:d},programUniforms:[{type:6,data:w}]}),getShaderSource:E}},qc=(e,r)=>{b0(e.inputs),e.compute(v0(e.inputs[0],r))},Kc=e=>ke({axis:e.axis})});var w0,_0,$0,C0,S0,Zc,Qc,Xc=ae(()=>{\"use strict\";Te();De();nt();Re();w0=e=>{if(!e||e.length<1)throw new Error(\"too few inputs\")},_0=(e,r)=>{let t=[],u=r.numOutputs;return e[1].dims[0]>0&&(e[1].getBigInt64Array().forEach(s=>t.push(Number(s))),u=t.length),ke({numOutputs:u,axis:r.axis,splitSizes:t})},$0=e=>`\nfn calculateOutputIndex(index: u32) -> u32 {\n for (var i: u32 = 0u; i < ${e}u; i += 1u ) {\n if (index < ${xe(\"uniforms.size_in_split_axis\",\"i\",e)}) {\n return i;\n }\n }\n return ${e}u;\n}`,C0=e=>{let r=e.length,t=[];for(let u=0;u{let t=e[0].dims,u=K.size(t),s=e[0].dataType,c=K.normalizeAxis(r.axis,t.length),f=new Array(r.numOutputs),d=Z(\"input\",s,t.length),g=new Array(r.numOutputs),w=[],C=[],$=0,A=[{type:12,data:u}];for(let x=0;x`\n ${x.registerUniform(\"input_size\",\"u32\").registerUniform(\"size_in_split_axis\",\"u32\",g.length).declareVariables(d,...f)}\n ${$0(g.length)}\n ${C0(f)}\n\n ${x.mainStart()}\n ${x.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.input_size\")}\n\n var indices = ${d.offsetToIndices(\"global_idx\")};\n var index = ${d.indicesGet(\"indices\",c)};\n let output_number = calculateOutputIndex(index);\n if (output_number != 0) {\n index -= ${xe(\"uniforms.size_in_split_axis\",\"output_number - 1u\",g.length)};\n ${d.indicesSet(\"indices\",c,\"index\")};\n }\n writeBufferData(output_number, indices, global_idx);\n }`;return{name:\"Split\",shaderCache:{hint:r.cacheKey,inputDependencies:[\"rank\"]},getShaderSource:P,getRunData:()=>({outputs:w,dispatchGroup:{x:Math.ceil(u/64)},programUniforms:A})}},Zc=(e,r)=>{w0(e.inputs);let 
t=e.inputs.length===1?r:_0(e.inputs,r);e.compute(S0(e.inputs,t),{inputs:[0]})},Qc=e=>{let r=e.axis,t=e.splitSizes,u=e.numOutputs<0?t.length:e.numOutputs;if(u!==t.length)throw new Error(\"numOutputs and splitSizes lengh must be equal\");return ke({axis:r,numOutputs:u,splitSizes:t})}});var Jc,x0,I0,A0,ef,tf=ae(()=>{\"use strict\";Te();De();Re();Jc=e=>Array.from(e.getBigInt64Array(),Number),x0=e=>{if(!e||e.length!==2)throw new Error(\"Tile requires 2 inputs.\");if(e[0].dataType!==1&&e[0].dataType!==6&&e[0].dataType!==12)throw new Error(\"Tile only support float, int32, and uint32 data types\");if(e[1].dataType!==7)throw new Error(\"Tile `repeats` input should be of int64 data type\");if(e[1].dims.length!==1)throw new Error(\"Tile `repeats` input should be 1-D\");if(Jc(e[1]).length!==e[0].dims.length)throw new Error(\"Tile `repeats` input should have same number of elements as rank of input data tensor\")},I0=(e,r)=>{let t=[];for(let u=0;u{let r=e[0].dims,t=Jc(e[1]),u=I0(r,t),s=K.size(u),c=e[0].dataType,f=Z(\"input\",c,r.length),d=ne(\"output\",c,u.length),g=w=>`\n const inputShape = ${f.indices(...r)};\n ${w.registerUniform(\"output_size\",\"u32\").declareVariables(f,d)}\n ${w.mainStart()}\n ${w.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.output_size\")}\n let output_indices = ${d.offsetToIndices(\"global_idx\")};\n var input_indices: ${f.type.indices};\n for (var i = 0; i < ${r.length}; i++) {\n let input_dim_i = ${f.indicesGet(\"uniforms.input_shape\",\"i\")};\n let input_dim_value = ${d.indicesGet(\"output_indices\",\"i\")} % input_dim_i;\n\n ${f.indicesSet(\"input_indices\",\"i\",\"input_dim_value\")}\n }\n ${d.setByOffset(\"global_idx\",f.getByIndices(\"input_indices\"))}\n }`;return{name:\"Tile\",shaderCache:{hint:`${t}`,inputDependencies:[\"rank\"]},getRunData:()=>({outputs:[{dims:u,dataType:e[0].dataType}],dispatchGroup:{x:Math.ceil(s/64)},programUniforms:[{type:12,data:s},...se(e[0].dims,u)]}),getShaderSource:g}},ef=e=>{x0(e.inputs),e.compute(A0(e.inputs),{inputs:[0]})}});var T0,E0,rf,nf=ae(()=>{\"use strict\";Te();De();Re();T0=(e,r,t,u,s)=>{let c=ne(\"output_data\",s,t.length,4),f=Z(\"a_data\",r[1].dataType,r[1].dims.length,4),d=Z(\"b_data\",r[2].dataType,r[2].dims.length,4),g=Z(\"c_data\",r[0].dataType,r[0].dims.length,4),w,C=($,A,P)=>`select(${A}, ${$}, ${P})`;if(!u)w=c.setByOffset(\"global_idx\",C(f.getByOffset(\"global_idx\"),d.getByOffset(\"global_idx\"),g.getByOffset(\"global_idx\")));else{let $=(A,P,x=\"\")=>{let E=`a_data[index_a${P}][component_a${P}]`,O=`b_data[index_b${P}][component_b${P}]`,B=`bool(c_data[index_c${P}] & (0xffu << (component_c${P} * 8)))`;return`\n let output_indices${P} = ${c.offsetToIndices(`global_idx * 4u + ${P}u`)};\n let offset_a${P} = ${f.broadcastedIndicesToOffset(`output_indices${P}`,c)};\n let offset_b${P} = ${d.broadcastedIndicesToOffset(`output_indices${P}`,c)};\n let offset_c${P} = ${g.broadcastedIndicesToOffset(`output_indices${P}`,c)};\n let index_a${P} = offset_a${P} / 4u;\n let index_b${P} = offset_b${P} / 4u;\n let index_c${P} = offset_c${P} / 4u;\n let component_a${P} = offset_a${P} % 4u;\n let component_b${P} = offset_b${P} % 4u;\n let component_c${P} = offset_c${P} % 4u;\n ${A}[${P}] = ${x}(${C(E,O,B)});\n `};s===9?w=`\n var data = vec4(0);\n ${$(\"data\",0,\"u32\")}\n ${$(\"data\",1,\"u32\")}\n ${$(\"data\",2,\"u32\")}\n ${$(\"data\",3,\"u32\")}\n output_data[global_idx] = dot(vec4(0x1, 0x100, 0x10000, 0x1000000), vec4(data));`:w=`\n ${$(\"output_data[global_idx]\",0)}\n ${$(\"output_data[global_idx]\",1)}\n 
${$(\"output_data[global_idx]\",2)}\n ${$(\"output_data[global_idx]\",3)}\n `}return`\n ${e.registerUniform(\"vec_size\",\"u32\").declareVariables(g,f,d,c)}\n ${e.mainStart()}\n ${e.guardAgainstOutOfBoundsWorkgroupSizes(\"uniforms.vec_size\")}\n ${w}\n }`},E0=e=>{let r=e[1].dims,t=e[2].dims,u=e[0].dims,s=e[1].dataType,c=!(K.areEqual(r,t)&&K.areEqual(t,u)),f=r,d=K.size(r);if(c){let w=Ot.calcShape(Ot.calcShape(r,t,!1),u,!1);if(!w)throw new Error(\"Can't perform where op on the given tensors\");f=w,d=K.size(f)}let g=Math.ceil(d/4);return{name:\"Where\",shaderCache:{inputDependencies:[\"rank\",\"rank\",\"rank\"]},getShaderSource:w=>T0(w,e,f,c,s),getRunData:()=>({outputs:[{dims:f,dataType:s}],dispatchGroup:{x:Math.ceil(d/64/4)},programUniforms:[{type:12,data:g},...se(u,r,t,f)]})}},rf=e=>{e.compute(E0(e.inputs))}});var of,af=ae(()=>{\"use strict\";hl();Qi();wl();$l();ad();yd();Zi();ro();kd();Bd();Md();Nd();Ld();qd();Zd();Jd();rc();ic();ac();io();lc();pc();hc();Oc();Bc();ni();jc();Vc();Nc();Fc();Yc();Xc();tf();Dr();li();nf();of=new Map([[\"Abs\",[Cl]],[\"Acos\",[Sl]],[\"Acosh\",[xl]],[\"Add\",[sd]],[\"ArgMax\",[ml,Yi]],[\"ArgMin\",[pl,Yi]],[\"Asin\",[Il]],[\"Asinh\",[Al]],[\"Atan\",[Tl]],[\"Atanh\",[El]],[\"Attention\",[bl]],[\"AveragePool\",[Cc,$c]],[\"BatchNormalization\",[vl]],[\"BiasAdd\",[_l]],[\"BiasSplitGelu\",[od]],[\"Cast\",[kl,Pl]],[\"Ceil\",[Rl]],[\"Clip\",[Ol]],[\"Concat\",[gl,yl]],[\"Conv\",[so,ao]],[\"ConvTranspose\",[Pd,Ed]],[\"Cos\",[Bl]],[\"Cosh\",[Dl]],[\"CumSum\",[Od,Rd]],[\"DepthToSpace\",[Dd,zd]],[\"Div\",[ud]],[\"Einsum\",[Vd,Wd]],[\"Elu\",[zl,si]],[\"Equal\",[ld]],[\"Erf\",[Ml]],[\"Exp\",[jl]],[\"Expand\",[Hd]],[\"FastGelu\",[Fd]],[\"Floor\",[Ul]],[\"FusedConv\",[so,ao]],[\"Gather\",[Yd,Kd]],[\"GatherElements\",[Xd,Qd]],[\"Gelu\",[Vl]],[\"Gemm\",[tc,ec]],[\"GlobalAveragePool\",[Ic,xc]],[\"GlobalMaxPool\",[kc,Pc]],[\"Greater\",[pd]],[\"GreaterOrEqual\",[hd]],[\"HardSigmoid\",[Kl,ql]],[\"InstanceNormalization\",[nc]],[\"LayerNormalization\",[oc]],[\"LeakyRelu\",[Wl,si]],[\"Less\",[md]],[\"LessOrEqual\",[gd]],[\"Log\",[nd]],[\"MatMul\",[Cd]],[\"MatMulNBits\",[sc,uc]],[\"MaxPool\",[Tc,Ec]],[\"Mul\",[dd]],[\"MultiHeadAttention\",[fc,cc]],[\"Neg\",[Gl]],[\"Not\",[Nl]],[\"Pad\",[mc]],[\"Pow\",[cd]],[\"Range\",[Rc]],[\"Reciprocal\",[Hl]],[\"ReduceMin\",[sl]],[\"ReduceMean\",[rl]],[\"ReduceMax\",[al]],[\"ReduceSum\",[ll]],[\"ReduceProd\",[ul]],[\"ReduceL1\",[nl]],[\"ReduceL2\",[il]],[\"ReduceLogSum\",[cl]],[\"ReduceLogSumExp\",[ol]],[\"ReduceSumSquare\",[dl]],[\"Relu\",[Ll]],[\"Resize\",[zc,Mc]],[\"RotaryEmbedding\",[Uc]],[\"Sigmoid\",[Fl]],[\"Sin\",[Yl]],[\"Sinh\",[Zl]],[\"Slice\",[Hc,Lc]],[\"SkipLayerNormalization\",[Wc]],[\"Split\",[Zc,Qc]],[\"Sqrt\",[Ql]],[\"Softmax\",[qc,Kc]],[\"Sub\",[fd]],[\"Tan\",[Xl]],[\"Tanh\",[ed]],[\"ThresholdedRelu\",[rd,si]],[\"Tile\",[ef]],[\"Transpose\",[Gu,Hu]],[\"Where\",[rf]]])});var vi,sf=ae(()=>{\"use strict\";Rr();Lt();Re();vi=class{constructor(r){this.backend=r;this.repo=new Map,this.attributesBound=!1}getArtifact(r){return this.repo.get(r)}setArtifact(r,t){this.repo.set(r,t)}run(r,t,u,s,c){ar(r.programInfo.name);let f=this.backend.device,d=this.backend.getComputePassEncoder();this.backend.writeTimestamp(this.backend.pendingDispatchNumber*2);let g=[];for(let C of t)g.push({binding:g.length,resource:{buffer:C.buffer}});for(let C of u)g.push({binding:g.length,resource:{buffer:C.buffer}});c&&g.push({binding:g.length,resource:c});let 
w=f.createBindGroup({layout:r.computePipeline.getBindGroupLayout(0),entries:g,label:r.programInfo.name});if(this.backend.sessionStatus===\"capturing\"){let C={kernelId:this.backend.currentKernelId,computePipeline:r.computePipeline,bindGroup:w,dispatchGroup:s};this.backend.capturedCommandList.get(this.backend.currentSessionId).push(C)}d.setPipeline(r.computePipeline),d.setBindGroup(0,w),d.dispatchWorkgroups(...s),this.backend.writeTimestamp(this.backend.pendingDispatchNumber*2+1),this.backend.pendingDispatchNumber++,(this.backend.pendingDispatchNumber>=this.backend.maxDispatchNumber||this.backend.queryType===\"at-passes\")&&this.backend.endComputePass(),this.backend.pendingDispatchNumber>=this.backend.maxDispatchNumber&&this.backend.flush(),Ht(r.programInfo.name)}dispose(){}build(r,t){ar(r.name);let u=this.backend.device,s=[];u.features.has(\"shader-f16\")&&s.push(\"enable f16;\");let c=Wu(t,this.backend.device.limits),f=r.getShaderSource(c),d=`${s.join(`\n`)}\n${c.additionalImplementations}\n${f}`,g=u.createShaderModule({code:d,label:r.name});Fe(\"verbose\",()=>`[WebGPU] ${r.name} shader code: ${d}`);let w=u.createComputePipeline({compute:{module:g,entryPoint:\"main\"},layout:\"auto\",label:r.name});return Ht(r.name),{programInfo:r,computePipeline:w,uniformVariablesInfo:c.variablesInfo}}normalizeDispatchGroupSize(r){let t=typeof r==\"number\"?r:r.x,u=typeof r==\"number\"?1:r.y||1,s=typeof r==\"number\"?1:r.z||1,c=this.backend.device.limits.maxComputeWorkgroupsPerDimension;if(t<=c&&u<=c&&s<=c)return[t,u,s];let f=t*u*s,d=Math.ceil(Math.sqrt(f));if(d>c){if(d=Math.ceil(Math.cbrt(f)),d>c)throw new Error(\"Total dispatch size exceeds WebGPU maximum.\");return[d,d,d]}else return[d,d,1]}}});var P0,k0,mo,wi,uf=ae(()=>{\"use strict\";Rr();Te();Lt();Du();Vu();af();sf();P0=(e,r)=>{if(r.length!==e.length)throw new Error(`inputDependencies length ${r.length} is not equal to inputTensors length ${e.length}.`);let t=[];for(let u=0;u{let u=e.name;return e.shaderCache?.hint&&(u+=\"[\"+e.shaderCache.hint+\"]\"),u+=\":\"+t+`:${P0(r,e.shaderCache?.inputDependencies??new Array(r.length).fill(\"dims\"))}`,u},mo=class{constructor(r){r&&(this.architecture=r.architecture,this.vendor=r.vendor)}isArchitecture(r){return this.architecture===r}isVendor(r){return this.vendor===r}},wi=class{constructor(){this.currentSessionId=null;this.currentKernelId=null;this.commandEncoder=null;this.computePassEncoder=null;this.maxDispatchNumber=16;this.pendingDispatchNumber=0;this.pendingKernels=[];this.pendingQueries=new Map;this.sessionStatus=\"default\";this.capturedCommandList=new Map;this.capturedPendingKernels=new Map;this.sessionExternalDataMapping=new Map}get currentKernelCustomData(){if(this.currentKernelId===null)throw new Error(\"currentKernelCustomData(): currentKernelId is null. 
(should not happen)\");let r=this.kernelCustomData.get(this.currentKernelId);return r||(r={},this.kernelCustomData.set(this.currentKernelId,r)),r}async initialize(r,t){this.env=r;let u=[],s={requiredLimits:{maxComputeWorkgroupStorageSize:t.limits.maxComputeWorkgroupStorageSize,maxComputeWorkgroupsPerDimension:t.limits.maxComputeWorkgroupsPerDimension,maxStorageBufferBindingSize:t.limits.maxStorageBufferBindingSize,maxBufferSize:t.limits.maxBufferSize,maxComputeInvocationsPerWorkgroup:t.limits.maxComputeInvocationsPerWorkgroup,maxComputeWorkgroupSizeX:t.limits.maxComputeWorkgroupSizeX,maxComputeWorkgroupSizeY:t.limits.maxComputeWorkgroupSizeY,maxComputeWorkgroupSizeZ:t.limits.maxComputeWorkgroupSizeZ},requiredFeatures:u};t.features.has(\"chromium-experimental-timestamp-query-inside-passes\")?u.push(\"chromium-experimental-timestamp-query-inside-passes\"):t.features.has(\"timestamp-query\")&&u.push(\"timestamp-query\"),t.features.has(\"shader-f16\")&&u.push(\"shader-f16\"),this.device=await t.requestDevice(s),this.adapterInfo=new mo(await t.requestAdapterInfo()),this.gpuDataManager=Uu(this),this.programManager=new vi(this),this.kernels=new Map,this.kernelPersistentData=new Map,this.kernelCustomData=new Map,Ru(r.logLevel,!!r.debug),this.device.onuncapturederror=c=>{c.error instanceof GPUValidationError&&console.error(`An uncaught WebGPU validation error was raised: ${c.error.message}`)},Object.defineProperty(this.env.webgpu,\"device\",{value:this.device,writable:!1,enumerable:!0,configurable:!1}),Object.defineProperty(this.env.webgpu,\"adapter\",{value:t,writable:!1,enumerable:!0,configurable:!1}),this.setQueryType()}dispose(){typeof this.querySet<\"u\"&&this.querySet.destroy(),this.gpuDataManager.dispose()}getCommandEncoder(){return this.commandEncoder||(this.commandEncoder=this.device.createCommandEncoder()),this.commandEncoder}getComputePassEncoder(){if(!this.computePassEncoder){let r=this.getCommandEncoder(),t={};this.queryType===\"at-passes\"&&(t.timestampWrites={querySet:this.querySet,beginningOfPassWriteIndex:this.pendingDispatchNumber*2,endOfPassWriteIndex:this.pendingDispatchNumber*2+1}),this.computePassEncoder=r.beginComputePass(t)}return this.computePassEncoder}endComputePass(){this.computePassEncoder&&(this.computePassEncoder.end(),this.computePassEncoder=null)}flush(){if(!this.commandEncoder)return;ar(),this.endComputePass();let r;this.queryType!==\"none\"&&(this.commandEncoder.resolveQuerySet(this.querySet,0,this.pendingDispatchNumber*2,this.queryResolveBuffer,0),r=this.device.createBuffer({size:this.pendingDispatchNumber*2*8,usage:GPUBufferUsage.MAP_READ|GPUBufferUsage.COPY_DST}),this.pendingQueries.set(r,this.pendingKernels),this.pendingKernels=[],this.commandEncoder.copyBufferToBuffer(this.queryResolveBuffer,0,r,0,this.pendingDispatchNumber*2*8)),this.device.queue.submit([this.commandEncoder.finish()]),this.gpuDataManager.refreshPendingBuffers(),this.commandEncoder=null,this.pendingDispatchNumber=0,this.queryType!==\"none\"&&r.mapAsync(GPUMapMode.READ).then(()=>{let t=new BigUint64Array(r.getMappedRange()),u=this.pendingQueries.get(r);for(let s=0;s\"u\"&&(this.queryTimeBase=P);let E=Number(P-this.queryTimeBase),O=Number(x-this.queryTimeBase);if(!Number.isSafeInteger(E)||!Number.isSafeInteger(O))throw new RangeError(\"incorrect timestamp 
range\");if(this.env.webgpu.profiling?.ondata)this.env.webgpu.profiling.ondata({version:1,inputsMetadata:$.map(B=>({dims:B.dims,dataType:Gt(B.dataType)})),outputsMetadata:A.map(B=>({dims:B.dims,dataType:Gt(B.dataType)})),kernelId:f,kernelType:g,kernelName:w,programName:C,startTime:E,endTime:O});else{let B=\"\";$.forEach((j,U)=>{B+=`input[${U}]: [${j.dims}] | ${Gt(j.dataType)}, `});let R=\"\";A.forEach((j,U)=>{R+=`output[${U}]: [${j.dims}] | ${Gt(j.dataType)}, `}),console.log(`[profiling] kernel \"${f}|${g}|${w}|${C}\" ${B}${R}execution time: ${O-E} ns`)}Mi(\"GPU\",`${C}::${P}::${x}`)}r.unmap(),this.pendingQueries.delete(r)}),Ht()}run(r,t,u,s,c,f){ar(r.name);let d=[];for(let j=0;jU):u;if($.length!==g.length)throw new Error(`Output size ${$.length} must be equal to ${g.length}.`);let A=[],P=[];for(let j=0;j=f)throw new Error(`Invalid output index: ${$[j]}`);if($[j]===-3)continue;let U=$[j]===-1,L=$[j]===-2,F=U||L?c(g[j].dataType,g[j].dims):s($[j],g[j].dataType,g[j].dims);if(A.push(F),F.data===0)continue;let te=this.gpuDataManager.get(F.data);if(!te)throw new Error(`no GPU data for output: ${F.data}`);if(U&&this.temporaryData.push(te),L){let J=this.kernelPersistentData.get(this.currentKernelId);J||(J=[],this.kernelPersistentData.set(this.currentKernelId,J)),J.push(te)}P.push(te)}if(d.length!==t.length||P.length!==A.length){if(P.length===0)return Ht(r.name),A;throw new Error(`Program ${r.name} has zero-sized tensor(s) in inputs or outputs. This is not supported now.`)}let x;if(C){let j=0,U=[];C.forEach(J=>{let oe=typeof J.data==\"number\"?[J.data]:J.data;if(oe.length===0)return;let le=J.type===10?2:4,ge,X;J.type===10?(X=oe.length>4?16:oe.length>2?8:oe.length*le,ge=oe.length>4?16:le*oe.length):(X=oe.length<=2?oe.length*le:16,ge=16),j=Math.ceil(j/X)*X,U.push(j);let pe=J.type===10?8:4;j+=oe.length>4?Math.ceil(oe.length/pe)*ge:oe.length*le});let L=16;j=Math.ceil(j/L)*L;let F=new ArrayBuffer(j);C.forEach((J,oe)=>{let le=U[oe],ge=typeof J.data==\"number\"?[J.data]:J.data;if(J.type===6)new Int32Array(F,le,ge.length).set(ge);else if(J.type===12)new Uint32Array(F,le,ge.length).set(ge);else if(J.type===10)new Uint16Array(F,le,ge.length).set(ge);else if(J.type===1)new Float32Array(F,le,ge.length).set(ge);else throw new Error(`Unsupported uniform type: ${Gt(J.type)}`)});let te=this.gpuDataManager.create(j,GPUBufferUsage.COPY_DST|GPUBufferUsage.UNIFORM);this.device.queue.writeBuffer(te.buffer,0,F,0,j),this.gpuDataManager.release(te.id),x={offset:0,size:j,buffer:te.buffer}}let E=this.programManager.normalizeDispatchGroupSize(w),O=E[1]===1&&E[2]===1,B=k0(r,t,O),R=this.programManager.getArtifact(B);if(R||(R=this.programManager.build(r,E),this.programManager.setArtifact(B,R),Fe(\"info\",()=>`[artifact] key: ${B}, programName: ${r.name}`)),C&&R.uniformVariablesInfo){if(C.length!==R.uniformVariablesInfo.length)throw new Error(`Uniform variables count mismatch: expect ${R.uniformVariablesInfo.length}, got ${C.length} in program \"${R.programInfo.name}\".`);for(let j=0;j`[ProgramManager] run \"${r.name}\" (key=${B}) with ${E[0]}x${E[1]}x${E[2]}`),this.queryType!==\"none\"||this.sessionStatus===\"capturing\"){let j={kernelId:this.currentKernelId,programName:R.programInfo.name,inputTensorViews:t,outputTensorViews:A};this.pendingKernels.push(j),this.sessionStatus===\"capturing\"&&this.capturedPendingKernels.get(this.currentSessionId).push(j)}return this.programManager.run(R,d,P,E,x),Ht(r.name),A}upload(r,t){this.gpuDataManager.upload(r,t)}memcpy(r,t){this.gpuDataManager.memcpy(r,t)}async download(r,t){await 
this.gpuDataManager.download(r,t)}alloc(r){return this.gpuDataManager.create(r).id}free(r){return this.gpuDataManager.release(r)}createKernel(r,t,u,s){let c=of.get(r);if(!c)throw new Error(`kernel not implemented: ${r}`);let f={kernelType:r,kernelName:s,kernelEntry:c[0],attributes:[c[1],u]};this.kernels.set(t,f)}releaseKernel(r){let t=this.kernelPersistentData.get(r);if(t){for(let u of t)this.gpuDataManager.release(u.id);this.kernelPersistentData.delete(r)}this.kernelCustomData.delete(r),this.kernels.delete(r)}computeKernel(r,t,u){let s=this.kernels.get(r);if(!s)throw new Error(`kernel not created: ${r}`);let c=s.kernelType,f=s.kernelName,d=s.kernelEntry,g=s.attributes;if(this.currentKernelId!==null)throw new Error(`kernel \"[${c}] ${f}\" is not allowed to be called recursively`);this.currentKernelId=r,g[0]&&(g[1]=g[0](g[1]),g[0]=void 0),Fe(\"info\",()=>`[WebGPU] Start to run kernel \"[${c}] ${f}\"...`);let w=this.env.debug;this.temporaryData=[];try{return w&&this.device.pushErrorScope(\"validation\"),d(t,g[1]),0}catch(C){return u.push(Promise.resolve(`[WebGPU] Kernel \"[${c}] ${f}\" failed. ${C}`)),1}finally{w&&u.push(this.device.popErrorScope().then(C=>C?`GPU validation error for kernel \"[${c}] ${f}\": ${C.message}`:null));for(let C of this.temporaryData)this.gpuDataManager.release(C.id);this.temporaryData=[],this.currentKernelId=null}}registerBuffer(r,t,u,s){let c=this.sessionExternalDataMapping.get(r);c||(c=new Map,this.sessionExternalDataMapping.set(r,c));let f=c.get(t),d=this.gpuDataManager.registerExternalBuffer(u,s,f?.[1]);return c.set(t,[d,u]),d}unregisterBuffers(r){let t=this.sessionExternalDataMapping.get(r);t&&(t.forEach(u=>this.gpuDataManager.unregisterExternalBuffer(u[1])),this.sessionExternalDataMapping.delete(r))}getBuffer(r){let t=this.gpuDataManager.get(r);if(!t)throw new Error(`no GPU data for buffer: ${r}`);return t.buffer}createDownloader(r,t,u){return async()=>{let s=await Ni(this,r,t);return Bu(s.buffer,u)}}writeTimestamp(r){this.queryType===\"inside-passes\"&&this.computePassEncoder.writeTimestamp(this.querySet,r)}setQueryType(){this.queryType=\"none\",(this.env.webgpu.profiling?.mode===\"default\"||(typeof this.env.trace>\"u\"?this.env.wasm.trace:this.env.trace))&&(this.device.features.has(\"chromium-experimental-timestamp-query-inside-passes\")?this.queryType=\"inside-passes\":this.device.features.has(\"timestamp-query\")&&(this.queryType=\"at-passes\"),this.queryType!==\"none\"&&typeof this.querySet>\"u\"&&(this.querySet=this.device.createQuerySet({type:\"timestamp\",count:this.maxDispatchNumber*2}),this.queryResolveBuffer=this.device.createBuffer({size:this.maxDispatchNumber*2*8,usage:GPUBufferUsage.COPY_SRC|GPUBufferUsage.QUERY_RESOLVE})))}captureBegin(){Fe(\"info\",\"captureBegin\"),this.capturedCommandList.get(this.currentSessionId)||this.capturedCommandList.set(this.currentSessionId,[]),this.capturedPendingKernels.get(this.currentSessionId)||this.capturedPendingKernels.set(this.currentSessionId,[]),this.flush(),this.sessionStatus=\"capturing\"}captureEnd(){Fe(\"info\",\"captureEnd\"),this.flush(),this.sessionStatus=\"default\"}replay(){Fe(\"info\",\"replay\"),this.sessionStatus=\"replaying\";let r=this.capturedCommandList.get(this.currentSessionId),t=this.capturedPendingKernels.get(this.currentSessionId),u=r.length;this.pendingKernels=[];for(let 
s=0;s=this.maxDispatchNumber||this.queryType===\"at-passes\")&&this.endComputePass(),this.pendingDispatchNumber>=this.maxDispatchNumber&&this.flush()}this.flush(),this.sessionStatus=\"default\"}onReleaseSession(r){this.unregisterBuffers(r),this.capturedCommandList.has(r)&&this.capturedCommandList.delete(r),this.capturedPendingKernels.has(r)&&this.capturedPendingKernels.delete(r),this.gpuDataManager.onReleaseSession(r)}onRunStart(r){this.currentSessionId=r,this.setQueryType()}}});var lf={};Hn(lf,{init:()=>O0});var mn,ho,O0,df=ae(()=>{\"use strict\";Te();uf();Lt();De();mn=class e{constructor(r,t,u,s){this.module=r;this.dataType=t;this.data=u;this.dims=s}getFloat32Array(){if(this.dataType!==1)throw new Error(\"Invalid data type\");let r=K.size(this.dims);return r===0?new Float32Array:new Float32Array(this.module.HEAP8.buffer,this.data,r)}getBigInt64Array(){if(this.dataType!==7)throw new Error(\"Invalid data type\");let r=K.size(this.dims);return r===0?new BigInt64Array:new BigInt64Array(this.module.HEAP8.buffer,this.data,r)}getInt32Array(){if(this.dataType!==6)throw new Error(\"Invalid data type\");let r=K.size(this.dims);return r===0?new Int32Array:new Int32Array(this.module.HEAP8.buffer,this.data,r)}reshape(r){if(K.size(r)!==K.size(this.dims))throw new Error(\"Invalid new shape\");return new e(this.module,this.dataType,this.data,r)}},ho=class{constructor(r,t,u){this.module=r;this.backend=t;this.customDataOffset=0;this.customDataSize=0;this.adapterInfo=t.adapterInfo;let s=r.HEAPU32,c=u>>>2;this.opKernelContext=s[c++];let f=s[c++];this.outputCount=s[c++],this.customDataOffset=s[c++],this.customDataSize=s[c++];let d=[];for(let g=0;gtypeof d==\"number\"?this.inputs[d]:d)??this.inputs,s=t?.outputs??[],c=(d,g,w)=>new mn(this.module,g,this.output(d,w),w),f=(d,g)=>{let w=or(d);if(!w)throw new Error(`Unsupported data type: ${d}`);let C=w*K.size(g),$=C>0?this.backend.gpuDataManager.create(C).id:0;return new mn(this.module,d,$,g)};return this.backend.run(r,u,s,c,f,this.outputCount)}output(r,t){let u=this.module.stackSave();try{let s=this.module.stackAlloc((1+t.length)*4),c=s>>2;this.module.HEAPU32[c++]=t.length;for(let f=0;f{let s=r.jsepInit;if(!s)throw new Error(\"Failed to initialize JSEP. The WebAssembly module is not built with JSEP support.\");if(e===\"webgpu\"){let c=new wi;await c.initialize(t,u),s(\"webgpu\",[c,f=>c.alloc(f),f=>c.free(f),(f,d,g,w=!1)=>{if(w)Fe(\"verbose\",()=>`[WebGPU] jsepCopyGpuToGpu: src=${f}, dst=${d}, size=${g}`),c.memcpy(f,d);else{Fe(\"verbose\",()=>`[WebGPU] jsepCopyCpuToGpu: dataOffset=${f}, gpuDataId=${d}, size=${g}`);let C=r.HEAPU8.subarray(f>>>0,(f>>>0)+g);c.upload(d,C)}},async(f,d,g)=>{Fe(\"verbose\",()=>`[WebGPU] jsepCopyGpuToCpu: gpuDataId=${f}, dataOffset=${d}, size=${g}`),await c.download(f,()=>r.HEAPU8.subarray(d>>>0,(d>>>0)+g))},(f,d,g)=>c.createKernel(f,d,g,r.UTF8ToString(r._JsepGetNodeName(d))),f=>c.releaseKernel(f),(f,d,g,w)=>{Fe(\"verbose\",()=>`[WebGPU] jsepRun: sessionHandle=${g}, kernel=${f}, contextDataOffset=${d}`);let C=new ho(r,c,d);return c.computeKernel(f,C,w)},()=>c.captureBegin(),()=>c.captureEnd(),()=>c.replay()])}else s(\"webnn\")}});var au;au=Zs();var Vh=nu(),Oi,Ri=!1,qn=!1,ou=!1,Wh=e=>{if(e===1)return!1;if(typeof SharedArrayBuffer>\"u\")return typeof self<\"u\"&&!self.crossOriginIsolated&&console.warn(\"env.wasm.numThreads is set to \"+e+\", but this will not work unless you enable crossOriginIsolated mode. 
See https://web.dev/cross-origin-isolation-guide/ for more info.\"),!1;typeof process<\"u\"&&process.versions&&process.versions.node&&console.warn(\"env.wasm.numThreads is set to \"+e+\", however, currently onnxruntime-web does not support multi-threads in Node.js. Please consider using onnxruntime-node for performance critical scenarios.\");try{return typeof MessageChannel<\"u\"&&new MessageChannel().port1.postMessage(new SharedArrayBuffer(1)),WebAssembly.validate(new Uint8Array([0,97,115,109,1,0,0,0,1,4,1,96,0,0,3,2,1,0,5,4,1,3,1,1,10,11,1,9,0,65,0,254,16,2,0,26,11]))}catch{return!1}},Nh=()=>{try{return WebAssembly.validate(new Uint8Array([0,97,115,109,1,0,0,0,1,4,1,96,0,0,3,2,1,0,10,30,1,28,0,65,0,253,15,253,12,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,253,186,1,26,11]))}catch{return!1}},Gh=(e,r)=>e?r?\"ort-wasm-simd-threaded.wasm\":\"ort-wasm-simd.wasm\":r?\"ort-wasm-threaded.wasm\":\"ort-wasm.wasm\",su=async e=>{if(Ri)return Promise.resolve();if(qn)throw new Error(\"multiple calls to 'initializeWebAssembly()' detected.\");if(ou)throw new Error(\"previous call to 'initializeWebAssembly()' failed.\");qn=!0;let r=e.initTimeout,t=e.numThreads,u=e.simd,s=Wh(t),c=u&&Nh(),f=e.wasmPaths,d=typeof f==\"string\"?f:void 0,g=Gh(c,s),w=typeof f==\"object\"?f[g]:void 0,C=!1,$=[];if(r>0&&$.push(new Promise(A=>{setTimeout(()=>{C=!0,A()},r)})),$.push(new Promise((A,P)=>{let x=s?Vh:au,E={locateFile:(O,B)=>{if(s&&O.endsWith(\".worker.js\")&&typeof Blob<\"u\")return URL.createObjectURL(new Blob([iu()],{type:\"text/javascript\"}));if(O.endsWith(\".wasm\")){if(w)return w;let R=d??B;return g===\"ort-wasm-simd.wasm\"?R+\"ort-wasm-simd.jsep.wasm\":g===\"ort-wasm-simd-threaded.wasm\"?R+\"ort-wasm-simd-threaded.jsep.wasm\":R+g}return B+O}};if(s)if(E.numThreads=t,typeof Blob>\"u\")E.mainScriptUrlOrBlob=(void 0)(__dirname,\"ort-wasm-threaded.js\");else{let O=`var ortWasmThreaded=${x.toString()};`;E.mainScriptUrlOrBlob=new Blob([O],{type:\"text/javascript\"})}x(E).then(O=>{qn=!1,Ri=!0,Oi=O,A()},O=>{qn=!1,ou=!0,P(O)})})),await Promise.race($),C)throw new Error(`WebAssembly backend initializing failed due to timeout: ${r}ms`)},Qe=()=>{if(Ri&&Oi)return Oi;throw new Error(\"WebAssembly is not initialized yet.\")};var Xe=(e,r)=>{let t=Qe(),u=t.lengthBytesUTF8(e)+1,s=t._malloc(u);return t.stringToUTF8(e,s,u),r.push(s),s},ln=(e,r,t,u)=>{if(typeof e==\"object\"&&e!==null){if(t.has(e))throw new Error(\"Circular reference in options\");t.add(e)}Object.entries(e).forEach(([s,c])=>{let f=r?r+s:s;if(typeof c==\"object\")ln(c,f+\".\",t,u);else if(typeof c==\"string\"||typeof c==\"number\")u(f,c.toString());else if(typeof c==\"boolean\")u(f,c?\"1\":\"0\");else throw new Error(`Can't handle extra config type: ${typeof c}`)})},Le=e=>{let r=Qe(),t=r.stackSave();try{let u=r.stackAlloc(8);r._OrtGetLastError(u,u+4);let s=r.HEAP32[u/4],c=r.HEAPU32[u/4+1],f=c?r.UTF8ToString(c):\"\";throw new Error(`${e} ERROR_CODE: ${s}, ERROR_MESSAGE: ${f}`)}finally{r.stackRestore(t)}};var uu=e=>{let r=Qe(),t=0,u=[],s=e||{};try{if(e?.logSeverityLevel===void 0)s.logSeverityLevel=2;else if(typeof e.logSeverityLevel!=\"number\"||!Number.isInteger(e.logSeverityLevel)||e.logSeverityLevel<0||e.logSeverityLevel>4)throw new Error(`log serverity level is not valid: ${e.logSeverityLevel}`);if(e?.logVerbosityLevel===void 0)s.logVerbosityLevel=0;else if(typeof e.logVerbosityLevel!=\"number\"||!Number.isInteger(e.logVerbosityLevel))throw new Error(`log verbosity level is not valid: ${e.logVerbosityLevel}`);e?.terminate===void 0&&(s.terminate=!1);let c=0;return e?.tag!==void 
0&&(c=Xe(e.tag,u)),t=r._OrtCreateRunOptions(s.logSeverityLevel,s.logVerbosityLevel,!!s.terminate,c),t===0&&Le(\"Can't create run options.\"),e?.extra!==void 0&&ln(e.extra,\"\",new WeakSet,(f,d)=>{let g=Xe(f,u),w=Xe(d,u);r._OrtAddRunConfigEntry(t,g,w)!==0&&Le(`Can't set a run config entry: ${f} - ${d}.`)}),[t,u]}catch(c){throw t!==0&&r._OrtReleaseRunOptions(t),u.forEach(f=>r._free(f)),c}};var Hh=e=>{switch(e){case\"disabled\":return 0;case\"basic\":return 1;case\"extended\":return 2;case\"all\":return 99;default:throw new Error(`unsupported graph optimization level: ${e}`)}},Lh=e=>{switch(e){case\"sequential\":return 0;case\"parallel\":return 1;default:throw new Error(`unsupported execution mode: ${e}`)}},Fh=e=>{e.extra||(e.extra={}),e.extra.session||(e.extra.session={});let r=e.extra.session;r.use_ort_model_bytes_directly||(r.use_ort_model_bytes_directly=\"1\"),e.executionProviders&&e.executionProviders.some(t=>(typeof t==\"string\"?t:t.name)===\"webgpu\")&&(e.enableMemPattern=!1)},qh=(e,r,t)=>{for(let u of r){let s=typeof u==\"string\"?u:u.name;switch(s){case\"webnn\":if(s=\"WEBNN\",typeof u!=\"string\"){let f=u;if(f?.deviceType){let d=Xe(\"deviceType\",t),g=Xe(f.deviceType,t);Qe()._OrtAddSessionConfigEntry(e,d,g)!==0&&Le(`Can't set a session config entry: 'deviceType' - ${f.deviceType}.`)}if(f?.numThreads){let d=f.numThreads;(typeof d!=\"number\"||!Number.isInteger(d)||d<0)&&(d=0);let g=Xe(\"numThreads\",t),w=Xe(d.toString(),t);Qe()._OrtAddSessionConfigEntry(e,g,w)!==0&&Le(`Can't set a session config entry: 'numThreads' - ${f.numThreads}.`)}if(f?.powerPreference){let d=Xe(\"powerPreference\",t),g=Xe(f.powerPreference,t);Qe()._OrtAddSessionConfigEntry(e,d,g)!==0&&Le(`Can't set a session config entry: 'powerPreference' - ${f.powerPreference}.`)}}break;case\"webgpu\":if(s=\"JS\",typeof u!=\"string\"){let f=u;if(f?.preferredLayout){if(f.preferredLayout!==\"NCHW\"&&f.preferredLayout!==\"NHWC\")throw new Error(`preferredLayout must be either 'NCHW' or 'NHWC': ${f.preferredLayout}`);let d=Xe(\"preferredLayout\",t),g=Xe(f.preferredLayout,t);Qe()._OrtAddSessionConfigEntry(e,d,g)!==0&&Le(`Can't set a session config entry: 'preferredLayout' - ${f.preferredLayout}.`)}}break;case\"wasm\":case\"cpu\":continue;default:throw new Error(`not supported execution provider: ${s}`)}let c=Xe(s,t);Qe()._OrtAppendExecutionProvider(e,c)!==0&&Le(`Can't append execution provider: ${s}.`)}},lu=e=>{let r=Qe(),t=0,u=[],s=e||{};Fh(s);try{let c=Hh(s.graphOptimizationLevel??\"all\"),f=Lh(s.executionMode??\"sequential\"),d=typeof s.logId==\"string\"?Xe(s.logId,u):0,g=s.logSeverityLevel??2;if(!Number.isInteger(g)||g<0||g>4)throw new Error(`log serverity level is not valid: ${g}`);let w=s.logVerbosityLevel??0;if(!Number.isInteger(w)||w<0||w>4)throw new Error(`log verbosity level is not valid: ${w}`);let C=typeof s.optimizedModelFilePath==\"string\"?Xe(s.optimizedModelFilePath,u):0;if(t=r._OrtCreateSessionOptions(c,!!s.enableCpuMemArena,!!s.enableMemPattern,f,!!s.enableProfiling,0,d,g,w,C),t===0&&Le(\"Can't create session options.\"),s.executionProviders&&qh(t,s.executionProviders,u),s.enableGraphCapture!==void 0){if(typeof s.enableGraphCapture!=\"boolean\")throw new Error(`enableGraphCapture must be a boolean value: ${s.enableGraphCapture}`);let $=Xe(\"enableGraphCapture\",u),A=Xe(s.enableGraphCapture.toString(),u);r._OrtAddSessionConfigEntry(t,$,A)!==0&&Le(`Can't set a session config entry: 'enableGraphCapture' - ${s.enableGraphCapture}.`)}if(s.freeDimensionOverrides)for(let[$,A]of 
Object.entries(s.freeDimensionOverrides)){if(typeof $!=\"string\")throw new Error(`free dimension override name must be a string: ${$}`);if(typeof A!=\"number\"||!Number.isInteger(A)||A<0)throw new Error(`free dimension override value must be a non-negative integer: ${A}`);let P=Xe($,u);r._OrtAddFreeDimensionOverride(t,P,A)!==0&&Le(`Can't set a free dimension override: ${$} - ${A}.`)}return s.extra!==void 0&&ln(s.extra,\"\",new WeakSet,($,A)=>{let P=Xe($,u),x=Xe(A,u);r._OrtAddSessionConfigEntry(t,P,x)!==0&&Le(`Can't set a session config entry: ${$} - ${A}.`)}),[t,u]}catch(c){throw t!==0&&r._OrtReleaseSessionOptions(t),u.forEach(f=>r._free(f)),c}};Te();var cu=async e=>{if(typeof e==\"string\")if(typeof process<\"u\"&&process.versions&&process.versions.node)try{return new Uint8Array(await(void 0)(e))}catch(r){if(r.code===\"ERR_FS_FILE_TOO_LARGE\"){let t=(void 0)(e),u=[];for await(let s of t)u.push(s);return new Uint8Array(Buffer.concat(u))}throw r}else{let r=await fetch(e);if(!r.ok)throw new Error(`failed to load external data file: ${e}`);let t=r.headers.get(\"Content-Length\"),u=t?parseInt(t,10):0;if(u<1073741824)return new Uint8Array(await r.arrayBuffer());{if(!r.body)throw new Error(`failed to load external data file: ${e}, no response body.`);let s=r.body.getReader(),c;try{c=new ArrayBuffer(u)}catch(d){if(d instanceof RangeError){let g=Math.ceil(u/65536);c=new WebAssembly.Memory({initial:g,maximum:g}).buffer}else throw d}let f=0;for(;;){let{done:d,value:g}=await s.read();if(d)break;let w=g.byteLength;new Uint8Array(c,f,w).set(g),f+=w}return new Uint8Array(c,0,u)}}else return e instanceof Blob?new Uint8Array(await e.arrayBuffer()):e instanceof Uint8Array?e:new Uint8Array(e)};var R0=(e,r)=>{Qe()._OrtInit(e,r)!==0&&Le(\"Can't initialize onnxruntime.\")},ff=async e=>{R0(e.wasm.numThreads,dn(e.logLevel))},pf=async(e,r)=>{{let t=(df(),ir(lf)).init;if(r===\"webgpu\"){if(typeof navigator>\"u\"||!navigator.gpu)throw new Error(\"WebGPU is not supported in current environment\");let u=e.webgpu.adapter;if(u){if(typeof u.limits!=\"object\"||typeof u.features!=\"object\"||typeof u.requestDevice!=\"function\")throw new Error(\"Invalid GPU adapter set in `env.webgpu.adapter`. It must be a GPUAdapter object.\")}else{let s=e.webgpu.powerPreference;if(s!==void 0&&s!==\"low-power\"&&s!==\"high-performance\")throw new Error(`Invalid powerPreference setting: \"${s}\"`);let c=e.webgpu.forceFallbackAdapter;if(c!==void 0&&typeof c!=\"boolean\")throw new Error(`Invalid forceFallbackAdapter setting: \"${c}\"`);if(u=await navigator.gpu.requestAdapter({powerPreference:s,forceFallbackAdapter:c}),!u)throw new Error('Failed to get GPU adapter. You may need to enable flag \"--enable-unsafe-webgpu\" if you are using Chrome.')}if(!e.wasm.simd)throw new Error(\"Not supported for WebGPU=ON and SIMD=OFF. Please set `env.wasm.simd` to true when using `webgpu` EP\");await t(\"webgpu\",Qe(),e,u)}if(r===\"webnn\"){if(typeof navigator>\"u\"||!navigator.ml)throw new Error(\"WebNN is not supported in current environment\");await t(\"webnn\",Qe(),e)}}},dr=new Map,B0=e=>{let r=Qe(),t=r.stackSave();try{let u=r.stackAlloc(8);return r._OrtGetInputOutputCount(e,u,u+4)!==0&&Le(\"Can't get session input/output count.\"),[r.HEAP32[u/4],r.HEAP32[u/4+1]]}finally{r.stackRestore(t)}},go=e=>{let r=Qe(),t=r._malloc(e.byteLength);if(t===0)throw new Error(`Can't create a session. 
failed to allocate a buffer of size ${e.byteLength}.`);return r.HEAPU8.set(e,t),[t,e.byteLength]},mf=async(e,r)=>{let t,u,s=Qe();Array.isArray(e)?[t,u]=e:e.buffer===s.HEAPU8.buffer?[t,u]=[e.byteOffset,e.byteLength]:[t,u]=go(e);let c=0,f=0,d=0,g=[],w=[],C=[];try{if([f,g]=lu(r),r?.externalData&&s.mountExternalData){let R=[];for(let j of r.externalData){let U=typeof j==\"string\"?j:j.path;R.push(cu(typeof j==\"string\"?j:j.data).then(L=>{s.mountExternalData(U,L)}))}await Promise.all(R)}c=await s._OrtCreateSession(t,u,f),c===0&&Le(\"Can't create a session.\");let[$,A]=B0(c),P=!!r?.enableGraphCapture,x=[],E=[],O=[];for(let R=0;R<$;R++){let j=s._OrtGetInputName(c,R);j===0&&Le(\"Can't get an input name.\"),w.push(j),x.push(s.UTF8ToString(j))}for(let R=0;RR===\"gpu-buffer\")&&(d=s._OrtCreateBinding(c),d===0&&Le(\"Can't create IO binding.\"),B={handle:d,outputPreferredLocations:O,outputPreferredLocationsEncoded:O.map(R=>Di(R))}),dr.set(c,[c,w,C,B,P,!1]),[c,x,E]}catch($){throw w.forEach(A=>s._OrtFree(A)),C.forEach(A=>s._OrtFree(A)),d!==0&&s._OrtReleaseBinding(d),c!==0&&s._OrtReleaseSession(c),$}finally{s._free(t),f!==0&&s._OrtReleaseSessionOptions(f),g.forEach($=>s._free($)),s.unmountExternalData?.()}},hf=e=>{let r=Qe(),t=dr.get(e);if(!t)throw new Error(`cannot release session. invalid session id: ${e}`);let[u,s,c,f,d]=t;f&&(d&&r._OrtClearBoundOutputs(f.handle),r._OrtReleaseBinding(f.handle)),r.jsepOnReleaseSession?.(e),s.forEach(g=>r._OrtFree(g)),c.forEach(g=>r._OrtFree(g)),r._OrtReleaseSession(u),dr.delete(e)},cf=(e,r,t,u,s,c=!1)=>{if(!e){r.push(0);return}let f=Qe(),d=e[0],g=e[1],w=e[3],C,$;if(d===\"string\"&&w===\"gpu-buffer\")throw new Error(\"String tensor is not supported on GPU.\");if(c&&w!==\"gpu-buffer\")throw new Error(`External buffer must be provided for input/output index ${s} when enableGraphCapture is true.`);if(w===\"gpu-buffer\"){let x=e[2].gpuBuffer,E=or(Bi(d));$=g.reduce((B,R)=>B*R,1)*E;let O=f.jsepRegisterBuffer;if(!O)throw new Error('Tensor location \"gpu-buffer\" is not supported without using WebGPU.');C=O(u,s,x,$)}else{let x=e[2];if(Array.isArray(x)){$=4*x.length,C=f._malloc($),t.push(C);let E=C/4;for(let O=0;Of.HEAP32[x++]=O);let E=f._OrtCreateTensor(Bi(d),C,$,P,g.length,Di(w));E===0&&Le(`Can't create tensor for input/output. session=${u}, index=${s}.`),r.push(E)}finally{f.stackRestore(A)}},gf=async(e,r,t,u,s,c)=>{let f=Qe(),d=dr.get(e);if(!d)throw new Error(`cannot run inference. 
invalid session id: ${e}`);let g=d[0],w=d[1],C=d[2],$=d[3],A=d[4],P=d[5],x=r.length,E=u.length,O=0,B=[],R=[],j=[],U=[],L=f.stackSave(),F=f.stackAlloc(x*4),te=f.stackAlloc(x*4),J=f.stackAlloc(E*4),oe=f.stackAlloc(E*4);try{[O,B]=uu(c);for(let me=0;meZe*rt,1);Ae=Gt(ce);let Pt=$?.outputPreferredLocations[u[me]];if(Ae===\"string\"){if(Pt===\"gpu-buffer\")throw new Error(\"String tensor is not supported on GPU.\");let Ze=[],rt=_e/4;for(let ct=0;ct0){let Ze=f.jsepGetBuffer;if(!Ze)throw new Error('preferredLocation \"gpu-buffer\" is not supported without using WebGPU.');let rt=Ze(_e),ct=or(ce);if(ct===void 0||!du(Ae))throw new Error(`Unsupported data type: ${Ae}`);be=!0,ue.push([Ae,Ye,{gpuBuffer:rt,download:f.jsepCreateDownloader(rt,Ke*ct,Ae),dispose:()=>{f._OrtReleaseTensor(Ee)}},\"gpu-buffer\"])}else{let Ze=Kn(Ae),rt=new Ze(Ke);new Uint8Array(rt.buffer,rt.byteOffset,rt.byteLength).set(f.HEAPU8.subarray(_e,_e+rt.byteLength)),ue.push([Ae,Ye,rt,\"cpu\"])}}finally{f.stackRestore(Pe),Ae===\"string\"&&_e&&f._free(_e),be||f._OrtReleaseTensor(Ee)}}return $&&!A&&(f._OrtClearBoundOutputs($.handle),dr.set(e,[g,w,C,$,A,!1])),ue}finally{f.stackRestore(L),R.forEach(le=>f._OrtReleaseTensor(le)),j.forEach(le=>f._OrtReleaseTensor(le)),U.forEach(le=>f._free(le)),O!==0&&f._OrtReleaseRunOptions(O),B.forEach(le=>f._free(le))}},yf=e=>{let r=Qe(),t=dr.get(e);if(!t)throw new Error(\"invalid session id\");let u=t[0],s=r._OrtEndProfiling(u);s===0&&Le(\"Can't get an profile file name.\"),r._OrtFree(s)},bf=e=>{let r=[];for(let t of e){let u=t[2];!Array.isArray(u)&&\"buffer\"in u&&r.push(u.buffer)}return r};self.onmessage=e=>{let{type:r,in:t}=e.data;try{switch(r){case\"init-wasm\":su(t.wasm).then(()=>{ff(t).then(()=>{postMessage({type:r})},u=>{postMessage({type:r,err:u})})},u=>{postMessage({type:r,err:u})});break;case\"init-ep\":{let{epName:u,env:s}=t;pf(s,u).then(()=>{postMessage({type:r})},c=>{postMessage({type:r,err:c})});break}case\"copy-from\":{let{buffer:u}=t,s=go(u);postMessage({type:r,out:s});break}case\"create\":{let{model:u,options:s}=t;mf(u,s).then(c=>{postMessage({type:r,out:c})},c=>{postMessage({type:r,err:c})});break}case\"release\":hf(t),postMessage({type:r});break;case\"run\":{let{sessionId:u,inputIndices:s,inputs:c,outputIndices:f,options:d}=t;gf(u,s,c,f,new Array(f.length).fill(null),d).then(g=>{g.some(w=>w[3]!==\"cpu\")?postMessage({type:r,err:\"Proxy does not support non-cpu tensor location.\"}):postMessage({type:r,out:g},bf([...c,...g]))},g=>{postMessage({type:r,err:g})});break}case\"end-profiling\":yf(t),postMessage({type:r});break;default:}}catch(u){postMessage({type:r,err:u})}};})();\n/**\n * @license\n * Copyright 2021 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * @license\n * Copyright 2020 Google LLC. 
All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n/**\n * @license\n * Copyright 2019 Google LLC. All Rights Reserved.\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may not use this file except in compliance with the License.\n * You may obtain a copy of the License at\n *\n * http://www.apache.org/licenses/LICENSE-2.0\n *\n * Unless required by applicable law or agreed to in writing, software\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n * See the License for the specific language governing permissions and\n * limitations under the License.\n * =============================================================================\n */\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {env, InferenceSession} from 'onnxruntime-common';\n\nimport {OrtWasmMessage, SerializableInternalBuffer, SerializableSessionMetadata, SerializableTensorMetadata, TensorMetadata} from './proxy-messages';\nimport * as core from './wasm-core-impl';\nimport {initializeWebAssembly} from './wasm-factory';\n\nconst isProxy = (): boolean => !!env.wasm.proxy && typeof document !== 'undefined';\nlet proxyWorker: Worker|undefined;\nlet initializing = false;\nlet initialized = false;\nlet aborted = false;\n\ntype PromiseCallbacks = [resolve: (result: T) => void, reject: (reason: unknown) => void];\nlet initWasmCallbacks: PromiseCallbacks;\nconst queuedCallbacks: Map>> = new Map();\n\nconst enqueueCallbacks = (type: OrtWasmMessage['type'], callbacks: PromiseCallbacks): void => {\n const queue = queuedCallbacks.get(type);\n if (queue) {\n queue.push(callbacks);\n } else {\n queuedCallbacks.set(type, [callbacks]);\n }\n};\n\nconst ensureWorker = (): void => {\n if (initializing || !initialized || aborted || !proxyWorker) {\n throw new Error('worker not ready');\n }\n};\n\nconst onProxyWorkerMessage = (ev: MessageEvent): void => {\n switch (ev.data.type) {\n case 'init-wasm':\n initializing = false;\n if (ev.data.err) {\n aborted = true;\n initWasmCallbacks[1](ev.data.err);\n } else {\n initialized = true;\n initWasmCallbacks[0]();\n }\n break;\n case 'init-ep':\n case 'copy-from':\n case 'create':\n case 'release':\n case 'run':\n case 'end-profiling': {\n const callbacks = queuedCallbacks.get(ev.data.type)!;\n if (ev.data.err) {\n callbacks.shift()![1](ev.data.err);\n } else {\n callbacks.shift()![0](ev.data.out!);\n }\n break;\n }\n default:\n }\n};\n\nconst scriptSrc = typeof document !== 'undefined' ? 
(document?.currentScript as HTMLScriptElement)?.src : undefined;\n\nexport const initializeWebAssemblyAndOrtRuntime = async(): Promise => {\n if (initialized) {\n return;\n }\n if (initializing) {\n throw new Error('multiple calls to \\'initWasm()\\' detected.');\n }\n if (aborted) {\n throw new Error('previous call to \\'initWasm()\\' failed.');\n }\n\n initializing = true;\n\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n // overwrite wasm filepaths\n if (env.wasm.wasmPaths === undefined) {\n if (scriptSrc && scriptSrc.indexOf('blob:') !== 0) {\n env.wasm.wasmPaths = scriptSrc.substr(0, +(scriptSrc).lastIndexOf('/') + 1);\n }\n }\n\n return new Promise((resolve, reject) => {\n proxyWorker?.terminate();\n\n const workerUrl = URL.createObjectURL(new Blob(\n [\n // This require() function is handled by esbuild plugin to load file content as string.\n // eslint-disable-next-line @typescript-eslint/no-require-imports\n require('./proxy-worker/main')\n ],\n {type: 'text/javascript'}));\n proxyWorker = new Worker(workerUrl, {name: 'ort-wasm-proxy-worker'});\n proxyWorker.onerror = (ev: ErrorEvent) => reject(ev);\n proxyWorker.onmessage = onProxyWorkerMessage;\n URL.revokeObjectURL(workerUrl);\n initWasmCallbacks = [resolve, reject];\n const message: OrtWasmMessage = {type: 'init-wasm', in : env};\n proxyWorker.postMessage(message);\n });\n\n } else {\n try {\n await initializeWebAssembly(env.wasm);\n await core.initRuntime(env);\n initialized = true;\n } catch (e) {\n aborted = true;\n throw e;\n } finally {\n initializing = false;\n }\n }\n};\n\nexport const initializeOrtEp = async(epName: string): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('init-ep', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'init-ep', in : {epName, env}};\n proxyWorker!.postMessage(message);\n });\n } else {\n await core.initEp(env, epName);\n }\n};\n\nexport const copyFromExternalBuffer = async(buffer: Uint8Array): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('copy-from', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'copy-from', in : {buffer}};\n proxyWorker!.postMessage(message, [buffer.buffer]);\n });\n } else {\n return core.copyFromExternalBuffer(buffer);\n }\n};\n\nexport const createSession =\n async(model: SerializableInternalBuffer|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n // check unsupported options\n if (options?.preferredOutputLocation) {\n throw new Error('session option \"preferredOutputLocation\" is not supported for proxy.');\n }\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('create', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'create', in : {model, options: {...options}}};\n const transferable: Transferable[] = [];\n if (model instanceof Uint8Array) {\n transferable.push(model.buffer);\n }\n proxyWorker!.postMessage(message, transferable);\n });\n } else {\n return core.createSession(model, options);\n }\n };\n\nexport const releaseSession = async(sessionId: number): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('release', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'release', in : sessionId};\n 
proxyWorker!.postMessage(message);\n });\n } else {\n core.releaseSession(sessionId);\n }\n};\n\nexport const run = async(\n sessionId: number, inputIndices: number[], inputs: TensorMetadata[], outputIndices: number[],\n outputs: Array, options: InferenceSession.RunOptions): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n // check inputs location\n if (inputs.some(t => t[3] !== 'cpu')) {\n throw new Error('input tensor on GPU is not supported for proxy.');\n }\n // check outputs location\n if (outputs.some(t => t)) {\n throw new Error('pre-allocated output tensor is not supported for proxy.');\n }\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('run', [resolve, reject]);\n const serializableInputs = inputs as SerializableTensorMetadata[]; // every input is on CPU.\n const message: OrtWasmMessage =\n {type: 'run', in : {sessionId, inputIndices, inputs: serializableInputs, outputIndices, options}};\n proxyWorker!.postMessage(message, core.extractTransferableBuffers(serializableInputs));\n });\n } else {\n return core.run(sessionId, inputIndices, inputs, outputIndices, outputs, options);\n }\n};\n\nexport const endProfiling = async(sessionId: number): Promise => {\n if (!BUILD_DEFS.DISABLE_WASM_PROXY && isProxy()) {\n ensureWorker();\n return new Promise((resolve, reject) => {\n enqueueCallbacks('end-profiling', [resolve, reject]);\n const message: OrtWasmMessage = {type: 'end-profiling', in : sessionId};\n proxyWorker!.postMessage(message);\n });\n } else {\n core.endProfiling(sessionId);\n }\n};\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {InferenceSession, InferenceSessionHandler, SessionHandler, Tensor, TRACE_FUNC_BEGIN, TRACE_FUNC_END} from 'onnxruntime-common';\n\nimport {SerializableInternalBuffer, TensorMetadata} from './proxy-messages';\nimport {copyFromExternalBuffer, createSession, endProfiling, releaseSession, run} from './proxy-wrapper';\nimport {isGpuBufferSupportedType} from './wasm-common';\nimport {loadFile} from './wasm-utils-load-file';\n\nexport const encodeTensorMetadata = (tensor: Tensor, getName: () => string): TensorMetadata => {\n switch (tensor.location) {\n case 'cpu':\n return [tensor.type, tensor.dims, tensor.data, 'cpu'];\n case 'gpu-buffer':\n return [tensor.type, tensor.dims, {gpuBuffer: tensor.gpuBuffer}, 'gpu-buffer'];\n default:\n throw new Error(`invalid data location: ${tensor.location} for ${getName()}`);\n }\n};\n\nexport const decodeTensorMetadata = (tensor: TensorMetadata): Tensor => {\n switch (tensor[3]) {\n case 'cpu':\n return new Tensor(tensor[0], tensor[2], tensor[1]);\n case 'gpu-buffer': {\n const dataType = tensor[0];\n if (!isGpuBufferSupportedType(dataType)) {\n throw new Error(`not supported data type: ${dataType} for deserializing GPU tensor`);\n }\n const {gpuBuffer, download, dispose} = tensor[2];\n return Tensor.fromGpuBuffer(gpuBuffer, {dataType, dims: tensor[1], download, dispose});\n }\n default:\n throw new Error(`invalid data location: ${tensor[3]}`);\n }\n};\n\nexport class OnnxruntimeWebAssemblySessionHandler implements InferenceSessionHandler {\n private sessionId: number;\n\n inputNames: string[];\n outputNames: string[];\n\n async fetchModelAndCopyToWasmMemory(path: string): Promise {\n // fetch model from url and move to wasm heap.\n return copyFromExternalBuffer(await loadFile(path));\n }\n\n async loadModel(pathOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions): Promise {\n 
TRACE_FUNC_BEGIN();\n let model: Parameters[0];\n\n if (typeof pathOrBuffer === 'string') {\n if (typeof process !== 'undefined' && process.versions && process.versions.node) {\n // node\n model = await loadFile(pathOrBuffer);\n } else {\n // browser\n // fetch model and copy to wasm heap.\n model = await this.fetchModelAndCopyToWasmMemory(pathOrBuffer);\n }\n } else {\n model = pathOrBuffer;\n }\n\n [this.sessionId, this.inputNames, this.outputNames] = await createSession(model, options);\n TRACE_FUNC_END();\n }\n\n async dispose(): Promise {\n return releaseSession(this.sessionId);\n }\n\n async run(feeds: SessionHandler.FeedsType, fetches: SessionHandler.FetchesType, options: InferenceSession.RunOptions):\n Promise {\n TRACE_FUNC_BEGIN();\n const inputArray: Tensor[] = [];\n const inputIndices: number[] = [];\n Object.entries(feeds).forEach(kvp => {\n const name = kvp[0];\n const tensor = kvp[1];\n const index = this.inputNames.indexOf(name);\n if (index === -1) {\n throw new Error(`invalid input '${name}'`);\n }\n inputArray.push(tensor);\n inputIndices.push(index);\n });\n\n const outputArray: Array = [];\n const outputIndices: number[] = [];\n Object.entries(fetches).forEach(kvp => {\n const name = kvp[0];\n const tensor = kvp[1];\n const index = this.outputNames.indexOf(name);\n if (index === -1) {\n throw new Error(`invalid output '${name}'`);\n }\n outputArray.push(tensor);\n outputIndices.push(index);\n });\n\n const inputs =\n inputArray.map((t, i) => encodeTensorMetadata(t, () => `input \"${this.inputNames[inputIndices[i]]}\"`));\n const outputs = outputArray.map(\n (t, i) => t ? encodeTensorMetadata(t, () => `output \"${this.outputNames[outputIndices[i]]}\"`) : null);\n\n const results = await run(this.sessionId, inputIndices, inputs, outputIndices, outputs, options);\n\n const resultMap: SessionHandler.ReturnType = {};\n for (let i = 0; i < results.length; i++) {\n resultMap[this.outputNames[outputIndices[i]]] = outputArray[i] ?? decodeTensorMetadata(results[i]);\n }\n TRACE_FUNC_END();\n return resultMap;\n }\n\n startProfiling(): void {\n // TODO: implement profiling\n }\n\n endProfiling(): void {\n void endProfiling(this.sessionId);\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {cpus} from 'node:os';\nimport {Backend, env, InferenceSession, InferenceSessionHandler} from 'onnxruntime-common';\n\nimport {initializeOrtEp, initializeWebAssemblyAndOrtRuntime} from './wasm/proxy-wrapper';\nimport {OnnxruntimeWebAssemblySessionHandler} from './wasm/session-handler-inference';\n\n/**\n * This function initializes all flags for WebAssembly.\n *\n * Those flags are accessible from `ort.env.wasm`. 
Users are allow to set those flags before the first inference session\n * being created, to override default value.\n */\nexport const initializeFlags = (): void => {\n if (typeof env.wasm.initTimeout !== 'number' || env.wasm.initTimeout < 0) {\n env.wasm.initTimeout = 0;\n }\n\n if (typeof env.wasm.simd !== 'boolean') {\n env.wasm.simd = true;\n }\n\n if (typeof env.wasm.proxy !== 'boolean') {\n env.wasm.proxy = false;\n }\n\n if (typeof env.wasm.trace !== 'boolean') {\n env.wasm.trace = false;\n }\n\n if (typeof env.wasm.numThreads !== 'number' || !Number.isInteger(env.wasm.numThreads) || env.wasm.numThreads <= 0) {\n // Web: when crossOriginIsolated is false, SharedArrayBuffer is not available so WebAssembly threads will not work.\n // Node.js: onnxruntime-web does not support multi-threads in Node.js.\n if ((typeof self !== 'undefined' && !self.crossOriginIsolated) ||\n (typeof process !== 'undefined' && process.versions && process.versions.node)) {\n env.wasm.numThreads = 1;\n }\n const numCpuLogicalCores = typeof navigator === 'undefined' ? cpus().length : navigator.hardwareConcurrency;\n env.wasm.numThreads = Math.min(4, Math.ceil((numCpuLogicalCores || 1) / 2));\n }\n};\n\nexport class OnnxruntimeWebAssemblyBackend implements Backend {\n /**\n * This function initializes the WebAssembly backend.\n *\n * This function will be called only once for each backend name. It will be called the first time when\n * `ort.InferenceSession.create()` is called with a registered backend name.\n *\n * @param backendName - the registered backend name.\n */\n async init(backendName: string): Promise {\n // populate wasm flags\n initializeFlags();\n\n // init wasm\n await initializeWebAssemblyAndOrtRuntime();\n\n // performe EP specific initialization\n await initializeOrtEp(backendName);\n }\n createInferenceSessionHandler(path: string, options?: InferenceSession.SessionOptions):\n Promise;\n createInferenceSessionHandler(buffer: Uint8Array, options?: InferenceSession.SessionOptions):\n Promise;\n async createInferenceSessionHandler(pathOrBuffer: string|Uint8Array, options?: InferenceSession.SessionOptions):\n Promise {\n const handler = new OnnxruntimeWebAssemblySessionHandler();\n await handler.loadModel(pathOrBuffer, options);\n return Promise.resolve(handler);\n }\n}\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\nimport {OnnxruntimeWebAssemblyBackend} from './backend-wasm';\nexport const wasmBackend = new OnnxruntimeWebAssemblyBackend();\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n/* eslint-disable @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports */\n// We use \"require\" instead of \"import\" here because import statement must be put in top level. Our current code does\n// not allow bundler to tree-shaking code as expected because some codes are treated as having side effects.\n// So we import code inside the if-clause to allow bundler remove the code safely.\n\nexport * from 'onnxruntime-common';\nimport * as ort from 'onnxruntime-common';\nexport default ort;\n\nimport {registerBackend, env} from 'onnxruntime-common';\nimport {version} from './version';\n\nif (!BUILD_DEFS.DISABLE_WEBGL) {\n const onnxjsBackend = require('./backend-onnxjs').onnxjsBackend;\n registerBackend('webgl', onnxjsBackend, -10);\n}\n\nif (!BUILD_DEFS.DISABLE_WASM) {\n const wasmBackend = BUILD_DEFS.DISABLE_TRAINING ? 
require('./backend-wasm-inference').wasmBackend :\n require('./backend-wasm-training').wasmBackend;\n if (!BUILD_DEFS.DISABLE_WEBGPU) {\n registerBackend('webgpu', wasmBackend, 5);\n registerBackend('webnn', wasmBackend, 5);\n }\n registerBackend('cpu', wasmBackend, 10);\n registerBackend('wasm', wasmBackend, 10);\n}\n\nObject.defineProperty(env.versions, 'web', {value: version, enumerable: true});\n", "// Copyright (c) Microsoft Corporation. All rights reserved.\n// Licensed under the MIT License.\n\n// This file is generated by /js/scripts/update-version.ts\n// Do not modify file content manually.\n\nexport const version = '1.18.0';\n"], + "mappings": ";;;;;ygBAAA,IAgBMA,GACAC,GAYOC,GAwCPC,GAwCOC,GA7GbC,GAAAC,GAAA,kBAgBMN,GAAqC,IAAI,IACzCC,GAAqC,CAAA,EAY9BC,GAAkB,CAACK,EAAcC,EAAkBC,IAA0B,CACxF,GAAID,GAAW,OAAOA,EAAQ,MAAS,YAAc,OAAOA,EAAQ,+BAAkC,WAAY,CAChH,IAAME,EAAiBV,GAAS,IAAIO,CAAI,EACxC,GAAIG,IAAmB,OACrBV,GAAS,IAAIO,EAAM,CAAC,QAAAC,EAAS,SAAAC,CAAQ,CAAC,MACjC,IAAIC,EAAe,SAAWD,EAEnC,OACK,GAAIC,EAAe,WAAaD,GACjCC,EAAe,UAAYF,EAC7B,MAAM,IAAI,MAAM,4BAA4BD,CAAI,oBAAoBE,CAAQ,EAAE,EAIlF,GAAIA,GAAY,EAAG,CACjB,IAAME,EAAIV,GAAyB,QAAQM,CAAI,EAC3CI,IAAM,IACRV,GAAyB,OAAOU,EAAG,CAAC,EAGtC,QAASA,EAAI,EAAGA,EAAIV,GAAyB,OAAQU,IACnD,GAAIX,GAAS,IAAIC,GAAyBU,CAAC,CAAC,EAAG,UAAYF,EAAU,CACnER,GAAyB,OAAOU,EAAG,EAAGJ,CAAI,EAC1C,OAGJN,GAAyB,KAAKM,CAAI,EAEpC,OAGF,MAAM,IAAI,UAAU,qBAAqB,CAC3C,EAQMJ,GAAiC,MAAMS,GAAgD,CAC3F,IAAMC,EAAcb,GAAS,IAAIY,CAAW,EAC5C,GAAI,CAACC,EACH,MAAO,qBAGT,GAAIA,EAAY,YACd,OAAOA,EAAY,QACd,GAAIA,EAAY,QACrB,OAAOA,EAAY,MACd,CACL,IAAMC,EAAiB,CAAC,CAACD,EAAY,YACrC,GAAI,CACF,OAAKC,IACHD,EAAY,YAAcA,EAAY,QAAQ,KAAKD,CAAW,GAEhE,MAAMC,EAAY,YAClBA,EAAY,YAAc,GACnBA,EAAY,cACZE,EAAG,CACV,OAAKD,IACHD,EAAY,MAAQ,GAAGE,CAAC,GACxBF,EAAY,QAAU,IAEjBA,EAAY,cAEnB,OAAOA,EAAY,aAGzB,EAWaT,GAAsC,MAAMY,GACmB,CAEtE,IAAMC,EAAMD,EAAQ,oBAAsB,CAAA,EACpCE,EAAeD,EAAI,IAAIN,GAAK,OAAOA,GAAM,SAAWA,EAAIA,EAAE,IAAI,EAC9DQ,EAAeD,EAAa,SAAW,EAAIjB,GAA2BiB,EAGxEV,EACEY,EAAS,CAAA,EACTC,EAAwB,IAAI,IAClC,QAAWT,KAAeO,EAAc,CACtC,IAAMG,EAAgB,MAAMnB,GAA+BS,CAAW,EAClE,OAAOU,GAAkB,SAC3BF,EAAO,KAAK,CAAC,KAAMR,EAAa,IAAKU,CAAa,CAAC,GAE9Cd,IACHA,EAAUc,GAERd,IAAYc,GACdD,EAAsB,IAAIT,CAAW,GAM3C,GAAI,CAACJ,EACH,MAAM,IAAI,MAAM,oCAAoCY,EAAO,IAAIL,GAAK,IAAIA,EAAE,IAAI,KAAKA,EAAE,GAAG,EAAE,EAAE,KAAK,IAAI,CAAC,EAAE,EAI1G,OAAW,CAAC,KAAAR,EAAM,IAAAgB,CAAG,IAAKH,EACpBF,EAAa,SAASX,CAAI,GAE5B,QAAQ,KAAK,0CACTA,CAAI,uDAAuDgB,CAAG,EAAE,EAIxE,IAAMC,EAAcP,EAAI,OAAON,GAAKU,EAAsB,IAAI,OAAOV,GAAM,SAAWA,EAAIA,EAAE,IAAI,CAAC,EAEjG,MAAO,CACLH,EAAS,IAAI,MAAMQ,EAAS,CAC1B,IAAK,CAACS,EAAQC,IACRA,IAAS,qBACJF,EAEF,QAAQ,IAAIC,EAAQC,CAAI,EAElC,EAEL,IChKJ,IAAAC,GAAAC,GAAA,kBAoFAC,OCpFA,IAMaC,GANbC,GAAAC,GAAA,kBAMaF,GAAU,WCNvB,IAQIG,GAESC,GAVbC,GAAAC,GAAA,kBAIAC,KAIIJ,GAAwC,UAE/BC,GAAW,CACtB,KAAM,CAAA,EACN,MAAO,CAAA,EACP,OAAQ,CAAA,EACR,SAAU,CAAC,OAAQI,EAAO,EAE1B,IAAI,SAASC,EAAmB,CAC9B,GAAIA,IAAU,OAGd,IAAI,OAAOA,GAAU,UAAY,CAAC,UAAW,OAAQ,UAAW,QAAS,OAAO,EAAE,QAAQA,CAAK,IAAM,GACnG,MAAM,IAAI,MAAM,8BAA8BA,CAAK,EAAE,EAEvDN,GAAgBM,EAClB,EACA,IAAI,UAAQ,CACV,OAAON,EACT,GAIF,OAAO,eAAeC,GAAK,WAAY,CAAC,WAAY,EAAI,CAAC,IC/BzD,IAgQaM,GAhQbC,GAAAC,GAAA,kBAGAC,KA6PaH,GAAWA,KChQxB,IASaI,GA+FAC,GAxGbC,GAAAC,GAAA,kBASaH,GAAkB,CAACI,EAAgBC,IAA4C,CAC1F,IAAMC,EAAS,OAAO,SAAa,IAAc,SAAS,cAAc,QAAQ,EAAK,IAAI,gBAAgB,EAAG,CAAC,EAC7GA,EAAO,MAAQF,EAAO,KAAK,CAAC,EAC5BE,EAAO,OAASF,EAAO,KAAK,CAAC,EAC7B,IAAMG,EACFD,EAAO,WAAW,IAAI,EAE1B,GAAIC,GAAmB,KAAM,CAE3B,IAAIC,EACAC,EACAJ,GAAS,eAAiB,QAAaA,EAAQ,eAAiB,QAClEG,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,IAEtBI,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,GAGxB,I
AAMM,EAAcL,GAAS,SAAW,OAAYA,EAAQ,OAAS,MAE/DM,EAAON,GAAS,KAClBO,EACAC,EACAF,IAAS,QAAaA,EAAK,OAAS,OACtCC,EAAW,CAAC,IAAK,IAAK,IAAK,GAAG,EAE1B,OAAQD,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDC,EAAW,CAACD,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBC,EAAS,CAAC,EAAID,EAAK,KAAK,CAAC,IAI3BA,IAAS,QAAaA,EAAK,OAAS,OACtCE,EAAW,CAAC,EAAG,EAAG,EAAG,CAAC,EAElB,OAAQF,EAAK,MAAU,SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDE,EAAW,CAACF,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBE,EAAS,CAAC,EAAIF,EAAK,KAAK,CAAC,IAK/B,IAAMG,EAASL,EAASD,EAEpBO,EAAiB,EAAGC,EAAiBF,EAAQG,EAAiBH,EAAS,EAAGI,EAAiB,GAG3FR,IAAgB,QAClBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,EAC1BI,EAAiBJ,EAAS,GACjBJ,IAAgB,OACzBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,GACjBJ,IAAgB,QACzBK,EAAiB,EACjBE,EAAiBH,EACjBE,EAAiBF,EAAS,GAG5B,QAASK,EAAI,EAAGA,EAAIV,EAAQU,IAC1B,QAASC,EAAI,EAAGA,EAAIZ,EAAOY,IAAK,CAC9B,IAAM,GAAMhB,EAAO,KAAKW,GAAgB,EAAeF,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1ES,GAAMjB,EAAO,KAAKY,GAAgB,EAAeH,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EU,GAAMlB,EAAO,KAAKa,GAAgB,EAAeJ,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC1EW,EAAIL,IAAmB,GACzB,KACEd,EAAO,KAAKc,GAAgB,EAAeL,EAAS,CAAC,GAAKD,EAAS,CAAC,EAE1EL,EAAgB,UAAY,QAAU,EAAI,IAAMc,EAAI,IAAMC,EAAI,IAAMC,EAAI,IACxEhB,EAAgB,SAASa,EAAGD,EAAG,EAAG,CAAC,EAGvC,GAAI,cAAeb,EACjB,OAAOA,EAAO,UAAS,EAEvB,MAAM,IAAI,MAAM,4BAA4B,MAG9C,OAAM,IAAI,MAAM,2BAA2B,CAE/C,EAKaL,GAAoB,CAACG,EAAgBC,IAAiD,CACjG,IAAME,EAAkB,OAAO,SAAa,IACxC,SAAS,cAAc,QAAQ,EAAE,WAAW,IAAI,EAChD,IAAI,gBAAgB,EAAG,CAAC,EAAE,WAAW,IAAI,EACzCiB,EACJ,GAAIjB,GAAmB,KAAM,CAE3B,IAAIC,EACAC,EACAgB,EACApB,GAAS,eAAiB,QAAaA,EAAQ,eAAiB,QAClEG,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,EACtBqB,EAAWrB,EAAO,KAAK,CAAC,IAExBI,EAAQJ,EAAO,KAAK,CAAC,EACrBK,EAASL,EAAO,KAAK,CAAC,EACtBqB,EAAWrB,EAAO,KAAK,CAAC,GAE1B,IAAMM,EAAcL,IAAY,QAAaA,EAAQ,SAAW,OAAYA,EAAQ,OAAkB,MAEhGM,EAAON,GAAS,KAClBO,EACAC,EACAF,IAAS,QAAaA,EAAK,OAAS,OACtCC,EAAW,CAAC,IAAK,IAAK,IAAK,GAAG,EAE1B,OAAQD,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDC,EAAW,CAACD,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,GAAG,EACrDA,EAAK,KAAK,CAAC,IAAM,SACnBC,EAAS,CAAC,EAAID,EAAK,KAAK,CAAC,IAI3BA,IAAS,QAAaA,EAAK,OAAS,OACtCE,EAAW,CAAC,EAAG,EAAG,EAAG,CAAC,EAElB,OAAQF,EAAK,MAAU,SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,GAEtDE,EAAW,CAACF,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAGA,EAAK,KAAK,CAAC,EAAG,CAAC,EACnDA,EAAK,KAAK,CAAC,IAAM,SACnBE,EAAS,CAAC,EAAIF,EAAK,KAAK,CAAC,IAK/B,IAAMG,EAASL,EAASD,EACxB,GAAIH,IAAY,SACVA,EAAQ,SAAW,QAAcoB,IAAa,GAAKpB,EAAQ,SAAW,QACrEoB,IAAa,GAAMpB,EAAQ,SAAW,OAASA,EAAQ,SAAW,OACrE,MAAM,IAAI,MAAM,+CAAgD,EAKpE,IAAMqB,EAAO,EACTC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EACzEf,EAAiB,EAAGC,EAAiBF,EAAQG,EAAiBH,EAAS,EAAGI,EAAiB,GAG3FR,IAAgB,QAClBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,EAC1BI,EAAiBJ,EAAS,GACjBJ,IAAgB,OACzBK,EAAiB,EACjBC,EAAiBF,EACjBG,EAAiBH,EAAS,GACjBJ,IAAgB,QACzBK,EAAiB,EACjBE,EAAiBH,EACjBE,EAAiBF,EAAS,GAG5BU,EAAQjB,EAAgB,gBAAgBC,EAAOC,CAAM,EAErD,QAASU,EAAI,EAAGA,EAAIV,EAASD,EACxBmB,GAAiBD,EAAME,GAAiBF,EAAMG,GAAiBH,EAAMI,GAAiBJ,EAAMP,IAC/FK,EAAM,KAAKG,CAAa,GAAMvB,EAAO,KAAKW,GAAgB,EAAeF,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGY,EAAM,KAAKI,CAAa,GAAMxB,EAAO,KAAKY,GAAgB,EAAeH,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGY,EAAM,KAAKK,CAAa,GAAMzB,EAAO,KAAKa,GAAgB,EAAeJ,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClGY,EAAM,KAAKM,CAAa,EAAIZ,IAAmB,GAC3C,KACEd,EAAO,KAAKc,GAAgB,EAAeL,EAAS,CAAC,GAAKD,EAAS,CAAC,MAI5E,OAAM,IAAI,MAAM,2BAA2B,EAE7C,OAA
OY,CACT,ICtMA,IAiBaO,GAkFAC,GAgKAC,GAWAC,GASAC,GAvRbC,GAAAC,GAAA,kBAIAC,KAaaP,GAAiB,CAACQ,EAAqCC,IAA0C,CAC5G,GAAID,IAAW,OACb,MAAM,IAAI,MAAM,8BAA8B,EAEhD,GAAIC,EAAQ,SAAW,QAAaA,EAAQ,QAAU,OACpD,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAQ,eAAiB,OAC3B,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAM,CAAC,OAAAC,EAAQ,MAAAC,CAAK,EAAIF,EAElBG,EAAOH,EAAQ,MAAQ,CAAC,KAAM,IAAK,KAAM,CAAC,EAC5CI,EACAC,EAEA,OAAQF,EAAK,MAAU,SACzBC,EAAW,CAACD,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,EAEtDC,EAAW,CAACD,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,GAAK,GAAG,EAG3E,OAAQA,EAAK,MAAU,SACzBE,EAAW,CAACF,EAAK,KAAMA,EAAK,KAAMA,EAAK,KAAMA,EAAK,IAAI,EAEtDE,EAAW,CAACF,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,EAAGA,EAAK,KAAM,CAAC,GAAK,CAAC,EAG7E,IAAMG,EAAcN,EAAQ,SAAW,OAAYA,EAAQ,OAAS,OAG9DO,EACFP,EAAQ,eAAiB,QAAaA,EAAQ,eAAiB,OAAYA,EAAQ,aAAwB,MACzGQ,EAASP,EAASC,EAClBO,EAAcF,IAAiB,OAAS,IAAI,aAAaC,EAAS,CAAC,EAAI,IAAI,aAAaA,EAAS,CAAC,EAGpGE,EAAO,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EAAGC,EAAgB,EACnFC,EAAiB,EAAGC,EAAiBR,EAAQS,EAAiBT,EAAS,EAAGU,EAAiB,GAG3FZ,IAAgB,QAClBI,EAAO,EACPC,EAAgB,EAChBC,EAAgB,EAChBC,EAAgB,EAChBC,EAAgB,IAIdP,IAAiB,OACnBW,EAAiBV,EAAS,EACjBD,IAAiB,OAC1BQ,EAAiB,EACjBE,EAAiBT,EACjBQ,EAAiBR,EAAS,GACjBD,IAAiB,QAC1BU,EAAiB,EACjBD,EAAiBR,EACjBO,EAAiBP,EAAS,GAG5B,QAASW,EAAI,EAAGA,EAAIX,EACfW,IAAKR,GAAiBD,EAAMG,GAAiBH,EAAME,GAAiBF,EAAMI,GAAiBJ,EAC9FD,EAAYM,GAAgB,GAAKhB,EAAOY,CAAa,EAAIN,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClFK,EAAYO,GAAgB,GAAKjB,EAAOa,CAAa,EAAIP,EAAS,CAAC,GAAKD,EAAS,CAAC,EAClFK,EAAYQ,GAAgB,GAAKlB,EAAOc,CAAa,EAAIR,EAAS,CAAC,GAAKD,EAAS,CAAC,EAC9Ec,IAAmB,IAAMJ,IAAkB,KAC7CL,EAAYS,GAAgB,GAAKnB,EAAOe,CAAa,EAAIT,EAAS,CAAC,GAAKD,EAAS,CAAC,GAOtF,OAFqBG,IAAiB,OAAS,IAAIa,GAAO,UAAWX,EAAa,CAAC,EAAG,EAAGR,EAAQC,CAAK,CAAC,EACxD,IAAIkB,GAAO,UAAWX,EAAa,CAAC,EAAG,EAAGR,EAAQC,CAAK,CAAC,CAEzG,EAKaV,GAAkB,MAC3B6B,EACArB,IACyC,CAE3C,IAAMsB,EAAiB,OAAQ,iBAAsB,KAAeD,aAAiB,iBAC/EE,EAAiB,OAAQ,UAAe,KAAeF,aAAiB,UACxEG,EAAgB,OAAQ,YAAiB,KAAeH,aAAiB,YACzEI,EAAW,OAAOJ,GAAU,SAE9BK,EACAC,EAA+C3B,GAAW,CAAA,EAExD4B,EAAe,IAAK,CACxB,GAAI,OAAO,SAAa,IACtB,OAAO,SAAS,cAAc,QAAQ,EACjC,GAAI,OAAO,gBAAoB,IACpC,OAAO,IAAI,gBAAgB,EAAG,CAAC,EAE/B,MAAM,IAAI,MAAM,yBAAyB,CAE7C,EACMC,EAAuBC,GACvBA,aAAkB,mBAEXA,aAAkB,gBADpBA,EAAO,WAAW,IAAI,EAItB,KAIX,GAAIR,EAAgB,CAElB,IAAMQ,EAASF,EAAY,EAC3BE,EAAO,MAAQT,EAAM,MACrBS,EAAO,OAAST,EAAM,OACtB,IAAMU,EAAkBF,EAAoBC,CAAM,EAElD,GAAIC,GAAmB,KAAM,CAC3B,IAAI9B,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,MAMlB,GALIrB,IAAY,QAAaA,EAAQ,gBAAkB,QAAaA,EAAQ,eAAiB,SAC3FC,EAASD,EAAQ,cACjBE,EAAQF,EAAQ,cAGdA,IAAY,OAAW,CAEzB,GADA2B,EAAwB3B,EACpBA,EAAQ,eAAiB,OAC3B,MAAM,IAAI,MAAM,6DAA6D,EAE7E2B,EAAsB,aAAe,OAEvCA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,OAE9ByB,EAAsB,aAAe,OACrCA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EAGhC6B,EAAgB,UAAUV,EAAO,EAAG,CAAC,EACrCK,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,SAEzD,OAAM,IAAI,MAAM,2BAA2B,UAEpCsB,EAAgB,CACzB,IAAItB,EACAC,EAiBJ,GAfIF,IAAY,QAAaA,EAAQ,eAAiB,QAAaA,EAAQ,gBAAkB,QAC3FC,EAASD,EAAQ,cACjBE,EAAQF,EAAQ,eAEhBC,EAASoB,EAAM,OACfnB,EAAQmB,EAAM,OAGZrB,IAAY,SACd2B,EAAwB3B,GAE1B2B,EAAsB,OAAS,OAC/BA,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EAE1BF,IAAY,OAAW,CACzB,IAAMgC,EAAaJ,EAAY,EAE/BI,EAAW,MAAQ9B,EACnB8B,EAAW,OAAS/B,EAEpB,IAAM8B,EAAkBF,EAAoBG,CAAU,EAEtD,GAAID,GAAmB,KACrBA,EAAgB,aAAaV,EAAO,EAAG,CAAC,EACxCK,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,SAEzD,OAAM,IAAI,MAAM,2BAA2B,OAG7CyB,EAAOL,EAAM,aAENG,EAAe,CAExB,GAAIxB,IAAY,OACd,MAAM,IAAI,MAAM,yDAAyD,EAG3E,IAAM8B,EAASF,EAAY,EAC3BE,EAAO,MAAQT,EAAM,MACrBS,EAAO,OAAST,EAAM,OACtB,IAAMU,EAAkBF,EAAoBC,CAAM,EAElD,GAAIC,GAAmB,KAAM,CAC3B,IAAM9B,EAASoB,EAAM,OACfnB,EAAQmB
,EAAM,MACpB,OAAAU,EAAgB,UAAUV,EAAO,EAAG,EAAGnB,EAAOD,CAAM,EACpDyB,EAAOK,EAAgB,aAAa,EAAG,EAAG7B,EAAOD,CAAM,EAAE,KACzD0B,EAAsB,OAAS1B,EAC/B0B,EAAsB,MAAQzB,EACvBX,GAAemC,EAAMC,CAAqB,MAEjD,OAAM,IAAI,MAAM,2BAA2B,MAExC,IAAIF,EACT,OAAO,IAAI,QAAQ,CAACQ,EAASC,IAAU,CACrC,IAAMJ,EAASF,EAAY,EACrBO,EAAUN,EAAoBC,CAAM,EAC1C,GAAI,CAACT,GAAS,CAACc,EACb,OAAOD,EAAM,EAEf,IAAME,EAAW,IAAI,MACrBA,EAAS,YAAc,YACvBA,EAAS,IAAMf,EACfe,EAAS,OAAS,IAAK,CACrBN,EAAO,MAAQM,EAAS,MACxBN,EAAO,OAASM,EAAS,OACzBD,EAAQ,UAAUC,EAAU,EAAG,EAAGN,EAAO,MAAOA,EAAO,MAAM,EAC7D,IAAMO,EAAMF,EAAQ,aAAa,EAAG,EAAGL,EAAO,MAAOA,EAAO,MAAM,EAElEH,EAAsB,OAASG,EAAO,OACtCH,EAAsB,MAAQG,EAAO,MACrCG,EAAQ1C,GAAe8C,EAAI,KAAMV,CAAqB,CAAC,CACzD,CACF,CAAC,EAED,MAAM,IAAI,MAAM,gEAAgE,EAGlF,GAAID,IAAS,OACX,OAAOnC,GAAemC,EAAMC,CAAqB,EAEjD,MAAM,IAAI,MAAM,gEAAgE,CAEpF,EAKalC,GAAoB,CAC7B6C,EAAsCtC,IAAgD,CACxF,GAAM,CAAC,MAAAE,EAAO,OAAAD,EAAQ,SAAAsC,EAAU,QAAAC,CAAO,EAAIxC,EAErCyC,EAAO,CAAC,EAAGxC,EAAQC,EAAO,CAAC,EACjC,OAAO,IAAIkB,GAAO,CAAC,SAAU,UAAW,KAAM,UAAW,QAAAkB,EAAS,KAAAG,EAAM,SAAAF,EAAU,QAAAC,CAAO,CAAC,CAC5F,EAKa9C,GAAsB,CAC/BgD,EAA0C1C,IAAkD,CAC9F,GAAM,CAAC,SAAA2C,EAAU,KAAAF,EAAM,SAAAF,EAAU,QAAAC,CAAO,EAAIxC,EAC5C,OAAO,IAAIoB,GAAO,CAAC,SAAU,aAAc,KAAMuB,GAAY,UAAW,UAAAD,EAAW,KAAAD,EAAM,SAAAF,EAAU,QAAAC,CAAO,CAAC,CAC7G,EAKa7C,GAAyB,CAClCiD,EAAS7C,EAAwC0C,IACjD,IAAIrB,GAAO,CAAC,SAAU,aAAc,KAAAwB,EAAM,KAAM7C,EAAQ,KAAM0C,GAAQ,CAAC1C,EAAO,MAAM,CAAC,CAAC,ICzR1F,IAWa8C,GAaAC,GAoBTC,GACSC,GA7CbC,GAAAC,GAAA,kBAWaL,GAAwC,IAAI,IAA6C,CACpG,CAAC,UAAW,YAAY,EACxB,CAAC,QAAS,UAAU,EACpB,CAAC,OAAQ,SAAS,EAClB,CAAC,SAAU,WAAW,EACtB,CAAC,QAAS,UAAU,EACpB,CAAC,QAAS,UAAU,EACpB,CAAC,OAAQ,UAAU,EACnB,CAAC,UAAW,YAAY,EACxB,CAAC,SAAU,WAAW,EACvB,EAGYC,GAAwC,IAAI,IAAkD,CACzG,CAAC,aAAc,SAAS,EACxB,CAAC,WAAY,OAAO,EACpB,CAAC,UAAW,MAAM,EAClB,CAAC,YAAa,QAAQ,EACtB,CAAC,WAAY,OAAO,EACpB,CAAC,WAAY,OAAO,EACpB,CAAC,aAAc,SAAS,EACxB,CAAC,YAAa,QAAQ,EACvB,EAWGC,GAAsB,GACbC,GAAkB,IAAK,CAClC,GAAI,CAACD,GAAqB,CACxBA,GAAsB,GACtB,IAAMI,EAA2B,OAAO,cAAkB,KAAe,cAAc,KACjFC,EAA4B,OAAO,eAAmB,KAAe,eAAe,KACpFC,EAA0B,OAAO,aAAiB,KAAe,aAAa,KAEhFF,IACFN,GAAsC,IAAI,QAAS,aAAa,EAChEC,GAAsC,IAAI,cAAe,OAAO,GAE9DM,IACFP,GAAsC,IAAI,SAAU,cAAc,EAClEC,GAAsC,IAAI,eAAgB,QAAQ,GAEhEO,GACFR,GAAsC,IAAI,UAAW,YAAY,EACjEC,GAAsC,IAAI,aAAc,SAAS,GAGjED,GAAsC,IAAI,UAAW,WAAW,EAGtE,ICpEA,IAWaS,GAkBAC,GA7BbC,GAAAC,GAAA,kBAIAC,KAOaJ,GAAiBK,GAAoC,CAChE,IAAIC,EAAO,EACX,QAASC,EAAI,EAAGA,EAAIF,EAAK,OAAQE,IAAK,CACpC,IAAMC,EAAMH,EAAKE,CAAC,EAClB,GAAI,OAAOC,GAAQ,UAAY,CAAC,OAAO,cAAcA,CAAG,EACtD,MAAM,IAAI,UAAU,QAAQD,CAAC,8BAA8BC,CAAG,EAAE,EAElE,GAAIA,EAAM,EACR,MAAM,IAAI,WAAW,QAAQD,CAAC,0CAA0CC,CAAG,EAAE,EAE/EF,GAAQE,EAEV,OAAOF,CACT,EAKaL,GAAgB,CAACQ,EAAgBJ,IAAmC,CAC/E,OAAQI,EAAO,SAAU,CACvB,IAAK,MACH,OAAO,IAAIC,GAAOD,EAAO,KAAMA,EAAO,KAAMJ,CAAI,EAClD,IAAK,aACH,OAAO,IAAIK,GAAO,CAChB,SAAU,aACV,KAAMD,EAAO,KACb,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,IAAK,UACH,OAAO,IAAIK,GAAO,CAChB,SAAU,UACV,QAASD,EAAO,QAChB,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,IAAK,aACH,OAAO,IAAIK,GAAO,CAChB,SAAU,aACV,UAAWD,EAAO,UAClB,KAAMA,EAAO,KACb,KAAAJ,EACD,EACH,QACE,MAAM,IAAI,MAAM,kCAAkCI,EAAO,QAAQ,mBAAmB,EAE1F,ICzDA,IAwBaE,GAxBbC,GAAAC,GAAA,kBAGAC,KAEAC,KAEAC,KACAC,KAgBaN,GAAP,KAAa,CAyCjB,YACIO,EAEAC,EAA8EC,EAAwB,CAExGC,GAAe,EAEf,IAAIC,EACAC,EAEJ,GAAI,OAAOL,GAAS,UAAY,aAAcA,EAO5C,OAHA,KAAK,aAAeA,EAAK,SACzBI,EAAOJ,EAAK,KACZK,EAAOL,EAAK,KACJA,EAAK,SAAU,CACrB,IAAK,aAAc,CACjB,IAAMM,EAAgCC,GAAsC,IAAIH,CAAI,EACpF,GAAI,CAACE,EACH,MAAM,IAAI,UAAU,qBAAqBF,CAAI,uCAAuC,EAEtF,GAAI,EAAEJ,EAAK,gBAAgBM,GACzB,MAAM,IAAI,UAAU,4BAA4BA,EAA8B,IAAI,EAAE,EAEtF,KAAK,QAAUN,EAAK,KACpB,MAEF,IAAK,UAAW,CACd,GAAII,IAAS,UACX,MAAM,IAAI,UAAU,qBAAq
BA,CAAI,iCAAiC,EAEhF,KAAK,eAAiBJ,EAAK,QAC3B,KAAK,WAAaA,EAAK,SACvB,KAAK,SAAWA,EAAK,QACrB,MAEF,IAAK,aAAc,CACjB,GAAKI,IAAS,WAAaA,IAAS,WAAaA,IAAS,SAAWA,IAAS,SAAWA,IAAS,UAC7FA,IAAS,SAAWA,IAAS,OAChC,MAAM,IAAI,UAAU,qBAAqBA,CAAI,oCAAoC,EAEnF,KAAK,cAAgBJ,EAAK,UAC1B,KAAK,WAAaA,EAAK,SACvB,KAAK,SAAWA,EAAK,QACrB,MAEF,QACE,MAAM,IAAI,MAAM,6CAA6C,KAAK,YAAY,GAAG,MAEhF,CAIL,IAAIQ,EACAC,EAEJ,GAAI,OAAOT,GAAS,SAMlB,GAFAI,EAAOJ,EACPS,EAAYP,EACRF,IAAS,SAAU,CAErB,GAAI,CAAC,MAAM,QAAQC,CAAI,EACrB,MAAM,IAAI,UAAU,gDAAiD,EAIvEO,EAAOP,MACF,CAEL,IAAMS,EAAwBH,GAAsC,IAAIP,CAAI,EAC5E,GAAIU,IAA0B,OAC5B,MAAM,IAAI,UAAU,4BAA4BV,CAAI,GAAG,EAEzD,GAAI,MAAM,QAAQC,CAAI,EAAG,CACvB,GAAID,IAAS,WAAaU,IAA0B,YAMlD,MAAM,IAAI,UACN,+FAA+F,EAC1FV,IAAS,UAAYA,IAAS,QAYvCQ,EAAQE,EAA8B,KAAKT,EAAM,MAAM,EAIvDO,EAAQE,EAA8B,KAAKT,CAAI,UAExCA,aAAgBS,EACzBF,EAAOP,MAEP,OAAM,IAAI,UAAU,KAAKG,CAAI,kCAAkCM,CAAqB,EAAE,UAO1FD,EAAYR,EACR,MAAM,QAAQD,CAAI,EAAG,CAEvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qDAAqD,EAE3E,IAAMW,EAAmB,OAAOX,EAAK,CAAC,EACtC,GAAIW,IAAqB,SACvBP,EAAO,SACPI,EAAOR,UACEW,IAAqB,UAC9BP,EAAO,OAIPI,EAAO,WAAW,KAAKR,CAAa,MAEpC,OAAM,IAAI,UAAU,uCAAuCW,CAAgB,GAAG,MAE3E,CAEL,IAAMC,EACFC,GAAsC,IAAIb,EAAK,WAA8C,EACjG,GAAIY,IAAe,OACjB,MAAM,IAAI,UAAU,qCAAqCZ,EAAK,WAAW,GAAG,EAE9EI,EAAOQ,EACPJ,EAAOR,EAKX,GAAIS,IAAc,OAEhBA,EAAY,CAACD,EAAK,MAAM,UACf,CAAC,MAAM,QAAQC,CAAS,EACjC,MAAM,IAAI,UAAU,wCAAyC,EAE/DJ,EAAOI,EAEP,KAAK,QAAUD,EACf,KAAK,aAAe,MAItB,IAAMM,EAAOC,GAAcV,CAAI,EAE/B,GAAI,KAAK,SAAWS,IAAS,KAAK,QAAQ,OACxC,MAAM,IAAI,MAAM,iBAAiBA,CAAI,gCAAgC,KAAK,QAAQ,MAAM,IAAI,EAG9F,KAAK,KAAOV,EACZ,KAAK,KAAOC,EACZ,KAAK,KAAOS,CACd,CAIA,aAAa,UACTE,EACAC,EACoB,CACtB,OAAOC,GAAgBF,EAAOC,CAAO,CACvC,CAEA,OAAO,YACHE,EAA4BF,EAAoC,CAClE,OAAOG,GAAkBD,EAASF,CAAO,CAC3C,CAEA,OAAO,cACHI,EAAgCJ,EAAsC,CACxE,OAAOK,GAAoBD,EAAWJ,CAAO,CAC/C,CAEA,OAAO,iBACHb,EAASmB,EAAwClB,EAAwB,CAC3E,OAAOmB,GAAuBpB,EAAMmB,EAAQlB,CAAI,CAClD,CAKA,UAAUY,EAAgC,CACxC,OAAOQ,GAAgB,KAAMR,CAAO,CACtC,CAEA,YAAYA,EAAkC,CAC5C,OAAOS,GAAkB,KAAMT,CAAO,CACxC,CAgDA,IAAI,MAAI,CAEN,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,QACR,MAAM,IAAI,MACN,gJAC2E,EAEjF,OAAO,KAAK,OACd,CAEA,IAAI,UAAQ,CACV,OAAO,KAAK,YACd,CAEA,IAAI,SAAO,CAET,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,eACR,MAAM,IAAI,MAAM,4CAA4C,EAE9D,OAAO,KAAK,cACd,CAEA,IAAI,WAAS,CAEX,GADA,KAAK,YAAW,EACZ,CAAC,KAAK,cACR,MAAM,IAAI,MAAM,4CAA4C,EAE9D,OAAO,KAAK,aACd,CAKA,MAAM,QAAQU,EAAqB,CAEjC,OADA,KAAK,YAAW,EACR,KAAK,aAAc,CACzB,IAAK,MACL,IAAK,aACH,OAAO,KAAK,KACd,IAAK,UACL,IAAK,aAAc,CACjB,GAAI,CAAC,KAAK,WACR,MAAM,IAAI,MAAM,qEAAqE,EAEvF,GAAI,KAAK,cACP,MAAM,IAAI,MAAM,yCAAyC,EAE3D,GAAI,CACF,KAAK,cAAgB,GACrB,IAAMnB,EAAO,MAAM,KAAK,WAAU,EAClC,YAAK,WAAa,OAClB,KAAK,aAAe,MACpB,KAAK,QAAUA,EAEXmB,GAAe,KAAK,WACtB,KAAK,SAAQ,EACb,KAAK,SAAW,QAGXnB,UAGP,KAAK,cAAgB,IAGzB,QACE,MAAM,IAAI,MAAM,kCAAkC,KAAK,YAAY,EAAE,EAE3E,CAEA,SAAO,CACL,GAAI,KAAK,cACP,MAAM,IAAI,MAAM,yCAAyC,EAGvD,KAAK,WACP,KAAK,SAAQ,EACb,KAAK,SAAW,QAElB,KAAK,QAAU,OACf,KAAK,eAAiB,OACtB,KAAK,cAAgB,OACrB,KAAK,WAAa,OAClB,KAAK,cAAgB,OAErB,KAAK,aAAe,MACtB,CAKQ,aAAW,CACjB,GAAI,KAAK,eAAiB,OACxB,MAAM,IAAI,MAAM,yBAAyB,CAE7C,CAEA,QAAQH,EAAuB,CAE7B,GADA,KAAK,YAAW,EACZ,KAAK,YAAc,KAAK,SAC1B,MAAM,IAAI,MAAM,iDAAiD,EAEnE,OAAOuB,GAAc,KAAMvB,CAAI,CACjC,KCpaF,IAwUawB,GAxUbC,GAAAC,GAAA,kBAIAC,KAoUaH,GAASA,KCxUtB,IAQaI,GAQPC,GAqBOC,GAUAC,GA/CbC,GAAAC,GAAA,kBAGAC,KAKaN,GAAQ,CAACO,EAAoBC,IAAiB,EACrD,OAAOC,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAI9D,QAAQ,UAAU,GAAGF,CAAU,UAAUC,CAAK,EAAE,CAClD,EAEMP,GAAa,CAACS,EAAaC,IAAqB,CACpD,IAAMC,EAAQ,IAAI,MAAK,EAAG,OAAO,MAAM,aAAa,GAAK,CAAA,EACrDC,EAAe,GACnB,QAASC,EAAI,EAAGA,EAAIF,EAAM,OAAQE,IAAK,CACrC,GAAID,GAAgB,CAACD,EAAME,CAAC,EAAE,SAAS,YAAY,E
AAG,CACpD,IAAIN,EAAQ,QAAQE,CAAG,KAAKE,EAAME,CAAC,EAAE,KAAI,EAAG,MAAM,GAAG,EAAE,CAAC,CAAC,GACrDH,IACFH,GAAS,KAAKG,CAAQ,IAExBX,GAAM,MAAOQ,CAAK,EAClB,OAEEI,EAAME,CAAC,EAAE,SAAS,YAAY,IAChCD,EAAe,IAGrB,EAKaX,GAAoBS,GAAqB,EAChD,OAAOF,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAG9DR,GAAW,QAASU,CAAQ,CAC9B,EAKaR,GAAkBQ,GAAqB,EAC9C,OAAOF,GAAI,MAAU,IAAc,CAACA,GAAI,KAAK,MAAQ,CAACA,GAAI,QAG9DR,GAAW,MAAOU,CAAQ,CAC5B,ICpDA,IAgBaI,GAhBbC,GAAAC,GAAA,kBAGAC,KAIAC,KACAC,KAQaL,GAAP,MAAOM,CAAgB,CAC3B,YAAoBC,EAAgC,CAClD,KAAK,QAAUA,CACjB,CAGA,MAAM,IAAIC,EAAkBC,EAA+BC,EAAiB,CAC1EC,GAAgB,EAChB,IAAMC,EAA4C,CAAA,EAC9CC,EAAsB,CAAA,EAE1B,GAAI,OAAOL,GAAU,UAAYA,IAAU,MAAQA,aAAiBM,IAAU,MAAM,QAAQN,CAAK,EAC/F,MAAM,IAAI,UACN,+FAAiG,EAGvG,IAAIO,EAAiB,GAErB,GAAI,OAAON,GAAS,SAAU,CAC5B,GAAIA,IAAS,KACX,MAAM,IAAI,UAAU,yCAAyC,EAE/D,GAAIA,aAAgBK,GAClB,MAAM,IAAI,UAAU,8BAAgC,EAGtD,GAAI,MAAM,QAAQL,CAAI,EAAG,CACvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qCAAuC,EAE7DM,EAAiB,GAEjB,QAAWC,KAAQP,EAAM,CACvB,GAAI,OAAOO,GAAS,SAClB,MAAM,IAAI,UAAU,gDAAkD,EAExE,GAAI,KAAK,YAAY,QAAQA,CAAI,IAAM,GACrC,MAAM,IAAI,WAAW,2CAA2CA,CAAI,GAAG,EAEzEJ,EAAQI,CAAI,EAAI,KAGlB,GAAI,OAAON,GAAS,UAAYA,IAAS,KACvCG,EAAUH,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,MAEjD,CAGL,IAAIO,EAAY,GACVC,EAAW,OAAO,oBAAoBT,CAAI,EAChD,QAAWO,KAAQ,KAAK,YACtB,GAAIE,EAAS,QAAQF,CAAI,IAAM,GAAI,CACjC,IAAMG,EAAKV,EAA4DO,CAAI,GACvEG,IAAM,MAAQA,aAAaL,MAC7BG,EAAY,GACZF,EAAiB,GACjBH,EAAQI,CAAI,EAAIG,GAKtB,GAAIF,GACF,GAAI,OAAOP,GAAS,UAAYA,IAAS,KACvCG,EAAUH,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,OAGtDG,EAAUJ,WAGL,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,yDAA6D,EAInF,QAAWO,KAAQ,KAAK,WACtB,GAAI,OAAOR,EAAMQ,CAAI,EAAM,IACzB,MAAM,IAAI,MAAM,UAAUA,CAAI,0BAA0B,EAK5D,GAAID,EACF,QAAWC,KAAQ,KAAK,YACtBJ,EAAQI,CAAI,EAAI,KAMpB,IAAMI,EAAU,MAAM,KAAK,QAAQ,IAAIZ,EAAOI,EAASC,CAAO,EACxDQ,EAA2C,CAAA,EACjD,QAAWC,KAAOF,EAChB,GAAI,OAAO,eAAe,KAAKA,EAASE,CAAG,EAAG,CAC5C,IAAMC,EAASH,EAAQE,CAAG,EACtBC,aAAkBT,GACpBO,EAAYC,CAAG,EAAIC,EAEnBF,EAAYC,CAAG,EAAI,IAAIR,GAAOS,EAAO,KAAMA,EAAO,KAAMA,EAAO,IAAI,EAIzE,OAAAC,GAAc,EACPH,CACT,CAEA,MAAM,SAAO,CACX,OAAO,KAAK,QAAQ,QAAO,CAC7B,CAOA,aAAa,OACTI,EAAyChB,EAA8BC,EACvEgB,EAAqB,CACvBf,GAAgB,EAEhB,IAAIgB,EACAd,EAA0B,CAAA,EAE9B,GAAI,OAAOY,GAAS,UAElB,GADAE,EAAuBF,EACnB,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAE7CgB,aAAgB,YAEzB,GADAE,EAAuBF,EACnB,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAGpDgB,aAAgB,aACf,OAAO,kBAAsB,KAAeA,aAAgB,kBAAoB,CACnF,IAAMG,EAASH,EACXI,EAAa,EACbC,EAAaL,EAAK,WACtB,GAAI,OAAOhB,GAAS,UAAYA,IAAS,KACvCI,EAAUJ,UACD,OAAOA,GAAS,SAAU,CAEnC,GADAoB,EAAapB,EACT,CAAC,OAAO,cAAcoB,CAAU,EAClC,MAAM,IAAI,WAAW,kCAAoC,EAE3D,GAAIA,EAAa,GAAKA,GAAcD,EAAO,WACzC,MAAM,IAAI,WAAW,oCAAoCA,EAAO,UAAU,IAAI,EAGhF,GADAE,EAAaL,EAAK,WAAaI,EAC3B,OAAOnB,GAAS,SAAU,CAE5B,GADAoB,EAAapB,EACT,CAAC,OAAO,cAAcoB,CAAU,EAClC,MAAM,IAAI,WAAW,kCAAoC,EAE3D,GAAIA,GAAc,GAAKD,EAAaC,EAAaF,EAAO,WACtD,MAAM,IAAI,WAAW,oCAAoCA,EAAO,WAAaC,CAAU,IAAI,EAE7F,GAAI,OAAOH,GAAS,UAAYA,IAAS,KACvCb,EAAUa,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,UAE7C,OAAOhB,EAAS,IACzB,MAAM,IAAI,UAAU,gCAAkC,UAE/C,OAAOD,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,EAEtDkB,EAAuB,IAAI,WAAWC,EAAQC,EAAYC,CAAU,MAEpE,OAAM,IAAI,UAAU,qDAAyD,EAI/E,GAAM,CAACC,EAASC,CAAuB,EAAI,MAAMC,GAAoCpB,CAAO,EACtFN,EAAU,MAAMwB,EAAQ,8BAA8BJ,EAAsBK,CAAuB,EACzG,OAAAR,GAAc,EACP,IAAIlB,EAAiBC,CAAO,CACrC,CAEA,gBAAc,CACZ,KAAK,QAAQ,eAAc,CAC7B,CACA,cAAY,CACV,KAAK,QAAQ,aAAY,CAC3B,CAEA,IAAI,YAAU,CACZ,OAAO,KAAK,QAAQ,UACtB,CACA,IAAI,aAAW,CACb,OAAO,KAAK,QAAQ,WACtB,KCxNF,IAuea2B,GAvebC,GAAAC,GAAA,kBAGAC,KAoeaH,GAA4CA,KCvezD,IAAAI,GAA
AC,GAAA,oBCAA,IAAAC,GAAAC,GAAA,oBCAA,IAAAC,GAAAC,GAAA,oBCAA,IAAAC,GAAAC,GAAA,oBCAA,IAgBMC,GAGOC,GAnBbC,GAAAC,GAAA,kBAGAC,KAIAC,KASML,GAA0B,gHAGnBC,GAAP,MAAOK,CAAe,CAC1B,YAAoBC,EAAiCC,EAA4BC,EAAqB,CACpG,KAAK,QAAUF,EACf,KAAK,kBAAoBC,EACzB,KAAK,aAAeC,CACtB,CAKA,IAAI,oBAAkB,CACpB,OAAO,KAAK,QAAQ,UACtB,CACA,IAAI,qBAAmB,CACrB,OAAO,KAAK,QAAQ,WACtB,CAEA,IAAI,gBAAc,CAChB,GAAI,KAAK,aACP,OAAO,KAAK,QAAQ,eAEpB,MAAM,IAAI,MAAM,gDAAgD,CAEpE,CACA,IAAI,iBAAe,CACjB,GAAI,KAAK,aACP,OAAO,KAAK,QAAQ,gBAEpB,MAAM,IAAI,MAAM,gDAAgD,CAEpE,CAEA,aAAa,OAAOC,EAA+CC,EAA+B,CAEhG,IAAMC,EAA+BF,EAAgB,WAAa,GAC5DG,EAAoCH,EAAgB,gBAAkB,GACtEI,EAA0BH,GAAkB,CAAA,EAG5C,CAACI,EAASC,CAAuB,EAAI,MAAMC,GAAoCH,CAAO,EAC5F,GAAIC,EAAQ,6BAA8B,CACxC,IAAMR,EAAU,MAAMQ,EAAQ,6BAC1BL,EAAgB,gBAAiBA,EAAgB,WAAYE,EAAWC,EACxEG,CAAuB,EAC3B,OAAO,IAAIV,EAAgBC,EAAS,CAAC,CAACG,EAAgB,eAAgB,CAAC,CAACA,EAAgB,SAAS,MAEjG,OAAM,IAAI,MAAMV,EAAe,CAEnC,CAeA,wBACIkB,EAA+BC,EAAgCC,EAAkBC,EACjFC,EAAiB,CACnB,IAAMC,EAA4C,CAAA,EAC9CT,EAAsB,CAAA,EAE1B,GAAI,OAAOM,GAAU,UAAYA,IAAU,MAAQA,aAAiBI,IAAU,MAAM,QAAQJ,CAAK,EAC/F,MAAM,IAAI,UACN,+FAAiG,EAGvG,IAAIK,EAAiB,GAErB,GAAI,OAAOJ,GAAS,SAAU,CAC5B,GAAIA,IAAS,KACX,MAAM,IAAI,UAAU,yCAAyC,EAE/D,GAAIA,aAAgBG,GAClB,MAAM,IAAI,UAAU,8BAAgC,EAGtD,GAAI,MAAM,QAAQH,CAAI,EAAG,CACvB,GAAIA,EAAK,SAAW,EAClB,MAAM,IAAI,UAAU,qCAAuC,EAE7DI,EAAiB,GAEjB,QAAWC,KAAQL,EAAM,CACvB,GAAI,OAAOK,GAAS,SAClB,MAAM,IAAI,UAAU,gDAAkD,EAExE,GAAIP,EAAY,QAAQO,CAAI,IAAM,GAChC,MAAM,IAAI,WAAW,2CAA2CA,CAAI,GAAG,EAEzEH,EAAQG,CAAI,EAAI,KAGlB,GAAI,OAAOJ,GAAS,UAAYA,IAAS,KACvCR,EAAUQ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,MAEjD,CAGL,IAAIK,EAAY,GACVC,EAAW,OAAO,oBAAoBP,CAAI,EAChD,QAAWK,KAAQP,EACjB,GAAIS,EAAS,QAAQF,CAAI,IAAM,GAAI,CACjC,IAAMG,EAAKR,EAAmDK,CAAI,GAC9DG,IAAM,MAAQA,aAAaL,MAC7BG,EAAY,GACZF,EAAiB,GACjBF,EAAQG,CAAI,EAAIG,GAKtB,GAAIF,GACF,GAAI,OAAOL,GAAS,UAAYA,IAAS,KACvCR,EAAUQ,UACD,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,8BAAgC,OAGtDR,EAAUO,WAGL,OAAOA,EAAS,IACzB,MAAM,IAAI,UAAU,yDAA6D,EAInF,QAAWK,KAAQR,EACjB,GAAI,OAAOE,EAAMM,CAAI,EAAM,IACzB,MAAM,IAAI,MAAM,UAAUA,CAAI,0BAA0B,EAK5D,GAAID,EACF,QAAWC,KAAQP,EACjBI,EAAQG,CAAI,EAAI,KAIpB,MAAO,CAACH,EAAST,CAAO,CAC1B,CASA,uCAAuCgB,EAAkC,CACvE,IAAMC,EAA2C,CAAA,EACjD,QAAWC,KAAOF,EAChB,GAAI,OAAO,eAAe,KAAKA,EAASE,CAAG,EAAG,CAC5C,IAAMC,EAASH,EAAQE,CAAG,EACtBC,aAAkBT,GACpBO,EAAYC,CAAG,EAAIC,EAEnBF,EAAYC,CAAG,EAAI,IAAIR,GAAOS,EAAO,KAAMA,EAAO,KAAMA,EAAO,IAAI,EAIzE,OAAOF,CACT,CAEA,MAAM,eAAa,CACjB,MAAM,KAAK,QAAQ,cAAa,CAClC,CAIA,MAAM,aAAaX,EAAkBC,EAA+BC,EAAiB,CACnF,GAAM,CAACC,EAAST,CAAO,EACnB,KAAK,wBAAwB,KAAK,mBAAoB,KAAK,oBAAqBM,EAAOC,EAAMC,CAAI,EAC/FQ,EAAU,MAAM,KAAK,QAAQ,aAAaV,EAAOG,EAAST,CAAO,EACvE,OAAO,KAAK,uCAAuCgB,CAAO,CAC5D,CAEA,MAAM,iBAAiBhB,EAA+C,CACpE,GAAI,KAAK,kBACP,MAAM,KAAK,QAAQ,iBAAiBA,GAAW,CAAA,CAAE,MAEjD,OAAM,IAAI,MAAM,oDAAoD,CAExE,CAIA,MAAM,YAAYM,EAAkBC,EAA+BC,EAAiB,CAClF,GAAI,KAAK,aAAc,CACrB,GAAM,CAACC,EAAST,CAAO,EACnB,KAAK,wBAAwB,KAAK,eAAgB,KAAK,gBAAiBM,EAAOC,EAAMC,CAAI,EACvFQ,EAAU,MAAM,KAAK,QAAQ,YAAYV,EAAOG,EAAST,CAAO,EACtE,OAAO,KAAK,uCAAuCgB,CAAO,MAE1D,OAAM,IAAI,MAAM,+CAA+C,CAEnE,CAEA,MAAM,kBAAkBI,EAAgB,GAAI,CAC1C,OAAO,KAAK,QAAQ,kBAAkBA,CAAa,CACrD,CAEA,MAAM,qBAAqBC,EAAmBD,EAAgB,GAAI,CAChE,IAAME,EAAa,MAAM,KAAK,kBAAkBF,CAAa,EAG7D,GAAIC,EAAM,SAAW,EAAIC,EACvB,MAAM,IAAI,MACN,qJAC0D,EAEhE,OAAO,KAAK,QAAQ,qBAAqBD,EAAOD,CAAa,CAC/D,CAEA,MAAM,wBAAwBA,EAAgB,GAAI,CAChD,OAAO,KAAK,QAAQ,wBAAwBA,CAAa,CAC3D,CAEA,MAAM,SAAO,CACX,OAAO,KAAK,QAAQ,QAAO,CAC7B,KCzPF,IAmMaG,GAnMbC,GAAAC,GAAA,kBAKAC,KA8LaH,GAA0CA,KCnMvD,IAAAI,GAAA,GAAAC,GAAAD,GAAA,sBAAAE,GAAA,UAAAC,GAAA,qBAAAC,GAAA,mBAAAC,GAAA,WAAAC,GAAA,oBAAAC,GAAA,QAAAC,GAAA,oBAAAC,KAAA,IAAAC,GAAAC,GAAA,kBAmBAC
,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,OC5BA,IAAAC,GAAA,GAAAC,GAAAD,GAAA,sBAAAE,GAAA,aAAAC,GAAA,iBAAAC,KAAA,IAAaD,GAAkCC,GAAsCF,GAArFG,GAAAC,GAAA,KAAaH,GAAW,OAAuBC,GAAe,OAAuBF,GAAmB,SCAxG,IAAAK,GAAA,GAAAC,GAAAD,GAAA,UAAAE,KAAA,IAAaA,GAAbC,GAAAC,GAAA,KAAaF,GAAO,SCApB,IAAAG,GAAAC,GAAA,CAAAC,GAAAC,KAAA,cACA,IAAIC,IAAW,IAAM,CACnB,IAAIC,EAAa,OAAO,SAAY,IAAc,SAAS,eAAe,IAAM,OAChF,OAAI,OAAO,WAAc,MAAaA,IAAe,YAEvD,SAASC,EAAY,CAAC,EAAG,CAEzB,IAAIC,EAAED,EAAUE,EAAGC,EAAGC,EAAa,IAAI,QAAQ,CAACC,EAAEC,IAAI,CAACJ,EAAGG,EAAEF,EAAGG,CAAC,CAAC,EAAeL,EAAE,kBAAkB,CAACI,EAAEC,IAAI,EAAEL,EAAE,KAAKA,EAAE,GAAG,IAAI,MAAM,IAAII,EAAEC,CAAC,CAAC,EAAEL,EAAE,oBAAoB,IAAI,CAAC,OAAOA,EAAE,EAAE,EACnL,IAAIM,EAAG,IAAI,CAAC,IAAMF,EAAE,CAACG,EAAEC,EAAEC,IAAI,IAAI,IAAI,CAAC,IAAMC,EAAEC,GAAEC,EAAEJ,IAAI,EAAE,EAAED,EAAE,GAAG,CAAC,EAAE,IAAMM,EAAEL,IAAI,EAAE,OAAAI,IAAIC,IAAIN,EAAEM,EAAEJ,EAAEG,CAAC,EAAEJ,EAAEC,EAAE,MAAaE,IAAGD,EAAEI,GAAG,EAAE,CAAC,EAAET,EAAEE,GAAG,SAASC,IAAI,CAAC,GAAG,CAAC,GAAGR,EAAE,GAAG,MAAM,MAAM,yBAAyB,EAAE,IAAMS,EAAET,EAAE,GAAG,CAAC,GAAGQ,EAAE,CAAC,EAAE,OAAO,CAAC,CAAC,EAAE,EAAE,MAAMD,EAAE,GAAGC,CAAC,EAAE,GAAGR,EAAE,KAAKS,EAAE,MAAM,MAAM,kBAAkB,EAAET,EAAE,IAAI,MAAM,EAAE,IAAMU,EAAED,EAAE,OAAO,GAAG,EAAEC,EAAE,OAAO,CAAC,IAAIE,EAAE,MAAM,QAAQ,IAAIF,CAAC,EAAmB,GAAjBE,EAAEA,EAAE,OAAOC,GAAGA,CAAC,EAAK,EAAED,EAAE,OAAO,MAAM,MAAMA,EAAE,KAAK;AAAA,CAAI,CAAC,CAAE,CAAC,OAAO,CAAC,QAAC,CAAQZ,EAAE,GAAG,IAAI,CAAC,EAAEA,EAAE,kBAAkBI,EAAEJ,EAAE,kBAAkB,IAAIA,EAAE,kBAClfO,GAAGP,EAAE,kBAAkBO,CAAC,EAAEP,EAAE,QAAQK,EAAED,EAAEJ,EAAE,QAAQ,IAAIA,EAAE,QAAQO,GAAGP,EAAE,QAAQO,CAAC,CAAC,EAAEP,EAAE,mBAAmBK,EAAED,EAAEJ,EAAE,mBAAmB,IAAIA,EAAE,mBAAmBO,GAAGP,EAAE,mBAAmBO,CAAC,CAAC,EAAEP,EAAE,cAAcI,EAAEJ,EAAE,cAAc,IAAIA,EAAE,cAAcO,GAAGP,EAAE,cAAcO,CAAC,EAAED,EAAG,MAAM,EACzQN,EAAE,SAAS,CAACI,EAAEC,IAAI,CAAQ,GAAPC,IAAK,EAAgBF,IAAX,SAAa,CAAC,CAACJ,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,GAAGA,EAAE,EAAE,EAAEK,EAAE,IAAME,EAAEP,EAAE,GAAGA,EAAE,mBAAmB,CAACQ,EAAEC,EAAE,EAAEC,IAAIH,EAAE,eAAeC,EAAEC,EAAE,EAAEC,CAAC,EAAEV,EAAE,cAAcQ,GAAGD,EAAE,UAAUC,CAAC,EAAER,EAAE,qBAAqB,CAACQ,EAAEC,EAAE,IAAIF,EAAE,iBAAiBC,EAAEC,EAAE,CAAC,EAAET,EAAE,qBAAqBQ,GAAG,CAACD,EAAE,iBAAiBC,CAAC,CAAC,EAAER,EAAE,eAAeQ,GAAGD,EAAE,WAAWC,CAAC,CAAC,CAAC,EAC/V,IAAIO,EAAG,OAAO,OAAO,CAAC,EAAEf,CAAC,EAAEgB,EAAG,iBAAiBC,EAAG,CAACb,EAAEC,IAAI,CAAC,MAAMA,CAAE,EAAEa,EAAa,OAAO,QAAjB,SAAwBC,EAAe,OAAO,eAAnB,WAAiCC,EAAa,OAAO,SAAjB,UAAoC,OAAO,QAAQ,UAAzB,UAA6C,OAAO,QAAQ,SAAS,MAAlC,SAAuCC,EAAE,GAAGC,EAAGC,EAAGC,EACrP,GAAGJ,EAAG,CAAC,IAAIK,EAAG,cAAcC,EAAG,cAAgBL,EAAEF,EAAGO,EAAG,QAAQL,CAAC,EAAE,IAAI,UAAU,IAAIC,EAAG,CAAClB,EAAEC,KAAKD,EAAEuB,GAAGvB,CAAC,EAAE,IAAI,IAAIA,CAAC,EAAEsB,EAAG,UAAUtB,CAAC,EAASqB,EAAG,aAAarB,EAAEC,EAAE,OAAO,MAAM,GAAGmB,EAAGpB,IAAIA,EAAEkB,EAAGlB,EAAE,EAAE,EAAEA,EAAE,SAASA,EAAE,IAAI,WAAWA,CAAC,GAAUA,GAAGmB,EAAG,CAACnB,EAAEC,EAAEE,EAAEC,EAAE,KAAK,CAACJ,EAAEuB,GAAGvB,CAAC,EAAE,IAAI,IAAIA,CAAC,EAAEsB,EAAG,UAAUtB,CAAC,EAAEqB,EAAG,SAASrB,EAAEI,EAAE,OAAO,OAAO,CAACC,EAAE,IAAI,CAACA,EAAEF,EAAEE,CAAC,EAAEJ,EAAEG,EAAE,EAAE,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAACR,EAAE,aAAa,EAAE,QAAQ,KAAK,SAASgB,EAAG,QAAQ,KAAK,CAAC,EAAE,QAAQ,MAAM,GAAG,GAAG,QAAQ,KAAK,MAAM,CAAC,EAAEC,EAAG,CAACb,EAAEC,IAAI,CAAC,cAAQ,SAASD,EAAQC,CAAE,CAAC,MAASa,GACnfC,KAAGA,EAAGE,EAAE,KAAK,SAAS,KAAkB,OAAO,SAApB,KAA8B,SAAS,gBAAgBA,EAAE,SAAS,cAAc,KAAKvB,IAAauB,EAAEvB,GAAYuB,EAAE,WAAW,OAAO,EAAEA,EAAE,GAAGA,EAAEA,EAAE,OAAO,EAAEA,EAAE,QAAQ,SAAS,EAAE,EAAE,YAAY,GAAG,EAAE,CAAC,EAAEC,EAAGlB,GAAG,CAAC,IAAIC,EAAE,IAAI,eAAe,OAAAA,EAAE,KAAK,MAAMD,EAAE,EAAE,EAAEC,EAAE,KAAK,IAA
I,EAASA,EAAE,YAAY,EAAEc,IAAKK,EAAGpB,GAAG,CAAC,IAAIC,EAAE,IAAI,eAAe,OAAAA,EAAE,KAAK,MAAMD,EAAE,EAAE,EAAEC,EAAE,aAAa,cAAcA,EAAE,KAAK,IAAI,EAAS,IAAI,WAAWA,EAAE,QAAQ,CAAC,GAAGkB,EAAG,CAACnB,EAAEC,EAAEE,IAAI,CAAC,IAAIC,EAAE,IAAI,eAAeA,EAAE,KAAK,MAAMJ,EAAE,EAAE,EAAEI,EAAE,aACtf,cAAcA,EAAE,OAAO,IAAI,CAAMA,EAAE,QAAP,KAAkBA,EAAE,QAAL,GAAaA,EAAE,SAASH,EAAEG,EAAE,QAAQ,EAAED,EAAE,CAAC,EAAEC,EAAE,QAAQD,EAAEC,EAAE,KAAK,IAAI,CAAC,GAAE,IAAIoB,EAAG,QAAQ,IAAI,KAAK,OAAO,EAAEC,EAAE,QAAQ,MAAM,KAAK,OAAO,EAAE,OAAO,OAAO7B,EAAEe,CAAE,EAAEA,EAAG,KAAK,IAAIe,EAAGC,EAAE,GAAGC,GAAGC,EAAEC,GAAEC,GAAGC,GAAGC,EAAEC,GAAEC,GAAGC,GAAGC,GAAGC,GACjP,SAASC,IAAI,CAAC,IAAIvC,EAAE0B,EAAG,OAAO9B,EAAE,MAAMiC,EAAE,IAAI,UAAU7B,CAAC,EAAEJ,EAAE,OAAOmC,GAAG,IAAI,WAAW/B,CAAC,EAAEJ,EAAE,OAAOkC,GAAE,IAAI,WAAW9B,CAAC,EAAEJ,EAAE,QAAQoC,GAAG,IAAI,YAAYhC,CAAC,EAAEJ,EAAE,OAAOqC,EAAE,IAAI,WAAWjC,CAAC,EAAEJ,EAAE,QAAQsC,GAAE,IAAI,YAAYlC,CAAC,EAAEJ,EAAE,QAAQuC,GAAG,IAAI,aAAanC,CAAC,EAAEJ,EAAE,QAAQ0C,GAAG,IAAI,aAAatC,CAAC,EAAEJ,EAAE,OAAOwC,GAAG,IAAI,cAAcpC,CAAC,EAAEJ,EAAE,QAAQyC,GAAG,IAAI,eAAerC,CAAC,CAAC,CAAC,IAAIwC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,EAAEC,GAAG,KAAKC,GAAG,KAC5X,SAASC,GAAG9C,EAAE,CAAC,MAAAA,EAAE,WAAWA,EAAE,IAAIyB,EAAEzB,CAAC,EAAE2B,EAAE,GAAGC,GAAG,EAAE5B,EAAE,IAAI,YAAY,aAAaA,EAAE,0CAA0C,EAAEF,EAAGE,CAAC,EAAQA,CAAE,CAAC,IAAI+C,GAAG/C,GAAGA,EAAE,WAAW,uCAAuC,EAAEuB,GAAGvB,GAAGA,EAAE,WAAW,SAAS,EAAEgD,GAA2B,GAAxBA,GAAG,qBAAwB,CAACD,GAAGC,EAAE,EAAE,CAAC,IAAIC,GAAGD,GAAGA,GAAGpD,EAAE,WAAWA,EAAE,WAAWqD,GAAGhC,CAAC,EAAEA,EAAEgC,EAAE,CAAC,SAASC,GAAGlD,EAAE,CAAC,GAAGoB,EAAG,OAAOA,EAAGpB,CAAC,EAAE,KAAK,iDAAkD,CAC7Z,SAASmD,GAAGnD,EAAE,CAAC,GAAGc,GAAIC,EAAG,CAAC,GAAe,OAAO,OAAnB,YAA0B,CAACQ,GAAGvB,CAAC,EAAE,OAAO,MAAMA,EAAE,CAAC,YAAY,aAAa,CAAC,EAAE,KAAKC,GAAG,CAAC,GAAG,CAACA,EAAE,GAAG,KAAK,uCAAuCD,CAAC,IAAI,OAAOC,EAAE,YAAY,CAAC,CAAC,EAAE,MAAM,IAAIiD,GAAGlD,CAAC,CAAC,EAAE,GAAGmB,EAAG,OAAO,IAAI,QAAQ,CAAClB,EAAEE,IAAI,CAACgB,EAAGnB,EAAEI,GAAGH,EAAE,IAAI,WAAWG,CAAC,CAAC,EAAED,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,QAAQ,QAAQ,EAAE,KAAK,IAAI+C,GAAGlD,CAAC,CAAC,CAAC,CAAC,SAASoD,GAAGpD,EAAEC,EAAEE,EAAE,CAAC,OAAOgD,GAAGnD,CAAC,EAAE,KAAKI,GAAG,YAAY,YAAYA,EAAEH,CAAC,CAAC,EAAE,KAAKE,EAAEC,GAAG,CAACqB,EAAE,0CAA0CrB,CAAC,EAAE,EAAE0C,GAAG1C,CAAC,CAAC,CAAC,CAAC,CAC5c,SAASiD,GAAGrD,EAAEC,EAAE,CAAC,IAAIE,EAAE6C,GAAG,OAAkB,OAAO,YAAY,sBAA/B,YAAqDD,GAAG5C,CAAC,GAAGoB,GAAGpB,CAAC,GAAGa,GAAgB,OAAO,OAAnB,WAAyBoC,GAAGjD,EAAEH,EAAEC,CAAC,EAAE,MAAME,EAAE,CAAC,YAAY,aAAa,CAAC,EAAE,KAAKC,GAAG,YAAY,qBAAqBA,EAAEJ,CAAC,EAAE,KAAKC,EAAE,SAASI,EAAE,CAAC,OAAAoB,EAAE,kCAAkCpB,CAAC,EAAE,EAAEoB,EAAE,2CAA2C,EAAS2B,GAAGjD,EAAEH,EAAEC,CAAC,CAAC,CAAC,CAAC,CAAC,CAC3V,IAAIqD,GAAG,CAAC,QAAQ,CAACtD,EAAEC,EAAEE,EAAEC,IAAI,CAAC,GAAgB,OAAOR,EAApB,KAAuB,CAACA,EAAE,GAAG,MAAO,GAAkE,GAAhEI,EAAEuD,GAAEvD,IAAI,CAAC,EAAEA,EAAE,WAAW,IAAI,IAAIA,EAAEA,EAAE,UAAU,CAAC,GAAGA,EAAEJ,EAAE,GAAG,IAAII,CAAC,EAAK,CAACA,EAAE,MAAO,GAAgB,GAAdC,KAAK,EAAEE,KAAK,EAAKF,EAAEE,EAAEH,EAAE,WAAW,MAAO,GAAE,GAAG,CAAC,OAAO8B,GAAE,IAAI9B,EAAE,SAASC,EAAEA,EAAEE,CAAC,EAAEC,IAAI,IAAI,CAAC,EAAE,CAAC,MAAM,CAAC,MAAO,EAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEE,IAAI,CAACA,EAAEoD,GAAEpD,CAAC,EAAE,IAAMC,EAAE,IAAI,WAAWH,CAAC,EAAEG,EAAE,IAAI0B,GAAE,SAAS9B,IAAI,EAAEA,EAAEC,IAAI,CAAC,CAAC,EAAY,OAAO,SAAjB,UAAoC,OAAO,QAAQ,UAAzB,UAA6C,OAAO,QAAQ,SAAS,MAAlC,SAAuC,cAAc,cAAcE,EAAEC,CAAC,GAAGJ,EAAE,IAAI,KAAK,CAACI,CAAC,EAAED,EAAE,CAAC,KAAK,0BAA0B,CAAC,EAC5gBH,EAAE,IAAI,gBAAgBA,CAAC,EAAE,OAAO,KAAKA,EAAE,QAAQ,EAAE,EAAE,QAAQ,IAAI,CAACJ,EAAE,GAAG,CAAC,EAAE,QAAQ,IAAI,CAACA,EAAE,GAAG,CAAC,EAAE,QAAQ,IAAI,CAACA,EAAE,GAAG,CAAC,EAAE,QAAQI,GAAGJ,EAAE,GAAGI,CAAC,EAA
E,QAAQA,GAAGJ,EAAE,GAAGI,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEE,IAAI,CAACP,EAAE,GAAGI,EAAEC,EAAEE,EAAE,EAAE,CAAC,EAAE,QAAQ,CAACH,EAAEC,EAAEE,IAAI,CAACP,EAAE,GAAGI,EAAEC,EAAEE,CAAC,CAAC,EAAE,QAAQH,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,QAAQI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,aAAaI,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAC3f,CAACJ,EAAE,GAAG,UAAUI,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEE,IAAI,CAACP,EAAE,GAAG,cAAcI,EAAE,CAAC,MAAMC,EAAE,KAAKE,CAAC,CAAC,CAAC,EAAE,QAAQH,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,QAAQI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,QAAQI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,QAAQI,EAAE,MAAM,CAAC,EACtf,QAAQA,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEE,IAAI,CAACP,EAAE,GAAG,OAAOI,EAAE,CAAC,IAAIC,EAAE,IAAIE,CAAC,CAAC,CAAC,EAAE,QAAQH,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACL,EAAE,GAAG,MAAMI,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQD,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACL,EAAE,GAAG,YAAYI,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACL,EAAE,GAAG,kBAAkBI,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACL,EAAE,GAAG,OAAOI,EAAE,CAAC,GAAGC,CAAC,CAAC,CAAC,EAAE,QAAQD,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EACrf,QAAQA,GAAG,CAACJ,EAAE,GAAG,MAAMI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,QAAQI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,UAAUI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,iBAAiBI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,cAAcI,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEE,EAAEC,EAAEC,IAAI,CAACT,EAAE,GAAG,aAAaI,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACE,EAAE,KAAKC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,EAAEC,EAAEC,IAAI,CAACT,EAAE,GAAG,YAAYI,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACE,EAAE,KAAKC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,EAAEC,EAAEC,IACpf,CAACT,EAAE,GAAG,YAAYI,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACE,EAAE,KAAKC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,EAAEC,EAAEC,IAAI,CAACT,EAAE,GAAG,aAAaI,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACE,EAAE,KAAKC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,EAAEC,EAAEC,IAAI,CAACT,EAAE,GAAG,YAAYI,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACE,EAAE,KAAKC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI
,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,EAAEC,EAAEC,IAAI,CAACT,EAAE,GAAG,WAAWI,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACE,EAAE,KAAKC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,EAAEC,EAAEC,IACpf,CAACT,EAAE,GAAG,WAAWI,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACE,EAAE,KAAKC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,EAAEC,EAAEC,IAAI,CAACT,EAAE,GAAG,eAAeI,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACE,EAAE,KAAKC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,EAAEC,EAAEC,IAAI,CAACT,EAAE,GAAG,kBAAkBI,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACE,EAAE,KAAKC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,EAAEC,EAAEC,IAAI,CAACT,EAAE,GAAG,kBAAkBI,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACE,EAAE,KAAKC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQL,GACzf,CAACJ,EAAE,GAAG,QAAQI,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEE,IAAI,CAACP,EAAE,GAAG,YAAYI,EAAE,CAAC,KAAKC,EAAE,MAAM,KAAKgC,EAAE,SAAShC,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACH,EAAEC,EAAEE,EAAEC,IAAI,CAACR,EAAE,GAAG,eAAeI,EAAE,CAAC,UAAUC,EAAE,KAAKsD,GAAEpD,CAAC,EAAE,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEE,EAAEC,IAAI,CAACR,EAAE,GAAG,eAAeI,EAAE,CAAC,UAAUC,EAAE,KAAKsD,GAAEpD,CAAC,EAAE,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACjE,EAAE,GAAG,gBAAgBI,EAAE,CAAC,OAAOS,EAAE,OAAO,OAAO,QAAQR,EAAE,UAAU,CAACE,CAAC,EAAE,MAAMC,EAAE,YAAY,CAACC,CAAC,EAAE,KAAK,CAAC,EAAEC,CAAC,EAAE,QAAQ,CAACE,CAAC,EAAE,SAAS,IAAI,CAAC,CAACqB,EAAE2B,IAAI,CAAC,EAAE,cAAcC,EAAE,MAAM,KAAKxB,EAAE,SAASwB,IACpgB,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,EAAE,MAAM,KAAK1B,EAAE,SAAS0B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWL,GAAEM,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC7D,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAAChE,EAAE,GAAG,gBAAgBI,EAAE,CAAC,OAAOQ,EAAE,OAAO,OAAO,QAAQP,EAAE,UAAU,MAAM,KAAKgC,EAAE,SAAS9B,IAAI,GAAGA,IAAI,GAAG,IAAI,CAAC,CAAC,EAAE,MAAMC,EAAE,YAAY,MAAM,KAAK6B,EAAE,SAAS5B,IAAI,GAAGA,IAAI,GAAG,IAAI,CAAC,CAAC,EAAE,KAAK,MAAM,KAAK4B,EAAE,SAAS,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,EAAE,QAAQ,MAAM,KAAKA,EAAE,SAAS3B,IAAI,GAAGA,IAAI,GAAG,IAAI,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAACuB,EAAEpB,IAAI,CAAC,EAAE,cAAc+C,EAAE,MAAM,KAAKvB,EAAE,SAASuB,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,EAAE,MAAM,KAAKzB,EAAE,SAASyB,IACpgB,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWJ,GAAEK,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC5D,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACjE,EAAE,GAAG,gBAAgBI,EAAE,CAAC,OAAOS,EAAE,OAAO,OAAO,QAAQR,EAAE,UAAU,CAACE,CAAC,EAAE,MAAMC,EAAE,YAAY,CAACC,CAAC,EAAE,KAAK,CAAC,EAAEC,CAAC,EAAE,QAAQ,CAACE,CAAC,EAAE,SAAS,IAAI,CAAC,CAACqB,EAAE2B,IAAI,CAAC,EAAE,cAAcC,EAAE,MAAM,KAAKxB,EAAE,SAASwB,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,EAAE,MAAM,KAAK1B,EAAE,SAAS0B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWL,GAAEM,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC7D,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAAChE,EAAE,GAAG,gBAAgBI,EAAE,CAAC,OAAOQ,EAAE,OAAO,OAAO,QAAQP,EAAE,UAAU,MAAM,KAAKgC,EAAE,SAAS9B,IAAI,GAAGA,IAAI,GAAG,IAAI,CAAC,CAAC,EAAE,MAAMC,EACpf,YAAY,MAAM,KAAK6B,EAAE,SAAS5B,IAAI,GAAGA,IAAI,GAAG,IAAI,CAAC,CAA
C,EAAE,KAAK,MAAM,KAAK4B,EAAE,SAAS,IAAI,GAAG,IAAI,GAAG,IAAI,CAAC,CAAC,EAAE,QAAQ,MAAM,KAAKA,EAAE,SAAS3B,IAAI,GAAGA,IAAI,GAAG,IAAI,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAACuB,EAAEpB,IAAI,CAAC,EAAE,cAAc+C,EAAE,MAAM,KAAKvB,EAAE,SAASuB,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYC,EAAE,MAAM,KAAKzB,EAAE,SAASyB,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,WAAWJ,GAAEK,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC5D,EAAEC,IAAI,CAACL,EAAE,GAAG,oBAAoBI,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAAClE,EAAE,GAAG,cAAcI,EAAE,CAAC,OAAO8D,EAAE,OAAO,OAAO,SAAS7D,EAAE,UAAUE,EAAE,kBAAkBC,EAC5f,cAAcC,EAAE,UAAU,CAAC,EAAEC,CAAC,EAAE,aAAa,CAACE,EAAEC,CAAC,EAAE,KAAK,CAAC+C,EAAEC,EAAEC,EAAEC,CAAC,EAAE,QAAQ,CAACC,EAAEC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC7D,EAAEC,IAAI,CAACL,EAAE,GAAG,oBAAoBI,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAAClE,EAAE,GAAG,cAAcI,EAAE,CAAC,OAAO8D,EAAE,OAAO,OAAO,SAAS7D,EAAE,UAAUE,EAAE,kBAAkBC,EAAE,cAAcC,EAAE,UAAU,CAAC,EAAEC,CAAC,EAAE,aAAa,CAACE,EAAEC,CAAC,EAAE,KAAK,CAAC+C,EAAEC,EAAEC,EAAEC,CAAC,EAAE,QAAQ,CAACC,EAAEC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC7D,EAAEC,IAAI,CAACL,EAAE,GAAG,gBAAgBI,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAAClE,EAAE,GAAG,UAAUI,EAAE,CAAC,OAAO8D,EACvf,OAAO,OAAO,SAAS7D,EAAE,UAAUE,EAAE,kBAAkBC,EAAE,cAAcC,EAAE,UAAU,CAAC,EAAEC,CAAC,EAAE,aAAa,CAACE,EAAEC,CAAC,EAAE,KAAK,CAAC+C,EAAEC,EAAEC,EAAEC,CAAC,EAAE,QAAQ,CAACC,EAAEC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC7D,EAAEC,IAAI,CAACL,EAAE,GAAG,gBAAgBI,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAAClE,EAAE,GAAG,UAAUI,EAAE,CAAC,OAAO8D,EAAE,OAAO,OAAO,SAAS7D,EAAE,UAAUE,EAAE,kBAAkBC,EAAE,cAAcC,EAAE,UAAU,CAAC,EAAEC,CAAC,EAAE,aAAa,CAACE,EAAEC,CAAC,EAAE,KAAK,CAAC+C,EAAEC,EAAEC,EAAEC,CAAC,EAAE,QAAQ,CAACC,EAAEC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC7D,EAAEC,EAAEE,EAAEC,EAAEC,IAAI,CAACT,EAAE,GAAG,OAAOI,EAAE,CAAC,MAAMC,EAAE,KAAKE,EAAE,OAAOC,EAAE,OAAOC,CAAC,CAAC,CAAC,EAAE,QAAQL,GAClf,CAACJ,EAAE,GAAG,SAASI,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEE,EAAEC,IAAI,CAACR,EAAE,GAAG,SAASI,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,gBAAgB,CAAC,CAACE,EAAE,KAAKC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEE,EAAEC,IAAI,CAACR,EAAE,GAAG,SAASI,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,gBAAgB,CAAC,CAACE,EAAE,KAAKC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,IAAI,CAACL,EAAE,GAAG,UAAUI,EAAE,CAAC,KAAKC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACL,EAAE,GAAG,SAASI,EAAE,CAAC,KAAKC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEE,EAAEC,EAAEC,IAAI,CAACT,EAAE,GAAG,QAAQI,EAAE,CAAC,KAAKC,EAAE,WAAWE,EAAE,WAAWC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQL,GAAG,CAACJ,EAAE,GAAG,SAASI,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACL,EAAE,GAAG,SAASI,EAAE,CAAC,KAAK,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,IAAI,CAACL,EAAE,GAAG,iBAC/eI,EAAE,CAAC,KAAK,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,IAAI,CAAC7D,EAAE,GAAG,SAASI,EAAE,CAAC,UAAUC,EAAE,KAAKE,EAAE,MAAM,KAAK8B,EAAE,SAAS9B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,wBAAwBmD,GAAElD,CAAC,EAAE,YAAY,EAAE,eAAeC,EAAE,mBAAmBE,EAAE,sBAAsB+C,GAAE9C,CAAC,EAAE,KAAK8C,GAAEC,CAAC,EAAE,YAAYD,GAAEE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACzD,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,IAAI,
CAACV,EAAE,GAAG,QAAQI,EAAE,CAAC,OAAOC,EAAE,MAAM,KAAKgC,EAAE,SAAShC,IAAI,EAAEE,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAK,EAAE,MAAM,KAAK4B,EAAE,SAAS,IAAI,EAAE3B,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQN,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EACnfE,EAAEC,IAAI,CAACR,EAAE,GAAG,qBAAqBI,EAAE,CAAC,KAAKC,EAAE,QAAQE,EAAE,WAAW,CAAC,CAACC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEE,IAAI,CAACP,EAAE,GAAG,wBAAwBI,EAAE,CAAC,QAAQC,EAAE,OAAOE,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACH,EAAEC,EAAEE,IAAI,CAACP,EAAE,GAAG,wBAAwBI,EAAE,CAAC,QAAQC,EAAE,OAAOE,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQH,GAAG,CAACJ,EAAE,GAAG,QAAQI,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAI,CAACL,EAAE,GAAG,SAASI,EAAE,CAAC,SAASuD,GAAEtD,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACD,EAAEC,EAAEE,EAAEC,EAAEC,IAAI,CAACT,EAAE,GAAG,MAAMI,EAAE,CAAC,KAAKC,EAAE,MAAME,EAAE,KAAKC,EAAE,MAAM,KAAK6B,EAAE,SAAS7B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,IAAI,CAACT,EAAE,GAAG,qBAAqBI,EAAE,CAAC,QAAQC,EAAE,SAASE,EACrf,QAAQ,CAAC,CAACE,EAAE,aAAa,CAAC,CAACD,EAAE,OAAO,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,IAAI,CAACT,EAAE,GAAG,qBAAqBI,EAAE,CAAC,QAAQC,EAAE,SAASE,EAAE,QAAQ,CAAC,CAACE,EAAE,aAAa,CAAC,CAACD,EAAE,OAAO,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,QAAQ,CAACJ,EAAEC,EAAEE,IAAI,CAACP,EAAE,GAAG,SAASI,EAAE,CAAC,UAAU,OAAOC,CAAC,EAAE,QAAQ,OAAOE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACH,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,IAAI,CAACb,EAAE,GAAG,YAAYI,EAAE,CAAC,SAASC,EAAE,iBAAiBE,EAAE,gBAAgBC,EAAE,MAAMC,EAAE,SAAS,EAAE,eAAeC,EAAE,MAAM,KAAK2B,EAAE,SAAS,OAAOzB,CAAC,IAAI,EAAE,OAAOA,CAAC,EAAEF,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,uBAAuB,CAAC,CAACG,CAAC,CAAC,CAAC,EAAE,QAAQT,GAAG,CAACJ,EAAE,GAAG,UAClfI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,gBAAgBI,EAAE,MAAM,CAAC,EAAE,QAAQA,GAAG,CAACJ,EAAE,GAAG,WAAWI,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,IAAI,CAAC/D,EAAE,GAAG,OAAOI,EAAE,CAAC,OAAOS,EAAE,OAAO,OAAO,SAASR,EAAE,UAAU,CAACE,CAAC,EAAE,MAAMC,EAAE,aAAa,CAACC,CAAC,EAAE,KAAK,EAAE,MAAM,KAAK4B,EAAE,SAAS,IAAI,EAAE3B,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,QAAQ,CAACE,CAAC,EAAE,WAAW,IAAI,CAAC,CAACqB,EAAE2B,IAAI,CAAC,EAAE,WAAWD,GAAEE,CAAC,EAAE,kBAAkBC,EAAE,MAAM,KAAKvB,GAAG,SAASuB,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAAC3D,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAAClE,EAAE,GAAG,OAAOI,EAAE,CAAC,OAAO0D,EAAE,OAAO,OAAO,SAASzD,EAAE,UAAU,CAACE,EAAEC,CAAC,EAAE,MAAMC,EAAE,aAAa,CAAC,EAClgBC,CAAC,EAAE,KAAKE,EAAE,MAAM,KAAKyB,EAAE,SAASzB,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,QAAQ,CAAC+C,EAAEC,CAAC,EAAE,WAAW,IAAI,CAAC,CAAC5B,EAAE8B,IAAI,CAAC,EAAE,WAAWJ,GAAEK,CAAC,EAAE,kBAAkBC,EAAE,MAAM,KAAK1B,GAAG,SAAS0B,IAAI,EAAEC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,QAAQ9D,GAAG,CAACJ,EAAE,GAAG,OAAOI,EAAE,MAAM,CAAC,EAAE,QAAQ,CAACA,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,IAAI,CAACT,EAAE,GAAG,cAAcI,EAAE,CAAC,EAAEC,EAAE,EAAEE,EAAE,cAAcC,EAAE,KAAKC,EAAE,UAAU,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,IAAI,CAACT,EAAE,GAAG,qBAAqBI,EAAE,CAAC,SAASC,EAAE,iBAAiBE,EAAE,gBAAgBC,EAAE,MAAMC,EAAE,SAAS,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,EAAEC,EAAEC,IAAI,CAACT,EAAE,GAAG,kBAAkBI,EAAE,CAAC,YAAY,CAAC,CAACC,EAAE,SAASE,EAAE,mBAAmBC,EAChgB,MAAMC,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACL,EAAEC,EAAEE,IAAI,CAACP,EAAE,GAAG,yBAAyBI,EAAE,CAAC,QAAQC,EAAE,WAAW,CAAC,CAACE,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACH,EAAEC,EAAEE,IAAI,CAACP,EAAE,GAAG,yBAAyBI,E
AAE,CAAC,QAAQC,EAAE,WAAW,CAAC,CAACE,CAAC,CAAC,CAAC,EAAE,QAAQ,CAACH,EAAEC,EAAEE,EAAEC,IAAI,CAACR,EAAE,GAAG,qBAAqBI,EAAE,CAAC,KAAKC,EAAE,QAAQE,EAAE,WAAW,CAAC,CAACC,CAAC,CAAC,CAAC,EAAE,QAAQJ,GAAG,CAACJ,EAAE,GAAGI,CAAC,CAAC,EAAE,QAAQ,CAACA,EAAEC,IAAIL,EAAE,GAAGI,EAAEC,EAAEL,EAAE,GAAG,GAAGA,EAAE,GAAG,MAAM,CAAC,EAAE,SAASmE,GAAG/D,EAAE,CAAC,KAAK,KAAK,aAAa,KAAK,QAAQ,gCAAgCA,CAAC,IAAI,KAAK,OAAOA,CAAC,CAAC,IAAIgE,GAAG,CAAC,EAAEC,GAAG,EAAEC,GAAE,EAAE,MAAMC,EAAE,CAAC,YAAYnE,EAAE,CAAC,KAAK,GAAGA,EAAE,KAAK,GAAGA,EAAE,EAAE,CAAC,CACxe,IAAIoE,GAAGpE,GAAG,CAAC,IAAIC,EAAEiE,GAAE,GAAG,CAACjE,EAAE,OAAOoE,GAAG,CAAC,EAAE,EAAE,IAAIlE,EAAE,IAAIgE,GAAGlE,CAAC,EAAEiC,GAAE/B,EAAE,GAAG,KAAK,IAAI,CAAC,EAAEF,EAAE,IAAIG,EAAE8B,GAAE/B,EAAE,GAAG,IAAI,IAAI,CAAC,EAAE,GAAG,CAACC,EAAE,OAAOiE,GAAG,CAAC,EAAEpE,EAAE,QAAQI,KAAKL,EAAE,CAAC,IAAI,EAAEA,EAAEK,CAAC,EAAE,GAAO,IAAJ,GAAO,IAAID,EAAE,MAAM,GAAGkE,GAAG,EAAElE,EAAED,EAAE,GAAG,EAAE,EAAE,OAAOkE,GAAG,CAAC,EAAEpE,CAAC,CAAC,OAAAoE,GAAGjE,CAAC,EAASH,CAAC,EAAEsE,GAAgB,OAAO,YAApB,IAAgC,IAAI,YAAY,MAAM,EAAE,OAAOC,GAAG,CAACxE,EAAEC,EAAEE,IAAI,CAACF,KAAK,EAAE,IAAIG,EAAEH,EAAEE,EAAE,IAAIA,EAAEF,EAAED,EAAEG,CAAC,GAAG,EAAEA,GAAGC,IAAI,EAAED,EAAE,GAAG,GAAGA,EAAEF,GAAGD,EAAE,QAAQuE,GAAG,OAAOA,GAAG,OAAOvE,EAAE,SAASC,EAAEE,CAAC,CAAC,EAAE,IAAIC,EAAE,GAAGH,EAAEE,GAAG,CAAC,IAAIE,EAAEL,EAAEC,GAAG,EAAE,GAAGI,EAAE,IAAI,CAAC,IAAI,EAAEL,EAAEC,GAAG,EAAE,GAAG,IAASI,EAAE,MAAR,IAAaD,GAAG,OAAO,cAAcC,EAAE,KAClf,EAAE,CAAC,MAAM,CAAC,IAAIC,EAAEN,EAAEC,GAAG,EAAE,GAAGI,GAAQA,EAAE,MAAR,KAAcA,EAAE,KAAK,GAAG,GAAG,EAAEC,GAAGD,EAAE,IAAI,GAAG,GAAG,GAAGC,GAAG,EAAEN,EAAEC,GAAG,EAAE,GAAG,MAAMI,EAAED,GAAG,OAAO,aAAaC,CAAC,GAAGA,GAAG,MAAMD,GAAG,OAAO,aAAa,MAAMC,GAAG,GAAG,MAAMA,EAAE,IAAI,EAAE,CAAC,MAAMD,GAAG,OAAO,aAAaC,CAAC,CAAC,CAAC,OAAOD,CAAC,EAAEmD,GAAE,CAACvD,EAAEC,KAAKD,KAAK,GAAGwE,GAAG1C,GAAE9B,EAAEC,CAAC,EAAE,GAAGwE,GAAGzE,GAAG,CAAC,QAAQC,EAAE,EAAEE,EAAE,EAAEA,EAAEH,EAAE,OAAO,EAAEG,EAAE,CAAC,IAAIC,EAAEJ,EAAE,WAAWG,CAAC,EAAE,KAAKC,EAAEH,IAAI,MAAMG,EAAEH,GAAG,EAAE,OAAOG,GAAG,OAAOA,GAAGH,GAAG,EAAE,EAAEE,GAAGF,GAAG,CAAC,CAAC,OAAOA,CAAC,EAAEyE,GAAE,CAAC1E,EAAEC,EAAEE,EAAEC,IAAI,CAAQ,GAAPD,KAAK,EAAK,EAAE,EAAEC,GAAG,MAAO,GAAE,IAAIC,EAAEF,EAAEC,EAAED,EAAEC,EAAE,EAAE,QAAQ,EAAE,EAAE,EAAEJ,EAAE,OAAO,EAAE,EAAE,CAAC,IAAIM,EAAEN,EAAE,WAAW,CAAC,EAAE,GAAG,OAChfM,GAAG,OAAOA,EAAE,CAAC,IAAIE,EAAER,EAAE,WAAW,EAAE,CAAC,EAAEM,EAAE,QAAQA,EAAE,OAAO,IAAIE,EAAE,IAAI,CAAC,GAAG,KAAKF,EAAE,CAAC,GAAGH,GAAGC,EAAE,MAAMH,EAAEE,MAAM,CAAC,EAAEG,CAAC,KAAK,CAAC,GAAG,MAAMA,EAAE,CAAC,GAAGH,EAAE,GAAGC,EAAE,MAAMH,EAAEE,MAAM,CAAC,EAAE,IAAIG,GAAG,CAAC,KAAK,CAAC,GAAG,OAAOA,EAAE,CAAC,GAAGH,EAAE,GAAGC,EAAE,MAAMH,EAAEE,MAAM,CAAC,EAAE,IAAIG,GAAG,EAAE,KAAK,CAAC,GAAGH,EAAE,GAAGC,EAAE,MAAMH,EAAEE,MAAM,CAAC,EAAE,IAAIG,GAAG,GAAGL,EAAEE,MAAM,CAAC,EAAE,IAAIG,GAAG,GAAG,EAAE,CAACL,EAAEE,MAAM,CAAC,EAAE,IAAIG,GAAG,EAAE,EAAE,CAACL,EAAEE,MAAM,CAAC,EAAE,IAAIG,EAAE,EAAE,CAAC,CAAC,OAAAL,EAAEE,IAAI,CAAC,EAAE,EAASA,EAAEE,CAAC,EAAEsE,GAAGC,GAAE5E,GAAG,CAAC,QAAQC,EAAE,GAAG6B,GAAE9B,IAAI,CAAC,GAAGC,GAAG0E,GAAG7C,GAAE9B,MAAM,CAAC,CAAC,EAAE,OAAOC,CAAC,EAAE4E,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GACxa,SAASC,GAAGjF,EAAEC,EAAEE,EAAE,CAAC,EAAE,CAAC,IAAIC,EAAEH,EAAE,KAAK,GAAG,CAACD,EAAE,MAAM,IAAIgF,GAAE,SAAS5E,CAAC,+CAA+C,EAAE,GAAG0E,GAAG,eAAe9E,CAAC,EAAE,CAAC,GAAGG,EAAE,GAAG,OAAO,MAAM,IAAI6E,GAAE,yBAAyB5E,CAAC,SAAS,CAAE,CAAC0E,GAAG9E,CAAC,EAAEC,EAAE,OAAO8E,GAAG/E,CAAC,EAAE6E,GAAG,eAAe7E,CAAC,IAAIC,EAAE4E,GAAG7E,CAAC,EAAE,OAAO6E,GAAG7E,CAAC,EAAEC,EAAE,QAAQI,GAAGA,EAAE,CAAC,EAAE,CAAC,SAAS6E,GAAElF,EAAEC,EAAEE,EAAE,CAAC,EAAE,CAAC,GAAG,EAAE,mBAAmBF,GAAG,MAAM,IAAI,UAAU,yDAAyD,EAAE,OAAOgF,G
AAGjF,EAAEC,EAAEE,CAAC,CAAC,CAC7a,IAAIgF,GAAG,CAACnF,EAAEC,EAAEE,IAAI,CAAC,OAAOF,EAAE,CAAC,IAAK,GAAE,OAAOE,EAAEC,GAAGyB,EAAEzB,IAAI,CAAC,EAAEA,GAAG0B,GAAE1B,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOD,EAAEC,GAAG2B,GAAG3B,IAAI,IAAI,CAAC,EAAEA,GAAG4B,GAAG5B,IAAI,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOD,EAAEC,GAAG6B,EAAE7B,IAAI,IAAI,CAAC,EAAEA,GAAG8B,GAAE9B,IAAI,IAAI,CAAC,EAAE,IAAK,GAAE,OAAOD,EAAEC,GAAGgC,GAAGhC,IAAI,CAAC,EAAEA,GAAGiC,GAAGjC,IAAI,CAAC,EAAE,QAAQ,MAAM,IAAI,UAAU,0BAA0BH,CAAC,MAAMD,CAAC,EAAE,CAAE,CAAC,EAAEoF,GAAG,CAAC,EAAEC,GAAE,CAAC,EAAE,SAASC,GAAGtF,EAAE,CAACA,KAAK,EAAE,EAAEA,GAAO,EAAEqF,GAAErF,EAAE,CAAC,IAAX,IAAeqF,GAAErF,CAAC,EAAE,OAAOoF,GAAG,KAAKpF,CAAC,EAAE,CAChW,IAAIuF,GAAEvF,GAAG,CAAC,GAAG,CAACA,EAAE,MAAM,IAAIgF,GAAE,oCAAoChF,CAAC,EAAE,OAAOqF,GAAErF,CAAC,CAAC,EAAEwF,GAAExF,GAAG,CAAC,OAAOA,EAAE,CAAC,KAAK,OAAO,MAAO,GAAE,KAAK,KAAK,MAAO,GAAE,IAAK,GAAG,MAAO,GAAE,IAAK,GAAG,MAAO,GAAE,QAAQ,IAAMC,EAAEmF,GAAG,IAAI,GAAGC,GAAE,OAAO,OAAAA,GAAEpF,CAAC,EAAED,EAAEqF,GAAEpF,EAAE,CAAC,EAAE,EAASA,CAAC,CAAC,EAAE,SAASwF,GAAGzF,EAAE,CAAC,OAAO,KAAK,aAAakC,GAAElC,IAAI,IAAI,CAAC,CAAC,CAAC,CAC7R,IAAI0F,GAAG,CAAC,KAAK,kBAAkB,aAAa1F,GAAG,CAAC,IAAIC,EAAEsF,GAAEvF,CAAC,EAAE,OAAAsF,GAAGtF,CAAC,EAASC,CAAC,EAAE,WAAW,CAACD,EAAEC,IAAIuF,GAAEvF,CAAC,EAAE,eAAe,EAAE,qBAAqBwF,GAAG,GAAG,IAAI,EAAEE,GAAG,CAAC3F,EAAEC,IAAI,CAAC,OAAOA,EAAE,CAAC,IAAK,GAAE,OAAO,SAASE,EAAE,CAAC,OAAO,KAAK,aAAagC,GAAGhC,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,IAAK,GAAE,OAAO,SAASA,EAAE,CAAC,OAAO,KAAK,aAAamC,GAAGnC,IAAI,IAAI,CAAC,CAAC,CAAC,EAAE,QAAQ,MAAM,IAAI,UAAU,wBAAwBF,CAAC,MAAMD,CAAC,EAAE,CAAE,CAAC,EAAE4F,GAAgB,OAAO,YAApB,IAAgC,IAAI,YAAY,UAAU,EAAE,OAAOC,GAAG,CAAC7F,EAAEC,IAAI,CAAY,QAAPE,EAAEH,GAAG,EAAUI,EAAED,EAAEF,EAAE,EAAE,EAAEE,GAAGC,IAAI4B,GAAG7B,IAAI,CAAC,GAAG,EAAEA,EAC9e,GAANA,IAAI,EAAK,GAAGA,EAAEH,GAAG4F,GAAG,OAAOA,GAAG,OAAO9D,GAAE,SAAS9B,IAAI,EAAEG,IAAI,CAAC,CAAC,EAAO,IAALA,EAAE,GAAOC,EAAE,EAAE,EAAEA,GAAGH,EAAE,GAAG,EAAEG,EAAE,CAAC,IAAIC,EAAE0B,GAAG/B,EAAE,EAAEI,IAAI,IAAI,CAAC,EAAE,GAAMC,GAAH,EAAK,MAAMF,GAAG,OAAO,aAAaE,CAAC,CAAC,CAAC,OAAOF,CAAC,EAAE2F,GAAG,CAAC9F,EAAEC,EAAEE,IAAI,CAAgB,GAAfA,IAAI,WAAc,EAAEA,EAAE,MAAO,GAAEA,GAAG,EAAE,IAAIC,EAAEH,EAAEE,EAAEA,EAAE,EAAEH,EAAE,OAAOG,EAAE,EAAEH,EAAE,OAAO,QAAQK,EAAE,EAAEA,EAAEF,EAAE,EAAEE,EAAE0B,GAAG9B,IAAI,IAAI,CAAC,EAAED,EAAE,WAAWK,CAAC,EAAEJ,GAAG,EAAE,OAAA8B,GAAG9B,IAAI,IAAI,CAAC,EAAE,EAASA,EAAEG,CAAC,EAAE2F,GAAG/F,GAAG,EAAEA,EAAE,OAAOgG,GAAG,CAAChG,EAAEC,IAAI,CAAC,QAAQE,EAAE,EAAEC,EAAE,GAAG,EAAED,GAAGF,EAAE,IAAI,CAAC,IAAII,EAAE4B,EAAEjC,EAAE,EAAEG,IAAI,IAAI,CAAC,EAAE,GAAME,GAAH,EAAK,MAAM,EAAEF,EAAE,OAAOE,GAAGA,GAAG,MAAMD,GAAG,OAAO,aAAa,MAAMC,GAAG,GAAG,MAAMA,EAAE,IAAI,GAAGD,GACnf,OAAO,aAAaC,CAAC,CAAC,CAAC,OAAOD,CAAC,EAAE6F,GAAG,CAACjG,EAAEC,EAAEE,IAAI,CAAuB,GAAtBF,KAAK,EAAEE,IAAI,WAAc,EAAEA,EAAE,MAAO,GAAE,IAAIC,EAAEH,EAAEE,EAAEC,EAAED,EAAE,EAAE,QAAQE,EAAE,EAAEA,EAAEL,EAAE,OAAO,EAAEK,EAAE,CAAC,IAAI,EAAEL,EAAE,WAAWK,CAAC,EAAE,GAAG,OAAO,GAAG,OAAO,EAAE,CAAC,IAAIC,EAAEN,EAAE,WAAW,EAAEK,CAAC,EAAE,EAAE,QAAQ,EAAE,OAAO,IAAIC,EAAE,IAAI,CAAqB,GAApB2B,EAAEhC,IAAI,IAAI,CAAC,EAAE,EAAEA,GAAG,EAAKA,EAAE,EAAEE,EAAE,KAAK,CAAC,OAAA8B,EAAEhC,IAAI,IAAI,CAAC,EAAE,EAASA,EAAEG,CAAC,EAAE8F,GAAGlG,GAAG,CAAC,QAAQC,EAAE,EAAEE,EAAE,EAAEA,EAAEH,EAAE,OAAO,EAAEG,EAAE,CAAC,IAAIC,EAAEJ,EAAE,WAAWG,CAAC,EAAE,OAAOC,GAAG,OAAOA,GAAG,EAAED,EAAEF,GAAG,CAAC,CAAC,OAAOA,CAAC,EAAEkG,GAAG,CAACnG,EAAEC,IAAI,CAAC,IAAIE,EAAE2E,GAAG9E,CAAC,EAAE,GAAYG,IAAT,OAAW,MAAMH,EAAEoG,GAAGpG,CAAC,EAAEG,EAAEyE,GAAE5E,CAAC,EAAEqG,GAAErG,CAAC,EAAE,IAAIgF,GAAE,GAAG/E,CAAC,qBAAqBE,CAAC,EAAE,EAAE,OAAOA,CAAC,EAAEmG,GAClf,CAACtG,EAAEC,EAAEE,IAAI,CAAC,IAAIC,EAAE,CAAC,EAAE,OAAAJ,EAAEA
,EAAE,WAAWI,EAAED,CAAC,EAAEC,EAAE,SAAS8B,GAAEjC,IAAI,IAAI,CAAC,EAAEuF,GAAEpF,CAAC,GAAUJ,CAAC,EAAEuG,GAAGvG,GAAG,CAAC,GAAG,CAACA,EAAE,CAAC,OAAOC,EAAE,CAAC6C,GAAG7C,CAAC,CAAC,CAAC,EAAEuG,GAAGxG,GAAG,CAAC,GAAG,CAAC2B,EAAE,GAAG,CAAC3B,EAAE,EAAE,GAAG,CAAC4B,GAAGA,GAAG5B,EAAE4B,GAAGhC,EAAE,SAASI,CAAC,EAAE2B,EAAE,GAAGd,EAAGb,EAAE,IAAI+D,GAAG/D,CAAC,CAAC,CAAC,OAAOC,EAAE,CAACA,aAAa8D,IAAc9D,GAAV,UAAaY,EAAG,EAAEZ,CAAC,CAAC,CAAC,OAAOA,EAAE,CAACA,aAAa8D,IAAc9D,GAAV,UAAaY,EAAG,EAAEZ,CAAC,CAAC,CAAC,EACrR,SAASwG,IAAI,CAAC,IAAIzG,EAAE0G,EAAEzG,EAAE,CAAC,EAAE,OAAQ,CAACE,EAAEC,CAAC,IAAI,OAAO,QAAQJ,CAAC,EAAEC,EAAEE,CAAC,EAAc,OAAOC,GAAnB,WAAqB,IAAIC,IAAI,CAACsG,GAAG,KAAKxG,CAAC,EAAE,GAAG,CAAC,OAAOC,EAAE,GAAGC,CAAC,CAAC,QAAC,CAAQsB,IAAIgF,GAAG,IAAI,EAAEpG,IAAOqG,KAAJ,GAAWD,GAAG,SAAP,IAAgBC,GAAE,EAAEL,GAAGM,EAAE,EAAe,OAAO,OAApB,KAA4B,OAAO,GAAG,GAAG,CAAC,EAAEzG,EAAE,OAAOH,CAAC,CAAC,IAAI2G,GAAE,EAAErG,GAAE,KAAKuG,GAAG,EAAEH,GAAG,CAAC,EAAEI,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,EAAEC,GAAG,KAAKC,GAAG,CAAC,EAAE,SAASzG,IAAI,CAAC,OAAO,IAAI,QAAQ,CAACV,EAAEC,IAAI,CAACiH,GAAG,CAAC,QAAQlH,EAAE,OAAOC,CAAC,CAAC,CAAC,CAAC,CAC1W,SAASmH,IAAI,CAAC,IAAIpH,EAAEqH,GAAG,KAAK,EAAEpH,EAAED,EAAE,GAAGkC,GAAElC,IAAI,IAAI,CAAC,EAAEC,EAAEiC,GAAElC,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,MAAMA,EAAE0G,GAAG,CAAC,EAAE,IAAIxG,EAAE4G,GAAG9G,CAAC,EAAE,OAASE,IAAT,SAAaA,EAAE8G,KAAKF,GAAG9G,CAAC,EAAEE,EAAE6G,GAAG7G,CAAC,EAAEF,GAAGgC,EAAEjC,EAAE,IAAI,IAAI,CAAC,EAAEG,EAASH,CAAC,CAC7J,SAASsH,GAAGtH,EAAE,CAAC,GAAG,CAAC2B,EAAE,CAAC,GAAOiF,KAAJ,EAAM,CAAC,IAAI3G,EAAE,GAAGE,EAAE,GAAGH,EAAE,CAACI,EAAE,IAAI,CAAC,GAAG,CAACuB,IAAImF,GAAG1G,EAAEH,EAAE,GAAGE,GAAG,CAACyG,GAAE,EAAEL,GAAG,IAAIgB,GAAGhH,EAAC,CAAC,EAAe,OAAO,QAApB,KAA6B,QAAQ,GAAG,IAAI,QAAQ,GAAG,OAAO,EAAEH,EAAE,GAAG,GAAG,CAAC,IAAIC,KAAKqG,EAAEM,GAAG/E,EAAE1B,GAAE,IAAI,IAAI,CAAC,CAAC,CAAC,GAAG,CAAC,OAAOC,EAAE,CAACH,EAAEG,EAAEJ,EAAE,EAAE,CAAC,IAAI,EAAE,GAAG,GAAG,CAACG,GAAE,CAAC,IAAID,EAAE4G,GAAG5G,IAAI4G,GAAG,MAAM9G,EAAEE,EAAE,OAAOA,EAAE,SAASD,CAAC,EAAE,EAAE,GAAG,CAAC,GAAGD,GAAG,CAAC,EAAE,MAAMC,CAAE,CAAC,CAAC,EAAEF,EAAE,GAAGF,IAAI2G,GAAE,EAAErG,GAAE6G,GAAG,EAAe,OAAO,QAApB,KAA6B,QAAQ,GAAG,IAAI,QAAQ,GAAG,MAAM,EAAEb,GAAG,IAAIiB,GAAGjH,EAAC,CAAC,EAAE,MAAUqG,KAAJ,GAAOA,GAAE,EAAEL,GAAGkB,EAAE,EAAEpB,GAAE9F,EAAC,EAAEA,GAAE,KAAK4G,GAAG,QAAQX,EAAE,GAAG1D,GAAG,kBAAkB8D,EAAC,EAAE,EAAE,OAAOE,EAAE,CAAC,CAChf,SAASY,GAAG1H,EAAE,CAAC,OAAOsH,GAAGrH,GAAG,CAACD,EAAE,EAAE,KAAKC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI0H,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG7H,GAAG,CAAC,IAAIC,EAAE2H,GAAG5H,CAAC,EAAE,OAAgBC,IAAT,OAAW2E,GAAE5E,CAAC,EAAEC,CAAC,EAAE6H,GAAG,IAAc,OAAO,YAAjB,SAA4B,WAAW,SAAS,aAAa,EAAE,EAAEC,GAAG/H,GAAG,CAAC,IAAIC,EAAE0H,GAAG,OAAO,OAAAA,GAAG,KAAK3H,CAAC,EAASC,CAAC,EAAE+H,GAAG,CAAChI,EAAEC,IAAI,CAAC,QAAQE,EAAE,MAAMH,CAAC,EAAEI,EAAE,EAAEA,EAAEJ,EAAE,EAAEI,EAAED,EAAEC,CAAC,EAAE+F,GAAGjE,GAAEjC,EAAE,EAAEG,IAAI,IAAI,CAAC,EAAE,aAAaA,CAAC,EAAE,OAAOD,CAAC,EAAE8H,GAAG,CAACjI,EAAEC,IAAI,OAAO,eAAeA,EAAE,OAAO,CAAC,MAAMD,CAAC,CAAC,EAC3W,SAASkI,GAAGlI,EAAE,CAAC,IAAIC,EAAE,SAAS,GAAG,EAAEA,aAAa,UAAU,MAAM,IAAI,UAAU,qCAAqC,OAAOA,CAAC,0BAA0B,EAAE,IAAIE,EAAE8H,GAAGhI,EAAE,MAAM,sBAAsB,UAAU,CAAC,CAAC,EAAE,OAAAE,EAAE,UAAUF,EAAE,UAAUE,EAAE,IAAIA,EAAEH,EAAEC,EAAE,MAAME,EAAEH,CAAC,EAASA,aAAa,OAAOA,EAAEG,CAAC,CACzR,IAAIgI,GAAEnI,GAAOA,EAAE,IAAN,IAAcA,EAAE,MAAN,GAAeA,EAAE,MAAN,GAAWoI,GAAG,CAAC,EAAE,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,GAAG,EAAEC,GAAG,CAAC,EAAE,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,GAAG,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAACvI,EAAEC,IAAI,CAACqI,GAAG,OAAO,EAAE,QAAQnI,EAAEA,EAAE2B,GAAE9B,MAAM,CAAC,GAAG,CAAC,IAAII,EAAOD,GAAL,IAAOC,GAAQD
,GAAL,IAAOF,GAAGG,GAAGH,EAAE,EAAE,EAAE,EAAEqI,GAAG,KAAUnI,GAAL,IAAO+B,GAAEjC,IAAI,IAAI,CAAC,EAAOE,GAAL,IAAOiC,GAAGnC,IAAI,CAAC,EAAOE,GAAL,IAAO8B,EAAEhC,IAAI,IAAI,CAAC,EAAEqC,GAAGrC,IAAI,IAAI,CAAC,CAAC,EAAEA,GAAGG,EAAE,EAAE,CAAC,CAAC,OAAOkI,EAAE,EAAEE,GAAG,CAAC,EAAEC,GAAG,IAAI,CAAC,GAAG,CAACC,GAAG,CAAC,IAAI1I,EAAE,CAAC,KAAK,WAAW,QAAQ,WAAW,KAAK,IAAI,IAAI,IAAI,KAAK,iBAAiB,MAAgB,OAAO,WAAjB,UAA4B,UAAU,WAAW,UAAU,UAAU,CAAC,GAC3f,KAAK,QAAQ,IAAI,GAAG,EAAE,SAAS,EAAEY,GAAI,gBAAgB,EAAEX,EAAE,IAAIA,KAAKuI,GAAYA,GAAGvI,CAAC,IAAb,OAAe,OAAOD,EAAEC,CAAC,EAAED,EAAEC,CAAC,EAAEuI,GAAGvI,CAAC,EAAE,IAAIE,EAAE,CAAC,EAAE,IAAIF,KAAKD,EAAEG,EAAE,KAAK,GAAGF,CAAC,IAAID,EAAEC,CAAC,CAAC,EAAE,EAAEyI,GAAGvI,CAAC,CAAC,OAAOuI,EAAE,EAAEA,GAAGC,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,EAAEC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,EAAEC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,EAAE,SAASC,GAAG9I,EAAE,CAAC,IAAIC,EAAE,MAAMwE,GAAGzE,CAAC,EAAE,CAAC,EAAE,OAAA0E,GAAE1E,EAAEC,EAAE,EAAEA,EAAE,MAAM,EAASA,CAAC,CACzU,SAAS8I,GAAG/I,EAAEC,EAAEE,EAAEC,EAAE,CAAC,SAASC,EAAEuD,EAAEC,EAAEC,EAAE,CAAC,IAAIF,EAAY,OAAOA,GAAjB,SAAmBA,EAAE,SAAS,EAAEA,GAAG,GAAGA,EAAE,OAAOC,GAAGD,EAAEE,EAAE,CAAC,EAAEF,EAAE,OAAOA,CAAC,CAAC,SAAS,EAAEA,EAAEC,EAAE,CAAC,OAAOxD,EAAEuD,EAAEC,EAAE,GAAG,CAAC,CAAC,SAASvD,EAAEsD,EAAEC,EAAE,CAAC,SAASC,EAAEkF,GAAE,CAAC,MAAO,GAAEA,GAAE,GAAG,EAAEA,GAAE,EAAE,CAAC,CAAC,IAAIC,GAAE,OAAKA,GAAEnF,EAAEF,EAAE,YAAY,EAAEC,EAAE,YAAY,CAAC,KAAxC,IAAiDoF,GAAEnF,EAAEF,EAAE,SAAS,EAAEC,EAAE,SAAS,CAAC,KAAlC,IAAuCoF,GAAEnF,EAAEF,EAAE,QAAQ,EAAEC,EAAE,QAAQ,CAAC,GAAUoF,EAAC,CAAC,SAASzI,EAAEoD,EAAE,CAAC,OAAOA,EAAE,OAAO,EAAE,CAAC,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,EAAE,IAAK,GAAE,OAAOA,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EACzf,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,SAASnD,EAAEmD,EAAE,CAAC,IAAIC,EAAED,EAAE,GAAG,IAAIA,EAAE,IAAI,KAAM,IAAI,KAAKA,EAAE,GAAG,KAAK,EAAE,CAAC,EAAG,QAAQ,CAAC,EAAE,EAAEC,GAAG,CAAC,IAAIC,EAAEF,EAAE,SAAS,EAAEqF,IAAGd,GAAEvE,EAAE,YAAY,CAAC,EAAEgF,GAAGC,IAAI/E,CAAC,EAAE,GAAGD,EAAEoF,GAAErF,EAAE,QAAQ,EAAEC,GAAGoF,GAAErF,EAAE,QAAQ,EAAE,EAAEA,EAAE,QAAQ,CAAC,EAAE,GAAGE,EAAEF,EAAE,SAASE,EAAE,CAAC,GAAGF,EAAE,SAAS,CAAC,EAAEA,EAAE,YAAYA,EAAE,YAAY,EAAE,CAAC,OAAO,CAACA,EAAE,QAAQA,EAAE,QAAQ,EAAEC,CAAC,EAAE,KAAK,CAAC,CAAC,OAAAC,EAAE,IAAI,KAAKF,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,EAAEC,EAAErD,EAAE,IAAI,KAAKoD,EAAE,YAAY,EACnf,EAAE,CAAC,CAAC,EAAEE,EAAEtD,EAAEsD,CAAC,EAAS,GAAGxD,EAAEuD,EAAED,CAAC,EAAE,GAAGtD,EAAEwD,EAAEF,CAAC,EAAEA,EAAE,YAAY,EAAE,EAAEA,EAAE,YAAY,EAAEA,EAAE,YAAY,EAAE,CAAC,CAAC5D,KAAK,EAAEC,KAAK,EAAEE,KAAK,EAAEC,KAAK,EAAE,IAAIoD,EAAEtB,GAAE9B,EAAE,KAAK,IAAI,CAAC,EAAEA,EAAE,CAAC,GAAG6B,EAAE7B,IAAI,IAAI,CAAC,EAAE,GAAG6B,EAAE7B,EAAE,IAAI,IAAI,CAAC,EAAE,GAAG6B,EAAE7B,EAAE,IAAI,IAAI,CAAC,EAAE,GAAG6B,EAAE7B,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG6B,EAAE7B,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG6B,EAAE7B,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG6B,EAAE7B,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG6B,EAAE7B,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG6B,EAAE7B,EAAE,KAAK,IAAI,CAAC,EAAE,GAAG6B,EAAE7B,EAAE,KAAK,IAAI,CAAC,EAAE,GAAGoD,EAAED,GAAEC,CAAC,EAAE,EAAE,EAAErD,EAAEoD,GAAEpD,CAAC,EAAEqD,EAAE,CAAC,KAAK,uBAAuB,KAAK,WAAW,KAAK,WAAW,KAAK,KAAK,KAAK,cAAc,KAAK,QAAQ,KAAK,WAAW,KAAK,WAAW,KAAK,WAC7e,MAAM,KAAK,MAAM,KAAK,MAAM,WAAW,MAAM,WAAW,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,K
AAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,IAAI,EAAE,QAAQC,KAAKD,EAAErD,EAAEA,EAAE,QAAQ,IAAI,OAAOsD,EAAE,GAAG,EAAED,EAAEC,CAAC,CAAC,EAAE,IAAIC,EAAE,2DAA2D,MAAM,GAAG,EAAEC,EAAE,wFAAwF,MAAM,GAAG,EAAEH,EAAE,CAAC,KAAKI,GAAGF,EAAEE,EAAE,EAAE,EAAE,UAAU,EAAE,CAAC,EAAE,KAAKA,GAAGF,EAAEE,EAAE,EAAE,EAAE,KAAKA,GACzfD,EAAEC,EAAE,EAAE,EAAE,UAAU,EAAE,CAAC,EAAE,KAAKA,GAAGD,EAAEC,EAAE,EAAE,EAAE,KAAKA,GAAG,GAAGA,EAAE,GAAG,MAAM,IAAI,EAAE,CAAC,EAAE,KAAKA,GAAG,EAAEA,EAAE,GAAG,CAAC,EAAE,KAAKA,GAAGvD,EAAEuD,EAAE,GAAG,EAAE,GAAG,EAAE,KAAKA,GAAGnD,EAAEmD,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,EAAE,KAAKnD,EAAE,KAAKmD,GAAG,EAAEA,EAAE,GAAG,CAAC,EAAE,KAAKA,IAAIA,EAAEA,EAAE,GAAMA,GAAH,EAAKA,EAAE,GAAG,GAAGA,IAAIA,GAAG,IAAW,EAAEA,EAAE,CAAC,GAAG,KAAKA,GAAG,CAAC,QAAQC,EAAE,EAAEC,EAAE,EAAEA,GAAGF,EAAE,GAAG,EAAEC,IAAIsE,GAAEvE,EAAE,GAAG,IAAI,EAAEgF,GAAGC,IAAI/E,GAAG,EAAE,CAAC,OAAO,EAAEF,EAAE,GAAGC,EAAE,CAAC,CAAC,EAAE,KAAKD,GAAG,EAAEA,EAAE,GAAG,EAAE,CAAC,EAAE,KAAKA,GAAG,EAAEA,EAAE,GAAG,CAAC,EAAE,KAAK,IAAI;AAAA,EAAK,KAAKA,GAAG,GAAGA,EAAE,IAAI,GAAGA,EAAE,GAAG,KAAK,KAAK,KAAKA,GAAG,EAAEA,EAAE,GAAG,CAAC,EAAE,KAAK,IAAI,IAAK,KAAKA,GAAGA,EAAE,IAAI,EAAE,KAAKA,GAAG,EAAE,KAAK,OAAOA,EAAE,GAAG,EAAEA,EAAE,IAAI,CAAC,EAAE,CAAC,EAAE,KAAKA,GAAG,CAAC,IAAIC,EACrf,KAAK,OAAOD,EAAE,GAAG,GAAGA,EAAE,GAAG,GAAG,GAAG,CAAC,EAA8B,GAA5B,IAAIA,EAAE,GAAG,IAAIA,EAAE,GAAG,GAAG,GAAGC,IAAOA,EAAMA,GAAJ,KAAQC,GAAGF,EAAE,GAAG,IAAIA,EAAE,IAAI,EAAKE,GAAH,GAASA,GAAH,GAAMqE,GAAEvE,EAAE,EAAE,IAAIC,EAAE,QAAQ,CAACA,EAAE,GAAG,IAAIC,GAAGF,EAAE,GAAG,EAAEA,EAAE,GAAG,GAAG,GAAME,GAAH,GAASA,GAAH,GAAMqE,GAAEvE,EAAE,GAAG,IAAI,CAAC,IAAIC,GAAG,CAAC,OAAO,EAAEA,EAAE,CAAC,CAAC,EAAE,KAAKD,GAAGA,EAAE,GAAG,KAAKA,GAAG,EAAE,KAAK,OAAOA,EAAE,GAAG,GAAGA,EAAE,GAAG,GAAG,GAAG,CAAC,EAAE,CAAC,EAAE,KAAKA,IAAIA,EAAE,GAAG,MAAM,SAAS,EAAE,UAAU,CAAC,EAAE,KAAKA,GAAGA,EAAE,GAAG,KAAK,KAAKA,GAAG,CAACA,EAAEA,EAAE,GAAG,IAAIC,EAAE,GAAGD,EAAE,OAAAA,EAAE,KAAK,IAAIA,CAAC,EAAE,IAAUC,EAAE,IAAI,MAAY,QAAQD,EAAE,GAAG,IAAIA,EAAE,KAAK,MAAM,EAAE,CAAC,EAAE,KAAKA,GAAGA,EAAE,GAAG,KAAK,IAAI,GAAG,EAAEzD,EAAEA,EAAE,QAAQ,MAAM,MAAU,EAAE,IAAIsD,KAAKD,EAAErD,EAAE,SAASsD,CAAC,IAC9ftD,EAAEA,EAAE,QAAQ,IAAI,OAAOsD,EAAE,GAAG,EAAED,EAAEC,CAAC,EAAErD,CAAC,CAAC,GAAoC,OAAjCD,EAAEA,EAAE,QAAQ,QAAQ,GAAG,EAAEsD,EAAEqF,GAAG3I,CAAC,EAAKsD,EAAE,OAAOxD,EAAS,GAAE4B,EAAE,IAAI4B,EAAEzD,IAAI,CAAC,EAASyD,EAAE,OAAO,EAAC,CAAC,QAAQyF,GAAG,MAAM,GAAG,EAAEC,GAAG,EAAE,IAAIA,GAAG,EAAEA,GAAGD,GAAGC,EAAE,EAAE,OAAO,aAAaA,EAAE,EAAExE,GAAGuE,GAAGlE,GAAEpF,EAAE,aAAa,cAAc,KAAK,CAAC,YAAYI,EAAE,CAAC,MAAMA,CAAC,EAAE,KAAK,KAAK,cAAc,CAAC,EAAEJ,EAAE,cAAc,cAAc,KAAK,CAAC,YAAYI,EAAE,CAAC,MAAMA,CAAC,EAAE,KAAK,KAAK,eAAe,CAAC,EAAEqF,GAAE,KAAK,EAAE,EAAE,OAAO,EAAE,KAAK,EAAE,GAAG,EAAE,GAAG,CAAC,EAAEzF,EAAE,oBAAoB,IAAIyF,GAAE,OAAO,EAAE,EAAED,GAAG,OAC9c,IAAIgE,GAAG,CAAC,GAAG,SAASpJ,EAAEC,EAAEE,EAAE,CAAC,OAAOuH,GAAG,SAAS,CAAC,MAAM9H,EAAE,GAAGI,EAAEC,EAAEE,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,SAASH,EAAE,CAAiG,GAAhGA,EAAE,IAAImE,GAAGnE,IAAI,CAAC,EAAK6B,EAAE7B,EAAE,GAAG,KAAK,CAAC,GAAhB,IAAoB6B,EAAE7B,EAAE,GAAG,KAAK,CAAC,EAAE,EAAEiE,MAAMpC,EAAE7B,EAAE,GAAG,KAAK,CAAC,EAAE,EAAEgE,GAAG,KAAKhE,CAAC,EAAEqJ,GAAGrJ,EAAE,EAAE,EAAKsJ,GAAGpH,GAAElC,EAAE,GAAG,IAAI,IAAI,CAAC,CAAC,EAAEA,EAAEkC,GAAElC,EAAE,KAAK,IAAI,CAAC,MAAM,CAAC,IAAIC,EAAEiC,GAAElC,EAAE,GAAG,KAAK,IAAI,CAAC,EAAEA,EAAMC,IAAJ,EAAMA,EAAED,EAAE,EAAE,CAAC,OAAOA,CAAC,EAAE,EAAE,IAAI,CAACuJ,EAAE,EAAE,CAAC,EAAE,IAAIvJ,EAAEgE,GAAG,IAAI,EAAEwF,GAAGxJ,EAAE,EAAE,EAAEkE,GAAE,CAAC,EAAE,EAAE,UAAU,CAAC,OAAOE,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,SAASpE,EAAE,CAAC,OAAOoE,GAAG,CAACpE,IAAI,CAAC,CAAC,CAAC,EAAE,EAAE,SAASA,EAAEC,EAAE,CAAC,OAAOmE,GAAG,CAACpE,IAAI,EAAEC,IAAI,CAAC
,CAAC,CAAC,EAAE,EAAE,SAASD,EAAEC,EAAEE,EAAE,CAAC,OAAOiE,GAAG,CAACpE,IAAI,EAAEC,IAAI,EAAEE,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,IAAIH,EAAEgE,GAAG,IAAI,EAAEhE,GAAG8C,GAAG,uBAAuB,EAC7gB,IAAI7C,EAAED,EAAE,GAAG,MAAG6B,EAAE7B,EAAE,GAAG,KAAK,CAAC,GAAhB,IAAoBgE,GAAG,KAAKhE,CAAC,EAAE6B,EAAE7B,EAAE,GAAG,KAAK,CAAC,EAAE,EAAE6B,EAAE7B,EAAE,GAAG,KAAK,CAAC,EAAE,EAAEiE,MAAMC,GAAEjE,EAAQiE,EAAE,EAAE,EAAE,SAASlE,EAAEC,EAAEE,EAAE,CAACH,KAAK,EAAE,IAAII,EAAE,IAAI+D,GAAGnE,CAAC,EAAE,MAAAkC,GAAE9B,EAAE,GAAG,KAAK,IAAI,CAAC,EAAE,EAAE8B,GAAE9B,EAAE,GAAG,IAAI,IAAI,CAAC,EAAEH,IAAI,EAAEiC,GAAE9B,EAAE,GAAG,IAAI,IAAI,CAAC,EAAED,IAAI,EAAE+D,GAAElE,EAAEiE,KAAWC,EAAE,EAAE,GAAG,IAAID,GAAG,EAAE,SAASjE,EAAE,CAAC,MAAAkE,KAAIlE,IAAI,EAAQkE,EAAE,EAAE,GAAG,UAAU,CAAC,MAAO,EAAC,EAAE,GAAG,UAAU,CAAC,EAAE,GAAG,UAAU,CAAC,EAAE,GAAG,UAAU,CAAC,EAAE,GAAG,UAAU,CAAC,MAAO,EAAC,EAAE,GAAG,UAAU,CAAC,EAAE,GAAG,UAAU,CAAC,EAAE,GAAG,UAAU,CAAC,EAAE,GAAG,UAAU,CAAC,EAAE,GAAG,UAAU,CAAC,EAAE,GAAG,UAAU,CAAC,EAAE,GAAG,UAAU,CAAC,EAAE,GAAG,UAAU,CAAC,EAAE,GAAG,SAASlE,EACzfC,EAAEE,EAAE,CAACF,EAAE2E,GAAE3E,IAAI,CAAC,EAAEiF,GAAElF,IAAI,EAAE,CAAC,KAAKC,EAAE,aAAaG,GAAGA,EAAE,WAAW,SAASA,EAAEC,EAAE,CAAC,GAAa,OAAOA,GAAjB,UAA8B,OAAOA,GAAjB,SAAmB,MAAaA,IAAP,KAASA,EAAE,QAAQD,EAAE,OAAOC,EAAEA,EAAaD,IAAX,UAAwBA,IAAV,SAA0BA,IAAb,WAAeC,EAAE,SAAS,EAAE,GAAGA,GAAG,IAAI,UAAU,mBAAmBA,CAAC,QAAQ,KAAK,IAAI,EAAE,EAAE,OAAU,OAAOA,GAAjB,WAAqBA,EAAE,OAAOA,CAAC,GAAUA,CAAC,EAAE,eAAe,EAAE,qBAAqB8E,GAAGlF,EAAEE,IAAI,EAAMF,EAAE,QAAQ,GAAG,GAAjB,EAAkB,EAAE,GAAG,IAAI,CAAC,CAAC,EAAE,GAAG,SAASD,EAAEC,EAAEE,EAAEC,EAAE,CAACH,EAAE2E,GAAE3E,IAAI,CAAC,EAAEiF,GAAElF,IAAI,EAAE,CAAC,KAAKC,EAAE,aAAa,SAASI,EAAE,CAAC,MAAM,CAAC,CAACA,CAAC,EAAE,WAAW,SAASA,EACnf,EAAE,CAAC,OAAO,EAAEF,EAAEC,CAAC,EAAE,eAAe,EAAE,qBAAqB,SAASC,EAAE,CAAC,OAAO,KAAK,aAAayB,GAAEzB,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,EAAE,GAAG,SAASL,EAAE,CAAC,OAAOkF,GAAElF,IAAI,EAAE0F,EAAE,CAAC,EAAE,GAAG,SAAS1F,EAAEC,EAAEE,EAAE,CAACF,EAAE2E,GAAE3E,IAAI,CAAC,EAAEiF,GAAElF,IAAI,EAAE,CAAC,KAAKC,EAAE,aAAaG,GAAGA,EAAE,WAAW,CAACA,EAAEC,IAAIA,EAAE,eAAe,EAAE,qBAAqBsF,GAAG1F,EAAEE,IAAI,CAAC,EAAE,GAAG,IAAI,CAAC,CAAC,EAAE,GAAG,SAASH,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAwD,GAAvDL,KAAK,EAAEG,KAAK,EAAEF,EAAE2E,GAAE3E,IAAI,CAAC,EAAOI,IAAL,KAASA,EAAE,YAAYA,EAAEG,GAAGA,EAASJ,IAAJ,EAAM,CAAC,IAAI,EAAE,GAAG,EAAED,EAAEE,EAAEG,GAAGA,GAAG,IAAI,CAAC,CAAC,IAAIF,EAAEL,EAAE,SAAS,UAAU,EAAE,SAASO,EAAEC,EAAE,CAAC,OAAOA,IAAI,CAAC,EAAE,SAASD,EAAEC,EAAE,CAAC,OAAOA,CAAC,EAAEyE,GAAElF,EAAE,CAAC,KAAKC,EACpf,aAAaI,EAAE,WAAWC,EAAE,eAAe,EAAE,qBAAqB6E,GAAGlF,EAAEE,EAAMC,IAAJ,CAAK,EAAE,GAAG,IAAI,CAAC,CAAC,EAAE,EAAE,SAASJ,EAAEC,EAAEE,EAAE,CAAC,SAASC,EAAE,EAAE,CAAC,OAAO,IAAIC,EAAEwB,EAAE,OAAOK,GAAE,EAAE,IAAI,IAAI,CAAC,EAAEA,GAAE,IAAI,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI7B,EAAE,CAAC,UAAU,WAAW,WAAW,YAAY,WAAW,YAAY,aAAa,aAAa,cAAc,cAAc,EAAEJ,CAAC,EAAEE,EAAEyE,GAAEzE,IAAI,CAAC,EAAE+E,GAAElF,IAAI,EAAE,CAAC,KAAKG,EAAE,aAAaC,EAAE,eAAe,EAAE,qBAAqBA,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,GAAG,SAASJ,EAAEC,EAAE,CAACA,EAAE2E,GAAE3E,IAAI,CAAC,EAAE,IAAIE,EAAkBF,IAAhB,cAAkBiF,GAAElF,IAAI,EAAE,CAAC,KAAKC,EAAE,aAAa,SAASG,EAAE,CAAC,IAAIC,EAAE6B,GAAE9B,IACrf,IAAI,CAAC,EAAE,EAAEA,EAAE,EAAE,GAAGD,EAAE,QAAQG,EAAE,EAAEE,EAAE,EAAEA,GAAGH,EAAE,EAAEG,EAAE,CAAC,IAAIC,EAAE,EAAED,EAAE,GAAGA,GAAGH,GAAMyB,GAAErB,IAAI,CAAC,GAAV,EAAY,CAAY,GAAXH,EAAEiD,GAAEjD,EAAEG,EAAEH,CAAC,EAAckD,IAAT,OAAW,IAAIA,EAAElD,OAAOkD,GAAG,OAAO,aAAa,CAAC,EAAEA,GAAGlD,EAAEA,EAAEG,EAAE,CAAC,CAAC,KAAK,CAAY,IAAX+C,EAAE,MAAMnD,CAAC,EAAMG,EAAE,EAAEA,EAAEH,EAAE,EAAEG,EAAEgD,EAAEhD,CAAC,EAAE,OAAO,aAAasB,GAAE,EAAEtB,IAAI,CAAC,CAAC,EAAEgD,EAAEA,EAAE,KAAK,EAAE,CAAC
,CAAC,OAAA6C,GAAEjG,CAAC,EAASoD,CAAC,EAAE,WAAW,SAASpD,EAAEC,EAAE,CAACA,aAAa,cAAcA,EAAE,IAAI,WAAWA,CAAC,GAAG,IAAI,EAAY,OAAOA,GAAjB,SAAmB,GAAG,EAAE,GAAGA,aAAa,YAAYA,aAAa,mBAAmBA,aAAa,WAAW,MAAM,IAAI2E,GAAE,uCAAuC,EAAE,IAAI1E,EAAEH,GAAG,EAAEsE,GAAGpE,CAAC,EACxfA,EAAE,OAAWG,EAAE6G,GAAG,EAAE/G,EAAE,CAAC,EAAEG,EAAED,EAAE,EAAiB,GAAf0B,GAAE1B,IAAI,IAAI,CAAC,EAAEF,EAAKH,GAAG,EAAEuE,GAAErE,EAAEyB,GAAErB,EAAEH,EAAE,CAAC,UAAU,EAAE,IAAI,EAAE,EAAE,EAAEA,EAAE,EAAE,EAAE,CAAC,IAAIkD,EAAEnD,EAAE,WAAW,CAAC,EAAE,GAAG,IAAImD,EAAE,MAAM6C,GAAE5F,CAAC,EAAE,IAAIuE,GAAE,wDAAwD,EAAElD,GAAErB,EAAE,IAAI,CAAC,EAAE+C,CAAC,KAAM,KAAI,EAAE,EAAE,EAAElD,EAAE,EAAE,EAAEwB,GAAErB,EAAE,IAAI,CAAC,EAAEJ,EAAE,CAAC,EAAE,OAAOD,IAAP,MAAUA,EAAE,KAAKiG,GAAE7F,CAAC,EAASA,CAAC,EAAE,eAAe,EAAE,qBAAqBiF,GAAG,GAAGrF,EAAE,CAACiG,GAAEjG,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,SAASJ,EAAEC,EAAEE,EAAE,CAAsB,GAArBF,KAAK,EAAEE,KAAK,EAAEA,EAAEyE,GAAEzE,CAAC,EAASF,IAAJ,EAAO,IAAIG,EAAEyF,GAAOxF,EAAEyF,GAAO,EAAEC,GAAOzF,EAAEE,GAAGwB,GAAGxB,IAAI,IAAI,CAAC,OAAWP,IAAJ,IAAQG,EAAE4F,GAAG3F,EAAE4F,GAAG,EAAEC,GAAG5F,EAAEE,GAAG0B,GAAE1B,IAAI,IAAI,CAAC,GAAG0E,GAAElF,IAAI,EAAE,CAAC,KAAKG,EACnf,aAAaK,GAAG,CAAC,QAAQC,EAAEyB,GAAE1B,IAAI,IAAI,CAAC,EAAEgD,EAAEC,EAAEjD,EAAE,EAAEkD,EAAE,EAAEA,GAAGjD,EAAE,EAAEiD,EAAE,CAAC,IAAIC,EAAEnD,EAAE,EAAEkD,EAAEzD,GAAKyD,GAAGjD,GAAMH,EAAEqD,CAAC,GAAN,KAAQF,EAAErD,EAAEqD,EAAEE,EAAEF,CAAC,EAAWD,IAAT,OAAWA,EAAEC,GAAGD,GAAG,OAAO,aAAa,CAAC,EAAEA,GAAGC,GAAGA,EAAEE,EAAE1D,EAAC,CAAC,OAAAoG,GAAE7F,CAAC,EAASgD,CAAC,EAAE,WAAW,CAAChD,EAAEC,IAAI,CAAC,GAAa,OAAOA,GAAjB,SAAmB,MAAM,IAAIuE,GAAE,6CAA6C7E,CAAC,EAAE,EAAE,IAAIqD,EAAE,EAAE/C,CAAC,EAAEgD,EAAE4D,GAAG,EAAE7D,EAAEvD,CAAC,EAAE,OAAAiC,GAAEuB,IAAI,IAAI,CAAC,EAAED,EAAEvD,EAAEI,EAAEI,EAAEgD,EAAE,EAAED,EAAEvD,CAAC,EAASO,IAAP,MAAUA,EAAE,KAAK6F,GAAE5C,CAAC,EAASA,CAAC,EAAE,eAAe,EAAE,qBAAqBgC,GAAG,GAAGjF,EAAE,CAAC6F,GAAE7F,CAAC,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,SAASR,EAAEC,EAAE,CAACA,EAAE2E,GAAE3E,IAAI,CAAC,EAAEiF,GAAElF,IAAI,EAAE,CAAC,GAAG,GAAG,KAAKC,EAAE,eAAe,EAAE,aAAa,IACjf,CAAC,EAAE,WAAW,IAAI,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,IAAI,EAAE,GAAG,SAASD,EAAEC,EAAEE,EAAE,CAAC,OAAAF,KAAK,EAAEE,KAAK,EAAEH,EAAEuF,GAAEvF,IAAI,CAAC,EAAEC,EAAEkG,GAAGlG,EAAE,WAAW,EAASqG,GAAGrG,EAAEE,EAAEH,CAAC,CAAC,EAAE,GAAG,SAASA,EAAE,CAAC,OAAAA,KAAK,EAAS0H,GAAG,KAAK1H,EAAEuF,GAAEvF,CAAC,EAASA,EAAE,KAAKwF,EAAC,EAAE,CAAC,EAAE,GAAG,SAASxF,EAAEC,EAAEE,EAAEC,EAAE,CAAC,OAAAD,KAAK,EAAEC,KAAK,EAAEJ,EAAE2H,GAAG3H,IAAI,CAAC,EAAEC,EAAEsF,GAAEtF,IAAI,CAAC,EAASD,EAAE,KAAKC,EAAEE,EAAEC,CAAC,CAAC,EAAE,GAAG,SAASJ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,OAAAF,KAAK,EAAEC,KAAK,EAAEC,KAAK,EAAEL,EAAE2H,GAAG3H,IAAI,CAAC,EAAEC,EAAEsF,GAAEtF,IAAI,CAAC,EAAEE,EAAE0H,GAAG1H,CAAC,EAASH,EAAEC,EAAEA,EAAEE,CAAC,EAAEC,EAAEC,CAAC,CAAC,EAAE,GAAGiF,GAAG,GAAG,SAAStF,EAAEC,EAAE,CAAC,OAAAA,KAAK,EAAED,EAAEuF,GAAEvF,IAAI,CAAC,EAAEC,EAAEsF,GAAEtF,CAAC,EAASD,GAAGC,CAAC,EAAE,GAAG,SAASD,EAAE,CAAQ,OAAPA,KAAK,EAASA,IAAJ,EAAawF,GAAEsC,GAAG,CAAC,GAAE9H,EAAE6H,GAAG7H,CAAC,EAASwF,GAAEsC,GAAG,EAAE9H,CAAC,CAAC,EAAC,EAAE,GAAG,SAASA,EACtfC,EAAEE,EAAE,CAACF,EAAE+H,GAAGhI,EAAEC,IAAI,CAAC,EAAE,IAAIG,EAAEH,EAAE,MAAM,EAAED,IAAI,IAAIK,EAAE;AAAA,EAAwD,EAAE,EAAEC,EAAE,CAAC,EAAMH,IAAJ,GAAOG,EAAE,KAAK,KAAK,EAAE,QAAQE,EAAE,CAAC,SAAS,EAAEC,EAAE,CAACL,CAAC,EAAEoD,EAAE,EAAEA,EAAExD,EAAE,EAAEwD,EAAElD,EAAE,KAAK,MAAMkD,CAAC,EAAEhD,EAAE,KAAK,UAAUgD,CAAC,EAAE/C,EAAE,KAAKR,EAAEuD,CAAC,CAAC,EAAEnD,GAAG,YAAYmD,CAAC,aAAaA,CAAC,6BAA6B,EAAE,IAAI,EAAE,EAAE;AAAA,EAAO,GAAGvD,EAAEuD,CAAC,EAAE,eAAe,OAAAnD,GAAG,cAAkBF,IAAJ,EAAM,WAAW,WAAW,IAAIG,EAAE,KAAK,IAAI,CAAC;AAAA,EAAOF,EAAE,KAAKI,EAAE,KAAK,mBAAmB,EAAEC,EAAE,KAAK6F,EAAE,EAAEj
G,GAAG;AAAA,GAA8DG,EAAE,KAAKH,EACzf;AAAA,CAAM,EAAEL,EAAEkI,GAAG1H,CAAC,EAAE,GAAGC,CAAC,EAAEN,EAAE,iBAAiBF,EAAE,IAAIwD,GAAGA,EAAE,IAAI,EAAE,KAAK,IAAI,CAAC,QAAQrD,EAAE,IAAI,IAAW2H,GAAGE,GAAG9H,EAAEH,CAAC,CAAC,CAAC,EAAE,GAAG,SAASA,EAAEC,EAAE,CAAC,OAAAA,KAAK,EAAED,EAAEuF,GAAEvF,IAAI,CAAC,EAAEC,EAAEsF,GAAEtF,CAAC,EAASuF,GAAExF,EAAEC,CAAC,CAAC,CAAC,EAAE,GAAG,SAASD,EAAE,CAACA,KAAK,EAAE,EAAEA,IAAIqF,GAAErF,EAAE,CAAC,GAAG,EAAE,EAAE,GAAG,UAAU,CAAC,OAAOwF,GAAE,CAAC,CAAC,CAAC,EAAE,GAAG,SAASxF,EAAE,CAACA,EAAEuF,GAAEvF,IAAI,CAAC,EAAE,QAAQC,EAAE,MAAMD,EAAE,MAAM,EAAEG,EAAE,EAAEA,EAAEH,EAAE,OAAOG,IAAIF,EAAEE,CAAC,EAAEH,EAAEG,CAAC,EAAE,OAAOqF,GAAEvF,CAAC,CAAC,EAAE,EAAE,SAASD,EAAE,CAAC,OAAOwF,GAAEqC,GAAG7H,IAAI,CAAC,CAAC,CAAC,EAAE,GAAG,UAAU,CAAC,OAAOwF,GAAE,CAAC,CAAC,CAAC,EAAE,GAAG,SAASxF,EAAE,CAACA,KAAK,EAAE,QAAQC,EAAEsF,GAAEvF,CAAC,EAAEC,EAAE,QAAQ,CAAC,IAAIE,EAAEF,EAAE,IAAI,EAAEA,EAAE,IAAI,EAAEE,CAAC,CAAC,CAACmF,GAAGtF,CAAC,CAAC,EAAE,GAAG,SAASA,EAAEC,EAAEE,EAAE,CAACF,KAAK,EAAEE,KAAK,EACpfH,EAAEuF,GAAEvF,IAAI,CAAC,EAAEC,EAAEsF,GAAEtF,CAAC,EAAEE,EAAEoF,GAAEpF,CAAC,EAAEH,EAAEC,CAAC,EAAEE,CAAC,EAAE,GAAG,SAASH,EAAEC,EAAE,CAAC,OAAAA,KAAK,EAAED,EAAEmG,GAAGnG,IAAI,EAAE,mBAAmB,EAAEA,EAAEA,EAAE,qBAAqBC,CAAC,EAASuF,GAAExF,CAAC,CAAC,EAAE,GAAG,SAASA,EAAEC,EAAE,CAACD,EAAE,kBAAkBA,GAAG,iBAAiBA,EAAE,IAAI,OAAOA,CAAC,EAAEC,KAAK,EAAED,EAAE,IAAI,KAAK,IAAIA,CAAC,EAAEiC,EAAEhC,IAAI,IAAI,CAAC,EAAED,EAAE,cAAc,EAAEiC,EAAEhC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,cAAc,EAAEiC,EAAEhC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,YAAY,EAAEiC,EAAEhC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,WAAW,EAAEiC,EAAEhC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,YAAY,EAAEiC,EAAEhC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,eAAe,EAAE,KAAKiC,EAAEhC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,UAAU,EAAEiC,EAAEhC,EAAE,KAAK,IAAI,CAAC,GAAGD,EAAE,QAAQ,EAAE,KAAK,IAAIA,EAAE,eAAe,EAC/f,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,GAAG,MAAM,CAAC,EAAE,GAAG,SAASA,EAAEC,EAAE,CAACD,EAAE,kBAAkBA,GAAG,iBAAiBA,EAAE,IAAI,OAAOA,CAAC,EAAEC,KAAK,EAAED,EAAE,IAAI,KAAK,IAAIA,CAAC,EAAEiC,EAAEhC,IAAI,IAAI,CAAC,EAAED,EAAE,WAAW,EAAEiC,EAAEhC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,WAAW,EAAEiC,EAAEhC,EAAE,IAAI,IAAI,CAAC,EAAED,EAAE,SAAS,EAAEiC,EAAEhC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,QAAQ,EAAEiC,EAAEhC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,SAAS,EAAEiC,EAAEhC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,YAAY,EAAE,KAAKiC,EAAEhC,EAAE,KAAK,IAAI,CAAC,EAAED,EAAE,OAAO,EAAEiC,EAAEhC,EAAE,KAAK,IAAI,CAAC,GAAGkI,GAAEnI,EAAE,YAAY,CAAC,EAAEoI,GAAGC,IAAIrI,EAAE,SAAS,CAAC,EAAEA,EAAE,QAAQ,EAAE,EAAE,EAAEiC,EAAEhC,EAAE,KAAK,IAAI,CAAC,EAAE,EAAE,GAAGD,EAAE,kBAAkB,GAAG,IAAIG,EAAG,IAAI,KAAKH,EAAE,YAAY,EAAE,EAAE,CAAC,EAAG,kBAAkB,EACrfI,EAAG,IAAI,KAAKJ,EAAE,YAAY,EAAE,EAAE,CAAC,EAAG,kBAAkB,EAAEiC,EAAEhC,EAAE,KAAK,IAAI,CAAC,GAAGE,GAAGC,GAAGJ,EAAE,kBAAkB,GAAG,KAAK,IAAII,EAAED,CAAC,GAAG,CAAC,EAAE,GAAG,SAASH,EAAE,CAACA,KAAK,EAAE,IAAIC,EAAE,IAAI,KAAKgC,EAAEjC,EAAE,KAAK,IAAI,CAAC,EAAE,KAAKiC,EAAEjC,EAAE,KAAK,IAAI,CAAC,EAAEiC,EAAEjC,EAAE,KAAK,IAAI,CAAC,EAAEiC,EAAEjC,EAAE,IAAI,IAAI,CAAC,EAAEiC,EAAEjC,EAAE,IAAI,IAAI,CAAC,EAAEiC,EAAEjC,IAAI,IAAI,CAAC,EAAE,CAAC,EAAEG,EAAE8B,EAAEjC,EAAE,KAAK,IAAI,CAAC,EAAEI,EAAEH,EAAE,kBAAkB,EAAEI,EAAG,IAAI,KAAKJ,EAAE,YAAY,EAAE,EAAE,CAAC,EAAG,kBAAkB,EAAE,EAAG,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAG,kBAAkB,EAAEK,EAAE,KAAK,IAAI,EAAED,CAAC,EAAE,SAAEF,EAAE8B,EAAEjC,EAAE,KAAK,IAAI,CAAC,EAAE,EAAOK,GAAG,GAAGC,GAAGF,GAAG,EAAED,IAAIG,GAAGF,KAAKC,EAAE,KAAK,IAAI,EAAEA,CAAC,EAAEJ,EAAE,QAAQA,EAAE,QAAQ,EAC7f,MAAM,EAAEE,EAAEG,EAAED,GAAGD,EAAE,GAAG6B,EAAEjC,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,OAAO,EAAEgC,EAAEjC,EAAE,KAAK,IAAI,CAAC,GAAGmI,GAAElI,EAAE,YAAY,CAAC,EAAEmI,GAAGC,IAAIpI,EAAE,SAAS,CAAC,EAAEA,EAAE,QAAQ,EAAE,EAA
E,EAAEgC,EAAEjC,IAAI,IAAI,CAAC,EAAEC,EAAE,WAAW,EAAEgC,EAAEjC,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,WAAW,EAAEgC,EAAEjC,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,SAAS,EAAEgC,EAAEjC,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,QAAQ,EAAEgC,EAAEjC,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,SAAS,EAAEgC,EAAEjC,EAAE,KAAK,IAAI,CAAC,EAAEC,EAAE,QAAQ,EAAED,EAAEC,EAAE,QAAQ,EAAS,OAAO,MAAMD,CAAC,EAAE,GAAGA,EAAE,GAAG,CAAC,EAAE,GAAG,UAAU,CAAC,MAAM,GAAG,EAAE,GAAG,UAAU,CAAC,EAAE,GAAG,SAASA,EAAEC,EAAEE,EAAEC,EAAE,CAACD,KAAK,EAAEC,KAAK,EAAE,IAAIC,EAAG,IAAI,OAAM,YAAY,EAAE,EAAE,IAAI,KAAKA,EAAE,EAAE,CAAC,EAAEC,EAAE,IAAI,KAAKD,EAAE,EAAE,CAAC,EAAEA,EAAE,EAAE,kBAAkB,EACxf,IAAIG,EAAEF,EAAE,kBAAkB,EAAE4B,GAAElC,IAAI,IAAI,IAAI,CAAC,EAAE,GAAG,KAAK,IAAIK,EAAEG,CAAC,EAAEyB,EAAEhC,IAAI,IAAI,IAAI,CAAC,EAAE,EAAOI,GAAGG,GAAGR,EAAES,GAAGA,EAAE,mBAAmB,OAAO,CAAC,OAAO,GAAG,aAAa,OAAO,CAAC,EAAE,MAAM,GAAG,EAAE,CAAC,EAAE,EAAET,EAAE,CAAC,EAAEM,EAAEN,EAAEM,CAAC,EAAEE,EAAEH,GAAGqE,GAAE,EAAE5C,GAAE3B,EAAE,EAAE,EAAEuE,GAAEpE,EAAEwB,GAAE1B,EAAE,EAAE,IAAIsE,GAAE,EAAE5C,GAAE1B,EAAE,EAAE,EAAEsE,GAAEpE,EAAEwB,GAAE3B,EAAE,EAAE,EAAE,EAAE,GAAG,IAAI,CAAC2C,GAAG,EAAE,CAAC,EAAE,EAAE,SAAS9C,EAAEC,EAAEE,EAAE,CAAC,OAAAH,KAAK,EAAEC,EAAEsI,GAAGtI,IAAI,EAAEE,IAAI,CAAC,EAASmD,GAAGtD,CAAC,EAAE,GAAGC,CAAC,CAAC,EAAE,GAAG,SAASD,EAAEC,EAAEE,EAAE,CAAC,OAAAH,KAAK,EAAEC,EAAEsI,GAAGtI,IAAI,EAAEE,IAAI,CAAC,EAASmD,GAAGtD,CAAC,EAAE,GAAGC,CAAC,CAAC,EAAE,GAAG,IAAI,KAAK,IAAI,EAAE,GAAG,UAAU,CAAC,MAAO,WAAU,EAAE,GAAG,IAAI,YAAY,IAAI,EAAE,GAAG,SAASD,EAAE,CAACA,KAAK,EAAE,IAAIC,EAAE6B,GAAE,OAAO,GAAG,WACvf9B,EAAE,MAAM,GAAG,QAAQG,EAAE,EAAE,GAAGA,EAAEA,GAAG,EAAE,CAAC,IAAIC,EAAEH,GAAG,EAAE,GAAGE,GAAGC,EAAE,KAAK,IAAIA,EAAEJ,EAAE,SAAS,EAAE,IAAIK,EAAE,KAAKD,EAAE,KAAK,IAAIJ,EAAEI,CAAC,EAAEJ,EAAE,CAACK,GAAGA,EAAE,IAAI,KAAKA,EAAE,WAAWD,GAAG,MAAMA,EAAE,OAAO,KAAK,EAAEsB,EAAG,OAAO,WAAW,OAAO,MAAM,GAAG,CAACA,EAAG,KAAKrB,CAAC,EAAEkC,GAAG,EAAE,IAAI,EAAE,EAAE,MAAMvC,CAAC,MAAS,CAAC,CAAC,EAAE,MAAM,CAAC,GAAG,EAAE,MAAM,EAAE,CAAC,MAAM,EAAE,EAAE,GAAG,SAASA,EAAEC,EAAE,CAACD,KAAK,EAAEC,KAAK,EAAE,IAAIE,EAAE,EAAE,OAAAsI,GAAG,EAAE,QAAQ,CAACrI,EAAEC,IAAI,CAAC,IAAI,EAAEJ,EAAEE,EAAuB,IAArBE,EAAE6B,GAAElC,EAAE,EAAEK,IAAI,IAAI,CAAC,EAAE,EAAM,EAAE,EAAE,EAAED,EAAE,OAAO,EAAE,EAAEyB,EAAExB,MAAM,CAAC,EAAED,EAAE,WAAW,CAAC,EAAEyB,EAAExB,IAAI,CAAC,EAAE,EAAEF,GAAGC,EAAE,OAAO,CAAC,CAAC,EAAS,CAAC,EAAE,GAAG,SAASJ,EAAEC,EAAE,CAACD,KAAK,EAAEC,KAAK,EAAE,IAAIE,EAAEsI,GAAG,EAAEvG,GAAElC,IAAI,IAClf,CAAC,EAAEG,EAAE,OAAO,IAAIC,EAAE,EAAE,OAAAD,EAAE,QAAQE,GAAGD,GAAGC,EAAE,OAAO,CAAC,EAAE6B,GAAEjC,IAAI,IAAI,CAAC,EAAEG,EAAS,CAAC,EAAE,GAAG,IAAI,GAAG,GAAG,UAAU,CAAC,MAAO,GAAE,EAAE,GAAG,UAAU,CAAC,MAAO,GAAE,EAAE,GAAG,SAASJ,EAAEC,EAAEE,EAAEC,EAAE,CAACH,KAAK,EAAEE,KAAK,EAAEC,KAAK,EAAE,QAAQC,EAAE,EAAE,EAAE,EAAE,EAAEF,EAAE,IAAI,CAAC,IAAIG,EAAE4B,GAAEjC,IAAI,IAAI,CAAC,EAAEO,EAAE0B,GAAEjC,EAAE,IAAI,IAAI,CAAC,EAAEA,GAAG,EAAE,QAAQQ,EAAE,EAAEA,EAAED,EAAEC,IAAI,CAAC,IAAI+C,EAAE1B,GAAExB,EAAEG,IAAI,CAAC,EAAEgD,EAAEkF,GAAG3I,CAAC,EAAMwD,IAAJ,GAAYA,IAAL,KAAaxD,IAAJ,EAAMwB,EAAGC,GAAG+C,GAAGf,EAAE,CAAC,CAAC,EAAEA,EAAE,OAAO,GAAGA,EAAE,KAAKD,CAAC,CAAC,CAACnD,GAAGG,CAAC,CAAC,OAAA0B,GAAE9B,IAAI,IAAI,CAAC,EAAEC,EAAS,CAAC,EAAE,GAAGoJ,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GACnf,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,E
AAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAClf,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAEC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,GAAGC,GAAG,EAAE,SAASpU,EAAE,CAAC,OAAOA,IAAI,CAAC,EAAE,GAAG+I,GAAG,GAAG,SAAS/I,EAAEC,EAAEE,EAAEC,EAAE,CAAC,OAAO2I,GAAG/I,IAAI,EAAEC,IAAI,EAAEE,IAAI,EAAEC,IAAI,CAAC,CAAC,CAAC,EAAEsG,EAAE,UAAU,CAAC,SAAS1G,EAAEG,EAAE,CAAC,OAAAuG,EAAEvG,EAAE,QAAQuG,EAAED,GAAG,EACnfC,EAAE2N,GAAG,EAAE3S,EAAGgF,EAAE,GAAGnE,GAAG,EAAEE,GAAG,QAAQiE,EAAE,EAAE,EAAE/D,KAAQA,IAAH,IAAeC,KAAP,OAAY,cAAcA,EAAE,EAAEA,GAAG,MAAMC,KAAK1C,EAAE0C,GAAGA,GAAG,KAAK1C,EAAE,IAAWuG,CAAC,CAAC,IAAIzG,EAAE,CAAC,EAAEmJ,EAAE,EAAO,GAALzG,KAAQ/C,EAAE,gBAAgB,GAAG,CAAC,OAAOA,EAAE,gBAAgBK,EAAED,CAAC,CAAC,OAAOG,EAAE,CAACsB,EAAE,sDAAsDtB,CAAC,EAAE,EAAEL,EAAGK,CAAC,CAAC,CAAC,OAAAkD,GAAGpD,EAAE,SAASE,EAAE,CAACH,EAAEG,EAAE,QAAQ,CAAC,CAAC,EAAE,MAAML,CAAE,EAAQ,CAAC,CAAC,EAAE,EAAEsG,GAAGpG,IAAIoG,GAAGM,EAAE,IAAI1G,CAAC,EAAEJ,EAAE,SAAS,CAACI,EAAEC,KAAKL,EAAE,SAAS8G,EAAE,IAAI1G,EAAEC,CAAC,EAAEL,EAAE,iBAAiB,CAACI,EAAEC,KAAKL,EAAE,iBAAiB8G,EAAE,IAAI1G,EAAEC,CAAC,EACzbL,EAAE,yBAAyB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,KAAK5D,EAAE,yBAAyB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,EAAE5D,EAAE,4BAA4B,CAACI,EAAEC,KAAKL,EAAE,4BAA4B8G,EAAE,IAAI1G,EAAEC,CAAC,EAAEL,EAAE,6BAA6B,CAACI,EAAEC,EAAEE,KAAKP,EAAE,6BAA6B8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEP,EAAE,0BAA0B,CAACI,EAAEC,EAAEE,KAAKP,EAAE,0BAA0B8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEP,EAAE,0BAA0BI,IAAIJ,EAAE,0BAA0B8G,EAAE,IAAI1G,CAAC,EAAEJ,EAAE,kBAAkB,CAACI,EAAEC,EAAEE,KAAKP,EAAE,kBAAkB8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAC9dP,EAAE,mBAAmBI,IAAIJ,EAAE,mBAAmB8G,EAAE,IAAI1G,CAAC,EAAEJ,EAAE,wBAAwB,CAACI,EAAEC,EAAEE,KAAKP,EAAE,wBAAwB8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEP,EAAE,iBAAiB,CAACI,EAAEC,KAAKL,EAAE,iBAAiB8G,EAAE,IAAI1G,EAAEC,CAAC,EAAEL,EAAE,kBAAkB,CAACI,EAAEC,KAAKL,EAAE,kBAAkB8G,EAAE,IAAI1G,EAAEC,CAAC,EAAEL,EAAE,SAASI,IAAIJ,EAAE,SAAS8G,EAAE,IAAI1G,CAAC,EAAEJ,EAAE,iBAAiB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAKT,EAAE,iBAAiB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAET,EAAE,kBAAkB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAKT,EAAE,kBAAkB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAET,EAAE,kBAAkBI,IAAIJ,EAAE,kBAAkB8G,EAAE,IAAI1G,CAAC,EAC5dJ,EAAE,qBAAqB,CAACI,EAAEC,EAAEE,EAAEC,KAAKR,EAAE,qBAAqB8G,EAAE,IAA
I1G,EAAEC,EAAEE,EAAEC,CAAC,EAAER,EAAE,sBAAsB,CAACI,EAAEC,EAAEE,KAAKP,EAAE,sBAAsB8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEP,EAAE,sBAAsBI,IAAIJ,EAAE,sBAAsB8G,EAAE,IAAI1G,CAAC,EAAEJ,EAAE,kBAAkBI,IAAIJ,EAAE,kBAAkB8G,EAAE,IAAI1G,CAAC,EAAEJ,EAAE,cAAc,CAACI,EAAEC,EAAEE,KAAKP,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEP,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,KAAKR,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAER,EAAE,sBAAsBI,IAAIJ,EAAE,sBAAsB8G,EAAE,IAAI1G,CAAC,EAAEJ,EAAE,mBAAmBI,IAAIJ,EAAE,mBAAmB8G,EAAE,IAAI1G,CAAC,EACxeJ,EAAE,mBAAmB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAKT,EAAE,mBAAmB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAET,EAAE,QAAQ,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,KAAKZ,EAAE,QAAQ8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,EAAEZ,EAAE,iBAAiBI,IAAIJ,EAAE,iBAAiB8G,EAAE,IAAI1G,CAAC,EAAEJ,EAAE,YAAY,CAACI,EAAEC,EAAEE,KAAKP,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEP,EAAE,iBAAiBI,IAAIJ,EAAE,iBAAiB8G,EAAE,IAAI1G,CAAC,EAC/R,IAAIqH,GAAGzH,EAAE,QAAQI,IAAIqH,GAAGzH,EAAE,QAAQ8G,EAAE,IAAI1G,CAAC,EAAEqG,GAAEzG,EAAE,MAAMI,IAAIqG,GAAEzG,EAAE,MAAM8G,EAAE,IAAI1G,CAAC,EAAEuJ,EAAE,CAACvJ,EAAEC,KAAKsJ,EAAE7C,EAAE,IAAI1G,EAAEC,CAAC,EAAEoE,GAAGrE,IAAIqE,GAAGqC,EAAE,IAAI1G,CAAC,EAAEsU,EAAEtU,IAAIsU,EAAE5N,EAAE,IAAI1G,CAAC,EAAEuU,GAAGvU,IAAIuU,GAAG7N,EAAE,IAAI1G,CAAC,EAAEwU,EAAE,KAAKA,EAAE9N,EAAE,IAAI,EAAE8C,GAAGxJ,IAAIwJ,GAAG9C,EAAE,IAAI1G,CAAC,EAAEqJ,GAAGrJ,IAAIqJ,GAAG3C,EAAE,IAAI1G,CAAC,EAAEsE,GAAG,CAACtE,EAAEC,EAAEE,KAAKmE,GAAGoC,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEmJ,GAAGtJ,IAAIsJ,GAAG5C,EAAE,IAAI1G,CAAC,EAAEyU,GAAY7U,EAAE,YAAY,CAACI,EAAEC,EAAEE,KAAKsU,GAAY7U,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEuU,GAAG9U,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAKsU,GAAG9U,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAEuU,GAAY/U,EAAE,YAAY,CAACI,EAAEC,EAAEE,KAAKwU,GAAY/U,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEyU,GAAGhV,EAAE,WAAW,CAACI,EAAEC,KAAK2U,GAAGhV,EAAE,WACnf8G,EAAE,IAAI1G,EAAEC,CAAC,EAAE4U,GAAGjV,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAKuU,GAAGjV,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAEwU,GAAWlV,EAAE,WAAW,CAACI,EAAEC,KAAK6U,GAAWlV,EAAE,WAAW8G,EAAE,IAAI1G,EAAEC,CAAC,EAAE8U,GAAUnV,EAAE,UAAUI,IAAI+U,GAAUnV,EAAE,UAAU8G,EAAE,IAAI1G,CAAC,EAAEgV,GAAGpV,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAK2U,GAAGpV,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAE4U,GAAGrV,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAK6U,GAAGrV,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAE8U,GAAGtV,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAK6U,GAAGtV,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAE8U,GAAGvV,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAK+U,GAAGvV,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAEgV,GAAGxV,EAAE,eAC3e,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAK+U,GAAGxV,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAEgV,GAAGzV,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAKgV,GAAGzV,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAEiV,GAAG1V,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAKgV,GAAG1V,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAEiV,GAAG3V,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAKkV,GAAG3V,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAEmV,GAAG5V,EAAE,qBAAqB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,KAAK8R,GAAG5V,EAAE,qBAAqB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,EAAE+R,GAAG7V,EAAE,kBAAkB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,KAAKgV,GAAG7V,EAAE,kBAAkB8G,EAAE,IAAI1G,EAAEC,EAAEE,
EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,EAAEiV,GAAG9V,EAAE,YAAY,CAACI,EAAEC,EAAEE,KACnfuV,GAAG9V,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEwV,GAAG/V,EAAE,YAAY,CAACI,EAAEC,EAAEE,KAAKwV,GAAG/V,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEyV,GAAGhW,EAAE,YAAY,CAACI,EAAEC,EAAEE,KAAKyV,GAAGhW,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAE0V,GAAGjW,EAAE,YAAY,CAACI,EAAEC,EAAEE,KAAK0V,GAAGjW,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAE2V,EAAGlW,EAAE,UAAUI,IAAI8V,EAAGlW,EAAE,UAAU8G,EAAE,IAAI1G,CAAC,EAAE+V,EAAGnW,EAAE,kBAAkB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,KAAKsV,EAAGnW,EAAE,kBAAkB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,EAAEuV,EAAGpW,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAK0V,EAAGpW,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAE2V,EAAGrW,EAAE,WAAW,CAACI,EAAEC,KAAKgW,EAAGrW,EAAE,WAAW8G,EAAE,IAAI1G,EAAEC,CAAC,EAAEiW,EAAGtW,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAK8V,EACpftW,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAE+V,EAAGvW,EAAE,qBAAqB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,KAAKyS,EAAGvW,EAAE,qBAAqB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,EAAE0S,GAAGxW,EAAE,mBAAmB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,KAAK4S,GAAGxW,EAAE,mBAAmB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,EAAE6S,GAAGzW,EAAE,WAAW,CAACI,EAAEC,KAAKoW,GAAGzW,EAAE,WAAW8G,EAAE,IAAI1G,EAAEC,CAAC,EAAEqW,GAAG1W,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAKiW,GAAG1W,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAEkW,GAAG3W,EAAE,UAAUI,IAAIuW,GAAG3W,EAAE,UAAU8G,EAAE,IAAI1G,CAAC,EAAEwW,GAAG5W,EAAE,YAAY,CAACI,EAAEC,EAAEE,KAAKqW,GAAG5W,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEsW,GAAG7W,EAAE,qBAAqB,CAACI,EAAEC,EAAEE,EAAEC,EACnfC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,KAAK+S,GAAG7W,EAAE,qBAAqB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,EAAEgT,GAAG9W,EAAE,qBAAqB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,KAAKgT,GAAG9W,EAAE,qBAAqB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,EAAEiT,EAAG/W,EAAE,iBAAiB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,KAAKmW,EAAG/W,EAAE,iBAAiB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,EAAEoW,GAAGhX,EAAE,iBAAiB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,KAAKoW,GAAGhX,EAAE,iBAAiB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,EAAEqW,GAAGjX,EAAE,kBAAkB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,KAAKoW,GAAGjX,EAAE,kBAAkB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,EAAEqW,GAAGlX,EAAE,uBAAuB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EACnf+C,EAAEC,EAAEC,EAAEC,EAAEC,KAAKkT,GAAGlX,EAAE,uBAAuB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEmT,GAAGnX,EAAE,kBAAkB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,KAAKsW,GAAGnX,EAAE,kBAAkB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,EAAEuW,GAAGpX,EAAE,oBAAoB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,KAAKuT,GAAGpX,EAAE,oBAAoB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,EAAEwT,GAAGrX,EAAE,kBAAkB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,KAAKwW,GAAGrX,EAAE,kBAAkB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,EAAEyW,GAAGtX,EAAE,WAAW,CAACI,EAAEC,KAAKiX,GAAGtX,EAAE,WAAW8G,EAAE,IAAI1G,EAAEC,CAAC,EAAE
kX,GAAGvX,EAAE,YAAY,CAACI,EAAEC,EAAEE,KAAKgX,GAAGvX,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEiX,GAAGxX,EAAE,WAAW,CAACI,EAAEC,KAAKmX,GACnfxX,EAAE,WAAW8G,EAAE,IAAI1G,EAAEC,CAAC,EAAEoX,GAAGzX,EAAE,YAAY,CAACI,EAAEC,EAAEE,KAAKkX,GAAGzX,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEmX,GAAG1X,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAKkX,GAAG1X,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAEmX,GAAG3X,EAAE,mBAAmB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,KAAK+T,GAAG3X,EAAE,mBAAmB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,EAAEgU,GAAG5X,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAKkX,GAAG5X,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAEmX,GAAG7X,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAKqX,GAAG7X,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAEsX,GAAG9X,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAKoX,GAAG9X,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAEqX,GAAG/X,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EACpfC,KAAKqX,GAAG/X,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAEsX,GAAGhY,EAAE,oBAAoB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,KAAKmU,GAAGhY,EAAE,oBAAoB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,EAAEoU,GAAGjY,EAAE,qBAAqB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,KAAKmU,GAAGjY,EAAE,qBAAqB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,EAAEoU,GAAGlY,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAK0X,GAAGlY,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAE2X,GAAGnY,EAAE,iBAAiB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,KAAKuX,GAAGnY,EAAE,iBAAiB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,EAAEwX,GAAGpY,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAK2X,GAAGpY,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAE4X,GAAGrY,EAAE,qBACve,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,KAAKuU,GAAGrY,EAAE,qBAAqB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,EAAEwU,GAAGtY,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAK8X,GAAGtY,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAE+X,GAAGvY,EAAE,yBAAyB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,KAAKqU,GAAGvY,EAAE,yBAAyB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEsU,GAAGxY,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAK+X,GAAGxY,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAEgY,GAAGzY,EAAE,kBAAkB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,KAAK4X,GAAGzY,EAAE,kBAAkB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,EAAE6X,GAAG1Y,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAKiY,GAAG1Y,EAAE,cACze8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAEkY,GAAG3Y,EAAE,uBAAuB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,KAAK2U,GAAG3Y,EAAE,uBAAuB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAE4U,GAAG5Y,EAAE,WAAW,CAACI,EAAEC,KAAKuY,GAAG5Y,EAAE,WAAW8G,EAAE,IAAI1G,EAAEC,CAAC,EAAEwY,GAAG7Y,EAAE,YAAY,CAACI,EAAEC,EAAEE,KAAKsY,GAAG7Y,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEuY,GAAG9Y,EAAE,mBAAmB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,KAAKkV,GAAG9Y,EAAE,mBAAmB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,EAAEmV,GAAG/Y,EAAE,cAAc,CAACI,EAAEC,EAAE
E,EAAEC,EAAEC,KAAKsY,GAAG/Y,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAEuY,GAAGhZ,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAKsY,GAAGhZ,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAEuY,GAAGjZ,EAAE,iBACtf,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,KAAKqY,GAAGjZ,EAAE,iBAAiB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,EAAEsY,GAAGlZ,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAKyY,GAAGlZ,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAE0Y,GAAGnZ,EAAE,uBAAuB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,KAAKmV,GAAGnZ,EAAE,uBAAuB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEoV,GAAGpZ,EAAE,oBAAoB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,KAAKuV,GAAGpZ,EAAE,oBAAoB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,EAAEwV,GAAGrZ,EAAE,0BAA0B,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,MAAKgQ,GAAGrZ,EAAE,0BAA0B8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,EAAC,EACrfiQ,GAAGtZ,EAAE,sBAAsB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,KAAKuV,GAAGtZ,EAAE,sBAAsB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,EAAEwV,GAAGvZ,EAAE,6BAA6B,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,MAAKF,GAAGvZ,EAAE,6BAA6B8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,EAAC,EAAEC,GAAG1Z,EAAE,mBAAmB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,KAAK8V,GAAG1Z,EAAE,mBAAmB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,EAAE+V,GAAG3Z,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAKiZ,GAAG3Z,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAEkZ,GAAG5Z,EAAE,oBAAoB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EACpfC,KAAK+V,GAAG5Z,EAAE,oBAAoB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,EAAEgW,GAAG7Z,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAKoZ,GAAG7Z,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAEqZ,GAAG9Z,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAKoZ,GAAG9Z,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAEqZ,GAAG/Z,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAKsZ,GAAG/Z,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAEuZ,GAAGha,EAAE,uBAAuB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,KAAKgW,GAAGha,EAAE,uBAAuB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEiW,GAAGja,EAAE,2BAA2B,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,MAAK6Q,GAAGja,EAAE,2BAC1e8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,EAAC,EAAE8Q,GAAGla,EAAE,0BAA0B,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,MAAK6Q,GAAGla,EAAE,0BAA0B8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,EAAC,EAAE8Q,GAAGna,EAAE,yBAAyB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC
,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,KAAKiW,GAAGna,EAAE,yBAAyB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEkW,GAAGpa,EAAE,wBAAwB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,KAAKmW,GAAGpa,EAAE,wBAAwB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEoW,GAAGra,EAAE,8BAA8B,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EACpf+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,MAAKD,GAAGra,EAAE,8BAA8B8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,EAAC,EAAEC,GAAGva,EAAE,4BAA4B,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,MAAKe,GAAGva,EAAE,4BAA4B8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,EAAC,EAAEgB,GAAGxa,EAAE,2BAA2B,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,MAAKoR,GAAGxa,EAAE,2BAA2B8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,EAAC,EAAEqR,GAAGza,EAAE,8BAA8B,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,MAAKG,GAAGza,EAAE,8BACjf8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,EAAC,EAAEI,GAAG1a,EAAE,sBAAsB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,KAAK2W,GAAG1a,EAAE,sBAAsB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,EAAE4W,GAAG3a,EAAE,mBAAmB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,KAAK+W,GAAG3a,EAAE,mBAAmB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,EAAEgX,GAAG5a,EAAE,sBAAsB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,KAAK6W,GAAG5a,EAAE,sBAAsB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,EAAE8W,GAAG7a,EAAE,wBAAwB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,KAAK4W,GAAG7a,EAAE,wBAAwB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EACpfC,EAAEC,EAAEC,EAAEC,CAAC,EAAE6W,GAAG9a,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAKqa,GAAG9a,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAEsa,GAAG/a,EAAE,mBAAmB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,KAAKmX,GAAG/a,EAAE,mBAAmB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,EAAEoX,GAAGhb,EAAE,kBAAkB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,KAAKma,GAAGhb,EAAE,kBAAkB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,EAAEoa,GAAGjb,EAAE,uBAAuB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,KAAKiX,GAAGjb,EAAE,uBAAuB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEkX,GAAGlb,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAKya,GAAGlb,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAE0a,GAAGnb,EAAE,sBAAsB,CAACI,EAAEC,EAAEE,EACpfC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,KAAKoX,GAAGnb,EAAE,sBAAsB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,E
AAEC,CAAC,EAAEqX,GAAGpb,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAK0a,GAAGpb,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAE2a,GAAGrb,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAK4a,GAAGrb,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAE6a,GAAGtb,EAAE,sBAAsB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,KAAKuX,GAAGtb,EAAE,sBAAsB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,EAAEwX,GAAGvb,EAAE,oBAAoB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,KAAK0X,GAAGvb,EAAE,oBAAoB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,EAAE2X,GAAGxb,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAK+a,GAAGxb,EAAE,cACnf8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAEgb,GAAGzb,EAAE,iBAAiB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,KAAK6a,GAAGzb,EAAE,iBAAiB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,EAAE8a,GAAG1b,EAAE,iBAAiB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,KAAK8a,GAAG1b,EAAE,iBAAiB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,EAAE+a,GAAG3b,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAKmb,GAAG3b,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAEob,GAAG5b,EAAE,mBAAmB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,KAAKgY,GAAG5b,EAAE,mBAAmB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,EAAEiY,GAAG7b,EAAE,kBAAkB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,KAAKgb,GAAG7b,EAAE,kBAAkB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,EAAEib,GAAG9b,EAAE,kBAAkB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,KAAKib,GAAG9b,EAAE,kBAChf8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,EAAEkb,GAAG/b,EAAE,mBAAmB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,KAAKmY,GAAG/b,EAAE,mBAAmB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,EAAEoY,GAAGhc,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAKub,GAAGhc,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAEwb,GAAGjc,EAAE,qBAAqB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,KAAKmY,GAAGjc,EAAE,qBAAqB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,EAAEoY,GAAGlc,EAAE,iBAAiB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,KAAKsb,GAAGlc,EAAE,iBAAiB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,EAAEub,GAAGnc,EAAE,uBAAuB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,KAAKmY,GAAGnc,EAAE,uBAAuB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EACnf,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEoY,GAAGpc,EAAE,wBAAwB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,KAAKmY,GAAGpc,EAAE,wBAAwB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEoY,GAAGrc,EAAE,YAAY,CAACI,EAAEC,EAAEE,KAAK8b,GAAGrc,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAE+b,GAAGtc,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAK8b,GAAGtc,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAE+b,GAAGvc,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAK+b,GAAGvc,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAEgc,GAAGxc,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAKgc,GAAGxc,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAEic,GAAGzc,EAAE,mBAAmB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,KAAK6Y,GAAGzc,EAAE,mBAAmB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,E
AAE8Y,GAAG1c,EAAE,mCAClf,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,GAAEqC,GAAEC,GAAEC,GAAGC,GAAGC,MAAML,GAAG1c,EAAE,mCAAmC8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,GAAEqC,GAAEC,GAAEC,GAAGC,GAAGC,EAAE,EAAEC,GAAGhd,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAKuc,GAAGhd,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAEwc,GAAGjd,EAAE,sBAAsB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,KAAKkZ,GAAGjd,EAAE,sBAAsB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,EAAEmZ,GAAGld,EAAE,sBAAsB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,KAAKmZ,GAAGld,EAAE,sBAAsB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,EAAEoZ,GAAGnd,EAAE,8BAC3d,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,MAAK6C,GAAGnd,EAAE,8BAA8B8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,EAAC,EAAE8C,GAAGpd,EAAE,sBAAsB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,KAAKqZ,GAAGpd,EAAE,sBAAsB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,EAAEsZ,GAAGrd,EAAE,uBAAuB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,KAAKqZ,GAAGrd,EAAE,uBAAuB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEsZ,GAAGtd,EAAE,4BAA4B,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,MAAK8D,GAAGtd,EAAE,4BAA4B8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EACpf+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,EAAC,EAAE+D,GAAGvd,EAAE,6BAA6B,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,MAAK8D,GAAGvd,EAAE,6BAA6B8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,EAAC,EAAE+D,GAAGxd,EAAE,yBAAyB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,KAAKsZ,GAAGxd,EAAE,yBAAyB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEuZ,GAAGzd,EAAE,0BAA0B,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,MAAKoU,GAAGzd,EAAE,0BAA0B8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,EAAC,EAAEqU,GAAG1d,EAAE,iBAAiB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,KAAK8c,GAClf1d,EAAE,iBAAiB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,EAAE+c,GAAG3d,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAKkd,GAAG3d,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAEmd,GAAG5d,EAAE,oBAAoB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,KAAK+Z,GAAG5d,EAAE,oBAAoB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,EAAEga,GAAG7d,EAAE,oBAAoB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,KAAKga,GAAG7d,EAAE,oBAAoB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,EAAEia,
GAAG9d,EAAE,6BAA6B,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,MAAKqE,GAAG9d,EAAE,6BAA6B8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,EAAC,EAAEsE,GAAG/d,EAAE,cAC3e,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAKsd,GAAG/d,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAEud,GAAGhe,EAAE,mBAAmB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,KAAKoa,GAAGhe,EAAE,mBAAmB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,EAAEqa,GAAGje,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAKwd,GAAGje,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAEyd,GAAGle,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAKyd,GAAGle,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAE0d,GAAGne,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAKyd,GAAGne,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAE0d,GAAGpe,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAK2d,GAAGpe,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAE4d,GAAGre,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAK4d,GAAGre,EAAE,eAAe8G,EAAE,IAAI1G,EACzfC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAE6d,GAAGte,EAAE,kBAAkB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,KAAKyd,GAAGte,EAAE,kBAAkB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,EAAE0d,GAAGve,EAAE,iBAAiB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,KAAK2d,GAAGve,EAAE,iBAAiB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,EAAE4d,GAAGxe,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAK+d,GAAGxe,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAEge,GAAGze,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAKge,GAAGze,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAEie,GAAG1e,EAAE,qBAAqB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,KAAK4a,GAAG1e,EAAE,qBAAqB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,EAAE6a,GAAG3e,EAAE,iBAAiB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,KAAK+d,GAAG3e,EAAE,iBACnf8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,EAAEge,GAAG5e,EAAE,oBAAoB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,KAAK+a,GAAG5e,EAAE,oBAAoB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,EAAEgb,GAAG7e,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAKme,GAAG7e,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAEoe,GAAG9e,EAAE,sBAAsB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,KAAK+a,GAAG9e,EAAE,sBAAsB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,EAAEgb,GAAG/e,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAKqe,GAAG/e,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAEse,GAAGhf,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAKse,GAAGhf,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAEue,GAAGjf,EAAE,YAC7e,CAACI,EAAEC,EAAEE,KAAK0e,GAAGjf,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAE2e,GAAGlf,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAK0e,GAAGlf,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAE2e,GAAGnf,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAK2e,GAAGnf,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAE4e,GAAGpf,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAK4e,GAAGpf,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAE6e,GAAGrf,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EA
AEC,KAAK4e,GAAGrf,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAE6e,GAAGtf,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAK6e,GAAGtf,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAE8e,GAAGvf,EAAE,eAAe,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,KAAK8e,GAAGvf,EAAE,eAAe8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,EAAE+e,GAAGxf,EAAE,qBAAqB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,KAAK0b,GAAGxf,EAAE,qBACrf8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,EAAE2b,GAAGzf,EAAE,sBAAsB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,KAAK0b,GAAGzf,EAAE,sBAAsB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,EAAE2b,GAAG1f,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAKif,GAAG1f,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAEkf,GAAG3f,EAAE,gBAAgB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,KAAKif,GAAG3f,EAAE,gBAAgB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,EAAEkf,GAAG5f,EAAE,sBAAsB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,KAAK6b,GAAG5f,EAAE,sBAAsB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,EAAE8b,GAAG7f,EAAE,cAAc,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,KAAKof,GAAG7f,EAAE,cAAc8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,EAAEqf,GAAG9f,EAAE,YACze,CAACI,EAAEC,EAAEE,KAAKuf,GAAG9f,EAAE,YAAY8G,EAAE,IAAI1G,EAAEC,EAAEE,CAAC,EAAEwf,GAAG/f,EAAE,wBAAwB,CAACI,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,KAAK8b,GAAG/f,EAAE,wBAAwB8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAE+b,GAAGhgB,EAAE,aAAa,CAACI,EAAEC,EAAEE,EAAEC,KAAKwf,GAAGhgB,EAAE,aAAa8G,EAAE,IAAI1G,EAAEC,EAAEE,EAAEC,CAAC,EAAEoH,GAAGxH,IAAIwH,GAAGd,EAAE,IAAI1G,CAAC,EAAE6G,GAAG,KAAKA,GAAGH,EAAE,IAAI,EAAEa,GAAGvH,IAAIuH,GAAGb,EAAE,IAAI1G,CAAC,EAAEyH,GAAG,KAAKA,GAAGf,EAAE,IAAI,EAAE9G,EAAE,eAAe,QAAQA,EAAE,cAAc,QAAQ,SAAS6K,GAAGzK,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAOE,GAAG1U,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CACnc,SAASgB,GAAGvK,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAAC,OAAOG,GAAY3U,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS4E,GAAGnO,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAACC,GAAYzU,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASa,GAAGpK,EAAEC,EAAE,CAAC,IAAIE,EAAEqU,EAAE,EAAE,GAAG,CAAC,OAAOI,GAAG5U,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAALkU,EAAEnU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEmJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASwE,GAAG/N,EAAEC,EAAE,CAAC,IAAIE,EAAEqU,EAAE,EAAE,GAAG,CAACM,GAAW9U,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAALkU,EAAEnU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEmJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS6C,GAAGpM,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAOS,GAAGjV,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAC9c,SAASsE,GAAG7N,EAAE,CAAC,IAAIC,EAAEuU,EAAE,EAAE,GAAG,CAACO,GAAU/U,CAAC,CAAC,OAAOG,EAAE,CAAM,GAALmU,EAAErU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAEoJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASuB,GAAG9K,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAAC,OAAOK,GAAG7U,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CA
AM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASsB,GAAG7K,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAAC,OAAOQ,GAAGhV,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASoB,GAAG3K,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAOU,GAAGlV,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASmF,GAAG1O,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAACW,GAAGnV,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAC/d,SAASsF,GAAG7O,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAACa,GAAGrV,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASwF,GAAG/O,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAACY,GAAGpV,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS4F,GAAGnP,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAACc,GAAGtV,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASwG,GAAG/P,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE6Q,EAAE,EAAE,GAAG,CAACgB,GAAGxV,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL0Q,EAAE3Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE2F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASc,GAAGrK,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAAC,OAAOkB,GAAG1V,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAClf,SAASe,GAAGtK,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAAC,OAAOmB,GAAG3V,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASqD,GAAG5M,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAAC,OAAOoB,GAAG5V,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASiE,GAAGxN,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAAC,OAAOqB,GAAG7V,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE,OAAAkJ,EAAE,EAAE,CAAC,EAAS,EAAE,CAAC,CAAC,SAASkG,GAAGzP,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAI+C,EAAEgR,EAAE,EAAE,GAAG,CAACuB,EAAG/V,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL6Q,EAAE9Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASY,GAAGnK,EAAE,CAAC,IAAIC,EAAEuU,EAAE,EAAE,GAAG,CAAC,OAAOsB,EAAG9V,CAAC,CAAC,OAAOG,EAAE,CAAM,GAALmU,EAAErU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAEoJ,EAAE,EAAE,CAAC,CAAC,CAAC,CACzd,SAASsJ,GAAG7S,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAACgC,GAAGxW,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS8I,GAAGrS,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAACe,GAAGvV,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS0H,GAAGjR,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG
,CAACwB,EAAGhW,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASqJ,GAAG5S,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE6Q,EAAE,EAAE,GAAG,CAACiC,GAAGzW,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL0Q,EAAE3Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE2F,EAAE,EAAE,CAAC,CAAC,CAAC,CAC3Y,SAASyI,GAAGhS,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE6Q,EAAE,EAAE,GAAG,CAACkC,GAAG1W,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL0Q,EAAE3Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE2F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASwB,GAAG/K,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAE+T,EAAE,EAAE,GAAG,CAAC,OAAOmC,EAAG3W,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL8Q,EAAE7T,CAAC,EAAK+C,IAAIA,EAAE,EAAE,MAAMA,EAAE+F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASiG,GAAGxP,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAE+T,EAAE,EAAE,GAAG,CAACoC,GAAG5W,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL8Q,EAAE7T,CAAC,EAAK+C,IAAIA,EAAE,EAAE,MAAMA,EAAE+F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS4B,GAAGnL,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE6Q,EAAE,EAAE,GAAG,CAAC,OAAO2B,EAAGnW,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL0Q,EAAE3Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE2F,EAAE,EAAE,CAAC,CAAC,CAAC,CAC7c,SAASmG,GAAG1P,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAE,CAAC,IAAIC,EAAE+Q,EAAE,EAAE,GAAG,CAAC4B,GAAGpW,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL4Q,EAAE7Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE6F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS0G,GAAGjQ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE2Q,EAAE,EAAE,GAAG,CAACsC,GAAG9W,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALwQ,EAAEzQ,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEyF,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASuC,GAAG9L,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAE,CAAC,IAAIC,EAAE8Q,EAAE,EAAE,GAAG,CAAC,OAAOwC,GAAGhX,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL2Q,EAAE5Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE4F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASyB,GAAGhL,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAI+C,EAAEgR,EAAE,EAAE,GAAG,CAAC,OAAOuC,GAAG/W,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL6Q,EAAE9Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8F,EAAE,EAAE,CAAC,CAAC,CAAC,CAC7d,SAAS+J,GAAGtT,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAI+C,EAAEgR,EAAE,EAAE,GAAG,CAACyC,GAAGjX,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL6Q,EAAE9Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS6I,GAAGpS,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC0B,EAAGlW,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASQ,GAAG/J,EAAEC,EAAE,CAAC,IAAIE,EAAEqU,EAAE,EAAE,GAAG,CAAC,OAAO0C,GAAGlX,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAALkU,EAAEnU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEmJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASgE,GAAGvN,EAAEC,EAAE,CAAC,IAAIE,EAAEqU,EAAE,EAAE,GAAG,CAAC,OAAOyB,EA
AGjW,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAALkU,EAAEnU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE,OAAAmJ,EAAE,EAAE,CAAC,EAAS,EAAE,CAAC,CAAC,SAASE,GAAGzJ,EAAEC,EAAE,CAAC,IAAIE,EAAEqU,EAAE,EAAE,GAAG,CAAC,OAAO4C,GAAGpX,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAALkU,EAAEnU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEmJ,EAAE,EAAE,CAAC,CAAC,CAAC,CACld,SAAS2J,GAAGlT,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC8C,GAAGtX,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASsI,GAAG7R,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAAC+K,GAAGvf,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASiJ,GAAGxS,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAAC4G,GAAGpb,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS0B,GAAGjL,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAE,CAAC,IAAIC,EAAE+Q,EAAE,EAAE,GAAG,CAAC,OAAO+C,GAAGvX,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL4Q,EAAE7Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE6F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS+I,GAAGtS,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAACgD,GAAGxX,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAClf,SAAS6E,GAAGpO,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAACiD,GAAGzX,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASkC,GAAGzL,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAI+C,EAAEgR,EAAE,EAAE,GAAG,CAAC,OAAOqC,GAAG7W,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL6Q,EAAE9Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS2E,GAAGlO,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAACkD,GAAG1X,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS4I,GAAGnS,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAI+C,EAAEgR,EAAE,EAAE,GAAG,CAACiB,GAAGzV,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL6Q,EAAE9Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8F,EAAE,EAAE,CAAC,CAAC,CAAC,CAC1Z,SAASkF,GAAGzO,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAACmD,GAAG3X,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASyF,GAAGhP,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAE,CAAC,IAAIC,EAAE8Q,EAAE,EAAE,GAAG,CAACoD,GAAG5X,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL2Q,EAAE5Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE4F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASkE,GAAGzN,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAOsD,GAAG9X,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAE,OAAAkJ,EAAE,EAAE,CAAC,EAAS,EAAE,CAAC,CAAC,SAASsK,GAAG7T,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAE+T,EAAE,EAAE,GAAG,CAACuD,GAAG/X,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL8Q,EAAE7T,CAAC,EAAK+C,IAAIA,EAAE,EAAE,MAAMA,EAAE+F,EAAE,EAAE,CAAC,CAAC,
CAAC,CACxa,SAASoF,GAAG3O,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAACwD,GAAGhY,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASuJ,GAAG9S,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC0D,GAAGlY,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASmK,GAAG1T,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAImF,GAAEuL,EAAE,EAAE,GAAG,CAAC2D,GAAGnY,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOkF,GAAE,CAAM,GAALsL,EAAErL,EAAC,EAAKD,KAAIA,GAAE,EAAE,MAAMA,GAAEO,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASqI,GAAG5R,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAAC4D,GAAGpY,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAC3Z,SAASgK,GAAGvT,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAI+C,EAAEgR,EAAE,EAAE,GAAG,CAAC6D,GAAGrY,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL6Q,EAAE9Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS8C,GAAGrM,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAO8D,GAAGtY,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASmD,GAAG1M,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE2Q,EAAE,EAAE,GAAG,CAAC,OAAO+D,GAAGvY,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALwQ,EAAEzQ,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEyF,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASoK,GAAG3T,EAAEC,EAAE,CAAC,IAAIE,EAAEqU,EAAE,EAAE,GAAG,CAACgE,GAAGxY,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAALkU,EAAEnU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEmJ,EAAE,EAAE,CAAC,CAAC,CAAC,CACra,SAASqE,GAAG5N,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAAC,OAAOiE,GAAGzY,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE,OAAAkJ,EAAE,EAAE,CAAC,EAAS,EAAE,CAAC,CAAC,SAASkD,GAAGzM,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAE,CAAC,IAAIC,EAAE+Q,EAAE,EAAE,GAAG,CAAC,OAAOkE,GAAG1Y,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL4Q,EAAE7Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE6F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASsG,GAAG7P,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE2Q,EAAE,EAAE,GAAG,CAACuE,GAAG/Y,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALwQ,EAAEzQ,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEyF,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASuE,GAAG9N,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAACmE,GAAG3Y,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CACvb,SAASuF,GAAG9O,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAACoE,GAAG5Y,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS+E,GAAGtO,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAACsE,GAAG9Y,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,
EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS0F,GAAGjP,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAE+T,EAAE,EAAE,GAAG,CAACqE,GAAG7Y,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL8Q,EAAE7T,CAAC,EAAK+C,IAAIA,EAAE,EAAE,MAAMA,EAAE+F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASkH,GAAGzQ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAE,CAAC,IAAIC,EAAE8Q,EAAE,EAAE,GAAG,CAACwE,GAAGhZ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL2Q,EAAE5Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE4F,EAAE,EAAE,CAAC,CAAC,CAAC,CAC3Z,SAAS6B,GAAGpL,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAE,CAAC,IAAID,GAAEwL,EAAE,EAAE,GAAG,CAAC,OAAOyE,GAAGjZ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,EAAC,CAAC,OAAOmQ,GAAE,CAAM,GAAL9E,EAAEtL,EAAC,EAAKoQ,KAAIA,GAAE,EAAE,MAAMA,GAAE7P,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASyG,GAAGhQ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE4Q,EAAE,EAAE,GAAG,CAAC0E,GAAGlZ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALyQ,EAAE1Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE0F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS2D,GAAGlN,EAAEC,EAAE,CAAC,IAAIE,EAAEqU,EAAE,EAAE,GAAG,CAAC,OAAO6B,GAAGrW,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAALkU,EAAEnU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEmJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASW,GAAGlK,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAOiL,GAAGzf,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CACxc,SAAS8B,GAAGrL,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAE,CAAC,IAAIa,GAAE1F,EAAE,EAAE,GAAG,CAAC,OAAO2E,GAAGnZ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,EAAC,CAAC,OAAOkD,GAAE,CAAM,GAALjI,EAAE4F,EAAC,EAAKqC,KAAIA,GAAE,EAAE,MAAMA,GAAEhT,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS8J,GAAGrT,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAE,CAAC,IAAIC,EAAE+Q,EAAE,EAAE,GAAG,CAAC8E,GAAGtZ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL4Q,EAAE7Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE6F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS4C,GAAGnM,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAAC,OAAO+E,GAAGvZ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASuG,GAAG9P,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAE,CAAC,IAAIC,EAAE8Q,EAAE,EAAE,GAAG,CAACgF,GAAGxZ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL2Q,EAAE5Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE4F,EAAE,EAAE,CAAC,CAAC,CAAC,CAC7e,SAASoD,GAAG3M,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAAC,OAAOiF,GAAGzZ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASgI,GAAGvR,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAACmF,GAAG3Z,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASsH,GAAG7Q,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE2Q,EAAE,EAAE,GAAG,C
AACoF,GAAG5Z,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALwQ,EAAEzQ,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEyF,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS2K,GAAGlU,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAE,CAAC,IAAIoQ,GAAE5E,EAAE,EAAE,GAAG,CAACqF,GAAG7Z,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,EAAC,CAAC,OAAOqQ,GAAE,CAAM,GAAL/E,EAAE8E,EAAC,EAAKC,KAAIA,GAAE,EAAE,MAAMA,GAAE9P,EAAE,EAAE,CAAC,CAAC,CAAC,CACtd,SAAS8F,GAAGrP,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAE,CAAC,IAAID,GAAEwL,EAAE,EAAE,GAAG,CAACsF,GAAG9Z,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,EAAC,CAAC,OAAOmQ,GAAE,CAAM,GAAL9E,EAAEtL,EAAC,EAAKoQ,KAAIA,GAAE,EAAE,MAAMA,GAAE7P,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS6F,GAAGpP,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAImF,GAAEuL,EAAE,EAAE,GAAG,CAACuF,GAAG/Z,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOkF,GAAE,CAAM,GAALsL,EAAErL,EAAC,EAAKD,KAAIA,GAAE,EAAE,MAAMA,GAAEO,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS+F,GAAGtP,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE0Q,EAAE,EAAE,GAAG,CAACwF,GAAGha,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOoF,GAAE,CAAM,GAALqL,EAAExQ,CAAC,EAAKmF,KAAIA,GAAE,EAAE,MAAMA,GAAEM,EAAE,EAAE,CAAC,CAAC,CAAC,CACvZ,SAAS4K,GAAGnU,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,GAAE,CAAC,IAAIqC,GAAE/H,EAAE,EAAE,GAAG,CAACyF,GAAGja,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,EAAC,CAAC,OAAOsC,GAAE,CAAM,GAALlI,EAAEiI,EAAC,EAAKC,KAAIA,GAAE,EAAE,MAAMA,GAAEjT,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS0K,GAAGjU,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAE,CAAC,IAAIC,GAAE7E,EAAE,EAAE,GAAG,CAAC2F,GAAGna,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,EAAC,CAAC,OAAOc,GAAE,CAAM,GAAL5F,EAAE+E,EAAC,EAAKa,KAAIA,GAAE,EAAE,MAAMA,GAAE3Q,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASyK,GAAGhU,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAE,CAAC,IAAIoQ,GAAE5E,EAAE,EAAE,GAAG,CAAC4F,GAAGpa,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,EAAC,CAAC,OAAOqQ,GAAE,CAAM,GAAL/E,EAAE8E,EAAC,EAAKC,KAAIA,GAAE,EAAE,MAAMA,GAAE9P,EAAE,EAAE,CAAC,CAAC,CAAC,CAC/b,SAAS6K,GAAGpU,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,GAAE,CAAC,IAAIqC,GAAE/H,EAAE,EAAE,GAAG,CAAC6F,GAAGra,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,EAAC,CAAC,OAAOsC,GAAE,CAAM,GAALlI,EAAEiI,EAAC,EAAKC,KAAIA,GAAE,EAAE,MAAMA,GAAEjT,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASyJ,GAAGhT,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE4Q,EAAE,EAAE,GAAG,CAAC8F,GAAGta,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALyQ,EAAE1Q,CAAC,EAAKC,IAAIA,EA
AE,EAAE,MAAMA,EAAE0F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS4J,GAAGnT,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAE,CAAC,IAAIC,EAAE+Q,EAAE,EAAE,GAAG,CAAC+F,GAAGva,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL4Q,EAAE7Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE6F,EAAE,EAAE,CAAC,CAAC,CAAC,CACvY,SAASmH,GAAG1Q,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE4Q,EAAE,EAAE,GAAG,CAACgG,GAAGxa,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALyQ,EAAE1Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE0F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASkK,GAAGzT,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE0Q,EAAE,EAAE,GAAG,CAACiG,GAAGza,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOoF,GAAE,CAAM,GAALqL,EAAExQ,CAAC,EAAKmF,KAAIA,GAAE,EAAE,MAAMA,GAAEM,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASwK,GAAG/T,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAE,CAAC,IAAIC,EAAE+Q,EAAE,EAAE,GAAG,CAACmG,GAAG3a,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL4Q,EAAE7Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE6F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASuK,GAAG9T,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAI+C,EAAEgR,EAAE,EAAE,GAAG,CAACoG,GAAG5a,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL6Q,EAAE9Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8F,EAAE,EAAE,CAAC,CAAC,CAAC,CAC3d,SAASO,GAAG9J,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAAC,OAAOkF,GAAG1Z,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASgF,GAAGvO,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAACkG,GAAG1a,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS+D,GAAGtN,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAAC,OAAOkL,GAAG1f,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE,OAAAkJ,EAAE,EAAE,CAAC,EAAS,EAAE,CAAC,CAAC,SAASuH,GAAG9Q,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE2Q,EAAE,EAAE,GAAG,CAACqG,GAAG7a,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALwQ,EAAEzQ,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEyF,EAAE,EAAE,CAAC,CAAC,CAAC,CAC3a,SAAS8H,GAAGrR,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE4Q,EAAE,EAAE,GAAG,CAACuG,GAAG/a,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALyQ,EAAE1Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE0F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASsC,GAAG7L,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAAC,OAAOwG,GAAGhb,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS0C,GAAGjM,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAAC,OAAOyG,GAAGjb,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS2H,GAAGlR,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE4Q,EAAE,EAAE,GAAG,CAAC0G,GAAGlb,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,
EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALyQ,EAAE1Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE0F,EAAE,EAAE,CAAC,CAAC,CAAC,CACzc,SAAS6J,GAAGpT,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAACsG,GAAG9a,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASwI,GAAG/R,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAE,CAAC,IAAIC,EAAE8Q,EAAE,EAAE,GAAG,CAAC2G,GAAGnb,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL2Q,EAAE5Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE4F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASkI,GAAGzR,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAE+T,EAAE,EAAE,GAAG,CAAC6G,GAAGrb,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL8Q,EAAE7T,CAAC,EAAK+C,IAAIA,EAAE,EAAE,MAAMA,EAAE+F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASmJ,GAAG1S,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAE+T,EAAE,EAAE,GAAG,CAAC8G,GAAGtb,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL8Q,EAAE7T,CAAC,EAAK+C,IAAIA,EAAE,EAAE,MAAMA,EAAE+F,EAAE,EAAE,CAAC,CAAC,CAAC,CACna,SAAS4D,GAAGnN,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAO+G,GAAGvb,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS+H,GAAGtR,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAE,CAAC,IAAIC,EAAE+Q,EAAE,EAAE,GAAG,CAACgH,GAAGxb,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL4Q,EAAE7Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE6F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASiK,GAAGxT,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAI+C,EAAEgR,EAAE,EAAE,GAAG,CAACiH,GAAGzb,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL6Q,EAAE9Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS6H,GAAGpR,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAI+C,EAAEgR,EAAE,EAAE,GAAG,CAACkH,GAAG1b,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL6Q,EAAE9Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8F,EAAE,EAAE,CAAC,CAAC,CAAC,CACta,SAASwH,GAAG/Q,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAE,CAAC,IAAIC,EAAE+Q,EAAE,EAAE,GAAG,CAACmH,GAAG3b,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL4Q,EAAE7Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE6F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASwJ,GAAG/S,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAACoH,GAAG5b,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASqG,GAAG5P,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE6Q,EAAE,EAAE,GAAG,CAACqH,GAAG7b,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL0Q,EAAE3Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE2F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASiD,GAAGxM,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAE+T,EAAE,EAAE,GAAG,CAAC,OAAOsH,GAAG9b,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL8Q,EAAE7T,CAAC,EAAK+C,IAAIA,EAAE,EAAE,MAAMA,EAAE+F,EAAE,EAAE,CAAC,CAAC,CAAC,CACtb,SAAS2G,GAAGlQ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE0Q,EAAE,EAAE,GAAG,CAACmL,GAAG3f,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,
EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOoF,GAAE,CAAM,GAALqL,EAAExQ,CAAC,EAAKmF,KAAIA,GAAE,EAAE,MAAMA,GAAEM,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASyH,GAAGhR,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE2Q,EAAE,EAAE,GAAG,CAACuH,GAAG/b,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALwQ,EAAEzQ,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEyF,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASoH,GAAG3Q,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE0Q,EAAE,EAAE,GAAG,CAACwH,GAAGhc,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOoF,GAAE,CAAM,GAALqL,EAAExQ,CAAC,EAAKmF,KAAIA,GAAE,EAAE,MAAMA,GAAEM,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS0E,GAAGjO,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAACyH,GAAGjc,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAC3d,SAASU,GAAGjK,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAO2H,GAAGnc,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASI,GAAG3J,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAO4H,GAAGpc,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASgG,GAAGvP,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAE,CAAC,IAAIC,EAAE+Q,EAAE,EAAE,GAAG,CAAC6H,GAAGrc,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL4Q,EAAE7Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE6F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS4H,GAAGnR,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,GAAEqC,GAAEC,GAAEC,GAAGC,GAAGC,GAAG,CAAC,IAAIkD,GAAGrL,EAAE,EAAE,GAAG,CAAC8H,GAAGtc,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,GAAEqC,GAAEC,GAAEC,GAAGC,GAAGC,EAAE,CAAC,OAAOmD,GAAG,CAAO,GAANxL,EAAEuL,EAAE,EAAKC,KAAKA,GAAG,EAAE,MAAMA,GAAGvW,EAAE,EAAE,CAAC,CAAC,CAAC,CACze,SAASkJ,GAAGzS,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAACoI,GAAG5c,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASgC,GAAGvL,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE4Q,EAAE,EAAE,GAAG,CAAC,OAAOqI,GAAG7c,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALyQ,EAAE1Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE0F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASiF,GAAGxO,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE4Q,EAAE,EAAE,GAAG,CAACsI,GAAG9c,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALyQ,EAAE1Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE0F,EAAE,EAAE,CAAC,CAAC,CAAC,CAC9V,SAASgH,GAAGvQ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,GAAE,CAAC,IAAIqC,GAAE/H,EAAE,EAAE,GAAG,CAACuI,GAAG/c,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAEa,EAAC,CAAC,OAAOsC,GAAE,CAAM,GAALlI,EAAEiI,EAAC,EAAKC,KAAIA,GAAE,EAAE,MAAMA,GAAEjT,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS2F,GAAGlP,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE
E,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE4Q,EAAE,EAAE,GAAG,CAACwI,GAAGhd,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALyQ,EAAE1Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE0F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS0J,GAAGjT,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE2Q,EAAE,EAAE,GAAG,CAACyI,GAAGjd,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALwQ,EAAEzQ,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEyF,EAAE,EAAE,CAAC,CAAC,CAAC,CACvZ,SAAS+G,GAAGtQ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAE,CAAC,IAAIa,GAAE1F,EAAE,EAAE,GAAG,CAAC2I,GAAGnd,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,EAAC,CAAC,OAAOkD,GAAE,CAAM,GAALjI,EAAE4F,EAAC,EAAKqC,KAAIA,GAAE,EAAE,MAAMA,GAAEhT,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS6G,GAAGpQ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAE,CAAC,IAAID,GAAEwL,EAAE,EAAE,GAAG,CAAC6I,GAAGrd,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,EAAC,CAAC,OAAOmQ,GAAE,CAAM,GAAL9E,EAAEtL,EAAC,EAAKoQ,KAAIA,GAAE,EAAE,MAAMA,GAAE7P,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS8G,GAAGrQ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAE,CAAC,IAAIC,GAAE7E,EAAE,EAAE,GAAG,CAAC0I,GAAGld,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,EAAC,CAAC,OAAOc,GAAE,CAAM,GAAL5F,EAAE+E,EAAC,EAAKa,KAAIA,GAAE,EAAE,MAAMA,GAAE3Q,EAAE,EAAE,CAAC,CAAC,CAAC,CACvb,SAASmI,GAAG1R,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAE+T,EAAE,EAAE,GAAG,CAAC8I,GAAGtd,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL8Q,EAAE7T,CAAC,EAAK+C,IAAIA,EAAE,EAAE,MAAMA,EAAE+F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASqH,GAAG5Q,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAE,CAAC,IAAIC,EAAE8Q,EAAE,EAAE,GAAG,CAACgJ,GAAGxd,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL2Q,EAAE5Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE4F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS2B,GAAGlL,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAE,CAAC,IAAIC,EAAE8Q,EAAE,EAAE,GAAG,CAAC,OAAOiJ,GAAGzd,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL2Q,EAAE5Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE4F,EAAE,EAAE,CAAC,CAAC,CAAC,CACtV,SAAS+B,GAAGtL,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,GAAE,CAAC,IAAIa,GAAE1F,EAAE,EAAE,GAAG,CAAC,OAAOkJ,GAAG1d,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEmF,GAAED,GAAEoQ,GAAEC,EAAC,CAAC,OAAOkD,GAAE,CAAM,GAALjI,EAAE4F,EAAC,EAAKqC,KAAIA,GAAE,EAAE,MAAMA,GAAEhT,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASqF,GAAG5O,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAACmJ,GAAG3d,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASoG,GAAG3P,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE6Q,EAAE,EAAE,GAAG,CAACqD,GAAG7X,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL0Q,EAAE3Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE2F,EAA
E,EAAE,CAAC,CAAC,CAAC,CAAC,SAASuI,GAAG9R,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAE,CAAC,IAAIC,EAAE+Q,EAAE,EAAE,GAAG,CAACoJ,GAAG5d,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL4Q,EAAE7Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE6F,EAAE,EAAE,CAAC,CAAC,CAAC,CACle,SAASoI,GAAG3R,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAACqJ,GAAG7d,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASgJ,GAAGvS,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE4Q,EAAE,EAAE,GAAG,CAACgL,GAAGxf,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALyQ,EAAE1Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE0F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASwD,GAAG/M,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAOsJ,GAAG9d,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASmE,GAAG1N,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAOoL,GAAG5f,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAE,OAAAkJ,EAAE,EAAE,CAAC,EAAS,EAAE,CAAC,CACna,SAASiI,GAAGxR,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAACuJ,GAAG/d,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASyD,GAAGhN,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAAC,OAAOwJ,GAAGhe,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS6D,GAAGpN,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAAC,OAAOyJ,GAAGje,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS0I,GAAGjS,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAI+C,EAAEgR,EAAE,EAAE,GAAG,CAAC0J,GAAGle,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL6Q,EAAE9Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8F,EAAE,EAAE,CAAC,CAAC,CAAC,CAC7Z,SAAS8E,GAAGrO,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC0H,GAAGlc,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASwC,GAAG/L,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAE+T,EAAE,EAAE,GAAG,CAAC,OAAO2J,GAAGne,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL8Q,EAAE7T,CAAC,EAAK+C,IAAIA,EAAE,EAAE,MAAMA,EAAE+F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASmB,GAAG1K,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAAC,OAAO4J,GAAGpe,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS+C,GAAGtM,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAAC,OAAO6J,GAAGre,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CACpZ,SAASiC,GAAGxL,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE6Q,EAAE,EAAE,GAAG,CAAC,OAAO8J,GAAGte,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL0Q,EAAE3Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE2F,EAAE,EAAE,CAAC,CAA
C,CAAC,CAAC,SAAS2C,GAAGlM,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAE+T,EAAE,EAAE,GAAG,CAAC,OAAO+J,GAAGve,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,CAAC,CAAC,OAAOgD,EAAE,CAAM,GAAL8Q,EAAE7T,CAAC,EAAK+C,IAAIA,EAAE,EAAE,MAAMA,EAAE+F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASoC,GAAG3L,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAE,CAAC,IAAIC,EAAE8Q,EAAE,EAAE,GAAG,CAAC,OAAOgK,GAAGxe,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL2Q,EAAE5Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE4F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASgD,GAAGvM,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAAC,OAAOiK,GAAGze,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CACnd,SAASmC,GAAG1L,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE4Q,EAAE,EAAE,GAAG,CAAC,OAAOkK,GAAG1e,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALyQ,EAAE1Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE0F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASuD,GAAG9M,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAAC,OAAOmK,GAAG3e,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS0D,GAAGjN,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE,CAAC,IAAIE,EAAEgU,EAAE,EAAE,GAAG,CAAC,OAAOoK,GAAG5e,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL6T,EAAE9T,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE8I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASoE,GAAG3N,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAAC,OAAOqK,GAAG7e,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE,OAAAkJ,EAAE,EAAE,CAAC,EAAS,EAAE,CAAC,CAC7b,SAASsD,GAAG7M,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAOsK,GAAG9e,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASyE,GAAGhO,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAACwK,GAAGhf,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASiB,GAAGxK,EAAEC,EAAEE,EAAEC,EAAE,CAAC,IAAIC,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAOuK,GAAG/e,EAAEC,EAAEE,EAAEC,CAAC,CAAC,OAAO,EAAE,CAAM,GAALkU,EAAEjU,CAAC,EAAK,IAAI,EAAE,EAAE,MAAM,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASqK,GAAG5T,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAACyK,GAAGjf,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASK,GAAG5J,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAO0K,GAAGlf,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CACpd,SAASM,GAAG7J,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAAC,OAAO2K,GAAGnf,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS2I,GAAGlS,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE6Q,EAAE,EAAE,GAAG,CAAC4K,GAAGpf,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL0Q,EAAE3Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE2F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASoJ,GAAG3S,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAE
E,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE4Q,EAAE,EAAE,GAAG,CAAC6K,GAAGrf,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALyQ,EAAE1Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE0F,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASiH,GAAGxQ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAE,CAAC,IAAIC,EAAE6Q,EAAE,EAAE,GAAG,CAACyD,GAAGjY,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,CAAC,CAAC,OAAOE,EAAE,CAAM,GAAL0Q,EAAE3Q,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAE2F,EAAE,EAAE,CAAC,CAAC,CAAC,CACld,SAASyC,GAAGhM,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,IAAI,EAAEmU,EAAE,EAAE,GAAG,CAAC,OAAO8K,GAAGtf,EAAEC,EAAEE,EAAEC,EAAEC,CAAC,CAAC,OAAOC,EAAE,CAAM,GAALgU,EAAE,CAAC,EAAKhU,IAAIA,EAAE,EAAE,MAAMA,EAAEiJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS8D,GAAGrN,EAAE,CAAC,IAAIC,EAAEuU,EAAE,EAAE,GAAG,CAAC,OAAO+B,GAAGvW,CAAC,CAAC,OAAOG,EAAE,CAAM,GAALmU,EAAErU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE,OAAAoJ,EAAE,EAAE,CAAC,EAAS,EAAE,CAAC,CAAC,SAASqC,GAAG5L,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAAC,OAAO8B,GAAGtW,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASqB,GAAG5K,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAE,CAAC,IAAIC,EAAEkU,EAAE,EAAE,GAAG,CAAC,OAAO+I,GAAGvd,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,CAAC,CAAC,OAAOG,EAAE,CAAM,GAAL8T,EAAEhU,CAAC,EAAKE,IAAIA,EAAE,EAAE,MAAMA,EAAE+I,EAAE,EAAE,CAAC,CAAC,CAAC,CAC7Y,SAAS4G,GAAGnQ,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,IAAImF,GAAEuL,EAAE,EAAE,GAAG,CAAC4I,GAAGpd,EAAEC,EAAEE,EAAEC,EAAEC,EAAE,EAAEC,EAAEE,EAAEC,EAAE+C,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,CAAC,OAAOkF,GAAE,CAAM,GAALsL,EAAErL,EAAC,EAAKD,KAAIA,GAAE,EAAE,MAAMA,GAAEO,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASS,GAAGhK,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAAC,OAAO2C,GAAGnX,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAASG,GAAG1J,EAAEC,EAAEE,EAAE,CAAC,IAAIC,EAAEoU,EAAE,EAAE,GAAG,CAAC,OAAO6C,GAAGrX,EAAEC,EAAEE,CAAC,CAAC,OAAOE,EAAE,CAAM,GAALiU,EAAElU,CAAC,EAAKC,IAAIA,EAAE,EAAE,MAAMA,EAAEkJ,EAAE,EAAE,CAAC,CAAC,CAAC,CAAC,SAAS8K,IAAI,CAAC,IAAIrU,EAAE0G,EAAE1G,EAAE,OAAO,OAAO,CAAC,EAAEA,CAAC,EAAE,IAAIC,EAAEE,GAAGC,GAAGD,EAAEC,CAAC,IAAI,EAAE,OAAAJ,EAAE,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,GAAGC,EAAED,EAAE,EAAE,EAAEA,EAAE,IAAIG,GAAG,IAAIA,EAAE,IAAI,GAAGH,EAAE,EAAE,EAASA,CAAC,CAACJ,EAAE,UAAU,IAAI4U,EAAE,EAAE5U,EAAE,aAAaI,GAAGsU,EAAEtU,CAAC,EACpfJ,EAAE,WAAWI,GAAGuU,GAAGvU,CAAC,EAAEJ,EAAE,aAAa2D,GAAE3D,EAAE,aAAa,CAACI,EAAEC,EAAEE,IAAIuE,GAAE1E,EAAE8B,GAAE7B,EAAEE,CAAC,EAAEP,EAAE,gBAAgB6E,GAAG,IAAIsb,GAAGld,GAAG,SAASmd,GAAI,CAACD,IAAIE,GAAG,EAAEF,KAAKld,GAAGmd,EAAG,EAAE,SAASC,IAAI,CAAC,GAAG,EAAE,EAAEtd,IAAI,CAAC,GAAG/C,EAAE,OAAO,IAAgB,OAAOA,EAAE,QAArB,aAA8BA,EAAE,OAAO,CAACA,EAAE,MAAM,GAAGA,EAAE,OAAO,QAAQ,CAAC,IAAII,EAAEJ,EAAE,OAAO,MAAM,EAAE4C,GAAG,QAAQxC,CAAC,CAAC,CAAC,KAAK,EAAEwC,GAAG,QAAQA,GAAG,MAAM,EAAE5C,CAAC,EAAE,GAAG,EAAE,EAAE+C,IAAIod,KAAKA,GAAG,GAAGngB,EAAE,UAAU,GAAG+B,IAAI,CAAC,KAAK,EAAEc,GAAG,QAAQA,GAAG,MAAM,EAAE7C,CAAC,EAAE,IAAIC,EAAGD,CAAC,EAAE,EAAE8C,GAAG,QAAQA,GAAG,MAAM,EAAE9C,CAAC,CAAC,CAAC,CAAC,CAAC,OAAAqgB,GAAG,EAG5algB,CACT,CAEA,GAAG,EACC,OAAOR,IAAY,UAAY,OAAOC,IAAW,SACnDA,GAAO,QAAUC,GACV,OAAO,QAAW,YAAc,OAAO,KAC9C,OAAO,CAAC,EAAG,IAAMA,EAAO,IC7K1B,IAAAygB,GAAAC,GAAA,QCAA,IAAAC,GAAAC,GAAA,QCAA,IAAAC,GAAA,GAAAC,GAAAD,GAAA,UAAAE,KAAA,IAAaA,GAAbC,GAAAC,GAAA,KAAaF,GAAO,SCApB,IAAAG,G
AAAC,GAAA,CAAAC,GAAAC,KAAA,cACA,IAAIC,IAAmB,IAAM,CAC3B,IAAIC,EAAa,OAAO,SAAa,KAAe,SAAS,cAAgB,SAAS,cAAc,IAAM,OAC1G,OAAI,OAAO,WAAe,MAAaA,EAAaA,GAAc,YAEpE,SAASC,EAAY,CAAC,EAAG,CAEzB,SAASC,GAAG,CAAC,OAAAC,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAASD,EAAC,CAAC,SAASE,GAAG,CAAC,OAAAH,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAASE,EAAE,CAAC,SAASC,GAAG,CAAC,OAAAL,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAASI,EAAE,CAAC,SAASC,GAAG,CAAC,OAAAP,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAASM,EAAE,CAAC,SAASC,GAAI,CAAC,OAAAT,GAAE,QAAQC,GAAE,QAAQC,GAAE,EAASQ,EAAE,CAAC,IAAIC,EAAEb,EAAUc,EAAGC,EAAEF,EAAE,MAAM,IAAI,QAAQ,CAACG,EAAEC,IAAI,CAACH,EAAGE,EAAED,EAAEE,CAAC,CAAC,EACrSJ,EAAE,SAAS,CAACG,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,KAAI,CAACV,EAAE,GAAGG,EAAEH,EAAE,GAAGI,EAAEJ,EAAE,GAAGK,EAAEL,EAAE,GAAGM,EAAEN,EAAE,GAAGO,EAAEP,EAAE,GAAGQ,EAAER,EAAE,GAAGS,GAAET,EAAE,GAAGU,GAAEN,EAAE,CAACO,GAAEC,GAAEC,KAAI,IAAIC,KAAI,CAAC,IAAMC,GAAEC,GAAEC,EAAEL,KAAI,EAAEE,GAAEH,GAAE,GAAGG,EAAC,EAAE,IAAMI,GAAEN,KAAI,EAAE,OAAAK,IAAIC,KAAIP,GAAEO,GAAEL,GAAEI,CAAC,EAAEL,GAAEC,GAAE,MAAaG,IAAGD,GAAEI,EAAG,EAAEL,EAAC,EAAET,EAAEM,IAAG,SAASC,KAAI,CAAC,GAAG,CAAC,GAAGZ,EAAE,GAAG,MAAM,MAAM,yBAAyB,EAAE,IAAMa,GAAEb,EAAE,GAAG,CAAC,GAAGY,GAAE,CAAC,EAAE,OAAO,CAAC,CAAC,EAAEE,GAAE,MAAMH,GAAE,GAAGC,EAAC,EAAE,GAAGZ,EAAE,KAAKa,GAAE,MAAM,MAAM,kBAAkB,EAAEV,EAAE,MAAM,EAAE,IAAMY,GAAEF,GAAE,OAAO,GAAG,EAAEE,GAAE,OAAO,CAAC,IAAIE,EAAE,MAAM,QAAQ,IAAIF,EAAC,EAAmB,GAAjBE,EAAEA,EAAE,OAAOC,IAAGA,EAAC,EAAK,EAAED,EAAE,OAAO,MAAM,MAAMA,EAAE,KAAK;AAAA,CAAI,CAAC,CAAE,CAAC,OAAOH,EAAC,QAAC,CAAQd,EAAE,GACzf,IAAI,CAAC,EAAEA,EAAE,QAAQK,EAAED,EAAEJ,EAAE,QAAQ,IAAIA,EAAE,QAAQW,IAAGX,EAAE,QAAQW,EAAC,CAAC,EAAEX,EAAE,mBAAmBK,EAAED,EAAEJ,EAAE,mBAAmB,IAAIA,EAAE,mBAAmBW,IAAGX,EAAE,mBAAmBW,EAAC,CAAC,EAAEX,EAAE,cAAcI,EAAEJ,EAAE,cAAc,IAAIA,EAAE,cAAcW,IAAGX,EAAE,cAAcW,EAAC,EAAEX,EAAE,mBAAmB,CAACW,GAAEC,GAAEC,GAAEC,KAAIX,EAAE,eAAeQ,GAAEC,GAAEC,GAAEC,EAAC,EAAEd,EAAE,sBAAsBW,IAAG,CAACR,EAAE,kBAAkBQ,EAAC,CAAC,EAAEX,EAAE,cAAcW,IAAGR,EAAE,UAAUQ,EAAC,EAAEX,EAAE,qBAAqB,CAACW,GAAEC,GAAEC,KAAIV,EAAE,iBAAiBQ,GAAEC,GAAEC,EAAC,CAAC,EACtb,IAAIO,EAAG,OAAO,OAAO,CAAC,EAAEpB,CAAC,EAAEqB,EAAG,iBAAiBC,EAAE,CAACnB,EAAEC,IAAI,CAAC,MAAMA,CAAE,EAAEmB,EAAa,OAAO,QAAjB,SAAwBC,EAAc,OAAO,eAAnB,WAAiCC,EAAY,OAAO,SAAjB,UAAoC,OAAO,QAAQ,UAAzB,UAA6C,OAAO,QAAQ,SAAS,MAAlC,SAAuCC,EAAE1B,EAAE,wBAAwB,GAAG2B,EAAE,GAAG,SAASC,EAAGzB,EAAE,CAAC,OAAOH,EAAE,WAAWA,EAAE,WAAWG,EAAEwB,CAAC,EAAEA,EAAExB,CAAC,CAAC,IAAI0B,EAAGC,EAAEC,EAC7U,GAAGN,EAAE,CAAC,IAAIO,EAAG,cAAcC,GAAG,cAAgBN,EAAEH,EAAES,GAAG,QAAQN,CAAC,EAAE,IAAI,UAAU,IAAIE,EAAG,CAACzB,EAAEC,KAAKD,EAAEA,EAAE,WAAW,SAAS,EAAE,IAAI,IAAIA,CAAC,EAAE6B,GAAG,UAAU7B,CAAC,EAAS4B,EAAG,aAAa5B,EAAEC,EAAE,OAAO,MAAM,GAAG0B,EAAG3B,IAAIA,EAAEyB,EAAGzB,EAAE,EAAE,EAAEA,EAAE,SAASA,EAAE,IAAI,WAAWA,CAAC,GAAUA,GAAG0B,EAAE,CAAC1B,EAAEC,EAAEC,EAAEC,EAAE,KAAK,CAACH,EAAEA,EAAE,WAAW,SAAS,EAAE,IAAI,IAAIA,CAAC,EAAE6B,GAAG,UAAU7B,CAAC,EAAE4B,EAAG,SAAS5B,EAAEG,EAAE,OAAO,OAAO,CAACC,EAAEC,KAAI,CAACD,EAAEF,EAAEE,CAAC,EAAEH,EAAEE,EAAEE,GAAE,OAAOA,EAAC,CAAC,CAAC,CAAC,EAAE,CAACT,EAAE,aAAa,EAAE,QAAQ,KAAK,SAASqB,EAAG,QAAQ,KAAK,CAAC,EAAE,QAAQ,MAAM,GAAG,GAAG,QAAQ,KAAK,MAAM,CAAC,EAAEC,EAAE,CAAClB,EAAEC,IAAI,CAAC,cAAQ,SACtfD,EAAQC,CAAE,EAAEL,EAAE,QAAQ,IAAI,6BAA6B,IAAIG,EAAE,GAAG,CAACA,EAAE,IAAyB,OAAOC,EAAE,CAAC,MAAM,QAAQ,MAAM,yGAAyG,EAAEA,CAAE,CAAC,OAAO,OAAOD,EAAE,MAAM,MAASoB,GAAIC,KAAEA,EAAEG,EAAE,KAAK,SAAS,KAAkB,OAAO,SAApB,KAA8B,SAAS,gBAAgBA,EAAE,SAAS,cAAc,KAAM,OAAOzC,EAAe,KAAeA,IAAcyC,EAAEzC,GAAgByC,EAAE,QAAQ,OAAO,IAArB,EAAuBA,EAAEA,EAAE,OAAO,EAAEA,EAAE,QAAQ,SAAS,EAAE,EAAE,YAAY,GAAG,EAAE,CAAC,EAAEA,EAAE,GAAGF,IAAII,EAAG1B,GAAG,CAAC,IAAIC,EAC9hB,IAAI,eAAe,OAAAA,EAAE,KAAK,MAA
MD,EAAE,EAAE,EAAEC,EAAE,KAAK,IAAI,EAASA,EAAE,YAAY,EAAEoB,IAAIO,EAAG5B,GAAG,CAAC,IAAIC,EAAE,IAAI,eAAe,OAAAA,EAAE,KAAK,MAAMD,EAAE,EAAE,EAAEC,EAAE,aAAa,cAAcA,EAAE,KAAK,IAAI,EAAS,IAAI,WAAWA,EAAE,QAAQ,CAAC,GAAG0B,EAAE,CAAC3B,EAAEC,EAAEC,IAAI,CAAC,IAAIC,EAAE,IAAI,eAAeA,EAAE,KAAK,MAAMH,EAAE,EAAE,EAAEG,EAAE,aAAa,cAAcA,EAAE,OAAO,IAAI,CAAMA,EAAE,QAAP,KAAkBA,EAAE,QAAL,GAAaA,EAAE,SAASF,EAAEE,EAAE,QAAQ,EAAED,EAAE,CAAC,EAAEC,EAAE,QAAQD,EAAEC,EAAE,KAAK,IAAI,CAAC,IAAGmB,GAAgB,OAAO,YAApB,MAAkC,OAAO,YAAY,KAAsB,aACrd,IAAIS,EAAG,QAAQ,IAAI,KAAK,OAAO,EAAEC,GAAG,QAAQ,MAAM,KAAK,OAAO,EAAEV,IAAIS,EAAG,IAAI/B,IAAI6B,EAAG,UAAU,EAAE7B,EAAE,KAAK,GAAG,EAAE;AAAA,CAAI,EAAEgC,GAAG,IAAIhC,IAAI6B,EAAG,UAAU,EAAE7B,EAAE,KAAK,GAAG,EAAE;AAAA,CAAI,GAAG,IAAIiC,GAAGpC,EAAE,OAAOkC,EAAGG,GAAErC,EAAE,UAAUmC,GAAG,OAAO,OAAOnC,EAAEoB,CAAE,EAAEA,EAAG,KAAKpB,EAAE,cAAcqB,EAAGrB,EAAE,aAAaA,EAAE,OAAOsB,EAAEtB,EAAE,MAAM,IAAIsC,EAAEtC,EAAE,aAAasC,EAAEtC,EAAE,YAAY,IAAIuC,GAAcvC,EAAE,eAAe,GAAa,OAAO,aAAjB,UAA8BwC,GAAE,iCAAiC,EAAE,IAAInD,GAAEoD,GAAEC,GAAGC,GAAE,GAAGC,GAAEtD,GAAEG,GAAGE,GAAGE,GAAGE,GAC7b,SAASR,IAAG,CAAC,IAAIY,EAAEd,GAAE,OAAOW,EAAE,MAAMV,GAAE,IAAI,UAAUa,CAAC,EAAEH,EAAE,OAAO,IAAI,WAAWG,CAAC,EAAEH,EAAE,OAAOL,GAAG,IAAI,WAAWQ,CAAC,EAAEH,EAAE,OAAOP,GAAG,IAAI,WAAWU,CAAC,EAAEH,EAAE,QAAQ,IAAI,YAAYG,CAAC,EAAEH,EAAE,QAAQH,GAAG,IAAI,YAAYM,CAAC,EAAEH,EAAE,QAAQ,IAAI,aAAaG,CAAC,EAAEH,EAAE,QAAQD,GAAG,IAAI,aAAaI,CAAC,CAAC,CAAC,IAAI0C,GAAG7C,EAAE,gBAAgB,SACnS,GAD4S,SAAS6C,IAAIL,GAAE,wDAAwDK,GAAG,wBAAwB,EAC3YnB,EAAErC,GAAEW,EAAE,mBAAmBA,EAAE,WAAWX,GAAEW,EAAE,mBAAmBX,GAAE,IAAI,YAAY,OAAO,CAAC,QAAQwD,GAAG,MAAM,QAAQ,MAAM,OAAO,EAAE,CAAC,EAAE,EAAExD,GAAE,kBAAkB,mBAAmB,MAAMgD,GAAE,6NAA6N,EAAEZ,GAAGY,GAAE,2GAA2G,EACrgB,MAAM,YAAY,EAAE9C,GAAE,EAAEsD,GAAGxD,GAAE,OAAO,WAAW,IAAIyD,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,EAAE,SAASC,IAAI,CAAC,OAAOX,IAAe,EAAEU,EAAE,CAAC,IAAIE,GAAE,EAAEC,GAAG,KAAKC,GAAE,KAAK,SAASC,IAAI,CAACH,KAAInD,EAAE,wBAAwBA,EAAE,uBAAuBmD,EAAC,CAAC,CAAC,SAASI,IAAI,CAA2D,GAA1DJ,KAAInD,EAAE,wBAAwBA,EAAE,uBAAuBmD,EAAC,EAAQA,IAAH,IAAcC,KAAP,OAAY,cAAcA,EAAE,EAAEA,GAAG,MAAMC,IAAG,CAAC,IAAIlD,EAAEkD,GAAEA,GAAE,KAAKlD,EAAE,CAAC,CAAC,CAClW,SAASqC,GAAErC,EAAE,CAAC,MAAGH,EAAE,SAAQA,EAAE,QAAQG,CAAC,EAAEA,EAAE,WAAWA,EAAE,IAAIkC,GAAElC,CAAC,EAAEwC,GAAE,GAAGC,GAAE,EAAEzC,EAAE,IAAI,YAAY,aAAaA,EAAE,0CAA0C,EAAED,EAAEC,CAAC,EAAQA,CAAE,CAAC,SAASqD,GAAGrD,EAAE,CAAC,OAAOA,EAAE,WAAW,uCAAuC,CAAC,CAAC,IAAIsD,GAAEA,GAAE,8BAA8BD,GAAGC,EAAC,IAAIA,GAAE7B,EAAG6B,EAAC,GAAG,SAASC,GAAGvD,EAAE,CAAC,GAAGA,GAAGsD,IAAGnB,EAAE,OAAO,IAAI,WAAWA,CAAC,EAAE,GAAGP,EAAG,OAAOA,EAAG5B,CAAC,EAAE,KAAK,iDAAkD,CACpa,SAASwD,GAAGxD,EAAE,CAAC,GAAG,CAACmC,IAAIf,GAAIC,GAAG,CAAC,GAAe,OAAO,OAAnB,YAA0B,CAACrB,EAAE,WAAW,SAAS,EAAE,OAAO,MAAMA,EAAE,CAAC,YAAY,aAAa,CAAC,EAAE,KAAKC,GAAG,CAAC,GAAG,CAACA,EAAE,GAAG,KAAK,uCAAuCD,EAAE,IAAI,OAAOC,EAAE,YAAY,CAAC,CAAC,EAAE,MAAM,IAAIsD,GAAGvD,CAAC,CAAC,EAAE,GAAG2B,EAAE,OAAO,IAAI,QAAQ,CAAC1B,EAAEC,IAAI,CAACyB,EAAE3B,EAAEG,GAAGF,EAAE,IAAI,WAAWE,CAAC,CAAC,EAAED,CAAC,CAAC,CAAC,CAAC,CAAC,OAAO,QAAQ,QAAQ,EAAE,KAAK,IAAIqD,GAAGvD,CAAC,CAAC,CAAC,CAAC,SAASyD,GAAGzD,EAAEC,EAAEC,EAAE,CAAC,OAAOsD,GAAGxD,CAAC,EAAE,KAAKG,GAAG,YAAY,YAAYA,EAAEF,CAAC,CAAC,EAAE,KAAKE,GAAGA,CAAC,EAAE,KAAKD,EAAEC,GAAG,CAAC+B,GAAE,0CAA0C/B,CAAC,EAAEkC,GAAElC,CAAC,CAAC,CAAC,CAAC,CAC1e,SAASuD,GAAG1D,EAAEC,EAAE,CAAC,IAAIC,EAAEoD,GAAE,OAAOnB,GAAe,OAAO,YAAY,sBAA/B,YAAqDkB,GAAGnD,CAAC,GAAGA,EAAE,WAAW,SAAS,GAAGoB,GAAe,OAAO,OAAnB,WAAyBmC,GAAGvD,EAAEF,EAAEC,CAAC,EAAE,MAAMC,EAAE,CAAC,YAAY,aAAa,CAAC,EAAE,KAAKC,GAAG,YAAY,qBAAqBA,EAAEH,CAAC,EAAE,KAAKC,EAAE,SAASG,EAAE,CAAC,OAAA8B,GAAE,kCAAkC9B,CAAC,EAAE8B,GAAE,2C
AA2C,EAASuB,GAAGvD,EAAEF,EAAEC,CAAC,CAAC,CAAC,CAAC,CAAC,CAC7W,IAAI0D,GAAEC,GAAG,CAAC,OAAO5D,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,QAAQG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,OAAOG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,aAAaG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,OAAOG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,UAAUG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,OAAOG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,OACxfG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,OAAOG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,OAAOG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,OAAOG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,QAAQG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,QAAQG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,QAAQG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,OAAOG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAO,CAACA,EAAEC,EAAEC,IAAI,CAACL,EAAE,GAAG,UAAUG,EAAE,CAAC,IAAIC,EAAE,IAAIC,CAAC,CAAC,CAAC,EAAE,OAAOF,GAAG,CAACH,EAAE,GAAG,OAAOG,EAAE,MAAM,CAAC,EAAE,OAAO,CAACA,EAAEC,IAAI,CAACJ,EAAE,GAAG,MAAMG,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,OAAOD,GAAG,CAACH,EAAE,GAAG,OAAOG,EAAE,MAAM,CAAC,EAAE,OAAO,CAACA,EAAEC,IAAI,CAACJ,EAAE,GAAG,YAAYG,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACD,EAAEC,IACnf,CAACJ,EAAE,GAAG,kBAAkBG,EAAE,CAAC,MAAMC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACD,EAAEC,IAAI,CAACJ,EAAE,GAAG,OAAOG,EAAE,CAAC,GAAGC,CAAC,CAAC,CAAC,EAAE,OAAOD,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,MAAMG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,QAAQG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,UAAUG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,iBAAiBG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,OAAOG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,cAAcG,EAAE,MAAM,CAAC,EAAE,OAAO,CAACA,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,aAAaG,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACC,EAC3f,KAAKC,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACH,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,YAAYG,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACC,EAAE,KAAKC,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACH,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,YAAYG,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACC,EAAE,KAAKC,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACH,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,aAAaG,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACC,EAAE,KAAKC,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACH,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,YAAYG,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACC,EACngB,KAAKC,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACH,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,WAAWG,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAA
CC,EAAE,KAAKC,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACH,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,WAAWG,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACC,EAAE,KAAKC,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACH,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,eAAeG,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACC,EAAE,KAAKC,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACH,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,kBAAkBG,EAAE,CAAC,SAAS,CAAC,CAACC,EACnf,kBAAkB,CAAC,CAACC,EAAE,KAAKC,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACH,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,kBAAkBG,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,kBAAkB,CAAC,CAACC,EAAE,KAAKC,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAOH,GAAG,CAACH,EAAE,GAAG,QAAQG,EAAE,MAAM,CAAC,EAAE,OAAO,CAACA,EAAEC,EAAEC,IAAI,CAACL,EAAE,GAAG,YAAYG,EAAE,CAAC,KAAKC,EAAE,MAAM,KAAKV,EAAE,EAAE,SAASW,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACD,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,KAAI,CAACZ,EAAE,GAAG,OAAOG,EAAE,CAAC,OAAOQ,GAAE,OAAO,OAAO,SAASP,EAAE,UAAU,CAACC,CAAC,EAAE,MAAMC,EAAE,aAAa,CAACC,CAAC,EAAE,KAAK,CAACC,EAAEC,EAAC,EAAE,QAAQ,CAACC,EAAC,EAAE,WAAW,IAAI,CAAC,CAACtB,EAAE,EAAEwB,KAClf,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACT,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEE,EAAEC,KAAI,CAAClB,EAAE,GAAG,OAAOG,EAAE,CAAC,OAAOc,EAAE,OAAO,OAAO,SAASb,EAAE,UAAU,CAACC,EAAEC,CAAC,EAAE,MAAMC,EAAE,aAAa,CAACC,EAAEC,EAAC,EAAE,KAAK,CAACC,GAAEC,GAAEC,GAAEC,EAAC,EAAE,QAAQ,CAACC,GAAEC,EAAC,EAAE,WAAW,IAAI,CAAC,CAAC3B,EAAE,EAAE8B,KAAI,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACf,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,KAAI,CAACZ,EAAE,GAAG,OAAOG,EAAE,CAAC,OAAOQ,GAAE,OAAO,OAAO,SAASP,EAAE,UAAU,CAACC,CAAC,EAAE,MAAMC,EAAE,aAAa,CAACC,CAAC,EAAE,KAAK,CAACC,EAAEC,EAAC,EAAE,QAAQ,CAACC,EAAC,EAAE,WAAW,IAAI,CAAC,CAACtB,EAAE,EAAEwB,KAAI,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACT,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEE,EAAEC,KAAI,CAAClB,EAAE,GAAG,OAAOG,EAAE,CAAC,OAAOc,EAAE,OAAO,OAAO,SAASb,EAAE,UAAU,CAACC,EAAEC,CAAC,EAAE,MAAMC,EAAE,aAAa,CAACC,EACpfC,EAAC,EAAE,KAAK,CAACC,GAAEC,GAAEC,GAAEC,EAAC,EAAE,QAAQ,CAACC,GAAEC,EAAC,EAAE,WAAW,IAAI,CAAC,CAAC3B,EAAE,EAAE8B,KAAI,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACf,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEE,IAAI,CAACjB,EAAE,GAAG,gBAAgBG,EAAE,CAAC,OAAOQ,GAAE,OAAO,OAAO,QAAQP,EAAE,UAAU,CAACC,CAAC,EAAE,MAAMC,EAAE,aAAa,CAACC,CAAC,EAAE,KAAK,CAACC,EAAEC,EAAC,EAAE,QAAQ,CAACC,EAAC,EAAE,SAAS,IAAI,CAAC,CAACtB,EAAE,EAAEwB,KAAI,CAAC,EAAE,cAAcC,GAAE,MAAM,KAAKnB,EAAE,EAAE,SAASoB,KAAI,EAAEA,GAAED,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYE,GAAE,MAAM,KAAKrB,EAAE,EAAE,SAASuB,IAAI,EAAEA,EAAEF,KAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACZ,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,KAAI,CAACf,EAAE,GAAG,gBAAgBG,EAAE,CAAC,OAAOO,GAAE,OAAO,OAAO,QAAQN,EAAE,UAAU,MAAM,KAAKV,EAAE,EAAE,SAASW,IAAI,EAAEA,EACpf,IAAI,CAAC,CAAC,EAAE,MAAMC,EAAE,YAAY,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAE,IAAI,CAAC,CAAC,EAAE,KAAK,MAAM,KAAKb,EAAE,EAAE,SAASc,IAAI,EAAEA,EAAE,IAAI,CAAC,CAAC,EAAE,QAAQ,MAAM,KAAKd,EAAE,EAAE,SAASe,KAAI,EAAEA,GAAE,IAAI,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAACrB,EAAE,EAAEuB,KAAI,CAAC,EAAE,cAAc,EAAEC,GAAE,MAAM,KAAKlB,EAAE,EAAE,SAASmB,KAAI,EAAEA,GA
AED,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAY,EAAEE,GAAE,MAAM,KAAKpB,EAAE,EAAE,SAASqB,KAAI,EAAEA,GAAED,KAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACX,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEE,IAAI,CAACjB,EAAE,GAAG,gBAAgBG,EAAE,CAAC,OAAOQ,GAAE,OAAO,OAAO,QAAQP,EAAE,UAAU,CAACC,CAAC,EAAE,MAAMC,EAAE,aAAa,CAACC,CAAC,EAAE,KAAK,CAACC,EAAEC,EAAC,EAAE,QAAQ,CAACC,EAAC,EAAE,SAAS,IAAI,CAAC,CAACtB,EAAE,EAAEwB,KAAI,CAAC,EAAE,cAAcC,GAC5f,MAAM,KAAKnB,EAAE,EAAE,SAASoB,KAAI,EAAEA,GAAED,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAYE,GAAE,MAAM,KAAKrB,EAAE,EAAE,SAASuB,IAAI,EAAEA,EAAEF,KAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACZ,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,KAAI,CAACf,EAAE,GAAG,gBAAgBG,EAAE,CAAC,OAAOO,GAAE,OAAO,OAAO,QAAQN,EAAE,UAAU,MAAM,KAAKV,EAAE,EAAE,SAASW,IAAI,EAAEA,EAAE,IAAI,CAAC,CAAC,EAAE,MAAMC,EAAE,YAAY,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAE,IAAI,CAAC,CAAC,EAAE,KAAK,MAAM,KAAKb,EAAE,EAAE,SAASc,IAAI,EAAEA,EAAE,IAAI,CAAC,CAAC,EAAE,QAAQ,MAAM,KAAKd,EAAE,EAAE,SAASe,KAAI,EAAEA,GAAE,IAAI,CAAC,CAAC,EAAE,SAAS,IAAI,CAAC,CAACrB,EAAE,EAAEuB,KAAI,CAAC,EAAE,cAAc,EAAEC,GAAE,MAAM,KAAKlB,EAAE,EAAE,SAASmB,KAAI,EAAEA,GAAED,KAAI,CAAC,CAAC,EAAE,CAAC,EAAE,YAAY,EACnfE,GAAE,MAAM,KAAKpB,EAAE,EAAE,SAASqB,KAAI,EAAEA,GAAED,KAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACX,EAAEC,IAAI,CAACJ,EAAE,GAAG,oBAAoBG,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,OAAO,CAACD,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEE,EAAEC,GAAE8C,KAAI,CAAChE,EAAE,GAAG,cAAcG,EAAE,CAAC,OAAO6D,GAAE,OAAO,OAAO,SAAS5D,EAAE,UAAUC,EAAE,kBAAkBC,EAAE,cAAcC,EAAE,UAAU,CAACC,EAAEC,EAAC,EAAE,aAAa,CAACC,GAAEC,EAAC,EAAE,KAAK,CAACC,GAAEC,GAAEC,GAAEC,EAAC,EAAE,QAAQ,CAACE,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACf,EAAEC,IAAI,CAACJ,EAAE,GAAG,oBAAoBG,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,OAAO,CAACD,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEE,EAAEC,GAAE8C,KAAI,CAAChE,EAAE,GAAG,cAAcG,EAAE,CAAC,OAAO6D,GAAE,OAAO,OAAO,SAAS5D,EAAE,UAAUC,EAC7f,kBAAkBC,EAAE,cAAcC,EAAE,UAAU,CAACC,EAAEC,EAAC,EAAE,aAAa,CAACC,GAAEC,EAAC,EAAE,KAAK,CAACC,GAAEC,GAAEC,GAAEC,EAAC,EAAE,QAAQ,CAACE,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACf,EAAEC,IAAI,CAACJ,EAAE,GAAG,gBAAgBG,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,OAAO,CAACD,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEE,EAAEC,GAAE8C,KAAI,CAAChE,EAAE,GAAG,UAAUG,EAAE,CAAC,OAAO6D,GAAE,OAAO,OAAO,SAAS5D,EAAE,UAAUC,EAAE,kBAAkBC,EAAE,cAAcC,EAAE,UAAU,CAACC,EAAEC,EAAC,EAAE,aAAa,CAACC,GAAEC,EAAC,EAAE,KAAK,CAACC,GAAEC,GAAEC,GAAEC,EAAC,EAAE,QAAQ,CAACE,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACf,EAAEC,IAAI,CAACJ,EAAE,GAAG,gBAAgBG,EAAE,CAAC,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,OAAO,CAACD,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEC,GAAEE,EAAEC,GAAE8C,KAAI,CAAChE,EAAE,GAAG,UAC3eG,EAAE,CAAC,OAAO6D,GAAE,OAAO,OAAO,SAAS5D,EAAE,UAAUC,EAAE,kBAAkBC,EAAE,cAAcC,EAAE,UAAU,CAACC,EAAEC,EAAC,EAAE,aAAa,CAACC,GAAEC,EAAC,EAAE,KAAK,CAACC,GAAEC,GAAEC,GAAEC,EAAC,EAAE,QAAQ,CAACE,EAAEC,EAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACf,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,OAAOG,EAAE,CAAC,MAAMC,EAAE,KAAKC,EAAE,OAAOC,EAAE,OAAOC,CAAC,CAAC,CAAC,EAAE,OAAOJ,GAAG,CAACH,EAAE,GAAG,SAASG,EAAE,MAAM,CAAC,EAAE,OAAO,CAACA,EAAEC,EAAEC,EAAEC,IAAI,CAACN,EAAE,GAAG,SAASG,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,gBAAgB,CAAC,CAACC,EAAE,KAAKC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACH,EAAEC,EAAEC,EAAEC,IAAI,CAACN,EAAE,GAAG,SAASG,EAAE,CAAC,SAAS,CAAC,CAACC,EAAE,gBAAgB,CAAC,CAACC,EAAE,KAAKC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACH,EAAEC,IAAI,CAA
CJ,EAAE,GAAG,UAAUG,EAAE,CAAC,KAAKC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACD,EAAEC,IAAI,CAACJ,EAAE,GAAG,SAASG,EAAE,CAAC,KAAKC,CAAC,CAAC,CAAC,EACrf,OAAO,CAACD,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,QAAQG,EAAE,CAAC,KAAKC,EAAE,WAAWC,EAAE,WAAWC,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAOH,GAAG,CAACH,EAAE,GAAG,SAASG,EAAE,MAAM,CAAC,EAAE,OAAO,CAACA,EAAEC,IAAI,CAACJ,EAAE,GAAG,SAASG,EAAE,CAAC,KAAK,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACD,EAAEC,IAAI,CAACJ,EAAE,GAAG,iBAAiBG,EAAE,CAAC,KAAK,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACD,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,GAAEC,KAAI,CAACb,EAAE,GAAG,SAASG,EAAE,CAAC,UAAUC,EAAE,KAAKC,EAAE,MAAM,KAAKX,EAAE,EAAE,SAASY,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,wBAAwB4D,GAAE1D,CAAC,EAAE,YAAYC,EAAE,eAAeC,GAAE,mBAAmBC,GAAE,sBAAsBuD,GAAEtD,EAAC,EAAE,KAAKsD,GAAErD,EAAC,EAAE,YAAYqD,GAAEpD,EAAC,CAAC,CAAC,CAAC,EAC5f,OAAO,CAACV,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,KAAI,CAACT,EAAE,GAAG,QAAQG,EAAE,CAAC,OAAOC,EAAE,MAAM,KAAKV,EAAE,EAAE,SAASW,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKE,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,KAAKE,EAAE,MAAM,KAAKd,EAAE,EAAE,SAASe,KAAI,EAAEA,GAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAOL,GAAG,CAACH,EAAE,GAAG,OAAOG,EAAE,MAAM,CAAC,EAAE,OAAO,CAACA,EAAEC,EAAEC,IAAI,CAACL,EAAE,GAAG,qBAAqBG,EAAE,CAAC,KAAK,OAAOC,CAAC,EAAE,QAAQ,OAAOC,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACF,EAAEC,EAAEC,IAAI,CAACL,EAAE,GAAG,wBAAwBG,EAAE,CAAC,QAAQC,EAAE,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,OAAO,CAACF,EAAEC,EAAEC,IAAI,CAACL,EAAE,GAAG,wBAAwBG,EAAE,CAAC,QAAQC,EAAE,OAAOC,EAAE,OAAO,MAAM,CAAC,CAAC,EAAE,OAAOF,GAAG,CAACH,EAAE,GAAG,QAClfG,EAAE,MAAM,CAAC,EAAE,OAAO,CAACA,EAAEC,IAAI,CAACJ,EAAE,GAAG,SAASG,EAAE,CAAC,SAAS8D,GAAE7D,CAAC,CAAC,CAAC,CAAC,EAAE,OAAO,CAACD,EAAEC,EAAEC,EAAEC,EAAEC,IAAI,CAACP,EAAE,GAAG,MAAMG,EAAE,CAAC,KAAKC,EAAE,MAAMC,EAAE,KAAKC,EAAE,MAAM,KAAKZ,EAAE,EAAE,SAASa,IAAI,EAAEA,EAAED,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,EAAE,OAAOH,GAAG,CAACH,EAAE,GAAG,OAAOG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,UAAUG,EAAE,MAAM,CAAC,EAAE,OAAOA,GAAG,CAACH,EAAE,GAAG,gBAAgBG,EAAE,MAAM,CAAC,EAAE,OAAO,CAACA,EAAEC,IAAI,CAACJ,EAAE,GAAG,yBAAyBG,EAAE,CAAC,QAAQC,CAAC,CAAC,CAAC,EAAE,OAAOD,GAAG,CAACH,EAAE,GAAGG,CAAC,CAAC,EAAE,OAAO,CAACA,EAAEC,IAAIJ,EAAE,GAAGG,EAAEC,EAAEJ,EAAE,GAAG,GAAGA,EAAE,GAAG,MAAM,EAAE,OAAOG,GAAGH,EAAE,GAAGG,CAAC,EAAE,OAAOA,GAAGH,EAAE,GAAGG,CAAC,EAAE,OAAO,CAACA,EAAEC,EAAEC,IAAI,CAACL,EAAE,GAAGG,EAAEC,EAAEC,EAAE,EAAE,CAAC,EAAE,OAAO,CAACF,EAAEC,EAAEC,IAAI,CAACL,EAAE,GAAGG,EAAEC,EAAEC,CAAC,CAAC,CAAC,EACpf,SAAS6D,GAAG/D,EAAE,CAAC,KAAK,KAAK,aAAa,KAAK,QAAQ,gCAAgCA,CAAC,IAAI,KAAK,OAAOA,CAAC,CAAC,SAASgE,GAAGhE,EAAE,CAACA,EAAE,UAAU,EAAEA,EAAE,UAAU,IAAI,CAAC,CAAC,CAAC,SAASiE,GAAGjE,EAAE,EAAEA,EAAEkE,GAAE,GAAGlE,CAAC,IAAIqC,GAAE,EAAE6B,GAAE,GAAGlE,CAAC,CAAC,CAAC,SAASmE,GAAGnE,EAAE,CAAC,IAAIC,EAAEiE,GAAE,GAAG,EAAE,GAAG,CAACjE,EAAE,MAAO,GAAEiE,GAAE,GAAG,KAAKjE,CAAC,EAAEiE,GAAE,GAAGlE,EAAE,EAAE,EAAEC,EAAEA,EAAE,GAAGD,EAAE,GAAG,IAAIE,EAAE,CAAC,IAAI,MAAM,cAAcF,EAAE,GAAG,IAAIA,EAAE,GAAG,YAAYA,EAAE,EAAE,EAAE,OAAAsB,GAAGrB,EAAE,MAAM,EAAEA,EAAE,YAAYC,EAAEF,EAAE,EAAE,EAAS,CAAC,CACvX,IAAIoE,GAAgB,OAAO,YAApB,IAAgC,IAAI,YAAY,MAAM,EAAE,OAAOC,GAAG,CAACrE,EAAEC,EAAEC,IAAI,CAACD,KAAK,EAAE,IAAIE,EAAEF,EAAEC,EAAE,IAAIA,EAAED,EAAED,EAAEE,CAAC,GAAG,EAAEA,GAAGC,IAAI,EAAED,EAAE,GAAG,GAAGA,EAAED,GAAGD,EAAE,QAAQoE,GAAG,OAAOA,GAAG,OAAOpE,EAAE,kBAAkB,kBAAkBA,EAAE,MAAMC,EAAEC,CAAC,EAAEF,EAAE,SAASC,EAAEC,CAAC,CAAC,EAAE,IAAIC,EAAE,GAAGF,EAAEC,GAAG,CAAC,IAAIE,EAAEJ,EAAEC,GAAG,EAAE,GAAGG
,EAAE,IAAI,CAAC,IAAIC,EAAEL,EAAEC,GAAG,EAAE,GAAG,IAASG,EAAE,MAAR,IAAaD,GAAG,OAAO,cAAcC,EAAE,KAAK,EAAEC,CAAC,MAAM,CAAC,IAAIC,GAAEN,EAAEC,GAAG,EAAE,GAAGG,GAAQA,EAAE,MAAR,KAAcA,EAAE,KAAK,GAAGC,GAAG,EAAEC,IAAGF,EAAE,IAAI,GAAGC,GAAG,GAAGC,IAAG,EAAEN,EAAEC,GAAG,EAAE,GAAG,MAAMG,EAAED,GAAG,OAAO,aAAaC,CAAC,GAAGA,GAAG,MAAMD,GAAG,OAAO,aAAa,MAAMC,GACpf,GAAG,MAAMA,EAAE,IAAI,EAAE,CAAC,MAAMD,GAAG,OAAO,aAAaC,CAAC,CAAC,CAAC,OAAOD,CAAC,EAAE2D,GAAE,CAAC9D,EAAEC,KAAKD,KAAK,GAAGqE,GAAGhF,EAAE,EAAEW,EAAEC,CAAC,EAAE,GAAG,SAASqE,GAAGtE,EAAE,CAAC,GAAGuB,EAAE,OAAOgD,GAAE,EAAE,EAAEvE,CAAC,EAAEyC,GAAEzC,EAAM+C,GAAG,IAAGmB,GAAE,GAAG,EAAKrE,EAAE,QAAOA,EAAE,OAAOG,CAAC,EAAEwC,GAAE,IAAGrB,EAAEnB,EAAE,IAAI+D,GAAG/D,CAAC,CAAC,CAAC,CACjM,IAAIwE,GAAGxE,GAAG,CAAK,GAAJyC,GAAEzC,EAAKuB,EAAE,MAAMkD,GAAGzE,CAAC,EAAE,SAASsE,GAAGtE,CAAC,CAAC,EAAEkE,GAAE,CAAC,GAAG,CAAC,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,EAAE,GAAG,CAAC,EAAE,GAAG,UAAU,CAAC3C,EAAE2C,GAAE,GAAG,EAAEA,GAAE,GAAG,CAAC,EAAE,GAAG,UAAU,CAACvB,GAAG,QAAQ,IAAI,CAACQ,GAAG,EAAEe,GAAE,GAAG,IAAId,GAAG,CAAC,CAAC,CAAC,CAAC,EAAE,GAAG,UAAU,CAACc,GAAE,sBAAsBA,GAAE,GAAGA,GAAE,cAAcA,GAAE,GAAGA,GAAE,cAAcA,GAAE,GAAG9B,GAAc,EAAE,EAAE,GAAG,SAASpC,EAAE,CAACyC,GAAEzC,CAAC,EAAE,GAAG,CAAC,kBAAkB,EAAE,GAAG,UAAU,CAAC,QAAQA,KAAKkE,GAAE,GAAGF,GAAGhE,CAAC,EAAE,IAAIA,KAAKkE,GAAE,GAAGF,GAAGhE,CAAC,EAAEkE,GAAE,GAAG,CAAC,EAAEA,GAAE,GAAG,CAAC,EAAEA,GAAE,GAAG,CAAC,CAAC,EAAE,GAAG,SAASlE,EAAE,CAAC,IAAIC,EAAED,EAAE,GAAG,OAAOkE,GAAE,GAAGjE,CAAC,EAAEiE,GAAE,GAAG,KAAKlE,CAAC,EAAEkE,GAAE,GAAG,OAAOA,GAAE,GAAG,QAAQlE,CAAC,EAAE,CAAC,EAAEA,EAAE,GAAG,EAAE0E,GAAGzE,CAAC,CAAC,EAAE,GAAG,UAAU,CAAC,EACtf,GAAG,UAAU,CAACiE,GAAE,GAAG,QAAQlE,GAAGA,EAAE,CAAC,CAAC,EAAE,GAAGA,GAAG,IAAI,QAAQC,GAAG,CAACD,EAAE,UAAUK,GAAG,CAACA,EAAEA,EAAE,KAAK,IAAIC,GAAED,EAAE,IAAI,GAAGA,EAAE,cAAcA,EAAE,cAAcsE,GAAG,EAAE,CAAC,IAAIpE,GAAE2D,GAAE,GAAG7D,EAAE,EAAE,EAAEE,GAAEA,GAAE,YAAYF,EAAEA,EAAE,YAAY,EAAE6B,GAAE,0CAA0C5B,GAAE,uBAAuBD,EAAE,aAAa,qCAAqC,CAAC,MAA0BC,KAAjB,eAAmBsE,GAAG,EAA0BtE,KAAhB,cAAkB6D,GAAG9D,CAAC,EAA4BC,KAAlB,gBAAoB2D,GAAG5D,EAAE,MAAM,EAAyBC,KAAf,cAAiBD,EAAEA,EAAE,OAAOC,GAAE4D,GAAE,GAAG7D,CAAC,EAAE,OAAO6D,GAAE,GAAG7D,CAAC,EAAE2D,GAAG1D,EAAC,EAAEoE,GAAGrE,CAAC,EAAE6D,GAAE,GAAG,OAAOA,GAAE,GAAG,QAAQ5D,EAAC,EAClgB,CAAC,EAAEA,GAAE,GAAG,GAA2BA,KAAjB,eAAmB4D,GAAE,GAAG7D,EAAE,MAAM,EAAE,YAAY,CAAC,IAAI,QAAQ,CAAC,EAAqBC,KAAX,UAAaN,EAAE,OAAO,GAAGC,EAAED,CAAC,GAAoBM,KAAV,QAAY,MAAM,UAAUD,EAAE,SAAS,KAAKA,EAAE,IAAI,EAA2BA,EAAE,SAAnB,eAA0BL,EAAE,YAAYK,CAAC,EAA0BC,KAAhB,cAAkBT,EAAEQ,EAAE,OAAO,EAAE,GAAGA,EAAE,IAAI,EAAOC,IAAG4B,GAAE,kCAAkC5B,EAAC,CAAC,EAAEN,EAAE,QAAQK,GAAG,CAAC,MAAA6B,GAAE,yBAAyB7B,EAAE,SAAS,IAAIA,EAAE,OAAO,KAAKA,EAAE,OAAO,EAAQA,CAAE,EAAEiB,IAAItB,EAAE,GAAG,UAAU,SAASK,EAAE,CAACL,EAAE,UAAU,CAAC,KAAKK,CAAC,CAAC,CAAC,CAAC,EAAEL,EAAE,GAAG,QAAQ,SAASK,EAAE,CAACL,EAAE,QAAQK,CAAC,CAAC,CAAC,GAC/f,IAAIH,EAAE,CAAC,EAAEC,EAAE,CAAC,SAAS,UAAU,QAAQ,UAAU,EAAEC,EAAE,IAAIA,KAAKD,EAAEN,EAAE,eAAeO,CAAC,GAAGF,EAAE,KAAKE,CAAC,EAAEJ,EAAE,YAAY,CAAC,IAAI,OAAO,SAASE,EAAE,UAAUL,EAAE,qBAAqBd,EAAW,WAAWG,GAAE,WAAWqD,EAAE,CAAC,CAAC,CAAC,EAAE,GAAG,SAASvC,EAAE,CAACA,EAAE,CAAC,EAAE,GAAG,UAAU,CAAC,IAAIA,EAAEyB,EAAG,kCAAkC,EAAEzB,EAAE,IAAI,OAAOA,CAAC,EAAEkE,GAAE,GAAG,KAAKlE,CAAC,CAAC,EAAE,GAAG,UAAU,CAAC,OAAGkE,GAAE,GAAG,QAAR,IAAiBA,GAAE,GAAG,EAAEA,GAAE,GAAGA,GAAE,GAAG,CAAC,CAAC,GAAUA,GAAE,GAAG,IAAI,CAAC,CAAC,EAAErE,EAAE,QAAQqE,GAAE,IAAIW,GAAG7E,GAAG,CAAC,KAAK,EAAEA,EAAE,QAAQA,EAAE,MAAM,EAAEH,CAAC,CAAC,EACzbA,EAAE,oBAAoB,UAAU,CAAC,IAAIG,EAAE2E,GAAG,EAAE1E,EAAEV,EAAE,EAAES,EAAE,IAAI,IAAI,CAAC,EAAEA,EAAET,EAAE,EAAES,EAAE,IAAI,IAAI,CAAC,EAAE8E,GAAG7E,EAAEA,EAAED,CAAC,EAAE+E,GAAG9E,CAAC,
CAAC,EAAE,SAASwE,GAAGzE,EAAE,CAAC,GAAGuB,EAAE,OAAOgD,GAAE,EAAE,EAAEvE,CAAC,EAAEwE,GAAGxE,CAAC,CAAC,CAACH,EAAE,iBAAiB,SAASG,EAAEC,EAAE,CAACD,EAAEgF,GAAG,MAAM,KAAK,CAAChF,EAAEC,CAAC,CAAC,EAAE8C,GAAG,EAAEmB,GAAE,GAAGlE,CAAC,EAAEiF,GAAGjF,CAAC,CAAC,EAAE,SAASkF,GAAGlF,EAAE,CAAC,KAAK,GAAGA,EAAE,GAAG,KAAK,GAAG,SAASC,EAAE,CAACR,EAAE,EAAE,KAAK,GAAG,GAAG,IAAI,CAAC,EAAEQ,CAAC,EAAE,KAAK,GAAG,SAASA,EAAE,CAACR,EAAE,EAAE,KAAK,GAAG,GAAG,IAAI,CAAC,EAAEQ,CAAC,EAAE,KAAK,GAAG,SAASA,EAAEC,EAAE,CAAC,KAAK,GAAG,EAAE,KAAK,GAAGD,CAAC,EAAE,KAAK,GAAGC,CAAC,CAAC,EAAE,KAAK,GAAG,UAAU,CAACT,EAAE,EAAE,KAAK,GAAG,IAAI,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI0F,GAAG,EAAEC,GAAG,EAC/b,SAASC,GAAGrF,EAAEC,EAAEC,EAAEC,EAAE,CAAC,OAAOoB,EAAEgD,GAAE,EAAE,EAAEvE,EAAEC,EAAEC,EAAEC,CAAC,EAAEmF,GAAGtF,EAAEC,EAAEC,EAAEC,CAAC,CAAC,CAAC,SAASmF,GAAGtF,EAAEC,EAAEC,EAAEC,EAAE,CAA6B,GAA5BH,KAAK,EAAEC,KAAK,EAAEC,KAAK,EAAEC,KAAK,EAAkB,OAAO,kBAApB,IAAsC,OAAO+B,GAAE,qFAAqF,EAAE,EAAE,IAAI9B,EAAE,CAAC,EAAE,OAAGmB,GAAOnB,EAAE,SAAN,EAAoBiF,GAAGrF,EAAEC,EAAEC,EAAEC,CAAC,GAAEH,EAAE,CAAC,GAAGE,EAAE,GAAGF,EAAE,GAAGG,EAAE,GAAGC,CAAC,EAASmB,GAAGvB,EAAE,GAAG,cAAc,YAAYA,EAAEI,CAAC,EAAE,GAAG+D,GAAGnE,CAAC,EAAC,CAAC,SAASuF,GAAGvF,EAAEC,EAAEC,EAAE,CAAC,OAAOqB,EAAEgD,GAAE,EAAE,EAAEvE,EAAEC,EAAEC,CAAC,EAAE,CAAC,CAAC,SAASsF,GAAGxF,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAOgD,GAAE,EAAE,EAAEvE,EAAEC,CAAC,CAAC,CACrc,IAAIwF,GAAGzF,GAAG,CAAC,QAAQC,EAAE,EAAEC,EAAE,EAAEA,EAAEF,EAAE,OAAO,EAAEE,EAAE,CAAC,IAAIC,EAAEH,EAAE,WAAWE,CAAC,EAAE,KAAKC,EAAEF,IAAI,MAAME,EAAEF,GAAG,EAAE,OAAOE,GAAG,OAAOA,GAAGF,GAAG,EAAE,EAAEC,GAAGD,GAAG,CAAC,CAAC,OAAOA,CAAC,EAAEyF,GAAG,CAAC1F,EAAEC,EAAEC,EAAEC,IAAI,CAAQ,GAAPD,KAAK,EAAK,EAAE,EAAEC,GAAG,MAAO,GAAE,IAAIC,EAAEF,EAAEC,EAAED,EAAEC,EAAE,EAAE,QAAQE,EAAE,EAAEA,EAAEL,EAAE,OAAO,EAAEK,EAAE,CAAC,IAAIC,GAAEN,EAAE,WAAWK,CAAC,EAAE,GAAG,OAAOC,IAAG,OAAOA,GAAE,CAAC,IAAIC,GAAEP,EAAE,WAAW,EAAEK,CAAC,EAAEC,GAAE,QAAQA,GAAE,OAAO,IAAIC,GAAE,IAAI,CAAC,GAAG,KAAKD,GAAE,CAAC,GAAGJ,GAAGC,EAAE,MAAMF,EAAEC,MAAM,CAAC,EAAEI,EAAC,KAAK,CAAC,GAAG,MAAMA,GAAE,CAAC,GAAGJ,EAAE,GAAGC,EAAE,MAAMF,EAAEC,MAAM,CAAC,EAAE,IAAII,IAAG,CAAC,KAAK,CAAC,GAAG,OAAOA,GAAE,CAAC,GAAGJ,EAAE,GAAGC,EAAE,MAAMF,EAAEC,MAAM,CAAC,EAAE,IAAII,IAAG,EAAE,KAAK,CAAC,GAAGJ,EAAE,GAAGC,EAAE,MAAMF,EAAEC,MAAM,CAAC,EAAE,IAAII,IACpf,GAAGL,EAAEC,MAAM,CAAC,EAAE,IAAII,IAAG,GAAG,EAAE,CAACL,EAAEC,MAAM,CAAC,EAAE,IAAII,IAAG,EAAE,EAAE,CAACL,EAAEC,MAAM,CAAC,EAAE,IAAII,GAAE,EAAE,CAAC,CAAC,OAAAL,EAAEC,IAAI,CAAC,EAAE,EAASA,EAAEE,CAAC,EAAEuF,GAAG,CAAC3F,EAAEC,EAAEC,IAAIwF,GAAG1F,EAAEX,EAAE,EAAEY,EAAEC,CAAC,EAAE,SAAS0F,GAAG5F,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAOgD,GAAE,EAAE,EAAEvE,EAAEC,CAAC,CAAC,CAAC,SAAS4F,GAAG7F,EAAEC,EAAEC,EAAE,CAAC,GAAGqB,EAAE,OAAOgD,GAAE,EAAE,EAAEvE,EAAEC,EAAEC,CAAC,CAAC,CAAC,SAAS4F,GAAG9F,EAAEC,EAAEC,EAAE,CAAC,OAAOqB,EAAEgD,GAAE,EAAE,EAAEvE,EAAEC,EAAEC,CAAC,EAAE,CAAC,CAAC,SAAS6F,GAAG/F,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAOgD,GAAE,EAAE,EAAEvE,EAAEC,CAAC,CAAC,CAAC,SAAS+F,GAAGhG,EAAEC,EAAEC,EAAE,CAAC,GAAGqB,EAAE,OAAOgD,GAAE,GAAG,EAAEvE,EAAEC,EAAEC,CAAC,CAAC,CAAC,SAAS+F,GAAGjG,EAAEC,EAAEC,EAAEC,EAAE,CAAC,GAAGoB,EAAE,OAAOgD,GAAE,GAAG,EAAEvE,EAAEC,EAAEC,EAAEC,CAAC,CAAC,CAAC,SAAS+F,GAAGlG,EAAEC,EAAEC,EAAEC,EAAE,CAAC,GAAGoB,EAAE,OAAOgD,GAAE,GAAG,EAAEvE,EAAEC,EAAEC,EAAEC,CAAC,CAAC,CAAC,SAASgG,GAAGnG,EAAEC,EAAEC,EAAEC,EAAE,CAAC,GAAGoB,EAAE,OAAOgD,GAAE,GAAG,EAAEvE,EAAEC,EAAEC,EAAEC,CAAC,CAAC,CAC9d,SAASiG,GAAGpG,EAAE,CAAC,GAAGuB,EAAE,OAAOgD,GAAE,GAAG,EAAEvE,CAAC,CAAC,CAAC,SAASqG,GAAGrG,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAOgD,GAAE,GAAG,EAAEvE,EAAEC,CAAC,CAAC,CAAC,SAASqG,GAAGtG,EAAEC,EAAEC,EAAE,CAAC,GAAGqB,EAAE,OAAOgD,GAAE,GAAG,EAAEvE,EAAEC,EAAEC,CAAC
,CAAC,CAAC,IAAIqG,GAAGvG,GAAG,CAAC,GAAG,CAACwC,GAAE,GAAG,CAAC,GAAGxC,EAAE,EAAE,CAAC+C,GAAG,EAAE,GAAG,CAACxB,EAAE0D,GAAGxC,EAAC,EAAE+B,GAAG/B,EAAC,CAAC,OAAOxC,EAAE,CAACA,aAAa8D,IAAc9D,GAAV,UAAakB,EAAE,EAAElB,CAAC,CAAC,CAAC,OAAOA,EAAE,CAACA,aAAa8D,IAAc9D,GAAV,UAAakB,EAAE,EAAElB,CAAC,CAAC,CAAC,EAAE,SAASuG,GAAGxG,EAAE,CAACA,KAAK,EAAe,OAAO,QAAQ,IAA5B,aAAiC,QAAQ,GAAGT,EAAE,EAAES,GAAG,EAAEA,CAAC,EAAE,MAAM,KAAK4E,EAAE,EAAE5E,GAAG,IAAI,QAAQ,MAAMT,EAAE,EAAES,GAAG,EAAE,CAAC,EAAE,CAACH,EAAE,kCAAkC2G,GAAG,SAAS5B,IAAI,CAAC,IAAI5E,EAAE2E,GAAG,EAAE3E,IAAIwG,GAAGxG,CAAC,EAAEuG,GAAG,IAAIE,GAAG,CAAC,EAAE,CAAC5G,EAAE,aAAa+E,GACpf,IAAI8B,GAAE1G,GAAOA,EAAE,IAAN,IAAcA,EAAE,MAAN,GAAeA,EAAE,MAAN,GAAW2G,GAAG,CAAC,EAAE,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,GAAG,EAAEC,GAAG,CAAC,EAAE,GAAG,GAAG,GAAG,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,IAAI,GAAG,EAAE,SAASC,GAAG7G,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAE,CAAC,OAAOgB,EAAEgD,GAAE,GAAG,EAAEvE,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,EAAC,EAAE,GAAG,CAAC,SAASuG,GAAG9G,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAE,CAAC,GAAGiB,EAAE,OAAOgD,GAAE,GAAG,EAAEvE,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAC,CAAC,CAAC,IAAIyG,GAAG/G,GAAG,CAAC,IAAIC,EAAEwF,GAAGzF,CAAC,EAAE,EAAEE,EAAE8G,GAAG/G,CAAC,EAAE,OAAAC,GAAGyF,GAAG3F,EAAEE,EAAED,CAAC,EAASC,CAAC,EAAE+G,GAAG,CAAC,EAAEC,GAAG,CAAClH,EAAEC,IAAI,CAACgH,GAAG,OAAO,EAAE,IAAI/G,EAAE,IAAID,IAAI,EAAEC,EAAEb,EAAE,EAAEW,MAAM,CAAC,GAAGC,GAAQC,GAAL,IAAOD,EAAEgH,GAAG,KAAU/G,GAAL,IAAOX,EAAE,EAAEU,IAAI,CAAC,EAAEN,EAAG,EAAEM,MAAM,CAAC,CAAC,EAAE,EAAEA,EAAE,OAAOgH,EAAE,EAAEE,GAAGnH,GAAG,CAAC,IAAIC,EAAEmH,GAAG,EAAE,OAAApH,EAAEA,EAAE,EAAE+E,GAAG9E,CAAC,EAASD,CAAC,EACve,SAASuE,GAAEvE,EAAEC,EAAE,CAAC,IAAIC,EAAE,UAAU,OAAO,EAAEC,EAAE,UAAU,OAAOgH,GAAG,IAAI,CAAC,QAAQ/G,EAAEiH,GAAG,EAAEnH,CAAC,EAAEG,EAAED,GAAG,EAAEE,GAAE,EAAEA,GAAEJ,EAAEI,KAAI,CAAC,IAAIC,GAAEJ,EAAE,EAAEG,EAAC,EAAEX,EAAG,EAAEU,EAAEC,KAAI,CAAC,EAAEC,EAAC,CAAC,OAAO+G,GAAGtH,EAAEE,EAAEE,EAAEH,CAAC,CAAC,CAAC,CAAC,CAC3J,IAAIsH,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,IAAI,CAAC,GAAG,CAACC,GAAG,CAAC,IAAI1H,EAAE,CAAC,KAAK,WAAW,QAAQ,WAAW,KAAK,IAAI,IAAI,IAAI,KAAK,iBAAiB,MAAgB,OAAO,WAAjB,UAA4B,UAAU,WAAW,UAAU,UAAU,CAAC,GAAG,KAAK,QAAQ,IAAI,GAAG,EAAE,SAAS,EAAEkB,GAAI,gBAAgB,EAAEjB,EAAE,IAAIA,KAAKuH,GAAYA,GAAGvH,CAAC,IAAb,OAAe,OAAOD,EAAEC,CAAC,EAAED,EAAEC,CAAC,EAAEuH,GAAGvH,CAAC,EAAE,IAAIC,EAAE,CAAC,EAAE,IAAID,KAAKD,EAAEE,EAAE,KAAK,GAAGD,CAAC,IAAID,EAAEC,CAAC,CAAC,EAAE,EAAEyH,GAAGxH,CAAC,CAAC,OAAOwH,EAAE,EAAEA,GACtW,SAASC,GAAG3H,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAOgD,GAAE,GAAG,EAAEvE,EAAEC,CAAC,EAAED,KAAK,EAAEC,KAAK,EAAE,IAAIC,EAAE,EAAE,OAAAuH,GAAG,EAAE,QAAQ,SAAStH,EAAEC,EAAE,CAAC,IAAIC,EAAEJ,EAAEC,EAAwB,IAAtBE,EAAEX,EAAE,EAAEO,EAAE,EAAEI,GAAG,IAAI,CAAC,EAAEC,EAAMA,EAAE,EAAEA,EAAEF,EAAE,OAAO,EAAEE,EAAEpB,EAAE,EAAEmB,KAAK,IAAI,CAAC,EAAED,EAAE,WAAWE,CAAC,EAAEpB,EAAE,EAAEmB,GAAG,IAAI,CAAC,EAAE,EAAEF,GAAGC,EAAE,OAAO,CAAC,CAAC,EAAS,CAAC,CAAC,SAASyH,GAAG5H,EAAEC,EAAE,CAAC,GAAGsB,EAAE,OAAOgD,GAAE,GAAG,EAAEvE,EAAEC,CAAC,EAAED,KAAK,EAAEC,KAAK,EAAE,IAAIC,EAAEuH,GAAG,EAAEhI,EAAE,EAAEO,GAAG,IAAI,CAAC,EAAEE,EAAE,OAAO,IAAIC,EAAE,EAAE,OAAAD,EAAE,QAAQ,SAASE,EAAE,CAACD,GAAGC,EAAE,OAAO,CAAC,CAAC,EAAEX,EAAE,EAAEQ,GAAG,IAAI,CAAC,EAAEE,EAAS,CAAC,CAAC,SAAS0H,GAAG7H,EAAE,CAAC,OAAOuB,EAAEgD,GAAE,GAAG,EAAEvE,CAAC,EAAE,EAAE,CAAC,SAAS8H,GAAG9H,EAAEC,EAAEC,EAAEC,EAAE,CAAC,OAAOoB,EAAEgD,GAAE,GAAG,EAAEvE,EAAEC,EAAEC,EAAEC,CAAC,EAAE,EAAE,CAC/c,SAAS4H,GAAG/H,EAAEC,EAAEC,EAAEC,EAAEC,EAAE,CAAC,OAAOmB,EAAEgD,GAAE,GAAG,EAAEvE,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAE,EAAE,CAAC,IAAI4H,GAAG,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,EAAE,SAASC,GAAGjI,EAAEC,EAAEC,EAAEC,EAAE,CA
AC,GAAGoB,EAAE,OAAOgD,GAAE,GAAG,EAAEvE,EAAEC,EAAEC,EAAEC,CAAC,EAAEF,KAAK,EAAEC,KAAK,EAAEC,KAAK,EAAE,QAAQC,EAAE,EAAEC,EAAE,EAAEA,EAAEH,EAAEG,IAAI,CAAC,IAAIC,GAAEb,EAAE,EAAEQ,GAAG,IAAI,CAAC,EAAEM,GAAEd,EAAE,EAAEQ,EAAE,GAAG,IAAI,CAAC,EAAEA,GAAG,EAAE,QAAQO,GAAE,EAAEA,GAAED,GAAEC,KAAI,CAAC,IAAIC,GAAEpB,EAAE,EAAEiB,GAAEE,KAAI,CAAC,EAAEE,GAAEsH,GAAGhI,CAAC,EAAMS,KAAJ,GAAYA,KAAL,KAAaT,IAAJ,EAAMiC,GAAGC,IAAGmC,GAAG3D,GAAE,CAAC,CAAC,EAAEA,GAAE,OAAO,GAAGA,GAAE,KAAKD,EAAC,CAAC,CAACL,GAAGG,EAAC,CAAC,OAAAd,EAAE,EAAEU,GAAG,IAAI,CAAC,EAAEC,EAAS,CAAC,CAAC,IAAI8H,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,EAAEC,GAAG,CAAC,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,GAAG,EAAE,EAAE,SAASC,GAAGpI,EAAE,CAAC,IAAIC,EAAE,MAAMwF,GAAGzF,CAAC,EAAE,CAAC,EAAE,OAAA0F,GAAG1F,EAAEC,EAAE,EAAEA,EAAE,MAAM,EAASA,CAAC,CACjf,IAAIoI,GAAG,CAACrI,EAAEC,IAAI,CAAChB,EAAE,EAAE,IAAIe,EAAEC,IAAI,CAAC,CAAC,EAC/B,SAASqI,GAAGtI,EAAEC,EAAEC,EAAEC,EAAE,CAAC,SAASC,EAAEU,EAAEC,GAAE8C,GAAE,CAAC,IAAI/C,EAAY,OAAOA,GAAjB,SAAmBA,EAAE,SAAS,EAAEA,GAAG,GAAGA,EAAE,OAAOC,IAAGD,EAAE+C,GAAE,CAAC,EAAE/C,EAAE,OAAOA,CAAC,CAAC,SAAST,EAAES,EAAEC,GAAE,CAAC,OAAOX,EAAEU,EAAEC,GAAE,GAAG,CAAC,CAAC,SAAST,GAAEQ,EAAEC,GAAE,CAAC,SAAS8C,GAAE0E,GAAG,CAAC,MAAO,GAAEA,GAAG,GAAG,EAAEA,GAAG,EAAE,CAAC,CAAC,IAAIC,GAAE,OAAKA,GAAE3E,GAAE/C,EAAE,YAAY,EAAEC,GAAE,YAAY,CAAC,KAAxC,IAAiDyH,GAAE3E,GAAE/C,EAAE,SAAS,EAAEC,GAAE,SAAS,CAAC,KAAlC,IAAuCyH,GAAE3E,GAAE/C,EAAE,QAAQ,EAAEC,GAAE,QAAQ,CAAC,GAAUyH,EAAC,CAAC,SAASjI,GAAEO,EAAE,CAAC,OAAOA,EAAE,OAAO,EAAE,CAAC,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,EAAE,IAAK,GAAE,OAAOA,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAC5f,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,CAAC,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,EAAE,IAAK,GAAE,OAAO,IAAI,KAAKA,EAAE,YAAY,EAAE,EAAE,GAAG,EAAE,CAAC,CAAC,CAAC,SAASN,GAAEM,EAAE,CAAC,IAAIC,GAAED,EAAE,GAAG,IAAIA,EAAE,IAAI,KAAM,IAAI,KAAKA,EAAE,GAAG,KAAK,EAAE,CAAC,EAAG,QAAQ,CAAC,EAAE,EAAEC,IAAG,CAAC,IAAI8C,GAAE/C,EAAE,SAAS,EAAE0H,IAAG9B,GAAE5F,EAAE,YAAY,CAAC,EAAEoH,GAAGC,IAAItE,EAAC,EAAE,GAAG9C,GAAEyH,GAAE1H,EAAE,QAAQ,EAAEC,IAAGyH,GAAE1H,EAAE,QAAQ,EAAE,EAAEA,EAAE,QAAQ,CAAC,EAAE,GAAG+C,GAAE/C,EAAE,SAAS+C,GAAE,CAAC,GAAG/C,EAAE,SAAS,CAAC,EAAEA,EAAE,YAAYA,EAAE,YAAY,EAAE,CAAC,OAAO,CAACA,EAAE,QAAQA,EAAE,QAAQ,EAAEC,EAAC,EAAE,KAAK,CAAC,CAAC,OAAA8C,GAAE,IAAI,KAAK/C,EAAE,YAAY,EAAE,EAAE,EAAE,CAAC,EAAEC,GAAER,GAAE,IAAI,KAAKO,EAAE,YAAY,EACnf,EAAE,CAAC,CAAC,EAAE+C,GAAEtD,GAAEsD,EAAC,EAAS,GAAGvD,GAAES,GAAED,CAAC,EAAE,GAAGR,GAAEuD,GAAE/C,CAAC,EAAEA,EAAE,YAAY,EAAE,EAAEA,EAAE,YAAY,EAAEA,EAAE,YAAY,EAAE,CAAC,CAACd,KAAK,EAAEC,KAAK,EAAEC,KAAK,EAAEC,KAAK,EAAE,IAAIM,GAAElB,EAAE,EAAEY,EAAE,IAAI,IAAI,CAAC,EAAEA,EAAE,CAAC,GAAGZ,EAAE,EAAEY,GAAG,IAAI,CAAC,EAAE,GAAGZ,EAAE,EAAEY,EAAE,GAAG,IAAI,CAAC,EAAE,GAAGZ,EAAE,EAAEY,EAAE,GAAG,IAAI,CAAC,EAAE,GAAGZ,EAAE,EAAEY,EAAE,IAAI,IAAI,CAAC,EAAE,GAAGZ,EAAE,EAAEY,EAAE,IAAI,IAAI,CAAC,EAAE,GAAGZ,EAAE,EAAEY,EAAE,IAAI,IAAI,CAAC,EAAE,GAAGZ,EAAE,EAAEY,EAAE,IAAI,IAAI,CAAC,EAAE,GAAGZ,EAAE,EAAEY,EAAE,IAAI,IAAI,CAAC,EAAE,GAAGZ,EAAE,EAAEY,EAAE,IAAI,IAAI,CAAC,EAAE,GAAGZ,EAAE,EAAEY,EAAE,IAAI,IAAI,CAAC,EAAE,GAAGM,GAAEqD,GAAErD,EAAC,EAAE,EAAE,EAAEP,EAAE4D,GAAE5D,CAAC,EAAEO,GAAE,CAAC,KAAK,uBAAuB,KAAK,WAAW,KAAK,WAAW,KAAK,KAAK,KAAK,cAAc,KAAK,QAAQ,KAAK,WAAW,KAAK,WAAW,KAAK,WACxf,MAAM,KAAK,MAAM,KAAK,MAAM,WAAW,MAAM,WAAW,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,KAAK,MAAM,IAAI,EAAE,QAAQC,MAAKD,GAAEP,EAAEA,EAAE,
QAAQ,IAAI,OAAOQ,GAAE,GAAG,EAAED,GAAEC,EAAC,CAAC,EAAE,IAAIC,GAAE,2DAA2D,MAAM,GAAG,EAAEC,GAAE,wFAAwF,MAAM,GAAG,EAAEH,GAAE,CAAC,KAAKK,GAAGH,GAAEG,EAAE,EAAE,EAAE,UAAU,EAAE,CAAC,EAAE,KAAKA,GAAGH,GAAEG,EAAE,EAAE,EAAE,KAAKA,GACzfF,GAAEE,EAAE,EAAE,EAAE,UAAU,EAAE,CAAC,EAAE,KAAKA,GAAGF,GAAEE,EAAE,EAAE,EAAE,KAAKA,GAAGT,GAAGS,EAAE,GAAG,MAAM,IAAI,EAAE,CAAC,EAAE,KAAKA,GAAGT,EAAES,EAAE,GAAG,CAAC,EAAE,KAAKA,GAAGV,EAAEU,EAAE,GAAG,EAAE,GAAG,EAAE,KAAKA,GAAGN,GAAEM,CAAC,EAAE,SAAS,EAAE,UAAU,CAAC,EAAE,KAAKA,GAAGN,GAAEM,CAAC,EAAE,KAAKA,GAAGT,EAAES,EAAE,GAAG,CAAC,EAAE,KAAKA,IAAIA,EAAEA,EAAE,GAAMA,GAAH,EAAKA,EAAE,GAAG,GAAGA,IAAIA,GAAG,IAAWT,EAAES,EAAE,CAAC,GAAG,KAAKA,GAAG,CAAC,QAAQC,GAAE,EAAE8C,GAAE,EAAEA,IAAG/C,EAAE,GAAG,EAAEC,KAAI2F,GAAE5F,EAAE,GAAG,IAAI,EAAEoH,GAAGC,IAAItE,IAAG,EAAE,CAAC,OAAOxD,EAAES,EAAE,GAAGC,GAAE,CAAC,CAAC,EAAE,KAAKD,GAAGT,EAAES,EAAE,GAAG,EAAE,CAAC,EAAE,KAAKA,GAAGT,EAAES,EAAE,GAAG,CAAC,EAAE,KAAK,IAAI;AAAA,EAAK,KAAKA,GAAG,GAAGA,EAAE,IAAI,GAAGA,EAAE,GAAG,KAAK,KAAK,KAAKA,GAAGT,EAAES,EAAE,GAAG,CAAC,EAAE,KAAK,IAAI,IAAK,KAAKA,GAAGA,EAAE,IAAI,EAAE,KAAKA,GAAGT,EAAE,KAAK,OAAOS,EAAE,GAAG,EAAEA,EAAE,IAAI,CAAC,EAAE,CAAC,EAAE,KAAKA,GACnf,CAAC,IAAIC,GAAE,KAAK,OAAOD,EAAE,GAAG,GAAGA,EAAE,GAAG,GAAG,GAAG,CAAC,EAA8B,GAA5B,IAAIA,EAAE,GAAG,IAAIA,EAAE,GAAG,GAAG,GAAGC,KAAOA,GAAMA,IAAJ,KAAQ8C,IAAG/C,EAAE,GAAG,IAAIA,EAAE,IAAI,EAAK+C,IAAH,GAASA,IAAH,GAAM6C,GAAE5F,EAAE,EAAE,IAAIC,GAAE,QAAQ,CAACA,GAAE,GAAG,IAAI8C,IAAG/C,EAAE,GAAG,EAAEA,EAAE,GAAG,GAAG,GAAM+C,IAAH,GAASA,IAAH,GAAM6C,GAAE5F,EAAE,GAAG,IAAI,CAAC,IAAIC,IAAG,CAAC,OAAOV,EAAEU,GAAE,CAAC,CAAC,EAAE,KAAKD,GAAGA,EAAE,GAAG,KAAKA,GAAGT,EAAE,KAAK,OAAOS,EAAE,GAAG,GAAGA,EAAE,GAAG,GAAG,GAAG,CAAC,EAAE,CAAC,EAAE,KAAKA,IAAIA,EAAE,GAAG,MAAM,SAAS,EAAE,UAAU,CAAC,EAAE,KAAKA,GAAGA,EAAE,GAAG,KAAK,KAAKA,GAAG,CAACA,EAAEA,EAAE,GAAG,IAAIC,GAAE,GAAGD,EAAE,OAAAA,EAAE,KAAK,IAAIA,CAAC,EAAE,IAAUC,GAAE,IAAI,MAAY,QAAQD,EAAE,GAAG,IAAIA,EAAE,KAAK,MAAM,EAAE,CAAC,EAAE,KAAKA,GAAGA,EAAE,GAAG,KAAK,IAAI,GAAG,EAAEZ,EAAEA,EAAE,QAAQ,MAAM,MAAU,EAAE,IAAIQ,MAAKD,GAAEP,EAAE,SAASQ,EAAC,IACrgBR,EAAEA,EAAE,QAAQ,IAAI,OAAOQ,GAAE,GAAG,EAAED,GAAEC,EAAC,EAAEP,CAAC,CAAC,GAAoC,OAAjCD,EAAEA,EAAE,QAAQ,QAAQ,GAAG,EAAEQ,GAAE0H,GAAGlI,CAAC,EAAKQ,GAAE,OAAOT,EAAS,GAAEoI,GAAG3H,GAAEV,CAAC,EAASU,GAAE,OAAO,EAAC,CAAC,SAAS+H,GAAGzI,EAAE,CAAC,GAAG,CAACA,EAAE,CAAC,OAAOC,EAAE,CAACoC,GAAEpC,CAAC,CAAC,CAAC,CAAC,SAASyI,GAAG1I,EAAE,CAAC,IAAIC,EAAE,CAAC,EAAEC,EAAE,IAAIA,KAAKF,GAAG,SAASG,EAAE,CAAC,IAAIC,EAAEJ,EAAEG,CAAC,EAAEF,EAAEE,CAAC,EAAc,OAAOC,GAAnB,WAAqB,UAAU,CAACuI,EAAG,KAAKxI,CAAC,EAAE,GAAG,CAAC,OAAOC,EAAE,MAAM,KAAK,SAAS,CAAC,QAAC,CAAQoC,KAAImG,EAAG,IAAI,IAAIxI,GAAGkC,GAAE,EAAExB,IAAO+H,KAAJ,GAAWD,EAAG,SAAP,IAAgBC,GAAE,EAAE9F,IAAI,EAAE2F,GAAGI,EAAE,EAAe,OAAO,OAApB,KAA4B,OAAO,GAAG,GAAG,CAAC,EAAEzI,CAAC,GAAGF,CAAC,EAAE,OAAOD,CAAC,CAAC,IAAI2I,GAAE,EAAE/H,GAAE,KAAKiI,GAAG,EAAEH,EAAG,CAAC,EAAEI,GAAG,CAAC,EAAEC,GAAG,CAAC,EAAEC,GAAG,EAAEC,EAAG,KAAKC,GAAG,CAAC,EAC7e,SAASnI,GAAI,CAAC,OAAO,IAAI,QAAQ,CAAChB,EAAEC,IAAI,CAACiJ,EAAG,CAAC,QAAQlJ,EAAE,OAAOC,CAAC,CAAC,CAAC,CAAC,CAAC,SAASmJ,IAAI,CAAC,IAAIpJ,EAAEgH,GAAG,KAAK,EAAE/G,EAAED,EAAE,GAAGP,EAAE,EAAEO,GAAG,IAAI,CAAC,EAAEC,EAAER,EAAE,EAAEO,EAAE,GAAG,IAAI,CAAC,EAAEC,EAAE,MAAMA,EAAE0I,EAAG,CAAC,EAAE,IAAIzI,EAAE6I,GAAG9I,CAAC,EAAE,OAASC,IAAT,SAAaA,EAAE+I,KAAKF,GAAG9I,CAAC,EAAEC,EAAE8I,GAAG9I,CAAC,EAAED,GAAGA,EAAEC,EAAEX,EAAE,EAAES,EAAE,GAAG,IAAI,CAAC,EAAEC,EAASD,CAAC,CAAC,SAASqJ,GAAI,CAAC,IAAIrJ,EAAET,EAAE,EAAEsB,GAAE,GAAG,IAAI,CAAC,EAAE,OAAAb,EAAEsC,GAAE0G,GAAGhJ,CAAC,CAAC,EAAE,EAAE8C,GAAU9C,EAAE,CAAC,CACtS,SAASsJ,GAAGtJ,EAAE,CAAC,GAAG,CAACwC,GAAE,CAAC,GAAOoG,KAAJ,EAAM,CAAC,IAAI3I,EAAE,GAAGC,EAA
E,GAAGF,EAAE,CAACG,EAAE,IAAI,CAAC,GAAG,CAACqC,KAAIsG,GAAG3I,EAAEF,EAAE,GAAGC,GAAG,CAAC0I,GAAE,EAAEH,GAAG,IAAIc,GAAG1I,EAAC,CAAC,EAAe,OAAO,QAApB,KAA6B,QAAQ,GAAG,IAAI,QAAQ,GAAG,OAAO,EAAEV,EAAE,GAAG,GAAG,CAAC,IAAIC,EAAEiJ,EAAG,CAAC,OAAO9I,GAAE,CAACH,EAAEG,GAAEJ,EAAE,EAAE,CAAC,IAAIE,EAAE,GAAG,GAAG,CAACQ,GAAE,CAAC,IAAIP,GAAE4I,EAAG5I,KAAI4I,EAAG,MAAM/I,EAAEG,GAAE,OAAOA,GAAE,SAASF,CAAC,EAAEC,EAAE,GAAG,CAAC,GAAGF,GAAG,CAACE,EAAE,MAAMD,CAAE,CAAC,CAAC,EAAEF,EAAE,GAAGD,IAAI2I,GAAE,EAAE/H,GAAEuI,GAAG,EAAe,OAAO,QAApB,KAA6B,QAAQ,GAAG,IAAI,QAAQ,GAAG,MAAM,EAAEX,GAAG,IAAIe,GAAG3I,EAAC,CAAC,EAAE,MAAU+H,KAAJ,GAAOA,GAAE,EAAEH,GAAGgB,EAAE,EAAEC,GAAG7I,EAAC,EAAEA,GAAE,KAAKsI,GAAG,QAAQhJ,GAAGoG,GAAGpG,CAAC,CAAC,GAAGkC,GAAE,kBAAkBuG,EAAC,EAAE,EAAE,OAAOE,EAAE,CAAC,CAC/d,SAASa,GAAG3J,EAAE,CAAC,OAAOsJ,GAAGrJ,GAAG,CAACD,EAAE,EAAE,KAAKC,CAAC,CAAC,CAAC,CAAC,CAACiE,GAAE,GAAG,EAChD,IAAI0F,GAAG,CAAC,KAAKtF,GAAGG,GAAGY,GAAGE,GAAGC,GAAGI,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGC,GAAGO,GAAGC,GAAGa,GAAGC,GAAGC,GAAGC,GAAGC,GAAGE,EAAE,EAAE4B,GAAG,CAAC,EAAE,SAAS7J,EAAEC,EAAEC,EAAE,CAAC,OAAOyJ,GAAG,SAAS,CAAC,MAAM9J,EAAE,GAAGG,EAAEC,EAAEC,CAAC,CAAC,CAAC,CAAC,EAAE,EAAE,SAASF,EAAEC,EAAEC,EAAE,CAAC,MAAAF,KAAK,EAAG,IAAIkF,GAAGlF,CAAC,EAAG,GAAGC,IAAI,EAAEC,IAAI,CAAC,EAAEiF,GAAGnF,EAAEoF,KAAWD,EAAG,EAAE,EAAE,SAASnF,EAAE,CAAC8J,GAAG9J,IAAI,EAAE,CAACqB,EAAE,EAAE,CAACD,EAAG,OAAO,EAAE,EAAE8C,GAAE,GAAG,CAAC,EAAE,EAAE,SAASlE,EAAE,CAACA,KAAK,EAAEuB,EAAE,YAAY,CAAC,IAAI,gBAAgB,OAAOvB,CAAC,CAAC,EAAEiE,GAAGjE,CAAC,CAAC,EAAE,EAAEsF,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEI,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAE,IAAI,GAAG,EAAE,SAAStG,EAAEC,EAAE,CAACD,KAAK,EAAEA,GAAGC,IAAI,EAAE,WAAW,IAAI2E,GAAG,CAAC,EAAErD,EAAE,YAAY,CAAC,aAAavB,EAC5f,IAAI,cAAc,CAAC,GAAGA,EAAEkE,GAAE,GAAGlE,CAAC,IAAIA,EAAE,YAAY,CAAC,IAAI,cAAc,CAAC,CAAC,EAAE,EAAE,UAAU,CAAC,MAAM,EAAE,EAAE,EAAEwG,GAAG,EAAE,SAASxG,EAAE,CAACsB,GAAG4C,GAAE,GAAGlE,IAAI,CAAC,EAAE,IAAI,CAAC,EAAE,EAAE,SAASA,EAAEC,EAAEC,EAAE,CAACF,EAAEC,EAAE,UAAU,EAAE,QAAQ,CAAC,CAACD,GAAGA,IAAI,GAAG,WAAWC,EAAE,IAAIC,KAAK,EAAEF,EAAE,IAAI,KAAK,IAAIA,CAAC,EAAET,EAAE,EAAEW,GAAG,IAAI,CAAC,EAAEF,EAAE,cAAc,EAAET,EAAE,EAAEW,EAAE,GAAG,IAAI,CAAC,EAAEF,EAAE,cAAc,EAAET,EAAE,EAAEW,EAAE,GAAG,IAAI,CAAC,EAAEF,EAAE,YAAY,EAAET,EAAE,EAAEW,EAAE,IAAI,IAAI,CAAC,EAAEF,EAAE,WAAW,EAAET,EAAE,EAAEW,EAAE,IAAI,IAAI,CAAC,EAAEF,EAAE,YAAY,EAAET,EAAE,EAAEW,EAAE,IAAI,IAAI,CAAC,EAAEF,EAAE,eAAe,EAAE,KAAKT,EAAE,EAAEW,EAAE,IAAI,IAAI,CAAC,EAAEF,EAAE,UAAU,EAAEA,GAAGA,EAAE,QAAQ,EAAE,KAAK,IAAIA,EAAE,eAAe,EAC3f,EAAE,EAAE,EAAE,EAAE,EAAE,CAAC,GAAG,MAAM,EAAET,EAAE,EAAEW,EAAE,IAAI,IAAI,CAAC,EAAEF,CAAC,EAAE,EAAE,SAASA,EAAEC,EAAEC,EAAE,CAACF,EAAEC,EAAE,UAAU,EAAE,QAAQ,CAAC,CAACD,GAAGA,IAAI,GAAG,WAAWC,EAAE,IAAIC,KAAK,EAAEF,EAAE,IAAI,KAAK,IAAIA,CAAC,EAAET,EAAE,EAAEW,GAAG,IAAI,CAAC,EAAEF,EAAE,WAAW,EAAET,EAAE,EAAEW,EAAE,GAAG,IAAI,CAAC,EAAEF,EAAE,WAAW,EAAET,EAAE,EAAEW,EAAE,GAAG,IAAI,CAAC,EAAEF,EAAE,SAAS,EAAET,EAAE,EAAEW,EAAE,IAAI,IAAI,CAAC,EAAEF,EAAE,QAAQ,EAAET,EAAE,EAAEW,EAAE,IAAI,IAAI,CAAC,EAAEF,EAAE,SAAS,EAAET,EAAE,EAAEW,EAAE,IAAI,IAAI,CAAC,EAAEF,EAAE,YAAY,EAAE,KAAKT,EAAE,EAAEW,EAAE,IAAI,IAAI,CAAC,EAAEF,EAAE,OAAO,EAAEC,GAAGyG,GAAE1G,EAAE,YAAY,CAAC,EAAE2G,GAAGC,IAAI5G,EAAE,SAAS,CAAC,EAAEA,EAAE,QAAQ,EAAE,EAAE,EAAET,EAAE,EAAEW,EAAE,IAAI,IAAI,CAAC,EAAED,EAAEV,EAAE,EAAEW,EAAE,IAAI,IAAI,CAAC,EAAE,EAAE,GAAGF,EAAE,kBAAkB,GAAGC,EAAG,IAAI,KAAKD,EAAE,YAAY,EACrf,EAAE,CAAC,EAAG,kBAAkB,EAAE,IAAIG,EAAG,IAAI,KAAKH,EAAE,YAAY,EAAE,EAAE,CAAC,EAAG,kBAAkB,EAAEA,GAAGC,GAAGE,GAAGH,EAAE,kBAAkB,GAAG,KAAK,IAAIG,EAAEF
,CAAC,GAAG,EAAEV,EAAE,EAAEW,EAAE,IAAI,IAAI,CAAC,EAAEF,CAAC,EAAE,EAAE,SAASA,EAAE,CAACA,KAAK,EAAE,IAAIC,EAAE,IAAI,KAAKV,EAAE,EAAES,EAAE,IAAI,IAAI,CAAC,EAAE,KAAKT,EAAE,EAAES,EAAE,IAAI,IAAI,CAAC,EAAET,EAAE,EAAES,EAAE,IAAI,IAAI,CAAC,EAAET,EAAE,EAAES,EAAE,GAAG,IAAI,CAAC,EAAET,EAAE,EAAES,EAAE,GAAG,IAAI,CAAC,EAAET,EAAE,EAAES,GAAG,IAAI,CAAC,EAAE,CAAC,EAAEE,EAAEX,EAAE,EAAES,EAAE,IAAI,IAAI,CAAC,EAAEG,EAAEF,EAAE,kBAAkB,EAAEG,EAAG,IAAI,KAAKH,EAAE,YAAY,EAAE,EAAE,CAAC,EAAG,kBAAkB,EAAEI,EAAG,IAAI,KAAKJ,EAAE,YAAY,EAAE,EAAE,CAAC,EAAG,kBAAkB,EAAEK,GAAE,KAAK,IAAID,EAAED,CAAC,EAAE,SAAEF,EAAEX,EAAE,EAAES,EAAE,IAAI,IAAI,CAAC,EAAE,EAAOI,GAAGC,GAAGC,IAAGH,GAClf,EAAED,IAAII,IAAGH,KAAKC,EAAE,KAAK,IAAIC,EAAED,CAAC,EAAEH,EAAE,QAAQA,EAAE,QAAQ,EAAE,MAAM,EAAEC,EAAEI,GAAEF,GAAGD,EAAE,GAAGZ,EAAE,EAAES,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,OAAO,EAAEC,GAAGwG,GAAEzG,EAAE,YAAY,CAAC,EAAE0G,GAAGC,IAAI3G,EAAE,SAAS,CAAC,EAAEA,EAAE,QAAQ,EAAE,EAAE,EAAEV,EAAE,EAAES,EAAE,IAAI,IAAI,CAAC,EAAEE,EAAEX,EAAE,EAAES,GAAG,IAAI,CAAC,EAAEC,EAAE,WAAW,EAAEV,EAAE,EAAES,EAAE,GAAG,IAAI,CAAC,EAAEC,EAAE,WAAW,EAAEV,EAAE,EAAES,EAAE,GAAG,IAAI,CAAC,EAAEC,EAAE,SAAS,EAAEV,EAAE,EAAES,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,QAAQ,EAAEV,EAAE,EAAES,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,SAAS,EAAEV,EAAE,EAAES,EAAE,IAAI,IAAI,CAAC,EAAEC,EAAE,QAAQ,EAAED,EAAEC,EAAE,QAAQ,EAAE,IAAW8J,IAAIpG,GAAE3D,EAAE,GAAG,CAAC,KAAK,IAAI2D,EAAC,EAAE,EAAEA,GAAE,CAAC,KAAK,MAAMA,GAAE,UAAU,IAAI,EAAE,CAAC,CAAC,CAAC,KAAK,MAAMA,GAAE,EAAE,CAAC,CAACA,KAAI,IAAI,UAAU,IAAI,EAAE,EAAE,EAAE3D,IAAI,CAAC,EAAE,EAAE6G,GAAG,EAAEC,GACpf,EAAE,SAAS9G,EAAEC,EAAEC,EAAE,CAAC,SAASC,EAAEM,GAAE,CAAC,OAAOA,GAAEA,GAAE,aAAa,EAAE,MAAM,mBAAmB,GAAGA,GAAE,CAAC,EAAE,KAAK,CAACT,KAAK,EAAEC,KAAK,EAAEC,KAAK,EAAE,IAAIE,EAAG,IAAI,OAAM,YAAY,EAAEC,EAAE,IAAI,KAAKD,EAAE,EAAE,CAAC,EAAEE,GAAE,IAAI,KAAKF,EAAE,EAAE,CAAC,EAAEA,EAAEC,EAAE,kBAAkB,EAAE,IAAIE,GAAED,GAAE,kBAAkB,EAAEE,GAAE,KAAK,IAAIJ,EAAEG,EAAC,EAAEd,EAAE,EAAEO,GAAG,IAAI,CAAC,EAAE,GAAGQ,GAAEjB,EAAE,EAAEU,GAAG,IAAI,CAAC,EAAE,EAAOG,GAAGG,IAAGP,EAAEG,EAAEE,CAAC,EAAEJ,EAAEE,EAAEG,EAAC,EAAEN,EAAE+G,GAAG/G,CAAC,EAAEC,EAAE8G,GAAG9G,CAAC,EAAEM,GAAEH,GAAGX,EAAE,EAAES,GAAG,IAAI,CAAC,EAAEF,EAAEP,EAAE,EAAES,EAAE,GAAG,IAAI,CAAC,EAAED,IAAIR,EAAE,EAAES,GAAG,IAAI,CAAC,EAAED,EAAER,EAAE,EAAES,EAAE,GAAG,IAAI,CAAC,EAAEF,EAAE,EAAE,EAAE,IAAI,CAACqC,GAAE,EAAE,CAAC,EAAE,EAAE,SAASrC,EAAEC,EAAEC,EAAE,CAAC,OAAAF,KAAK,EAAEC,EAAEiH,GAAGjH,IAAI,EAAEC,IAAI,CAAC,EAAS0D,GAAG5D,CAAC,EAAE,MAAM,KAAKC,CAAC,CAAC,EAAE,EAAE,SAASD,EACtfC,EAAEC,EAAE,CAAC,OAAAF,KAAK,EAAEC,EAAEiH,GAAGjH,IAAI,EAAEC,IAAI,CAAC,EAAS0D,GAAG5D,CAAC,EAAE,MAAM,KAAKC,CAAC,CAAC,EAAE,EAAE,UAAU,CAAC,EAAE,EAAE,UAAU,CAAC,OAAO,KAAK,IAAI,CAAC,EAAE,EAAE,IAAI,CAAC,MAAA6C,IAAI,EAAO,QAAS,EAAE,EAAE,UAAU,CAAC,MAAO,WAAU,EAAE,EAAE,IAAI,YAAY,WAAW,YAAY,IAAI,EAAE,EAAE,UAAU,CAAC,OAAOxB,EAAE,cAAc,KAAK,EAAE,OAAO,UAAU,mBAAmB,EAAE,EAAE,SAAStB,EAAEC,EAAEC,EAAEC,EAAE,CAAmC,IAAlC+D,GAAE,GAAGjE,IAAI,EAAEsH,GAAG,OAAOrH,EAAED,EAAEE,IAAI,GAAG,EAAMA,EAAE,EAAEA,EAAED,EAAEC,IAAIoH,GAAGpH,CAAC,EAAER,EAAG,EAAEM,EAAEE,IAAI,CAAC,EAAE,OAAO,EAAEH,EAAE4D,GAAG,CAAC5D,EAAE,CAAC,EAAE4J,GAAG5J,CAAC,GAAG,MAAM,KAAKuH,EAAE,CAAC,EAAE,EAAE,SAASvH,EAAE,CAACA,KAAK,EAAE,IAAIC,EAAEZ,EAAE,EAAE,OAAO,GAAGW,GAAGC,GAAG,WAAWD,EAAE,MAAM,GAAG,QAAQE,EACxf,EAAE,GAAGA,EAAEA,GAAG,EAAE,CAAC,IAAIC,EAAEF,GAAG,EAAE,GAAGC,GAAGC,EAAE,KAAK,IAAIA,EAAEH,EAAE,SAAS,EAAE,IAAII,EAAE,KAAKD,EAAE,KAAK,IAAIH,EAAEG,CAAC,EAAEH,EAAE,CAACI,EAAEA,EAAE,IAAI,KAAKA,EAAE,WAAWD,GAAG,MAAMA,EAAE,OAAO,KAAK,EAAEjB,GAAE,OAAO,WAAW,QAAQ,GAAG,GAAG,CAACA,GAAE,KAAKkB,CAAC,EAAEhB,GAAE,EAAE,IAAIiB,EAAE,EAAE,MAAML,CAAC,MAAS,CAAC,CAACK,EAAE,MAAM,CAAC,GAAGA,EAAE,MAAM,EAAE,C
AAC,MAAM,EAAE,EAAE,EAAEsH,GAAG,EAAEC,GAAG,EAAEpD,GAAG,EAAEqD,GAAG,EAAEC,GAAG,EAAEC,GAAG,EAAEE,GAAG,EAAE/I,IAAGW,EAAE,WAAW,EAAEyI,GAAG,EAAE,SAAStI,EAAEC,EAAEC,EAAEC,EAAE,CAAC,OAAOmI,GAAGtI,IAAI,EAAEC,IAAI,EAAEC,IAAI,EAAEC,IAAI,CAAC,CAAC,CAAC,GACrW,UAAU,CAAC,SAASH,EAAEE,EAAEC,EAAE,CAAC,OAAAD,EAAEA,EAAE,QAAQA,EAAEwI,GAAGxI,CAAC,EAAEoC,GAAEpC,EAAE8J,GAAG9J,CAAC,EAAEgE,GAAE,GAAG,KAAK5B,GAAE,EAAE,EAAEM,GAAG,QAAQN,GAAE,CAAC,EAAEC,GAAGpC,EAAEiD,GAAG,EAASlD,CAAC,CAAC,IAAID,EAAE,CAAC,EAAE4J,EAAE,EAAO,GAAL1G,GAAG,EAAKtD,EAAE,gBAAgB,GAAG,CAAC,OAAOA,EAAE,gBAAgBI,EAAED,CAAC,CAAC,OAAOE,EAAE,CAACgC,GAAE,sDAAsDhC,CAAC,EAAEH,EAAEG,CAAC,CAAC,CAAC,OAAAwD,GAAGzD,EAAE,SAASC,EAAE,CAACF,EAAEE,EAAE,SAASA,EAAE,MAAM,CAAC,CAAC,EAAE,MAAMH,CAAC,EAAQ,CAAC,CAAC,GAAG,EAAEF,EAAE,SAAS,CAACG,EAAEC,KAAKJ,EAAE,SAASyC,GAAE,GAAGtC,EAAEC,CAAC,EAAEJ,EAAE,iBAAiB,CAACG,EAAEC,KAAKJ,EAAE,iBAAiByC,GAAE,GAAGtC,EAAEC,CAAC,EAC7ZJ,EAAE,yBAAyB,CAACG,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,MAAKZ,EAAE,yBAAyByC,GAAE,GAAGtC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,GAAEC,GAAEC,EAAC,EAAEZ,EAAE,4BAA4B,CAACG,EAAEC,KAAKJ,EAAE,4BAA4ByC,GAAE,IAAItC,EAAEC,CAAC,EAAEJ,EAAE,6BAA6B,CAACG,EAAEC,EAAEC,KAAKL,EAAE,6BAA6ByC,GAAE,IAAItC,EAAEC,EAAEC,CAAC,EAAEL,EAAE,0BAA0B,CAACG,EAAEC,EAAEC,KAAKL,EAAE,0BAA0ByC,GAAE,IAAItC,EAAEC,EAAEC,CAAC,EAAEL,EAAE,0BAA0BG,IAAIH,EAAE,0BAA0ByC,GAAE,IAAItC,CAAC,EAAEH,EAAE,kBAAkB,CAACG,EAAEC,EAAEC,KAAKL,EAAE,kBAAkByC,GAAE,IAAItC,EAAEC,EAAEC,CAAC,EAC7dL,EAAE,mBAAmBG,IAAIH,EAAE,mBAAmByC,GAAE,IAAItC,CAAC,EAAEH,EAAE,wBAAwB,CAACG,EAAEC,EAAEC,KAAKL,EAAE,wBAAwByC,GAAE,IAAItC,EAAEC,EAAEC,CAAC,EAAEL,EAAE,iBAAiB,CAACG,EAAEC,KAAKJ,EAAE,iBAAiByC,GAAE,IAAItC,EAAEC,CAAC,EAAEJ,EAAE,kBAAkB,CAACG,EAAEC,KAAKJ,EAAE,kBAAkByC,GAAE,IAAItC,EAAEC,CAAC,EAAEJ,EAAE,SAASG,IAAIH,EAAE,SAASyC,GAAE,IAAItC,CAAC,EAAEH,EAAE,iBAAiB,CAACG,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,KAAKR,EAAE,iBAAiByC,GAAE,IAAItC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAER,EAAE,kBAAkB,CAACG,EAAEC,EAAEC,EAAEC,EAAEC,KAAKP,EAAE,kBAAkByC,GAAE,IAAItC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEP,EAAE,kBAAkBG,IAAIH,EAAE,kBAAkByC,GAAE,IAAItC,CAAC,EAC5dH,EAAE,qBAAqB,CAACG,EAAEC,EAAEC,EAAEC,KAAKN,EAAE,qBAAqByC,GAAE,IAAItC,EAAEC,EAAEC,EAAEC,CAAC,EAAEN,EAAE,sBAAsB,CAACG,EAAEC,EAAEC,KAAKL,EAAE,sBAAsByC,GAAE,IAAItC,EAAEC,EAAEC,CAAC,EAAEL,EAAE,sBAAsBG,IAAIH,EAAE,sBAAsByC,GAAE,IAAItC,CAAC,EAAEH,EAAE,kBAAkBG,IAAIH,EAAE,kBAAkByC,GAAE,IAAItC,CAAC,EAAEH,EAAE,cAAc,CAACG,EAAEC,EAAEC,KAAKL,EAAE,cAAcyC,GAAE,IAAItC,EAAEC,EAAEC,CAAC,EAAEL,EAAE,eAAe,CAACG,EAAEC,EAAEC,EAAEC,KAAKN,EAAE,eAAeyC,GAAE,IAAItC,EAAEC,EAAEC,EAAEC,CAAC,EAAEN,EAAE,sBAAsBG,IAAIH,EAAE,sBAAsByC,GAAE,IAAItC,CAAC,EAAEH,EAAE,mBAAmBG,IAAIH,EAAE,mBAAmByC,GAAE,IAAItC,CAAC,EACxeH,EAAE,mBAAmB,CAACG,EAAEC,EAAEC,EAAEC,EAAEC,KAAKP,EAAE,mBAAmByC,GAAE,IAAItC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAEP,EAAE,QAAQ,CAACG,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,MAAKV,EAAE,QAAQyC,GAAE,IAAItC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,GAAEC,EAAC,EAAEV,EAAE,iBAAiBG,IAAIH,EAAE,iBAAiByC,GAAE,IAAItC,CAAC,EAAEH,EAAE,YAAY,CAACG,EAAEC,EAAEC,KAAKL,EAAE,YAAYyC,GAAE,IAAItC,EAAEC,EAAEC,CAAC,EAAEL,EAAE,iBAAiBG,IAAIH,EAAE,iBAAiByC,GAAE,IAAItC,CAAC,EAAE,IAAI2E,GAAG9E,EAAE,cAAc,KAAK8E,GAAG9E,EAAE,cAAcyC,GAAE,IAAI,EAAE0E,GAAGnH,EAAE,QAAQG,IAAIgH,GAAGnH,EAAE,QAAQyC,GAAE,IAAItC,CAAC,EAAE0J,GAAG7J,EAAE,MAAMG,IAAI0J,GAAG7J,EAAE,MAAMyC,GAAE,IAAItC,CAAC,EAAEH,EAAE,sBAAsB,KAAKA,EAAE,sBAAsByC,GAAE,IAAI,EAC7d,IAAIwH,GAAGjK,EAAE,yBAAyB,CAACG,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,KAAKyJ,GAAGjK,EAAE,yBAAyByC,GAAE,IAAItC,EAAEC,EAAEC,EAAEC,EAAEC,EAAEC,CAAC,EAAER,EAAE,4BAA4B,KAAKA,EAAE,4BAA4ByC,GAAE,IAAI,EAC1K,IAAIgF,GAAG,CAACt
H,EAAEC,EAAEC,EAAEC,KAAKmH,GAAGhF,GAAE,IAAItC,EAAEC,EAAEC,EAAEC,CAAC,EAAEuE,GAAG1E,IAAI0E,GAAGpC,GAAE,IAAItC,CAAC,EAAEiF,GAAGpF,EAAE,yBAAyBG,IAAIiF,GAAGpF,EAAE,yBAAyByC,GAAE,IAAItC,CAAC,EAAEyG,GAAG5G,EAAE,2BAA2B,KAAK4G,GAAG5G,EAAE,2BAA2ByC,GAAE,IAAI,EAAEyH,GAAG/J,IAAI+J,GAAGzH,GAAE,IAAItC,CAAC,EAAE8E,GAAG,CAAC9E,EAAEC,KAAK6E,GAAGxC,GAAE,IAAItC,EAAEC,CAAC,EAAEmH,GAAG,KAAKA,GAAG9E,GAAE,IAAI,EAAEyC,GAAG/E,IAAI+E,GAAGzC,GAAE,IAAItC,CAAC,EAAEqH,GAAGrH,IAAIqH,GAAG/E,GAAE,IAAItC,CAAC,EAAEgF,GAAGnF,EAAE,WAAW,CAACG,EAAEC,KAAK+E,GAAGnF,EAAE,WAAWyC,GAAE,IAAItC,EAAEC,CAAC,EAAEuJ,GAAGxJ,IAAIwJ,GAAGlH,GAAE,IAAItC,CAAC,EAAE6I,GAAG,KAAKA,GAAGvG,GAAE,IAAI,EAAEiH,GAAGvJ,IAAIuJ,GAAGjH,GAAE,IAAItC,CAAC,EAAEyJ,GAAG,KAAKA,GAAGnH,GAAE,IAAI,EAAEzC,EAAE,eAAe,OAAOA,EAAE,cAAc,OAC1d,SAASmK,GAAGhK,EAAE,CAACA,EAAE,OAAO,OAAO,CAAC,EAAEA,CAAC,EAAE,IAAIC,EAAEE,GAAG,IAAIA,EAAE,IAAI,EAAED,EAAEC,GAAGC,GAAGD,EAAEC,CAAC,IAAI,EAAE,OAAAJ,EAAE,iBAAiBC,EAAED,EAAE,gBAAgB,EAAEA,EAAE,aAAaC,EAAED,EAAE,YAAY,EAAEA,EAAE,OAAOE,EAAEF,EAAE,MAAM,EAAEA,EAAE,UAAUC,EAAED,EAAE,SAAS,EAAEA,EAAE,WAAWE,EAAEF,EAAE,UAAU,EAASA,CAAC,CAACH,EAAE,iBAAiBkD,GAAGlD,EAAE,WAAWX,GAAEW,EAAE,WAAWwH,GAAGxH,EAAE,UAAUuH,GAAGvH,EAAE,aAAakF,GAAGlF,EAAE,aAAaiE,GAAEjE,EAAE,aAAa8F,GAAG9F,EAAE,gBAAgB4F,GAAG5F,EAAE,WAAWkE,GAAGlE,EAAE,QAAQqE,GAAE,IAAI+F,GAAG/G,GAAE,SAASgH,GAAI,CAACD,IAAIE,GAAG,EAAEF,KAAK/G,GAAEgH,EAAG,EAC/b,SAASC,IAAI,CAAC,SAASnK,GAAG,CAAC,GAAG,CAACiK,KAAKA,GAAG,GAAGpK,EAAE,UAAU,GAAG,CAAC2C,MAAIjB,GAAGsD,GAAGjC,EAAE,EAAE9C,EAAGD,CAAC,EAAKA,EAAE,sBAAqBA,EAAE,qBAAqB,EAAK,CAAC0B,GAAE,CAAC,GAAG1B,EAAE,QAAQ,IAAgB,OAAOA,EAAE,SAArB,aAA+BA,EAAE,QAAQ,CAACA,EAAE,OAAO,GAAGA,EAAE,QAAQ,QAAQ,CAAC,IAAII,EAAEJ,EAAE,QAAQ,MAAM,EAAEgD,GAAG,QAAQ5C,CAAC,CAAC,CAAC4E,GAAGhC,EAAE,CAAC,CAAE,CAAC,GAAG,EAAE,EAAEG,IAAG,GAAGzB,EAAEzB,EAAGD,CAAC,EAAE0B,GAAGsD,GAAGjC,EAAE,EAAE,YAAY/C,CAAC,MAAM,CAAC,GAAGA,EAAE,OAAO,IAAgB,OAAOA,EAAE,QAArB,aAA8BA,EAAE,OAAO,CAACA,EAAE,MAAM,GAAGA,EAAE,OAAO,QAAQ8C,GAAG,QAAQ9C,EAAE,OAAO,MAAM,CAAC,EAAEgF,GAAGlC,EAAE,EAAE,EAAEK,KAAInD,EAAE,WAAWA,EAAE,UAAU,YAAY,EAAE,WAAW,UAAU,CAAC,WAAW,UAAU,CAACA,EAAE,UAAU,EAAE,CAAC,EACpiB,CAAC,EAAEG,EAAE,CAAC,EAAE,CAAC,GAAGA,EAAE,EAAE,CAAC,CAAC,GAAGH,EAAE,QAAQ,IAAgB,OAAOA,EAAE,SAArB,aAA+BA,EAAE,QAAQ,CAACA,EAAE,OAAO,GAAG,EAAEA,EAAE,QAAQ,QAAQA,EAAE,QAAQ,IAAI,EAAE,EAAE,OAAAsK,GAAG,EAGzHnL,EAAU,KACnB,CAGA,GAAG,EACC,OAAOJ,IAAY,UAAY,OAAOC,IAAW,SACnDA,GAAO,QAAUC,GACV,OAAO,QAAW,YAAc,OAAO,KAC9C,OAAO,CAAC,EAAG,IAAMA,EAAe,IC7FlC,IAAAsL,GAAAC,GAAA,CAAAC,GAAAC,KAAA,CAAAA,GAAA,0/ECAA,IAUIC,GASEC,GAMFC,GACAC,GACAC,GACAC,GAEEC,GA6CAC,GAyBAC,GAWOC,GA+GAC,GA9NbC,GAAAC,GAAA,kBAeEZ,GACmE,KAG/DC,GAE2B,KAK7BE,GAAc,GACdC,GAAe,GACfC,GAAU,GAERC,GAA0BO,GAAgC,CAE9D,GAAIA,IAAe,EACjB,MAAO,GAIT,GAAI,OAAO,kBAAsB,IAC/B,OAAI,OAAO,KAAS,KAAe,CAAC,KAAK,qBAEvC,QAAQ,KACJ,iCAAmCA,EACnC,uIACkE,EAEjE,GAIL,OAAO,QAAY,KAAe,QAAQ,UAAY,QAAQ,SAAS,MAEzE,QAAQ,KACJ,iCAAmCA,EACnC,4JAC4E,EAGlF,GAAI,CAGF,OAAI,OAAO,eAAmB,KAC5B,IAAI,eAAe,EAAE,MAAM,YAAY,IAAI,kBAAkB,CAAC,CAAC,EAK1D,YAAY,SAAS,IAAI,WAAW,CACzC,EAAG,GAAI,IAAK,IAAK,EAAG,EAAI,EAAI,EAAG,EAAG,EAAG,EAAI,GAAI,EAAK,EAAI,EAAG,EAAG,EAAI,EAAG,EACnE,EAAG,EAAI,EAAK,EAAK,EAAG,GAAI,GAAI,EAAG,EAAG,EAAG,GAAI,EAAI,IAAK,GAAI,EAAG,EAAG,GAAI,EAClE,CAAC,CAAC,CACJ,MAAY,CACV,MAAO,EACT,CACF,EAEMN,GAAkB,IAAe,CACrC,GAAI,CAeF,OAAO,YAAY,SAAS,IAAI,WAAW,CACzC,EAAK,GAAI,IAAK,IAAK,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,GAAI,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,GAAI,GAAI,EAAK,GAAK,EAAG,GAAI,EACvF,IAAK,GAAI,IAAK,GAAK,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAI,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAI,EAAI,IAAK,IAAK,EAAG,GAAI,EACzF,CAAC,CAAC,CACJ,MAAY,CACV,MAAO,EACT,CACF,EAEMC,GAAk
B,CAACM,EAAkBC,IACrCD,EAIKC,EAAa,8BAAgC,qBAE7CA,EAAa,yBAA2B,gBAItCN,GAAwB,MAAMO,GAA+C,CACxF,GAAIb,GACF,OAAO,QAAQ,QAAQ,EAEzB,GAAIC,GACF,MAAM,IAAI,MAAM,uDAAyD,EAE3E,GAAIC,GACF,MAAM,IAAI,MAAM,oDAAsD,EAGxED,GAAe,GAGf,IAAMa,EAAUD,EAAM,YAChBH,EAAaG,EAAM,WACnBE,EAAOF,EAAM,KAEbD,EAAaT,GAAuBO,CAAU,EAC9CC,EAAUI,GAAQX,GAAgB,EAElCY,EAAYH,EAAM,UAClBI,EAAqB,OAAOD,GAAc,SAAWA,EAAY,OACjEE,EAAeb,GAAgBM,EAASC,CAAU,EAClDO,EAAmB,OAAOH,GAAc,SAAWA,EAAUE,CAAY,EAAI,OAE/EE,EAAY,GAEVC,EAA8B,CAAC,EA8ErC,GA3EIP,EAAU,GACZO,EAAM,KAAK,IAAI,QAASC,GAAY,CAClC,WAAW,IAAM,CACfF,EAAY,GACZE,EAAQ,CACV,EAAGR,CAAO,CACZ,CAAC,CAAC,EAIJO,EAAM,KAAK,IAAI,QAAQ,CAACC,EAASC,IAAW,CAC1C,IAAMC,EAAUZ,EAAad,GAAyBD,GAChD4B,EAAiC,CACrC,WAAY,CAACC,EAAkBC,IAA4B,CACzD,GAAuCf,GAAcc,EAAS,SAAS,YAAY,GAC/E,OAAO,KAAS,IAClB,OAAO,IAAI,gBAAgB,IAAI,KAC3B,CAGE,IACF,EACA,CAAC,KAAM,iBAAiB,CAAC,CAAC,EAGhC,GAAIA,EAAS,SAAS,OAAO,EAAG,CAC9B,GAAIP,EACF,OAAOA,EAGT,IAAMS,EAASX,GAAsBU,EAGnC,OAAIT,IAAiB,qBACZU,EAAS,0BACPV,IAAiB,8BACnBU,EAAS,mCAIbA,EAASV,CAClB,CAEA,OAAOS,EAAkBD,CAC3B,CACF,EAEA,GAAuCd,EAErC,GADAa,EAAO,WAAaf,EAChB,OAAO,KAAS,IAClBe,EAAO,oBAA2B,SAAK,UAAW,sBAAsB,MACnE,CACL,IAAMI,EAAmB,uBAAuBL,EAAQ,SAAS,CAAC,IAClEC,EAAO,oBAAsB,IAAI,KAAK,CAACI,CAAgB,EAAG,CAAC,KAAM,iBAAiB,CAAC,CACrF,CAGFL,EAAQC,CAAM,EAAE,KAEZK,GAAU,CACR7B,GAAe,GACfD,GAAc,GACdD,GAAO+B,EACPR,EAAQ,CACV,EAECS,GAAS,CACR9B,GAAe,GACfC,GAAU,GACVqB,EAAOQ,CAAI,CACb,CAAC,CACP,CAAC,CAAC,EAEF,MAAM,QAAQ,KAAKV,CAAK,EAEpBD,EACF,MAAM,IAAI,MAAM,2DAA2DN,CAAO,IAAI,CAE1F,EAEaP,GAAc,IAAqB,CAC9C,GAAIP,IAAeD,GACjB,OAAOA,GAGT,MAAM,IAAI,MAAM,qCAAqC,CACvD,ICpOA,IAKaiC,GAeAC,GA6BAC,GAjDbC,GAAAC,GAAA,kBAGAC,KAEaL,GAAkB,CAACM,EAAcC,IAA6B,CACzE,IAAMC,EAAOC,GAAY,EAEnBC,EAAaF,EAAK,gBAAgBF,CAAI,EAAI,EAC1CK,EAAaH,EAAK,QAAQE,CAAU,EAC1C,OAAAF,EAAK,aAAaF,EAAMK,EAAYD,CAAU,EAC9CH,EAAO,KAAKI,CAAU,EAEfA,CACT,EAMaV,GACT,CAACW,EAAkCC,EAAgBC,EAClDC,IAAuC,CACtC,GAAI,OAAOH,GAAW,UAAYA,IAAY,KAAM,CAClD,GAAIE,EAAK,IAAIF,CAAO,EAClB,MAAM,IAAI,MAAM,+BAA+B,EAE/CE,EAAK,IAAIF,CAAO,CAEpB,CAEA,OAAO,QAAQA,CAAO,EAAE,QAAQ,CAAC,CAACI,EAAKC,CAAK,IAAM,CAChD,IAAMC,EAAQL,EAAUA,EAASG,EAAMA,EACvC,GAAI,OAAOC,GAAU,SACnBhB,GAAoBgB,EAAkCC,EAAO,IAAKJ,EAAMC,CAAO,UACtE,OAAOE,GAAU,UAAY,OAAOA,GAAU,SACvDF,EAAQG,EAAMD,EAAM,SAAS,CAAC,UACrB,OAAOA,GAAU,UAC1BF,EAAQG,EAAOD,EAAS,IAAM,GAAG,MAEjC,OAAM,IAAI,MAAM,mCAAmC,OAAOA,CAAK,EAAE,CAErE,CAAC,CACH,EAMSf,GAAkBiB,GAA0B,CACvD,IAAMX,EAAOC,GAAY,EAEnBW,EAAQZ,EAAK,UAAU,EAC7B,GAAI,CACF,IAAMa,EAAeb,EAAK,WAAW,CAAC,EACtCA,EAAK,iBAAiBa,EAAcA,EAAe,CAAC,EACpD,IAAMC,EAAYd,EAAK,OAAOa,EAAe,CAAC,EACxCE,EAAsBf,EAAK,QAAQa,EAAe,EAAI,CAAC,EACvDG,EAAeD,EAAsBf,EAAK,aAAae,CAAmB,EAAI,GACpF,MAAM,IAAI,MAAM,GAAGJ,CAAO,gBAAgBG,CAAS,oBAAoBE,CAAY,EAAE,CACvF,QAAE,CACAhB,EAAK,aAAaY,CAAK,CACzB,CACF,IC/DA,IAQaK,GARbC,GAAAC,GAAA,kBAKAC,KACAC,KAEaJ,GAAiBK,GAA6D,CACzF,IAAMC,EAAOC,GAAY,EACrBC,EAAmB,EACjBC,EAAmB,CAAC,EAEpBC,EAA0CL,GAAW,CAAC,EAE5D,GAAI,CACF,GAAIA,GAAS,mBAAqB,OAChCK,EAAW,iBAAmB,UAE5B,OAAOL,EAAQ,kBAAqB,UAAY,CAAC,OAAO,UAAUA,EAAQ,gBAAgB,GAC1FA,EAAQ,iBAAmB,GAAKA,EAAQ,iBAAmB,EAC7D,MAAM,IAAI,MAAM,qCAAqCA,EAAQ,gBAAgB,EAAE,EAGjF,GAAIA,GAAS,oBAAsB,OACjCK,EAAW,kBAAoB,UACtB,OAAOL,EAAQ,mBAAsB,UAAY,CAAC,OAAO,UAAUA,EAAQ,iBAAiB,EACrG,MAAM,IAAI,MAAM,qCAAqCA,EAAQ,iBAAiB,EAAE,EAG9EA,GAAS,YAAc,SACzBK,EAAW,UAAY,IAGzB,IAAIC,EAAgB,EACpB,OAAIN,GAAS,MAAQ,SACnBM,EAAgBC,GAAgBP,EAAQ,IAAKI,CAAM,GAGrDD,EAAmBF,EAAK,qBACpBI,EAAW,iBAAmBA,EAAW,kBAAoB,CAAC,CAACA,EAAW,UAAYC,CAAa,EACnGH,IAAqB,GACvBK,GAAe,2BAA4B,EAGzCR,GAAS,QAAU,QACrBS,GAAoBT,EAAQ,MAAO,GAAI,IAAI,QAAoC,CAACU,EAAKC,IAAU,CAC7F,IAAMC,EAAgBL,GAAgBG,EAAKN,CAAM,EAC3CS,EAAkBN,GAAgBI,EAAOP,CAAM,EAEjDH,EAAK,sBAAsBE,EAAkBS,EAAeC,CAAe,IAAM
,GACnFL,GAAe,iCAAiCE,CAAG,MAAMC,CAAK,GAAG,CAErE,CAAC,EAGI,CAACR,EAAkBC,CAAM,CAClC,OAASU,EAAG,CACV,MAAIX,IAAqB,GACvBF,EAAK,sBAAsBE,CAAgB,EAE7CC,EAAO,QAAQW,GAASd,EAAK,MAAMc,CAAK,CAAC,EACnCD,CACR,CACF,IChEA,IAQME,GAeAC,GAWAC,GAoBAC,GA4EOC,GAlIbC,GAAAC,GAAA,kBAKAC,KACAC,KAEMR,GAA4BS,GAAmD,CACnF,OAAQA,EAAwB,CAC9B,IAAK,WACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,WACH,MAAO,GACT,IAAK,MACH,MAAO,IACT,QACE,MAAM,IAAI,MAAM,yCAAyCA,CAAsB,EAAE,CACrF,CACF,EAEMR,GAAoBS,GAAmD,CAC3E,OAAQA,EAAe,CACrB,IAAK,aACH,MAAO,GACT,IAAK,WACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,+BAA+BA,CAAa,EAAE,CAClE,CACF,EAEMR,GAAwBS,GAAmD,CAC1EA,EAAQ,QACXA,EAAQ,MAAQ,CAAC,GAEdA,EAAQ,MAAM,UACjBA,EAAQ,MAAM,QAAU,CAAC,GAE3B,IAAMC,EAAUD,EAAQ,MAAM,QACzBC,EAAQ,+BAEXA,EAAQ,6BAA+B,KAIrCD,EAAQ,oBACRA,EAAQ,mBAAmB,KAAKE,IAAO,OAAOA,GAAO,SAAWA,EAAKA,EAAG,QAAU,QAAQ,IAC5FF,EAAQ,iBAAmB,GAE/B,EAEMR,GACF,CAACW,EAA8BC,EAC9BC,IAA2B,CAC1B,QAAWH,KAAME,EAAoB,CACnC,IAAIE,EAAS,OAAOJ,GAAO,SAAWA,EAAKA,EAAG,KAG9C,OAAQI,EAAQ,CACd,IAAK,QAEH,GADAA,EAAS,QACL,OAAOJ,GAAO,SAAU,CAC1B,IAAMK,EAAeL,EACrB,GAAIK,GAAc,WAAY,CAC5B,IAAMC,EAAgBC,GAAgB,aAAcJ,CAAM,EACpDK,EAAkBD,GAAgBF,EAAa,WAAYF,CAAM,EACnEM,GAAY,EAAE,0BAA0BR,EAAsBK,EAAeE,CAAe,IAC5F,GACFE,GAAe,oDAAoDL,EAAa,UAAU,GAAG,CAEjG,CACA,GAAIA,GAAc,WAAY,CAC5B,IAAIM,EAAaN,EAAa,YAE1B,OAAOM,GAAc,UAAY,CAAC,OAAO,UAAUA,CAAU,GAAKA,EAAa,KACjFA,EAAa,GAEf,IAAML,EAAgBC,GAAgB,aAAcJ,CAAM,EACpDK,EAAkBD,GAAgBI,EAAW,SAAS,EAAGR,CAAM,EACjEM,GAAY,EAAE,0BAA0BR,EAAsBK,EAAeE,CAAe,IAC5F,GACFE,GAAe,oDAAoDL,EAAa,UAAU,GAAG,CAEjG,CACA,GAAIA,GAAc,gBAAiB,CACjC,IAAMC,EAAgBC,GAAgB,kBAAmBJ,CAAM,EACzDK,EAAkBD,GAAgBF,EAAa,gBAAiBF,CAAM,EACxEM,GAAY,EAAE,0BAA0BR,EAAsBK,EAAeE,CAAe,IAC5F,GACFE,GACI,yDAAyDL,EAAa,eAAe,GAAG,CAEhG,CACF,CACA,MACF,IAAK,SAEH,GADAD,EAAS,KACL,OAAOJ,GAAO,SAAU,CAC1B,IAAMY,EAAgBZ,EACtB,GAAIY,GAAe,gBAAiB,CAClC,GAAIA,EAAc,kBAAoB,QAAUA,EAAc,kBAAoB,OAChF,MAAM,IAAI,MAAM,oDAAoDA,EAAc,eAAe,EAAE,EAErG,IAAMN,EAAgBC,GAAgB,kBAAmBJ,CAAM,EACzDK,EAAkBD,GAAgBK,EAAc,gBAAiBT,CAAM,EACzEM,GAAY,EAAE,0BAA0BR,EAAsBK,EAAeE,CAAe,IAC5F,GACFE,GACI,yDAAyDE,EAAc,eAAe,GAAG,CAEjG,CACF,CACA,MACF,IAAK,OACL,IAAK,MACH,SACF,QACE,MAAM,IAAI,MAAM,qCAAqCR,CAAM,EAAE,CACjE,CAEA,IAAMS,EAAmBN,GAAgBH,EAAQD,CAAM,EACnDM,GAAY,EAAE,4BAA4BR,EAAsBY,CAAgB,IAAM,GACxFH,GAAe,oCAAoCN,CAAM,GAAG,CAEhE,CACF,EAESb,GAAqBO,GAAkE,CAClG,IAAMgB,EAAOL,GAAY,EACrBR,EAAuB,EACrBE,EAAmB,CAAC,EAEpBY,EAAkDjB,GAAW,CAAC,EACpET,GAAqB0B,CAAc,EAEnC,GAAI,CACF,IAAMnB,EAAyBT,GAAyB4B,EAAe,wBAA0B,KAAK,EAChGlB,EAAgBT,GAAiB2B,EAAe,eAAiB,YAAY,EAC7EC,EACF,OAAOD,EAAe,OAAU,SAAWR,GAAgBQ,EAAe,MAAOZ,CAAM,EAAI,EAEzFc,EAAmBF,EAAe,kBAAoB,EAC5D,GAAI,CAAC,OAAO,UAAUE,CAAgB,GAAKA,EAAmB,GAAKA,EAAmB,EACpF,MAAM,IAAI,MAAM,qCAAqCA,CAAgB,EAAE,EAGzE,IAAMC,EAAoBH,EAAe,mBAAqB,EAC9D,GAAI,CAAC,OAAO,UAAUG,CAAiB,GAAKA,EAAoB,GAAKA,EAAoB,EACvF,MAAM,IAAI,MAAM,qCAAqCA,CAAiB,EAAE,EAG1E,IAAMC,EAA+B,OAAOJ,EAAe,wBAA2B,SAClFR,GAAgBQ,EAAe,uBAAwBZ,CAAM,EAC7D,EAcJ,GAZAF,EAAuBa,EAAK,yBACxBlB,EAAwB,CAAC,CAACmB,EAAe,kBAAmB,CAAC,CAACA,EAAe,iBAAkBlB,EAC/F,CAAC,CAACkB,EAAe,gBAAiB,EAAGC,EAAiBC,EAAkBC,EACxEC,CAA4B,EAC5BlB,IAAyB,GAC3BS,GAAe,+BAAgC,EAG7CK,EAAe,oBACjBzB,GAAsBW,EAAsBc,EAAe,mBAAoBZ,CAAM,EAGnFY,EAAe,qBAAuB,OAAW,CACnD,GAAI,OAAOA,EAAe,oBAAuB,UAC/C,MAAM,IAAI,MAAM,+CAA+CA,EAAe,kBAAkB,EAAE,EAEpG,IAAMT,EAAgBC,GAAgB,qBAAsBJ,CAAM,EAC5DK,EAAkBD,GAAgBQ,EAAe,mBAAmB,SAAS,EAAGZ,CAAM,EACxFW,EAAK,0BAA0Bb,EAAsBK,EAAeE,CAAe,IAAM,GAC3FE,GACI,4DAA4DK,EAAe,kBAAkB,GAAG,CAExG,CAEA,GAAIA,EAAe,uBACjB,OAAW,CAACK,EAAMC,CAAK,IAAK,OAAO,QAAQN,EAAe,sBAAsB,EAAG,CACjF,GAAI,OAAOK,GAAS,SAClB,MAAM,IAAI,MAAM,kDAAkDA,CAAI,EAAE,EAE1E,GAAI,OAAOC,GAAU,UAAY,CAAC,OAAO,UAAUA,CAAK,GAAKA,EAAQ,EACnE,MAAM,IAAI,MAAM,iEAAi
EA,CAAK,EAAE,EAE1F,IAAMC,EAAaf,GAAgBa,EAAMjB,CAAM,EAC3CW,EAAK,6BAA6Bb,EAAsBqB,EAAYD,CAAK,IAAM,GACjFX,GAAe,wCAAwCU,CAAI,MAAMC,CAAK,GAAG,CAE7E,CAGF,OAAIN,EAAe,QAAU,QAC3BQ,GAAoBR,EAAe,MAAO,GAAI,IAAI,QAAoC,CAACS,EAAKH,IAAU,CACpG,IAAMf,EAAgBC,GAAgBiB,EAAKrB,CAAM,EAC3CK,EAAkBD,GAAgBc,EAAOlB,CAAM,EAEjDW,EAAK,0BAA0Bb,EAAsBK,EAAeE,CAAe,IAAM,GAC3FE,GAAe,qCAAqCc,CAAG,MAAMH,CAAK,GAAG,CAEzE,CAAC,EAGI,CAACpB,EAAsBE,CAAM,CACtC,OAASsB,EAAG,CACV,MAAIxB,IAAyB,GAC3Ba,EAAK,0BAA0Bb,CAAoB,EAErDE,EAAO,QAAQuB,GAASZ,EAAK,MAAMY,CAAK,CAAC,EACnCD,CACR,CACF,ICxNA,IAuCaE,GAqCAC,GAsCAC,GAMAC,GAqCAC,GAoBAC,GAOAC,GAxLbC,GAAAC,GAAA,kBAuCaR,GAA8BS,GAA2B,CACpE,OAAQA,EAAM,CACZ,IAAK,OACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,OACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,IACT,IAAK,UACH,MAAO,IACT,IAAK,UACH,MAAO,GACT,IAAK,UACH,MAAO,IACT,IAAK,SACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,SACH,MAAO,IAET,QACE,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,CACpD,CACF,EAKaR,GAA8BS,GAAqC,CAC9E,OAAQA,EAAW,CACjB,IAAK,GACH,MAAO,OACT,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,OACT,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,SACT,IAAK,GACH,MAAO,QACT,IAAK,IACH,MAAO,SACT,IAAK,IACH,MAAO,UACT,IAAK,GACH,MAAO,UACT,IAAK,IACH,MAAO,UACT,IAAK,GACH,MAAO,SACT,IAAK,GACH,MAAO,QACT,IAAK,IACH,MAAO,SAET,QACE,MAAM,IAAI,MAAM,0BAA0BA,CAAS,EAAE,CACzD,CACF,EAMaR,GAAwBS,GACpB,CAAC,OAAW,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,EAAG,OAAW,EAAG,EAAG,EAAG,EAAG,EAAG,OAAW,OAAW,MAAS,EAAEA,CAAQ,EAKxGR,GAAqCM,GAEoD,CAChG,OAAQA,EAAM,CACZ,IAAK,UAEH,OAAO,OAAO,aAAiB,KAAe,aAAa,KAAO,aAAe,YACnF,IAAK,UACH,OAAO,aACT,IAAK,QACH,OAAO,WACT,IAAK,OACH,OAAO,UACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,WACT,IAAK,QACH,OAAO,WACT,IAAK,OACH,OAAO,WACT,IAAK,UACH,OAAO,aACT,IAAK,SACH,OAAO,YACT,IAAK,QACH,OAAO,cACT,IAAK,SACH,OAAO,eACT,QACE,MAAM,IAAI,MAAM,qBAAqBA,CAAI,EAAE,CAC/C,CACF,EAKSL,GAAwBQ,GAAkE,CACrG,OAAQA,EAAU,CAChB,IAAK,UACH,MAAO,GACT,IAAK,OACH,MAAO,GACT,IAAK,UACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,IAAK,QACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,8BAA8BA,CAAQ,EAAE,CAC5D,CACF,EAKaP,GAA4BI,GAAyDA,IAAS,WACvGA,IAAS,WAAaA,IAAS,SAAWA,IAAS,SAAWA,IAAS,UAAYA,IAAS,SAC5FA,IAAS,OAKAH,GAA4BO,GAA0C,CACjF,OAAQA,EAAU,CAChB,IAAK,OACH,MAAO,GACT,IAAK,MACH,MAAO,GACT,IAAK,aACH,MAAO,GACT,IAAK,UACH,MAAO,GACT,IAAK,aACH,MAAO,GACT,QACE,MAAM,IAAI,MAAM,8BAA8BA,CAAQ,EAAE,CAC5D,CACF,ICvMA,IAYaC,GAZbC,GAAAC,GAAA,kBAYaF,GAAW,MAAMG,GAAsE,CAClG,GAAI,OAAOA,GAAS,SAClB,GAAI,OAAO,QAAY,KAAe,QAAQ,UAAY,QAAQ,SAAS,KAEzE,GAAI,CACF,OAAO,IAAI,WAAW,KAAM,SAASA,CAAI,CAAC,CAC5C,OAASC,EAAG,CACV,GAAIA,EAAE,OAAS,wBAAyB,CAEtC,IAAMC,EAAY,SAAiBF,CAAI,EACjCG,EAAuB,CAAC,EAC9B,cAAiBC,KAASF,EACxBC,EAAO,KAAKC,CAAK,EAEnB,OAAO,IAAI,WAAW,OAAO,OAAOD,CAAM,CAAC,CAC7C,CACA,MAAMF,CACR,KACK,CAEL,IAAMI,EAAW,MAAM,MAAML,CAAI,EACjC,GAAI,CAACK,EAAS,GACZ,MAAM,IAAI,MAAM,sCAAsCL,CAAI,EAAE,EAE9D,IAAMM,EAAsBD,EAAS,QAAQ,IAAI,gBAAgB,EAC3DE,EAAWD,EAAsB,SAASA,EAAqB,EAAE,EAAI,EAC3E,GAAIC,EAAW,WAGb,OAAO,IAAI,WAAW,MAAMF,EAAS,YAAY,CAAC,EAC7C,CAEL,GAAI,CAACA,EAAS,KACZ,MAAM,IAAI,MAAM,sCAAsCL,CAAI,qBAAqB,EAEjF,IAAMQ,EAASH,EAAS,KAAK,UAAU,EAEnCI,EACJ,GAAI,CAEFA,EAAS,IAAI,YAAYF,CAAQ,CACnC,OAASN,EAAG,CACV,GAAIA,aAAa,WAAY,CAE3B,IAAMS,EAAQ,KAAK,KAAKH,EAAW,KAAK,EACxCE,EAAS,IAAI,YAAY,OAAO,CAAC,QAASC,EAAO,QAASA,CAAK,CAAC,EAAE,MACpE,KACE,OAAMT,CAEV,CAEA,IAAIU,EAAS,EAEb,OAAa,CACX,GAAM,CAAC,KAAAC,EAAM,MAAAC,CAAK,EAAI,MAAML,EAAO,KAAK,EACxC,GAAII,EACF,MAEF,IAAME,EAAYD,EAAM,WACV,IAAI,WAAWJ,EAAQE,EAAQG,CAAS,EAChD,IAAID,CAAK,EACfF,GAAUG,CACZ,CACA,OAAO,IAAI,WAAWL,EAAQ,EAAGF,CAAQ,CAC3C,CACF,KAEK,QAAIP,aAAgB,KAClB,IAAI,WAAW,MAAMA,EAAK,YAAY,CAAC,EACrCA,aAAgB,WAClBA,EAEA,IAAI,WAAWA,CAAI,CAE9B,ICtFA,IAYMe,GAEAC,GAKFC,GACAC,GAESC
,GAQAC,GAWAC,GAzCbC,GAAAC,GAAA,kBAKAC,KAOMT,GAAiB,CAAC,IAAK,IAAK,IAAK,IAAK,GAAG,EAEzCC,GAAQ,CAACS,EAAeC,IAA0B,CAEtD,QAAQ,IAAI,IAAIX,GAAeU,CAAK,CAAC,IAAI,IAAI,KAAK,EAAE,YAAY,CAAC,IAAIC,CAAO,EAAE,CAChF,EAKaP,GAAkB,CAACQ,EAA2BC,IAA0B,CACnFX,GAAiBU,EACjBT,GAAQU,CACV,EAKaR,GAAM,CAACS,EAAoBC,IAAuB,CAC7D,IAAMC,EAAeC,GAAqBH,CAAQ,EAC5CI,EAAcD,GAAqBf,EAAc,EACnDc,GAAgBE,GAClBjB,GAAMe,EAAc,OAAOD,GAAQ,WAAaA,EAAI,EAAIA,CAAG,CAE/D,EAKaT,GAAwB,IAAIa,IAAiC,CACpEhB,IACFE,GAAI,GAAGc,CAAI,CAEf,IC7CA,IAOaC,GAPbC,GAAAC,GAAA,kBAKAC,KAEaH,GAAa,CAACI,EAAyBC,IAE5C,IAAKC,GAAkCD,CAAI,GAAGD,CAAU,ICThE,IAAAG,GAAAC,GAAA,oBCAA,IA8EMC,GA+BAC,GAKAC,GAKAC,GAWFC,GACEC,GAYOC,GAkCPC,GAoSOC,GArdbC,GAAAC,GAAA,kBAIAC,KAEAC,KAwEMZ,GAAsC,IAAI,IAAI,CAClD,CAAC,GAAI,GAAG,EACR,CAAC,IAAK,GAAG,EACT,CAAC,IAAK,GAAG,EACT,CAAC,IAAK,GAAG,EACT,CAAC,KAAM,GAAG,EACV,CAAC,KAAM,GAAG,EACV,CAAC,KAAM,EAAE,EACT,CAAC,MAAO,EAAE,EACV,CAAC,MAAO,EAAE,EACV,CAAC,MAAO,EAAE,EACV,CAAC,OAAQ,EAAE,EACX,CAAC,OAAQ,EAAE,EACX,CAAC,OAAQ,EAAE,EACX,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,QAAS,EAAE,EACZ,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,EAAE,EACb,CAAC,SAAU,CAAC,EACZ,CAAC,SAAU,CAAC,EAGZ,CAAC,SAAU,CAAC,EACZ,CAAC,UAAW,CAAC,EACb,CAAC,UAAW,CAAC,CACf,CAAC,EAEKC,GAAsB,CAAC,EAKvBC,GAA4BW,GAAiB,KAAK,KAAKA,EAAO,EAAE,EAAI,GAKpEV,GAAwBU,GAAiB,CAC7C,QAASC,EAAM,EAAGA,EAAMb,GAAU,OAAQa,IAAO,CAC/C,IAAMC,EAAgBd,GAAUa,CAAG,EACnC,GAAID,GAAQE,EACV,OAAOA,CAEX,CAEA,OAAO,KAAK,KAAKF,EAAO,EAAE,EAAI,EAChC,EAEIT,GAAO,EACLC,GAAqB,IAAMD,KAYpBE,GACT,MAAMU,EAAwBC,EAAsBC,EAAsBC,IAC/C,CACrB,IAAMC,EAAalB,GAAyBgB,CAAY,EAClDG,EAAgBL,EAAQ,OAAO,aAEjC,CAAC,KAAMI,EAAY,MAAO,eAAe,SAAW,eAAe,QAAQ,CAAC,EAChF,GAAI,CACF,IAAME,EAAiBN,EAAQ,kBAAkB,EACjDA,EAAQ,eAAe,EACvBM,EAAe,mBACXL,EAA+B,EAAuBI,EACtD,EAA4BD,CAChC,EACAJ,EAAQ,MAAM,EAEd,MAAMK,EAAc,SAAS,WAAW,IAAI,EAE5C,IAAME,EAAcF,EAAc,eAAe,EACjD,GAAIF,EAAiB,CAEnB,IAAMK,EAAeL,EAAgB,EACrC,OAAAK,EAAa,IAAI,IAAI,WAAWD,EAAa,EAAGL,CAAY,CAAC,EACtDM,CACT,KAGE,QAAO,IAAI,WAAWD,EAAY,MAAM,EAAGL,CAAY,CAAC,CAE5D,QAAE,CACAG,EAAc,QAAQ,CACxB,CACF,EAEFd,GAAN,KAAmD,CAqBjD,YAAoBS,EAAwB,CAAxB,aAAAA,EAClB,KAAK,aAAe,IAAI,IACxB,KAAK,YAAc,IAAI,IACvB,KAAK,mBAAqB,IAAI,IAC9B,KAAK,2BAA6B,CAAC,EACnC,KAAK,eAAiB,CAAC,EACvB,KAAK,gBAAkB,IAAI,IAC3B,KAAK,uBAAyB,IAAI,IAElC,OAAW,CAACS,CAAK,IAAKzB,GACpBC,GAAU,KAAKwB,CAAG,EAClB,KAAK,YAAY,IAAIA,EAAK,CAAC,CAAC,EAC5B,KAAK,mBAAmB,IAAIA,EAAK,CAAC,CAAC,CAEvC,CAEA,OAAOC,EAAeC,EAAwB,CAC5C,IAAMC,EAAiBD,EAAK,OACtBE,EAAYF,EAAK,WACjBG,EAAYH,EAAK,WACjBd,EAAOX,GAAyB4B,CAAS,EAGzCC,EAAe,KAAK,aAAa,IAAIL,CAAE,EAC7C,GAAI,CAACK,EACH,MAAM,IAAI,MAAM,uCAAuC,EAEzD,GAAIA,EAAa,eAAiBD,EAChC,MAAM,IAAI,MAAM,yCAAyCC,EAAa,YAAY,eAAeD,CAAS,EAAE,EAI9G,IAAME,EAAwB,KAAK,QAAQ,OAAO,aAE9C,CAAC,iBAAkB,GAAM,KAAAnB,EAAM,MAAO,eAAe,UAAY,eAAe,QAAQ,CAAC,EAGvFU,EAAcS,EAAsB,eAAe,EACzD,IAAI,WAAWT,CAAW,EAAE,IAAI,IAAI,WAAWK,EAAgBC,EAAWC,CAAS,CAAC,EACpFE,EAAsB,MAAM,EAI5B,IAAMV,EAAiB,KAAK,QAAQ,kBAAkB,EACtD,KAAK,QAAQ,eAAe,EAC5BA,EAAe,mBAAmBU,EAAuB,EAAGD,EAAa,QAAQ,OAAQ,EAAGlB,CAAI,EAEhGoB,GAAU,UAAW,IAAM,qCAAqCP,CAAE,GAAG,EAErE,KAAK,2BAA2B,KAAKM,CAAqB,CAC5D,CAEA,OAAOE,EAAqBC,EAAgC,CAE1D,IAAMC,EAAqB,KAAK,aAAa,IAAIF,CAAQ,EACzD,GAAI,CAACE,EACH,MAAM,IAAI,MAAM,2CAA2C,EAG7D,IAAMC,EAA0B,KAAK,aAAa,IAAIF,CAAa,EACnE,GAAI,CAACE,EACH,MAAM,IAAI,MAAM,gDAAgD,EAElE,GAAID,EAAmB,eAAiBC,EAAwB,aAC9D,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMxB,EAAOX,GAAyBkC,EAAmB,YAAY,EAG/Dd,EAAiB,KAAK,QAAQ,kBAAkB,EACtD,KAAK,QAAQ,eAAe,EAC5BA,EAAe,mBACXc,EAAmB,QAAQ,OAAQ,EAAGC,EAAwB,QAAQ,OAAQ,EAAGxB,CAAI,CAC3F,CAEA,uBAAuByB,EAAmBpB,EAAsBqB,EAAoC,CAClG,IAAIb,EACJ,GAAIa,EAAgB,CAElB,GADAb,EAAK,KAAK,gBAAgB,IAAIa,
CAAc,EACxCb,IAAO,OACT,MAAM,IAAI,MAAM,mCAAmC,EAErD,GAAIY,IAAWC,EACb,OAAAN,GACI,UACA,IAAM,uDAAuDf,CAAY,WACrEQ,CAAE,6BAA6B,EAChCA,EACF,GAAI,KAAK,QAAQ,oBAAoB,IAAI,KAAK,QAAQ,gBAAiB,EAC5E,MAAM,IAAI,MAAM;AAAA,sDAC8B,EAEhD,KAAK,gBAAgB,OAAOa,CAAc,CAC5C,MACEb,EAAKrB,GAAmB,EAG1B,YAAK,aAAa,IAAIqB,EAAI,CAAC,QAAS,CAAC,GAAAA,EAAI,OAA2B,OAAAY,CAAM,EAAG,aAAApB,CAAY,CAAC,EAC1F,KAAK,gBAAgB,IAAIoB,EAAQZ,CAAE,EACnCO,GACI,UACA,IAAM,uDAAuDf,CAAY,WAAWQ,CAAE,eAAe,EAClGA,CACT,CAEA,yBAAyBY,EAAyB,CAChD,IAAMZ,EAAK,KAAK,gBAAgB,IAAIY,CAAM,EACtCZ,IAAO,SACT,KAAK,aAAa,OAAOA,CAAE,EAC3B,KAAK,gBAAgB,OAAOY,CAAM,EAClCL,GAAU,UAAW,IAAM,4DAA4DP,CAAE,EAAE,EAE/F,CAGA,OAAOb,EAAc2B,EAAQ,eAAe,QAAU,eAAe,SAAW,eAAe,SAAmB,CAChH,IAAMpB,EAAajB,GAAqBU,CAAI,EAExCI,EAGEwB,GAAaD,EAAQ,eAAe,WAAa,eAAe,QAEhEE,GAAaF,EAAQ,eAAe,WAAa,eAAe,QACtE,GAAIC,GAAaC,EAAW,CAE1B,IAAMC,GADcF,EAAY,KAAK,YAAc,KAAK,oBAC5B,IAAIrB,CAAU,EACrCuB,EAICA,EAAQ,OAAS,EAEnB1B,EAAY0B,EAAQ,IAAI,EAGxB1B,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,EAPxEvB,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,CAU1E,MAEEvB,EAAY,KAAK,QAAQ,OAAO,aAAa,CAAC,KAAMG,EAAY,MAAAoB,CAAK,CAAC,EAGxE,IAAMI,EAAU,CAAC,GAAIvC,GAAmB,EAAG,OAA2B,OAAQY,CAAS,EACvF,YAAK,aAAa,IAAI2B,EAAQ,GAAI,CAAC,QAAAA,EAAS,aAAc/B,CAAI,CAAC,EAE/DoB,GAAU,UAAW,IAAM,uCAAuCpB,CAAI,WAAW+B,EAAQ,EAAE,EAAE,EACtFA,CACT,CAEA,IAAIlB,EAAkC,CACpC,OAAO,KAAK,aAAa,IAAIA,CAAE,GAAG,OACpC,CAEA,QAAQA,EAAuB,CAC7B,IAAMmB,EAAa,KAAK,aAAa,IAAInB,CAAE,EAC3C,GAAI,CAACmB,EACH,MAAM,IAAI,MAAM,+BAA+B,EAGjD,OAAAZ,GAAU,UAAW,IAAM,sCAAsCP,CAAE,gBAAgBmB,EAAW,QAAQ,EAAE,EAAE,EAE1G,KAAK,aAAa,OAAOnB,CAAE,EAC3B,KAAK,eAAe,KAAKmB,EAAW,QAAQ,MAAM,EAG3CA,EAAW,YACpB,CAEA,MAAM,SAASnB,EAAeP,EAAkD,CAC9E,IAAM0B,EAAa,KAAK,aAAa,IAAInB,CAAE,EAC3C,GAAI,CAACmB,EACH,MAAM,IAAI,MAAM,qBAAqB,EAEvC,MAAMvC,GAAgB,KAAK,QAASuC,EAAW,QAAQ,OAAQA,EAAW,aAAc1B,CAAe,CACzG,CAEA,uBAA8B,CAC5B,QAAWmB,KAAU,KAAK,2BAExBA,EAAO,QAAQ,EAIjB,GAFA,KAAK,2BAA6B,CAAC,EAE/B,KAAK,eAAe,SAAW,EAInC,GAAI,KAAK,QAAQ,gBAAkB,UAAW,CAC5C,QAAWA,KAAU,KAAK,eAAgB,CACxC,IAAMQ,EAAgB9C,GAAe,IAAIsC,EAAO,IAAI,EAGpD,IAAKA,EAAO,MAAQ,eAAe,WAAa,eAAe,QAAS,CAEtE,IAAMS,EAAW,KAAK,YAAY,IAAIT,EAAO,IAAI,GAAK,CAAC,EACnDQ,IAAkB,QAAaC,EAAS,QAAUD,EACpDR,EAAO,QAAQ,EAEfS,EAAS,KAAKT,CAAM,CAGxB,UAAYA,EAAO,MAAQ,eAAe,WAAa,eAAe,QAAS,CAE7E,IAAMS,EAAW,KAAK,mBAAmB,IAAIT,EAAO,IAAI,GAAK,CAAC,EAC1DQ,IAAkB,QAAaC,EAAS,QAAUD,EACpDR,EAAO,QAAQ,EAEfS,EAAS,KAAKT,CAAM,CAExB,MACEA,EAAO,QAAQ,CAEnB,CACA,KAAK,eAAiB,CAAC,CACzB,KAAO,CAGL,IAAIU,EAAkB,KAAK,uBAAuB,IAAI,KAAK,QAAQ,gBAAiB,EAC/EA,IACHA,EAAkB,CAAC,EACnB,KAAK,uBAAuB,IAAI,KAAK,QAAQ,iBAAmBA,CAAe,GAEjF,QAAWV,KAAU,KAAK,eACxBU,EAAgB,KAAKV,CAAM,EAE7B,KAAK,eAAiB,CAAC,CACzB,CACF,CAEA,SAAU,CACR,KAAK,YAAY,QAASK,GAAY,CACpCA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EACD,KAAK,mBAAmB,QAASK,GAAY,CAC3CA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EAED,KAAK,aAAa,QAASW,GAAY,CACrCA,EAAQ,QAAQ,OAAO,QAAQ,CACjC,CAAC,EAED,KAAK,uBAAuB,QAASN,GAAY,CAC/CA,EAAQ,QAAQL,GAAU,CACxBA,EAAO,QAAQ,CACjB,CAAC,CACH,CAAC,EACD,KAAK,aAAe,IAAI,IACxB,KAAK,YAAc,IAAI,IACvB,KAAK,mBAAqB,IAAI,IAC9B,KAAK,uBAAyB,IAAI,GACpC,CAEA,iBAAiBY,EAAmB,CAElC,IAAMC,EAAiB,KAAK,uBAAuB,IAAID,CAAS,EAC5DC,IACFA,EAAe,QAAQb,GAAU,CAC/BA,EAAO,QAAQ,CACjB,CAAC,EACD,KAAK,uBAAuB,OAAOY,CAAS,EAEhD,CACF,EAEa1C,GAAuB,IAAI4C,IACpC,IAAI7C,GAAmB,GAAG6C,CAAI,ICtdlC,IAGMC,GAsBOC,GAzBbC,GAAAC,GAAA,kBAGMH,GAAN,KAAgC,CAC9B,YAAYI,EAAoC,CAC9C,OAAO,OAAO,KAAMA,CAAS,CAC/B,CAGA,IAAW,UAAmB,CAC5B,OAAK,KAAK,MACR,KAAK,IACD,OAAO,oBAAoB,IAAI,EAAE,KAAK,EAAE,IAAIC,GAAQ,GAAI,KAAiCA,CAAI,CAAC,EAAE,EAAE,KAAK,GAAG,GAEzG,KAAK,GACd,CACF,EASaJ,GAAkEG,GAC3E,IAAIJ,GAA0BI,CAAS,IC1B3C,IAKaE
,GAaAC,GA6EAC,EA6IAC,GA0MAC,GAkDAC,GACAC,GAzebC,GAAAC,GAAA,kBAKaR,GAAN,KAAiB,CAOtB,OAAO,gBAAgBS,EAAqBC,EAAiD,CAC3F,OAAQD,EAAE,CAAC,IAAMC,EAAE,CAAC,EAAK,OAAY,CAACD,EAAE,CAAC,EAAGC,EAAE,CAAC,CAAC,CAClD,CACF,EAGaT,GAAN,KAAoB,CAQzB,OAAO,UAAUU,EAA0BC,EAA0BC,EAAW,GAAoC,CAClH,IAAMC,EAAQH,EAAM,OACdI,EAAQH,EAAM,OACpB,GAAIE,IAAU,EACZ,OAAOF,EAET,GAAIG,IAAU,EACZ,OAAOJ,EAET,IAAMK,EAAQ,KAAK,IAAIL,EAAM,OAAQC,EAAM,MAAM,EAC3CK,EAAQ,IAAI,MAAcD,CAAK,EAGrC,GAAIH,EAAU,CACZ,GAAIC,EAAQ,GAAKC,EAAQ,EACvB,OAEF,IAAMG,EACFlB,GAAW,gBAAgB,CAACW,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,EAAG,CAACF,EAAMG,EAAQ,CAAC,EAAGH,EAAMG,EAAQ,CAAC,CAAC,CAAC,EACzG,GAAIG,IAAiB,OACnB,OAEF,CAACD,EAAMD,EAAQ,CAAC,EAAGC,EAAMD,EAAQ,CAAC,CAAC,EAAIE,CACzC,CAEA,QAASC,EAAIN,EAAW,EAAI,EAAGM,GAAKH,EAAOG,IAAK,CAC9C,IAAMC,EAAON,EAAQK,EAAI,EAAI,EAAIR,EAAMG,EAAQK,CAAC,EAC1CE,EAAON,EAAQI,EAAI,EAAI,EAAIP,EAAMG,EAAQI,CAAC,EAEhD,GAAIC,IAASC,GAAQD,EAAO,GAAKC,EAAO,EACtC,OAEF,IAAMC,EAAM,KAAK,IAAIF,EAAMC,CAAI,EAC/B,GAAID,GAAQC,EACVJ,EAAMD,EAAQG,CAAC,EAAI,KAAK,IAAIC,EAAMC,CAAI,MACjC,CAEL,GAAIC,EAAM,EACR,OAEFL,EAAMD,EAAQG,CAAC,EAAI,CACrB,CACF,CAEA,OAAOF,CACT,CAOA,OAAO,iBAAiBM,EAA0BC,EAAwC,CAExF,IAAMC,EAAYF,EAAM,OAClBG,EAAYF,EAAW,OAC7B,GAAIC,EAAYC,EACd,MAAO,GAET,QAASP,EAAI,EAAGA,GAAKM,EAAWN,IAC9B,GAAII,EAAME,EAAYN,CAAC,IAAM,GAAKI,EAAME,EAAYN,CAAC,IAAMK,EAAWE,EAAYP,CAAC,EACjF,MAAO,GAGX,MAAO,EACT,CACF,EAGajB,EAAN,MAAMyB,CAAU,CAIrB,OAAO,KAAKC,EAAiC,CAC3C,OAAOD,EAAU,0BAA0BC,EAAM,EAAGA,EAAK,MAAM,CACjE,CAKA,OAAO,aAAaA,EAAyBC,EAAO,EAAsB,CACxE,IAAMC,EAAOF,EAAK,OAClB,GAAIE,IAAS,EACX,MAAO,CAAC,EAEV,IAAMC,EAAU,IAAI,MAAMD,CAAI,EAC1BX,EAAIW,EAAO,EACf,KAAOX,GAAK,GAAG,CACb,GAAIS,EAAKT,CAAC,EAAIU,IAAS,EAAG,CACxBE,EAAQZ,CAAC,EAAIS,EAAKT,CAAC,EAAIU,EACvB,KACF,CACA,GAAIA,EAAOD,EAAKT,CAAC,IAAM,EACrB,MAAM,IAAI,MAAM,sBAAsB,EAExCY,EAAQZ,CAAC,EAAI,EACbU,GAAQD,EAAKT,CAAC,EACdA,GACF,CACA,IAAKA,IAAKA,GAAK,EAAGA,IAChBY,EAAQZ,CAAC,EAAIS,EAAKT,CAAC,EAErB,OAAOY,CACT,CAKA,OAAO,kBAAkBH,EAAyBI,EAAsB,CACtE,GAAIA,EAAO,GAAKA,EAAOJ,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBI,CAAI,wCAAwCJ,EAAK,MAAM,cAAc,EAE/G,OAAOD,EAAU,0BAA0BC,EAAMI,EAAMJ,EAAK,MAAM,CACpE,CAKA,OAAO,gBAAgBA,EAAyBI,EAAsB,CACpE,GAAIA,EAAO,GAAKA,EAAOJ,EAAK,OAC1B,MAAM,IAAI,MAAM,wBAAwBI,CAAI,sCAAsCJ,EAAK,MAAM,cAAc,EAE7G,OAAOD,EAAU,0BAA0BC,EAAM,EAAGI,CAAI,CAC1D,CAKA,OAAO,0BAA0BJ,EAAyBK,EAAeC,EAAqB,CAC5F,IAAIL,EAAO,EACX,QAASV,EAAIc,EAAOd,EAAIe,EAAKf,IAAK,CAGhC,GAAIS,EAAKT,CAAC,EAAI,EACZ,MAAM,IAAI,MAEN,+GAA+G,EAErHU,GAAQD,EAAKT,CAAC,CAChB,CACA,OAAOU,CACT,CAEA,OAAO,eAAeD,EAA4C,CAChE,IAAME,EAAOF,EAAK,OAClB,GAAIE,IAAS,EACX,MAAO,CAAC,EACH,GAAIA,IAAS,EAClB,MAAO,CAAC,CAAC,EAEX,IAAMK,EAAU,IAAI,MAAML,CAAI,EAC9BK,EAAQL,EAAO,CAAC,EAAI,EACpBK,EAAQL,EAAO,CAAC,EAAIF,EAAKE,EAAO,CAAC,EACjC,QAASX,EAAIW,EAAO,EAAGX,GAAK,EAAG,EAAEA,EAC/BgB,EAAQhB,CAAC,EAAIgB,EAAQhB,EAAI,CAAC,EAAIS,EAAKT,EAAI,CAAC,EAE1C,OAAOgB,CACT,CAKA,OAAO,cAAcH,EAAcI,EAA4B,CAC7D,GAAIJ,EAAO,CAACI,GAAcJ,GAAQI,EAChC,MAAM,IAAI,MAAM,sCAAsC,EAExD,OAAOJ,EAAO,EAAIA,EAAOI,EAAaJ,CACxC,CAEA,OAAO,cAAcK,EAAyBD,EAA+B,CAC3E,OAAOC,EAAK,IAAIC,GAAK,KAAK,cAAcA,EAAGF,GAAcC,EAAK,MAAM,CAAC,CACvE,CAQA,OAAO,gBAAgB5B,EAAsB8B,EAA6C,CACxF,OAAIA,EACKA,EAAK,IAAKC,GAAM/B,EAAE+B,CAAC,CAAC,EAEpB/B,EAAE,MAAM,EAAE,QAAQ,CAE7B,CAOA,OAAO,SAASmB,EAAyBa,EAA2C,CAClF,IAAMX,EAAOF,EAAK,OAClB,OAAOA,EAAK,IAAI,CAACY,EAAGrB,IAAMqB,EAAIC,EAAItB,CAAC,EAAIsB,EAAItB,EAAIW,CAAI,CAAC,CACtD,CAOA,OAAO,SAASY,EAA2BC,EAAoC,CAC7E,OAAID,EAAO,SAAWC,EAAO,OACpB,GAEFD,EAAO,MAAM,CAACF,EAAGrB,IAAMqB,IAAMG,EAAOxB,CAAC,CAAC,CAC/C,CACF,EAEahB,GAAN,MAAMyC,CAAa,CAUxB,OAAO,qBACHC,EAA2BC,EAA8BC,EAAuBZ,EAChFa,EAAqBC,EAAsB,CAC7C,GAAI,CAACJ,GAAoBE,EAAY,SAAWD,EAAU,OAAS,EACjE,M
AAM,IAAI,MAAM,oFAAoF,EAGtG,GAAID,EAEF,QAASK,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IACxCA,GAAOH,EAAY,OACrBA,EAAY,KAAKD,EAAUI,EAAM,CAAC,CAAC,EAEnCH,EAAYG,CAAG,EAAIJ,EAAUI,EAAM,CAAC,EAM1C,QAASA,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAC1C,GAAIA,EAAMf,EAAQ,QAChB,GAAIA,EAAQe,CAAG,EAAI,EACjB,MAAM,IAAI,MAAM,8CAA8C,OAGhEf,EAAQ,KAAK,CAAC,EAKlB,QAASe,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAC1C,GAAIA,EAAMF,EAAU,QAClB,GAAIA,EAAUE,CAAG,EAAI,EACnB,MAAM,IAAI,MAAM,gDAAgD,OAGlEF,EAAU,KAAK,CAAC,EAKpB,QAASE,EAAM,EAAGA,EAAMH,EAAY,OAAS,EAAGG,IAC9C,GAAIA,EAAMD,EAAK,QACb,GAAIA,EAAKC,CAAG,EAAI,EACd,MAAM,IAAI,MAAM,0CAA0C,OAG5DD,EAAK,KAAK,CAAC,EAKf,QAASC,EAAM,EAAGA,EAAMH,EAAY,OAAQG,IAAO,CACjD,GAAIH,EAAYG,CAAG,GAAK,EACtB,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAID,EAAKC,CAAG,GAAKH,EAAYG,CAAG,GAAKD,EAAKC,EAAMH,EAAY,MAAM,GAAKA,EAAYG,CAAG,EACpF,MAAM,IAAI,MAAM,oCAAoC,CAExD,CACF,CAGA,OAAO,yBACHJ,EAA8BX,EAA4Ba,EAC1DD,EAAgCE,EAAgBE,EAAwBC,EAAwB,CAClG,GAAKA,EAIL,IAAIH,EAAK,SAAW,GAAKH,EAAU,OAAS,GAC1C,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIX,EAAQ,SAAYW,EAAU,OAAS,EACzC,MAAM,IAAI,MAAM,2DAA2D,EAG7E,GAAIC,EAAY,SAAYD,EAAU,OAAS,EAC7C,MAAM,IAAI,MAAM,iEAAiE,EAGnF,QAASI,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CN,EAAa,wBACTE,EAAUI,GAAOC,EAAgB,EAAI,EAAE,EAAGhB,EAAQe,CAAG,EAAGF,EAAUE,CAAG,EAAGH,EAAYG,CAAG,EAAGD,EAAMC,EAChGA,EAAMJ,EAAU,OAAS,EAAGM,CAAO,EAE3C,CAaA,OAAO,uBACHP,EAA2BC,EAA8BX,EAAmBa,EAC5ED,EAAuBE,EAAgBG,EAA4B,CACrE,GAAIN,EAAU,QAAU,EACtB,MAAM,IAAI,MAAM,4CAA4C,EAI9D,IAAMO,EAAa,CAACP,EAAU,CAAC,EAAGA,EAAU,CAAC,CAAC,EAE9C,OAAAF,EAAa,mBACTC,EAAkBC,EAAWO,EAAYlB,EAASa,EAAWD,EAAaE,EAAMG,CAAO,EACpFC,CACT,CAYA,OAAO,uBACHP,EAA8BQ,EAA+BnB,EAAmBa,EAChFD,EAAuBE,EAAgBG,EAA4B,CACrE,GAAIN,EAAU,QAAU,GAAKQ,EAAW,QAAU,EAChD,MAAM,IAAI,MAAM,yDAAyD,EAI3E,IAAMD,EAAa,CAACP,EAAU,CAAC,EAAGQ,EAAW,CAAC,CAAC,EAE/C,OAAAV,EAAa,mBAAmB,GAAOE,EAAWO,EAAYlB,EAASa,EAAWD,EAAaE,EAAMG,CAAO,EACrGC,CACT,CAKA,OAAe,mBACXR,EAA2BC,EAA8BO,EAAsBlB,EAC/Ea,EAA8BD,EAAgCE,EAAgBG,EAAkB,CAClG,GAAIP,EACF,QAASK,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CG,EAAW,KAAK,CAAC,MAGnB,SAASH,EAAM,EAAGA,EAAMJ,EAAU,OAAS,EAAGI,IAC5CG,EAAW,KAAKT,EAAa,wBACzBE,EAAUI,EAAM,CAAC,EAAGf,EAAQe,CAAG,EAAGF,EAAUE,CAAG,EAAGH,EAAYG,CAAG,EAAGD,EAAMC,EAAKA,EAAMJ,EAAU,OAAS,EACxGM,CAAO,CAAC,CAGlB,CAIA,OAAe,wBACXG,EAAgBC,EAAgBC,EAAkBC,EAAgBT,EAAgBU,EAClFC,EAAsBR,EAA0B,CAClD,IAAMS,EAAUJ,GAAYC,EAAS,GAAK,EAC1C,GAAIN,GAAWA,IAAY,SACzB,OAAQA,EAAS,CACf,IAAK,QACH,OAAAH,EAAKU,CAAY,EAAI,EACrBV,EAAKW,CAAY,EAAI,EACd,KAAK,OAAQL,EAASM,GAAWL,EAAU,CAAC,EACrD,IAAK,aACL,IAAK,aACH,GAAIC,IAAa,EACf,MAAM,IAAI,MAAM,qDAAqD,EAChE,CAEL,IAAMK,IADoBP,EAASC,EAAS,GAAKA,EACX,GAAKA,EAASE,EAASH,EAC7D,OAAAN,EAAKU,CAAY,EACgB,KAAK,MAAjCP,IAAY,cAA4BU,EAAY,GAAK,EAAgBA,EAAY,CAA3B,EAC/Db,EAAKW,CAAY,EAAIE,EAAYb,EAAKU,CAAY,EAC3C,KAAK,OAAQJ,EAASO,EAAYJ,GAAUF,EAAU,CAAC,CAChE,CACF,QACE,MAAM,IAAI,MAAM,0BAA0B,CAC9C,KAEA,QAAO,KAAK,OAAQD,EAASN,EAAKU,CAAY,EAAIV,EAAKW,CAAY,EAAIC,GAAWL,EAAU,CAAC,CAEjG,CACF,EAEapD,GAAN,KAAe,CAIpB,OAAO,qBACH2D,EAA8BC,EAAoBC,EAA+BC,EACjFC,EAAkD,CACpD,GAAIJ,EAAU,SAAW,GAAKE,EAAW,SAAW,EAClD,MAAM,IAAI,MAAM,4BAA4B,EAG9C,IAAIG,EACAC,EACAC,EAEAN,GACFI,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,IAEfK,EAAIL,EAAU,CAAC,EACfM,EAAIN,EAAU,CAAC,GAGjB,IAAIQ,EAAO,GAUX,GARIL,GACFI,EAAIL,EAAW,CAAC,EAChBM,EAAO,IAEPD,EAAIL,EAAW,CAAC,EAChBM,EAAO,GAGLN,EAAWM,CAAI,IAAMF,EACvB,MAAM,IAAI,MAAM,oBAAoB,EAGtC,GAAID,GAAK,GAAKE,GAAK,GAAKD,GAAK,EAC3B,MAAM,IAAI,MAAM,yBAAyB,EAG3C,GAAIF,GAAa,CAAClE,GAAc,iBAAiBkE,EAAW,CAACC,EAAGE,CAAC,CAAC,EAChE,MAAM,IAAI,MAAM,wCAAwC,EAG1D,MAAO,CAACF,EAAGE,EAAGD,CAAC,CACjB,CACF,EAGahE,GAAW,sBACXC,GAAW,uBCzexB,IAiBakE,GAsMPC,GAoCOC,GAKAC,GAKAC,GAeAC,GAiBAC,GAcAC,GAgBAC,GAmBAC,GA+BPC,GAiTOC,EAaAC,
GAaAC,GAgFPC,GAwJOC,GAaAC,GAr7BbC,GAAAC,GAAA,kBAGAC,KACAC,KAaapB,GAAiB,GAsMxBC,GAAoB,CAACoB,EAAcC,IAAiD,CACxF,GAAIA,IAAe,EACjB,MAAM,IAAI,MAAM,mDAAmD,EAIrE,OAAQD,EAAM,CACZ,QACE,OAAOC,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,QACE,OAAOA,EAAa,EAAI,MAAMA,CAAU,QAAU,MACpD,OACE,GAAIA,EAAa,EACf,MAAM,IAAI,MAAM,4CAA4C,EAE9D,MAAO,CAAC,YAAa,KAAK,EAC5B,QACE,GAAIA,EAAa,EACf,MAAM,IAAI,MAAM,4CAA4C,EAE9D,MAAO,CAAC,YAAa,KAAK,EAC5B,OACE,GAAIA,IAAe,EACjB,MAAM,IAAI,MAAM,mBAAmB,EAErC,MAAO,CAAC,MAAO,YAAY,EAE7B,QACE,MAAM,IAAI,MAAM,sBAAsBD,CAAI,EAAE,CAChD,CACF,EAEanB,GAA8B,CAACmB,EAAgBC,EAAsB,IAAM,CACtF,IAAMC,EAAatB,GAAkBoB,EAAMC,CAAU,EACrD,OAAO,OAAOC,GAAe,SAAWA,EAAaA,EAAW,CAAC,CACnE,EAEapB,GAA4B,CAACkB,EAAgBC,EAAsB,IAAM,CACpF,IAAMC,EAAatB,GAAkBoB,EAAMC,CAAU,EACrD,OAAO,OAAOC,GAAe,SAAWA,EAAaA,EAAW,CAAC,CACnE,EAEanB,GAA6B,IAAIoB,IAA6D,CACzG,IAAMC,EAAoC,CAAC,EAC3C,OAAAD,EAAK,QAAQE,GAAO,CACdA,EAAI,SAAW,GACjBD,EAAgB,KACZ,CAAC,QAAuB,KAAMC,CAAG,EAAG,CAAC,QAAuB,KAAMC,EAAU,eAAeD,CAAG,CAAC,CAAC,CAExG,CAAC,EACMD,CACT,EAMapB,GAAoBuB,GAE3BA,EAAO,IAAM,EACR,EACEA,EAAO,IAAM,EACf,EAGF,EASItB,GAAa,CAACuB,EAAW,MAAOP,EAAqBQ,EAAQ,MACpE,CAACR,GAAcA,IAAe,EACzB,GAAGO,CAAQ,IAAIC,CAAK,IAGtB,MAAMR,CAAU,IAAIO,CAAQ,KAAKC,CAAK,IASlCvB,GAAY,CAACsB,EAAkBP,EAAoBQ,IAC1DD,IAAa,MACRC,EAELR,IAAe,EACV,OAAOQ,CAAK,IAGd,MAAMR,CAAU,SAASQ,CAAK,IAQ1BtB,GAAY,CAACuB,EAAcT,IAClCA,IAAe,EACV,IAAIS,CAAI,QAAQA,CAAI,QAAQA,CAAI,QAAQA,CAAI,MAC1CT,IAAe,EACjB,IAAIS,CAAI,QAAQA,CAAI,MAClBT,IAAe,EACjB,IAAIS,CAAI,QAAQA,CAAI,QAAQA,CAAI,MAGlCA,EAUItB,GACT,CAACsB,EAAcC,EAAsBC,EAAgBZ,IAC/CU,EAAK,WAAW,WAAW,GAAKE,EAAS,EACvC,OAAQD,GAAW,SACjBX,IAAS,MACJ,GAAGU,CAAI,KAAKC,CAAK,WAAWA,CAAK,eAAeA,CAAK,aAErD,GAAGD,CAAI,KAAKC,CAAK,WAAWA,CAAK,SAGtCX,IAAS,MACJ,GAAGU,CAAI,IAAI,KAAK,MAAMC,EAAQ,CAAC,CAAC,KAAK,KAAK,MAAMA,EAAQ,EAAI,CAAC,CAAC,KAAKA,EAAQ,EAAI,CAAC,IAEhF,GAAGD,CAAI,IAAI,KAAK,MAAMC,EAAQ,CAAC,CAAC,KAAKA,EAAQ,CAAC,IAIlDC,EAAS,EAAI,GAAGF,CAAI,IAAIC,CAAK,IAAMD,EAc5CrB,GACF,CAACqB,EAAcG,EAAoBC,EAAuCC,EACzEd,IAAuC,CACtC,IAAMe,EAAa,OAAOF,GAAgB,SACpCG,EAAOD,EAAaF,EAAcA,EAAY,OAC9CI,EAAe,CAAC,GAAG,IAAI,MAAMD,CAAI,EAAE,KAAK,CAAC,EACzCE,EAAcF,EAAO,EAAI,MAAQA,GAAQ,EAAI,MAAMA,CAAI,QAAU,cAAcA,CAAI,IACnFf,EAAatB,GAAkBiC,EAAYZ,CAAU,EACrDmB,EAAY,OAAOlB,GAAe,SAAWA,EAAaA,EAAW,CAAC,EACtEmB,EAAc,OAAOnB,GAAe,SAAWA,EAAaA,EAAW,CAAC,EACxEF,EAAO,CAAC,QAASmB,EAAa,MAAOC,EAAW,QAASC,EAAa,OAAQR,CAAU,EAExFS,EAAgBjB,IAA+B,OAAOA,IAAQ,SAAWA,GAAM,GAAGA,EAAG,IAErFkB,EAAqB,CACzB,gBAAiB,GACjB,gBAAiB,GACjB,2BAA4B,GAC5B,IAAK,GACL,aAAc,GACd,IAAK,GACL,aAAc,EAChB,EAEMC,EAAgBR,EAAa,YAAc,GAC3CS,EAAQ,GAAGD,CAAa,GAAGd,CAAI,SAC/BgB,EAAU,GAAGF,CAAa,GAAGd,CAAI,WAEnCiB,EAAa,GACjB,QAASC,GAAI,EAAGA,GAAIX,EAAO,EAAGW,KAC5BD,GAAc;AAAA,aACTC,EAAC,gBAAgBxC,GAAasC,EAASE,GAAGX,CAAI,CAAC;AAAA,cAC9CW,EAAC,gBAAgBxC,GAAasC,EAASE,GAAGX,CAAI,CAAC;AAAA,cAC/CW,EAAC,UAAUA,EAAC;AAAA,oBACNA,EAAC;AAAA,MAGfD,GAAc,WAAWV,EAAO,CAAC,eAEjC,IAAMY,EAAgCZ,EAAO,EAAI,GAAK;AAAA,WACjDP,CAAI,oBAAoBV,EAAK,OAAO;AAAA,mBAC5BA,EAAK,OAAO;AAAA;AAAA,MAEzB2B,CAAU;AAAA;AAAA,KAIJG,EAAmBC,KACvBR,EAAmB,gBAAkB,GAC9BN,EAAO,EAAIc,GAAY,OAAOrB,CAAI,IAAIqB,EAAS,KAGlDC,EAAoB,CAAC,EAC3B,GAAIf,GAAQ,EACV,QAASW,GAAIX,EAAO,EAAGW,IAAK,EAAGA,KAC7BI,EAAQ,KAAK,GAAG5C,GAAasC,EAASE,GAAGX,CAAI,CAAC,eAAeW,EAAC,IAAI,EAItE,IAAMK,EAAgChB,EAAO,EAAI,GAAK;AAAA,WACjDP,CAAI,aAAaV,EAAK,OAAO;AAAA,aAC3BgC,EAAQ,KAAK,GAAG,CAAC;AAAA,KAGlBE,GAAmBC,KACvBZ,EAAmB,gBAAkB,GAC9BN,EAAO,EAAIkB,GAAa,OAAOzB,CAAI,IAAIyB,EAAU,KAGpDC,EAAU,IAAIC,KAChBpB,IAAS,EAAI,KAAO,GAAGjB,EAAK,OAAO,IAAIqC,GAAK,IAAIf,CAAY,EAAE,KAAK,GAAG,CAAC,IAErEgB,GAAa,CAACH,GAAoBI,KA
ClCtB,EAAO,EACF,GAAGkB,EAAU,GAEb,GAAG/C,GAAa+C,GAAYI,GAAKtB,CAAI,CAAC,GAI3CuB,GAAa,CAACL,GAAoBI,GAAoB9B,KACtDQ,EAAO,EACF,GAAGkB,EAAU,IAAI1B,EAAK,IAEtB,GAAGrB,GAAa+C,GAAYI,GAAKtB,CAAI,CAAC,IAAIR,EAAK,IAIpDgC,GAAoE,CAAC,EACrEC,EAA6B,CAACP,GAAoBQ,KAA0B,CAChFpB,EAAmB,2BAA6B,GAChD,IAAMqB,GAAU,GAAGD,GAAO,IAAI,uBAAuBjC,CAAI,SACzD,GAAIkC,MAAWH,GACb,MAAO,GAAGG,EAAO,IAAIT,EAAU,IAEjC,IAAMH,GAAU,CAAC,EACjB,QAASJ,GAAIX,EAAO,EAAGW,IAAK,EAAGA,KAAK,CAClC,IAAMW,GAAMI,GAAO,WAAW,gBAAiBf,GAAIe,GAAO,KAAO1B,CAAI,EACrEe,GAAQ,KAAK,GAAGM,GAAWZ,EAASE,EAAC,CAAC,OAAOW,EAAG,MAAMD,GAAWb,EAAOG,EAAC,CAAC,GAAG,CAC/E,CACA,OAAAa,GAAyCG,EAAO,EAC5C,MAAMA,EAAO,mBAAmBD,GAAO,KAAK,OAAO;AAAA,sBACzCX,GAAQ,OAAS,EAAIA,GAAQ,KAAK,GAAG,EAAI,IAAI;AAAA,cAGpD,GAAGY,EAAO,IAAIT,EAAU,GACjC,EAEMU,GAAc,CAACC,GAAuBrC,MAAmB,IAAM,CACnE,GAAIT,EAAK,UAAYA,EAAK,MACxB,MAAO,GAAGU,CAAI,IAAIoC,EAAM,KAAKrC,EAAK,IAC7B,GAAIT,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,GAAGU,CAAI,IAAIoC,EAAM,mBAAmBrC,EAAK,8BAA8BA,EAAK,UAC9E,GAAIT,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,GAAGU,CAAI,IAAIoC,EAAM,mBAAmBrC,EAAK,UAC3C,GAAIT,EAAK,UAAY,OAASA,EAAK,QAAU,aAElD,MAAO,GAAGU,CAAI,IAAIoC,EAAM,8DAA8DrC,EAAK,MAE3F,MAAM,IAAI,MAAM,6CAA6CT,EAAK,OAAO,mBAAmBA,EAAK,KAAK,MAAM,CAEhH,GAAG,EAEG+C,GAAeD,KAA2B,IAAM,CACpD,GAAI9C,EAAK,UAAYA,EAAK,MACxB,MAAO,GAAGU,CAAI,IAAIoC,EAAM,IACnB,GAAI9C,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,OAAOU,CAAI,IAAIoC,EAAM,OACvB,GAAI9C,EAAK,UAAY,aAAeA,EAAK,QAAU,MAExD,MAAO,OAAOU,CAAI,IAAIoC,EAAM,OACvB,GAAI9C,EAAK,UAAY,OAASA,EAAK,QAAU,aAElD,MAAO,mBAAmBU,CAAI,IAAIoC,EAAM,oBAAoBpC,CAAI,IAAIoC,EAAM,sBAAsBpC,CAAI,IAChGoC,EAAM,wBAAwBpC,CAAI,IAAIoC,EAAM,oBAEhD,MAAM,IAAI,MAAM,6CAA6C9C,EAAK,OAAO,mBAAmBA,EAAK,KAAK,MAAM,CAEhH,GAAG,EAEGgD,GAA6B/B,EAAO,EAAI,GAAK;AAAA,WAC9CP,CAAI,sBAAsBV,EAAK,OAAO,QAAQoB,CAAS;AAAA,aACrD2B,GAAY,OAAOrC,CAAI,WAAW,CAAC;AAAA,KAGpCuC,GAAoBhC,EAAO,EAAI,IAAM,IAAM,CAC/C,IAAMiC,GAAiBhC,EAAa,IAAIU,IAAK,IAAIA,EAAC,OAAO,EAAE,KAAK,IAAI,EAC9DuB,GAAajC,EAAa,IAAIU,IAAK,IAAIA,EAAC,EAAE,EAAE,KAAK,IAAI,EAC3D,MAAO;AAAA,WACJlB,CAAI,IAAIwC,EAAc,QAAQ9B,CAAS;AAAA,iBACjCV,CAAI,aAAa0B,EAAQe,EAAU,CAAC;AAAA,IAE/C,GAAG,EAEGC,GAAM,IAAIhB,KAA0C,CACxD,GAAIA,GAAQ,SAAWnB,EACrB,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,EAGlD,IAAMoC,GAAoBjB,GAAQ,IAAId,CAAY,EAAE,KAAK,GAAG,EAE5D,OAAIL,IAAS,EACJ8B,GAAY,IAAI,EACd9B,IAAS,EACX8B,GAAYM,GAAkB,CAAC,CAAC,GAEvC9B,EAAmB,IAAM,GACzBA,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,IAAI2C,EAAiB,IAE3C,EAEMC,GAAgBnB,IAChBlB,EAAO,EACF8B,GAAYZ,EAAU,GAE7BZ,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,aAAayB,EAAU,KAIvCoB,GAA6BtC,EAAO,EAAI,GAAK;AAAA,WAC9CP,CAAI,sBAAsBV,EAAK,OAAO,YAAYoB,CAAS;AAAA,MAChEyB,GAAY,OAAOnC,CAAI,YAAa,OAAO,CAAC;AAAA,KAGtC8C,GAAoBvC,EAAO,EAAI,IAAM,IAAM,CAC/C,IAAMiC,GAAiBhC,EAAa,IAAIU,IAAK,IAAIA,EAAC,OAAO,EAAE,KAAK,IAAI,EAC9DuB,GAAajC,EAAa,IAAIU,IAAK,IAAIA,EAAC,EAAE,EAAE,KAAK,IAAI,EAC3D,MAAO;AAAA,WACJlB,CAAI,IAAIwC,EAAc,YAAY9B,CAAS;AAAA,UAC5CV,CAAI,aAAa0B,EAAQe,EAAU,CAAC;AAAA,IAExC,GAAG,EA0EH,MAAO,CACL,KAxCW,IAAM,CACjB,IAAMM,GAAQ,CAAC,EACXC,GAAmB,GACvB,OAAInC,EAAmB,kBACrBkC,GAAM,KAAK5B,CAA6B,EACxC6B,GAAmB,IAEjBnC,EAAmB,kBACrBkC,GAAM,KAAKxB,CAA6B,EACxCyB,GAAmB,IAEjBnC,EAAmB,6BACrB,OAAO,OAAOkB,EAAwC,EAAE,QAAQkB,IAAQF,GAAM,KAAKE,EAAI,CAAC,EACxFD,GAAmB,IAEjBnC,EAAmB,MACrBkC,GAAM,KAAKD,EAAiB,EAC5BE,GAAmB,IAEjBnC,EAAmB,eACrBkC,GAAM,KAAKF,EAA0B,EACrCG,GAAmB,IAEjBnC,EAAmB,MACrBkC,GAAM,KAAKR,EAAiB,EAC5BS,GAAmB,IAEjBnC,EAAmB,eACrBkC,GAAM,KAAKT,EAA0B,EACrCU,GAAmB,IAEjB,CAAC1C,GAAc0C,IACjBD,GAAM,QACF,SAAShC,CAAK,MAAMzB,EAAK,OAAO,IAAIc,EAAY,KAAK,GAAG,CAAC,KACzD,SAASY,CAAO,MAAM1B,EAAK,OAAO,IAAIM,EAAU,eAAeQ,CAAW,EAAE,KAAK,GAAG,CAAC,IAAI,EAExF2C,GAAM,KAAK;AAAA,CAA
I,CACxB,EAIE,KAAAzD,EACA,gBAAA8B,EACA,gBAAAI,GACA,2BAAAQ,EACA,QAAAN,EACA,WAAAE,GACA,WAAAE,GACA,IAjFU,IAAIoB,KAAkD,CAChE,GAAIA,GAAgB,SAAW3C,EAAO,EACpC,MAAM,IAAI,MAAM,0BAA0BA,CAAI,EAAE,EAElD,IAAMR,GAAQmD,GAAgB3C,CAAI,EAClC,GAAI,OAAOR,IAAU,SACnB,MAAM,IAAI,MAAM,sBAAsB,EAGxC,IAAM4C,GAAoBO,GAAgB,MAAM,EAAG3C,CAAI,EAAE,IAAIK,CAAY,EAAE,KAAK,GAAG,EAEnF,OAAIL,IAAS,EACJ4B,GAAY,KAAMpC,EAAK,EACrBQ,IAAS,EACX4B,GAAYQ,GAAkB,CAAC,EAAG5C,EAAK,GAE9Cc,EAAmB,IAAM,GACzBA,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,IAAI2C,EAAiB,KAAK5C,EAAK,IAErD,EA6DE,YAAAoC,GACA,aA5DmB,CAACV,GAAoB1B,KACpCQ,EAAO,EACF4B,GAAYV,GAAY1B,EAAK,GAEpCc,EAAmB,aAAe,GAClCA,EAAmB,gBAAkB,GAC9B,OAAOb,CAAI,aAAayB,EAAU,KAAK1B,EAAK,MAuDrD,IAAA2C,GACA,YAAAL,GACA,aAAAO,GAEA,MAAAvC,EACA,KAAAL,EACA,QAAAgB,EACA,MAAAD,EACA,KAAAR,CACF,CACF,EAWS3B,EACT,CAACoB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,QAASb,CAAU,EAW3DV,GACT,CAACmB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,SAAUb,CAAU,EAW5DT,GACT,CAACkB,EAAcV,EAAcc,EAAuCb,EAAsB,IACtFZ,GAAoBqB,EAAMV,EAAMc,EAAa,WAAYb,CAAU,EA8ErER,GAAN,KAA+C,CAC7C,YAAoBoE,EAA2DC,EAA4B,CAAvF,6BAAAD,EAA2D,YAAAC,EAoG/E,KAAQ,kBAAqC,CAAC,EAC9C,KAAQ,UAA6B,CAAC,EACtC,KAAQ,SAA8B,CAAC,EAwBvC,KAAQ,cAAgB,CA9HoF,CAE5G,sCAAsCvD,EAA6B,CAGjE,MAAO,qBADY,OAAOA,GAAS,SAAW,GAAGA,CAAI,IAAMA,CACrB,eACxC,CAEA,UAAUwD,EAAiDpF,GAAgB,CACzE,IAAMqF,EAAiB,OAAOD,GAAkB,SAAWA,EAAgBA,EAAc,CAAC,EACpFE,EAAiB,OAAOF,GAAkB,SAAW,EAAIA,EAAc,CAAC,EACxEG,EAAiB,OAAOH,GAAkB,SAAW,EAAIA,EAAc,CAAC,EAE9E,GAAIC,EAAiB,KAAK,OAAO,0BAC7BC,EAAiB,KAAK,OAAO,0BAC7BC,EAAiB,KAAK,OAAO,yBAC/B,MAAM,IAAI,MAAM,mBAAmBF,CAAc,KAAKC,CAAc,KAChEC,CAAc,yCAAyC,KAAK,OAAO,wBAAwB,KAC3F,KAAK,OAAO,wBAAwB,KAAK,KAAK,OAAO,wBAAwB,IAAI,EAGvF,GAAIF,EAAiBC,EAAiBC,EAAiB,KAAK,OAAO,kCACjE,MAAM,IAAI,MAAM,mBAAmBF,CAAc,KAAKC,CAAc,KAChEC,CAAc,+CACd,KAAK,OAAO,iCAAiC,GAAG,EAGtD,IAAMC,EAAuB,KAAK,wBAAwB,CAAC,IAAM,GAAK,KAAK,wBAAwB,CAAC,IAAM,EACpGC,EAAYD,EAAuB;AAAA;AAAA,wDAGA;AAAA;AAAA;AAAA;AAAA,yDAKnCE,EAAsBF,EACxB,4DACA;AAAA,mEAEIH,EAAiBC,EAAiBC,CAAc,iBAExD,MAAO,4BAA4BF,CAAc,KAAKC,CAAc,KAAKC,CAAc;AAAA,YAC/EE,CAAS;AAAA,MACfC,CAAmB;AAAA,GAEvB,CAEQ,uBAAuBC,EAA+B,CACxDA,EAAS,OAAS,IAChBA,EAAS,MAAM,WAAW,WAAW,GACvC,KAAK,SAAS,KAAK,CAAC,KAAMA,EAAS,MAAM,QAAQ,YAAa,EAAE,EAAG,KAAM,MAAO,OAAQA,EAAS,IAAI,CAAC,EAEpGA,EAAS,QAAQ,WAAW,WAAW,GACzC,KAAK,SAAS,KAAK,CAAC,KAAMA,EAAS,QAAQ,QAAQ,YAAa,EAAE,EAAG,KAAM,MAAO,OAAQA,EAAS,IAAI,CAAC,EAG9G,CAEQ,gBAAgBA,EAAyBC,EAA8B,CAC7E,GAAID,EAAS,QAAU,WACrB,MAAM,IAAI,MAAM,+FAA+F,EAEjH,KAAK,UAAU,KAAKA,CAAQ,EAC5B,KAAK,uBAAuBA,CAAQ,EAEpC,IAAME,EAASF,EAAS,QAAU,QAAU,OAAS,aAC/CjD,EAAciD,EAAS,KAAK,QAClC,MAAO,sBAAsBC,CAAY,kBAAkBC,CAAM,KAAKF,EAAS,IAAI,WAAWjD,CAAW,IAC3G,CAEA,oBAAoBoD,EAAoC,CACtD,OAAOA,EAAU,IAAIC,GAAK,KAAK,gBAAgBA,EAAG,KAAK,eAAe,CAAC,EAAE,KAAK;AAAA,CAAI,CACpF,CAEQ,yBAAyBJ,EAA+B,CAC9D,GAAIA,EAAS,QAAU,WACrB,MAAM,IAAI,MACN,sGAAsG,EAG5G,KAAK,kBAAkB,KAAKA,CAAQ,EACpC,KAAK,uBAAuBA,CAAQ,CACtC,CAEA,6BAA6BG,EAA0C,CACrE,OAAAA,EAAU,QAAQC,GAAK,KAAK,yBAAyBA,CAAC,CAAC,EAChD,IACT,CAEA,gBAAgBhE,EAAcV,EAA8BY,EAAS,EAAiB,CACpF,YAAK,SAAS,KAAK,CAAC,KAAAF,EAAM,KAAAV,EAAM,OAAAY,CAAM,CAAC,EAChC,IACT,CAEA,iBAAiB+D,EAAqD,CACpE,YAAK,SAAW,KAAK,SAAS,OAAOA,CAAkB,EAChD,IACT,CAKQ,oBAA6B,CACnC,GAAI,KAAK,SAAS,SAAW,EAC3B,MAAO,GAGT,IAAMC,EAA4B,CAAC,EACnC,OAAW,CAAC,KAAAlE,EAAM,KAAAV,EAAM,OAAAY,CAAM,IAAK,KAAK,SACtC,GAAIA,GAAUA,EAAS,EACjBZ,IAAS,MACX4E,EAAgB,KAAK,cAAclE,CAAI,iBAAiBV,CAAI,MAAM,KAAK,KAAKY,EAAS,CAAC,CAAC,GAAG,EAE1FgE,EAAgB,KAAK,GAAGlE,CAAI,eAAeV,CAAI,MAAM,KAAK,KAAKY,EAAS,CAAC,CAAC,GAAG,MAE1E,CACL,IAAMiE,EAAWjE,GAAU,MAAQA,IAAW,EAAIZ,EAAO,MAAMY,CAAM,IAAIZ,CAAI,IAC7E4E,EAAgB,KAAK,GAAGlE,CAAI,IAAImE,C
AAQ,EAAE,CAC5C,CAGF,MAAO;AAAA,0BACeD,EAAgB,KAAK,IAAI,CAAC;AAAA,2BACzB,KAAK,aAAa,oCAC3C,CAMA,IAAI,2BAAoC,CACtC,OAAO,KAAK,mBAAmB,EAAI,KAAK,UAAU,IAAIhD,GAAKA,EAAE,KAAK,CAAC,EAAE,KAAK;AAAA,CAAI,EAC1E,KAAK,kBAAkB,IAAIA,GAAKA,EAAE,KAAK,CAAC,EAAE,KAAK;AAAA,CAAI,CACzD,CAKA,IAAI,eAAwD,CAC1D,GAAI,KAAK,SAAS,SAAW,EAC3B,OAGF,IAAMkD,EAA6B9E,GAC9B,UACe,EAAE,CAAC,MAAO,MAAO,MAAO,KAAK,EAAE,QAAQA,CAAI,CAAC,EAChE,OAAO,KAAK,SAAS,IAAI+E,GAAM,CAACD,EAA0BC,EAAE,IAAI,EAAGA,EAAE,QAAU,CAAC,CAAE,CACpF,CACF,EAEarF,GAAqB,CAACsF,EAAyClB,IACxE,IAAIrE,GAAiBuF,EAAelB,CAAM,EAYjCnE,GAAmB,CAACsF,EAA4BC,IAA0C,CACrG,IAAMC,EAASF,EAAQ,OACjB9E,EAAiB,CAAC,EACxB,QAASyB,EAAI,EAAGA,EAAIuD,EAAQvD,IAAK,CAC/B,IAAMvB,EAAM8E,EAAS,EAAIvD,EACnBwD,EAAIH,EAAQ5E,CAAG,GAAK,GAChB6E,EAASA,EAAS,OAAS,EAAItD,CAAC,GAAK,GACvC,GAAKwD,IAAM,GACjBjF,EAAK,QAAQE,CAAG,CAEpB,CACA,OAAOF,CACT,ICj8BA,IAeMkF,GAMAC,GAGAC,GAGAC,GAWOC,GAqCAC,GAKAC,GAhFbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMZ,GAAkBa,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,6BAA6B,CAEjD,EAEMZ,GAAkB,CAACa,EAAmBC,IACvCA,GAAQA,EAAK,SAAWD,EAAa,CAAC,GAAI,IAAI,MAAMA,CAAS,EAAE,KAAK,CAAE,EAAE,QAAQ,EAAIC,EAEnFb,GAAiB,CAACc,EAA+BD,IACnDE,EAAU,gBAAgBD,EAAYf,GAAgBe,EAAW,OAAQD,CAAI,CAAC,EAE5EZ,GAAmB,CAACY,EAAgBG,EAAcC,EAAsBC,IAAkC,CAC9G,IAAMC,EAAc,CAAC,EACrBA,EAAY,KAAK,cAAcD,EAAO,KAAK,OAAO,QAAQD,EAAM,KAAK,OAAO;AAAA,aACjEA,EAAM,KAAK,OAAO,GAAG,EAChC,QAASG,EAAI,EAAGA,EAAIJ,EAAM,EAAEI,EAC1BD,EAAY,KAAKF,EAAM,WAAW,IAAKJ,EAAKO,CAAC,EAAG,KAAKA,CAAC,GAAG,CAAC,EAE5D,OAAAD,EAAY,KAAK,YAAY,EACtBA,EAAY,KAAK;AAAA,CAAI,CAC9B,EAEajB,GAA6B,CAACmB,EAAyBC,IAAoC,CACtG,IAAMC,EAAgBF,EAAY,SAC5BT,EAAYS,EAAY,KAAK,OAC7BR,EAAOd,GAAgBa,EAAWU,CAAQ,EAC1CE,EAAcxB,GAAeqB,EAAY,KAAMR,CAAI,EACnDK,EAASO,GAAe,SAAUF,EAAeC,EAAY,MAAM,EACnEP,EAAQS,EAAc,IAAKH,EAAeX,CAAS,EAEnDe,EAAmBC,GAA+B;AAAA,IACtDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBX,EAAOC,CAAM,CAAC;AAAA;AAAA,IAElFjB,GAAiBY,EAAMD,EAAWK,EAAOC,CAAM,CAAC;AAAA;AAAA,IAEhDU,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,oBAE5DV,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA,MAGlDA,EAAO,YAAY,aAAcD,EAAM,aAAa,UAAU,CAAC,CAAC;AAAA,KAEpE,MAAO,CACL,KAAM,YACN,YAAa,CAAC,KAAM,GAAGK,CAAQ,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAC9D,WAAaX,GAAW,CACtB,IAAMkB,EAAad,EAAU,KAAKS,CAAW,EAC7C,MAAO,CACL,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUb,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKkB,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGC,GAA2BnB,EAAO,CAAC,EAAE,KAAMa,CAAW,CAAC,CAC5G,CACF,EACA,gBAAAG,CACF,CACF,EAEaxB,GAAY,CAAC4B,EAAyBC,IAA0C,CAC3FlC,GAAeiC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ7B,GAA2B6B,EAAQ,OAAO,CAAC,EAAGC,EAAW,IAAI,CAAC,CAChF,EAEa5B,GAA4B4B,GACrCC,GAA4B,CAAC,KAAMD,EAAW,IAAgB,CAAC,ICjFnE,IAYME,GAaAC,GAaAC,GAaAC,GAYAC,GAQAC,GAYAC,GAcAC,GASAC,GAaOC,GAyEPC,GAkCOC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAtQbC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KACAC,KACAC,KAEM3B,GAAqC,CACzC,IAAK,sDACL,IAAK,sDACL,KAAM,wBACN,IAAK,wBACL,KAAM,wBACN,UAAW,oCACX,UAAW,6BACX,GAAI,6BACJ,GAAI,oCACJ,OAAQ,uBACV,EAEMC,GAA2C,CAC/C,IAAK,sDACL,IAAK,sDACL,KAAM,wBACN,IAAK,wBACL,KAAM,wBACN,UAAW,wBACX,UAAW,wBACX,GAAI,wBACJ,GAAI,wBACJ,OAAQ,uBACV,EAEMC,GAA4C,CAChD,IAAK,aACL,IAAK,aACL,KAAM,IACN,IAAK,IACL,KAAM,IACN,UAAW,IACX,UAAW,IACX,GAAI,IACJ,GAAI,IACJ,OAAQ,GACV,EAEMC,GAA8C,CAClD,IAAK,YACL,IAAK,YACL,IAAK,YACL,KAAM,YACN,UAAW,YACX,UAAW,iBACX,GAAI,YACJ,GAAI,kBACJ,OAAQ,gBACV,EAEMC,GAAmB,CAACwB,EAAsBC,IAA2B,CACzE,IAAMC,EAAM,CAAC,EACb,QAASC,EAAIF,EAAOD,EAAcG,EAAIF,EAAM,EAAEE,EAC5CD,EAAI,KAAKC,CAAC,EAEZ,OAAOD,CACT,EAEMzB,GAA4B,CAAC2B,EAA0BC,IAAkD,CAC7G,IAAMC,EAAc,CAAC,EACfL,EAAOG,EAAM,OACnB,QAASG,EAAM,EAAGA,EAAMN,EAAMM,IACxBF,EAAK,QAAQ
E,CAAG,IAAM,IACxBD,EAAY,KAAKF,EAAMG,CAAG,CAAC,EAG/B,IAAMC,EAAcH,EAAK,IAAIE,GAAOH,EAAMG,CAAG,CAAC,EAC9C,MAAO,CAACD,EAAaE,CAAW,CAClC,EAEM9B,GAAuB,CAAC0B,EAAiBC,IAA6B,CAC1E,IAAMJ,EAAOG,EAAM,OAASC,EAAK,OAC3BI,EAAc,CAAC,EACjBC,EAAW,EACf,QAASH,EAAM,EAAGA,EAAMN,EAAMM,IACxBF,EAAK,QAAQE,CAAG,IAAM,GACxBE,EAAY,KAAKL,EAAMM,GAAU,CAAC,EAElCD,EAAY,KAAK,CAAC,EAGtB,OAAOA,CACT,EAEM9B,GAAuB,CAAC0B,EAAgBJ,IAA0B,CACtE,QAASE,EAAI,EAAGA,EAAIE,EAAK,OAAQ,EAAEF,EACjC,GAAIE,EAAKA,EAAK,OAASF,EAAI,CAAC,IAAMF,EAAO,EAAIE,EAC3C,MAAO,GAGX,MAAO,EACT,EAEMvB,GAAqB,CAACyB,EAAgBJ,IAA2B,CACrE,IAAMC,EAAM,CAAC,EACb,GAAI,CAACvB,GAAqB0B,EAAMJ,CAAI,EAAG,CACrC,QAASE,EAAI,EAAGA,EAAIF,EAAM,EAAEE,EACtBE,EAAK,QAAQF,CAAC,IAAM,IACtBD,EAAI,KAAKC,CAAC,EAGdE,EAAK,QAAQM,GAAQT,EAAI,KAAKS,CAAI,CAAC,CACrC,CACA,OAAOT,CACT,EAEarB,GACT,CAAC+B,EAAcC,EAAqCC,EAA+BC,EAClFC,EAA0BV,EAAuBE,IAAuC,CACvF,IAAMS,EAAaH,EAAO,CAAC,EAAE,KAEvBI,EAAaC,EAAU,KAAKb,CAAW,EACvCc,EAAaD,EAAU,KAAKX,CAAW,EAEvCa,EAAQC,EAAc,KAAMR,EAAO,CAAC,EAAE,SAAUG,CAAU,EAC1DM,EAASC,GAAe,SAAUR,EAAgBV,CAAW,EAE7DmB,EAAgB,GAEhBC,EAAsB;AAAA,oDACkBD,CAAa;AAAA,SA+C3D,MAAO,CACL,KAAAb,EACA,YAAAC,EACA,gBA/CuBc,GAA+B;AAAA,UACpDA,EAAa,gBAAgB,aAAc,KAAK,EAAE,iBAAiBN,EAAOE,CAAM,CAAC;AAAA,UACjFG,CAAmB;AAAA;AAAA;AAAA;AAAA,WAIlBC,EAAa,UAAUF,CAAa,CAAC;AAAA;AAAA,2CAELA,CAAa;AAAA;AAAA;AAAA,gCAGxBnD,GAAiByC,CAAU,CAAC;AAAA;AAAA,wDAEJU,CAAa;AAAA,iCACpCJ,EAAM,YAAY,YAAY,CAAC;AAAA,yBACvCjD,GAAU2C,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,wCAKNU,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAM3BpD,GAAgB0C,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAS3CQ,EAAO,YACH,cACA,GACIR,IAAe,OAAS,GAAGQ,EAAO,KAAK,OAAO,yCACtB,GAAGA,EAAO,KAAK,OAAO,IAAIhD,GAAmBwC,CAAU,CAAC,GAAG,EAAE,CAAC;AAAA;AAAA,WAShG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMT,EAAa,SAAUU,CAAc,CAAC,EACvD,cAAe,CAAC,EAAGE,CAAU,EAC7B,gBAAiB,CAAC,CAAC,QAAuB,KAAME,CAAU,CAAC,CAC7D,EACF,CACF,EAEEtC,GACF,CAAC8C,EAAyBhB,EAAciB,EACvCd,IAAiG,CAChG,IAAMe,EACFF,EAAQ,OAAO,SAAW,EAAIC,EAAaE,GAAiCH,EAAQ,OAAQC,CAAU,EAEtGG,EAAcF,EAAkB,KAChCE,EAAY,SAAW,GAAK,CAACF,EAAkB,oBACjDE,EAAcJ,EAAQ,OAAO,CAAC,EAAE,KAAK,IAAI,CAACK,EAAM9B,IAAMA,CAAC,GAEzD,IAAM+B,EAAgBf,EAAU,cAAca,EAAaJ,EAAQ,OAAO,CAAC,EAAE,KAAK,MAAM,EAEpFvB,EAAO6B,EACPb,EAAQO,EAAQ,OAAO,CAAC,EACtBO,EAAevD,GAAmByB,EAAMuB,EAAQ,OAAO,CAAC,EAAE,KAAK,MAAM,EACvEO,EAAa,OAAS,IACxBd,EAAQO,EAAQ,QACZQ,GAA2BR,EAAQ,OAAO,CAAC,EAAGO,CAAY,EAAG,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAChG9B,EAAO7B,GAAiB6B,EAAK,OAAQgB,EAAM,KAAK,MAAM,GAGxD,GAAM,CAACf,EAAaE,CAAW,EAAI/B,GAA0B4C,EAAM,KAAMhB,CAAI,EACzEgC,EAAmB/B,EACnBwB,EAAkB,WACpBO,EAAmB3D,GAAqB4B,EAAa4B,CAAa,GAGpEN,EAAQ,QACJ/C,GACI+B,EAAM,CAAC,KAAMkB,EAAkB,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACT,CAAK,EAAGN,EAChFa,EAAQ,OAAO,CAAC,EAAE,SAAUS,EAAkB7B,CAAW,EAC7D,CAAC,OAAQ,CAACa,CAAK,CAAC,CAAC,CACvB,EAEStC,GAAmB,CAAC6C,EAAyBC,IAAuC,CAC/F/C,GAAa8C,EAAS,mBAAoBC,EAAY,MAAM,CAC9D,EAEa7C,GAAiB,CAAC4C,EAAyBC,IAAuC,CAC7F/C,GAAa8C,EAAS,iBAAkBC,EAAY,IAAI,CAC1D,EAEa5C,GAAiB,CAAC2C,EAAyBC,IAAuC,CAC7F/C,GAAa8C,EAAS,iBAAkBC,EAAY,IAAI,CAC1D,EAEa3C,GAAwB,CAAC0C,EAAyBC,IAAuC,CACpG/C,GAAa8C,EAAS,wBAAyBC,EAAY,WAAW,CACxE,EAEa1C,GAAkB,CAACyC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEazC,GAAkB,CAACwC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEaxC,GAAmB,CAACuC,EAAyBC,IAAuC,CAC/F/C,GAAa8C,EAAS,mBAAoBC,EAAY,MAAM,CAC9D,EAEavC,GAAkB,CAACsC,EAAyBC,IAAuC,CAC9F/C,GAAa8C,EAAS,kBAAmBC,EAAY,KAAK,CAC5D,EAEatC,GAAwB,CAACqC,EAAyBC,IAAuC,CACpG/C,GAAa8C,EAAS,wBAAyBC,EAAY,WAAW,CACxE,EAEarC,GAAqB,CAACoC,EAAyBC,IAAuC,CACjG/C,GAAa8C,EAAS,qBAAsBC,EAAY,QAAQ,CAClE,ICxQA,IAYMS,GAoBAC,GACOC,GA2EAC,GAUPC,GAeAC,GAWAC,GAWAC,
GAWAC,GAWAC,GAoBAC,GAqBAC,GAoBAC,GAWAC,GAWAC,GAWAC,GAsBOC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GAQAC,GA7WbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KACAC,KAEMhC,GAAkBiC,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,GAAKA,EAAO,OAAS,EACpD,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,0BAA0B,CAE9C,EAYMhC,GAAkBiC,GAAU,CAAC,GAAI,GAAI,eAAeA,EAAM,aAAa,eAAe,CAAC,IAAK,EAAE,EACvFhC,GACT,CAACiC,EAAcC,EAAqCH,EAA+BI,EAClFC,EAAqBC,EAA0BC,EAAW,GAAOC,EAAoB,KAAuB,CAC3G,IAAMC,EAAwB,CAAC,EACzBC,EAAaV,EAAO,CAAC,EAAE,KACvBW,EAAYD,EAAW,OACvBE,EAAOC,EAAU,cAAcR,EAAWM,CAAS,EACnDG,EAAkB,CAACN,GAAqBI,EAAK,SAAW,EAC9DF,EAAW,QAAQ,CAACK,EAAGC,IAAM,CACvBF,GAAmBF,EAAK,QAAQI,CAAC,GAAK,EACpCT,GACFE,EAAY,KAAK,CAAC,EAGpBA,EAAY,KAAKM,CAAC,CAEtB,CAAC,EACD,IAAME,EAAaR,EAAY,OACzBS,EAAaL,EAAU,KAAKJ,CAAW,EA4C7C,MAAO,CACL,KAAAP,EACA,YAAAC,EACA,gBA9CuBgB,GAA+B,CACtD,IAAMC,EAAoB,CAAC,EAErBnB,EAAQoB,EAAc,KAAMrB,EAAO,CAAC,EAAE,SAAUW,CAAS,EACzDW,EAASC,GAAe,SAAUjB,EAAgBW,CAAU,EAC5DO,EAAMpB,EAASH,EAAOqB,EAAQV,CAAI,EACpCa,EAAYD,EAAI,CAAC,EAErB,QAASE,EAAI,EAAGC,GAAI,EAAGD,EAAIf,EAAWe,IAEhCZ,GAAmBF,EAAK,QAAQc,CAAC,GAAK,GACpCnB,GACFoB,KAGFF,EAAY,YAAYC,CAAC,eAAeA,CAAC,MAAMhB,EAAWgB,CAAC,CAAC,MAAMA,CAAC;AAAA,oBAC3DF,EAAI,CAAC,EAAE,SAAS,YAAY,EAAI,qBAAqBE,CAAC,IAAM,EAAE;AAAA,oBAC9DzB,EAAM,WAAW,gBAAiByB,EAAG,IAAIA,CAAC,EAAE,CAAC;AAAA,oBAC7CD,CAAS;AAAA,qBAGjBL,EAAQ,KAAK,GAAGnB,EAAM,WAAW,gBAAiByB,EAAGJ,EAAO,WAAW,iBAAkBK,EAAC,CAAC,CAAC,GAAG,EAC/FA,MAGJ,MAAO;AAAA;AAAA,UAELR,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBlB,EAAOqB,CAAM,CAAC;AAAA;AAAA,UAElFH,EAAa,UAAU,CAAC;AAAA,YACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,+BACvDlB,EAAM,KAAK,OAAO;AAAA,iCAChBqB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,YAEzDF,EAAQ,KAAK;AAAA,CAAI,CAAC;AAAA,YAClBI,EAAI,CAAC,CAAC;AAAA,YACNA,EAAI,CAAC,CAAC;AAAA,YACNC,CAAS;AAAA,YACTD,EAAI,CAAC,CAAC;AAAA,YACNA,EAAI,SAAW,EAAIF,EAAO,YAAY,aAAc,OAAO,EAAIE,EAAI,MAAM,CAAC,EAAE,KAAK;AAAA,CAAI,CAAC;AAAA,UAE5F,EAME,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMf,EAAa,SAAUH,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKY,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGU,GAA2BlB,EAAYD,CAAW,CAAC,CACxG,EACF,CACF,EAESvC,GACT,CAAC8B,EAA+B6B,IAAmD,CACjF,IAAMjB,EAAiB,CAAC,EACxB,OAAIZ,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,GACtBA,EAAO,CAAC,EAAE,iBAAiB,EAAE,QAAQ8B,GAAKlB,EAAK,KAAK,OAAOkB,CAAC,CAAC,CAAC,EAEzDC,GACH,CAAC,KAAAnB,EAAM,SAAUiB,EAAW,SAAU,kBAAmBA,EAAW,iBAAiB,CAAC,CAC5F,EAEE1D,GACF,CAAC6D,EAAyB9B,EAAc2B,EAA8BzB,IAA6B,CACjG,IAAMJ,EAASgC,EAAQ,OACjBC,EACFjC,EAAO,SAAW,EAAI6B,EAAa3D,GAAiC8B,EAAQ6B,CAAU,EAE1FG,EAAQ,QACJ/D,GACIiC,EAAM,CAAC,KAAM+B,EAAkB,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACjC,EAAO,CAAC,CAAC,EACjFiC,EAAkB,mBAAqBA,EAAkB,KAAK,SAAW,EAAIjE,GAAOoC,EACpF6B,EAAkB,KAAMjC,EAAO,CAAC,EAAE,SAAUiC,EAAkB,SAC9DA,EAAkB,iBAAiB,EACvC,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEE7D,GAAoB,CAAC4D,EAAyBH,IAAuC,CACzF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,eAAgBH,EANf,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,qBACL,CAC8D,CAChE,EAEM5B,GAAgB,CAAC2D,EAAyBH,IAAuC,CACrF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,WAAYH,EANX,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,gBAAgBrB,EAAM,aAAa,eAAe,CAAC,KACnD,EACL,CAC0D,CAC5D,EAEM3B,GAAgB,CAAC0D,EAAyBH,IAAuC,CACrF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,WAAYH,EANX,CAAC5B,EAAOqB,IAC/B,CAAC,WAAWA,EAAO,KAAK,KAAK,oBAAoBA,EAAO,KAAK,KAAK,OACjE,GACA,OAAOrB,EAAM,aAAa,eAAe,CAAC,sBAC1C,sBACL,CAC0D,CAC5D,EAEM1B,GAAuB,CAACyD,EAAyBH,IAAuC,CAC5F9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,kBAAmBH,EANlB,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,gBAAgBrB,EAAM,aAAa,e
AAe,CAAC,KACnD,qBACL,CACiE,CACnE,EAEMzB,GAAiB,CAACwD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAgB7B7D,GAAiB6D,EAAS,YAAaH,EAfZ,CAAC5B,EAAOiC,EAAStB,IAAS,CACnD,IAAMuB,EAAU,CAAC,EACjB,QAAST,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,IAC1CuB,EAAQ,KAAKlC,EAAM,WAAW,gBAAiByB,EAAG,CAAC,CAAC,EAIxD,MAAO,CACL,GAAGS,EAAQ,KAAK;AAAA,CAAI,CAAC,GACrB,eAAelC,EAAM,aAAa,eAAe,CAAC,IAClD,sBAAsBA,EAAM,aAAa,eAAe,CAAC,KACzD,EACF,CACF,CAC2D,CAC7D,EAEMxB,GAAkB,CAACuD,EAAyBH,IAAuC,CACvF9D,GAAeiE,EAAQ,MAAM,EAiB7B7D,GAAiB6D,EAAS,aAAcH,EAhBb,CAAC5B,EAAOqB,EAAQV,IAAS,CAClD,IAAIwB,EAAO,EACX,QAASV,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,KAE1CwB,GAAQJ,EAAQ,OAAO,CAAC,EAAE,KAAKN,CAAC,GAIpC,MAAO,CACL,oBACA,GACA,cAAczB,EAAM,aAAa,eAAe,CAAC,KACjD,eAAeqB,EAAO,KAAK,KAAK,UAAUc,CAAI,IAChD,CACF,CAC4D,CAC9D,EAEM1D,GAAiB,CAACsD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAgB7B7D,GAAiB6D,EAAS,YAAaH,EAfZ,CAAC5B,EAAOiC,EAAStB,IAAS,CACnD,IAAMuB,EAAU,CAAC,EACjB,QAAST,EAAI,EAAGA,EAAIzB,EAAM,KAAMyB,KAC1Bd,EAAK,QAAQc,CAAC,GAAK,GAAKd,EAAK,SAAW,IAC1CuB,EAAQ,KAAK,iBAAiBT,CAAC,QAAQ,EAI3C,MAAO,CACL,GAAGS,EAAQ,KAAK;AAAA,CAAI,CAAC,GACrB,eAAelC,EAAM,aAAa,eAAe,CAAC,IAClD,sBAAsBA,EAAM,aAAa,eAAe,CAAC,KACzD,EACF,CACF,CAC2D,CAC7D,EAEMtB,GAAkB,CAACqD,EAAyBH,IAAuC,CACvF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,aAAcH,EANb,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,EACL,CAC4D,CAC9D,EAEMrB,GAAiB,CAACoD,EAAyBH,IAAuC,CACtF9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,YAAaH,EANZ,CAAC5B,EAAOqB,IAC/B,CAAC,eAAeA,EAAO,KAAK,OAAO,OAClC,GACA,YAAYrB,EAAM,aAAa,eAAe,CAAC,IAC/C,EACL,CAC2D,CAC7D,EAEMpB,GAAuB,CAACmD,EAAyBH,IAAuC,CAC5F9D,GAAeiE,EAAQ,MAAM,EAO7B7D,GAAiB6D,EAAS,kBAAmBH,EANlB,CAAC5B,EAAOqB,IAC/B,CAAC,WAAWA,EAAO,KAAK,KAAK,oBAAoBA,EAAO,KAAK,KAAK,OACjE,GACA,OAAOrB,EAAM,aAAa,eAAe,CAAC,oBAC1C,EACL,CACiE,CACnE,EAEMnB,GACF,CAACuD,EAA0BzB,EAAyBJ,IAAwC,CAC1F,GAAII,EAAK,SAAW,EAClB,OAAOJ,EAGT,IAAIU,EAAa,EACboB,EAAa,EACjB,QAASC,EAAM,EAAGA,EAAM3B,EAAK,OAAQ2B,IAC/B3B,EAAK,QAAQ2B,CAAG,IAAM,GACxBrB,GAAcmB,EAAME,CAAG,EAEvBD,GAAcD,EAAME,CAAG,EAO3B,OAAOD,EAAa,IAAMpB,EAAa,IACzC,EAESnC,GAAa,CAACiD,EAAyBH,IAAuC,CACrF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FpD,GAAgBuD,EAASH,CAAU,EAEnCW,GAAiBR,EAASH,CAAU,CAExC,EAEa7C,GAAW,CAACgD,EAAyBH,IAAuC,CACnF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FxD,GAAc2D,EAASH,CAAU,EAEjCY,GAAeT,EAASH,CAAU,CAEtC,EAEa5C,GAAW,CAAC+C,EAAyBH,IAAuC,CACnF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FvD,GAAc0D,EAASH,CAAU,EAEjCa,GAAeV,EAASH,CAAU,CAEtC,EAEa3C,GAAkB,CAAC8C,EAAyBH,IAAuC,CAC1F/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FtD,GAAqByD,EAASH,CAAU,EAExCc,GAAsBX,EAASH,CAAU,CAE7C,EAEa1C,GAAY,CAAC6C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FrD,GAAewD,EAASH,CAAU,EAElCe,GAAgBZ,EAASH,CAAU,CAEvC,EAEazC,GAAY,CAAC4C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FnD,GAAesD,EAASH,CAAU,EAElCgB,GAAgBb,EAASH,CAAU,CAEvC,EAEaxC,GAAa,CAAC2C,EAAyBH,IAAuC,CACrF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FlD,GAAgBqD,EAASH,CAAU,EAEnCiB,GAAiBd,EAASH,CAAU,CAExC,EAEavC,GAAY,CAAC0C,EAAyBH,IAAuC,CACpF/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FjD,GAAeoD,EAASH,CAAU,EAElCkB,GAAgBf,EAASH,CAAU,CAEvC,EAEatC,GAAkB,CAACyC,EAAyBH,IAAuC,CAC1F/C,GAAqBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FhD,GAAqBmD,EAASH,CAAU,EAExCmB,GAAsBhB,EAASH,CAAU,CAE7C,EAEarC,GAAe,CAACwC,EAAyBH,IAAuC,CACvF/C,GAA
qBkD,EAAQ,OAAO,CAAC,EAAE,KAAMH,EAAW,KAAMA,EAAW,iBAAiB,EAC5FzD,GAAkB4D,EAASH,CAAU,EAErCoB,GAAmBjB,EAASH,CAAU,CAE1C,ICnXA,IAcMqB,GAeOC,GA0BAC,GA0BAC,GAjFbC,GAAAC,GAAA,kBAOAC,KAEAC,KAGAC,KAEMR,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,GAAKA,EAAO,OAAS,EACpD,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAO,CAAC,EAAE,WAAa,EACzB,MAAM,IAAI,MAAM,qBAAqB,CAEzC,EAQaR,GAAS,CAACS,EAAyBC,IAA0C,CACxFX,GAAeU,EAAQ,MAAM,EAC7B,IAAME,EAAwB,CAACC,EAAOC,EAAQC,IAAS,CACrD,IAAMC,EAAU,CAAC,EACjB,QAASC,EAAI,EAAGA,EAAIJ,EAAM,KAAMI,KAC1BF,EAAK,QAAQE,CAAC,GAAK,GAAKF,EAAK,SAAW,IAC1CC,EAAQ,KAAK,iBAAiBC,CAAC,QAAQ,EAG3C,MAAO,CACL,GAAGD,EAAQ,KAAK;AAAA,CAAI,CAAC,GAAI,eAAeH,EAAM,aAAa,eAAe,CAAC;AAAA,2BAC3E,OAAOA,EAAM,aAAa,eAAe,CAAC,IAAIF,EAAW,gBAAkB,EAAI,KAAO,GAAG;AAAA,mBAC5EE,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,UAGhD,GAAIC,EAAO,YAAY,aAAc,YAAY,CACnD,CACF,EAEAJ,EAAQ,QACJQ,GACI,SAAU,CAAC,KAAMP,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACD,EAAQ,OAAO,CAAC,CAAC,EAAGE,EACzF,CAACD,EAAW,IAAI,IAAmBA,EAAW,QAAQ,EAC1D,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEaT,GAAS,CAACQ,EAAyBC,IAA0C,CACxFX,GAAeU,EAAQ,MAAM,EAC7B,IAAME,EAAwB,CAACC,EAAOC,EAAQC,IAAS,CACrD,IAAMC,EAAU,CAAC,EACjB,QAASC,EAAI,EAAGA,EAAIJ,EAAM,KAAMI,KAC1BF,EAAK,QAAQE,CAAC,GAAK,GAAKF,EAAK,SAAW,IAC1CC,EAAQ,KAAK,iBAAiBC,CAAC,QAAQ,EAG3C,MAAO,CACL,GAAGD,EAAQ,KAAK;AAAA,CAAI,CAAC,GAAI,eAAeH,EAAM,aAAa,eAAe,CAAC;AAAA,2BAC3E,OAAOA,EAAM,aAAa,eAAe,CAAC,IAAIF,EAAW,gBAAkB,EAAI,KAAO,GAAG;AAAA,mBAC5EE,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,UAGhD,GAAIC,EAAO,YAAY,aAAc,YAAY,CACnD,CACF,EAEAJ,EAAQ,QACJQ,GACI,SAAU,CAAC,KAAMP,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EAAG,CAACD,EAAQ,OAAO,CAAC,CAAC,EAAGE,EACzF,CAACD,EAAW,IAAI,IAAmBA,EAAW,QAAQ,EAC1D,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEaR,GAA4BQ,GACrCQ,GAA4BR,CAAoE,IClFpG,IAeMS,GA4BAC,GAWAC,GAmBOC,GAkEAC,GAcAC,GAzJbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMX,GAAiB,CAACY,EAA+BC,IAAuB,CAC5E,GAAI,CAACD,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,IAAME,EAAiB,EACjBC,EAAiBH,EAAOE,CAAc,EACtCE,EAAYD,EAAe,SAC3BE,EAAYF,EAAe,KAAK,OACtCH,EAAO,QAAQ,CAACM,EAAOC,IAAM,CAC3B,GAAIA,IAAML,EAIV,IAAII,EAAM,WAAaF,EACrB,MAAM,IAAI,MAAM,kCAAkC,EAGpD,GAAIE,EAAM,KAAK,SAAWD,EACxB,MAAM,IAAI,MAAM,0CAA0C,EAE5DC,EAAM,KAAK,QAAQ,CAACE,EAAKD,IAAM,CAC7B,GAAIA,IAAMN,GAAQO,IAAQL,EAAe,KAAKI,CAAC,EAC7C,MAAM,IAAI,MAAM,kCAAkC,CAEtD,CAAC,EACH,CAAC,CACH,EAEMlB,GAA0B,CAACoB,EAAyBC,IAAwC;AAAA;AAAA,wCAE1DD,CAAe,MAAMC,CAAmB;AAAA,gCAChDD,CAAe;AAAA;AAAA;AAAA;AAAA;AAAA,aAKlCA,CAAe;AAAA,KAGtBnB,GAAmB,CAACU,EAAkCW,IAA0B,CACpF,IAAMF,EAAkBT,EAAO,OAEzBY,EAAsB,CAAC,EAC7B,QAASL,EAAI,EAAGA,EAAIE,EAAiB,EAAEF,EAAG,CACxC,IAAMM,EAAgBF,EAAO,YAAY,aAAcX,EAAOO,CAAC,EAAE,aAAa,SAAS,CAAC,EACpFE,IAAoB,EACtBG,EAAU,KAAKC,CAAa,EACnBN,IAAM,EACfK,EAAU,KAAK,qBAAqBL,CAAC,QAAQM,CAAa,IAAI,EACrDN,IAAME,EAAkB,EACjCG,EAAU,KAAK,UAAUC,CAAa,IAAI,EAE1CD,EAAU,KAAK,0BAA0BL,CAAC,OAAOM,CAAa,IAAI,CAEtE,CACA,OAAOD,EAAU,KAAK;AAAA,CAAI,CAC5B,EAEarB,GACT,CAACS,EAA+Bc,EAAsBC,EAAuBC,IAAoC,CAC/G,IAAMC,EAAaC,EAAU,KAAKH,CAAW,EAEvCI,EAAmB,IAAI,MAAcnB,EAAO,MAAM,EAClDoB,EAAY,IAAI,MAAqBpB,EAAO,MAAM,EAEpDqB,EAAc,EACZC,EAAwD,CAAC,EACzDC,EAAa,CAAC,EACdC,EAAoC,CAAC,CAAC,QAAuB,KAAMP,CAAU,CAAC,EACpF,QAASV,EAAI,EAAGA,EAAIP,EAAO,OAAQ,EAAEO,EACnCc,GAAerB,EAAOO,CAAC,EAAE,KAAKO,CAAY,EAC1CK,EAAiBZ,CAAC,EAAIc,EACtBE,EAAW,KAAKvB,EAAOO,CAAC,EAAE,KAAK,MAAM,EACrCa,EAAUb,CAAC,EAAIkB,EAAc,QAAQlB,CAAC,GAAIS,EAAUO,EAAWhB,CAAC,CAAC,EACjEe,EAAkB,KAAK,MAAM,EAC7BE,EAAgB,KAAK,CAAC,QAAuB,KAAML,EAAiBZ,CAAC,CAAC,CAAC,EAEzE,QAASA,EAAI,EAAGA,EAAIP,EAAO,OAAQ,EAAEO,EACnCiB,EAAgB,KAAK,GAAGE,GAA2B1B,EAAOO,CAAC,EAAE,IAAI,CAAC,EAEpEiB,EAAgB,KAAK,GAAGE,GAA2BX,CAAW,CAAC,EAE/D,IAAMJ,EAASgB,GAAe,SAAUX,EAAUD,EAAY,MAAM,EAC9Da,EA
AcjB,EAAO,WAAW,UAAWG,CAAY,EACvDJ,EACF,MAAM,KAAK,MAAMS,EAAiB,MAAM,EAAE,KAAK,CAAC,EAAE,IAAIZ,GAAK,4BAA4BA,CAAC,EAAE,EAAE,KAAK,GAAG,EAClGsB,EAAmBC,GAA+B;AAAA;AAAA,KAEzD,IAAM,CACHA,EAAa,gBAAgB,aAAc,KAAK,EAChD,QAASvB,EAAI,EAAGA,EAAIP,EAAO,OAAQO,IACjCuB,EAAa,gBAAgB,mBAAmBvB,CAAC,GAAI,KAAK,EAE5D,OAAOuB,EAAa,iBAAiB,GAAGV,EAAWT,CAAM,CAC3D,GAAG,CAAC;AAAA;AAAA,IAENtB,GAAwB8B,EAAiB,OAAQT,CAAmB,CAAC;AAAA;AAAA,IAErEoB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,oBAE3DnB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,2CAEbiB,CAAW;AAAA;AAAA,0CAEZT,EAAiB,MAAM,MAAMT,CAAmB;AAAA,QAClFkB,CAAW;AAAA;AAAA;AAAA,MAGbtC,GAAiB8B,EAAWT,CAAM,CAAC;AAAA,KAGnC,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAM,GAAGG,CAAY,GAAI,kBAAAQ,CAAiB,EACxD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMP,EAAa,SAAAC,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKC,EAAa,EAAuB,CAAC,EAClE,gBAAAO,CACF,GACA,gBAAAK,CACF,CACF,EAESrC,GAAS,CAACuC,EAAyBC,IAAuC,CACrF,IAAMhC,EAAS+B,EAAQ,OACjBE,EAAajC,EAAO,CAAC,EAAE,KACvBc,EAAeI,EAAU,cAAcc,EAAW,KAAMC,EAAW,MAAM,EAC/E7C,GAAeY,EAAQc,CAAY,EACnC,IAAMC,EAAckB,EAAW,MAAM,EACrClB,EAAYD,CAAY,EACpBd,EAAO,OAAO,CAACkC,EAAK5B,IAAU4B,GAAO5B,EAAM,KAAK,OAASQ,EAAeR,EAAM,KAAKQ,CAAY,EAAI,GAAI,CAAC,EAE5G,IAAMqB,EAAiBnC,EAAO,OAAOM,GAASY,EAAU,KAAKZ,EAAM,IAAI,EAAI,CAAC,EAC5EyB,EAAQ,QACJxC,GAAwB4C,EAAgBrB,EAAcC,EAAaf,EAAO,CAAC,EAAE,QAAQ,EAAG,CAAC,OAAQmC,CAAc,CAAC,CACtH,EAEa1C,GAAyBuC,GAClCI,GAA4B,CAAC,KAAMJ,EAAW,IAAc,CAAC,IC1JjE,IAoEMK,GAmKAC,GAsGAC,GA2HAC,GA+EOC,GAiDPC,GAmHOC,GA3rBbC,GAAAC,GAAA,kBAGAC,KAEAC,KAEAC,KACAC,KA4DMZ,GAA0B,CAACa,EAA+BC,IAAoD,CAmClH,IAAMC,EAAQF,EAAO,CAAC,EAChBG,EAAUH,EAAO,CAAC,EAClBI,EAAOJ,EAAO,CAAC,EACfK,EAAYL,EAAO,CAAC,EACpBM,EAAON,EAAO,CAAC,EACfO,EAAuBP,EAAO,CAAC,EAErC,GAAIM,GAAQC,EACV,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIL,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,sCAAsC,EAGxD,IAAMM,EAAYN,EAAM,KAAK,CAAC,EACxBO,EAAiBP,EAAM,KAAK,CAAC,EAC7BQ,EAAkBR,EAAM,KAAK,CAAC,EAEpC,GAAIE,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,+CAA+C,EAGjE,GAAID,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,kDAAkD,EAGpE,GAAIA,EAAQ,KAAK,CAAC,IAAMO,EACtB,MAAM,IAAI,MAAM,uEAAuE,EAGzF,GAAIN,EAAK,KAAK,CAAC,IAAMD,EAAQ,KAAK,CAAC,EACjC,MAAM,IAAI,MAAM,oFAAoF,EAGtG,IAAIQ,EAAcP,EAAK,KAAK,CAAC,EAAI,EAC7BQ,EAAcD,EACdE,EAAcD,EAClB,GAAIX,EAAW,eAAe,OAAS,EAAG,CACxC,GAAIA,EAAW,eAAe,SAAW,EACvC,MAAM,IAAI,MAAM,mDAAmD,EAErE,QAAWa,KAAMb,EAAW,eAC1B,GAAIa,EAAKb,EAAW,WAAa,EAC/B,MAAM,IAAI,MAAM,mDAAmD,EAIvEU,EAAcV,EAAW,eAAe,CAAC,EACzCW,EAAcX,EAAW,eAAe,CAAC,EACzCY,EAAcZ,EAAW,eAAe,CAAC,CAC3C,CAEA,IAAMc,EAAmBN,EAEzB,GAAIE,IAAgBC,EAClB,MAAM,IAAI,MAAM,6DAA6D,EAG/E,GAAIR,EAAK,KAAK,CAAC,IAAMO,EAAcC,EAAcC,EAC/C,MAAM,IAAI,MAAM,+EAA+E,EAGjG,IAAIG,EAAqB,EACzB,GAAIV,EAAM,CACR,GAAIM,IAAgBC,EAClB,MAAM,IAAI,MAAM,oDAAoD,EAEtE,GAAIP,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,qCAAqC,EAEvD,GAAIA,EAAK,KAAK,CAAC,IAAM,EACnB,MAAM,IAAI,MAAM,wCAAwC,EAE1D,GAAIA,EAAK,KAAK,CAAC,IAAME,EACnB,MAAM,IAAI,MAAM,kDAAkD,EAEpE,GAAIF,EAAK,KAAK,CAAC,IAAML,EAAW,SAC9B,MAAM,IAAI,MAAM,gDAAgD,EAElE,GAAIK,EAAK,KAAK,CAAC,IAAMM,EAAcX,EAAW,SAC5C,MAAM,IAAI,MAAM,gEAAgE,EAG7EA,EAAW,yBACde,EAAqBV,EAAK,KAAK,CAAC,EAGpC,CAEA,IAAMW,EAAsBF,EAAmBC,EACzCE,EAAoB,GAEpBC,EAAW,EACjB,GAAId,EAGF,MAAM,IAAI,MAAM,oBAAoB,EAGtC,GAAIC,EACF,MAAM,IAAI,MAAM,uBAAuB,EAGzC,MAAO,CACL,UAAAE,EACA,eAAAC,EACA,mBAAAO,EACA,iBAAAD,EACA,oBAAAE,EACA,kBAAAC,EACA,gBAAAR,EACA,WAAYC,EACZ,YAAAE,EACA,SAAU,KAAK,MAAMF,EAAcV,EAAW,QAAQ,EACtD,UAAW,KAAK,MAAMY,EAAcZ,EAAW,QAAQ,EACvD,SAAUA,EAAW,SACrB,iBAAkB,GAClB,uBAAwB,GACxB,gBAAiBA,EAAW,gBAC5B,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAqB,GACrB,aAAc,GACd,UAAW,CACb,CACF,EAEMb,GAAkC,CAACgC,EAA0BlB,EAAmBmB,EAAWC,IAAc,CAC7G,IAAMC,EAAaC,GAAiBF,CAAC,EACjCG,EAAK,GACHC,EAAQJ,EAAIC,EA
CdG,EAAQD,EACVA,EAAK,EACIC,EAAQ,EAAI,KACrBD,EAAK,KAAK,KAAKC,EAAQ,CAAC,GAE1B,IAAMC,EAAoB,KAAK,KAAKL,EAAIC,EAAaE,CAAE,EACjDG,EAAoC,CACxC,CAAC,KAAM1B,EAAM,SAAU,KAAM,EAAIoB,CAAC,EAAG,CAAC,QAAuB,KAAMI,CAAK,EACxE,CAAC,QAAuB,KAAMC,CAAiB,CACjD,EACME,EAAWC,GAA4B5B,EAAM,SAAUqB,CAAU,EACjEQ,EAAUC,KAA0CT,CAAU,EAE9DU,EAAmBC,GAA+B,CACtD,IAAMC,EAAcC,GAAe,IAAKlC,EAAM,SAAUA,EAAM,KAAMqB,CAAU,EAExEc,EAA8B,CAClC,CAAC,KAAM,QAAS,KAFIL,GAA0B9B,EAAM,QAAQ,CAEC,EAAG,CAAC,KAAM,SAAU,KAAM,KAAK,EAC5F,CAAC,KAAM,sBAAuB,KAAM,KAAK,CAC3C,EAEA,MAAO;AAAA,0CAC+BuB,CAAE;AAAA,0CACFA,CAAE;AAAA,IACxCS,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiBF,CAAW,CAAC;AAAA,IACrED,EAAa,UAAU,CACrBT,EAAI,EAAG,CACT,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,8BAIwBM,CAAO;AAAA;AAAA,gCAELA,CAAO;AAAA;AAAA,+BAER,IAAM,CAC/B,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,oBACT,IAAK,GACH,MAAO,gDACT,IAAK,GACH,MAAO,oGACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA,2BAImBE,CAAE;AAAA;AAAA;AAAA;AAAA,uBAINM,CAAO;AAAA;AAAA,0BAEJA,CAAO;AAAA;AAAA,+BAEF,IAAM,CAC/B,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,aACT,IAAK,GACH,MAAO,8BACT,IAAK,GACH,MAAO,4DACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA,2BAImBE,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAMHU,EAAY,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA,yBAIvBJ,CAAO;AAAA,0BACNI,EAAY,KAAK,KAAK;AAAA;AAAA;AAAA,IAI9C,EAEA,MAAO,CACL,KAAM,wBACN,YAAa,CAAC,KAAM,GAAGV,CAAE,IAAII,CAAQ,IAAIN,CAAU,EAAE,EACrD,gBAAAU,EACA,WAAY,KAAO,CAAC,QAAS,CAAC,EAAG,cAAe,CAAC,EAAGZ,CAAC,EAAG,gBAAAO,CAAe,EACzE,CACF,EAEMvC,GACF,CAAC+B,EAA0BkB,EAAeC,EAAiBhC,EAC1DiC,EAAiCvC,EAA4Be,IAA+B,CAC3F,IAAMC,EAAsBD,EAAqBwB,EAAW,iBACtDC,EAAa,CAACD,EAAW,UAAWA,EAAW,SAAUA,EAAW,eAAgBvB,CAAmB,EAIvGyB,EAAQzC,EAAW,QAAU,EAAI,EAAM,KAAK,KAAKuC,EAAW,QAAQ,EAAIvC,EAAW,MACnFsB,EAAaC,GAAiBgB,EAAW,QAAQ,EACjDG,EAAqBH,EAAW,SAAWjB,EAC3CqB,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAK5B,EAAsB2B,CAAS,EAC5C,EAAG,KAAK,KAAKJ,EAAW,eAAiBI,CAAS,EAClD,EAAGJ,EAAW,UAAYA,EAAW,QACvC,EACMZ,EAAoC,CACxC,CAAC,QAAuB,KAAMY,EAAW,cAAc,EAAG,CAAC,QAAuB,KAAMG,CAAkB,EAC1G,CAAC,QAAuB,KAAM1B,CAAmB,EAAG,CAAC,QAAuB,KAAMuB,EAAW,QAAQ,EACrG,CAAC,OAAsB,KAAME,CAAK,CACpC,EAEMI,EAAwD,CAAC,OAAQ,MAAM,EACzEvC,IACFuC,EAAkB,KAAK,MAAM,EAC7BlB,EAAgB,KAAK,GAAGmB,GAA2BxC,EAAqB,IAAI,CAAC,GAG/E,IAAM0B,EAAmBC,GAA+B,CACtD,IAAMc,EAASC,EAAc,IAAKX,EAAE,SAAUA,EAAE,KAAMf,CAAU,EAC1D2B,EAASD,EAAc,MAAOV,EAAI,SAAUA,EAAI,KAAMhB,CAAU,EAChE4B,EAAY,CAACH,EAAQE,CAAM,EAC3BE,EAA4B7C,EAC9B0C,EAAc,yBAA0B1C,EAAqB,SAAUA,EAAqB,KAAK,MAAM,EACvG,OACA6C,GACFD,EAAU,KAAKC,CAAyB,EAE1C,IAAMC,EAASjB,GAAe,SAAUE,EAAE,SAAUG,CAAU,EAExDV,GAAUC,KAA0CT,CAAU,EAE9Dc,EAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC3E,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,QAAS,KAAM,KAA+B,CACzF,EACA,MAAO;AAAA,sBACOO,CAAS;AAAA;AAAA,gCAECI,EAAO,KAAK,OAAO,KAAKJ,EAAYA,CAAS;AAAA,gCAC7CI,EAAO,KAAK,OAAO,KAAKJ,EAAYA,CAAS;AAAA,IACzEV,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiB,GAAGc,EAAWE,CAAM,CAAC;AAAA,IAC9EnB,EAAa,UAAU,CACjBU,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kBAQQb,EAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAWNA,EAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBASF,IAAM,CACpB,OAAQR,EAAY,CAClB,IAAK,GACH,MAAO,QACT,IAAK,GACH,MAAO,oBACT,IAAK,GACH,MAAO,wCACT,QACE,MAAM,IAAI,MAAM,2BAA2BA,CAAU,EAAE,CAC3D,CACF,GAAG,CAAC;AAAA;AAAA,KAEP,IACS6B,EACK;AAAA;AAAA;AAAA,sBAGGA,EAA0B,KAAK,OAAO;AAAA,4BAChCC,EAAO,KAAK,KAAK,4BAC7BD,EAA0B,aAAa,SAAS,CAAC,IAEhD,uBAAuBC,EAAO,KAAK,KAAK,4BAC9C,CAAC;AAAA;AAAA,IAGN,EACA,MAAO,CACL,KAAM,iBACN,YAAa,CAAC,KAAM,GAAG9B,CAAU,GAAI,kBAAAuB,CAAiB,EACtD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAML,EAAY,SAAUH,EAAE,S
AAU,aAAgC,CAAC,EACpF,cAAeO,EACf,gBAAAjB,CACF,GACA,gBAAAK,CACF,CACF,EAGE3C,GACF,CAAC8B,EAA0BkC,EAAmBC,EAAeC,EAC5DxC,IAA+B,CAC9B,IAAMC,EAAsBD,EAAqBwC,EAAO,iBAClDC,EAAc,CAACD,EAAO,UAAWA,EAAO,eAAgBA,EAAO,WAAW,EAC1EZ,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAKW,EAAO,UAAYZ,CAAS,EACzC,EAAG,KAAK,KAAKY,EAAO,eAAiBZ,CAAS,EAC9C,EAAGY,EAAO,UAAYA,EAAO,QAC/B,EACM5B,EAAoC,CACxC,CAAC,QAAuB,KAAM4B,EAAO,cAAc,EAAG,CAAC,QAAuB,KAAMvC,CAAmB,EACvG,CAAC,QAAuB,KAAMuC,EAAO,SAAS,EAAG,CAAC,QAAuB,KAAMA,EAAO,QAAQ,EAC9F,CAAC,QAAuB,KAAMA,EAAO,WAAW,CAClD,EAoDA,MAAO,CACL,KAAM,iBACN,YAAa,CAAC,kBApD8C,CAAC,OAAQ,MAAM,CAoD5C,EAC/B,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMC,EAAa,SAAUH,EAAM,SAAU,aAAgC,CAAC,EACzF,cAAeT,EACf,gBAAAjB,CACF,GACA,gBAzDuBM,GAA+B,CACtD,IAAMwB,EAAcT,EAAc,QAASK,EAAM,SAAUA,EAAM,IAAI,EAC/DK,EAAUV,EAAc,IAAKM,EAAE,SAAUA,EAAE,IAAI,EAC/CF,EAASjB,GAAe,SAAUkB,EAAM,SAAUG,CAAW,EAC7DpB,EAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC3E,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,gBAAiB,KAAM,KAAK,CACvE,EACA,MAAO;AAAA,sBACOO,CAAS;AAAA,gCACCc,EAAY,KAAK,KAAK,KAAKd,EAAYA,CAAS;AAAA,gCAChDc,EAAY,KAAK,KAAK,KAAKd,EAAYA,CAAS;AAAA,IAC5EV,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiBqB,EAAaC,EAASN,CAAM,CAAC;AAAA,IACtFnB,EAAa,UAAU,CACjBU,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iBAQOc,EAAY,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAwBnC,CAWA,CACF,EAESnE,GACT,CAACqE,EAAyBtB,EAAeuB,EAAeN,EAAeO,EACtEC,EAA6BC,EAA+BC,EAC5D1D,EAA4CiC,EAAiCvC,IAA+B,CAC3G,IAAMiE,EAAmBN,EAAQ,YAAc,EACzCO,EAAqBP,EAAQ,YAAc,EAC3C5C,EAAsBkD,GAAoBC,EAAsB3B,EAAW,mBAAqB,EAChGvB,EAAsBD,EAAqBwB,EAAW,iBAEtD4B,EAAkB,CAAC5B,EAAW,UAAWA,EAAW,SAAUvB,EAAqBuB,EAAW,QAAQ,EACtG6B,EAAkBL,EAAU,CAACA,EAASH,CAAC,EAAI,CAACA,CAAC,EAC7CtB,EAAM2B,EAAmBN,EAAQ,QACJU,GAAwBD,EAAiB,EAAGD,EAAiBP,EAAE,QAAQ,EACvE,CAAC,OAAQQ,EAAiB,QAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAC9CR,EAGzBU,EAAoB,CAAC/B,EAAW,UAAWA,EAAW,SAAUvB,EAAqBuB,EAAW,QAAQ,EACxGgC,EAAoBP,EAAY,CAACA,EAAWV,CAAC,EAAI,CAACA,CAAC,EACnDkB,EAAQN,EACVP,EAAQ,QACJU,GAAwBE,EAAmB,EAAGD,EAAmBhB,EAAE,QAAQ,EAC3E,CAAC,OAAQiB,EAAmB,QAAS,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,EAChDjB,EACEmB,EAAU,CAACpC,EAAGC,CAAG,EACnBhC,GACFmE,EAAQ,KAAKnE,CAAoB,EAInC,IAAM+C,EAAQM,EAAQ,QAClBvE,GACIuE,EAAStB,EAAGC,EAAKhC,EAAsBiC,EAAYvC,EAAYe,CAAkB,EACrF,CAAC,OAAQ0D,EAAS,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAGvCd,EAAQ,QACJxE,GACIwE,EAASN,EAAOd,EAAW,UAAYA,EAAW,SAAWA,EAAW,eACxEvB,CAAmB,EACvB,CAAC,OAAQ,CAACqC,CAAK,EAAG,QAAS,CAAC,CAAC,CAAC,EAGlC,IAAMqB,GAAU,CAACrB,EAAOmB,CAAK,EAC7Bb,EAAQ,QACJtE,GAAkCsE,EAASN,EAAOmB,EAAOjC,EAAYxB,CAAkB,EACvF,CAAC,OAAQ2D,GAAS,QAAS,CAAC,CAAC,CAAC,CAAC,CACrC,EAEEnF,GAAU,CAACoE,EAAyBpB,IAAoC,CAC5E,IAAMiB,EAAc,CAClBjB,EAAW,UACXA,EAAW,SACXA,EAAW,eACXA,EAAW,QACb,EACMoC,EAAIpC,EAAW,eACfqC,EAAIrC,EAAW,gBACfsC,EAAItC,EAAW,SACfI,EAAY,GACZC,EAAW,CACf,EAAG,KAAK,KAAKL,EAAW,SAAWI,CAAS,EAC5C,EAAG,KAAK,KAAKJ,EAAW,eAAiBI,CAAS,EAClD,EAAGJ,EAAW,UAAYA,EAAW,QACvC,EACMxC,EAAS,CAAC4D,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,CAAC,EACjEhC,EAAoC,CACxC,CAAC,QAAuB,KAAMgD,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EACnG,CAAC,QAAuB,KAAMtC,EAAW,QAAQ,EAAG,CAAC,QAAuB,KAAMA,EAAW,QAAQ,EACrG,CAAC,QAAuB,KAAMA,EAAW,UAAU,EACnD,CAAC,QAAuB,KAAMA,EAAW,WAAaA,EAAW,WAAaA,EAAW,WAAW,CACtG,EAEMP,EAAmBC,GAA+B,CACtD,IAAM6C,EAAU3C,GAAe,WAAYpC,EAAO,CAAC,EAAE,SAAUyD,CAAW,EACpEuB,EAAU5C,GAAe,WAAYpC,EAAO,CAAC,EAAE,SAAUyD,CAAW,EACpEwB,EAAU7C,GAAe,WAAYpC,EAAO,CAAC,EAAE,SAAUyD,CAAW,EACpEvD,EAAQ+C,EAAc,QAASjD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACj
EkF,EAASjC,EAAc,SAAUjD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACnEI,EAAO6C,EAAc,OAAQjD,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EAC/D6B,EAAW3B,EAAM,KAAK,QAEtBmC,EAA8B,CAClC,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EAC7G,CAAC,KAAM,YAAa,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,MAAO,KAAM,KAAK,CACjG,EACA,MAAO;AAAA,sBACWO,CAAS;AAAA,oCACKf,CAAQ,KAAKe,EAAYA,CAAS;AAAA,sCAChCf,CAAQ,KAAKe,EAAYA,CAAS;AAAA,sCAClCf,CAAQ,KAAKe,EAAYA,CAAS;AAAA,sCAClCf,CAAQ,KAAKe,EAAYA,CAAS;AAAA,IACpEV,EAAa,iBAAiBG,CAAQ,EAAE,iBAAiBnC,EAAOgF,EAAQ9E,EAAM2E,EAASC,EAASC,CAAO,CAAC;AAAA,IACxG/C,EAAa,UAAU,CACrBU,EAAWA,EAAW,CACxB,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mBAWaf,CAAQ;AAAA,mBACRA,CAAQ;AAAA,mBACRA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,IAoCzB,EAEA,OAAO+B,EAAQ,QACX,CACE,KAAM,mBACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,OAAQ,MAAM,CAAC,EACzD,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAMH,EAAa,SAAUG,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,EAC1F,CAAC,KAAMH,EAAa,SAAUG,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,EAC1F,CAAC,KAAMH,EAAa,SAAUG,EAAQ,OAAO,CAAC,EAAE,SAAU,aAAgC,CAC5F,EACA,cAAef,EACf,gBAAAjB,CACF,GACA,gBAAAK,CACF,EACA,CAAC,OAAAjC,EAAQ,QAAS,CAAC,GAAI,GAAI,EAAE,CAAC,CAAC,CACrC,EAEaP,GAAY,CAACmE,EAAyB3D,IAAqC,CACtF,IAAMuD,EAASrE,GAAwByE,EAAQ,OAAQ3D,CAAU,EAE3D,CAACqC,EAAGuB,EAAGN,CAAC,EAAI/D,GAAQoE,EAASJ,CAAM,EAEzC,OAAOjE,GACHqE,EAAStB,EAAGuB,EAAGN,EAAGK,EAAQ,OAAO,CAAC,EAAG,OAAW,OAAW,OAAWA,EAAQ,OAAO,CAAC,EAAGJ,EAAQvD,CAAU,CACjH,IClsBA,IAsBMkF,GAkCAC,GAgFOC,GAGAC,GA3IbC,GAAAC,GAAA,kBAGAC,KAEAC,KAEAC,KACAC,KAGAC,KAWMV,GAAiB,CAACW,EAA+BC,IAA0C,CAC/F,GAAI,CAACD,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,sCAAsC,EAGxD,IAAME,EAAkB,CAACC,EAA2BC,EAA6BC,IAAoB,CACnG,IAAMC,EAAIF,EAAS,OACnB,GAAIE,IAAMH,EAAO,OACf,MAAM,IAAI,MAAM,GAAGE,CAAO,uBAAuBC,CAAC,EAAE,EAEtDF,EAAS,QAAQ,CAACG,EAAGC,IAAM,CACzB,GAAID,IAAMJ,EAAOK,CAAC,EAChB,MAAM,IAAI,MAAM,GAAGH,CAAO,SAASG,CAAC,gBAAgB,CAExD,CAAC,CACH,EAEA,GAAIR,EAAO,CAAC,EAAE,KAAK,OAAS,EAAG,CAC7B,IAAMS,EAAQR,EAAW,SAAW,OAC/BA,EAAW,QAAUD,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,EACvBA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,EAAE,OAAOA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,CAAC,EACxGA,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGC,EAAW,QAAU,EAAI,MAAS,EAC9DC,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,qBAAqB,EAC5DP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,iBAAiB,EACxDP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,oBAAoB,EAC3DP,EAAgBF,EAAO,CAAC,EAAE,KAAMS,EAAO,mBAAmB,CAC5D,MACEP,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,qBAAqB,EAC1DE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,iBAAiB,EACtDE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,oBAAoB,EACzDE,EAAgBF,EAAO,CAAC,EAAE,KAAM,CAAC,CAAC,EAAG,mBAAmB,CAE5D,EAEMV,GACF,CAACU,EAA+BC,IAAiD,CAC/E,GAAM,CAAC,QAAAS,EAAS,QAAAC,EAAS,OAAAC,CAAM,EAAIX,EAC7BY,EAASb,EAAO,CAAC,EAAE,KACnBc,EAAaH,EAAUI,GAAiBF,EAAOA,EAAO,OAAS,CAAC,CAAC,EAAI,EACrEG,EAAcJ,IAAW,QAAUC,EAAO,OAAS,EAAIC,EAAa,EACpEG,EAAaC,EAAU,KAAKL,CAAM,EAAIC,EAEtCK,EAAoBR,EACpBS,EAAcD,EAAoBN,EAAO,OAASA,EAClDQ,EAAIC,EAAc,IAAKtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMc,CAAU,EACrES,EAAQD,EAAc,QAAStB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EAC9EQ,EAAOF,EAAc,OAAQtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EAC5ES,EAAYH,EAAc,YAAatB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EACtFU,EAAWJ,EAAc,WAAYtB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAW,EACpFW,EAAIC,GAAe,IAAK5B,EAAO,CAAC,EAAE,SAAUoB,EAAaN,CAAU,EAGnEe,EAAc,IAAc,CAChC,IAAIC,EAAU,GACd,GAAInB,EAC
FmB,EAAU,iBACNjB,EAAO,SAAW,EAAM,KACpBD,IAAW,OAAS,iBAAiBC,EAAO,OAAS,CAAC,OAAOC,CAAU,GACnD,kBAAkB,YAE1CF,IAAW,OACbkB,EAAU;AAAA,cACRH,EAAE,WAAW,gBAAiB,IAAK,GAAG,CAAC;AAAA,4BACzBA,EAAE,gBAAgB,eAAe,CAAC,QAC7C,CAELG,EAAU,kBAAkBP,EAAM,KAAK,OAAO;AAAA,qDACLV,EAAO,OAAS,CAAC,KAE1D,QAASL,EAAI,EAAGA,EAAIe,EAAM,KAAMf,IAC9BsB,GAAW,YAAYtB,CAAC,qBAAqBA,CAAC,KAEhDsB,GAAW,iBAAiBP,EAAM,gBAAgB,UAAU,CAAC,GAC/D,CAEF,OAAOO,CACT,EACMC,EAAgCC,GAAyB;AAAA,oBACjDtB,CAAO;AAAA,IACvBsB,EAAO,gBAAgB,aAAc,KAAK,EAAE,iBAAiBX,EAAGE,EAAOC,EAAMC,EAAWC,EAAUC,CAAC,CAAC;AAAA,IACpGK,EAAO,UAAU,CAAC;AAAA,IAClBA,EAAO,sCAAsC,qBAAqB,CAAC;AAAA,0BAC7CL,EAAE,gBAAgB,gBAAgBb,CAAU,EAAE,CAAC;AAAA,MACnEe,EAAY,CAAC;AAAA,kBACDN,EAAM,YAAY,SAAS,CAAC;AAAA,iBAC7BC,EAAK,YAAY,SAAS,CAAC;AAAA,sBACtBC,EAAU,YAAY,SAAS,CAAC;AAAA,qBACjCC,EAAS,YAAY,SAAS,CAAC;AAAA,cACtCL,EAAE,YAAY,YAAY,CAAC;AAAA;AAAA,MAEnCM,EAAE,YAAY,aAAc,OAAO,CAAC;AAAA,KAEpC,MAAO,CACL,KAAM,qBACN,YAAa,CACX,KAAM,GAAG1B,EAAW,OAAO,IAAIA,EAAW,MAAM,IAAIU,CAAO,IAAIG,CAAU,GACzE,kBAAmBK,EAAoB,CAAC,OAAQ,OAAQ,OAAQ,OAAQ,MAAM,EAAI,MACpF,EACA,gBAAiBY,EACjB,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM/B,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,cAAe,CAAC,EAAG,KAAK,KAAKiB,EAAa,EAAuB,CAAC,EAClE,gBAAiBE,EACb,CACE,CAAC,QAAuB,KAAMF,CAAU,EACxC,GAAGgB,GAA2BpB,CAAM,CACtC,EACA,CACE,CAAC,QAAuB,KAAMI,CAAU,CAC1C,CACN,EACF,CACF,EAES1B,GAA4BU,GACrCiC,GAA4BjC,CAAoE,EAEvFT,GAAY,CAAC2C,EAAyBlC,IAA8C,CAC/F,GAAM,CAAC,OAAAD,EAAQ,YAAAoC,CAAW,EAAID,EACxBE,EAAoB9C,GAAyB,CAAC,GAAGU,EAAY,YAAAmC,CAAW,CAAC,EAI/E,GAHIE,GAAI,OAAO,sBACbjD,GAAeW,EAAQqC,CAAiB,EAEtCpC,EAAW,aACb,MAAM,IAAI,MAAM,uDAAuD,EAEvEkC,EAAQ,QAAQ7C,GAAoCU,EAAQqC,CAAiB,CAAC,CAElF,ICtJA,IASME,GAkBAC,GAkCOC,GA7DbC,GAAAC,GAAA,kBAIAC,KAGAC,KAEMN,GAAkBO,GAAwC,CAC9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,gCAAgC,EAGlD,GAAI,CAAC,CAAC,IAAK,IAAK,IAAI,EAAE,SAASA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAC9C,MAAM,IAAI,MAAM,+CAA+C,EAGjE,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,GAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,MAAM,IAAI,MAAM,mDAAmD,CAEvE,EAEMN,GAA4BM,GAA+C,CAC/E,IAAMC,EAAcD,EAAO,CAAC,EAAE,KAExBE,EAAWF,EAAO,CAAC,EAAE,KAAK,CAAC,EAE3BG,EAAaC,EAAU,KAAKH,CAAW,EAAI,EAE3CI,EAAWL,EAAO,CAAC,EAAE,SACrBM,EAAQC,EAAc,QAASF,EAAUJ,EAAa,CAAC,EACvDO,EAAOD,EAAc,OAAQF,EAAU,CAACH,CAAQ,EAAG,CAAC,EACpDO,EAAWF,EAAc,WAAYF,EAAUJ,EAAa,CAAC,EAC7DS,EAASC,GAAe,SAAUN,EAAUJ,EAAa,CAAC,EAahE,MAAO,CACL,KAAM,UACN,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,CACpE,GACA,gBAjBuBS,GAA+B;AAAA,qBACrCV,CAAQ;AAAA,IACzBU,EAAa,iBAAiBN,EAAOE,EAAMC,EAAUC,CAAM,CAAC;AAAA;AAAA,IAE5DE,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsCT,CAAU,CAAC;AAAA,kBAClDG,EAAM,YAAY,YAAY,CAAC;AAAA,UACvCE,EAAK,YAAY,uBAAuB,CAAC,MAAMC,EAAS,YAAY,YAAY,CAAC;AAAA,MACrFC,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAU7C,CACF,EAEaf,GAAWkB,GAAkC,CACxDpB,GAAeoB,EAAQ,MAAM,EAC7BA,EAAQ,QAAQnB,GAAyBmB,EAAQ,MAAM,CAAC,CAC1D,IChEA,IAeMC,GA4BAC,GAiBOC,GAIAC,GAIAC,GAIAC,GAIAC,GAIAC,GAGAC,GASAC,GAIAC,GA8BPC,GAMOC,GAaAC,GAIAC,GAIAC,GAQAC,GAGAC,GAgBAC,GAcAC,GAKAC,GAIAC,GAIAC,GAMAC,GAOAC,GAIAC,GAIAC,GAIAC,GAMAC,GASAC,GAMAC,GASAC,GAIAC,GAIAC,GAIAC,GAIAC,GAEAC,GAKAC,GAUAC,GAGAC,GAOAC,GAQAC,GAzTbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMM/C,GACF,CAACgD,EAA4BC,EAAkBC,EAAuBC,EACrEC,EAAmCC,IAA8C,CAChF,IAAMC,EAAU,KAAK,KAAKL,EAAW,CAAC,EAElCM,EAAa,GACb,OAAOH,GAAa,SACtBG,EAAa,GAAGH,CAAQ,MAExBG,EAAaH,EAAS,GAAG,EAG3B,IAAMI,EAAQC,EAAc,YAAaP,EAAe,CAACI,CAAO,EAAG,CAAC,EAC9DI,EAASC,GAAe,aAAcR,EAAgB,CAACG,CAAO,EAAG,CAAC,EAExE,MAAO;AAAA,QACLN,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBQ,
EAAOE,CAAM,CAAC;AAAA;AAAA,IAEnFL,GAA4B,EAAE;AAAA;AAAA,IAE9BL,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA;AAAA,cAE/DQ,EAAM,YAAY,YAAY,CAAC;AAAA,MACvCE,EAAO,YAAY,aAAcH,CAAU,CAAC;AAAA,IAE9C,EAEEtD,GACF,CAACuD,EAAmBI,EAAcR,EAAmCC,EACpEQ,EAAmBV,EAAyBK,EAAM,YAA2B,CAC5E,KAAAI,EACA,YAAa,CAAC,KAAMC,EAAU,kBAAmB,CAAC,MAAM,CAAC,EACzD,gBAAiBb,GAAgBhD,GAC7BgD,EAAcc,EAAU,KAAKN,EAAM,IAAI,EAAGA,EAAM,SAAUL,EAAgBC,EAAUC,CAAwB,EAChH,WAAaU,IAAkB,CAC7B,QAAS,CAAC,CAAC,KAAMP,EAAM,KAAM,SAAUL,CAAc,CAAC,EACtD,cACI,CAAC,EAAG,KAAK,KAAKW,EAAU,KAAKC,EAAa,CAAC,EAAE,IAAI,EAAI,GAA0B,CAAgB,CAAC,EACpG,gBAAiB,CACf,CAAC,QAAuB,KAAM,KAAK,KAAKD,EAAU,KAAKN,EAAM,IAAI,EAAI,CAAC,CAAC,CACzE,CACF,EACF,GAEStD,GAAO8D,GAAkC,CACpDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEa7D,GAAQ6D,GAAkC,CACrDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEa5D,GAAS4D,GAAkC,CACtDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa3D,GAAQ2D,GAAkC,CACrDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEa1D,GAAS0D,GAAkC,CACtDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEazD,GAAQyD,GAAkC,CACrDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EACaxD,GAASwD,GAAkC,CACtDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAOavD,GAAuBwD,GAChCC,GAA4BD,CAA0B,EAG7CvD,GAAO,CAACsD,EAAyBC,IAAqC,CACjF,IAAIE,EACJ,OAAQF,EAAW,GAAI,CACrB,QACEE,EAAO,YACP,MACF,OACEA,EAAO,YACP,MACF,QACEA,EAAO,YACP,MACF,OACEA,EAAO,YACP,MACF,OACEA,EAAO,aACP,MACF,QACE,MAAM,IAAI,WAAW,0EAA0EF,EAAW,EAAE,EAAE,CAClH,CACAD,EAAQ,QACJ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,OAAQG,EAAM,OAAWF,EAAW,SAAUA,EAAW,EAAE,CAAC,CAClH,EAOMtD,GAAoCyD,GAAkD,CAC1F,IAAMC,EAAOD,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,OAAS,EAAKA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAIE,GACtFC,EAAOH,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,OAAS,EAAKA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAII,GAC5F,OAAON,GAA4B,CAAC,IAAAG,EAAK,IAAAE,CAAG,CAAC,CAC/C,EAEa3D,GAAO,CAACoD,EAAyBS,IAAyC,CACrF,IAAMR,EAAaD,EAAQ,OAAO,SAAW,EAAIS,EAAiB9D,GAAiCqD,EAAQ,MAAM,EAC3GU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QACJ/D,GACI+D,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,SAASA,CAAC,0BAA2B;AAAA,4BACnDF,CAAQ,YAAYA,CAAQ,IAAIT,EAAW,GAAG;AAAA,4BAC9CS,CAAQ,YAAYA,CAAQ,IAAIT,EAAW,GAAG;AAAA,EAEhEA,EAAW,QAAQ,EACvB,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACnB,EAEapD,GAAQmD,GAAkC,CACrDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEalD,GAAOkD,GAAkC,CACpDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEajD,GAAQiD,GAAkC,CACrDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAMahD,GAAwBiD,GACjCC,GAA4BD,CAA6B,EAEhDhD,GAAM,CAAC+C,EAAyBC,IAAsC,CACjF,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQ/D,GACZ+D,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,YAAYA,CAAC,IAAK;AAAA,uBAChCF,CAAQ,IAAIT,EAAW,KAAK;AAAA;AAAA,kBAEjCS,CAAQ,QAAQA,CAAQ;AAAA;AAAA;AAAA;AAAA,wBAIlBA,CAAQ,cAAcA,CAAQ;AAAA;AAAA,KAGhDT,EAAW,QAAQ,CAAC,CAC1B,EAEa/C,GAAU,CAAC2D,EAAU,QAAU;AAAA,YAChCA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA,YACPA,CAAO;AAAA;AAAA,sBAEGA,CAAO,cAAcA,CAAO;AAAA;AAAA;AAAA;AAAA,GAMrC1D,GAAO6C,GAAkC,CACpD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,YAAYA,CAAC,IAAK1D,GAAQwD,CAAQ,CAAC,CAAC,CAClH,EAEatD,GAAO4C,GAAkC,CACpDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEa3C,GAAS2C,GAAkC,CACtDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,QAAS,OAAO,CAAC,CACnF,EAEa1C,GAAQ0C,GAAkC,CACrD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQ/D,GACZ+D,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,SAASA,C
AAC,sBAAsBA,CAAC,0BAA2B1D,GAAQwD,CAAQ,CAAC,CAAC,CACpH,EAEanD,GAAY,CAACyC,EAAyBC,IAAsC,CACvF,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQ/D,GACZ+D,EAAQ,OAAO,CAAC,EAAG,YAAaY,GAAK,8BAA8BA,CAAC,KAAKA,CAAC,KAAKA,CAAC,YAAYF,CAAQ,UACpG,6BAA6BA,CAAQ,IAAIT,EAAW,KAAK,KAAMA,EAAW,QAAQ,CAAC,CACzF,EAEazC,GAAOwC,GAAkC,CACpDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,IAAIA,CAAC,EAAE,CAAC,CACtF,EAEanD,GAAOuC,GAAkC,CACpDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,MAAOY,GAAK,IAAIA,CAAC,EAAE,CAAC,CACtF,EAEalD,GAAcsC,GAAkC,CAC3DA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,aAAcY,GAAK,OAAOA,CAAC,EAAE,CAAC,CAChG,EAEajD,GAAQqC,GAAkC,CACrD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQ/D,GACZ+D,EAAQ,OAAO,CAAC,EAAG,OAAQY,GAAK,eAAeF,CAAQ,WAAWE,CAAC,KAAKA,CAAC,WAAWF,CAAQ,SAAS,CAAC,CAC5G,EAEa9C,GAAWoC,GAAkC,CACxDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,UAAWY,GAAK,sBAAsBA,CAAC,KAAK,CAAC,CAC/G,EAOa/C,GAA8BoC,GACvCC,GAA4BD,CAG3B,EAEQnC,GAAc,CAACkC,EAAyBC,IAA4C,CAC/F,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQ/D,GACZ+D,EAAQ,OAAO,CAAC,EAAG,cACnBY,GAAK,YAAYF,CAAQ,oBAAoBA,CAAQ,WAAWT,EAAW,KAAK,MAAMW,CAAC,WAAWF,CAAQ,KACtGT,EAAW,IAAI,MACnB,OAAWA,EAAW,QAAQ,CAAC,CACrC,EAEalC,GAAOiC,GAAkC,CACpDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEahC,GAAQgC,GAAkC,CACrDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEa/B,GAAQ+B,GAAkC,CACrDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,OAAQ,MAAM,CAAC,CACjF,EAEa9B,GAAO8B,GAAkC,CACpDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,EAEa7B,GAAkByC,GAAc,QAAQA,CAAC,yBAAyBA,CAAC,2BAA2BA,CAAC,MAE/FxC,GAAQ4B,GAAkC,CAErDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,OAAQ7B,EAAc,CAAC,CACzF,EAEaE,GAAe,CAACwC,EAAU,QAAU;AAAA,qBAC5BA,CAAO;AAAA,qBACPA,CAAO;AAAA,qBACPA,CAAO;AAAA;AAAA,oBAERA,CAAO,cAAcA,CAAO;AAAA,WACrC1C,GAAe,GAAG,CAAC;AAAA;AAAA,EAIjBG,GAAsBwC,GAC/B,uCAAuCA,CAAC,qBAAqBA,CAAC,MAAMA,CAAC,uBAAuBA,CAAC,GAEpFvC,GAAYyB,GAAkC,CACzD,IAAMU,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrEA,EAAQ,QAAQ/D,GACZ+D,EAAQ,OAAO,CAAC,EAAG,WAAY1B,GAAoBD,GAAaqC,CAAQ,EAAG,OAC3EV,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAAC,CACjC,EAEaxB,GAAkB,CAACwB,EAAyBC,IAAwC,CAC/F,IAAMS,EAAWC,GAA0BX,EAAQ,OAAO,CAAC,EAAE,QAAQ,EACrE,OAAAA,EAAQ,QAAQ/D,GACZ+D,EAAQ,OAAO,CAAC,EAAG,kBAAmBY,GAAK,eAAeF,CAAQ,WAAWE,CAAC,KAAKA,CAAC,8BACpF,wCAAwCF,CAAQ,KAAKT,EAAW,KAAK,KAAMA,EAAW,QAAQ,CAAC,EAC5F,CACT,EAEaxB,GAAOuB,GAAkC,CACpDA,EAAQ,QAAQ/D,GAA6B+D,EAAQ,OAAO,CAAC,EAAG,MAAO,KAAK,CAAC,CAC/E,IC3TA,IAUMe,GAkBAC,GAyCOC,GArEbC,GAAAC,GAAA,kBAIAC,KAGAC,KACAC,KAEMP,GAAkBQ,GAAwC,CAC9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,gCAAgC,EAGlD,GAAI,CAAC,CAAC,KAAM,KAAM,KAAK,EAAE,SAASA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EACjD,MAAM,IAAI,MAAM,4CAA4C,EAG9D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,GAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,MAAM,IAAI,MAAM,mDAAmD,CAEvE,EAEMP,GAAkCO,GAA+C,CACrF,IAAMC,EAAcD,EAAO,CAAC,EAAE,KAAK,MAAM,EACzCC,EAAY,CAAC,EAAIA,EAAY,CAAC,EAAI,EAElC,IAAMC,EAAQC,EAAc,QAASH,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAM,CAAC,EACpEI,EAAOD,EAAc,OAAQH,EAAO,CAAC,EAAE,SAAU,CAACA,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAAG,CAAC,EACvEK,EAASC,GAAe,SAAUN,EAAO,CAAC,EAAE,SAAUC,EAAa,CAAC,EAEpEM,EAAaC,EAAU,KAAKP,CAAW,EAAI,EAC3CQ,EAAWC,GAA4BV,EAAO,CAAC,EAAE,QAAQ,EAsB/D,MAAO,CACL,KAAM,gBACN,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMC,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKO,EAAa,EAAuB,CAAC,CACpE,GACA,gBA1BuBI,GAA+B;AAAA;AAAA,yBAEjCX,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,EAAI,CAAC;AAAA;AAAA,IAE9CW,EAAa,iBAAiBT,EAAOE,EAAMC,CAAM,CAAC;AAAA;AAAA,I
AElDO,GAAQH,CAAQ,CAAC;AAAA;AAAA,IAEjBE,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsCJ,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQ9DF,EAAO,YAAY,aAAc,uBAAuB,CAAC;AAAA,IAU7D,CACF,EAEaX,GAAiBmB,GAAkC,CAC9DrB,GAAeqB,EAAQ,MAAM,EAC7BA,EAAQ,QAAQpB,GAA+BoB,EAAQ,MAAM,CAAC,CAChE,ICxEA,IAiBMC,GAqGAC,GAsEAC,GAQOC,GAIAC,GAIAC,GAMAC,GAIAC,GAsBAC,GAIAC,GAMAC,GAMAC,GAMAC,GAlQbC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KASMjB,GACF,CAACkB,EAA4BC,EAA0BC,EAA0BC,EAChFC,EAAoBC,EAAsBC,EAAsCC,EAChFC,EAAeC,EAAeC,EAAoBC,IAAsC,CACvF,IAAIC,EACAC,EACA,OAAON,GAAa,SACtBK,EAAmBC,EAAmB,CAACC,EAAGC,IAAM,GAAGR,CAAQ,KAAKO,CAAC,MAAMC,CAAC,KAC/D,OAAOR,GAAa,WAC7BK,EAAmBC,EAAmBN,GAEtCK,EAAmBL,EAAS,OAC5BM,EAAmBN,EAAS,QAG9B,IAAMS,EAASC,GAAe,aAAcP,EAAYP,EAAW,OAAQ,CAAC,EACtEW,EAAII,EAAc,QAASV,EAAOP,EAAM,OAAQ,CAAC,EACjDc,EAAIG,EAAc,QAAST,EAAOP,EAAM,OAAQ,CAAC,EAEnDiB,EACJ,GAAIf,EACF,GAAIC,EAAa,CACf,IAAMe,EAAgBC,EAAU,KAAKpB,CAAK,IAAM,EAC1CqB,EAAgBD,EAAU,KAAKnB,CAAK,IAAM,EAC1CqB,EAAuBtB,EAAM,OAAS,GAAKA,EAAMA,EAAM,OAAS,CAAC,EAAI,IAAM,EAC3EuB,EAAuBtB,EAAM,OAAS,GAAKA,EAAMA,EAAM,OAAS,CAAC,EAAI,IAAM,EAC7EkB,GAAiBE,EACnBH,EAAaH,EAAO,YAChB,aACAH,EACIO,EAAgB,GAAGN,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,GAAG,CAAC,MAAQA,EAAE,YAAY,YAAY,EACvFQ,EAAgB,GAAGP,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,GAAG,CAAC,MAAQA,EAAE,YAAY,YAAY,CAAC,CAAC,EAEjGI,EAAa;AAAA,kCACSH,EAAO,gBAAgB,iBAAiB,CAAC;AAAA,4BAC/CF,EAAE,2BAA2B,gBAAiBE,CAAM,CAAC;AAAA,4BACrDD,EAAE,2BAA2B,gBAAiBC,CAAM,CAAC;AAAA,cAEjEA,EAAO,YACH,aACAH,EACIP,GAA+BiB,EAC3BT,EAAE,YAAY,cAAc,EAC5B,GAAGA,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,cAAc,CAAC,kBACpDR,GAA+BkB,EAC3BT,EAAE,YAAY,cAAc,EAC5B,GAAGA,EAAE,KAAK,KAAK,IAAIA,EAAE,YAAY,cAAc,CAAC,iBAAiB,CAAC,CAAC;AAAA,WAGvF,MACEI,EAAaH,EAAO,YAChB,aAAcH,EAAiBC,EAAE,YAAY,YAAY,EAAGC,EAAE,YAAY,YAAY,CAAC,CAAC,MAEzF,CACL,GAAI,CAACV,EACH,MAAM,IAAI,MAAM,sFAAsF,EAGxG,IAAMoB,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO,CACrE,IAAMC,EAAc,eAAeF,CAAC,eAAeA,CAAC,IAC9CG,GAAc,eAAeH,CAAC,eAAeA,CAAC,IACpD,MAAO;AAAA,+BACcA,CAAC,MAAMX,EAAO,gBAAgB,qBAAqBW,CAAC,GAAG,CAAC;AAAA,yBAC9DA,CAAC,MAAMb,EAAE,2BAA2B,gBAAgBa,CAAC,GAAIX,CAAM,CAAC;AAAA,yBAChEW,CAAC,MAAMZ,EAAE,2BAA2B,gBAAgBY,CAAC,GAAIX,CAAM,CAAC;AAAA,wBACjEW,CAAC,aAAaA,CAAC;AAAA,wBACfA,CAAC,aAAaA,CAAC;AAAA,4BACXA,CAAC,aAAaA,CAAC;AAAA,4BACfA,CAAC,aAAaA,CAAC;AAAA,cAC7BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIhB,EAAiBiB,EAAaC,EAAW,CAAC;AAAA,WAE9E,EACIpB,IAAe,EACjBS,EAAa;AAAA;AAAA,cAETM,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,uGAGtCN,EAAa;AAAA,cACTM,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,cAC7CA,EAAiB,yBAA0B,CAAC,CAAC;AAAA,WAGrD,CAEA,MAAO;AAAA,UACHzB,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBc,EAAGC,EAAGC,CAAM,CAAC;AAAA;AAAA,UAE9EL,GAA4B,EAAE;AAAA;AAAA,UAE9BX,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,UACvEmB,CAAU;AAAA,QAEhB,EAEEpC,GACF,CAACgD,EAAcC,EAAkBlB,EAAeC,EAAeR,EAC9DI,EAAmCsB,EAAyBnB,EAAE,WAA0B,CACvF,IAAMoB,EAAc,CAACb,EAAU,SAASP,EAAE,KAAMC,EAAE,IAAI,EAClDoB,EAAcrB,EAAE,KAChBsB,EAAaf,EAAU,KAAKP,EAAE,IAAI,EAElCV,EAAY,GACZE,EAA8B,GAG5B+B,EAAc,CAACH,CAAW,EAChC,GAAIA,EAAa,CACf,IAAMI,EAAkBC,GAAc,UAAUzB,EAAE,KAAMC,EAAE,KAAM,EAAK,EACrE,GAAI,CAACuB,EACH,MAAM,IAAI,MAAM,8CAA+C,EAEjEH,EAAcG,EACdF,EAAaf,EAAU,KAAKc,CAAW,EACvC,IAAMf,EAAgBC,EAAU,KAAKP,EAAE,IAAI,IAAM,EAC3CQ,EAAgBD,EAAU,KAAKN,EAAE,IAAI,IAAM,EAC3CQ,EAAuBT,EAAE,KAAK,OAAS,GAAKA,EAAE,KAAKA,EAAE,KAAK,OAAS,CAAC,EAAI,IAAM,EAC9EU,EAAuBT,EAAE,KAAK,OAAS,GAAKA,EAAE,KAAKA,EAAE,KAAK,OAAS,CAAC,EAAI,IAAM,EACpFsB,EAAY,KAAKjB,CAAa,EAC9BiB,EAAY,KAAKf,CAAa,EAC9Be,EAAY,KAAKd,CAAoB,EACrCc,EAA
Y,KAAKb,CAAoB,EAErC,IAAIgB,EAAkB,EACtB,QAASC,EAAI,EAAGA,EAAIN,EAAY,OAAQM,IAAK,CAC3C,IAAMC,EAAO5B,EAAE,KAAKA,EAAE,KAAK,OAAS2B,CAAC,GAAK,EACpCE,EAAO5B,EAAE,KAAKA,EAAE,KAAK,OAAS0B,CAAC,GAAK,EAC1C,GAAIC,IAASC,EACXH,GAAmBE,MAEnB,MAEJ,CACIF,EAAkB,IAAM,GAC1BlC,EAA8B,GAC9BF,EAAY,KACHgB,GAAiBE,GAAiBC,GAAwBC,KACnEpB,EAAY,GAEhB,MAEEA,EAAY,GAEd,OAAAiC,EAAY,KAAKjC,CAAS,EAEnB,CACL,KAAA2B,EACA,YAAa,CACX,KAAMC,EAAWK,EAAY,IAAKV,GAAMA,EAAE,SAAS,CAAC,EAAE,KAAK,GAAG,EAC9D,kBAAmB,CAAC,OAAQ,MAAM,CACpC,EACA,gBAAkB3B,GAAiBlB,GAC/BkB,EAAcc,EAAE,KAAMC,EAAE,KAAMoB,EAAa/B,EAAW8B,EAAa5B,EAA6BC,EAChGO,EAAE,SAAUC,EAAE,SAAUkB,EAAgBtB,CAAwB,EACpE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMwB,EAAa,SAAUF,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,GAA0B,CAAsB,CAAC,EAC3F,gBAAiB,CACf,CAAC,QAAuB,KAAM,KAAK,KAAKf,EAAU,KAAKc,CAAW,EAAI,CAAC,CAAC,EACxE,GAAGS,GAA2B9B,EAAE,KAAMC,EAAE,KAAMoB,CAAW,CAC3D,CACF,EACF,CACF,EAEEnD,GACF,CAAC6D,EAAyBd,EAAcxB,EAA8BI,EACrEqB,EAAmBC,IAAkC,CACpDY,EAAQ,QAAQ9D,GACZgD,EAAMC,GAAY,GAAIa,EAAQ,OAAO,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAGtC,EAAUI,EACtEsB,CAAc,CAAC,CACrB,EAEShD,GAAO4D,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa7B,GAAO2D,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa5B,GAAS0D,GAAkC,CACtD7D,GACI6D,EAAS,QAAU,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EAAI,OACtG,QAAwB,CAC9B,EAEa3B,GAAOyD,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEa1B,GAAOwD,GAAkC,CACpD,IAAMC,EAAO5B,EAAc,QAAS2B,EAAQ,OAAO,CAAC,EAAE,SAAUA,EAAQ,OAAO,CAAC,EAAE,IAAI,EAAE,KAAK,MAE7F7D,GACI6D,EAAS,MAAQ,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,cAAcD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,qBAAqBD,CAAC,IAAIC,CAAC,GAAG,EAC7G;AAAA,wBACkB+B,CAAI,SAASA,CAAI,QAAQA,CAAI;AAAA,iBACpCA,CAAI;AAAA,iBACJA,CAAI;AAAA,uBACEA,CAAI;AAAA,iBACVA,CAAI;AAAA;AAAA,+BAEUA,CAAI,6BAA6BA,CAAI,qBAAqBA,CAAI,IAV1EA,IAAS,MAAQ,QAAU,EAW5B;AAAA;AAAA,oCAEkBA,CAAI,eAAeA,CAAI,cAAcA,CAAI;AAAA;AAAA,oBAEzDA,CAAI;AAAA;AAAA,OAEjB,CACP,EAEaxD,GAAOuD,GAAkC,CACpD7D,GAAY6D,EAAS,MAAO,CAAC/B,EAAGC,IAAM,GAAGD,CAAC,IAAIC,CAAC,EAAE,CACnD,EAEaxB,GAAWsD,GAAkC,CACxD7D,GACI6D,EAAS,UAAY,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,IAAIC,CAAC,GAAG,EAAI,OACtG,QAAwB,CAC9B,EAEavB,GAAQqD,GAAkC,CACrD7D,GACI6D,EAAS,OAAS,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,IAAIC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,IAAIC,CAAC,GAAG,EAAI,OACnG,QAAwB,CAC9B,EAEatB,GAAkBoD,GAAkC,CAC/D7D,GACI6D,EAAS,iBAAmB,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EAC3G,OAAW,QAAwB,CACzC,EAEarB,GAAemD,GAAkC,CAC5D7D,GACI6D,EAAS,cAAgB,CAAC,OAAQ,CAAC/B,EAAGC,IAAM,OAAOD,CAAC,KAAKC,CAAC,IAAK,OAAQ,CAACD,EAAGC,IAAM,aAAaD,CAAC,KAAKC,CAAC,GAAG,EACxG,OAAW,QAAwB,CACzC,ICtQA,IAiBagC,GAuBAC,GAaAC,GAUAC,GA/DbC,GAAAC,GAAA,kBAGAC,KACAC,KAaaP,GACT,CAACQ,EAA0CC,EAAmBC,EAAW,QAAkB,CACzF,OAAQF,EAAW,WAAY,CAC7B,IAAK,OACH,MAAO,sBAAsBC,CAAS,UACxC,IAAK,UACH,MAAO,YAAYA,CAAS,YAAYA,CAAS,yBACnD,IAAK,OACH,MAAO,wBAAwBA,CAAS,IAAIC,CAAQ,yBAAyBD,CAAS,IAClFC,CAAQ,yBACd,IAAK,cACH,MAAO,eAAeD,CAAS,cAAcA,CAAS,UAAUC,CAAQ,8BACpEA,CAAQ,qBACd,IAAK,YACH,MAAO,kBAAkBA,CAAQ,6CAA6CD,CAAS,UACzF,IAAK,GACH,MAAO,GAET,QACE,MAAM,IAAI,MAAM,0BAA0BD,EAAW,UAAU,EAAE,CACrE,CACF,EAESP,GACT,CAACO,EAA0CG,IAAqC,CAC1EH,EAAW,aAAe,OAC5BG,EAAe,KACX,CAAC,OAAsB,KAAMH,EAAW,OAAQ,EAAG,CAAC,OAAsB,KAAMA,EAAW,OAAQ,CAAC,EAC/FA,EAAW,aAAe,cACnCG,EAAe,KACX,CAAC,OAAsB,KAAMH,EAAW,KAAM,EAAG,CAAC,OAAsB,KAAMA,EAAW,IAAK,CAAC,EAC1FA,EAAW,aAAe,aACnCG,EAAe,K
AAK,CAAC,OAAsB,KAAMH,EAAW,KAAM,CAAC,CAEvE,EAESN,GAA2B,CAACM,EAA0CI,IAAgC,CAC7GJ,EAAW,aAAe,OAC5BI,EAAS,KAAK,CAAC,KAAM,WAAY,KAAM,KAAK,EAAG,CAAC,KAAM,WAAY,KAAM,KAAK,CAAC,EACrEJ,EAAW,aAAe,cACnCI,EAAS,KAAK,CAAC,KAAM,QAAS,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,KAAK,CAAC,EAC9DJ,EAAW,aAAe,aACnCI,EAAS,KAAK,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,CAE9C,EAEaT,GACRK,GAAgF,CAC/E,IAAMK,EAAaL,GAAY,YAAwB,GACvD,GAAIK,IAAe,cAAe,CAChC,GAAM,CAACC,EAAOC,CAAI,EAAIP,GAAY,mBAAyC,CAAC,GAAK,EAAG,EACpF,MAAO,CAAC,WAAAK,EAAY,MAAAC,EAAO,KAAAC,CAAI,CACjC,SAAWF,IAAe,OAAQ,CAChC,GAAM,CAACG,EAASC,CAAO,EAAIT,GAAY,mBAAyC,CAACU,GAAUC,EAAQ,EACnG,MAAO,CAAC,WAAAN,EAAY,QAAAI,EAAS,QAAAD,CAAO,CACtC,SAAWH,IAAe,YAAa,CACrC,GAAM,CAACC,CAAK,EAAIN,GAAY,mBAAiC,CAAC,GAAI,EAClE,MAAO,CAAC,WAAAK,EAAY,MAAAC,CAAK,CAC3B,CACA,MAAO,CAAC,WAAAD,CAAU,CACpB,IC7EJ,IAqBaO,GAeAC,GApCbC,GAAAC,GAAA,kBAqBaH,GAAc,CAACI,EAAmBC,IAAqB,CAClE,OAAQD,EAAW,CACjB,IAAK,GACH,OAAOC,EACT,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,IAAK,GACH,MAAO,QAAQA,CAAQ,IACzB,QACE,MAAM,IAAI,MAAM,GAAGD,CAAS,8BAA8B,CAC9D,CACF,EAEaH,GAAeK,GAA6B;AAAA,QACjDA,EAAU,iDAAmD,EAAE;UCrCvE,IAqBaC,GArBbC,GAAAC,GAAA,kBAqBaF,GAAiBG,GAAuB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAO3CA,CAAS,YAAYA,CAAS,YAAYA,CAAS;AAAA;IC5B7D,IA8BMC,GAiBAC,GAyBOC,GAuFPC,GAiBAC,GAKOC,GAiKPC,GA8EOC,GApabC,GAAAC,GAAA,kBAqBAC,KAEAC,KAEAC,KACAC,KAEAC,KAEMd,GAA6B,CAACe,EAAoBC,IAClDD,EACK;AAAA;AAAA;AAAA,wDAG6CC,EAAY,iBAAmB,EAAE;AAAA,UAI9E;AAAA;AAAA;AAAA,gDAGqCA,EAAY,iBAAmB,EAAE;AAAA,UAK3Ef,GAAyB,CAACgB,EAAqBC,IAC/CD,EACK;AAAA;AAAA;AAAA;AAAA,UAIDC,IAAqB,EAAI,GAAK,6DAA6D;AAAA;AAAA;AAAA;AAAA;AAAA,YAKzFA,IAAqB,EAAI,GAAK,2CAA2C;AAAA,WAG1E;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAMCA,IAAqB,EAAI,GAAK,yCAAyC;AAAA,WAKtEhB,GACT,CAACiB,EAAyBC,EAAyCC,EAAO,MAAOL,EAChFC,EAAa,GAAOK,EAAY,GAAIC,EAAS,GAAOC,EAAkB,KAAe,CACpF,IAAMC,EAAaL,EAAc,CAAC,EAAID,EAAc,CAAC,EAC/CO,EAAaN,EAAc,CAAC,EAAID,EAAc,CAAC,EAC/CQ,EAAaV,EAAaQ,EAAaH,EACvCM,EAAaX,EAAaK,EAAYG,EACtCP,EAAmBS,EAAaP,EAAc,CAAC,EAC/CS,EAAgBP,EAAYF,EAAc,CAAC,EAEjD,GAAI,GAAIH,GAAcC,IAAqB,GAAKC,EAAc,CAAC,IAAM,GAC7D,CAACF,IAAeC,IAAqB,GAAKA,IAAqB,KACjES,EAAaP,EAAc,CAAC,IAAM,GAAKE,EAAYF,EAAc,CAAC,IAAM,GAAKD,EAAc,CAAC,IAAM,GACtG,MAAM,IAAI,MAAM,iBAAiBF,CAAU,8BACvCC,CAAgB,yBAAyBC,EAAc,CAAC,CAAC;AAAA,oCACjCD,CAAgB;AAAA,eACrCS,CAAU,yCAAyCP,EAAc,CAAC,CAAC,eACtEE,CAAS,0CAA0CF,EAAc,CAAC,CAAC,kBACnED,EAAc,CAAC,CAAC,aAAa,EAEnC,MAAO;AAAA,yCAC4BD,CAAgB,IAAIG,CAAI,MAAMM,EAAaT,CAAgB,MAAMU,CAAU;AAAA,2CACzEP,CAAI,MAAMK,EAAaP,EAAc,CAAC,CAAC,MAAMG,CAAS;AAAA;AAAA,uBAE1EH,EAAc,CAAC,CAAC;AAAA,uBAChBA,EAAc,CAAC,CAAC;AAAA,2BACZD,CAAgB;AAAA,oBACvBI,CAAS;AAAA;AAAA,2BAEFF,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAUrEG,EAAS,IAAM,iBAAiB;AAAA,IAC5CP,EAAY,sBAAsBA,EAAU,gBAAgB,YAAY,CAAC,IAAM,EAAE;AAAA,8CACvCS,CAAU;AAAA;AAAA,oBAEpCF,EAAS,GAAG,KAAK,KAAKC,EAAkBF,CAAS,CAAC,GAAK,0CAA0C;AAAA,iBACpGC,EAAS,qBAAqBC,CAAe,GAAK,GAAG;AAAA;AAAA,wBAE9CH,CAAI;AAAA;AAAA;AAAA,8BAGEQ,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAM/B7B,GAA2BiB,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,0CAInBa,CAAa;AAAA;AAAA;AAAA,sFAI7Cb,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,YAU/BE,IAAqB,EAAI,GAAK,4DAA4D;AAAA;AAAA,YAE1FjB,GAAuBgB,EAAYC,CAAgB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAU5D,EAEEf,GAAyB,CAACY,EAAoBC,IAC9CD,EACK;AAAA;AAAA;AAAA,yCAG8BC,EAAY,iBAAmB,EAAE;AAAA,cAI/D;AAAA;AAAA;AAAA,iCAGsBA,EAAY,iBAAmB,EAAE;AAAA,cAK5DZ,GAA2Ba,GAC7BA,EAAa,gDAAkD,gDAItDZ,GACT,CAACc,EAAyBC,EAAyCC,EAAO,MAAOL,EAChFC,EAAa,GAAOK,EAAY,GAAIC,EAAS,GAAOC,EAAkB,GACtEM,EAA4B,KAAkB,CAC7C,IAAML,EAAaN,EAAc,CAAC,EAAIC,
EAAc,CAAC,EAC/CM,EAAaP,EAAc,CAAC,EAAIC,EAAc,CAAC,EAC/CO,EAAaV,EAAaQ,EAAaH,EACvCM,EAAaX,EAAaK,EAAYG,EAE5C,GAAI,EAAEG,EAAaR,EAAc,CAAC,IAAM,GAAKO,EAAaP,EAAc,CAAC,IAAM,GACzEE,EAAYF,EAAc,CAAC,IAAM,GACrC,MAAM,IAAI,MAAM,cAAcQ,CAAU,yCACpCR,EAAc,CAAC,CAAC,gBAAgBO,CAAU,yCAC1CP,EAAc,CAAC,CAAC,eAAeE,CAAS,yCAAyCF,EAAc,CAAC,CAAC,EAAE,EAEzG,IAAMW,EAAgBH,EAAaR,EAAc,CAAC,EAC5CY,EAAgBL,EAAaP,EAAc,CAAC,EAC5CS,EAAgBP,EAAYF,EAAc,CAAC,EAC3Ca,EAAgBH,EAClB;AAAA;AAAA;AAAA,gDAGsCL,CAAU;AAAA,gDACVC,CAAU;AAAA;AAAA;AAAA;AAAA;AAAA,iDAKTE,CAAU,2BAA2BR,EAAc,CAAC,CAAC;AAAA,mDACnDO,CAAU,2BAA2BP,EAAc,CAAC,CAAC;AAAA,YAC5FjB,GAAuBc,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,iDAIRM,CAAS,2BAA2BF,EAAc,CAAC,CAAC;AAAA,uDAC9CM,CAAU,2BAA2BN,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA,uCAGrEJ,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAO5CK,CAAI;AAAA;AAAA;AAAA,2DAG2BD,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA,0BAI7DH,EAAa,oCAAoCG,EAAc,CAAC,CAAC,KACpD,iCAAiCA,EAAc,CAAC,CAAC,OAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAUzBA,EAAc,CAAC,CAAC;AAAA;AAAA,4DAEdA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,MAKlE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4CAMkCK,CAAU;AAAA;AAAA,kCAEpBM,CAAa;AAAA,kCACbC,CAAa;AAAA,kCACbH,CAAa;AAAA;AAAA;AAAA;AAAA,sCAITE,CAAa;AAAA,wCACXC,CAAa;AAAA;AAAA;AAAA,QAG7C7B,GAAuBc,EAAYD,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,sCAKfa,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,8BAMrBb,EAAY,iBAAmB,EAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOvCK,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,QAOpBjB,GAAwBa,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,EAkBrC,MAAO;AAAA,yCAC4BI,CAAI,KAAKM,CAAU,MAAMC,CAAU;AAAA,yCACnCP,CAAI,KAAKK,CAAU,MAAMJ,CAAS;AAAA,yBAClDH,EAAc,CAAC,CAAC;AAAA,yBAChBA,EAAc,CAAC,CAAC;AAAA,sBACnBG,CAAS;AAAA;AAAA,2BAEJF,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC,KAAKA,EAAc,CAAC,CAAC;AAAA;AAAA;AAAA;AAAA,kBAInEG,EAAS,IAAM,iBAAiB;AAAA,MAC5CP,EAAY,sBAAsBA,EAAU,gBAAgB,YAAY,CAAC,IAAM,EAAE;AAAA,sBAE7EO,EAAS,GAAG,KAAK,KAAKC,EAAkBF,CAAS,CAAC,GAAK,0CAA0C;AAAA,mBACxFC,EAAS,qBAAqBC,CAAe,GAAK,GAAG;AAAA;AAAA,4BAE5CH,CAAI;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQ1BY,CAAa;AAAA;AAAA,CAGf,EAEE3B,GACF,CAAC4B,EAAmBC,EAAkBC,EAAyBC,EAC9DC,EAAuCC,EAAiB,KAAkB,CACzE,GAAM,CAACC,EAAaC,EAAaC,CAAU,EAAIJ,EACzC,CAACK,EAAeC,EAAWC,EAAWC,CAAc,EAAIT,EACxDU,EAAiBC,GAAiBR,EAAaE,CAAU,EACzDO,EAAiBD,GAAiBP,EAAaC,CAAU,EACzDQ,EAAWC,GAA4Bd,EAAU,CAAC,EAAE,KAAK,MAAM,EAC/De,EAAc,IAAM,CACxB,IAAMC,EAAQT,EAAU,KAClBU,EAAYX,EAAc,KAC5BY,EAAS,iBAAiBX,EAAU,KAAK,OAAO,IACpD,QAASY,EAAIH,EAAQ,EAAI,EAAGI,GAAIH,EAAY,EAAGE,GAAK,EAAGA,IAAKC,KAC1DF,GAAU;AAAA,WAAcC,CAAC,OAAOF,EAAY,EAAI,gBAAgBG,EAAC,IAAM,cAAc,IAEvF,OAAAV,EAAe,QAAQS,GAAK,CAC1BD,GAAU;AAAA,WAAcC,CAAC,QAC3B,CAAC,EACDD,GAAU;AAAA,WAAcF,EAAQ,CAAC;AAAA,8BACXA,EAAQ,CAAC,kBACxBE,CACT,EACMG,EAAc,IAAM,CACxB,IAAMC,EAAQd,EAAU,KAClBS,EAAYX,EAAc,KAC5BY,EAAS,iBAAiBV,EAAU,KAAK,OAAO,IACpD,QAASW,EAAIG,EAAQ,EAAI,EAAGF,GAAIH,EAAY,EAAGE,GAAK,EAAGA,IAAKC,KAC1DF,GAAU;AAAA,WAAcC,CAAC,OAAOF,EAAY,EAAI,gBAAgBG,EAAC,IAAM,cAAc,IAEvF,OAAAR,EAAe,QAAQO,GAAK,CAC1BD,GAAU;AAAA,WAAcC,CAAC,QAC3B,CAAC,EACDD,GAAU;AAAA,WAAcI,EAAQ,CAAC;AAAA,8BACXA,EAAQ,CAAC,kBACxBJ,CACT,EAwCA,MAvCe;AAAA,kEAC6CZ,EAAc,KAAK,OAAO,QAClFiB,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,oBACtBU,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BAC1BhB,CAAS;AAAA;AAAA;AAAA,UAGzBkB,EAAY,CAAC;AAAA,kBACLR,EAAU,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,kEAKcD,EAAc,KAAK,OAAO,QAClFiB,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,oBACtBU,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BAC1BhB,CAAS;AAAA;AAAA;AAAA,UAGzBwB,EAAY,CAAC;AAAA,kBACLb,EAAU,aAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,6DAKSe,GAAY1B,EAAWgB,CAAQ,CAAC;AAAA,0BACnEhB,CAAS;AAAA;AAAA;AAAA;AAAA,UAKzBC,EACI,mBAAmBI,
EAAiB,cAAgB,GAAGqB,GAAY1B,EAAWgB,CAAQ,CAAC,aAAa,IAChE,EAAsC;AAAA,UAC9Ed,CAAe;AAAA,UACfU,EAAe,aAAa,oBAAqB,OAAO,CAAC;AAAA;AAAA;AAAA,KAK/D,EAESvC,GACT,CAACsD,EAA+BC,EAAoDC,EACnFC,EACAzB,EAAiB,KAAyD,CACzE,IAAM0B,EAASJ,EAAO,CAAC,EAAE,KACnBK,EAASL,EAAO,CAAC,EAAE,KACnBM,EAAaF,EAAO,MAAM,EAAG,EAAE,EAC/BG,EAAaF,EAAO,MAAM,EAAG,EAAE,EAC/BG,EAAYL,EAAsBA,EAAoB,MAAM,EAAG,EAAE,EAAID,EAAY,MAAM,EAAG,EAAE,EAC5FO,EAAYC,EAAU,KAAKF,CAAS,EACpCG,EAAYP,EAAOA,EAAO,OAAS,CAAC,EACpCQ,EAAWR,EAAOA,EAAO,OAAS,CAAC,EACnCS,EAAYR,EAAOA,EAAO,OAAS,CAAC,EACpCS,EAASF,EAAW,IAAM,GAAKC,EAAY,IAAM,EAGjDE,EAAoBJ,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDpD,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDyD,EAAW,CACf,KAAK,KAAKH,EAAYtD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKJ,EAAYpD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKN,EAAYlD,EAAc,CAAC,EAAIwD,EAAkB,CAAC,CAAC,CAC/D,EAEME,EAAaH,EAAS,EAAI,EAC1BI,EAAa,CAAC,GAAGZ,EAAYK,EAAWC,EAAWK,CAAU,EAC7DzB,EAAQ0B,EAAW,OACnBC,EAAa,CAAC,GAAGZ,EAAYK,EAAUC,EAAYI,CAAU,EAC7DnB,EAAQqB,EAAW,OACnBC,GAAkB,CAACX,EAAWE,EAAWE,EAAYI,CAAU,EAC/DI,EAAoC,CACxC,CAAC,OAAsB,KAAMV,CAAS,EAAG,CAAC,OAAsB,KAAME,CAAS,EAC/E,CAAC,OAAsB,KAAMD,CAAQ,CACvC,EACAU,GAA6BrB,EAAsBoB,CAAe,EAClEA,EAAgB,KAAK,GAAGE,GAA2Bf,EAAWU,EAAYC,CAAU,CAAC,EACrF,IAAMK,GAAwD,CAAC,OAAQ,MAAM,EAEvElD,GAAU0B,EAAO,OAAS,EAC5B1B,KACF+C,EAAgB,KAAK,GAAGE,GAA2BvB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEwB,GAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,GAA2BH,EAAe,CAAC,EAEnE,IAAMK,GAAmBC,GAA+B,CACtD,IAAMjC,GAAYe,EAAU,OACtBrD,GAAYwE,GAAiB,YAAa3B,EAAO,CAAC,EAAE,SAAUP,GAAW,CAAC,EAC1EJ,GAAWC,GAA4BU,EAAO,CAAC,EAAE,QAAQ,EAEzD4B,GAAIC,EAAc,IAAK7B,EAAO,CAAC,EAAE,SAAUR,EAAOyB,CAAU,EAC5Da,GAAID,EAAc,IAAK7B,EAAO,CAAC,EAAE,SAAUF,EAAOmB,CAAU,EAC5Dc,GAAS9C,GAAe,SAAUe,EAAO,CAAC,EAAE,SAAUoB,GAAgB,OAAQH,CAAU,EACxFe,GAAiB,CAACJ,GAAGE,EAAC,EAC5B,GAAIxD,GAAS,CACX,IAAM2D,GAAiBvD,EAAiBuC,EAAa,EACrDe,GAAe,KAAKH,EAAc,OAAQ7B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQiC,EAAc,CAAC,CACtG,CACA,IAAMC,GACF,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,CAAC,EAC7GC,GAAyBlC,EAAsBiC,EAAQ,EACvD,IAAME,GAAW9C,GAA4ByC,GAAO,KAAK,MAAM,EACzDxD,GAAkB8D,GAAqBpC,EAAsB8B,GAAO,KAAK,MAAOK,EAAQ,EACxFE,GAAmB7F,GACrBwE,EAAY3C,GAASC,GAAiB,CAACpB,GAAWyE,GAAGE,GAAGC,EAAM,EAAG,CAACzB,EAAYC,EAAYC,CAAS,EACnG9B,CAAc,EAClB,MAAO;AAAA,IAEHgD,EAAa,iBAAiBQ,EAAQ,EAAE,0BAA0B/E,EAAS,EAAE,iBACzE,GAAG6E,GAAgBD,EAAM,CAAC;AAAA,IACtCO,EAAgB;AAAA,IAERxB,EAASzE,GAA2B0E,EAAmBxD,EAAe8B,GAAUlC,EAAS,EAChFX,GAAuBuE,EAAmBxD,EAAe8B,GAAUlC,EAAS,CAAC;AAAA,oBAE5F,EACA,MAAO,CACL,KAAM,SACN,YAAa,CACX,KAAM,GAAG4D,CAAiB,IAAId,EAAqB,UAAU,IAAIa,CAAM,IAAIpC,CAAc,GACzF,kBAAA8C,EACF,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMtB,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGgB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAK,CACF,GACA,gBAAAI,EACF,CACF,IC7fJ,IAiCMc,GA4HOC,GA7JbC,GAAAC,GAAA,kBAqBAC,KACAC,KAGAC,KAEAC,KAEAC,KACAC,KACAC,KAEMV,GACF,CAACW,EAAyBC,EAAoBC,EAAoBC,EAAmBC,EAAU,GAC9FC,EAA4BC,EAAoB,EAAGC,EAAoB,EAAGC,EAAmB,EAC7FC,EAAW,QAAkB,CAC5B,IAAMC,EAAeF,IAA6B,CAChD,OAAQA,GAAkB,CACxB,IAAK,GACH,MAAO,uBACT,IAAK,GACH,MAAO,kBAAkBC,CAAQ,8CACnC,IAAK,GACH,MAAO,2BACT,QACE,MAAM,IAAI,MAAM,oBAAoBD,EAAgB,oBAAoB,CAC5E,CACF,EACMG,EAAeH,IAA6B,CAChD,OAAQA,GAAkB,CACxB,IAAK,GACH,MAAO,oDACT,IAAK,GACH,MAAO,wDACT,QACE,MAAM,IAAI,MAAM,oBAAoBA,EAAgB,oBAAoB,CAC5E,CACF,EACMI,EAAgBZ,EAAiB;AAAA;AAAA,MAGA;AAAA;AAAA,MAIjCa,EAAkBb,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQnCc,EAAUd,EAAiB,2BAA6B,2BACxDe,EAASf,EAAiB,2BAA6B,2BACvDgB,EAAMhB,EAAiB,MAAQ,MAC/BiB,EAAMjB,EAAiB,MAAQ,MAC/
BkB,EAAe;AAAA;AAAA,qBAENlB,EAAiB,gCAAkC,+BAA+B;AAAA,mBACpFgB,CAAG;AAAA,mBACHA,CAAG;AAAA;AAAA,iBAELC,CAAG;AAAA,iBACHA,CAAG;AAAA;AAAA;AAAA,gBAGJA,CAAG;AAAA,oBACCE,GAAYb,EAAmBG,CAAQ,CAAC;AAAA;AAAA;AAAA,8BAG9BK,CAAO,2BAA2BC,CAAM;AAAA,QAC9DH,CAAa;AAAA;AAAA,QAEbF,EAAYJ,CAAiB,CAAC;AAAA;AAAA,qBAI1Bc,EAAUpB,EAAkBC,GAAaE,EAAW;AAAA,wBACxCG,CAAiB;AAAA,MACnCY,CAAY,GAC8C;AAAA,wBACxCZ,CAAiB;AAAA;AAAA,QAEjCY,CAAY;AAAA;AAAA,aAEPC,GAAYb,EAAmBG,CAAQ,CAAC,SACbN,GAAYD,EAAY;AAAA,wBACxCI,CAAiB;AAAA,MACnCY,CAAY,GAC8C;AAAA,wBACxCZ,CAAiB;AAAA;AAAA,QAEjCY,CAAY;AAAA;AAAA,aAEPC,GAAYb,EAAmBG,CAAQ,CAAC,SAEzCY,EAAU,GAAGV,EAAYJ,CAAiB,CAAC,GAE3Ce,EAAUH,GAAYX,EAAkBC,CAAQ,EAChDc,EACFvB,EAAiBmB,GAAYb,EAAmBG,CAAQ,EAAIU,GAAYZ,EAAmBE,CAAQ,EACjGe,GACFxB,EAAiBmB,GAAYZ,EAAmBE,CAAQ,EAAIU,GAAYb,EAAmBG,CAAQ,EACjGgB,EAAkBC,GAAqBrB,EAAYiB,EAASb,CAAQ,EAsB1E,MArBiB;AAAA,yDACkCc,CAAK;AAAA,QACtDvB,EAAiBoB,EAAUC,CAAO;AAAA;AAAA;AAAA,yDAGeG,EAAK;AAAA,QACtDxB,EAAiBqB,EAAUD,CAAO;AAAA;AAAA;AAAA,gEAGsBE,CAAO;AAAA,0BAC7Cd,CAAgB;AAAA;AAAA;AAAA;AAAA,uBAInBR,EAAiB,gCAAkC,+BAA+B;AAAA,QACjGa,CAAe;AAAA,QACfc,GAAYvB,CAAO,CAAC;AAAA,QACpBqB,CAAe;AAAA;AAAA;AAAA,MAKnB,EAESnC,GACT,CAACsC,EAA+BvB,EAA4BwB,EAAgCC,EAC3FC,EAAmBC,EAAkBC,EAAkBC,IAAoD,CAC1G,IAAMlC,EAAiBK,EAAW,SAAW,OACvC8B,EAAanC,EAAiB4B,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAClEQ,EAAYP,EAAY,CAAC,EACzBQ,EAAWrC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC1DS,EAAYtC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC3DU,EAAcvC,EAAiB6B,EAAY,CAAC,EAAIA,EAAY,CAAC,EAE7DW,EAASxC,IAAmBmC,EAAa,IAAM,GAAKA,EAAa,IAAM,IAAMI,EAAc,IAAM,EAGjGE,EAAYzC,EAAiBuC,EAAcF,EAAWC,EACtDI,EAAY1C,EAAiBqC,EAAWC,EAAYC,EACpDI,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDC,EAAoBd,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDe,EAAW,CACf,KAAK,KAAKJ,EAAYE,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKF,EAAYC,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKR,EAAYO,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,CAC/D,EAEAE,GAAU,UAAW,IAAM,iCAAiCD,CAAQ,EAAE,EAEtE,IAAMrC,EAAmBgC,EAAUxC,GAAkBmC,EAAa,IAAM,EAAI,EAAI,EAAK,EAC/EY,EAAaJ,EAAc,CAAC,EAAIC,EAAkB,CAAC,EACnDI,EAAaL,EAAc,CAAC,EAAIC,EAAkB,CAAC,EACnDK,GAAY,KAAK,IAAIN,EAAc,CAAC,EAAInC,EAAkBmC,EAAc,CAAC,CAAC,EAC1E1C,EAAY6B,EAAYiB,IAAe,EACvC7C,GAAY6B,EAAYiB,IAAe,EACvC7C,GAAW6B,EAAWiB,KAAc,EACpCC,GAAeV,EAAS,CAAChC,EAAkB,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EAE3D2C,EAAoC,CACxC,CAAC,OAAsB,KAAMrB,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAS,EAC/E,CAAC,OAAsB,KAAMC,CAAQ,EAAG,CAAC,OAAsB,KAAM,CAAC3B,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,EAC7G,CAAC,OAAsB,KAAMA,EAAW,OAAO,EAAG,CAAC,OAAsB,KAAMA,EAAW,SAAS,CACrG,EACA+C,GAA6B/C,EAAY8C,CAAe,EACxDA,EAAgB,KAAK,GAAGE,GAA2BzB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAClF,IAAM0B,GAAwD,CAAC,OAAQ,MAAM,EACzErB,IACFkB,EAAgB,KAAK,GAAGE,GAA2BzB,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE0B,GAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,GAA2BxB,CAAW,CAAC,EAE/D,IAAM0B,GAAmBC,IAA+B,CACtD,IAAMC,GAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EACvG,CAAC,KAAM,MAAO,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,SAAU,KAAM,MAAO,OAAQ,CAAC,EAC9E,CAAC,KAAM,WAAY,KAAM,MAAO,OAAQ,CAAC,CAC3C,EACAC,GAAyBrD,EAAYoD,EAAQ,EAG7C,IAAME,GAAanB,EAAS,EAAI,EAC1BoB,GAAIC,GAA4BjC,EAAO,CAAC,EAAE,QAAQ,EACpDkC,GAAmB;AAAA,qDACsBtB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA,8BAChDpB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA;AAAA,6EAEsBpB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA;AAAA,qCAEjEpB,EAAS,MAAQ,EAAE;AAAA,SAE1CuB,GAAIC,EACN,IAAKpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQpB,IAAqB,EAAI,EAAIA,CAAgB,EAC3FyD,GAAID,EAAc,IAAKpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ+B,EAAU,EAC5EO,GAAiB,CAACH,GAAGE,EAAC,EACtBE,GAASC,GAAe,
SAAUxC,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQ8B,EAAU,EAC1F,GAAI1B,EAAS,CACX,IAAMoC,GAAOL,EAAc,OAAQpC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ+B,EAAU,EACxFO,GAAe,KAAKG,EAAI,EACxBP,IAAoB;AAAA,0DAC4BtB,EAAS,QAAQoB,EAAC,IAAMA,EAAC;AAAA,+BACpD5D,EAAiB,IAAM,GAAG,GAAGwC,EAAS,MAAQ,EAAE;AAAA,UAEvE,CAEA,MAAO;AAAA,UACL8B,GAAc,yBAAyB,CAAC;AAAA;AAAA;AAAA;AAAA,UAIxCd,GAAa,iBAAiBC,EAAQ,EAAE,iBAAiB,GAAGS,GAAgBC,EAAM,CAAC;AAAA,UACnFL,EAAgB;AAAA,UAEdzE,GACIW,EAAgBC,EAAWC,GAAWC,GAAU8B,EAAS5B,EAAY6C,GAAa,CAAC,EAAGA,GAAa,CAAC,EACpGA,GAAa,CAAC,EAAGU,EAAC,CAAC;AAAA,UAEvBpB,EACI+B,GAA2B3B,EAAmBD,EAAeiB,GAAG,OAAW,CAAC5D,EAAgBiD,EAAS,EACrGuB,GACI5B,EAAmBD,EAAeiB,GAAG,OAAW,CAAC5D,EAAgBiD,GAAW,GAAO,OACnFf,CAAyB,CAAC,EACxC,EACA,MAAO,CACL,KAAM,eACN,YAAa,CACX,KAAM,GAAG7B,EAAW,QAAQ,IAAIG,CAAgB,IAAIgC,CAAM,IAAIvC,CAAS,IAAIC,EAAS,IAAIC,EAAQ,IAC5F4C,CAAU,IAAIC,CAAU,IAAIC,EAAS,GACzC,kBAAAK,EACF,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMzB,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGiB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAM,CACF,GACA,gBAAAI,EACF,CACF,IC9QJ,IAgBakB,GAuGAC,GAvHbC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KACAC,KACAC,KAMaR,GACT,CAACS,EAA+BC,EAC/BC,IAAqF,CACpF,IAAMC,EAAUH,EAAO,OAAS,EAC1BI,EAAcD,EAAU,8BAAgC,GACxDE,EAASL,EAAO,CAAC,EAAE,KACnBM,EAASN,EAAO,CAAC,EAAE,KACnBO,EAAyBD,EAAO,CAAC,EAAIL,EAAW,MAEhDO,EAAgBP,EAAW,SAAW,OACtCQ,EAAcC,GAChBL,EAAQC,EAAQL,EAAW,UAAWA,EAAW,KAAMA,EAAW,QAASO,CAAa,EACtFG,EAAaC,EAAU,KAAKH,CAAW,EAEvCI,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAMV,EAAW,SAAS,EAC7F,CAAC,QAAuB,KAAM,CAACA,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,CAAC,EAC5E,CAAC,QAAuB,KAAM,CAACA,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,EACtE,CAAC,QAAuB,KAAMM,CAAsB,CACtD,EACAO,GAA6Bb,EAAYY,CAAe,EACxDA,EAAgB,KAAK,GAAGE,GAA2BV,EAAQC,CAAM,CAAC,EAClE,IAAMU,EAAwD,CAAC,OAAQ,MAAM,EACzEb,IACFU,EAAgB,KAAK,GAAGE,GAA2Bf,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEgB,EAAkB,KAAK,MAAM,GAE/BH,EAAgB,KAAK,GAAGE,GAA2BN,CAAW,CAAC,EAE/D,IAAMQ,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,GAAe,SAAUpB,EAAO,CAAC,EAAE,SAAUS,EAAY,MAAM,EACxEY,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBvB,EAAYkB,EAAO,KAAK,MAAOE,CAAQ,EAC9EI,EAAIC,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUK,EAAO,MAAM,EACxDsB,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUM,EAAO,MAAM,EACxDsB,EAAY,CAACH,EAAGE,CAAC,EACnBxB,GACFyB,EAAU,KAAKF,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,CAAC,EAG9E,IAAM6B,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQ5B,EAAW,UAAU,MAAM,EACxG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ,CAAC,EAChF,CAAC,KAAM,4BAA6B,KAAM,KAAK,CACjD,EACA,OAAA6B,GAAyB7B,EAAY4B,CAAQ,EACtC;AAAA,IACXX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGD,EAAWT,CAAM,CAAC;AAAA;AAAA,IAE9ED,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,0BAEtDC,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,8CAEhBX,EAAgB,EAAI,CAAC;AAAA,yDACVA,EAAgB,EAAI,CAAC,oBAClEA,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA,iBAGhBW,EAAO,KAAK,KAAK,MAAMA,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAMCX,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0DAMrBA,EAAgB,EAAI,CAAC;AAAA;AAAA;AAAA;AAAA,uBAKnEA,EAAgBiB,EAAE,IAAI,QAAS,UAAW,SAAU,eAAe,EACnDA,EAAE,IAAI,QAAS,gBAAiB,UAAW,QAAQ,CAAC;AAAA,uBACzDE,EAAE,IAAI,iBAAkB,aAAc,UAAW,QAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,MAK3EvB,CAAW;AAAA,MACXmB,CAAe;AAAA,MACfJ,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAEzC,EACA,MAAO,CACL,KAAM,cACN,YAAa,CAAC,KAAMlB,EAAW,SAAU,kBAAAe,CAAiB,EAC1D,WAAY,KAAO,CACjB,QAAS,CAAC,CACR,KAAMd,EAA6BA,EAA2BO,CAAW,EAAIA,EAC7E,SAAUT,EAAO,CAAC,EAAE,QACtB,CAAC,EACD,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAI,CACF,CACF,EAESzB,GACT,CAACQ,EA
A+BC,EAA4BQ,IAAgD,CAC1G,IAAMN,EAAUH,EAAO,OAAS,EAC1B+B,EAAaC,GAAiBvB,EAAY,CAAC,CAAC,EAC5CwB,EAAeD,GAAiBvB,EAAY,CAAC,CAAC,EAC9CE,EAAaC,EAAU,KAAKH,CAAW,EAAIsB,EAAaE,EACxD5B,EAAS,CAACL,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI+B,CAAU,EACjGzB,EAAS,CAACN,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI+B,CAAU,EACjGG,EAAsB,CAACzB,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAIsB,CAAU,EAElGlB,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EACxC,CAAC,OAAsB,KAAM,CAACV,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,CAAC,EAC3E,CAAC,OAAsB,KAAM,CAACA,EAAW,KAAK,CAAC,EAAGA,EAAW,KAAK,CAAC,CAAC,CAAC,CACvE,EACAa,GAA6Bb,EAAYY,CAAe,EACxDA,EAAgB,KAAK,GAAGE,GAA2BV,EAAQC,EAAQ4B,CAAmB,CAAC,EACvF,IAAMC,GAAWF,EAAe,GAAKhC,EAAW,QAAQ,CAAC,EAAIK,EAAO,CAAC,EAC/DW,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,GAAe,SAAUpB,EAAO,CAAC,EAAE,SAAUkC,EAAoB,OAAQH,CAAU,EAC5FV,EAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,EAAkBC,GAAqBvB,EAAYkB,EAAO,KAAK,MAAOE,CAAQ,EAC9EI,EAAIC,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUK,EAAO,OAAQ0B,CAAU,EACpEJ,EAAID,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUM,EAAO,OAAQyB,CAAU,EACpEH,EAAY,CAACH,EAAGE,CAAC,EACnBxB,GACFyB,EAAU,KAAKF,EAAc,IAAK1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAM+B,CAAU,CAAC,EAEnF,IAAM3B,EAAcD,EAAU,8BAAgC,GACxD0B,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EACxC,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ,CAAC,CACvC,EACA,OAAAC,GAAyB7B,EAAY4B,CAAQ,EACtC;AAAA,IACXX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGD,EAAWT,CAAM,CAAC;AAAA,IAC9ED,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA,8CAIlCe,CAAY;AAAA,oCACtBA,CAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,wBAOxBR,EAAE,KAAK,KAAK,KAAKU,CAAO;AAAA,wBACxBhB,EAAO,KAAK,KAAK,KAAKc,CAAY;AAAA;AAAA;AAAA,8CAGZ3B,EAAO,CAAC,CAAC;AAAA;AAAA;AAAA,8BAGzB6B,CAAO;AAAA;AAAA;AAAA,0BAGXV,EAAE,IAAI,QAAS,gBAAiB,eAAgB,eAAe,CAAC;AAAA;AAAA,0BAEhEA,EAAE,KAAK,KAAK;AAAA;AAAA;AAAA,gDAGUnB,EAAO,CAAC,CAAC;AAAA,wBACjCqB,EAAE,IAAI,WAAY,UAAW,IAAK,gBAAgB,CAAC;AAAA,iCAC1CM,CAAY;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAOlBA,CAAY;AAAA;AAAA,QAE/B7B,CAAW;AAAA,QACXmB,CAAe;AAAA,QACfJ,EAAO,IAAI,QAAS,MAAO,UAAW,iBAAkB,OAAO,CAAC;AAAA;AAAA,IAGlE,EAEA,MAAO,CACL,KAAM,wBACN,YAAa,CACX,KAAM,GAAGlB,EAAW,QAAQ,IAAI8B,CAAU,IAAIE,CAAY,IAAIE,CAAO,IAAI7B,EAAO,CAAC,CAAC,IAAIA,EAAO,CAAC,CAAC,GAC/F,kBAAmBH,EAAU,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,CACzE,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMM,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAI,CACF,CACF,ICvNJ,IAYamB,GA6IPC,GAUOC,GAnKbC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KACAC,KACAC,KAEaT,GACT,CAACU,EAA+BC,EAAoDC,EACnFC,EACAC,EAAiB,KAAyD,CACzE,IAAMC,EAASL,EAAO,CAAC,EAAE,KACnBM,EAASN,EAAO,CAAC,EAAE,KAEnBO,EAAIF,EAAOA,EAAO,OAAS,CAAC,EAC5BG,EAAIF,EAAOA,EAAO,OAAS,CAAC,EAC5BG,EAAIJ,EAAOA,EAAO,OAAS,CAAC,EAC5BK,EAAaC,GAAiBH,CAAC,EAC/BI,EAAcD,GAAiBF,CAAC,EAChCI,EAAeF,GAAiBJ,CAAC,EACjCO,EAAaC,EAAU,KAAKb,CAAW,EAAIQ,EAAaG,EACxDG,EAAUhB,EAAO,OAAS,EAC1BiB,EAAYd,EAAsBA,EAAoB,MAAM,EAAG,EAAE,EAAID,EAAY,MAAM,EAAG,EAAE,EAE5FgB,EAAsB,CADVH,EAAU,KAAKE,CAAS,EACFV,EAAGC,CAAC,EAEtCW,EAAoC,CACxC,CAAC,QAAuB,KAAML,CAAU,EAAG,CAAC,QAAuB,KAAMP,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAC5G,CAAC,QAAuB,KAAMC,CAAC,CACjC,EACAW,GAA6BnB,EAAsBkB,CAAe,EAClEA,EAAgB,KAAK,GAAGE,GAA2BJ,EAAWZ,EAAQC,CAAM,CAAC,EACzEU,GACFG,EAAgB,KAAK,GAAGE,GAA2BrB,EAAO,CAAC,EAAE,IAAI,CAAC,EAEpEmB,EAAgB,KAAK,GAAGE,GAA2BH,CAAmB,CAAC,EAEvE,IAAMI,EAAmBC,GAA+B,CACtD,IAAMC,EAAYC,GAAiB,aAAczB,EAAO,CAAC,EAAE,SAAUiB,EAAU,MAAM,EAC/ES,EAAIC,E
AAc,IAAK3B,EAAO,CAAC,EAAE,SAAUK,EAAO,OAAQO,CAAW,EACrEgB,GAAID,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUM,EAAO,OAAQI,CAAU,EACpEmB,EAASC,GAAe,SAAU9B,EAAO,CAAC,EAAE,SAAUkB,EAAoB,OAAQR,CAAU,EAC5FqB,GAAWC,GAA4BH,EAAO,KAAK,MAAM,EACzDI,GAAkBC,GAAqBjC,EAAsB4B,EAAO,KAAK,MAAOE,EAAQ,EACxFI,GAAiB,CAACT,EAAGE,EAAC,EACxBQ,EAAc,GAClB,GAAIpB,EAAS,CACX,IAAMqB,GAAiBjC,EAAiBM,EAAa,EACrDyB,GAAe,KAAKR,EAAc,OAAQ3B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQqC,EAAc,CAAC,EACpGD,EAAc,GACVhC,EAAiB,uBAAuBiC,EAAc,KACrC,YAAYR,EAAO,KAAK,KAAK,kBAAkB,EACtE,CAEA,IAAMS,GAAajC,EAAO,MAAM,EAAG,EAAE,EAC/BkC,GAAajC,EAAO,MAAM,EAAG,EAAE,EAC/BkC,GAAiBC,GAAiBH,GAAYrB,CAAS,EACvDyB,GAAiBD,GAAiBF,GAAYtB,CAAS,EACvD0B,GAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EACrF,CAAC,KAAM,IAAK,KAAM,KAAK,CACzB,EACAC,GAAyB3C,EAAsB0C,EAAQ,EAEvD,IAAME,GAAa,CAACC,GAAyBC,KAA4B,CACvE,IAAMC,GAAOF,GAAS,KAChBG,GAAOH,GAAS,KACtB,GAAIE,KAAS,EACX,MAAO,OAAOC,EAAI,cAAcH,GAAS,KAAK,OAAO,YAEvD,IAAMI,GAAY1B,EAAU,KACxB2B,GAAS,OAAOF,EAAI,aAAaH,GAAS,KAAK,OAAO,IAC1D,QAASM,GAAIJ,GAAO,EAAI,EAAGK,GAAIH,GAAY,EAAGE,IAAK,EAAGA,KAAKC,KACzDF,IAAU;AAAA,EAAKF,EAAI,YAAYG,EAAC,OAAOF,GAAY,EAAI,iBAAiBG,EAAC,IAAM,eAAe,IAEhG,OAAAN,GAAc,QAAQK,IAAK,CACzBD,IAAU;AAAA,EAAKF,EAAI,YAAYG,EAAC,QAClC,CAAC,EACDD,IAAU,GAAGF,EAAI,YAAYD,GAAO,CAAC;AAAA,uBACxBC,EAAI,YAAYD,GAAO,CAAC,UAC9BG,EACT,EAEMG,GAAa,IAAc,CAC/B,IAAIC,GAAU,eAAe7B,EAAE,KAAK,KAAK,IACzC,QAAS0B,GAAI,EAAGA,GAAIxC,EAAawC,KAC/BG,IAAW;AAAA,0BACGH,EAAC,yBAAyBA,EAAC,2BAA2B1C,CAAU,KAEhF,QAAS0C,GAAI,EAAGA,GAAIvC,EAAcuC,KAAK,CACrCG,IAAW,iCAAiCH,EAAC,yBAAyBxC,CAAW,KAEjF,QAASyC,GAAI,EAAGA,GAAIzC,EAAayC,KAC/BE,IAAW;AAAA,qBACJH,EAAC,WAAWxB,GAAE,KAAK,KAAK,UAAUhB,IAAgB,EAAI,GAAK,IAAIyC,EAAC,GAAG,YAAYA,EAAC,YACnFD,EAAC;AAAA,CAET,CACA,OAAOG,EACT,EAEA,MAAO;AAAA,IAEHhC,EAAa,iBAAiBoB,EAAQ,EAAE,0BAA0BnB,CAAS,EAAE,iBACzE,GAAGW,GAAgBN,CAAM,CAAC;AAAA,IACtCN,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,4CACpCb,CAAU,QAAQA,CAAU;AAAA,8CAC1BA,CAAU;AAAA,iCACvBG,CAAY;AAAA,qCACRA,CAAY;AAAA;AAAA;AAAA,MAG3CX,EAAY,SAAW,EAAI,GAAK,uBAAuBsB,EAAU,gBAAgB,OAAO,CAAC,GAAG;AAAA,MAC5FqB,GAAWnB,EAAGc,EAAc,CAAC;AAAA,qBACdd,EAAE,gBAAgB,WAAW,CAAC;AAAA,MAC7CmB,GAAWjB,GAAGc,EAAc,CAAC;AAAA,qBACdd,GAAE,gBAAgB,WAAW,CAAC;AAAA,wBAC3BC,EAAO,KAAK,KAAK,KAAKhB,CAAY;AAAA,oDACND,CAAW;AAAA,QACvD0C,GAAW,CAAC;AAAA;AAAA,2BAEOzC,CAAY;AAAA;AAAA,QAE/BuB,CAAW;AAAA,QACXH,EAAe;AAAA,0BACGJ,EAAO,KAAK,OAAO;AAAA,qBACxBA,EAAO,gBAAgB,aAAa,CAAC;AAAA,QAClDA,EAAO,YAAY,YAAYnB,CAAU,GAAI,OAAO,CAAC;AAAA;AAAA;AAAA,GAIvD,EACA,MAAO,CACL,KAAM,cACN,YAAa,CACX,KAAM,GAAGT,EAAqB,UAAU,IAAIS,CAAU,IAAIE,CAAW,IAAIC,CAAY,IAAIT,CAAc,GACvG,kBAAmBY,EAAU,CAAC,OAAQ,OAAQ,MAAM,EAAI,CAAC,OAAQ,MAAM,CACzE,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMd,EAAa,SAAUF,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKc,EAAa,EAAuB,CAAC,EAClE,gBAAAK,CACF,GACA,gBAAAG,CACF,CACF,EAEE/B,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,EAG7C,GAAIA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EACxF,MAAM,IAAI,MAAM,kCAAkC,CAEtD,EAEaR,GAAUgE,GAAkC,CACvDjE,GAAeiE,EAAQ,MAAM,EAC7B,IAAMtD,EAAcuD,GAAc,UAAUD,EAAQ,OAAO,CAAC,EAAE,KAAMA,EAAQ,OAAO,CAAC,EAAE,KAAM,EAAI,EAChG,GAAI,CAACtD,EACH,MAAM,IAAI,MAAM,uCAAwC,EAE1D,IAAMM,EAAIN,EAAYA,EAAY,OAAS,CAAC,EACtCO,EAAI+C,EAAQ,OAAO,CAAC,EAAE,KAAKA,EAAQ,OAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EAC9DhD,EAAI,GAAKC,EAAI,EACf+C,EAAQ,QAAQlE,GAA6BkE,EAAQ,OAAQ,CAAC,WAAY,EAAE,EAAGtD,CAAW,CAAC,EAE3FsD,EAAQ,QAAQE,GAAwBF,EAAQ,OAAQ,CAAC,WAAY,EAAE,EAAGtD,CAAW,CAAC,CAE1F,IChLA,IAeayD,GA6BPC,GAEAC,GAmDAC,GAmBOC,GA0B
PC,GAyIAC,GA0BOC,GAjTbC,GAAAC,GAAA,kBAIAC,KAIAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAEahB,GACT,CAACiB,EAA+BC,EAAgCC,EAC/DC,EAA+BC,EAA4BC,IAAqC,CAC/F,IAAMC,EAAYN,EAAW,CAAC,EACxBO,EAAoBP,EAAW,MAAMK,EAAgB,EAAI,EAAGA,EAAgB,EAAI,CAAC,EACjFG,EAAcD,EAAkB,OAChCE,EAAcR,EAAY,CAAC,EAE3BS,EADqBT,EAAY,MAAM,CAAC,EACA,IAAI,CAACU,EAAGC,IAAMD,GAAKA,EAAI,IAAMT,EAAUU,CAAC,EAAI,EAAE,EAEtFC,EAD2BN,EAAkB,IAAI,CAACI,EAAGC,IAAMD,EAAIR,EAAWS,CAAC,EAAIT,EAAWS,EAAIJ,CAAW,CAAC,EAEnF,IAAI,CAACG,EAAGC,IAAM,KAAK,OAAOD,EAAID,EAAmBE,CAAC,EAAIR,EAAQQ,CAAC,GAAKR,EAAQQ,CAAC,CAAC,CAAC,EAC5G,OAAAC,EAAY,OAAO,EAAG,EAAGP,CAAS,EAClCO,EAAY,OAAOR,EAAgB,EAAI,EAAG,EAAGI,CAAW,EACjDI,CACT,EAcE7B,GAA2B,CAAC,EAAG,EAAG,EAAG,CAAC,EAEtCC,GAAiB,CAAC6B,EAA+BC,IAAqC,CAG1F,GAAI,CAACD,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAI/C,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC3D,MAAM,IAAI,MAAM,uCAAuC,EAGzD,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM,8CAA8C,EAIhE,IAAME,EAAcF,EAAO,CAAC,EAAE,KAAKC,EAAW,SAAW,OAASD,EAAO,CAAC,EAAE,KAAK,OAAS,EAAI,CAAC,EACzFG,EAAkBH,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIC,EAAW,MACvD,GAAIC,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAIrE,GAAIH,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,GAC/F,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAMN,EAAcM,EAAO,CAAC,EAAE,KAAK,OAAS,EAE5C,GAAIC,EAAW,UAAU,SAAWP,EAClC,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAIvD,GAAIO,EAAW,QAAQ,SAAWP,EAChC,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAIrD,GAAIO,EAAW,KAAK,SAAWP,EAAc,EAC3C,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAKtD,GAAIO,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWD,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,CAE1C,EAEM5B,GAA4B,CAA2B6B,EAAeD,IAAqC,CAC/G,IAAMb,EAAcc,EAAW,YAAY,MAAM,EAEjD,QAASH,EAAI,EAAGA,EAAIE,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAEF,EACvCX,EAAYW,EAAI,CAAC,IAAM,IACzBX,EAAYW,EAAI,CAAC,EAAIE,EAAO,CAAC,EAAE,KAAKF,CAAC,GAGzC,IAAMM,EAAOH,EAAW,KAAK,MAAM,EACnCI,GAAa,yBACTL,EAAO,CAAC,EAAE,KAAMC,EAAW,QAASA,EAAW,UAAWd,EAAaiB,EAAMH,EAAW,SAAW,OACnGA,EAAW,OAAO,EAGtB,IAAMK,EAAmB,OAAO,OAAO,CAAC,EAAGL,CAAU,EACrD,cAAO,OAAOK,EAAe,CAAC,YAAAnB,EAAa,KAAAiB,CAAI,CAAC,EACzCE,CACT,EAEajC,GAAuB4B,GAAwD,CAC1F,IAAMM,EAAuBC,GAAkCP,CAAU,EAEnEQ,EAASR,EAAW,OACpBS,EAAU,CAAC,SAAU,QAAS,aAAc,YAAY,EAAET,EAAW,QAAkB,EACvFb,EAAYa,EAAW,UACvBU,EAAQV,EAAW,MACnBd,EAAcc,EAAW,aACzBG,EAAOH,EAAW,KAClBX,EAAUW,EAAW,QACrBW,EAAYX,EAAW,WAA6B,EAE1D,MAAO,CACL,QAAAS,EACA,OAAAD,EACA,UAAArB,EACA,MAAAuB,EACA,YAAAxB,EACA,KAAAiB,EACA,QAAAd,EACA,SAAAsB,EACA,GAAGL,EACH,SAAU,GAAGN,EAAW,MAAM,IAAIM,EAAqB,UAAU,GACnE,CACF,EAEMjC,GAAS,CAACuC,EAAyBb,EAA+BC,IAAqC,CAC3G,IAAMa,EAAqB1C,GAA0B6B,EAAYD,CAAM,EAKjEe,EAAiBd,EAAW,SAAW,OAC7C,GAAIA,EAAW,QAAU,EAAG,CAM1B,GADmC,CAACY,EAAQ,YAAY,eAAe,QAAQ,GAC7CE,GAAkBf,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMC,EAAW,OACjFD,EAAO,CAAC,EAAE,KAAK,CAAC,IAAM,GAAKC,EAAW,UAAU,CAAC,IAAM,GAAKA,EAAW,UAAU,CAAC,IAAM,EAAG,CAC7F,IAAMF,GAAc9B,GAChB+B,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMC,EAAW,UAAWa,EAAmB,KAAMb,EAAW,QAC1Fc,CAAc,EACZC,EAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAG9B,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAAC+B,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,GAEhC,IAAME,GAAa,CAAClB,EAAO,CAAC,EAAGgB,CAAgB,EAC3ChB,EAAO,SAAW,GACpBkB,GAAW,KAAKlB,EAAO,CAAC,CAAC,EAE3Ba,EAAQ,QACJM,GAAsCD,GAAYJ,EAAoBf,EAAW,EAAG,CAAC,OAAQmB,EAAU,CAAC,CAC9G,MACEL,EAAQ,QAAQO,GAA6BpB,EAAQc,CAAkB,CAAC,EAE1E,MACF,CAEA,IAAMO,EAAUrB,EAAO,SAAW,EAC5BsB,EAActB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EACnDQ,EAAavB,EAAO,CAAC,EAAE,KAAKe,EAAiB,EAAI,CAAC,EAClDS,EAAgBxB,EAAO,CAAC,EAAE,
KAAKe,EAAiB,EAAI,CAAC,EACrDU,EAAezB,EAAO,CAAC,EAAE,KAAK,CAAC,EAC/B0B,EAAc1B,EAAO,CAAC,EAAE,KAAK,CAAC,EAE9BD,EAAc9B,GAChB+B,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMC,EAAW,UAAWa,EAAmB,KAAMb,EAAW,QAC1Fc,CAAc,EACZY,EAAY5B,EAAYgB,EAAiB,EAAI,CAAC,EAC9Ca,EAAW7B,EAAYgB,EAAiB,EAAI,CAAC,EAC7CpB,EAAcI,EAAYgB,EAAiB,EAAI,CAAC,EAEhDc,EAAWd,GAAkBU,IAAiBH,GAAeI,IAAgBH,GAC/EtB,EAAW,KAAK,CAAC,IAAM,GAAKA,EAAW,KAAK,CAAC,IAAM,EACvD,GAAI4B,GACCJ,IAAiB,GAAKC,IAAgB,GAAKzB,EAAW,UAAU,CAAC,IAAM,GAAKA,EAAW,UAAU,CAAC,IAAM,GACxGA,EAAW,QAAQ,CAAC,IAAM,GAAKA,EAAW,QAAQ,CAAC,IAAM,GAAKA,EAAW,KAAK,CAAC,IAAM,GACrFA,EAAW,KAAK,CAAC,IAAM,EAAI,CAE9B,IAAM6B,EAAQ/B,EAAY,CAAC,EACvBgC,GAAWC,EAAWC,GACpBC,GAAe,CAAC,EACtB,GAAInB,EAAgB,CAClB,IAAMC,GAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAG9B,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAAC+B,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAIlE,GAHIA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,IAE5Ba,EAAU,CACZ,IAAMM,GAAYb,EAAcC,EAAaC,EAC7CO,GAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC,EAAG8B,EAAOK,EAAS,CAAC,EACnDH,EAAYhB,GAAiB,QAAQ,CAAC,EAAGmB,GAAWxC,CAAW,CAAC,EAChEsC,GAAoB,CAAC,EAAGH,EAAOnC,CAAW,CAC5C,MACEoC,GAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC8B,EAAOR,EAAcC,EAAYC,CAAa,CAAC,EAC9EQ,EAAYhB,GAAiB,QAAQ,CAAC,EAAGQ,EAAe7B,CAAW,CAAC,EACpEsC,GAAoB,CAACH,EAAOH,EAAYC,EAAUjC,CAAW,EAE/DuC,GAAa,KAAKH,EAAS,EAC3BG,GAAa,KAAKF,CAAS,CAC7B,MACED,GAAY/B,EAAO,CAAC,EAAE,QAAQ,CAAC8B,EAAON,EAAeF,EAAcC,CAAU,CAAC,EAC9ES,EAAYhC,EAAO,CAAC,EAAE,QAAQ,CAAC,EAAGL,EAAa6B,CAAa,CAAC,EAC7DS,GAAoB,CAACH,EAAOnC,EAAagC,EAAYC,CAAQ,EAC7DM,GAAa,KAAKF,CAAS,EAC3BE,GAAa,KAAKH,EAAS,EAEzBV,GACFa,GAAa,KAAKlC,EAAO,CAAC,CAAC,EAE7B,IAAMoC,GAAIH,GAAkB,CAAC,EACvBI,EAAIH,GAAa,CAAC,EAAE,KAAKA,GAAa,CAAC,EAAE,KAAK,OAAS,CAAC,EAE1DE,GAAI,GAAKC,EAAI,EACfxB,EAAQ,QACJyB,GACIJ,GAAcpB,EAAoBf,EAAakC,GAAmBlB,CAAc,EACpF,CAAC,OAAQmB,EAAY,CAAC,EAE1BrB,EAAQ,QACJ0B,GAAwBL,GAAcpB,EAAoBf,EAAakC,GAAmBlB,CAAc,EACxG,CAAC,OAAQmB,EAAY,CAAC,EAE5B,MACF,CAIA,IAAMM,EAAgE,GAGhExB,EAAoBH,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJI,GAA2BjB,EAAO,CAAC,EAAG9B,EAAwB,EAC9D,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAAC+B,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACY,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKG,GAIhC,IAAME,EAAa,CAAClB,EAAO,CAAC,EAAGgB,CAAgB,EAC3CK,GACFH,EAAW,KAAKlB,EAAO,CAAC,CAAC,EAI3B,IAAMyC,EAAY1B,EAAiBY,EAAYC,EAAWjC,EACpD+C,EAAY3B,EAAiBpB,EAAcgC,EAAYC,EACvDe,EAAWlB,EAAeC,EAAcF,EAC9CX,EAAQ,QACJ+B,GACI1B,EAAYJ,EAAoBf,EAAa0C,EAAWC,EAAWC,EAAUtB,EAC7EmB,CAAyB,EAC7B,CAAC,OAAQtB,CAAU,CAAC,CAC1B,EAEM3C,GAAS,CAACsC,EAAyBZ,IAAqC,CAE5E,IAAMV,EAAgBU,EAAW,SAAW,OACtCD,EAAS,CACba,EAAQ,OAAO,CAAC,EAAE,QACdtB,EAEI,CAACsB,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAEnF,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAE5FA,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAChH,EACIA,EAAQ,OAAO,SAAW,GAC5Bb,EAAO,KAAKa,EAAQ,OAAO,CAAC,CAAC,EAE/B,IAAMT,EAAO,CAAC,EAAGH,EAAW,KAAK,CAAC,EAAG,EAAGA,EAAW,KAAK,CAAC,CAAC,EACpDX,EAAU,CAAC,CAAC,EAAE,OAAOW,EAAW,OAAO,EACvCb,EAAY,CAAC,CAAC,EAAE,OAAOa,EAAW,SAAS,EAC3Cd,EAAc,CAAC,CAAC,EAAE,OAAOc,EAAW,WAAW,EAC/Ca,EAAqB1C,GAA0B,CAAC,GAAG6B,EAAY,KAAAG,EAAM,QAAAd,EAAS,UAAAF,EAAW,YAAAD,CAAW,EAAGa,CAAM,EACnHa,EAAQ,QAAQO,GACZpB,EAAQc,EACRf,GAAeR,EAAgB,CAACQ,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,EAAI,CAAC,CAAC,CAAC,CAC3F,EAEavB,GAAO,CAACqC,EAAyBZ,IAAqC,CACjF9B,GAAe0C,EAAQ,OAAQZ,CAAU,EACrCY,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpCtC
,GAAOsC,EAASZ,CAAU,EAE1B3B,GAAOuC,EAASA,EAAQ,OAAQZ,CAAU,CAE9C,ICxTA,IAiCM4C,GA2HOC,GA5JbC,GAAAC,GAAA,kBAqBAC,KACAC,KAGAC,KAEAC,KAEAC,KACAC,KACAC,KAEMV,GACF,CAACW,EAAyBC,EAAU,GAAOC,EAAqCC,EAC/EC,EAAmB,IAAc,CAChC,IAAMC,EAAeD,GAA6B,CAChD,OAAQA,EAAkB,CACxB,IAAK,GACH,MAAO,sEACT,IAAK,GACH,MAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAQED,CAAI;AAAA,cAEf,QACE,MAAM,IAAI,MAAM,oBAAoBC,CAAgB,oBAAoB,CAC5E,CACF,EACME,EAAgBN,EAAiB;AAAA;AAAA,QAGA;AAAA;AAAA,QAIjCO,EAAkBP,EAAiB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAOA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAQnCQ,EAAUR,EAAiB,2BAA6B,2BACxDS,EAAST,EAAiB,2BAA6B,2BACvDU,EAAMV,EAAiB,MAAQ,MAC/BW,EAAMX,EAAiB,MAAQ,MAE/BY,EAAe;AAAA,yBACFZ,EAAiB,2BAA6B,0BAA0B;AAAA,uBAC1EA,EAAiB,gCAAkC,+BAA+B;AAAA,qBACpFU,CAAG;AAAA,qBACHA,CAAG;AAAA;AAAA,mBAELC,CAAG;AAAA,mBACHA,CAAG;AAAA;AAAA;AAAA,kCAGYH,CAAO;AAAA,iBACxBL,CAAI;AAAA;AAAA,kCAEaM,CAAM;AAAA,iBACvBN,CAAI;AAAA;AAAA;AAAA;AAAA,kBAIHQ,CAAG;AAAA,QACbL,CAAa;AAAA,0EACqDF,CAAgB,KAE9ES,EAAUb,EAAiB;AAAA,0BACbI,CAAgB;AAAA;AAAA,UAEhCQ,CAAY;AAAA;AAAA,eAEPT,CAAI,SACoB;AAAA,0BACbC,CAAgB;AAAA;AAAA,UAEhCQ,CAAY;AAAA;AAAA,eAEPT,CAAI,SAEPW,EAAU;AAAA,0BACIV,CAAgB;AAAA,yBACjBJ,EAAiB,2BAA6B,0BAA0B;AAAA;AAAA;AAAA,YAIvFA,EAAiB,yDACA,wDAAwD;AAAA;AAAA;AAAA,UAGzEK,EAAYD,CAAgB,CAAC;AAAA;AAAA,eAExBD,CAAI;AAAA,QAGPY,EAAkBC,GAAqBd,EAAYC,CAAI,EAqB7D,MApBiB;AAAA,uDACgCA,CAAI;AAAA,MACrDH,EAAiBa,EAAUC,CAAO;AAAA;AAAA;AAAA,uDAGeX,CAAI;AAAA,MACrDH,EAAiBc,EAAUD,CAAO;AAAA;AAAA;AAAA,iEAGyBV,CAAI;AAAA,wBAC7CC,CAAgB;AAAA;AAAA;AAAA,uBAGjBJ,EAAiB,gCAAkC,+BAA+B;AAAA,QACjGO,CAAe;AAAA,QACfU,GAAYhB,CAAO,CAAC;AAAA,QACpBc,CAAe;AAAA,8EACuDX,CAAgB;AAAA;AAAA,IAI1F,EAESd,GACT,CAAC4B,EAA+BhB,EAAqCiB,EACpEC,EAAmBC,EAAmBC,EAAkBC,EACxDC,IAAoD,CACnD,IAAMxB,EAAiBE,EAAW,SAAW,OACvCuB,EAAazB,EAAiBkB,EAAO,CAAC,EAAE,KAAK,CAAC,EAAIA,EAAO,CAAC,EAAE,KAAK,CAAC,EAClEQ,EAAYP,EAAY,CAAC,EACzBQ,EAAW3B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC1DS,EAAY5B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAC3DU,EAAc7B,EAAiBmB,EAAY,CAAC,EAAIA,EAAY,CAAC,EAE7DW,EAAS9B,GAAmByB,EAAa,IAAM,GAAKA,EAAa,GAAMI,EAAc,IAAM,EAG3FE,EAAY/B,EAAiB6B,EAAcF,EAAWC,EACtDI,EAAYhC,EAAiB2B,EAAWC,EAAYC,EACpDI,EAA0C,CAAC,EAAG,EAAG,CAAC,EAClDC,EAAoBd,GAAa,EAAI,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,CAAC,EACzDe,EAAW,CACf,KAAK,KAAKJ,EAAYE,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKF,EAAYC,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,EAC7D,KAAK,KAAKR,EAAYO,EAAc,CAAC,EAAIC,EAAkB,CAAC,CAAC,CAC/D,EAEAE,GAAU,UAAW,IAAM,wCAAwCD,CAAQ,EAAE,EAE7E,IAAM/B,EAAmB0B,EAAS,EAAI,EAChCO,EAAY,KAAK,IAAIJ,EAAc,CAAC,EAAI7B,EAAkB6B,EAAc,CAAC,CAAC,EAC1EK,EAAaR,EAAS,EAAI,EAC1BS,GACF,CAACrC,EAAW,YAAYF,EAAiB,EAAI,CAAC,EAAGE,EAAW,YAAYF,EAAiB,EAAI,CAAC,CAAC,EAC7FwC,EAAsB,CAC1BD,GAAW,CAAC,GAAKrC,EAAW,UAAU,CAAC,GAAK,EAAI,GAAKqC,GAAW,CAAC,EAAI,IAAMrC,EAAW,UAAU,CAAC,EAAI,IACrGqC,GAAW,CAAC,GAAKrC,EAAW,UAAU,CAAC,GAAK,EAAI,GAAKqC,GAAW,CAAC,EAAI,IAAMrC,EAAW,UAAU,CAAC,EAAI,GACvG,EACMuC,GAAO,CACXD,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOtC,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,EACrFsC,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOtC,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,CACvF,EAEMwC,GAAoC,CACxC,CAAC,OAAsB,KAAMtB,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAS,EAC/E,CAAC,OAAsB,KAAMC,CAAQ,EAAG,CAAC,OAAsB,KAAMpB,EAAW,OAAO,EACvF,CAAC,OAAsB,KAAMA,EAAW,SAAS,EAAG,CAAC,OAAsB,KAAMqC,EAAU,EAC3F,CAAC,OAAsB,KAAME,EAAI,CACnC,EACAE,GAA6BzC,EAAYwC,EAAe,EACxDA,GAAgB,KAAK,GAAGE,GAA2B1B,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAAC,EAElF,IAAM2B,GAAwD,CAAC,OAAQ,MAAM,EACzEtB,IACFmB,GAAgB,KAAK,GAAGE,GAA2B1B,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE2B,GAAkB,KAAK,MAAM,GAE/BH,GAAgB,KAAK,GAAGE,GAA2BzB,CAAW,CAAC,EAE/D,IAAM2B,EAAmBC,IAA+B,CACtD,IAAMC,GAAIC,EAAc,IAAK/B,EAAO,C
AAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQoB,CAAU,EAC5EY,GAAID,EAAc,IAAK/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACnEiC,GAASC,GAAe,SAAUlC,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQmB,CAAU,EACpFe,GAAiB,CAACL,GAAGE,EAAC,EAExBI,GAAmB,GACvB,GAAI/B,EAAS,CACX,IAAMgC,GAAON,EAAc,OAAQ/B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQoB,CAAU,EACxFe,GAAe,KAAKE,EAAI,EACxBD,IAAoB;AAAA,4DAC8BC,GAAK,KAAK,KAAK;AAAA,iCAC1CvD,EAAiB,IAAM,GAAG,GAAG8B,EAAS,MAAQ,EAAE;AAAA,YAEzE,CAEA,IAAM0B,GAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EACvG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQ,CAAC,EAAG,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQ,CAAC,EACrF,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQjB,GAAW,MAAM,EAC5D,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQE,GAAK,MAAM,CACjD,EACAgB,GAAyBvD,EAAYsD,EAAQ,EAC7C,IAAME,GAAWC,GAA4BzC,EAAO,CAAC,EAAE,SAAU,CAAC,EAClE,GAAIwC,KAAa,OAASA,KAAa,MACrC,MAAM,IAAI,MAAM,YAAYA,EAAQ,oBAAoB,EAE1D,MAAO;AAAA,UACLE,GAAc,yBAAyB,CAAC;AAAA,UACxCb,GAAa,iBAAiBS,EAAQ,EAAE,iBAAiB,GAAGH,GAAgBF,EAAM,CAAC;AAAA,UACnFG,EAAgB;AAAA,UAChBjE,GAA6BW,EAAgBuB,EAASrB,EAAY8C,GAAE,KAAK,MAAO5C,CAAgB,CAAC;AAAA,UAE/F0B,EAAS+B,GACI3B,EAAmBD,EAAeyB,GAAU,OAAW,CAAC1D,EAAgBqC,CAAS,EACrFyB,GACI5B,EAAmBD,EAAeyB,GAAU,OAAW,CAAC1D,EAAgBqC,EAAW,GACnF,OAAWb,CAAyB,CAAC,EACxD,EAEA,MAAO,CACL,KAAM,wBACN,YACI,CAAC,KAAM,GAAGtB,EAAW,QAAQ,IAAIgC,CAAiB,IAAID,CAAa,IAAIH,CAAM,GAAI,kBAAAe,EAAiB,EACtG,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM1B,EAAa,SAAUD,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAGiB,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,gBAAAO,EACF,GACA,gBAAAI,CACF,CACF,ICvQJ,IA2BMiB,GAiMOC,GA5NbC,GAAAC,GAAA,kBAmBAC,KACAC,KAEAC,KAEAC,KAGMP,GACF,CAACQ,EAA4BC,EAA+BC,EAAgCC,EAC3FC,EAA+BC,EAAS,GAAOC,EAAkBC,EACjEC,EAAiB,KAAkB,CAClC,IAAMC,EAASD,EAAiB,EAAI,EAC9BE,EAASF,EAAiB,EAAI,EAC9BG,EAAaH,EAAiB,EAAI,EAClCI,EAAgBP,EAAS,EAAI,EAE/BQ,EAAmB;AAAA,iDACoBR,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,0BAC9DD,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,KAEvDH,IACFU,GAAoB;AAAA,sDAC0BR,EAAS,QAAQC,CAAQ,IAAMA,CAAQ;AAAA,2BAClEE,EAAiB,IAAM,GAAG,GAAGH,EAAS,MAAQ,EAAE;AAAA,QAGrE,IAAMS,EAAaT,EAAS,EAAI,EAC1BU,EAAIC,EAAc,IAAKf,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQa,CAAU,EAC5EG,EAAKD,EAAc,KAAMf,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQa,CAAU,EAC9EI,EAAiB,CAACD,EAAIF,CAAC,EACzBZ,GACFe,EAAe,KAAKF,EAAc,OAAQf,EAAO,CAAC,EAAE,SAAU,CAACC,EAAYS,CAAU,CAAC,EAAE,OAAQG,CAAU,CAAC,EAE7G,IAAMK,EAASC,GAAe,SAAUnB,EAAO,CAAC,EAAE,SAAUC,EAAY,OAAQY,CAAU,EAEpFO,EAAe;AAAA,2BACAjB,EAAuB,cAAgB,gBAAgB;AAAA,kBAChEA,EAAuB,cAAgB,gBAAgB;AAAA,kBACvDA,EAAuB,cAAgB,gBAAgB,MAAMQ,CAAa;AAAA,wBACpER,EAAuB,cAAgB,gBAAgB;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAM7CE,CAAQ,MAAMM,CAAa;AAAA,8BAC/BA,CAAa;AAAA,8BACbN,CAAQ;AAAA;AAAA;AAAA,uBAGfA,CAAQ,kBAAkBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,oCAExCA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yBAOnBA,CAAQ,kBAAkBA,CAAQ,WAAWA,CAAQ;AAAA,0BACpDA,CAAQ,wBAAwBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,sCAO/CA,CAAQ;AAAA;AAAA;AAAA;AAAA,wCAINA,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAUhBS,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,OAAQ,IAAI,CAAC;AAAA,oCAChCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAMhBW,EAAG,IAAI,QAAS,OAAQ,QAAS,IAAI,CAAC;AAAA;AAAA,iDAEjBX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,iDAMRK,CAAU;AAAA;AAAA,gCAE3BI,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;
AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,OAAQ,IAAI,CAAC;AAAA,oCAChCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCASZS,EAAE,IAAI,cAAe,cAAe,KAAM,IAAI,CAAC;AAAA,gCAC/CA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA,gCACnDA,EAAE,IAAI,cAAe,cAAe,SAAU,IAAI,CAAC;AAAA;AAAA,+BAEpDE,EAAG,IAAI,QAAS,OAAQ,QAAS,IAAI,CAAC;AAAA,oCACjCX,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,mCAUTM,CAAa;AAAA,qCACXT,EAAU,YAAc,QAAQG,CAAQ,QAAQ;AAAA,YACzEa,EAAO,IAAI,QAAS,IAAK,QAAS,KAAM,OAAO,CAAC;AAAA;AAAA,SAGhDG,EAAc;AAAA,gCACMH,EAAO,gBAAgB,YAAY,CAAC;AAAA,wBAC5CA,EAAO,WAAW,gBAAiB,CAAC,CAAC;AAAA,qBACxCA,EAAO,WAAW,gBAAiBR,CAAU,CAAC;AAAA,oBAC/CQ,EAAO,WAAW,gBAAiBV,CAAM,CAAC;AAAA,oBAC1CU,EAAO,WAAW,gBAAiBT,CAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,0BAQpCJ,CAAQ;AAAA;AAAA;AAAA;AAAA;AAAA,yBAKTA,CAAQ,iBAAiBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,sCAEvCA,CAAQ,sBAAsBG,CAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAU/CH,CAAQ,iBAAiBA,CAAQ,WAAWA,CAAQ;AAAA;AAAA,wCAEvCA,CAAQ,sBAAsBI,CAAM;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+BAQlEF,EAAiBS,EAAG,IAAI,QAAS,OAAQ,OAAQ,cAAc,EAC9CA,EAAG,IAAI,QAAS,eAAgB,OAAQ,MAAM,CAAC;AAAA,+BAC3CF,EAAE,IAAI,eAAgB,cAAe,cAAe,aAAa,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAM/DZ,EAAU,WAAa,GAAGG,CAAQ,OAAO;AAAA,YAC/Da,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,UAG/C,MAAO;AAAA,IACTnB,EAAa,iBAAiBO,CAAQ,EAAE,iBAAiB,GAAGW,EAAgBC,CAAM,CAAC;AAAA,IACnFN,CAAgB;AAAA;AAAA,MAEdb,EAAa,UAAU,CAAC;AAAA,MACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,IAC5EK,EAASgB,EAAeC,CAAW,GACnC,EAES7B,GACT,CAACQ,EAA+BsB,EAC/BC,IAAqF,CACpF,IAAMrB,EAAUF,EAAO,OAAS,EAE1BC,EAAcqB,EAAW,YACzBE,EAAaC,EAAU,KAAKxB,CAAW,EAMvCyB,EAAW,CACf,KAAK,KAAKF,EAAa,EAAE,EACzB,EACA,CACF,EACAG,GAAU,UAAW,IAAM,uCAAuCD,CAAQ,EAAE,EAE5E,IAAMnB,EAAiBe,EAAW,SAAW,OACvCM,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAU,CAACP,EAAW,QAAQ,CAAC,EAAGA,EAAW,QAAQ,CAAC,CAAC,EACvDQ,EACF,CAACR,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAGe,EAAW,YAAYf,EAAiB,EAAI,CAAC,CAAC,EAC7FwB,EAAY,CAACT,EAAW,UAAU,CAAC,EAAGA,EAAW,UAAU,CAAC,CAAC,EAC7DU,EAAsB,CAC1BF,EAAW,CAAC,GACPR,EAAW,UAAU,CAAC,GAAK,EACvB,GACCA,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAI,IAAMe,EAAW,UAAU,CAAC,EAAI,IAC3FQ,EAAW,CAAC,GACPR,EAAW,UAAU,CAAC,GAAK,EACvB,GACCA,EAAW,YAAYf,EAAiB,EAAI,CAAC,EAAI,IAAMe,EAAW,UAAU,CAAC,EAAI,GAC7F,EACMW,EAAO,CACXD,EAAoB,CAAC,EAAI,EAAI,KAAK,OAAOV,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,GAAK,CAAC,EACrFU,EAAoB,CAAC,EAAI,EAAI,KAAK,MAAMV,EAAW,KAAK,CAAC,EAAIA,EAAW,KAAK,CAAC,CAAC,EAAI,CACrF,EAEMlB,EAAS,GACT8B,EAAQZ,EAAW,MACnBa,EAASnC,EAAO,CAAC,EAAE,KACnBoC,EAAwBD,EAAO,CAAC,EAAID,EACpCG,EAAyBF,EAAO,CAAC,EAEjCG,EAAoC,CACxC,CAAC,QAAuB,KAAMd,CAAU,EAAG,CAAC,QAAuB,KAAMK,CAAO,EAChF,CAAC,QAAuB,KAAMC,CAAU,EAAG,CAAC,QAAuB,KAAMC,CAAS,EAClF,CAAC,QAAuB,KAAMC,CAAmB,EAAG,CAAC,OAAsB,KAAMC,CAAI,EACrF,CAAC,QAAuB,KAAMG,CAAqB,EAAG,CAAC,QAAuB,KAAMC,CAAsB,EAC1G,GAAGE,GAA2BvC,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAC9D,EACIE,IACFoC,EAAgB,KAAK,GAAGC,GAA2BvC,EAAO,CAAC,EAAE,IAAI,CAAC,EAClE4B,EAAkB,KAAK,MAAM,GAE/BU,EAAgB,KAAK,GAAGC,GAA2BtC,CAAW,CAAC,EAE/D,IAAME,EAAuBuB,EAAS,CAAC,IAAM,GAAKA,EAAS,CAAC,IAAM,EAC5Dc,EAAmBzC,GAA+B,CACtD,IAAMO,GAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQuB,EAAQ,MAAM,EACzF,CAAC,KAAM,cAAe,KAAM,MAAO,OAAQC,EAAW,MAAM,EAC5D,CAAC,KAAM,YAAa,KAAM,MAAO,OAAQA,EAAW,MAAM,EAC1D,CAAC,KAAM,wBAAyB,KAAM,MAAO,OAAQE,EAAoB,MAAM,EAC/E,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQC,EAAK,MAAM,EAAG,CAAC,KAAM,2BAA4B,KAAM,KAAK,EAChG,CAAC,KAAM,4BAA6B,KAAM,KAAK,CACjD,EACM5B,EAAWoC,GAA4BzC,EAAO,CAAC,EAAE,QAAQ,EAC/D,MAAO,GACHT,GACIQ,EAAcC,EAAQC,EAAaC,EAASC,EAAsBC,EAAQC,EAAUC,GACpFC,CAAc,CAAC,EACzB,EACA,MAAO,CACL,KAAM,kBACN,YAAa,CAAC,KAAM,GAAGe,EAAW,QA
AQ,IAAK,kBAAAM,CAAiB,EAChE,WAAY,KAAO,CACjB,cAAe,CAAC,EAAGF,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,EAAG,EAAGA,EAAS,CAAC,CAAC,EAC9D,QAAS,CAAC,CACR,KAAMH,EAA6BA,EAA2BtB,CAAW,EAAIA,EAC7E,SAAUD,EAAO,CAAC,EAAE,QACtB,CAAC,EACD,gBAAAsC,CACF,GACA,gBAAAE,CACF,CACF,ICpTJ,IAYME,GAIAC,GAWAC,GAiCAC,GAwCOC,GA+BPC,GAqEAC,GAEAC,GAsDAC,GA6COC,GA7SbC,GAAAC,GAAA,kBAMAC,KACAC,KAEAC,KACAC,KAEMf,GACF,CAACgB,EAAeC,EAAgBC,EAAaC,EAAgBC,EAAkBC,KAC1EL,EAAQ,GAAKC,EAASC,GAAOC,EAAS,GAAKC,EAAW,EAAIC,EAE7DpB,GAAoB,CAACqB,EAAkBC,EAAiBC,EAAgBC,EAAcC,IAAiB,CAC3G,IAAMC,EAAW,KAAK,MAAML,EAAW,CAAC,EACpCC,IAAY,cACdC,EAAKC,CAAI,EAAIE,EACbH,EAAKE,CAAI,EAAIJ,EAAWK,GACfJ,IAAY,eACrBC,EAAKC,CAAI,EAAIH,EAAWK,EACxBH,EAAKE,CAAI,EAAIC,EAEjB,EAEMzB,GACF,CAAC0B,EAA+BC,EAAgCC,EAA8BP,EAC7FQ,EAAeP,EAAgBQ,EAA4BC,EAAwBC,EACnFC,IAA0B,CACzB,IAAMC,EAAcR,EAAW,OAAS,EAClCS,EAAoBF,EAAY,SAAW,EACjD,GAAID,EAAc,SAAW,EAC3B,QAASI,EAAI,EAAGA,EAAIF,EAAa,EAAEE,EACjCJ,EAAc,KAAK,CAAC,EAGxB,IAAMK,EAAYX,EAAW,CAAC,EACxBY,EAAcX,EAAYI,EAAgB,EAAI,CAAC,EAAIF,EACzD,QAASO,EAAI,EAAGG,EAAIb,EAAW,OAASQ,GAAeH,EAAgB,EAAI,GAAIK,EAAIF,EAAa,EAAEE,EAAG,EAAEG,EAAG,CACxG,IAAMC,EAASd,EAAWa,CAAC,EACrBpB,EAAUgB,EAAoBK,EAASV,EAAQM,CAAC,EAAIH,EAAYG,CAAC,EACjEhB,EAAWtB,GAAgB0C,EAAQV,EAAQM,CAAC,EAAGd,EAAKc,CAAC,EAAGT,EAAYY,CAAC,EAAGX,EAAUQ,CAAC,EAAGjB,CAAO,EACnGpB,GAAkBqB,EAAUC,EAASC,EAAMc,EAAGA,EAAIF,CAAW,EACzDC,GACFF,EAAY,KACRH,EAAQM,CAAC,GAAKI,EAAS,GAAKR,EAAcI,CAAC,GAAKT,EAAYY,CAAC,EAAI,GAAKX,EAAUQ,CAAC,EAAI,EAAId,EAAKc,CAAC,EAC/Fd,EAAKc,EAAIF,CAAW,CAAC,CAE7B,CACAD,EAAY,OAAO,EAAG,EAAGI,CAAS,EAClCJ,EAAY,OAAOF,EAAgB,EAAI,EAAG,EAAGO,CAAW,CAC1D,EAOErC,GACF,CAAoCwC,EAAeC,IAAqC,CACtF,IAAMf,EAAcc,EAAW,YAAY,MAAM,EAEjD,GAAIA,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAClGjB,EAAY,OAAS,EACrB,QAASS,EAAI,EAAGA,EAAIM,EAAO,CAAC,EAAE,KAAK,OAAQ,EAAEN,EAC3CT,EAAY,KAAKe,EAAO,CAAC,EAAE,KAAKN,CAAC,CAAC,CAEtC,CACA,IAAMS,EAAiBJ,EAAW,SAAW,OAC7Cd,EAAY,OAAO,EAAG,EAAGe,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAC1Cf,EAAY,OAAOkB,EAAiB,EAAI,EAAG,EAAGH,EAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAE/D,IAAMpB,EAAOmB,EAAW,KAAK,MAAM,EAC7BR,EAAcQ,EAAW,YAAY,MAAM,EAC3CT,EAAgBS,EAAW,cAAc,MAAM,EAC/Cf,EAAagB,EAAO,CAAC,EAAE,KACzBd,EAAYa,EAAW,UAAU,MAAM,EAC3C,GAAIb,EAAU,OAAO,CAACe,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAC9C,IAAMV,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5Cd,EAAY,IAAI,MAAMM,CAAW,EAAE,KAAK,CAAC,CAC3C,CACA,IAAIJ,EAAUW,EAAW,QAAQ,MAAM,EACvC,GAAIX,EAAQ,OAAO,CAACa,EAAGC,IAAMD,EAAIC,EAAG,CAAC,IAAM,EAAG,CAC5C,IAAMV,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5CZ,EAAU,IAAI,MAAMI,CAAW,EAAE,KAAK,CAAC,CACzC,CAGAlC,GACI0B,EAAYC,EAAaC,EAAWa,EAAW,QAASA,EAAW,MAAOnB,EAAMQ,EAASe,EACzFb,EAAeC,CAAW,EAG9B,IAAMa,EAAmB,OAAO,OAAO,CAAC,EAAGL,CAAU,EACrD,cAAO,OAAOK,EAAe,CAAC,YAAAnB,EAAa,KAAAL,EAAM,cAAAU,EAAe,YAAAC,EAAa,UAAAL,EAAW,QAAAE,CAAO,CAAC,EACzFgB,CACT,EAES5C,GAAgCuC,GAAiE,CAC5G,IAAMM,EAAuBC,GAAkCP,CAAU,EAEnEQ,EAASR,EAAW,OACpBpB,EACF,CAAC,SAAU,QAAS,aACnB,YAAY,EAAE,OAAOoB,EAAW,QAAW,IAAc,EAAIA,EAAW,OAAiB,EACxFb,EAAYa,EAAW,UACvBZ,EAAQY,EAAW,MACnBd,EAAcc,EAAW,YACzBnB,EAAOmB,EAAW,KAClBX,EAAUW,EAAW,QACrBS,EAAYT,EAAW,SAA2B,EAClDT,EAAgBS,EAAW,cAC3BR,EAAcQ,EAAW,YAC/B,MAAO,CACL,QAAApB,EACA,OAAA4B,EACA,UAAArB,EACA,MAAAC,EACA,YAAAF,EACA,cAAAK,EACA,YAAAC,EACA,KAAAX,EACA,QAAAQ,EACA,SAAAoB,EACA,GAAGH,EACH,SAAU,GAAGN,EAAW,MAAM,IAAIM,EAAqB,UAAU,GACnE,CACF,EAEM5C,GAAiB,CAACuC,EAA+BD,IAA8C,CAGnG,GAAI,CAACC,GAAWA,EAAO,SAAW,GAAKA,EAAO,SAAW,EACvD,MAAM,IAAI,MAAM,6BAA6B,EAI/C,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC3D,MAAM,IAAI,MAAM,2CAA2C,EAG7D,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM,8CAA8C,EAIhE,IAAMS,EAAcT,EAA
O,CAAC,EAAE,KAAKD,EAAW,SAAW,OAASC,EAAO,CAAC,EAAE,KAAK,OAAS,EAAI,CAAC,EACzFU,EAAkBV,EAAO,CAAC,EAAE,KAAK,CAAC,EACxC,GAAIS,IAAgBC,EAClB,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMC,EAAcX,EAAO,CAAC,EAAE,KAAK,CAAC,EAAID,EAAW,MAGnD,GAAIC,EAAO,SAAW,IAAMA,EAAO,CAAC,EAAE,KAAK,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,CAAC,IAAMW,GAC/E,MAAM,IAAI,MAAM,cAAc,EAGhC,IAAMnB,EAAcQ,EAAO,CAAC,EAAE,KAAK,OAAS,EAG5C,GAFqBD,EAAW,UAAU,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAEnDH,EAAW,UAAU,SAAWP,EAClD,MAAM,IAAI,MAAM,uBAAuBA,CAAW,GAAG,EAKvD,GAFmBO,EAAW,QAAQ,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAEjDH,EAAW,QAAQ,SAAWP,EAC9C,MAAM,IAAI,MAAM,qBAAqBA,CAAW,GAAG,EAKrD,GADgBO,EAAW,KAAK,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GAC9CH,EAAW,KAAK,SAAWP,EAAc,EACtD,MAAM,IAAI,MAAM,kBAAkBA,EAAc,CAAC,GAAG,EAItD,GAAIO,EAAW,cAAc,SAAWP,GAAeO,EAAW,cAAc,SAAW,EACzF,MAAM,IAAI,MAAM,4BAA4BP,CAAW,GAAG,EAM5D,GADuBO,EAAW,YAAY,OAAO,CAACE,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAI,GACrDH,EAAW,YAAY,SAAW,GACpDA,EAAW,YAAY,SAAWC,EAAO,CAAC,EAAE,KAAK,OAAS,EAC5D,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAID,EAAW,YAAY,SAAW,GAAKA,EAAW,YAAY,SAAWC,EAAO,CAAC,EAAE,KAAK,OAAS,EACnG,MAAM,IAAI,MAAM,sBAAsB,CAE1C,EAGMtC,GAAsB,CAAC,EAAG,EAAG,EAAG,CAAC,EAEjCC,GACF,CAACiD,EAAyBZ,EAA+BD,IAA8C,CACrG,IAAMc,EAAqBtD,GAAmCwC,EAAYC,CAAM,EAC1EG,EAAiBJ,EAAW,SAAW,OACvCR,EAAcsB,EAAmB,YACjCjB,EAAcL,EAAYY,EAAiB,EAAI,CAAC,EAChDW,EAAgBd,EAAO,CAAC,EAAE,KAAKG,EAAiB,EAAI,CAAC,EAI3D,GAAIU,EAAmB,QAAU,GAAMjB,IAAgB,GAAKkB,IAAkB,EAAI,CAChFF,EAAQ,QAAQG,GAAiCf,EAAQa,CAAkB,CAAC,EAC5E,MACF,CACA,IAAMG,EAAYzB,EAAYY,EAAiB,EAAI,CAAC,EAC9Cc,EAAW1B,EAAYY,EAAiB,EAAI,CAAC,EAC7Ce,EAAelB,EAAO,CAAC,EAAE,KAAK,CAAC,EAC/BmB,EAAcnB,EAAO,CAAC,EAAE,KAAK,CAAC,EAE9BoB,EAAYjB,EAAiBa,EAAYC,EAAWrB,EACpDyB,EAAYlB,EAAiBP,EAAcoB,EAAYC,EACvDK,EAAWJ,EAAeC,EAAcL,EAExCS,EAAgE,GAIhEC,EAAoBZ,EAAQ,iBAAiB,IAC/CA,EAAQ,QACJa,GAA2BzB,EAAO,CAAC,EAAGtC,EAAmB,EACzD,CAAC,OAAQ,CAAC,CAAC,EAAG,QAAS,CAACqC,EAAW,SAAW,GAAK,EAAE,CAAC,CAAC,EAAE,CAAC,EAC9DA,EAAW,UAAY,CAACa,EAAQ,iBAAiB,KACnDA,EAAQ,iBAAiB,GAAKY,GAIhC,IAAME,EAAsB,CAAC1B,EAAO,CAAC,EAAGwB,CAAgB,EAClDG,EAAU3B,EAAO,SAAW,EAC9B2B,IACE,CAACxB,GAAkBH,EAAO,CAAC,EAAE,KAAK,SAAW,EAC/C0B,EAAoB,KAAK1B,EAAO,CAAC,EAAE,QAAQ,CAACA,EAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAG,CAAC,CAAC,CAAC,EAErE0B,EAAoB,KAAK1B,EAAO,CAAC,CAAC,GAKtCY,EAAQ,QACJgB,GACIF,EAAqBb,EAAoBtB,EAAa6B,EAAWC,EAAWC,EAAUK,EACtFJ,CAAyB,EAC7B,CAAC,OAAQG,CAAmB,CAAC,CACnC,EAEE9D,GAAkB,CAACgD,EAAyBb,IAA8C,CAE9F,IAAMV,EAAgBU,EAAW,SAAW,OAEtCC,EAAS,CACbY,EAAQ,OAAO,CAAC,EAAE,QACdvB,EAEI,CAACuB,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,EAEnF,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,EAE5FA,EAAQ,OAAO,CAAC,EAAE,QAAQ,CAACA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,EAAG,EAAGA,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,CAAC,CAChH,EACIA,EAAQ,OAAO,SAAW,GAC5BZ,EAAO,KAAKY,EAAQ,OAAO,CAAC,CAAC,EAE/B,IAAI3B,EAAcc,EAAW,aACzBd,EAAY,SAAW,GAAKA,EAAY,CAAC,IAAM,KACjDA,EAAc,CAAC2B,EAAQ,OAAO,CAAC,EAAE,KAAK,CAAC,CAAC,GAE1C,IAAI1B,EAAYa,EAAW,WACvBb,EAAU,SAAW,GAAKA,EAAU,CAAC,IAAM,KAC7CA,EAAY,CAAC,CAAC,GAEhB,IAAIE,EAAUW,EAAW,SACrBX,EAAQ,SAAW,GAAKA,EAAQ,CAAC,IAAM,KACzCA,EAAU,CAAC,CAAC,GAEd,IAAIR,EAAOmB,EAAW,KAClBnB,EAAK,SAAW,IAClBA,EAAO,CAAC,EAAG,CAAC,GAEdA,EAAO,CAAC,EAAGA,EAAK,CAAC,EAAG,EAAGA,EAAK,CAAC,CAAC,EAC9BQ,EAAU,CAAC,CAAC,EAAE,OAAOA,CAAO,EAC5BF,EAAY,CAAC,CAAC,EAAE,OAAOA,CAAS,EAChCD,EAAc,CAAC,CAAC,EAAE,OAAOA,CAAW,EACpC,IAAM4B,EACFtD,GAAmC,CAAC,GAAGwC,EAAY,KAAAnB,EAAM,QAAAQ,EAAS,UAAAF,EAAW,YAAAD,CAAW,EAAGe,CAAM,EACrGY,EAAQ,QAAQG,GACZ
f,EAAQa,EACRtB,GAAeF,EAAgB,CAACE,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,EAC/C,CAACA,EAAY,CAAC,EAAGA,EAAY,CAAC,EAAGA,EAAY,CAAC,CAAC,CAAC,CAAC,CACtF,EAEa1B,GAAgB,CAAC+C,EAAyBb,IAA8C,CACnGtC,GAAemD,EAAQ,OAAQb,CAAU,EACrCa,EAAQ,OAAO,CAAC,EAAE,KAAK,SAAW,EACpChD,GAAgBgD,EAASb,CAAU,EAEnCpC,GAAgBiD,EAASA,EAAQ,OAAQb,CAAU,CAEvD,ICpTA,IAgBM8B,GAkDOC,GAOAC,GAzEbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAOMR,GACF,CAACS,EAAmBC,EAA+BC,EAAuBC,IACvD,CACb,IAAMC,EAAaC,EAAU,KAAKJ,CAAU,EACtCK,EAAOL,EAAW,OAClBM,EAAQC,EAAc,QAASR,EAAWM,CAAI,EAC9CG,EAASC,GAAe,SAAUV,EAAWM,CAAI,EACjDK,EAAYT,EAAU,WAAa,EAAiBA,EAAU,cAAc,EAAE,CAAC,EAC3B,OAAOA,EAAU,iBAAiB,EAAE,CAAC,CAAC,EAC1FU,EAAOP,EAAU,cAAcM,EAAWL,CAAI,EAC9CO,EAAmBC,GAA+B,CACtD,IAAMC,EAAQ,QAAQR,EAAM,WAAW,eAAgB,eAAe,CAAC,KACjES,EAAMC,GAAa,uBAAwB,gBAAiBX,CAAI,EAChEY,EAAaf,EAAW,QAAUY,GAASZ,EAAW,UAAY,OAAS,IAAM,IACjFgB,EAAahB,EAAW,QAAUa,EAAMD,GAASZ,EAAW,UAAY,GAAK,QACnF,MAAO;AAAA,kBAEHW,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBP,EAAOE,CAAM,CAAC;AAAA,kBAClCK,EAAa,UAAU,CAAC;AAAA,oBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,uCACtDL,EAAO,gBAAgB,YAAY,CAAC;AAAA,8BAC7CA,EAAO,KAAK,KAAK;AAAA,sCACTS,CAAU;AAAA,qCACXC,CAAU;AAAA;AAAA,sBAEzBZ,EAAM,WAAW,eAAgB,gBAAiB,QAAQ,CAAC;AAAA,kCAC/CA,EAAM,aAAa,cAAc,CAAC;AAAA;AAAA,oBAEhDE,EAAO,YAAY,aAAc,KAAK,CAAC;AAAA,kBAEjD,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMN,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EACpE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMF,EAAY,SAAUD,CAAS,CAAC,EACjD,cAAe,CAAC,EAAG,KAAK,KAAKI,EAAa,EAAuB,CAAC,EAClE,gBAAiB,CACf,CAAC,QAAuB,KAAMA,CAAU,EAAG,CAAC,QAAuB,KAAMQ,CAAI,EAC7E,GAAGQ,GAA2BnB,EAAYA,CAAU,CACtD,CAEF,GACA,gBAAAY,CACF,CACF,EAGKrB,GAAS,CAAC6B,EAAyBlB,IAAuC,CACrF,IAAMF,EAAaoB,EAAQ,OAAO,CAAC,EAAE,KAC/BrB,EAAYqB,EAAQ,OAAO,CAAC,EAAE,SAC9BT,EAAOS,EAAQ,OAAO,CAAC,EAC7BA,EAAQ,QAAQ9B,GAAwBS,EAAWC,EAAYW,EAAMT,CAAU,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACjG,EAEaV,GAAyBU,GAA0D,CAC9F,IAAMmB,EAAYnB,EAAW,YAAwB,EAC/CoB,EAAUpB,EAAW,UAAsB,EACjD,OAAOqB,GAA4B,CAAC,UAAAF,EAAW,QAAAC,CAAO,CAAC,CACzD,IC7EA,IAoBME,GASAC,GAWAC,GA2DOC,GAKAC,GAxGbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAWMV,GAAkBW,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,gCAAgC,EAElD,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,iCAAiC,CAErD,EAEMV,GAAmB,CAACW,EAAgBC,EAAcC,EAAsBC,IAAkC,CAC9G,IAAMC,EAAc,CAAC,EACrBA,EAAY,KAAK,cAAcD,EAAO,KAAK,OAAO,QAAQD,EAAM,KAAK,OAAO;AAAA,aACjEA,EAAM,KAAK,OAAO,GAAG,EAChC,QAASG,EAAI,EAAGA,EAAIJ,EAAM,EAAEI,EAC1BD,EAAY,KAAKF,EAAM,WAAW,IAAKF,EAAKK,CAAC,EAAG,KAAKA,CAAC,GAAG,CAAC,EAE5D,OAAAD,EAAY,KAAK,YAAY,EACtBA,EAAY,KAAK;AAAA,CAAI,CAC9B,EAEMd,GAAgC,CAACgB,EAAyBC,IAAoD,CAClH,IAAIC,EAAWC,EAAWC,EAAW,EACjCC,EACAX,EACEY,EAAgBL,EAAW,SAAW,OACtCM,EAAYN,EAAW,UACvBO,EAAYP,EAAW,OAAS,MAClCK,GACF,CAACJ,EAAGC,EAAGC,EAAG,CAAC,EAAIJ,EAAY,KAC3BK,EAAQG,EAAY,CAACN,EAAGC,EAAGC,EAAGG,EAAWA,EAAW,EAAKA,GAAa,CAAE,EACpD,CAACL,EAAGC,EAAGC,EAAG,EAAKG,GAAa,EAAIA,EAAWA,CAAS,EACxEb,EAAOc,EAAY,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,IAEzD,CAACN,EAAGC,EAAGC,EAAG,CAAC,EAAI,CAACJ,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,EAAGA,EAAY,KAAK,CAAC,CAAC,EAClGK,EAAQG,EAAY,CAACN,EAAGK,EAAWA,EAAW,EAAKA,GAAa,EAAIJ,EAAGC,CAAC,EACpD,CAACF,EAAG,EAAKK,GAAa,EAAIA,EAAWA,EAAWJ,EAAGC,CAAC,EACxEV,EAAOc,EAAY,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,EAAI,CAAC,EAAG,EAAG,EAAG,EAAG,EAAG,CAAC,GAE3D,IAAMC,EAAsBT,EAAY,QAAQK,CAAK,EAC/CK,EAAoBD,EAAoB,KAAK,OAC7CE,EAAgBX,EAAY,SAE5BY,EAAgBC,EAAc,IAAKF,EAAeD,CAAiB,EACnEI,EAAeC,GAAe,SAAUJ,EAAeD,CAAiB,EAExEM,EAAmBC,GAA+B;AAAA,IACtDA,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBL,EAAeE,CAAY,CAAC;AAAA;AAAA,IAEhG/B,GAAiBW,EAAMgB,EAAm
BE,EAAeE,CAAY,CAAC;AAAA;AAAA,IAEtEG,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,oBAE5DH,EAAa,gBAAgB,YAAY,CAAC;AAAA;AAAA;AAAA,MAGxDA,EAAa,YAAY,aAAcF,EAAc,aAAa,UAAU,CAAC,CAAC;AAAA,KAGlF,MAAO,CACL,KAAM,eACN,YAAa,CAAC,KAAM,GAAGZ,EAAY,IAAI,IAAIC,EAAW,SAAS,IAAIA,EAAW,IAAI,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACjH,WAAaR,GAAW,CACtB,IAAMyB,EAAcZ,EAAgB,CAACJ,EAAGC,EAAII,EAAWH,EAAIG,EAAW,EAAKA,GAAa,CAAE,EACtD,CAACL,EAAG,EAAKK,GAAa,EAAIJ,EAAII,EAAWH,EAAIG,CAAS,EACpFY,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAkBZ,EAAoB,KACtCa,EAAiBF,EAAU,gBAAgBC,EAAiB3B,CAAI,EACtE,MAAO,CACL,QAAS,CAAC,CAAC,KAAMwB,EAAa,SAAUzB,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAK0B,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGI,GAA2BF,EAAiBC,CAAc,CAAC,CAChH,CACF,EACA,gBAAAN,CACF,CACF,EAEa/B,GAAe,CAACuC,EAAyBvB,IAA6C,CACjGnB,GAAe0C,EAAQ,MAAM,EAC7BA,EAAQ,QAAQxC,GAA8BwC,EAAQ,OAAO,CAAC,EAAGvB,CAAU,CAAC,CAC9E,EAEaf,GAA+Be,GACxCwB,GAA4B,CAC1B,UAAWxB,EAAW,UACtB,KAAMA,EAAW,KACjB,OAAQA,EAAW,MACrB,CAAC,IC7GL,IAsBMyB,GAEAC,GACAC,GACAC,GACAC,GAQAC,GAqBAC,GA4HAC,GAEAC,GA+GOC,GAOAC,GA5SbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAaMhB,GACF,qBACEC,GAAc,IAAMD,GAAgB,KACpCE,GAAkB,IAAMD,GAAc,IACtCE,GAAa,IAAMF,GAAc,MAAQA,GACzCG,GAAiB,IAAMD,GAAa,IAQpCE,GAAN,KAAiB,CACf,YAAYY,EAAa,GAAI,CAC3B,KAAK,gBAAkB,IAAI,IAC3B,KAAK,WAAaA,CACpB,CAGA,UAAUC,EAAgBC,EAAe,CACvC,IAAIC,EAAQ,KAAK,gBAAgB,IAAIF,CAAM,EACvCE,IAAU,OACZA,EAAQ,CAACD,CAAK,EAEdC,EAAM,KAAKD,CAAK,EAElB,KAAK,gBAAgB,IAAID,EAAQE,CAAK,CACxC,CAIF,EAEMd,GAAN,KAAqB,CACnB,YAAYe,EAA+CC,EAAkB,CAAlB,cAAAA,EACzD,KAAK,YAAc,GACnB,KAAK,aAAe,IAAI,IACxB,KAAK,IAAM,IAAI,MACf,KAAK,WAAa,CAAC,EAGnB,GAAI,CAACC,EAAKC,CAAG,EAAIF,EAAS,SAAS,IAAI,EAAIA,EAAS,MAAM,KAAM,CAAC,EAAI,CAACA,EAAU,EAAE,EAClF,GAAI,CAACC,EAAI,MAAM,OAAOnB,EAAc,CAAC,EACnC,MAAM,IAAI,MAAM,kBAAkB,EAapC,GAXmBmB,EAAI,MAAM,GAAG,EACrB,QAAQ,CAACE,EAAWN,IAAU,CACvC,IAAMO,EAAOL,EAAOF,CAAK,EAAE,KAAK,MAAM,EACtC,GAAI,CAACM,EAAU,MAAM,OAAOvB,EAAe,CAAC,EAC1C,MAAM,IAAI,MAAM,kBAAkB,EAEpC,IAAMyB,EAAa,KAAK,YAAYF,EAAW,GAAMC,EAAMP,CAAK,EAChE,KAAK,IAAI,KAAKQ,CAAU,CAC1B,CAAC,EAGGH,IAAQ,GAEVA,GAAO,CAAC,GAAG,KAAK,aAAa,QAAQ,CAAC,EAC1B,OAAO,CAAC,CAACI,EAAKC,CAAI,IAAOA,EAAK,QAAU,GAAKD,IAAQ,KAAM,EAC3D,IAAI,CAAC,CAACA,CAAG,IAAMA,CAAG,EAClB,KAAK,EAAE,UAEf,CAACJ,EAAI,MAAM,OAAOvB,EAAW,CAAC,EAChC,MAAM,IAAI,MAAM,aAAa,EAKduB,EAAI,MAAM,OAAOxB,GAAe,GAAG,CAAC,GAC3C,QAASkB,GAAW,CAC9B,GAAIA,IAAW,MACb,KAAK,WAAa,KAAK,WAAW,OAAO,KAAK,YAAY,MACrD,CACL,IAAMW,EAAO,KAAK,aAAa,IAAIX,CAAM,EACzC,GAAIW,IAAS,OACX,MAAM,IAAI,MAAM,oBAAoB,EAEtC,KAAK,WAAW,KAAKA,EAAK,QAAQ,CACpC,CACF,CAAC,EACD,KAAK,IAAM,KAAK,YAAYL,EAAK,GAAO,KAAK,UAAU,CACzD,CAGA,UAAUN,EAAgBY,EAAkBb,EAAoB,CAC9D,IAAIY,EAAO,KAAK,aAAa,IAAIX,CAAM,EACvC,GAAIW,IAAS,OAAW,CACtB,GAAIA,EAAK,WAAaC,GAAYD,EAAK,QAAU,EAC/C,MAAM,IAAI,MAAM,oBAAoB,EAEpCA,EAAK,QACLA,EAAK,aAAa,KAAKZ,CAAU,CAErC,MACEY,EAAO,CAAC,MAAO,EAAG,SAAAC,EAAU,aAAc,CAACb,CAAU,CAAC,EAExD,KAAK,aAAa,IAAIC,EAAQW,CAAI,CACpC,CAGA,YAAYE,EAAcC,EAAkBN,EAAyBP,EAAQ,GAAgB,CAC3F,IAAMc,EAAOP,EAAK,OACdQ,EAAW,GACXC,EAAe,CAAC,EAChBC,EAAU,EAEd,GAAI,CAACL,EAAK,MAAM,OAAO7B,EAAe,CAAC,GAAM,CAAC8B,GAAWD,IAAS,GAChE,MAAM,IAAI,MAAM,kBAAkB,EAEpC,IAAMM,EAAeN,EAAK,MAAM,OAAO/B,GAAe,GAAG,CAAC,EACpD2B,EAAa,IAAItB,GAAWc,CAAK,EAEvC,OAAAkB,GAAc,QAAQ,CAACnB,EAAgBoB,IAAc,CACnD,GAAIpB,IAAW,MAAO,CACpB,GAAIgB,EACF,MAAM,IAAI,MAAM,6CAA6C,EAE/DA,EAAW,GACX,IAAMK,EAAoBN,EAAOI,EAAa,OAAS,EACvD,GAAIE,EAAoB,EACtB,MAAM,IAAI,MAAM,wBAAwB,EAG1C,GADAJ,EAAeT,EAAK,MAAMU,EAASA,EAAUG,CAAiB,EAC1D,KAAK,aACP,GAAI,KAAK,aAAa,SAAWJ,EAAa,QAC1C,KAAK,aAAa,SAAS,IAAMA,EAAa,SAAS,EACzD,MAAM,IAAI,MAAM,8BAA8B,UAEvCH,EACT,KAAK,YAAc,GACnB,KAAK,aAAeG,MAEpB,OAAM
,IAAI,MAAM,uCAAuC,EAGzD,QAASK,EAAI,EAAGA,EAAIL,EAAa,OAAQK,IAAK,CAC5C,IAAMtB,EAAS,OAAO,aAAa,IAAI,WAAW,CAAC,EAAIsB,CAAC,EACxDb,EAAW,UAAUT,EAAQoB,EAAIE,CAAC,EAClC,KAAK,UAAUtB,EAAQQ,EAAKU,GAAS,EAAGjB,CAAK,CAC/C,CACF,MACEQ,EAAW,UAAUT,EAAQoB,GAAK,KAAK,YAAc,KAAK,aAAa,OAAS,EAAI,EAAE,EACtF,KAAK,UAAUpB,EAAQQ,EAAKU,GAAS,EAAGjB,CAAK,CAEjD,CAAC,EACMQ,CACT,CAQF,EAEMpB,GAAakC,GAAyBA,EAAO,OAE7CjC,GACF,CAACkC,EAAuCC,EAAkBC,EACzDC,IAAgD,CAE/C,IAAMC,EADQJ,EAAY,IAAKhB,GAASA,EAAK,MAAM,EAC3B,IAAI,CAACO,EAAMd,IAAU4B,EAAc,QAAQ5B,CAAK,GAAIwB,EAAUV,CAAI,CAAC,EACrFe,EAAaC,EAAU,KAAKJ,CAAW,EACvCK,EAASC,GAAe,SAAUR,EAAUE,EAAY,MAAM,EAC9DO,EACF,CAAC,GAAGR,EAAe,aAAa,KAAK,CAAC,EAAE,OAAQ1B,GAAW,CAAC0B,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,CAAC,EACxGmC,EAAmBC,GAA+B,CACtD,IAAMC,EAAoB,CAAC,EACrBC,EAAW,kBACXC,EAAU,iBACVC,EAAY,eACZC,EAAgC,CAAC,EACjCC,EAAiC,CAAC,EAClCC,EAAiC,CAAC,EAClCC,EAA4B,CAAC,EAC7BC,EAAyBnB,EAAe,aAAa,OAASA,EAAe,IAAI,gBAAgB,KACvGA,EAAe,aAAa,QAAQ,CAACf,EAAMX,IAAW,CACpD,GAAI0B,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,EAAG,CAClD,IAAM8C,GAAcpB,EAAe,IAAI,gBAAgB,IAAI1B,CAAM,IAAI,CAAC,EAClE8C,KAAgB,QAClBpB,EAAe,IAAI,QAAQ,CAACb,EAAMO,KAAM,CACtC,GAAIT,EAAK,aAAa,SAASS,EAAC,EAAG,CACjC,IAAM2B,GAAUlC,EAAK,gBAAgB,IAAIb,CAAM,EAC/C,GAAI+C,KAAY,OACd,MAAM,IAAI,MAAM,sBAAsB,EAExCA,GAAQ,QAAS9C,IAAU,CACzBoC,EAAQ,KAAK,GACTT,EAAUR,EAAC,EAAE,WACT,QAAQA,EAAC,UAAWnB,GAAO+B,EAAO,WAAW,gBAAiBc,EAAW,CAAC,CAAC,EAAE,CACvF,CAAC,CACH,CACF,CAAC,CAEL,MACEpB,EAAe,IAAI,QAAQ,CAACb,GAAMO,IAAM,CACtC,GAAIT,EAAK,aAAa,SAASS,CAAC,EAAG,CACjC,IAAM2B,GAAUlC,GAAK,gBAAgB,IAAIb,CAAM,EAC/C,GAAI+C,KAAY,OACd,MAAM,IAAI,MAAM,sBAAsB,EAExCA,GAAQ,QAAS9C,IAAU,CACzBwC,EAAoB,KAAK,GAAGb,EAAUR,CAAC,EAAE,WAAW,QAAQA,CAAC,UAAWnB,GAAO,GAAGD,CAAM,EAAE,CAAC,EAAE,CAC/F,CAAC,EACD4C,EAAgB,KAAK,WAAWhB,EAAUR,CAAC,EAAE,aAAa,QAAQA,CAAC,SAAS,CAAC,GAAG,CAClF,CACF,CAAC,EACDsB,EAAqB,KACjB,WAAW1C,CAAM,cAAcA,CAAM,eAAeX,GAAUW,CAAM,CAAC,KAAKA,CAAM,OAAO,EAC3F2C,EAAqB,KAAK,GAAG,CAEjC,CAAC,EACD,IAAMK,EAAYH,EACd,CACE,GAAGR,EACH,aAAaT,EAAU,IAAI,CAACqB,EAAU7B,IAAM6B,EAAS,aAAa,QAAQ7B,CAAC,SAAS,CAAC,EAAE,KAAK,KAAK,CAAC,GACpG,EACA,CACE,GAAGiB,EACHE,EACA,GAAGG,EACH,GAAGD,EACHH,EACA,GAAGM,EACHJ,EACA,GAAGG,CACL,EACJ,MAAO;AAAA,cAEHP,EACK,iBAAiBF,EAAgB,IAAKlC,IAAY,CAAC,KAAM,GAAGX,GAAUW,CAAM,CAAC,GAAI,KAAM,KAAK,EAAE,CAAC,EAC/F,gBAAgB,aAAc,KAAK,EACnC,iBAAiB,GAAG4B,EAAWI,CAAM,CAAC;AAAA;AAAA,cAEzCI,EAAa,UAAU,CAAC;AAAA,cACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,kCACrDJ,EAAO,gBAAgB,YAAY,CAAC;AAAA,cACxDJ,EAAU,IAAI,CAACsB,EAAM9B,IAAM,YAAYA,CAAC,YAAYQ,EAAUR,CAAC,EAAE,KAAK,OAAO,GAAG,EAAE,KAAK;AAAA,CAAI,CAAC;AAAA,cAC5F4B,EAAU,KAAK;AAAA,CAAI,CAAC;AAAA,cACpBhB,EAAO,YAAY,aAAc,KAAK,CAAC;AAAA,YAE/C,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMN,EAAe,SAAU,kBAAmBF,EAAY,IAAI,IAAM,MAAM,CAAC,EAC7F,WAAY,IAAM,CAGhB,IAAM2B,EACFjB,EAAgB,OAAQlC,GAAW0B,EAAe,aAAa,IAAI1B,CAAM,CAAC,EACrE,IACIA,IACI,CAAC,QAAuB,KAAM0B,EAAe,aAAa,IAAI1B,CAAM,GAAG,UAAY,CAAC,EAAE,EACvGmD,EAAoB,KAAK,CAAC,QAAuB,KAAMrB,CAAU,CAAC,EAClE,IAAMsB,EACF5B,EAAY,IAAI,CAAChB,EAAM6C,IAAM,CAAC,GAAGC,GAA2B9C,CAAI,CAAC,CAAC,EAC7D,OAAO,CAAC+C,EAAKC,IAAyBD,EAAI,OAAOC,CAAoB,EAAGL,CAAmB,EACpG,OAAAC,EAAgB,KAAK,GAAGE,GAA2B3B,CAAW,CAAC,EACvD,CACN,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKK,EAAa,EAAuB,CAAC,EAClE,gBAAAsB,CACF,CACF,EACA,gBAAAjB,CACF,CACF,EAES5C,GAAS,CAACkE,EAAyBC,IAAuC,CACrF,IAAMhC,EAAiB,IAAItC,GAAeqE,EAAQ,OAAQC,EAAW,QAAQ,EACvE/B,EAAcD,EAAe,WAC7BF,EAAciC,EAAQ,OAAO,IAAI,CAACE,EAAON,IAAMM,EAAM,IAAI,EAC/DF,EAAQ,QAAQnE,GAAwBkC,EAAaiC,EAAQ,OAAO,CAAC,EAAE,SAAU/B,EAAgBC,CAAW,CAAC,CAC/G,EAEanC,GAAyBkE,GAA0D,CAC9F,IAAMtD,EAAYsD,EAAW,SAAoB,QAAQ,OAAQ,EAAE,EACnE,OAAOE,GAA4B,CAAC,SAAAxD,
CAAQ,CAAC,CAC/C,IC/SA,IAUMyD,GAiBAC,GAYAC,GAIAC,GAyDOC,GApGbC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KAEMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,0BAA0B,EAE5C,IAAMC,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAAQ,MAAM,KAAKF,EAAO,CAAC,EAAE,iBAAiB,EAAG,MAAM,EAEzDG,EAAaD,EAAM,OAASD,EAAW,OAAS,EAAIC,EAAM,OAASD,EAAW,OAC9EG,EAAkBH,EAAW,OAASC,EAAM,OAAS,EAAID,EAAW,OAASC,EAAM,OACvF,KAAOC,EAAaD,EAAM,QAAUE,EAAkBH,EAAW,OAAQ,EAAEE,EAAY,EAAEC,EACvF,GAAIF,EAAMC,CAAU,IAAMF,EAAWG,CAAe,GAAKF,EAAMC,CAAU,IAAM,GAC3EF,EAAWG,CAAe,IAAM,EAClC,MAAM,IAAI,MAAM,oDAAoD,CAG1E,EAEMb,GAAmB,CAACc,EAA2BC,IAAwC,CAC3F,IAAMC,EAAOF,EAAO,OAASC,EAAO,OAC9BJ,EAAkB,CAAC,EACzB,QAASM,EAAI,EAAGA,EAAID,EAAM,EAAEC,EAC1BN,EAAM,KAAKG,EAAOG,CAAC,CAAC,EAEtB,QAASA,EAAI,EAAGA,EAAIF,EAAO,OAAQ,EAAEE,EACnCN,EAAM,KAAKI,EAAOE,CAAC,IAAM,EAAIH,EAAOG,EAAID,CAAI,EAAID,EAAOE,CAAC,CAAC,EAE3D,OAAON,CACT,EAEMV,GAAuB,CAACS,EAA+BC,IACxDD,EAAW,OAASC,EAAM,OAAUX,GAAiBU,EAAYC,CAAK,EAAIX,GAAiBW,EAAOD,CAAU,EAG3GR,GAA2BO,GAA+C,CAC9E,IAAMC,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAAQ,MAAM,KAAKF,EAAO,CAAC,EAAE,iBAAiB,EAAG,MAAM,EACvDS,EAAwBjB,GAAqBS,EAAYC,CAAK,EAC9DQ,EAAWV,EAAO,CAAC,EAAE,SACrBW,EAAaD,IAAa,EAAgB,EAAI,EAC9CE,EAAa,KAAK,KAAKC,EAAU,KAAKJ,CAAW,EAAIE,CAAU,EAE/DG,EAAmBC,GAA+B,CACtD,IAAMC,EAAQC,EAAc,QAASP,EAAUT,EAAW,OAAQU,CAAU,EACtEO,EAASC,GAAe,SAAUT,EAAUD,EAAY,OAAQE,CAAU,EAC5ES,EACJ,GAAIV,IAAa,EAAe,CAC9B,IAAMW,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO;AAAA,6BAChDD,CAAC,MAAML,EAAO,gBAAgB,kBAAkBK,CAAC,GAAG,CAAC;AAAA,sBAC5DA,CAAC,MAAMP,EAAM,2BAA2B,gBAAgBO,CAAC,GAAIL,CAAM,CAAC;AAAA,qBACrEK,CAAC,YAAYA,CAAC;AAAA,yBACVA,CAAC,YAAYA,CAAC;AAAA,YAC3BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIR,EAAM,YAAY,QAAQO,CAAC,EAAE,CAAC,aAAaA,CAAC;AAAA,UAEhFH,EAAa;AAAA,0CACuBT,CAAU;AAAA;AAAA,UAE1CU,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,UAClCH,EAAO,YAAY,aAAc,MAAM,CAAC;AAAA,QAE9C,MACEE,EAAa;AAAA,8BACWF,EAAO,gBAAgB,YAAY,CAAC;AAAA,4BACtCF,EAAM,2BAA2B,gBAAiBE,CAAM,CAAC;AAAA,UAC3EA,EAAO,YAAY,aAAcF,EAAM,YAAY,aAAa,CAAC,CAAC;AAAA,SAGxE,MAAO;AAAA,MACLD,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBC,EAAOE,CAAM,CAAC;AAAA,MAC/EH,EAAa,UAAU,CAAC;AAAA,MACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,MACvEK,CAAU,EACd,EAEMK,EACF,CAAC,CAAC,QAAuB,KAAMb,CAAU,EAAG,GAAGc,GAA2BzB,EAAYQ,CAAW,CAAC,EACtG,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAM,GAAGA,EAAY,MAAM,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACxE,gBAAAK,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAML,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKY,EAAa,EAAuB,CAAC,EAClE,gBAAAa,CACF,EACF,CACF,EAEa/B,GAAUiC,GAAkC,CACvDrC,GAAeqC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQlC,GAAwBkC,EAAQ,MAAM,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACxE,ICvGA,IAaMC,GAiDOC,GA9DbC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KACAC,KAIMP,GAA6BQ,GAAqD,CACtF,IAAMC,EAAWD,EAAa,CAAC,EAAE,SAC3BE,EAAaC,EAAU,KAAKH,EAAa,CAAC,EAAE,IAAI,EAChDI,EAAaD,EAAU,KAAKH,EAAa,CAAC,EAAE,IAAI,EAEhDK,EAAUD,EAAa,IAAM,EAC7BE,EAAmBC,GAAuC,CAC9D,IAAMC,EAAIC,EAAc,IAAKR,EAAU,CAAC,CAAC,EAAG,CAAC,EACvCS,EAAOD,EAAc,OAAQR,EAAU,CAAC,CAAC,EAAG,CAAC,EAC7C,EAAIU,GAAe,IAAKV,EAAU,CAAC,CAAC,EAAG,CAAC,EAExCW,EAA8B,CAAC,CAAC,KAAM,kBAAmB,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,CAAC,EAEvGC,EAAqBC,GAAe;AAAA,gBAC9BA,CAAC,oCAAoCA,CAAC;AAAA,gBACtCA,CAAC,MAAMJ,EAAK,YAAY,OAAOI,CAAC,aAAa,CAAC,QAAQA,CAAC,gBAC7DC,EAAoBV,EACtB;AAAA,mBACWK,EAAK,YAAY,uCAAuC,CAAC,IACpE,GAAGG,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC,GAAGA,EAAkB,CAAC,CAAC;AAAA,mBACjFL,EAAE,KAAK,KAAK,gCAE3B,MAAO,GAAGD,EAAa,iBAAiBK,CAAQ,EAAE,iBAAiBJ,EAAGE,EAAM,CAAC,CAAC;AAAA;AAAA,MAEtEM,GAAaC,GAA0BhB,CAAQ,CAAC,CAAC;AAAA;AAAA,MAEvDM,EAAa,
UAAUW,EAAc,CAAC;AAAA,QACpCX,EAAa,sCAAsC,0BAA0B,CAAC;AAAA;AAAA,gBAEtEC,EAAE,YAAY,YAAY,CAAC;AAAA,QACnCO,CAAiB;AAAA;AAAA,QAEjB,EAAE,YAAY,aAAoBI,GAAmB,MAAM,CAAC,CAAC;AAAA,MAEnE,EAEA,MAAO,CACL,KAAM,mBACN,YAAa,CAAC,KAAM,GAAGd,CAAO,GAAI,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EACrE,gBAAAC,EACA,WAAac,IAAY,CACvB,QAAS,CAAC,CAAC,KAAMA,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,gBACI,CAAC,CAAC,QAAuB,KAAM,KAAK,KAAKlB,EAAa,CAAC,CAAC,EAAG,CAAC,QAAuB,KAAME,CAAU,CAAC,EACxG,cAAe,CAAC,EAAG,KAAK,KAAKF,EAAagB,GAAiB,CAAC,CAAC,CAC/D,EACF,CACF,EAEazB,GAAY4B,GAAkC,CACrDA,EAAQ,OAAO,OAAS,GAAKlB,EAAU,KAAKkB,EAAQ,OAAO,CAAC,EAAE,IAAI,IAAM,EACpE5B,GAAS4B,CAAO,EAEtBA,EAAQ,QAAQ7B,GAA0B6B,EAAQ,MAAM,CAAC,CAE7D,ICpEA,IAeMC,GAMAC,GAsGOC,GAGAC,GA9HbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,2BAA2B,CAE/C,EAEMT,GAA0B,CAACS,EAA+BC,IAA8C,CAC5G,IAAMC,EAAaF,EAAO,CAAC,EAAE,KACvBG,EAAeH,EAAO,CAAC,EAAE,KAEzBI,EAAYF,EAAW,OACvBG,EAAOC,EAAU,cAAcL,EAAW,KAAMG,CAAS,EAEzDG,EAAcL,EAAW,MAAM,CAAC,EACtCK,EAAY,OAAOF,EAAM,EAAG,GAAGF,CAAY,EAE3C,IAAMK,EAAeN,EAAWG,CAAI,EAC9BI,EAAaT,EAAO,CAAC,EAAE,WAAa,EAAgB,EAAI,EACxDU,EAAa,KAAK,KAAKJ,EAAU,KAAKC,CAAW,EAAIE,CAAU,EAE/DE,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,OAAsB,KAAMF,CAAY,EACpF,CAAC,QAAuB,KAAMH,CAAI,EAAG,GAAGO,GAA2BZ,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMO,CAAW,CAChH,EAEMM,EAAmBC,GAA+B,CACtD,IAAMC,EAAOC,EAAc,OAAQhB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQS,CAAU,EAClFQ,EAAUD,EAAc,eAAgBhB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACjFkB,EAASC,GAAe,SAAUnB,EAAO,CAAC,EAAE,SAAUO,EAAY,OAAQE,CAAU,EAEpFW,EAAmBC,GAA6B,CACpD,IAAMC,EAAcnB,EAAa,OAC7BoB,EAAU,qBAAqBF,CAAC,OAAOJ,EAAQ,KAAK,OAAO,OAC/D,QAASO,EAAI,EAAGA,EAAIF,EAAaE,IAC/BD,GAAW,GAAGD,EAAc,EAAI,iBAAiBD,CAAC,IAAIG,CAAC,IAAM,iBAAiBH,CAAC,EAAE,MAC7Ed,EAAY,OAAS,EAAI,gBAAgBc,CAAC,oBAAoBG,CAAC,IAAM,gBAAgBH,CAAC,EAAE,IAE9FE,GAAW;AAAA,mBACEF,CAAC,MAAMJ,EAAQ,aAAa,iBAAiBI,CAAC,EAAE,CAAC;AAAA,mBACjDA,CAAC;AAAA,iBACHA,CAAC,SAASA,CAAC;AAAA;AAAA,2BAEDA,CAAC,MAAMN,EAAK,KAAK,OAAO;AAAA,UAE7C,QAASS,EAAI,EAAGC,EAAI,EAAGD,EAAIpB,EAAWoB,IAChCA,IAAMnB,GACRkB,GAAW,GAAGnB,EAAY,EAAI,cAAciB,CAAC,IAAIG,CAAC,IAAM,cAAcH,CAAC,EAAE,aAAaA,CAAC,KACvFI,GAAKH,IAELC,GAAW,GAAGnB,EAAY,EAAI,cAAciB,CAAC,IAAIG,CAAC,IAAM,cAAcH,CAAC,EAAE,MACrEd,EAAY,OAAS,EAAI,gBAAgBc,CAAC,IAAII,CAAC,IAAM,gBAAgBJ,CAAC,EAAE,IAC5EI,KAGJ,OAAOF,CACT,EACIG,EACJ,GAAI1B,EAAO,CAAC,EAAE,WAAa,EAAe,CACxC,IAAM2B,EAAmB,CAACC,EAAgBP,EAAWQ,EAAW,KAAO;AAAA,6BAChDR,CAAC,MAAMH,EAAO,gBAAgB,kBAAkBG,CAAC,GAAG,CAAC;AAAA,YACtED,EAAgBC,CAAC,CAAC;AAAA,sBACRA,CAAC,MAAMN,EAAK,gBAAgB,cAAcM,CAAC,EAAE,CAAC;AAAA,qBAC/CA,CAAC,YAAYA,CAAC;AAAA,yBACVA,CAAC,YAAYA,CAAC;AAAA,YAC3BO,CAAM,IAAIP,CAAC,OAAOQ,CAAQ,IAAId,EAAK,YAAY,QAAQM,CAAC,EAAE,CAAC,aAAaA,CAAC;AAAA,UAE/EK,EAAa;AAAA,0CACuBjB,CAAU;AAAA;AAAA,UAE1CkB,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCA,EAAiB,QAAS,EAAG,KAAK,CAAC;AAAA,UACnCT,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,OAE/C,MACEQ,EAAa;AAAA,4BACSR,EAAO,gBAAgB,YAAY,CAAC;AAAA,QACxDE,EAAgB,EAAE,CAAC;AAAA,oBACPL,EAAK,aAAa,aAAa,CAAC;AAAA,QAC5CG,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,QAG7C,MAAO;AAAA,QAEHJ,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,eAAgB,KAAK,EACrC,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBC,EAAME,EAASC,CAAM,CAAC;AAAA,QAC5CJ,EAAa,UAAU,CAAC;AAAA,UACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,UACzEY,CAAU;AAAA,QAElB,EACA,MAAO,CACL,KAAM,SACN,YAAa,CAAC,KAAMzB,EAAW,SAAU,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EAC5E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAMM,EAAa,SAAUP,EAAO,CAAC,EAAE,QAAQ,CAClD,EACA,cAAe,CAAC,EAAG,KAAK,KAAKU,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF
,GACA,gBAAAE,CACF,CACF,EAEarB,GAAyBS,GAClC6B,GAA4B,CAAC,KAAM7B,EAAW,IAAc,CAAC,EAEpDR,GAAS,CAACsC,EAAyB9B,IAAuC,CACrF,IAAMD,EAAS+B,EAAQ,OACvBzC,GAAeU,CAAM,EACrB+B,EAAQ,QAAQxC,GAAwBwC,EAAQ,OAAQ9B,CAAU,CAAC,CACrE,IClIA,IAeM+B,GAeAC,GA+DOC,GAGAC,GAhGbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAMMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIA,EAAO,CAAC,EAAE,KAAK,OAAS,EAC1B,MAAM,IAAI,MAAM,2DAA2D,EAG7E,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAWA,EAAO,CAAC,EAAE,KAAK,OAC3C,MAAM,IAAI,MAAM;AAAA,4DACwC,CAE5D,EAEMT,GACF,CAACS,EAA+BC,IAAsD,CACpF,IAAMC,EAAaF,EAAO,CAAC,EAAE,KACvBG,EAAsBH,EAAO,CAAC,EAAE,SAChCI,EAAYF,EAAW,OAEvBG,EAAeL,EAAO,CAAC,EAAE,KACzBM,EAAkBN,EAAO,CAAC,EAAE,SAC5BO,EAAOC,EAAU,cAAcP,EAAW,KAAMG,CAAS,EACzDK,EAAeP,EAAWK,CAAI,EAE9BG,EAAcL,EAAa,MAAM,CAAC,EAClCM,EAAaH,EAAU,KAAKE,CAAW,EAEvCE,EAAQC,EAAc,QAASV,EAAqBC,CAAS,EAC7DU,EAAUD,EAAc,eAAgBP,EAAiBD,EAAa,MAAM,EAC5EU,EAASC,GAAe,SAAUb,EAAqBO,EAAY,MAAM,EAGzEO,EAAoC,CACxC,CAAC,QAAuB,KAAMN,CAAU,EAAG,CAAC,OAAsB,KAAMF,CAAY,EACpF,CAAC,QAAuB,KAAMF,CAAI,CACpC,EACA,OAAAU,EAAgB,KAAK,GAAGC,GAA2BhB,EAAYG,EAAcK,CAAW,CAAC,EA4BlF,CACL,KAAM,iBACN,YAAa,CAAC,kBA7B8C,CAAC,OAAQ,MAAM,CA6B5C,EAC/B,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMA,EAAa,SAAUV,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKW,EAAa,EAAuB,CAAC,EAClE,gBAAAM,CACF,GACA,gBA9BuBE,GAA+B;AAAA,QAEpDA,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,eAAgB,KAAK,EACrC,gBAAgB,OAAQ,KAAK,EAC7B,iBAAiBP,EAAOE,EAASC,CAAM,CAAC;AAAA,QAC/CI,EAAa,UAAU,CAAC;AAAA,QACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,4BAErDJ,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,kBAE9CD,EAAQ,YAAY,YAAY,CAAC;AAAA;AAAA;AAAA;AAAA,2BAIxBF,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,eAAgB,gBAAiB,UAAU,CAAC;AAAA,oBACjDA,EAAM,aAAa,cAAc,CAAC;AAAA;AAAA,QAE9CG,EAAO,YAAY,aAAc,OAAO,CAAC;AAAA,IAY3C,CACF,EAESvB,GAAiCS,GAC1CmB,GAA4B,CAAC,KAAMnB,EAAW,IAAc,CAAC,EAEpDR,GAAiB,CAAC4B,EAAyBpB,IAA+C,CACrG,IAAMD,EAASqB,EAAQ,OACvB/B,GAAeU,CAAM,EACrBqB,EAAQ,QAAQ9B,GAAgC8B,EAAQ,OAAQpB,CAAU,CAAC,CAC7E,ICpGA,IAWMqB,GA0BAC,GAwFOC,GAQAC,GArIbC,GAAAC,GAAA,kBAGAC,KAEAC,KAIAC,KAEMR,GAAkBS,GAAwC,CAC9D,GAAI,CAACA,EACH,MAAM,IAAI,MAAM,kBAAkB,EAEpC,GAAIA,EAAO,OAAS,GAAKA,EAAO,OAAS,EACvC,MAAM,IAAI,MAAM,sBAAsB,EAIxC,GAAIA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,EACjD,MAAM,IAAI,MAAM,0BAA0B,EAG5C,GAAKA,EAAO,CAAC,EAAE,WAAaA,EAAO,CAAC,EAAE,UACjCA,EAAO,SAAW,GAAKA,EAAO,CAAC,EAAE,WAAaA,EAAO,CAAC,EAAE,SAC3D,MAAM,IAAI,MAAM,4BAA4B,CAEhD,EASMR,GAAwB,CAACQ,EAA+BC,IAA4C,CACxG,IAAMC,EAASF,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9BG,EAASH,EAAO,CAAC,EAAE,KAAK,MAAM,EAC9B,CAACI,EAAGC,EAAGC,CAAC,EAAIC,GAAS,qBACvBL,EAAQD,EAAW,OAAQE,EAAQF,EAAW,OAAQD,EAAO,SAAW,EAAIA,EAAO,CAAC,EAAE,KAAO,MAAS,EACpGQ,EAAc,CAACJ,EAAGC,CAAC,EACzB,GAAI,CAACG,EACH,MAAM,IAAI,MAAM,qCAAsC,EAExD,IAAMC,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAML,CAAC,EAAG,CAAC,QAAuB,KAAMC,CAAC,EAC5G,CAAC,QAAuB,KAAMC,CAAC,EAAG,CAAC,OAAsB,KAAML,EAAW,KAAK,EAC/E,CAAC,OAAsB,KAAMA,EAAW,IAAI,CAC9C,EACMW,EAAwD,CAAC,OAAQ,MAAM,EACzEZ,EAAO,SAAW,IACpBW,EAAgB,KAAK,GAAGE,GAA2Bb,EAAO,CAAC,EAAE,IAAI,CAAC,EAClEY,EAAkB,KAAK,MAAM,GAE/BD,EAAgB,KAAK,GAAGE,GAA2BL,CAAW,CAAC,EAE/D,IAAMM,EAAmBC,GAA+B,CACtD,IAAIC,EAAO,GACPf,EAAW,QAAUA,EAAW,OAClCe,EAAO,0DACEf,EAAW,QAAU,CAACA,EAAW,OAC1Ce,EAAO,0DACE,CAACf,EAAW,QAAUA,EAAW,OAC1Ce,EAAO,0DACE,CAACf,EAAW,QAAU,CAACA,EAAW,SAC3Ce,EAAO,2DAGT,IAAMC,EAAiBhB,EAAW,QAAU,EAAI,GAAK,2BAC/CiB,EAAIC,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACzDoB,EAAID,EAAc,IAAKnB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACzDqB,EAAWH,EAAE,KAAK,MACpBI,EAAwB,KACtBC,EAAY,CAACL,EAAGE,CAAC,EACnBpB,EAAO,SAAW,IACpBsB,EAAIH,EAAc,IAAKnB,E
AAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAChEuB,EAAU,KAAKD,CAAC,GAElB,IAAME,EAASC,GAAe,SAAUzB,EAAO,CAAC,EAAE,SAAUQ,EAAY,MAAM,EAC9Ee,EAAU,KAAKC,CAAM,EACrB,IAAME,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAAG,CAAC,KAAM,IAAK,KAAM,KAAK,EAC/G,CAAC,KAAM,QAAS,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,KAAK,CAC1D,EACA,MAAO;AAAA,IACPX,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGH,CAAS,CAAC;AAAA;AAAA,IAEtER,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,kBAK9DM,CAAQ;AAAA;AAAA,QAElBL,CAAI;AAAA;AAAA;AAAA,MAGNC,CAAc;AAAA,OACb,IACGK,GAAK,KACA,iBAAiBA,EAAE,2BAA2B,aAAcE,CAAM,CAAC,cACtEH,CAAQ,qBAAqBC,EAAE,YAAY,SAAS,CAAC,IAEpD,IACN,CAAC;AAAA;AAAA,IAGN,EAEA,MAAO,CACL,KAAM,OACN,YAAa,CAAC,KAAM,GAAGrB,EAAW,QAAQ,GAAI,kBAAAW,CAAiB,EAC/D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMJ,EAAa,SAAUR,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKS,EAAa,EAAuB,CAAC,EAClE,gBAAAE,CACF,GACA,gBAAAG,CACF,CACF,EAEarB,GAAuBQ,GAAwD,CAC1F,IAAM0B,EAAS1B,EAAW,OACpB2B,EAAS3B,EAAW,OACpB4B,EAAQ5B,EAAW,MACnB6B,EAAO7B,EAAW,KACxB,MAAO,CAAC,OAAA0B,EAAQ,OAAAC,EAAQ,MAAAC,EAAO,KAAAC,EAAM,SAAU,GAAG7B,EAAW,MAAM,IAAIA,EAAW,MAAM,IAAIA,EAAW,QAAU,CAAC,EAAE,CACtH,EAEaP,GAAO,CAACqC,EAAyB9B,IAAqC,CACjFV,GAAewC,EAAQ,MAAM,EAC7BA,EAAQ,QAAQvC,GAAsBuC,EAAQ,OAAQ9B,CAAU,CAAC,CACnE,ICxIA,IAeM+B,GAwGAC,GAwHAC,GAoDOC,GAnSbC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KAOMR,GACF,CAACS,EAA+BC,IAAoD,CAClF,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAAcD,EACdE,EAAO,EACPC,EAAYC,EAAU,gBAAgBJ,EAAQE,CAAI,EAClDG,EAAWD,EAAU,kBAAkBJ,EAAQE,CAAI,EACnDI,EAAaC,GAAiBF,CAAQ,EACtCG,EAAiBH,EAAWC,EAC5BG,EAAa,CAACT,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGQ,CAAc,EAClDE,EAAwD,CAAC,OAAQ,OAAQ,MAAM,EAC/EC,EACF,CAAC,CAAC,QAAuB,KAAMN,CAAQ,EAAG,CAAC,QAAuB,KAAMG,CAAc,CAAC,EAC3FG,EAAgB,KAAK,GAAGC,GAA2BH,EAAYA,CAAU,CAAC,EAE1E,IAAMI,EAAmBC,GAA+B,CACtD,IAAMC,EAAIC,EAAc,IAAKlB,EAAO,CAAC,EAAE,SAAUW,EAAW,OAAQH,CAAU,EACxEW,EAAQD,EAAc,QAASlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EACjEoB,EAAOF,EAAc,OAAQlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,IAAI,EAC/DqB,EAASC,GAAe,SAAUtB,EAAO,CAAC,EAAE,SAAUW,EAAW,OAAQH,CAAU,EACnFe,EAAY,CAACN,EAAGE,EAAOC,EAAMC,CAAM,EACnCG,EAAWP,EAAE,KAAK,MAClBQ,EAAUjB,IAAe,EAAI,MAAQ,MAAMA,CAAU,QACrDkB,EAAgB,GAEhBC,EAA8B,CAAC,CAAC,KAAM,WAAY,KAAM,KAAK,EAAG,CAAC,KAAM,iBAAkB,KAAM,KAAK,CAAC,EAC3G,MAAO;AAAA;AAAA;AAAA,2CAG4BF,CAAO,KAAKC,CAAa;AAAA,0BAC1CA,CAAa;AAAA,IACnCV,EAAa,iBAAiBW,CAAQ,EAAE,iBAAiB,GAAGJ,CAAS,CAAC;AAAA,IACtEP,EAAa,UAAUU,CAAa,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,oBAOrBD,CAAO;AAAA;AAAA,4BAECA,CAAO,IAAIR,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,qBAahDW,GAAU,qBAAsBpB,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,gBAKhDiB,CAAO;AAAA;AAAA,yBAEEA,CAAO,IAAIR,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC,OAAOQ,CAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,4BAcpDG,GAAU,qBAAsBpB,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,mFAIYP,EAAW,OAAO;AAAA,yCAC5DkB,EAAM,YAAY,SAAS,CAAC;AAAA,6BACxCC,EAAK,YAAY,SAAS,CAAC;AAAA;AAAA,oBAEpCH,EAAE,IAAI,QAAS,UAAW,GAAG,CAAC,MAAMO,CAAQ,IAAIC,CAAO,qBAAqBD,CAAQ,IAC5FC,CAAO;AAAA,QACXJ,EAAO,IAAI,QAAS,UAAW,IAAK,OAAO,CAAC;AAAA;AAAA,IAG9C,EACA,MAAO,CACD,KAAM,wBAEV,YAAa,CAAC,KAAM,GAAGpB,EAAW,OAAO,IAAIO,CAAU,GAAI,kBAAAI,CAAiB,EAC5E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAMT,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAClD,EACA,cAAe,CAAC,EAAGK,CAAS,EAC5B,gBAAAQ,CACF,GACA,gBAAAE,CACF,CACF,EAEEvB,GACF,CAACqC,EAAyBC,EAAmBX,EAAmBC,EAAkBW,EAAWC,EAAWC,EACvGC,IAAoB,CACnB,IAAM1B,EAAaC,GAAiBwB,CAAC,EAC/BE,EAAK,GAGLC,EAAa5B,IAAe,EAAI,QAAU,QAAQA,CAAU,IAC5D6B,EAAc7B,IAAe,EAAI,MAAQ,MAAMA,CAAU,IACzD8B,EAAiB,CAACC,EAA
cC,KAAiB,GAAGJ,CAAU,IAAIG,CAAI,KAAKC,EAAI,IAC/EC,EAAcV,EAAIE,EAAIzB,EACtBkC,EAAS,KAAK,KAAKV,EAAIG,CAAE,EAEzBQ,EAA4D,CAAC,MAAM,EACnEC,EAAwC,CAC5C,CAAC,QAAuB,KAAMF,CAAM,EAAG,CAAC,QAAuB,KAAMV,CAAC,EACtE,CAAC,QAAuB,KAAM,KAAK,MAAMC,EAAIzB,CAAU,CAAC,EACxD,CAAC,QAAuB,KAAM,KAAK,MAAMwB,EAAIC,EAAIzB,CAAU,CAAC,CAC9D,EAEMqC,EAAuB7B,GAA+B,CAC1D,IAAM8B,GAAc5B,EAAc,QAASY,EAAM,SAAUA,EAAM,KAAMtB,CAAU,EACjF,MAAO;AAAA,IACXQ,EAAa,iBAAiB8B,EAAW,CAAC;AAAA,kEACoBV,CAAU;AAAA;AAAA;AAAA;AAAA,IAIxEpB,EAAa,UAAUmB,CAAE,CAAC;AAAA,4CACcA,CAAE;AAAA,+CACCA,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gBAQjCY,GAAW,MAAOvC,CAAU,CAAC;AAAA,uBACtBuC,GAAW,MAAOvC,CAAU,CAAC;AAAA;AAAA,sBAE9B6B,CAAW;AAAA;AAAA;AAAA;AAAA,2BAINC,EAAe,MAAO,YAAY,CAAC;AAAA,IAExD,EAEMU,EAAanB,EAAQ,QACvB,CACE,KAAM,0BACN,YAAa,CAAC,KAAM,GAAGrB,CAAU,GAAI,kBAAmBmC,CAAqB,EAC7E,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM,CAACZ,EAAGE,EAAGE,EAAI,CAAC,EAAG,UAAwB,CAChD,EACA,cAAe,CAAC,EAAGJ,EAAIE,EAAIzB,CAAU,EACrC,gBAAiBoC,CACnB,GACA,gBAAiBC,CACnB,EACA,CAAC,OAAQ,CAACf,CAAK,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,EAEjCjB,EAAoC,CACxC,CAAC,QAAuB,KAAM4B,CAAW,EAAG,CAAC,QAAuB,KAAMT,CAAC,EAC3E,CAAC,QAAuB,KAAM,KAAK,MAAMC,EAAIzB,CAAU,CAAC,EACxD,CAAC,QAAuB,KAAM,KAAK,MAAM2B,EAAKF,EAAIzB,CAAU,CAAC,CAC/D,EACMI,EAAwD,CAAC,OAAQ,OAAQ,MAAM,EAC/EG,EAAmBC,GAA+B,CACtD,IAAMiC,GAAc/B,EAAc,QAASC,EAAM,SAAUA,EAAM,KAAMX,CAAU,EAC3E0C,EAAahC,EAAc,OAAQE,EAAK,SAAUA,EAAK,KAAMZ,CAAU,EAC7E,MAAO;AAAA,2DAC4C4B,CAAU;AAAA,2DACVa,GAAY,KAAK,OAAO;AAAA,0DACzBC,EAAW,KAAK,OAAO;AAAA,kEACfd,CAAU;AAAA;AAAA;AAAA;AAAA,IAIxEpB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,wBAAwB,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,gBAKlE+B,GAAW,MAAOvC,CAAU,CAAC;AAAA,uBACtBuC,GAAW,MAAOvC,CAAU,CAAC;AAAA,mCACjB2B,CAAE;AAAA,gEAC2BA,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,+DAMHD,CAAO;AAAA,qCACjCG,CAAW;AAAA,yBACvBA,CAAW;AAAA;AAAA,2BAETC,EAAe,eAAgB,cAAc,CAAC;AAAA,IAEnE,EACA,OAAOT,EAAQ,QACX,CACE,KAAM,uCAEN,YAAa,CAAC,KAAM,GAAGrB,CAAU,IAAI0B,CAAO,GAAI,kBAAAtB,CAAiB,EACjE,WAAY,KAAO,CACjB,QAAS,CACP,CAAC,KAAM,CAACmB,EAAGE,EAAG,CAAC,EAAG,UAAwB,CAC5C,EACA,cAAe,CAAC,EAAG,KAAK,KAAKQ,EAAc,EAAuB,CAAC,EACnE,gBAAA5B,CACF,GACA,gBAAAE,CACF,EACA,CAAC,OAAQ,CAACiC,EAAY7B,EAAOC,CAAI,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAC3D,EAEE3B,GACF,CAACoC,EAAyB7B,EAA+BC,IAAuC,CAC9F,IAAMC,EAASF,EAAO,CAAC,EAAE,KACnBG,EAAcD,EACdiD,EAAIjD,EAAO,CAAC,EACZkD,EAAIlD,EAAOA,EAAO,OAAS,CAAC,EAC5BmD,EAAI/C,EAAU,kBAAkBJ,EAAQ,CAAC,EAAIkD,EAC7C5C,EAAaC,GAAiB2C,CAAC,EAC/BE,EAAahD,EAAU,KAAKH,CAAW,EAAIK,EAC3CK,EACF,CAAC,CAAC,QAAuB,KAAMwC,CAAC,EAAG,CAAC,QAAuB,KAAM,KAAK,MAAMD,EAAI5C,CAAU,CAAC,CAAC,EAC1FI,EAAwD,CAAC,OAAQ,MAAM,EAEvE2C,EAAoB/D,GAAYqC,EAAS7B,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGmD,EAAGE,EAAGD,EAAGnD,EAAW,OAAO,EACrGc,EAAmBC,GAA+B,CACtD,IAAMQ,EAAWgC,GAA4BxD,EAAO,CAAC,EAAE,QAAQ,EACzDyD,EAAYjD,IAAe,EAAI,QAAU,QAAQA,CAAU,IAC3DkD,EAAgBlD,IAAe,EAAIgB,EAAW,MAAMhB,CAAU,IAAIgB,CAAQ,IAE1EsB,EAAc5B,EAAc,QAASlB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMQ,CAAU,EACnFmD,EAAerC,GAAe,SAAUtB,EAAO,CAAC,EAAE,SAAUG,EAAaK,CAAU,EAEzF,MAAO;AAAA,2DAC4CsC,EAAY,KAAK,OAAO;AAAA,gEACnBW,CAAS;AAAA,kEACPE,EAAa,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,IAIvF3C,EAAa,UAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kDAMsB0C,CAAa,eAAeA,CAAa;AAAA,IAErF,EACA7B,EAAQ,QACJ,CACE,KAAM,4BACN,YAAa,CAAC,KAAM,GAAGrB,CAAU,GAAI,kBAAAI,CAAiB,EACtD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMT,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKsD,EAAa,EAAuB,CAAC,EAClE,gBAAAzC,CACF,GACA,gBAAAE,CACF,EACA,CAAC,OAAQ,CAACf,EAAO,CAAC,EAAGuD,CAAiB,CAAC,CAAC,CAC9C,EAES7D,GAAe,CAACmC,EAAyB5B,IAA6C,CAC7FA,EAAW,SAAW,OACxBR,GAAkCoC,EAASA,EAAQ,OAAQ5B,CAAU,EAErE4B,EAAQ,QAAQtC
,GAA8BsC,EAAQ,OAAQ5B,CAAU,CAAC,CAE7E,ICzSA,IAgBM2D,GAMAC,GA6GOC,GAnIbC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KAQMP,GAAkBQ,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,uCAAuC,CAE3D,EAEMP,GACF,CAACO,EAA+BC,EAAiCC,IAAqC,CACpG,IAAMC,EAAaF,EAAW,WAExBG,EAASJ,EAAO,CAAC,EAAE,KACnBK,EAAQL,EAAO,CAAC,EAChBM,EAAO,CAACH,GAAcH,EAAO,CAAC,EAE9BO,EAAcH,EACdI,EAAOC,EAAU,cAAcR,EAAW,KAAMG,EAAO,MAAM,EAC7DM,EAAYD,EAAU,gBAAgBL,EAAQI,CAAI,EAClDG,EAAWF,EAAU,kBAAkBL,EAAQI,CAAI,EAEnDI,EAAYH,EAAU,KAAKJ,EAAM,IAAI,EACrCQ,EAAWP,EAAOG,EAAU,KAAKH,EAAK,IAAI,EAAI,EACpD,GAAIM,IAAcD,GAAaL,GAAQO,IAAaF,EAClD,MAAM,IAAI,MAAM,+BAA+BA,CAAQ;AAAA;AAAA,2BAEpCC,CAAS,qBAAqBC,CAAQ,EAAE,EAG7D,IAAMC,EAA6B,CAAC,EACpC,QAASC,EAAI,EAAGA,EAAIX,EAAO,OAAQ,EAAEW,EAC/BA,EAAIP,EACNM,EAAiB,KAAKV,EAAOW,CAAC,CAAC,EAE/BD,EAAiB,KAAK,CAAC,EAG3B,IAAME,EAAaC,GAAiBN,CAAQ,EACtCO,EAAwD,CAAC,OAAQ,MAAM,EACvEC,EAAoC,CACxC,CAAC,QAAuB,KAAMT,CAAS,EAAG,CAAC,OAAsB,KAAMC,CAAQ,EAC/E,CAAC,QAAuB,KAAM,KAAK,MAAMA,EAAWK,CAAU,CAAC,EAC/D,CAAC,OAAsB,KAAMf,EAAW,OAAO,CACjD,EACIK,GACFY,EAAkB,KAAK,MAAM,EAE/B,IAAME,EAAoBlB,EAAc,EAClCmB,EAAkBnB,EAAc,EAEhCoB,EAAmBC,GAA+B,CACtD,IAAMC,EAAWC,GAA4BzB,EAAO,CAAC,EAAE,QAAQ,EACzD0B,GAAY,CAChBC,EAAc,IAAK3B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMgB,CAAU,EACjEW,EAAc,QAAStB,EAAM,SAAUA,EAAM,KAAMW,CAAU,CAC/D,EACIV,GACFoB,GAAU,KAAKC,EAAc,OAAQrB,EAAK,SAAUA,EAAK,KAAMU,CAAU,CAAC,EAE5EU,GAAU,KAAKE,GAAe,SAAU5B,EAAO,CAAC,EAAE,SAAUO,EAAaS,CAAU,CAAC,EAChFI,GACFM,GAAU,KAAKE,GAAe,qBAAoCd,CAAgB,CAAC,EAEjFO,GACFK,GAAU,KAAKE,GAAe,mBAAkCd,CAAgB,CAAC,EAGnF,IAAMe,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,YAAa,KAAM,KAAK,EAClE,CAAC,KAAM,uBAAwB,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,CAC5E,EACA,MAAO;AAAA,IACXN,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiB,GAAGH,EAAS,CAAC;AAAA,IACtEH,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,wBAEvDO,GAAW,MAAOd,CAAU,CAAC;AAAA,+BACtBc,GAAW,MAAOd,CAAU,CAAC;AAAA;AAAA;AAAA,oBAGxCe,GAAUP,EAAUR,EAAY,eAAe,CAAC;AAAA;AAAA;AAAA;AAAA,iBAInDgB,GAAU,cAAehB,CAAU,CAAC;AAAA,oCACjBgB,GAAU,qBAAsBhB,CAAU,CAAC,yBACnEb,EAAa,GAAK,eAAe;AAAA;AAAA;AAAA,uBAGtB4B,GAAUP,EAAUR,EAAY,eAAe,CAAC;AAAA,uBAChDe,GAAUP,EAAUR,EAAY,UAAU,CAAC;AAAA,6BACrCU,GAAU,CAAC,EAAE,KAAK,KAAK,cAAcvB,EAAa,GAAK,QAAQ;AAAA,UAClFG,EAAO,KAAKyB,GAAUP,EAAUR,EAAY,SAAS,CAAC,GAAK,EAAE;AAAA;AAAA;AAAA;AAAA,MAIjEI,EAAoB,sCAAwC,EAAE;AAAA,MAC9DC,EAAkB,2CAA6C,EAAE;AAAA,IAEjE,EACMY,EAAU,CAAC,CAAC,KAAM1B,EAAa,SAAUP,EAAO,CAAC,EAAE,QAAQ,CAAC,EAClE,OAAIoB,GACFa,EAAQ,KAAK,CAAC,KAAMnB,EAAkB,UAAwB,CAAC,EAE7DO,GACFY,EAAQ,KAAK,CAAC,KAAMnB,EAAkB,UAAwB,CAAC,EAG1D,CACL,KAAM,qBACN,YAAa,CAAC,KAAM,GAAGE,CAAU,IAAId,CAAW,IAAIC,CAAU,GAAI,kBAAAe,CAAiB,EACnF,WAAY,KACP,CAAC,QAAAe,EAAS,cAAe,CAAC,EAAG,KAAK,KAAKvB,EAAY,EAAuB,CAAC,EAAG,gBAAAS,CAAe,GAClG,gBAAAG,CACF,CACF,EAES5B,GAAY,CAACwC,EAAyBjC,IAA0C,CAC3FT,GAAe0C,EAAQ,MAAM,EAC7BA,EAAQ,QAAQzC,GAA2ByC,EAAQ,OAAQjC,EAAYiC,EAAQ,WAAW,CAAC,CAC7F,ICtIA,IAoBMC,GA+BOC,GAmPAC,GAQAC,GA9SbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAWMT,GAAiB,CAACU,EAA+BC,IAA4C,CACjG,GAAID,EAAO,OAAS,GAAKA,EAAO,OAAS,EACvC,MAAM,IAAI,MAAM,oCAAoC,EAEtD,IAAME,EAAIF,EAAO,CAAC,EACZG,EAAQD,EAAE,KAAK,OACrB,GAAIA,EAAE,KAAKC,EAAQ,CAAC,IAAMF,EAAW,EACnC,MAAM,IAAI,MAAM,wDAAwD,EAE1E,IAAMG,EAAgB,KAAK,OAAOH,EAAW,EAAIA,EAAW,UAAY,GAAKA,EAAW,SAAS,EAC3FI,EAAWJ,EAAW,UAAY,EAAIA,EAAW,KACjDK,EAAIN,EAAO,CAAC,EAClB,GAAI,CAACO,EAAU,SAASD,EAAE,KAAM,CAACL,EAAW,EAAGG,EAAeC,CAAQ,CAAC,EACrE,MAAM,IAAI,MAAM,6EAA6E,EAG/F,IAAMG,EADSR,EAAO,CAAC,EACI,KAC3B,GAAIO,EAAU,KAAKC,CAAW,IAAMP,EAAW,EAAIG,EACjD,MAAM,IAAI,MAAM,0BAA0B,EAE5C,GAAIJ,EAAO,SAAW,EAAG,CAEvB,IAAMS,EADaT,EAAO,CAAC,EACQ,KAC7BU,EACFT,EAAW,KAAO,EAAKA,EAAW,EAAIG,EAAiBH,EAAW
,EAAI,KAAK,OAAOG,EAAgB,GAAK,CAAC,EAC5G,GAAIG,EAAU,KAAKE,CAAe,IAAMC,EACtC,MAAM,IAAI,MAAM,8BAA8B,CAElD,CACF,EAEanB,GACT,CAACS,EAA+BC,EAC/BU,EAAoDC,IAAwD,CAC3G,IAAMC,EAAab,EAAO,CAAC,EAAE,KACvBG,EAAQU,EAAW,OACnBT,EAAgB,KAAK,OAAOH,EAAW,EAAIA,EAAW,UAAY,GAAKA,EAAW,SAAS,EAC3Fa,EAAYD,EAAWV,EAAQ,CAAC,EAChCY,EAAWd,EAAW,EACtBe,EAAYf,EAAW,EACvBgB,EAAYJ,EAAW,MAAM,EAAGV,EAAQ,CAAC,EACzCe,EAAYX,EAAU,KAAKU,CAAS,EAEpCE,EADWlB,EAAW,UAAY,EAAIA,EAAW,KACpB,EAC7BmB,EAAWpB,EAAO,CAAC,EAAE,SACrBqB,EAAeC,GAAiBR,CAAS,EACzCS,EAAcD,GAAiBrB,EAAW,CAAC,EAC3CuB,EAAcF,GAAiBH,CAAe,EAC9CM,EAAcC,GAAqBN,CAAQ,EAC3CO,EAAsBb,EAAYV,EAAgBqB,EAClDG,EAAwB,KAAK,MAAMhB,EAAiCe,CAAmB,EACvFE,EAA0BzB,GAAiBO,EAAyB,CAAC,GAAKiB,EAAwB,EAClGE,EAAc,CAACD,GAA2BD,GAAyB,EAAKN,GAAiBN,CAAS,EAClGY,GAAyB,GAAMN,GAAiBN,CAAS,GAAK,EAAU,EACA,EACxEe,GAAcd,EAAU,OAAO,CAACH,EAAWE,CAAS,CAAC,EACrDgB,EAAazB,EAAU,KAAKwB,EAAW,EAAID,EAAaT,EAExDY,GAAoCJ,EACtC,CAAC,EACD,CAAC,CAAC,QAAuB,KAAMG,CAAU,EAAG,CAAC,QAAuB,KAAM/B,EAAW,SAAS,CAAC,EAC7FiC,GAAiB,CAAChB,EAAWJ,EAAWC,EAAWQ,CAAW,EAC9DY,GAAS5B,EAAU,aAAaP,EAAO,CAAC,EAAE,IAAI,EAAE,MAAM,EAC5DmC,GAAO,OAAO,GAAI,EAAGhB,EAAkBK,CAAW,EAClDS,GAAgB,KAAK,GAAGG,GAA2BF,EAAc,CAAC,EAClED,GAAgB,KAAK,GAAGG,GAA2BD,EAAM,CAAC,EAC1DF,GAAgB,KAAK,GAAGG,GAA2BpC,EAAO,CAAC,EAAE,IAAI,CAAC,EAC9DA,EAAO,SAAW,GACpBiC,GAAgB,KAAK,GAAGG,GAA2B7B,EAAU,aAAaP,EAAO,CAAC,EAAE,IAAI,CAAC,CAAC,EAE5F,IAAMqC,EAAkB,CAACnB,EAAWJ,EAAWE,EAAYc,CAAU,EACrEG,GAAgB,KAAK,GAAGG,GAA2BC,CAAe,CAAC,EACnE,IAAMC,GAAmBC,IAA+B,CACtD,IAAMC,GAAYN,GAAe,OAC3BhC,GAAIuC,EAAc,IAAKzC,EAAO,CAAC,EAAE,SAAUwC,GAAWjB,CAAW,EACjEjB,GAAImC,EAAc,OAAsBN,GAAO,OAAQX,CAAW,EAClEkB,GAASD,EAAc,SAAUzC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC1E2C,GAAiB,CAACzC,GAAGI,GAAGoC,EAAM,EAC9BE,GACF5C,EAAO,SAAW,EAAIyC,EAAc,iBAAgCzC,EAAO,CAAC,EAAE,KAAK,MAAM,EAAI,OAC7F4C,IACFD,GAAe,KAAKC,EAAU,EAEhC,IAAMC,GAAaR,EAAgB,OAC7BS,GAASC,GAAe,SAAU/C,EAAO,CAAC,EAAE,SAAU6C,GAAYf,CAAU,EAC5EkB,GAA8B,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,CAAC,EACpG5B,GAAW6B,GAA4BjD,EAAO,CAAC,EAAE,QAAQ,EAEzDkD,IAAe,IAAM,CACzB,OAAQ3B,EAAa,CACnB,IAAK,GACH,MAAO,SAASH,EAAQ,OAC1B,IAAK,GACH,MAAO,UAAUA,EAAQ,IAC3B,IAAK,GACH,MAAO,UAAUA,EAAQ,IAC3B,QACE,MAAM,IAAI,MAAM,GAAGG,CAAW,8BAA8B,CAChE,CACF,GAAG,EAEG4B,GAAkB;AAAA,yCACShC,CAAe,aAAaK,CAAW;AAAA,YACpElB,GAAE,WAAW,YAAa,IAAK,MAAM,CAAC;AAAA,yBACzBA,GAAE,aAAa,WAAW,CAAC;AAAA,qCACfkB,CAAW;AAAA,iCACfA,IAAgB,EAAI,SAAW,kBAAkB;AAAA;AAAA;AAAA;AAAA,uCAI3C0B,EAAW,IACtC,MAAM,KAAK,CAAC,OAAQ,CAAC,EAAG,CAACE,GAAGC,KAAM,GAAGjC,EAAQ,kBAAkBiC,EAAC,OAAOjC,EAAQ,kBAAkBiC,EAAC,IAAI,EACjG,KAAK,IAAI,CAAC;AAAA,0CACe,IAC5B9B,IAAgB,EACX,GAAG2B,EAAW,IACjB,MAAM,KAAK,CAAC,OAAQ,CAAC,EAAG,CAACE,GAAGC,KAAM,uBAAuBA,EAAC,yBAAyB,EAAE,KAAK,IAAI,CAAC,KAE5F,yBAAyBH,EAAW,IAAI,MAAM,CAAC,EAAE,KAAK,YAAY,EAAE,KAAK,GAAG,CAAC,eAErF,CAAC;AAAA;AAAA,uCAE2BrB,EAA0Bf,EAAYO,CAAY;AAAA,gBACzEnB,GAAE,WAAW,YAAasC,GAAY,EAAGX,EAA0B,IAAM,SAASR,CAAY,MAAM,CAAC;AAAA,gBACrGnB,GAAE,WAAW,YAAasC,GAAY,EAAG,aAAa,CAAC;AAAA,mCACpCtC,GAAE,gBAAgB,WAAW,CAAC;AAAA,4BACrCgD,EAAW;AAAA,yCACE,EAAI3B,CAAW;AAAA,8BAC1BrB,GAAE,YAAY,cAAc,CAAC;AAAA;AAAA;AAAA,gBAG3C2B,EAA0B,gDAAkD,kBAAkB,GAClGC,EAAa,EAAI,MAAQ,EAAE,OAC3B,MACK,KACG,CAAC,OAAQ,EAAIP,CAAW,EACxB,CAAC6B,GAAGC,KAAM,GACN9B,IAAgB,EAAI,UAAU8B,EAAC,4BAA4BA,EAAC,IACxC,cAAcA,EAAC,2BAA2BA,EAAC,IAAI,EAAE,EAC5E,KAAK,KAAK,CAAC;AAAA;AAAA,6BAEC,EAAI9B,CAAW;AAAA;AAAA,WAG9B+B,GAAuBV,GAAa;AAAA;AAAA;AAAA;AAAA;AAAA,gCAKlBA,GAAW,YAAY,kBAAkB,CAAC;AAAA,aAExB,GAE1C,OAAOf,EAA0B;AAAA,iDACQiB,GAAO,KAAK,KAAK,KAAKhC,EAAYV,CAAa;AAAA,UACtFmC,GAAa,iBAAiB,GAAGI,GAAgBG,EAAM,CAAC;AAAA,UACxDP,GAAa,UAAU,CACvBnC,EAAe,EAAG,CACpB,CAAC,CAAC;AAAA,2BACi
BF,GAAE,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,YAI7BA,GAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA;AAAA,qCAEd4B,CAAU;AAAA,sDACOA,CAAU;AAAA,gBAEnBc,GAAa;AAAA,mDACPxC,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,yCAMvBwC,GAAW,YAAY,uBAAuB,CAAC,8BAC9B,EAAE;AAAA,6BAC/BtC,GAAE,KAAK,OAAO;AAAA,cAC7BA,GAAE,WAAW,YAAa,IAAK,6BAA6B,CAAC;AAAA;AAAA,+DAEZF,CAAa;AAAA,0BAClDsC,GAAO,YAAY,cAAc,CAAC;AAAA;AAAA,+BAE7BtB,EAAQ,IAAIwB,GAAa,2BAA6B,CAAG;AAAA,cAC1EtC,GAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA,6CACRL,EAAW,UAAYsB,CAAW;AAAA,yDACtBT,CAAS;AAAA,cACpDqC,EAAe;AAAA;AAAA;AAAA;AAAA,kCAIKL,GAAO,KAAK,OAAO;AAAA,cACvCA,GAAO,WAAW,iBAAkB,IAAK,OAAO,CAAC;AAAA,cACjDA,GAAO,WAAW,iBAAkBD,GAAa,EAAG,KAAK,CAAC;AAAA,cAC1DC,GAAO,WAAW,iBAAkBD,GAAa,EAAG,GAAG,CAAC;AAAA,kCACpCC,GAAO,gBAAgB,gBAAgB,CAAC;AAAA,wCAClChC,CAAS;AAAA,kCACfgC,GAAO,KAAK,KAAK,MAAMA,GAAO,KAAK,KAAK;AAAA;AAAA,0CAEhC1C,CAAa;AAAA;AAAA,6CAEVU,CAAS;AAAA;AAAA,gBAEtCgC,GAAO,YAAY,gBAAiB,cAAc,CAAC;AAAA,iCAClC9B,EAAYc,CAAU;AAAA;AAAA;AAAA,WAId;AAAA,UAC/BS,GAAa,iBAAiBS,EAAQ,EAAE,iBAAiB,GAAGL,GAAgBG,EAAM,CAAC;AAAA,UACnFP,GAAa,UAAU,CAAC;AAAA,YACtBA,GAAa,sCAAsC,sBAAsB,CAAC;AAAA,qCACjDO,GAAO,KAAK,KAAK,KAAKzB,CAAY;AAAA,iCACtCyB,GAAO,gBAAgB,YAAY,CAAC;AAAA,sBAC/CA,GAAO,WAAW,iBAAkBD,GAAa,CAAC,CAAC;AAAA,sBACnDC,GAAO,WAAW,iBAAkBD,GAAa,CAAC,CAAC;AAAA,2BAC9C3C,GAAE,KAAK,OAAO;AAAA;AAAA;AAAA;AAAA,YAKI0C,GAAa;AAAA,8CACZd,CAAU,QAAQ1B,CAAa;AAAA;AAAA,uCAEtCwC,GAAW,YAAY,kBAAkB,CAAC;AAAA,yEAEvB,EAAE;AAAA,oCACxBxC,EAAgB0B,CAAU;AAAA,2BACnCxB,GAAE,KAAK,OAAO;AAAA,qCACJwB,CAAU;AAAA,cACjCxB,GAAE,WAAW,YAAa,IAAK,SAASwB,CAAU,MAAM,CAAC;AAAA;AAAA,+CAExB1B,CAAa;AAAA;AAAA,4BAEhCsC,GAAO,YAAY,aAAa,CAAC;AAAA;AAAA,iCAE5BtB,EAAQ,IAAIwB,GAAa,qDAAuD,CAAG;AAAA,gBACpGtC,GAAE,WAAW,YAAa,IAAK,OAAO,CAAC;AAAA;AAAA,gBAEvC6C,EAAe;AAAA;AAAA,gBAEfG,EAAoB;AAAA,sDACkB/B,CAAW;AAAA;AAAA;AAAA,cAIpBqB,GAAa;AAAA,kBACxCU,EAAoB;AAAA,iBAEoB,EAAE;AAAA;AAAA,wCAEpBjC,CAAY;AAAA,gBACpCyB,GAAO,WAAW,iBAAkBD,GAAa,EAAG,GAAGxB,CAAY,YAAY,CAAC;AAAA,gBAChFyB,GAAO,aAAa,iBAAkB,kBAAkB,CAAC;AAAA;AAAA,UAGnE,EACA,MAAO,CACL,KAAMjB,EAA0B,uBAAyB,cACzD,YAAa,CACX,KAAM,GAAG5B,EAAW,QAAQ,IAAIa,CAAS,IAAIM,CAAQ,IAAIpB,EAAO,MAAM,GACtE,kBAAmB,MAAMA,EAAO,MAAM,EAAE,KAAK,MAAM,CACrD,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM+B,GAAa,SAAAX,CAAQ,CAAC,EACvC,KAAMS,EAA0B,uBAAyB,cACzD,cAAeA,EAA0B,CAAC,EAAG,EAAG,EAAG,KAAK,KAAKb,EAAYc,CAAU,EAAG,EAAGZ,CAAS,EACzD,CAAC,EAAG,KAAK,KAAKc,EAAa,EAAuB,CAAC,EAC5F,gBAAAC,EACF,GACA,gBAAAK,EACF,CACF,EAES9C,GAAc,CAAC+D,EAAyBtD,IAA4C,CAC/FX,GAAeiE,EAAQ,OAAQtD,CAAU,EACzC,IAAMU,EAAqD4C,EAAQ,4BAA4B,EACzF3C,EAAiC2C,EAAQ,kCAAkC,EACjFA,EAAQ,QAAQhE,GACZgE,EAAQ,OAAQtD,EAAYU,EAA0BC,CAA8B,CAAC,CAC3F,EAEanB,GAA8BQ,GACvCuD,GAA4BvD,CAAsE,IC/StG,IAaMwD,GAGAC,GAiOOC,GAGPC,GAEAC,GA0CAC,GA2BOC,GA3TbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KACAC,KAEAC,KACAC,KACAC,KAEMf,GAAW,CAACgB,EAA+BC,IAC5CD,EAAO,OAASC,GAAOD,EAAOC,CAAC,EAAE,KAAK,OAAS,GAAOC,EAAU,KAAKF,EAAOC,CAAC,EAAE,IAAI,EAAK,EAAID,EAAOC,CAAC,EAAI,OAEvGhB,GAAiB,CAACe,EAA+BG,IAAoD,CACzG,IAAMC,EAAQJ,EAAO,CAAC,EAChBK,EAAMrB,GAASgB,EAAQ,CAAC,EACxBM,EAAQtB,GAASgB,EAAQ,CAAC,EAC1BO,EAAOvB,GAASgB,EAAQ,CAAC,EACzBQ,EAAiBxB,GAASgB,EAAQ,CAAC,EACnCS,EAAuBzB,GAASgB,EAAQ,CAAC,EACzCU,EAAU1B,GAASgB,EAAQ,CAAC,EAC5BW,EAAY3B,GAASgB,EAAQ,CAAC,EAoCpC,GAAII,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,mDAAmD,EAGrE,IAAMQ,EAAe,GACfC,EAAYT,EAAM,KAAK,CAAC,EACxBU,EAAiBV,EAAM,KAAK,CAAC,EAC7BW,EAAaX,EAAM,KAAK,SAAW,EAAKQ,EAAeR,EAAM,KAAK,CAAC,EAAI,EAAIA,EAAM,KAAK,CAAC,EAChDD,EAAW,SAAWC,EAAM,KAAK,CAAC,EAC3EY,EAAmBF,EAEnBG,EAAqB,EACrBC,EAAoB,EAClBC,EAAW,KAAK,MAAMJ,EAAaZ,EAAW,QAAQ,EAC5D,GAAIO,GAAWC,EAAW,CACxB,GAAID,EAAQ,KAAK,SAAW,EAC1B,MAAM,IAAI,MAAM,mDAAmD,EAErE,GAAIA,EAAQ,KAA
K,CAAC,IAAMG,GAAaH,EAAQ,KAAK,CAAC,IAAMP,EAAW,UAAYO,EAAQ,KAAK,CAAC,IAAMS,EAClG,MAAM,IAAI,MAAM,iFAAiF,EAEnG,GAAIR,EAAU,KAAK,CAAC,IAAME,GAAaF,EAAU,KAAK,CAAC,IAAMR,EAAW,UACpEQ,EAAU,KAAK,CAAC,IAAMQ,EACxB,MAAM,IAAI,MAAM,mFAAmF,EAErG,GAAIT,EAAQ,KAAK,CAAC,IAAMC,EAAU,KAAK,CAAC,EACtC,MAAM,IAAI,MAAM,gFAAgF,EAElG,GAAIA,EAAU,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,qDAAqD,EAEvEM,EAAqBP,EAAQ,KAAK,CAAC,EACnCQ,EAAoBR,EAAQ,KAAK,CAAC,CACpC,SAAWA,GAAWC,EACpB,MAAM,IAAI,MAAM,wEAAwE,EAG1F,IAAIS,EACJ,GAAIf,EAAK,CACP,GAAID,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kEAAkE,EAEpF,GAAIC,EAAI,KAAK,OAAS,GAAKA,EAAI,KAAK,OAAS,EAC3C,MAAM,IAAI,MAAM,uDAAuD,EAEzE,GAAID,EAAM,KAAK,CAAC,IAAMC,EAAI,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,4DAA4D,EAG9E,GAAIA,EAAI,KAAK,SAAW,EAAG,CACzB,GAAIA,EAAI,KAAK,CAAC,IAAMD,EAAM,KAAK,CAAC,EAC9B,MAAM,IAAI,MAAM,6DAA6D,EAE/EgB,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,SAAWA,EAAI,KAAK,SAAW,EAAG,CAChC,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAM,GAAKA,EAAI,KAAK,CAAC,IAAMc,EAC9E,MAAM,IAAI,MAAM,4FAA4F,EAE9G,GAAIb,EACF,MAAM,IAAI,MAAM,yDAAyD,EAE3Ec,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,KAAO,CACL,GAAIA,EAAI,KAAK,CAAC,IAAMF,EAAW,UAAYE,EAAI,KAAK,CAAC,IAAMc,EACzD,MAAM,IAAI,MAAM,wFAAwF,EAG1GC,EAAY,EACZJ,EAAmBX,EAAI,KAAK,CAAC,CAC/B,CACF,KAAO,CACL,GAAID,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,uEAAuE,EAEzF,GAAIA,EAAM,KAAK,SAAW,IAAMA,EAAM,KAAK,CAAC,IAAMD,EAAW,UAAYC,EAAM,KAAK,CAAC,IAAM,GACzF,MAAM,IAAI,MAAM,8FAA8F,EAGhHgB,EAAY,CACd,CAEA,GAAIb,EAAM,CACR,GAAIA,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,8CAA8C,EAGhE,GAAID,GACEF,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,CAAC,IAAM,EAC/C,MAAM,IAAI,MAAM,oCAAoC,CAG1D,CAEA,IAAIiB,IACJ,GAAIb,EAAgB,CAClBa,EAAW,EACX,IAAMC,EAAWd,EAAe,KAUhC,MATIc,EAAS,SAAW,EAClBA,EAAS,CAAC,IAAMT,EAClBQ,EAAW,EACFC,EAAS,CAAC,IAAM,EAAIT,EAAY,IACzCQ,EAAW,GAEJC,EAAS,SAAW,GAAKA,EAAS,CAAC,IAAMT,GAAaS,EAAS,CAAC,IAAMN,IAC/EK,EAAW,GAETA,IAAa,EACT,IAAI,MAAM,0FAA0F,EAEtG,IAAI,MAAM,oBAAoB,CACtC,CAEA,IAAIE,EAAe,GACfC,EAAcT,EAClB,GAAIT,EAAO,CACT,GAAIA,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,qDAAqD,EAGvE,GAAIF,EAAM,KAAK,CAAC,IAAME,EAAM,KAAK,CAAC,EAChC,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIA,EAAM,KAAK,SAAW,EAAG,CAC3B,GAAIU,IAAqBV,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,wEAAwE,EAE1FkB,EAAclB,EAAM,KAAK,CAAC,CAC5B,KAAO,CACL,GAAIU,IAAqBV,EAAM,KAAK,CAAC,EACnC,MAAM,IAAI,MAAM,kFAAkF,EAEpGkB,EAAclB,EAAM,KAAK,CAAC,EAAIA,EAAM,KAAK,CAAC,EAC1CiB,EAAe,EACjB,CACF,CAEA,IAAME,EAAsBR,EAAqBD,EAC3CU,GAAsB,GAE5B,GAAIlB,EACF,MAAM,IAAI,MAAM,mCAAmC,EAGrD,GAAIC,EAAsB,CACxB,GAAIA,EAAqB,KAAK,SAAW,EACvC,MAAM,IAAI,MAAM,iEAAiE,EAEnF,GAAKA,EAAqB,KAAK,CAAC,IAAMI,GAAaJ,EAAqB,KAAK,CAAC,IAAM,GAChFA,EAAqB,KAAK,CAAC,IAAMN,EAAW,UAAYM,EAAqB,KAAK,CAAC,IAAMK,GACzFL,EAAqB,KAAK,CAAC,IAAMgB,EACnC,MAAM,IAAI,MAAM,2FAA2F,CAE/G,CAEA,MAAO,CACL,UAAAZ,EACA,eAAAC,EACA,mBAAAG,EACA,iBAAAD,EACA,oBAAAS,EACA,kBAAAP,EACA,gBAAiB,EACjB,WAAAH,EACA,YAAAS,EACA,SAAAL,EACA,UAAW,KAAK,MAAMK,EAAcrB,EAAW,QAAQ,EACvD,SAAUA,EAAW,SACrB,iBAAkB,GAClB,uBAAwB,GACxB,gBAAiBA,EAAW,gBAC5B,SAAAkB,EACA,MAAOlB,EAAW,MAClB,oBAAAuB,GACA,aAAAH,EACA,UAAAH,CACF,CACF,EAEalC,GAAqCiB,GAC9CwB,GAA4B,CAAC,GAAGxB,CAAU,CAAC,EAEzChB,GAAgDwC,GAA4B,CAAC,KAAM,CAAC,EAAG,EAAG,EAAG,CAAC,CAAC,CAAC,EAEhGvC,GACF,CAACwC,EAAyBC,EAAiBtB,EAAkBM,EAAmBC,EAC/EC,EAAoBe,IAAuB,CAC1C,IAAMC,EAAc,CAAClB,EAAWC,EAAgBC,CAAU,EACpDiB,EAAa9B,EAAU,KAAK6B,CAAW,EACvCE,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,QAAuB,KAAMF,CAAU,EACnF,CAAC,QAAuB,KAAMf,CAAU,CAC1C,EAEMmB,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,GAAe,gBAAiBR,EAAI,SAAUE,CAAW,EAClEO,EAAWC,EAAc,MAAOV,EAAI,SAAUE,CAAW,EACzDS,EAAYD,EAAc,OAAQhC,EAAK,SAAUwB,CAAW,EAE5DU,EAA8B,CAClC,CAAC,KAAM,cAAe,KAAM,
KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,cAAe,KAAM,KAAK,CAC3G,EACA,MAAO;AAAA,IACXN,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBH,EAAUE,EAAWJ,CAAM,CAAC;AAAA,IACrFD,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA;AAAA;AAAA,IAK1E,EAEA,OAAOP,EAAQ,QACX,CACE,KAAM,4BACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,MAAM,CAAC,EACjD,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMG,EAAa,SAAUF,EAAI,SAAU,aAAgC,CAAC,EACvF,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,GACA,gBAAAC,CACF,EACA,CAAC,OAAQ,CAACL,EAAKtB,CAAI,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAC7C,EAEElB,GACF,CAACuC,EAAyBf,EAAmB6B,EAAkB5B,EAAwBK,EACtFwB,EAAmBpC,EAAmBuB,IAAwB,CAG7D,IAAIc,EAAgBD,EACpB,GAAKpC,EAOE,CACL,GAAIO,IAAmB,EACrB,MAAM,IAAI,MAAM,mFAAmF,EAEnG,OAAA8B,EACIxD,GAAiBwC,EAASe,EAAOpC,EAAMM,EAAWC,EAAgB4B,EAAWvB,EAAUW,CAAW,EACtGc,EAAgBA,EAAc,QAAQ,CAAC/B,EAAWC,EAAgB4B,EAAUvB,CAAQ,CAAC,EAC9ES,EAAQ,QACXiB,GAA2BD,EAAezD,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACyD,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAEnD,KAjBE,QAAID,EAAM,KAAK,SAAW,IACxBC,EAAgBD,EAAM,QAAQ,CAAC9B,EAAWC,EAAgB4B,EAAUvB,CAAQ,CAAC,GAExES,EAAQ,QACXiB,GAA2BD,EAAezD,GAAyB,IAAI,EACvE,CAAC,OAAQ,CAACyD,CAAa,EAAG,QAAS,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAanD,EAEStD,GAAqB,CAACsC,EAAyBzB,IAAqC,CAC/F,IAAM2C,EAAS7D,GAAe2C,EAAQ,OAAQzB,CAAU,EAClDC,EAAQwB,EAAQ,OAAO,CAAC,EACxBvB,EAAMrB,GAAS4C,EAAQ,OAAQ,CAAC,EAChCtB,EAAQtB,GAAS4C,EAAQ,OAAQ,CAAC,EAClCrB,EAAOvB,GAAS4C,EAAQ,OAAQ,CAAC,EACjCpB,EAAiBxB,GAAS4C,EAAQ,OAAQ,CAAC,EAC3CnB,EAAuBzB,GAAS4C,EAAQ,OAAQ,CAAC,EACjDlB,EAAU1B,GAAS4C,EAAQ,OAAQ,CAAC,EACpCjB,EAAY3B,GAAS4C,EAAQ,OAAQ,CAAC,EAC5C,GAAIxB,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,+BAA+B,EAGjD,GAAIC,GAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,8BAA8B,EAIhD,IAAM0C,EAAS1C,GAAOC,GAASD,EAAI,KAAK,SAAW,GAAKC,EAAM,KAAK,SAAW,EAExE0C,EAAI3D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,eAAgBA,EAAO,SAAU1C,EAAOG,EAAM,CAAC,EAEtG,GAAIwC,EACF,OAAOE,GACHrB,EAASoB,EAAG3C,EAAKC,EAAOE,EAAgB,OAAWE,EAASC,EAAWF,EAAsBqC,EAC7F3C,CAAU,EAEhB,GAAI,CAACE,GAAO,CAACC,EACX,MAAM,IAAI,MAAM,gCAAgC,EAElD,IAAM4C,EAAI7D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,iBAAkBA,EAAO,SAAUzC,EAAKE,EAC3FuC,EAAO,UAAU,EAEfK,EAAI9D,GACNuC,EAASkB,EAAO,UAAWA,EAAO,SAAUA,EAAO,iBAAkBA,EAAO,UAAWxC,EAAOC,EAC9F,EAAIuC,EAAO,UAAU,EAEzBG,GACIrB,EAASoB,EAAGE,EAAGC,EAAG3C,EAAgB,OAAWE,EAASC,EAAWF,EAAsBqC,EAAQ3C,CAAU,CAC/G,ICrWA,IAiBMiD,GAmBAC,GA0BAC,GA2BAC,GAuBAC,GAuBAC,GAeAC,GAiDAC,GA0BOC,GAjObC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KASMb,GAAkBc,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,GAAIA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,GAClE,MAAM,IAAI,MAAM,sCAAsC,EAGxD,GAAIA,EAAO,QAAU,EAAG,CACtB,IAAIC,EAAYD,EAAO,CAAC,EAAE,KAAK,OAAS,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,EAI9D,GAHIA,EAAO,SAAW,IACpBC,EAAYD,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,IAAMA,EAAO,CAAC,EAAE,KAAK,CAAC,GAEpD,CAACC,EACH,MAAM,IAAI,MAAM,6EAA6E,CAEjG,CACF,EAEMd,GAAiB,CAACe,EAAuBC,EAAmBC,IAA+B,CAC/F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,sBACSH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,GAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,2BAI7EG,GAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA;AAAA,gCAGzCI,GAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,UAI9E,MAAO;AAAA,oBACWD,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA,cAIvBG,CAAK;AAAA;AAAA;AAAA,OAInB,EAEMjB,GAAgB,CAACc,EAAuBC,EAAmBC,IAA+B,CAC9F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,GAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,yCAKnEG,GAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,gCAEvDI,GAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA;AAAA;AAAA,oCAI1CI,GAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA
,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMhB,GAAa,CAACa,EAAuBC,EAAmBC,IAA+B,CAC3F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,GAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA,+BAI7EG,GAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA,4BACjDI,GAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,oCAEtCI,GAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMf,GAAa,CAACY,EAAuBC,EAAmBC,IAA+B,CAC3F,IAAIC,EAAQ,GACZ,QAASC,EAAIH,EAAY,EAAGG,GAAK,EAAG,EAAEA,EACpCD,GAAS;AAAA,0BACaH,EAAO,WAAW,UAAWI,CAAC,CAAC,OAAOC,GAAa,gBAAiBD,EAAGF,CAAU,CAAC;AAAA;AAAA,6BAE/EG,GAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,+BAE5CI,GAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA,6BAChDI,GAAa,mBAAoBD,EAAGH,CAAS,CAAC;AAAA;AAAA,oCAEvCI,GAAa,qBAAsBD,EAAGH,CAAS,CAAC;AAAA,cAIlF,MAAO;AAAA;AAAA;AAAA,gBAGOE,CAAK;AAAA;AAAA,WAGrB,EAEMd,GAAgB,CAACW,EAAuBC,EAAmBK,IAAsC,CACrG,OAAQA,EAAW,KAAM,CACvB,IAAK,GACH,OAAOrB,GAAee,EAAQC,EAAWK,EAAW,KAAK,MAAM,EACjE,IAAK,GACH,OAAOpB,GAAcc,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAChE,IAAK,GACH,OAAOnB,GAAWa,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAC7D,IAAK,GACH,OAAOlB,GAAWY,EAAQC,EAAWK,EAAW,KAAK,MAAM,EAC7D,QACE,MAAM,IAAI,MAAM,cAAc,CAClC,CACF,EAEMhB,GAAuB,CAACQ,EAA+BQ,IAA2C,CACtG,IAAMC,EAAcC,EAAU,SAASV,EAAO,CAAC,EAAE,KAAK,MAAM,EAAGQ,EAAW,IAAI,EACxEG,EAAYX,EAAO,CAAC,EAAE,KACtBY,EAAaF,EAAU,KAAKD,CAAW,EACvCI,EACF,CAAC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,OAAsB,KAAMJ,EAAW,IAAI,CAAC,EACzFA,EAAW,OAAS,GACtBK,EAAgB,KAAK,CAAC,KAAMb,EAAO,CAAC,EAAE,SAAU,KAAMQ,EAAW,KAAK,CAAC,EAGzEK,EAAgB,KAAK,GAAGC,GAA2Bd,EAAO,CAAC,EAAE,KAAMS,CAAW,CAAC,EAC/E,IAAMM,EAAwD,CAAC,MAAM,EAE/DC,EAAmBC,GAA+B,CACtD,IAAMf,EAASgB,GAAe,SAAUlB,EAAO,CAAC,EAAE,SAAUS,EAAY,MAAM,EACxEU,EAAQC,EAAc,IAAKpB,EAAO,CAAC,EAAE,SAAUW,EAAU,MAAM,EAC/DU,EAAWF,EAAM,KAAK,MACtBG,EAAa/B,GAAcW,EAAQS,EAAU,OAAQH,CAAU,EAC/De,EACF,CAAC,CAAC,KAAM,cAAe,KAAM,KAAK,EAAG,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQf,EAAW,KAAK,MAAM,CAAC,EACpG,OAAIA,EAAW,OAAS,GACtBe,EAAS,KAAK,CAAC,KAAM,iBAAkB,KAAMF,CAAkC,CAAC,EAG3E;AAAA,cACGJ,EAAa,iBAAiBM,CAAQ,EAAE,iBAAiBJ,EAAOjB,CAAM,CAAC;AAAA,cACvEe,EAAa,UAAU,CAAC;AAAA,cACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA;AAAA,4BAE5Df,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,0BAEtCmB,CAAQ;AAAA,cACpBC,CAAU;AAAA;AAAA,UAGtB,EAEA,MAAO,CACL,KAAM,MACN,YAAa,CAAC,KAAM,GAAGd,EAAW,IAAI,GAAI,kBAAAO,CAAiB,EAC3D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMN,EAAa,SAAUT,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKU,EAAU,KAAKD,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAAG,CACF,CACF,EAEMvB,GAAgC,CAACO,EAA+BQ,IAA6C,CACjH,GAAIR,EAAO,OAAS,EAAG,CACrB,IAAMwB,EAAexB,EAAO,CAAC,EAAE,iBAAiB,EAC1CyB,EAASzB,EAAO,QAAU,GAAKA,EAAO,CAAC,EAAE,KAAQA,EAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAAI,EAElFG,EAAYH,EAAO,CAAC,EAAE,KAAK,OAC3B0B,EAAa,IAAI,WAAW,EAAIvB,CAAS,EAAE,KAAK,CAAC,EACvD,GAAIH,EAAO,QAAU,EAAG,CACtB,IAAM2B,EAAO3B,EAAO,CAAC,EAAE,iBAAiB,EACxC,QAASM,EAAI,EAAGA,EAAIqB,EAAK,OAAQrB,IAC/BoB,EAAW,OAAOC,EAAKrB,CAAC,CAAC,CAAC,EAAI,OAAOkB,EAAalB,CAAC,CAAC,EACpDoB,EAAW,OAAOC,EAAKrB,CAAC,CAAC,EAAIH,CAAS,EAAI,OAAOqB,EAAalB,EAAIqB,EAAK,MAAM,CAAC,CAElF,MACEH,EAAa,QAAQ,CAACI,EAAGtB,IAAMoB,EAAW,OAAOpB,CAAC,CAAC,EAAK,OAAOsB,CAAC,CAAE,EAGpE,IAAMC,EAAiB,CAAC,EACxB,OAAAH,EAAW,QAAQE,GAAKC,EAAK,KAAKD,CAAC,CAAC,EAE7B,CAAC,KAAMpB,EAAW,KAAM,MAAAiB,EAAO,KAAAI,CAAI,CAC5C,KACE,QAAOrB,CAEX,EAEad,GAAM,CAACoC,EAAyBtB,IAAoC,CAC/EtB,GAAe4C,EAAQ,MAAM,EAC7B,IAAMC,EAAoBtC,GAA8BqC,EAAQ,OAAQtB,CAAU,EAClFsB,EAAQ,QAAQtC,GAAqBsC,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACxF,ICrOA,IAmBMC,GAMAC,GA4BAC,GA2DAC,GAsJAC,GAGAC,GAGAC,GAGAC,GAaAC,GAiCOC,GAYAC,GAKPC,GAWOC,GAKAC,GAUPC,GA6BOC,GAKAC,GAgBAC,GAKAC,GA/ZbC,GAAAC,GAAA,kBAGAC,KAE
AC,KAEAC,KAIAC,KAQMxB,GAAkByB,GAAwC,CAC9D,GAAIC,GAAI,OAAO,uBAAyB,CAACD,GAAUA,EAAO,SAAW,GACnE,MAAM,IAAI,MAAM,4BAA4B,CAEhD,EAEMxB,GAA0C,CAC5C0B,EAAmBC,EAA2BC,IAAyD,CACzG,IAAMC,EAAiBF,EAAW,SAAW,OACvCG,EAA2BJ,EAAM,KAAK,MAAM,EAC9CG,GACFC,EAAyB,OAAO,EAAG,EAAGA,EAAyB,IAAI,CAAE,EAEvE,IAAMC,EAAe,OAAO,eAAe,KAAKJ,EAAY,WAAW,EACjEK,EAAcL,EAAW,YAAY,MAAM,EAC3CM,EAAUN,EAAW,QAAQ,MAAM,EACnCO,EAAsBH,EAAgBJ,EAAiC,UAAU,MAAM,EAAI,CAAC,EAC5FQ,EAAOR,EAAW,KAAK,MAAM,EACnCS,GAAa,qBAAqBR,EAAkBE,EAA0BE,EAAaC,EAASC,EAAWC,CAAI,EAEnH,IAAME,EAA4BD,GAAa,uBAC3CR,EAAkBE,EAA0BG,EAASC,EAAWF,EAAaG,EAAMR,EAAW,OAAO,EAEnGW,EAAgB,OAAO,OAAO,CAAC,EAAGX,CAAU,EAC9CI,EACF,OAAO,OAAOO,EAAe,CAAC,YAAAN,EAAa,QAAAC,EAAS,KAAAE,EAAM,UAAAD,EAAW,SAAUP,EAAW,QAAQ,CAAC,EAEnG,OAAO,OAAOW,EAAe,CAAC,YAAAN,EAAa,QAAAC,EAAS,KAAAE,EAAM,SAAUR,EAAW,QAAQ,CAAC,EAE1F,IAAMY,EAA2BF,EAA0B,MAAM,EACjE,OAAAE,EAAyB,KAAKA,EAAyB,OAAO,EAAG,CAAC,EAAE,CAAC,CAAC,EAC/D,CAACD,EAAeT,EAAiBU,EAA2BF,CAAyB,CAC9F,EAEMpC,GAAuB,CACzBuC,EACAb,IAAgG,CAClG,IAAME,EAAiBF,EAAW,SAAW,OACvCc,EAAaC,EAAU,KAAKF,CAAW,EACvCG,EAAaD,EAAU,KAAKf,EAAW,WAAW,EAClDiB,EACF,CAAC,CAAC,QAAuB,KAAMH,CAAU,EAAG,CAAC,QAAuB,KAAME,CAAU,CAAC,EACnFE,EAA8B,CAAC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,aAAc,KAAM,KAAK,CAAC,EACzG,GAAIlB,EAAW,YAAY,QAAU,EAAG,CACtC,IAAMmB,EAAKnB,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7DoB,EAAKpB,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrDqB,EAAUrB,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxDsB,EAAQtB,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EAClDuB,EAAoB,CAAC,EAAEF,EAAUC,GACvCL,EAAgB,KACZ,CAAC,QAAuB,KAAME,CAAE,EAChC,CAAC,QAAuB,KAAMC,CAAE,EAChC,CAAC,QAAuB,KAAMC,CAAO,EACrC,CAAC,QAAuB,KAAMC,CAAK,CACvC,EACAJ,EAAS,KACL,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,EACnF,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,EAEhC,IAAIM,EAAoB,GACxB,GAAIxB,EAAW,YAAY,SAAW,EAAG,CACvC,IAAMyB,EAAKzB,EAAW,YAAYA,EAAW,YAAY,OAAS,CAAC,EAC7D0B,EAAK1B,EAAW,QAAQA,EAAW,QAAQ,OAAS,CAAC,EACrD2B,EAAU3B,EAAW,KAAKA,EAAW,KAAK,OAAS,EAAI,CAAC,EACxD4B,EAAQ5B,EAAW,KAAKA,EAAW,KAAK,OAAS,CAAC,EACxDwB,EAAoB,CAAC,EAAEG,EAAUC,GACjCX,EAAgB,KACZ,CAAC,QAAuB,KAAMQ,CAAE,EAAG,CAAC,QAAuB,KAAMC,CAAE,EAAG,CAAC,QAAuB,KAAMC,CAAO,EAC3G,CAAC,QAAuB,KAAMC,CAAK,CAAC,EAExCV,EAAS,KACL,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,KAAM,KAAM,KAAK,EAAG,CAAC,KAAM,UAAW,KAAM,KAAK,EACnF,CAAC,KAAM,QAAS,KAAM,KAAK,CAAC,CAClC,CACA,MAAO,CAACD,EAAiBC,EAAU,GAAMK,EAAmBC,CAAiB,CAC/E,KAAO,CACL,GAAItB,EACF,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAM2B,EAAgBd,EAAU,eAAef,EAAW,WAAW,EACrEiB,EAAgB,KACZ,CAAC,QAAuB,KAAMY,CAAa,EAAG,CAAC,QAAuB,KAAM7B,EAAW,IAAI,EAC3F,CAAC,QAAuB,KAAMA,EAAW,OAAO,CAAC,EACrDkB,EAAS,KACL,CAAC,KAAM,gBAAiB,KAAM,MAAO,OAAQW,EAAc,MAAM,EACjE,CAAC,KAAM,OAAQ,KAAM,MAAO,OAAQ7B,EAAW,KAAK,MAAM,EAC1D,CAAC,KAAM,UAAW,KAAM,MAAO,OAAQA,EAAW,QAAQ,MAAM,CAAC,EAErE,IAAM8B,EAAU9B,EAAW,KAAK,OAAO,CAAC+B,EAAKC,IAAQD,EAAMC,CAAG,EAC9D,MAAO,CAACf,EAAiBC,EAAU,CAAC,CAACY,EAAS,GAAO,EAAK,CAC5D,CACF,EAEMvD,GAAsB,CACxB0D,EAA4BC,EAAkBC,EAAcC,EAAyBpC,EACrFqC,EAAaC,EAAaC,EAAerB,EAA6BY,EAAkBP,EACxFC,IAAuC,CACzC,IAAMtB,EAAiBF,EAAW,SAAW,OACvCwC,EAAWN,EAAE,KAAK,MAClBO,EAASC,GAAe,SAAUR,EAAE,KAAK,OAAQE,CAAe,EAEtE,GAAIpC,EAAW,YAAY,QAAU,EAAG,CACtC,IAAI2C,EAAQ,GACRC,EAAQ,GACRC,EAAW,GACTC,EAAUX,GAAQjC,EAAiB,EAAI,GAsB7C,GArBIqB,EACFoB,EAAQ;AAAA;AAAA,6BAEeG,CAAO,eAAeA,CAAO;AAAA,iCACzBA,CAAO,qBAAqBA,CAAO;AAAA,4CACxBA,CAAO;AAAA;AAAA;AAAA;AAAA,kCAIjBZ,EAAE,gBAAgB,UAAU,CAAC;AAAA,oBAC3CG,CAAG;AAAA,mBAGjBM,EAAQ;AAAA;AAAA,6BAEeG,CAAO,eAAeA,CAAO;AAAA,kCACxBZ,EAAE,gBAAgB,UAAU,CAAC;AAAA,oBAC3CG,CAAG;AAAA,mBAIfrC,EAAW,YAAY,SAAW,EAAG,CACvC,IAAM+C,EAAUZ,GAAQjC,EAAiB,EAAI,GACzCsB,EACFoB,EAAQ;AAAA;AAAA,6BAEaG,CAAO,eAAeA,CAAO;AAAA,iCACzBA,
CAAO,qBAAqBA,CAAO,yBAAyBA,CAAO;AAAA;AAAA;AAAA;AAAA,gBAM5FH,EAAQ;AAAA;AAAA,6BAEaG,CAAO,eAAeA,CAAO;AAAA,kBAGpDF,EAAW;AAAA;AAAA,aAGb,CAoBA,MAlBoB;AAAA,cACVZ,EAAa,iBAAiBf,CAAQ,EAAE,iBAAiBgB,EAAGO,CAAM,CAAC;AAAA;AAAA,cAEnER,EAAa,UAAU,CAAC;AAAA,gBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,8BAE3DQ,EAAO,gBAAgB,YAAY,CAAC;AAAA,+BACnCA,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,4BAEvCD,CAAQ,IAAID,CAAK;AAAA;AAAA,gBAE7BK,CAAK;AAAA,gBACLD,CAAK;AAAA,gBACLE,CAAQ;AAAA,gBACRP,CAAG;AAAA;AAAA;AAAA,cAKjB,KAAO,CACL,GAAIpC,EACF,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAM8C,EAAchD,EAAW,YAAY,OACrCiD,EAAWjD,EAAW,KAAK,OAC7BkD,EAAU,GACd,OAAIpB,EACFoB,EAAU;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,gCAQgBhB,EAAE,gBAAgB,UAAU,CAAC;AAAA,kBAC3CG,CAAG;AAAA,iBAGfa,EAAU;AAAA;AAAA,8BAEchB,EAAE,gBAAgB,UAAU,CAAC;AAAA,gBAC3CG,CAAG;AAAA,cAGK;AAAA,cACVJ,EAAa,iBAAiBf,CAAQ,EAAE,iBAAiBgB,EAAGO,CAAM,CAAC;AAAA;AAAA,cAEnER,EAAa,UAAU,CAAC;AAAA,gBACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,8BAC3DQ,EAAO,gBAAgB,YAAY,CAAC;AAAA,+BACnCA,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,wCAE3BO,CAAW;AAAA;AAAA,4BAEvBR,CAAQ,IAAID,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,uCAMNS,EAAc,CAAC;AAAA,0CACZG,GAAa,yBAA0B,IAAKH,CAAW,CAAC;AAAA,2CACvDG,GAAa,yBAA0B,IAAKH,CAAW,CAAC;AAAA;AAAA,0BAEzEA,EAAc,CAAC;AAAA;AAAA;AAAA,+BAGVb,EAAOa,CAAW,UAAUb,CAAI;AAAA,+CAEvDgB,GAAa,mBAAoB,OAAOhB,EAAOa,CAAW,IAAKA,CAAW,CAAC;AAAA,oCAC/Cb,EAAOa,CAAW,QAAQG,GAAa,gBAAiB,SAAUF,CAAQ,CAAC;AAAA,oBAC3FC,CAAO;AAAA;AAAA,gBAEXZ,CAAG;AAAA;AAAA;AAAA,cAKjB,CACF,EAcM9D,GAAiCwB,GAClC,GAAGA,EAAW,MAAM,IAAIA,EAAW,QAAQ,IAAIA,EAAW,OAAO,IAAIA,EAAW,YAAY,MAAM,GAEjGvB,GAA4CuB,GAC7C,GAAGxB,GAA8BwB,CAAU,CAAC,IAAIA,EAAW,eAAe,GAEzEtB,GAAwCsB,GACzC,GAAGxB,GAA8BwB,CAAU,CAAC,IAAIA,EAAW,YAAY,IAAIA,EAAW,SAAS,GAE9FrB,GAA6BqB,IAA+D,CAChG,OAAQA,EAAW,OACnB,QAAS,CAAC,SAAU,QAAS,aAAc,YAAY,EAAEA,EAAW,QAAkB,EACtF,SAAUA,EAAW,UACrB,YAAaA,EAAW,aACxB,QAASA,EAAW,QACpB,KAAMA,EAAW,IACnB,GAMMpB,GACF,CAACwE,EAAcrD,EAAmBE,EAA2BD,IAAmD,CAC9G,GAAM,CAACqD,EAAoBxC,CAAW,EAClCxC,GAAwC0B,EAAOC,EAAYC,CAAgB,EACzEiC,EAAIoB,EAAc,IAAKvD,EAAM,SAAUA,EAAM,KAAK,MAAM,EACxDyC,EAAWN,EAAE,KAAK,MAElBG,EAAM,kBACRC,EAAM,GACNe,EAAmB,gBACrBf,GAAO,YAAYE,CAAQ,yBAE3BF,GAAO,YAAYE,CAAQ,oCAE7B,GAAM,CAACvB,EAAiBC,EAAUY,EAASP,EAAmBC,CAAiB,EAC3ElD,GAAqBuC,EAAawC,CAAkB,EACxDpC,EAAgB,KAAK,GAAGsC,GAA2BxD,EAAM,KAAMc,CAAW,CAAC,EAC3E,IAAM2C,EAAwD,CAAC,MAAM,EACrE,MAAO,CACL,KAAAJ,EACA,YACI,CAAC,KAAM,GAAGpD,EAAW,QAAQ,IAAI8B,CAAO,IAAIP,CAAiB,IAAIC,CAAiB,GAAI,kBAAAgC,CAAiB,EAC3G,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM3C,EAAa,SAAUd,EAAM,QAAQ,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKgB,EAAU,KAAKF,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAiBgB,GAAgB1D,GAC7B0D,EAAcC,EAAGnC,EAAM,KAAK,OAAQc,EAAY,OAAQwC,EAAoBhB,EAAKC,EAAK,EAAKpB,EAC3FY,EAASP,EAAmBC,CAAiB,CACnD,CACF,EAES3C,GAA8BmB,GAA+D,CACxG,IAAMyD,EAAmBzD,EAAW,oBAAiC,EAE/D0D,EAAO/E,GAA0BqB,CAAU,EAEjD,GAAI0D,EAAK,WAAa,EACpB,MAAM,IAAI,MAAM,wEAAwE,EAE1F,IAAMC,EAAwB,CAAC,gBAAAF,EAAiB,GAAGC,EAAM,SAAU,EAAE,EACrE,MAAO,CAAC,GAAGC,EAAuB,SAAUlF,GAAyCkF,CAAqB,CAAC,CAC7G,EAEa7E,GAAc,CAAC8E,EAAyB5D,IAA4C,CAC/F5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQhF,GAA6B,cAAegF,EAAQ,OAAO,CAAC,EAAG,GAAO5D,CAAU,CAAC,CACnG,EAEMjB,GAAuB,CAC3B,QAAS,GACT,SAAU,EACV,gBAAiB,GACjB,YAAa,CAAC,EACd,QAAS,CAAC,EACV,KAAM,CAAC,EACP,aAAc,EACd,UAAW,CAAC,CACd,EAEaC,GAAoCgB,GAA+D,CAC9G,IAAM6D,EAAS7D,EAAW,OAC1B,MAAO,CAAC,OAAA6D,EAAQ,GAAG9E,GAAsB,SAAU8E,CAAM,CAC3D,EAEa5E,GAAoB,CAAC2E,EAAyB5D,IAA4C,CACrG5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQhF,GAA6B,oBAAqBgF,EAAQ,OAAO,CAAC,EAAG,GAAM5D,CAAU,CAAC,CACxG,EAOMd,GACF,CAACkE,EAAcrD,EAAmBE,EAA2BD,IAA+C,CAC1G,GAAM,CAACqD,EAAoBxC,CAAW,EAClCxC,GAAwC0B,EAAOC,EAAYC,CAAgB,EACzEoC,EAAM;AAAA;AAAA,MAGNC,EAAM,GACNJ,E
AAIoB,EAAc,IAAKvD,EAAM,SAAUA,EAAM,KAAK,MAAM,EACxDyD,EAAwD,CAAC,MAAM,EAC/D,CAACvC,EAAiBC,EAAUY,EAASP,EAAmBC,CAAiB,EAC3ElD,GAAqBuC,EAAawC,CAAkB,EACxD,OAAApC,EAAgB,KAAK,GAAGsC,GAA2BxD,EAAM,KAAMc,CAAW,CAAC,EACpE,CACL,KAAAuC,EACA,YACI,CAAC,KAAM,GAAGpD,EAAW,QAAQ,IAAI8B,CAAO,IAAIP,CAAiB,IAAIC,CAAiB,GAAI,kBAAAgC,CAAiB,EAC3G,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAM3C,EAAa,SAAUd,EAAM,QAAQ,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKgB,EAAU,KAAKF,CAAW,EAAI,EAAuB,CAAC,EACnF,gBAAAI,CACF,GACA,gBAAiBgB,GAAgB1D,GAC7B0D,EAAcC,EAAGnC,EAAM,KAAK,OAAQc,EAAY,OAAQwC,EAAoBhB,EAAKC,EAChFvC,EAAM,WAAa,GAAoB,OAAS,KAAMmB,EAAUY,EAASP,EAC1EC,CAAiB,CACvB,CACF,EAESrC,GAAU,CAACyE,EAAyB5D,IAAwC,CACvF5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ1E,GAAyB,UAAW0E,EAAQ,OAAO,CAAC,EAAG,GAAO5D,CAAU,CAAC,CAC3F,EAEaZ,GAA0BY,GAA2D,CAChG,IAAM8D,EAAe9D,EAAW,cAC1BO,EAAYP,EAAW,UAEvB0D,EAAO/E,GAA0BqB,CAAU,EAEjD,GAAI8D,IAAiB,EACnB,MAAM,IAAI,MAAM,6DAA6D,EAE/E,GAAIJ,EAAK,WAAa,EACpB,MAAM,IAAI,MAAM,oEAAoE,EAEtF,IAAMK,EAAoB,CAAC,aAAAD,EAAc,UAAAvD,EAAW,GAAGmD,EAAM,SAAU,EAAE,EACzE,MAAO,CAAC,GAAGK,EAAmB,SAAUrF,GAAqCqF,CAAiB,CAAC,CACjG,EAEa1E,GAAgCW,GAA2D,CACtG,IAAM6D,EAAS7D,EAAW,OAC1B,MAAO,CAAC,OAAA6D,EAAQ,GAAG9E,GAAsB,SAAU8E,CAAM,CAC3D,EAEavE,GAAgB,CAACsE,EAAyB5D,IAAwC,CAC7F5B,GAAewF,EAAQ,MAAM,EAC7BA,EAAQ,QAAQ1E,GAAyB,gBAAiB0E,EAAQ,OAAO,CAAC,EAAG,GAAM5D,CAAU,CAAC,CAChG,IClaA,IAUMgE,GAUAC,GAoCOC,GAxDbC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KAEMP,GAAwB,CAACQ,EAAeC,EAAeC,IAAwB,CACnF,IAAMC,EAAiBH,IAAUC,EAC3BG,EAA8BJ,EAAQC,GAASC,EAAQ,EACvDG,EAA8BL,EAAQC,GAASC,EAAQ,EAE7D,GAAIC,GAAkBC,GAA+BC,EACnD,MAAM,IAAI,MAAM,2CAA4C,CAEhE,EAEMZ,GAAyB,CAACO,EAAeC,EAAeC,EAAeI,IAAoC,CAC/G,IAAMC,EAAc,KAAK,IAAI,KAAK,MAAMN,EAAQD,GAASE,CAAK,CAAC,EACzDM,EAAwB,CAACD,CAAW,EACpCE,EAAaF,EACbG,EAAoC,CACxC,CAAC,QAAuB,KAAMD,CAAU,EAAG,CAAC,KAAMH,EAAU,KAAMN,CAAK,EAAG,CAAC,KAAMM,EAAU,KAAMJ,CAAK,EACtG,GAAGS,GAA2BH,CAAW,CAC3C,EAEMI,EAAmBC,GAA+B,CACtD,IAAMC,EAASC,GAAe,SAAUT,EAAUE,EAAY,MAAM,EAC9DQ,EAAWF,EAAO,KAAK,MACvBG,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,QAAS,KAAMD,CAAkC,EAC3F,CAAC,KAAM,QAAS,KAAMA,CAAkC,CAC1D,EACA,MAAO;AAAA,UACDH,EAAa,iBAAiBI,CAAQ,EAAE,iBAAiBH,CAAM,CAAC;AAAA,UAChED,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,gDACnCG,CAAQ;AAAA,QAEtD,EAEA,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAM,GAAGV,CAAQ,EAAE,EACjC,gBAAAM,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMJ,EAAa,SAAAF,CAAQ,CAAC,EACvC,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,EAAuB,CAAC,EAClE,gBAAAC,CACF,EACF,CACF,EAEahB,GAASwB,GAAkC,CACtD,IAAIlB,EAAQ,EACRC,EAAQ,EACRC,EAAQ,EACRgB,EAAQ,OAAO,CAAC,EAAE,WAAa,GACjClB,EAAQkB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,EAC3CjB,EAAQiB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,EAC3ChB,EAAQgB,EAAQ,OAAO,CAAC,EAAE,cAAc,EAAE,CAAC,GAClCA,EAAQ,OAAO,CAAC,EAAE,WAAa,IACxClB,EAAQkB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAC7CjB,EAAQiB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,EAC7ChB,EAAQgB,EAAQ,OAAO,CAAC,EAAE,gBAAgB,EAAE,CAAC,GAE3CC,GAAI,OAAO,sBACb3B,GAAsBQ,EAAOC,EAAOC,CAAK,EAG3CgB,EAAQ,QAAQzB,GAAuBO,EAAOC,EAAOC,EAAOgB,EAAQ,OAAO,CAAC,EAAE,QAAQ,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CACvG,IC1EA,IAiCME,GAuBAC,GASAC,GA6CAC,GAkDAC,GAkCAC,GAaAC,GAwBAC,GAyBAC,GAuBAC,GAkCAC,GAWAC,GAQAC,GAsDAC,GA6EAC,GAwEAC,GAoHAC,GAOOC,GAiBAC,GAnqBbC,GAAAC,GAAA,kBAIAC,KAEAC,KACAC,KAGAC,KAuBMxB,GAAiB,CAACyB,EAAkBC,IAAuC,CAK/E,GAJAD,EAAO,MAAOE,GAAUA,EAAQ,IAAM,IAAM,CAClB,MAAM,IAAI,MAAM,oDAAoD,CACtE,EAAE,EAEtBF,EAAO,OAAS,GAClB,GAAIC,EAAW,OAAS,UACtB,GAAI,EAAED,EAAO,SAAW,GAAKA,EAAO,SAAW,GAAMA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GACtGA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GACxDA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC7D,MAAM,IAAI,MACN;AA
AA,oGACwF,UAErFC,EAAW,OAAS,SACzB,EAAED,EAAO,SAAW,GAAMA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC/EA,EAAO,SAAW,GAAKA,EAAO,CAAC,IAAM,GAAKA,EAAO,CAAC,IAAM,GAC7D,MAAM,IAAI,MAAM,+DAA+D,EAIvF,EAEMxB,GAAe,CAACwB,EAA2BG,EAAyBC,IAA2B,CACnGD,EAAK,MAAOD,GAAUA,GAAS,GAAKA,EAAQE,IAAS,IAAM,CACnC,MAAM,IAAI,MAAM,qEAAqE,CACvF,EAAE,EACxB,IAAMC,EAAY,IAAI,MAAMD,CAAI,EAAE,KAAK,CAAG,EAC1C,OAAAD,EAAK,QAAQ,CAACD,EAAOI,IAAUD,EAAUH,CAAK,EAAIF,EAAOM,CAAK,CAAC,EACxDD,CACT,EAEM5B,GACF,CAAC8B,EAA+BN,EAA8BO,EAAsBR,EACnFS,EAAiBC,IAAwB,CACxC,GAAM,CAACC,EAAeC,EAAkBC,CAAe,EAClDL,EAAe,GAAM,CAAC,EAAG,EAAG,CAAC,EAAI,CAAC,GAAKD,EAAO,OAAS,EAAK,EAAI,GAAI,EAAE,EACrEH,EAAOG,EAAO,CAAC,EAAE,KAAK,OAC5B,GAAII,EAAgB,GAAKJ,EAAO,OAASI,GAAiBJ,EAAOI,CAAa,EAAE,KAAK,OAAS,EAC5FJ,EAAOI,CAAa,EAAE,gBAAgB,EAAE,QAAST,GAAUQ,EAAI,KAAKR,CAAK,CAAC,UACjED,EAAW,0BAA4B,qBAChD,MAAM,IAAI,MAAM,2FAA2F,EAG7G,GAAIW,EAAmB,GAAKL,EAAO,OAASK,GAAoBL,EAAOK,CAAgB,EAAE,KAAK,OAAS,EAAG,CAExG,GADAL,EAAOK,CAAgB,EAAE,gBAAgB,EAAE,QAASV,GAAUF,EAAO,KAAKE,CAAK,CAAC,EAC5EF,EAAO,SAAW,GACjBA,EAAO,SAAWI,GAASI,GAAgB,IAAMR,EAAO,SAAWC,EAAW,KAAK,OACtF,MAAM,IAAI,MACN,6FAA6F,EAEnG1B,GAAeyB,EAAQC,CAAU,EAC7BA,EAAW,KAAK,OAAS,GAC3BzB,GAAawB,EAAQC,EAAW,KAAMG,CAAI,EAAE,QAAQ,CAACF,EAAOI,IAAUN,EAAOM,CAAK,EAAIJ,CAAK,CAE/F,CACA,GAAIW,EAAkB,GAAKN,EAAO,OAASM,IACzCN,EAAOM,CAAe,EAAE,iBAAiB,EAAE,QAASX,GAAUO,EAAM,KAAK,OAAOP,CAAK,CAAC,CAAC,EACnFO,EAAM,SAAWL,GAASI,GAAgB,IAAMC,EAAM,SAAWR,EAAW,KAAK,QACnF,MAAM,IAAI,MAAM,4FAA4F,EAIhH,GAAIA,EAAW,KAAK,OAAS,EAAG,CAC9B,GAAID,EAAO,SAAWC,EAAW,KAAK,OACpC,MAAM,IAAI,MAAM,0FAA0F,EAE5G,GAAIQ,EAAM,SAAWR,EAAW,KAAK,OACnC,MAAM,IAAI,MACN,8FAA8F,CAEtG,CACA,GAAI,OAAOD,EAAW,KAAe,OAAOS,EAAU,KAAeT,EAAO,OAAS,GAAKS,EAAM,OAASL,EACvG,MAAM,IAAI,MAAM,yDAAyD,CAE7E,EAEE1B,GACF,CAACoC,EAAiDC,IAC9C;AAAA,2DACmDA,CAAK,OAC3D,IAAM,CACD,OAAQD,EAAwB,CAC9B,IAAK,aACH,MAAO,UAAUC,CAAK,gBAAgBA,CAAK,YAC7C,IAAK,qBACH,MAAO;AAAA,8BACSA,CAAK,uBAAuBA,CAAK;AAAA;AAAA;AAAA,qBAInD,IAAK,uBACH,MAAO,WAAWA,CAAK,uBAAuBA,CAAK,YACrD,IAAK,gBACH,MAAO;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,kCAMaA,CAAK;AAAA;AAAA,0BAEbA,CAAK,6DAA6DA,CAAK;AAAA;AAAA,qBAGrF,IAAK,qBACH,MAAO;AAAA,6BACQA,CAAK,gBAAgBA,CAAK;AAAA,2BAC5BA,CAAK,gBAAgBA,CAAK,yBAAyBA,CAAK;AAAA,0BACzDA,CAAK;AAAA;AAAA,mCAEIA,CAAK,yBAAyBA,CAAK;AAAA,qBAE1D,IAAK,uBACH,MAAO,uBAAuBA,CAAK,YAAYA,CAAK;AAAA,uCAC3BA,CAAK;AAAA,mCACTA,CAAK;AAAA;AAAA,sCAEFA,CAAK,uBAAuBA,CAAK,mBAC3D,IAAK,aACH,MAAO,YAAYA,CAAK,uBAAuBA,CAAK,mBACtD,QACE,MAAM,IAAI,MAAM,6BAA6BD,CAAsB,mBAAmB,CAC1F,CACF,GAAG,EACP,IAEEnC,GAA8B,CAACqC,EAA0BR,EAAsBO,IACjF,6CAA6CA,CAAK,4BAA4BA,CAAK,MAAQ,IAAM,CAC/F,OAAQC,EAAa,CACnB,IAAK,oBACH,MAAO,yIAKT,IAAK,QACH,MAAO,2BACT,IAAK,OACH,MAAO,0BACT,IAAK,qBACH,MAAO,0KAKT,IAAK,SACL,QACE,GAAIR,EAAe,GACjB,MAAO,mLAOT,MAAM,IAAI,MAAM,gBAAgBQ,CAAW,mBAAmB,CAClE,CACF,GAAG,EACH,IAEEpC,GAAY,CAAC8B,EAAwBP,EAAyBC,IAA2B,CAC7F,IAAMa,EAAS,IAAI,MAAMb,CAAI,EAAE,KAAK,CAAC,EAAE,OAAO,IAAI,MAAMA,CAAI,EAAE,KAAK,CAAC,CAAC,EAC/Dc,EAAWR,EAAI,SAAW,EAAIO,EAASP,EAAI,MAAM,EACvD,OAAIP,EAAK,OAAS,GAChBA,EAAK,QAAQ,CAACgB,EAAGC,IAAM,CACrBH,EAAOE,CAAC,EAAID,EAASE,CAAC,EACtBH,EAAOG,EAAIhB,CAAI,EAAIc,EAASf,EAAK,OAASiB,CAAC,CAC7C,CAAC,EACMH,GAEFC,CACT,EAEMrC,GACF,CAACwC,EAA+BrB,EAA2BS,EAA0BN,IACrE,CACV,IAAImB,EAAwB,CAAC,EAC7B,GAAIb,EAAM,OAAS,EACjB,GAAIN,EAAK,OAAS,EAAG,CAEnB,GADAkB,EAAW,QAASF,GAAMG,EAAY,KAAKH,CAAC,CAAC,EACzC,KAAK,IAAI,GAAGhB,CAAI,EAAIkB,EAAW,OACjC,MAAM,IAAI,MAAM,sBAAsB,EAExClB,EAAK,QAAQ,CAACgB,EAAGC,IAAME,EAAYH,CAAC,EAAIV,EAAMW,CAAC,CAAC,CAClD,MACEX,EAAM,QAASU,GAAMG,EAAY,KAAKH,CAAC,CAAC,MAErC,CACL,GAAInB,EAAO,SAAW,EACpB,MAAM,IAAI,MAAM,yCAAyC,EAEzDsB,EAAcD,EAAW,IAAI,CAACnB,EAAOI,IAAU,KAAK,MAAMJ,EAAQF,EAAO
M,CAAK,CAAC,CAAC,CAEpF,CACA,OAAOgB,CACT,EAEFxC,GAAoB,CAACuC,EAA+BrB,EAAkBC,IAAiC,CAC3G,IAAMsB,GAAiB,IAAM,CAC3B,OAAQtB,EAAW,sBAAuB,CACxC,IAAK,aACH,OAAOA,EAAW,KAAK,OAAS,EAAI,KAAK,IAAI,GAAGA,EAAW,KAAK,IAAImB,GAAKpB,EAAOoB,CAAC,CAAC,EAAG,OAAO,SAAS,EACjE,KAAK,IAAI,GAAGpB,EAAQ,OAAO,SAAS,EAC1E,IAAK,cACH,OAAOC,EAAW,KAAK,OAAS,EAAI,KAAK,IAAI,GAAGA,EAAW,KAAK,IAAImB,GAAKpB,EAAOoB,CAAC,CAAC,EAAG,OAAO,SAAS,EACjE,KAAK,IAAI,GAAGpB,EAAQ,OAAO,SAAS,EAC1E,QACE,MAAM,IAAI,MAAM,4BAA4BC,EAAW,qBAAqB,mBAAmB,CACnG,CACF,GAAG,EACHD,EAAO,KAAK,EAAK,EAAGA,EAAO,MAAM,EACjC,IAAMwB,EAAsBH,EAAW,MAAM,EAC7C,OAAIpB,EAAW,KAAK,OAAS,GAC3BA,EAAW,KAAK,QAASkB,GAAMnB,EAAOmB,CAAC,EAAII,CAAa,EACxDtB,EAAW,KAAK,QAASkB,GAAMK,EAAoBL,CAAC,EAAI,KAAK,MAAME,EAAWF,CAAC,EAAInB,EAAOmB,CAAC,CAAC,CAAC,IAE7FnB,EAAO,KAAKuB,EAAe,EAAGvB,EAAO,MAAM,EAC3CwB,EAAoB,QAAQ,CAACL,EAAGC,IAAMI,EAAoBJ,CAAC,EAAI,KAAK,MAAMD,EAAInB,EAAOoB,CAAC,CAAC,CAAC,GAEnFI,CACT,EAEMzC,GACF,CAAC0C,EAAuBJ,EAA+BC,EAAgCI,EACtFC,IAA8B;AAAA,mEACgCF,EAAO,KAAK,OAAO,cAC9EA,EAAO,KAAK,KAAK,KAAKH,EAAY,MAAM;AAAA,oCACZG,EAAO,KAAK,KAAK,KAAKH,EAAY,MAAM;AAAA,gCAC5CA,EAAY,MAAM;AAAA,6BACrBG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA,sBAC/CG,GAAa,kBAAmB,IAAKF,CAAY,CAAC;AAAA,wBAChDE,GAAa,eAAgB,IAAKD,CAAS,CAAC;AAAA,uBAC7CC,GAAa,eAAgB,OAAOP,EAAW,MAAM,GAAIM,CAAS,CAAC;AAAA;AAAA,kCAExDF,EAAO,KAAK,KAAK;AAAA;AAAA,gCAEnBG,GAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA,iCAC3DO,GAAa,wBAAyB,IAAKN,EAAY,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,OAQzFtC,GACF,CAAC6C,EAAsBJ,EAAuBJ,EAA+BC,EAC5EI,EAAsBC,EAAmBG,IAAsC;AAAA,gEACpBL,EAAO,KAAK,OAAO,QAAQI,EAAM,KAAK,OAAO;AAAA,2BAClFA,EAAM,KAAK,OAAO;AAAA,gCACbP,EAAY,MAAM;AAAA,6BACrBG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA,sBAE/CG,GAAa,kBAAmB,IAAKF,CAAY,CAAC;AAAA;AAAA;AAAA;AAAA,0BAI9CE,GAAa,eAAgB,IAAKD,CAAS,CAAC;AAAA,yBAC7CC,GAAa,eAAgB,OAAOP,EAAW,MAAM,GAAIM,CAAS,CAAC;AAAA,gCAC5DC,GAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA,iCAC3DO,GAAa,wBAAyB,IAAKN,EAAY,MAAM,CAAC;AAAA;AAAA;AAAA,iBAG9EQ,CAAgB,4CAA4CL,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA,wCAGtDA,EAAO,KAAK,KAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAS/CI,EAAM,WAAW,gBAAiB,IAAK,cAAc,CAAC;AAAA;AAAA;AAAA,OAI1D5C,GAAoB,CAAC4C,EAAsBR,IAA0C;AAAA,0CACjDQ,EAAM,KAAK,OAAO;AAAA,gCAC5BR,EAAW,MAAM;AAAA,4BACrBQ,EAAM,WAAW,gBAAiB,GAAG,CAAC;AAAA,gDAClBD,GAAa,uBAAwB,IAAKP,EAAW,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,OAOtGnC,GACF,CAAC2C,EAAsBE,EAAoBC,EAAkBC,IACzDJ,EAAM,KAAOI,EAAc;AAAA,MAC7BJ,EAAM,WAAW,gBAAiBE,EAAY,SAAS,CAAC;AAAA,MACxDF,EAAM,WAAW,gBAAiBG,EAAU,OAAO,CAAC;AAAA,EAEvB,GAE7B7C,GACF,CAAC0C,EAAsBJ,EAAuBJ,EAA+BS,EAC5EI,IAAuC,CAEtC,GAAM,CAACF,EAAUG,EAAWC,EAAUL,CAAU,EAC5CV,EAAW,SAAW,EAAI,CAAC,GAAI,EAAG,EAAG,EAAE,EAAc,CAAC,EAAG,EAAG,EAAG,CAAC,EAC9DN,EAAQc,EAAM,KAAK,MACzB,MAAO;AAAA,wEAC2Dd,CAAK;AAAA,2BAClDc,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,gBAAiBM,EAAW,mBAAmBd,EAAWc,CAAS,CAAC,QAAQ,CAAC;AAAA,QAC9FN,EAAM,WAAW,gBAAiBO,EAAU,mBAAmBf,EAAWe,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC5FlD,GAA0B2C,EAAOE,EAAYC,EAAU,CAAC,CAAC;AAAA,eAClDH,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA;AAAA,+CAGHJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA;AAAA,gBAE/DA,CAAK,sBAAsBoB,CAAS;AAAA,gBACpCpB,CAAK,sBAAsBqB,CAAQ;AAAA,QAEzCN,EACI,yBAAyBT,EAAWc,CAAS,CAAC,8BAA8Bd,EAAWe,CAAQ,CAAC;AAAA,iBAC7FF,CAAkB;AAAA,SAErB,EAAE;AAAA,8BACcb,EAAWc,CAAS,CAAC;AAAA,8BACrBd,EAAWe,CAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,2BAKvBf,EAAW,OAAS,EAAI,uBAAuBU,CAAU,KAAO,GAAG;AAAA,0BACpEV,EAAW,OAAS,EAAI,uBAAuBW,CAAQ,KAAO,GAAG;AAAA,iBAC1EjB,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK;AAAA,iBACLA,CAAK,gBAAgBA,CAAK;AAAA,iBAC1BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,gBAAgBA,CAAK;AAAA,iBAC1BA,CAAK,UAAUA,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAWjC,EAEE3B,GACF,CAACy
C,EAAsBJ,EAAuBJ,EAA+BC,EAC5EtB,EAA2BU,EAAwB2B,EAAqBP,EACxEI,EAA4BI,IAAoC,CAC/D,IAAMC,EAAOlB,EAAW,SAAW,EAC7BmB,EAAS,GACT,CAACL,EAAWC,CAAQ,EAAIG,EAAO,CAAC,EAAG,CAAC,EAAIC,EAAS,CAAC,EAAG,CAAC,EAAI,CAAC,EAAG,CAAC,EAC/DzB,EAAQc,EAAM,KAAK,MACnBY,EAAoCC,GAAwB,CAChE,IAAMC,EAAYD,IAAQP,EAAY,MAAQ,MAC9C,MAAO;AAAA,WACJQ,CAAS,qCAAqCd,EAAM,KAAK,OAAO,qBAC/DJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA,6BACfU,EAAO,WAAW,iBAAkBiB,CAAG,CAAC;AAAA,2BAC1C3B,CAAK,+DAA+Df,EAAO0C,CAAG,CAAC;AAAA,UAChGpB,EAAYoB,CAAG,CAAC,KAAKrB,EAAWqB,CAAG,CAAC,KAAKhC,EAAIgC,CAAG,CAAC,KAAKhC,EAAIgC,CAAG,CAAC,MAAMrB,EAAW,MAAM;AAAA,gCAC/DN,CAAK;AAAA;AAAA;AAAA,cAGvBe,CAAgB,0CAA0CT,EAAWqB,CAAG,CAAC;AAAA,mBACpER,CAAkB;AAAA;AAAA,0BAEXnB,CAAK,gBAAgBA,CAAK;AAAA;AAAA,gBAEpC4B,CAAS,KAAK5B,CAAK,oBAAoBA,CAAK;AAAA,gBAC5C4B,CAAS,WAAWA,CAAS,OAAOtB,EAAWqB,CAAG,CAAC;AAAA,eACpD,IACDJ,EACK;AAAA,mCAEER,EACF,UAAUI,CAAkB,IAE5B,GAAGS,CAAS,iBAAiBA,CAAS,KAAKtB,EAAWqB,CAAG,CAAC,WAElE,CAAC;AAAA;AAAA,kCAEsBb,EAAM,KAAK,OAAO;AAAA,YACxCA,EAAM,WAAW,qBAAsBa,EAAK,OAAOC,CAAS,GAAG,CAAC;AAAA,0BAEhED,IAAQP,EAAYN,EAAM,aAAa,oBAAoB,EACvC,2DAA2D;AAAA;AAAA;AAAA,QAIrF,EAEA,MAAO;AAAA,MACPY,EAAiCN,CAAS,CAAC;AAAA,MAC3CM,EAAiCL,CAAQ,CAAC;AAAA,qCACXrB,CAAK,cAAcA,CAAK;AAAA;AAAA,wBAErCA,CAAK,gBAAgBA,CAAK;AAAA,wBAC1BA,CAAK;AAAA,wBACLA,CAAK;AAAA,uBACNA,CAAK;AAAA,oBACRsB,CAAW,wBAAwBA,CAAW,yBACxDA,CAAW,yBAAyBA,CAAW;AAAA,oBACrCA,CAAW,mBAAmBA,CAAW;AAAA,oBACzCA,CAAW,2BAA2BA,CAAW;AAAA,oBACjDA,CAAW,yBAAyBA,CAAW,0BACzDA,CAAW,0BAA0BA,CAAW;AAAA;AAAA;AAAA;AAAA,qCAIrBtB,CAAK,sBAAsBA,CAAK,YAAYA,CAAK;AAAA,oBAClEA,CAAK;AAAA;AAAA;AAAA;AAAA,4CAImBU,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA,yBACnDc,EAAM,KAAK,OAAO;AAAA;AAAA;AAAA,KAIvC,EAEExC,GACF,CAACwC,EAAsBJ,EAAuBJ,EAA+BS,EAC5EI,IAAuC,CAEtC,GAAM,CAACF,EAAUY,EAAUT,EAAWC,EAAUL,CAAU,EACtDV,EAAW,SAAW,EAAI,CAAC,GAAI,EAAG,EAAG,EAAG,EAAE,EAAc,CAAC,EAAG,EAAG,EAAG,EAAG,CAAC,EACpEN,EAAQc,EAAM,KAAK,MACzB,MAAO;AAAA,wFAC2Ed,CAAK;AAAA,2BAClEc,EAAM,KAAK,OAAO;AAAA,QACrCA,EAAM,WAAW,gBAAiBe,EAAU,qBAAqBvB,EAAWuB,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC9Ff,EAAM,WAAW,gBAAiBM,EAAW,sBAAsBd,EAAWc,CAAS,CAAC,QAAQ,CAAC;AAAA,QACjGN,EAAM,WAAW,gBAAiBO,EAAU,qBAAqBf,EAAWe,CAAQ,CAAC,QAAQ,CAAC;AAAA,QAC9FlD,GAA0B2C,EAAOE,EAAYC,EAAU,CAAC,CAAC;AAAA,eAClDH,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA;AAAA,gDAGFJ,EAAO,KAAK,OAAO,QAAQV,CAAK;AAAA;AAAA,kBAE9DA,CAAK,sBAAsB6B,CAAQ;AAAA,mBAClC7B,CAAK,sBAAsBoB,CAAS;AAAA,kBACrCpB,CAAK,sBAAsBqB,CAAQ;AAAA,QAE3CN,EAAmB,6BAA6BT,EAAWuB,CAAQ,CAAC,oCAC7CvB,EAAWc,CAAS,CAAC,kCAAkCd,EAAWe,CAAQ,CAAC;AAAA,eAC7FF,CAAkB;AAAA,WAEJ,EAAE;AAAA;AAAA,gCAECb,EAAWuB,CAAQ,CAAC;AAAA,oCAChBvB,EAAWc,CAAS,CAAC;AAAA,kCACvBd,EAAWe,CAAQ,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAO3Bf,EAAW,OAAS,EAAI,uBAAuBU,CAAU,KAAO,GAAG;AAAA,0BACpEV,EAAW,OAAS,EAAI,uBAAuBW,CAAQ,KAAO,GAAG;AAAA;AAAA,kBAEzEjB,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,kBACLA,CAAK;AAAA,iBACNA,CAAK,kBAAkBA,CAAK;AAAA,iBAC5BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,mBAAmBA,CAAK;AAAA,iBAC7BA,CAAK,UAAUA,CAAK;AAAA,iBACpBA,CAAK,kBAAkBA,CAAK;AAAA,iBAC5BA,CAAK,UAAUA,CAAK;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,MAgBjC,EAEEzB,GACF,CAACuD,EAAyB5C,EAA8BO,EAAsBsC,EAC7ErC,EAA0BsC,IAA6C,CACtE,IAAM1B,EAAawB,EAAY,KACzBnC,EAAM9B,GAAUmE,EAAU9C,EAAW,KAAMoB,EAAW,MAAM,EAE9DC,EAAczC,GAAgBwC,EAAYyB,EAAarC,EAAOR,EAAW,IAAI,EAC7ED,EAAS8C,EAAY,MAAM,EAC3BA,EAAY,SAAW,IACzB9C,EAASqB,EAAW,IAAI,CAACnB,EAAOI,IAAUJ,IAAU,EAAI,EAAMoB,EAAYhB,CAAK,EAAIJ,CAAK,EACpFD,EAAW,wBAA0B,YACvCqB,EAAcxC,GAAkBuC,EAAYrB,EAAQC,CAAU,IAGlE,IAAMwB,EAASuB,GAAe,SAAUH,EAAY,SAAUvB,EAAY,MAAM,EAC1EO,EAAQoB,EAAc,QAASJ,EAAY,SAAUxB,EAAW
,MAAM,EACtE6B,EAAaC,EAAU,KAAK7B,CAAW,EACvC8B,EAAU/B,EAAW,SAAWC,EAAY,QAAUD,EAAW,MAAM,CAACgC,EAAGjC,IAAMiC,IAAM/B,EAAYF,CAAC,CAAC,EACrGU,EAAmB7B,EAAW,0BAA4B,qBAC1DiC,EAAqBjC,EAAW,mBAChCqD,EAAWzB,EAAM,KAAK,MACtB0B,EAAmBC,GAA+B;AAAA,QACtDJ,EAAU,GAAK;AAAA,QACf1E,GAA2CuB,EAAW,wBAAyBqD,CAAQ,CAAC;AAAA,SACvF,IAAM,CACP,OAAQrD,EAAW,KAAM,CACvB,IAAK,UACH,MAAO;AAAA,gBACHhB,GAAkB4C,EAAOR,CAAU,CAAC;AAAA,gBACpC1C,GAA4BsB,EAAW,YAAaO,EAAc8C,CAAQ,CAAC;AAAA,gBAE3EtE,GACI6C,EAAOJ,EAAQJ,EAAYC,EAAatB,EAAO,OAAQU,EAAI,OAAQoB,CAAgB,CAAC;AAAA,gBAE9F,IAAK,SACH,MAAO;AAAA,gBACH/C,GAA0C0C,EAAQJ,EAAYC,EAAatB,EAAO,OAAQU,EAAI,MAAM,CAAC;AAAA,iBACpG,IAAM,CACT,GAAIW,EAAW,SAAW,GAAKA,EAAW,SAAW,EACnD,MAAO,GAAGlC,GAAsB0C,EAAOJ,EAAQJ,EAAYS,EAAkBI,CAAkB,CAAC,GAC3F,GAAIb,EAAW,SAAW,GAAKA,EAAW,SAAW,EAC1D,MAAO,GAAGhC,GAAuBwC,EAAOJ,EAAQJ,EAAYS,EAAkBI,CAAkB,CAAC,GAEjG,MAAM,MAAM,kFAAkF,CAElG,GAAG,CAAC;AAAA,cAEN,IAAK,QACH,MAAO;AAAA,eACJ,IAAM,CACP,GAAIb,EAAW,SAAW,GAAKA,EAAW,SAAW,EACnD,MAAO,GACHjC,GACIyC,EAAOJ,EAAQJ,EAAYC,EAAatB,EAAQU,EAAKT,EAAW,YAAa6B,EAC7E7B,EAAW,mBAAoBA,EAAW,cAAc,CAAC,GAEjE,MAAM,MAAM,2EAA2E,CAE3F,GAAG,CAAC;AAAA,cAEN,QACE,MAAM,MAAM,qBAAqB,CACrC,CACF,GAAG,CAAC;AAAA,OACH;AAAA,QAEGuD,EAAa,gBAAgB,cAAe,KAAK,EAC5C,gBAAgB,SAAU,MAAOxD,EAAO,MAAM,EAC9C,gBAAgB,MAAO,MAAOU,EAAI,MAAM,EACxC,iBAAiBmB,EAAOJ,CAAM,CAAC;AAAA,QACtC+B,EAAa,UAAU,CAAC;AAAA,UACtBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,UAC1EJ,EAAU,0CAA4C;AAAA,+BACjC3B,EAAO,gBAAgB,YAAY,CAAC;AAAA,6BACtCI,EAAM,KAAK,OAAO;AAAA,WACpC,IAAM,CACT,OAAQ5B,EAAW,KAAM,CACvB,IAAK,UACH,MAAO;AAAA;AAAA,yCAEsB4B,EAAM,aAAa,eAAe,CAAC;AAAA;AAAA,yCAEnC5B,EAAW,kBAAkB;AAAA,mBAE5D,IAAK,SACH,MAAO,wBACFoB,EAAW,SAAW,GAAKA,EAAW,SAAW,EAAK,wBACA,wBAAwB,oBACrF,IAAK,QACH,MAAO,6DACT,QACE,MAAM,MAAM,4BAA4BpB,EAAW,IAAI,EAAE,CAC7D,CACF,GAAG,CAAC;AAAA,CACT;AAAA,SAGK,MAAO,CACL,KAAM,SACN,YAAa,CACX,KAAM,GAAGA,EAAW,QAAQ,IAAIO,CAAY,IAAIR,EAAO,OAAS,EAAIA,EAAS,EAAE,IAC3ES,EAAM,OAAS,EAAIA,EAAQ,EAAE,IAAIC,EAAI,OAAS,EAAIA,EAAM,EAAE,IAAI0C,CAAO,IAAI/B,CAAU,GACvF,kBAAmB,CAAC,MAAM,CAC5B,EACA,gBAAAkC,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMjC,EAAa,SAAUuB,EAAY,QAAQ,CAAC,EAC7D,cAAe,CAAC,EAAG,KAAK,KAAKK,EAAa,EAAuB,CAAC,EAClE,gBAAiB,CACf,CAAC,QAAuB,KAAMA,CAAU,EAAG,CAAC,OAAsB,KAAMlD,CAAM,EAC9E,CAAC,OAAsB,KAAMU,CAAG,EAAG,GAAG+C,GAA2BpC,EAAYC,CAAW,CAC1F,CACF,EACF,CACF,EAEE/B,GAAuCmE,GAAoC,CAC/E,IAAMC,EAAmBD,EAAQ,iBAGjC,OAF2B,IAAI,YAAYC,EAAkBA,EAAiB,WAAY,CAAC,EACnD,CAAC,CAE3C,EAEanE,GAAS,CAACkE,EAAyBzD,IAAuC,CACrF,IAAMD,EAAmB,CAAC,EACpBS,EAAkB,CAAC,EACnBC,EAAgB,CAAC,EAKjBF,EAAejB,GAAoCmE,CAAO,EAChE,GAAIzD,EAAW,YAAc,EAC3B,MAAM,MAAM,6DAA6D,EAE3ExB,GAAeiF,EAAQ,OAAQzD,EAAYO,EAAcR,EAAQS,EAAOC,CAAG,EAC3EgD,EAAQ,QACJpE,GAAwBoE,EAAQ,OAAO,CAAC,EAAGzD,EAAYO,EAAcR,EAAQS,EAAOC,CAAG,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAC7G,EAEajB,GAAyBQ,GAA0D,CAC9F,IAAM2D,EAAY3D,EAAW,UACvBE,EAAOF,EAAW,KAClB4D,EACF5D,EAAW,wBACToC,EAAcpC,EAAW,YACzBqC,EAAiBrC,EAAW,iBAA6B,EACzDiC,EAAqBjC,EAAW,mBAChC6D,EAA+C7D,EAAW,sBAC1D8D,EAAa9D,EAAW,KAExBe,EAA4Bf,EAAW,cAAgB,GAAK,SAAWA,EAAW,YACxF,OAAO+D,GAA4B,CACjC,UAAAJ,EACA,KAAAzD,EACA,wBAAA0D,EACA,YAAAxB,EACA,eAAAC,EACA,mBAAAJ,EACA,sBAAA4B,EACA,KAAAC,EACA,YAAA/C,CACF,CAAC,CACH,IC1rBA,IAkBMiD,GAqDAC,GA+FOC,GAtKbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KASMR,GAAiB,CAACS,EAA+BC,IAAgD,CACrG,GAAM,CAACC,EAAOC,EAAaC,EAAUC,CAAQ,EAAIL,EAC3C,CAAC,SAAAM,EAAU,mBAAAC,CAAkB,EAAIN,EAEvC,GAAIC,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,wDAAwDA,EAAM,KAAK,MAAM,EAAE,EAE7F,GAAI,CAACM,EAAU,SAASL,EAAY,KAAM,CAAC,CAAC,GAAK,CAACK,EAAU,SAASL,EAAY,KAAM,CAAC,CAAC,CAAC,GACtFA,EAAY,KAAK,SAAW,EAC9B,MAAM,IAAI,MAAM,uEAAuEA,EAAY,KAAK,MAAM,EAAE,EAElH,GAAIC,EAAS,KAAK,SAAW,E
AC3B,MAAM,IAAI,MAAM,2DAA2DA,EAAS,KAAK,MAAM,EAAE,EAEnG,GAAIC,EAAS,KAAK,SAAW,EAC3B,MAAM,IAAI,MAAM,2DAA2DA,EAAS,KAAK,MAAM,EAAE,EAEnG,GAAI,CAACG,EAAU,SAASJ,EAAS,KAAMC,EAAS,IAAI,EAClD,MAAM,IAAI,MAAM,wEAA4E,EAG9F,GAAIE,EAAqB,GAAKD,IAAa,EACzC,MAAM,IAAI,MAAM,iEAAiE,EAGnF,IAAMG,EAAYP,EAAM,KAAK,CAAC,EACxBQ,EAAiBR,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EACjDS,EAAoBP,EAAS,KAAK,CAAC,EACnCQ,EAAaJ,EAAU,kBAAkBN,EAAM,KAAM,CAAC,EAAIQ,EAC1DG,EAAWN,IAAuB,EAAIH,EAAS,KAAK,CAAC,EAAI,EAAIQ,EAAaN,EAChF,GAAIC,EAAqBM,EACvB,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIV,EAAY,KAAK,SAAW,EAAG,CACjC,GAAIM,IAAcN,EAAY,KAAK,CAAC,EAClC,MAAM,IAAI,MAAM,sEAAsEA,EAAY,KAAK,CAAC,CAAC,EAAE,EAE7G,GAAIO,IAAmBP,EAAY,KAAK,CAAC,EACvC,MAAM,IAAI,MAAM,2EAA2EA,EAAY,KAAK,CAAC,CAAC,EAAE,CAEpH,CAEA,GAAIU,EAAW,IAAMT,EAAS,KAAK,CAAC,GAAKG,EAAqB,IAAMH,EAAS,KAAK,CAAC,EACjF,MAAM,IAAI,MAAM,kGACZA,EAAS,KAAK,CAAC,CAAC,EAAE,EAGxB,GAAIM,EAAiBC,EACnB,MAAM,IAAI,MAAM,gFAAgF,CAEpG,EAEMnB,GACF,CAACQ,EAA+BC,IAAuD,CACrF,GAAM,CAAC,YAAAa,EAAa,SAAAR,EAAU,mBAAAC,EAAoB,MAAAQ,CAAK,EAAId,EACrDQ,EAAYT,EAAO,CAAC,EAAE,KAAK,CAAC,EAC5BgB,EAAcR,EAAU,kBAAkBR,EAAO,CAAC,EAAE,KAAM,CAAC,EAC3DU,EAAiBV,EAAO,CAAC,EAAE,KAAKA,EAAO,CAAC,EAAE,KAAK,OAAS,CAAC,EACzDY,EAAaI,EAAcN,EAC3BO,EAAyBjB,EAAO,CAAC,EAAE,KAAK,CAAC,EACzCa,EAAWN,IAAuB,EAAIU,EAAyB,EAAIL,EAAaN,EAKhFY,EACF,IAAI,MAAcT,EAAWC,EAAgBE,EAAaC,EAAUA,EAAWI,CAAsB,EACnGE,EAAgBX,EAAU,eAAeU,CAAW,EAEpDE,EAAoC,CACxC,CAAC,OAAsB,KAAML,CAAK,EAClC,CAAC,QAAuB,KAAMG,CAAW,EACzC,CAAC,QAAuB,KAAMC,CAAa,EAI3C,GAAInB,EAAO,CAAC,EAAE,KAAK,SAAW,EACtB,IAAI,MAAsB,CAAC,QAAuB,KAAM,CAACgB,EAAaJ,EAAYC,EAAU,CAAC,CAAC,CAAC,EAC/F,CAAC,EACT,GAAIb,EAAO,CAAC,EAAE,KAAK,SAAW,EACtB,IAAI,MACA,CAAC,QAAuB,KAAM,CAACgB,EAAaH,EAAUH,EAAiBG,EAAU,CAAC,CAAC,CAAC,EACxF,CAAC,EAET,GAAGQ,GAA2BrB,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,KAAMA,EAAO,CAAC,EAAE,IAAI,CAC9G,EAEMsB,EAAmBC,GAA+B,CACtD,IAAMrB,EAAQsB,EAAc,QAASxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACxEG,EAAcqB,EAAc,eAAgBxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACrFI,EAAWoB,EAAc,YAAaxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC/EK,EAAWmB,EAAc,YAAaxB,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAC/EyB,EAASC,GAAe,SAAU1B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EAEjF,OAAAuB,EAAa,iBAAiB,CAC5B,CAAC,KAAM,QAAS,KAAM,KAAK,EAC3B,CAAC,KAAM,eAAgB,KAAM,MAAO,OAAQL,EAAY,MAAM,EAC9D,CAAC,KAAM,iBAAkB,KAAM,MAAO,OAAQC,EAAc,MAAM,EAClE,CAAC,KAAM,uBAAwB,KAAM,MAAO,OAAQA,EAAc,MAAM,CAC1E,CAAC,EAEM;AAAA,UACLI,EAAa,iBAAiBrB,EAAOC,EAAaC,EAAUC,EAAUoB,CAAM,CAAC;AAAA;AAAA,UAE7EF,EAAa,UAAUI,EAAc,CAAC;AAAA,+CACDvB,EAAS,IAAI;AAAA;AAAA;AAAA,YAGhDmB,EAAa,sCAAsC,MAAM,CAAC;AAAA;AAAA;AAAA;AAAA,kBAIpDpB,EAAY,2BAA2B,UAAWuB,GAAe,GAAIvB,EAAY,KAAK,OAAQ,CAAC,CAAC,CAAC;AAAA;AAAA,sBAE7FA,EAAY,YAAY,kBAAkB,CAAC;AAAA,oFACmBW,CAAW;AAAA,yDACtCA,CAAW;AAAA,uBAC7CZ,EAAM,YAAY,GAAG,CAAC,MAAME,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,kBACvEF,EAAM,YAAY,GAAG,CAAC,MAAMG,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,cACtEoB,EAAO,YAAY,IAAK,IAAI,CAAC;AAAA,uBACpBvB,EAAM,YAAY,GAAG,CAAC,MAAMG,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,kBACvEH,EAAM,YAAY,GAAG,CAAC,MAAME,EAAS,IAAI,cAAe,SAAS,CAAC;AAAA,cACtEqB,EAAO,YAAY,IAAK,IAAI,CAAC;AAAA;AAAA;AAAA,cAG7BA,EAAO,YAAY,IAAKvB,EAAM,YAAY,GAAG,CAAC,CAAC;AAAA;AAAA,UAGvD,EAEA,MAAO,CACL,KAAM,kBACN,YAAa,CACX,KAAM0B,GAA4B,CAC1B,YAAAd,CACF,CAAC,EAAE,SACT,kBAAmB,CAAC,OAAQ,OAAQ,OAAQ,MAAM,CACpD,EACA,gBAAAQ,EACA,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMtB,EAAO,CAAC,EAAE,KAAM,SAAUA,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC9D,cAAe,CAAC,EAAG,KAAK,KAAKQ,EAAU,KAAKU,CAAW,EAAIS,EAAc,CAAC,EAC1E,gBAAAP,CACF,EACF,CACF,EAES3B,GAAkB,CAACoC,EAAyB5B,IAAgD,CACvGV,GAAesC,EAAQ,OAAQ5B,CAAU,
EACzC4B,EAAQ,QAAQrC,GAAiCqC,EAAQ,OAAQ5B,CAAU,CAAC,CAC9E,ICzKA,IAeM6B,GAyDAC,GA2GOC,GAnLbC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KAOMP,GAAkBQ,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,uCAAuC,EAGzD,IAAMC,EAAoBD,EAAO,CAAC,EAC5BE,EAAmBF,EAAO,CAAC,EAC3BG,EAAoBH,EAAO,CAAC,EAElC,GAAIC,EAAM,WAAaC,EAAK,UAAYD,EAAM,WAAaE,EAAM,SAC/D,MAAM,IAAI,MAAM,yCAAyC,EAG3D,GAAIF,EAAM,KAAK,SAAW,GAAKA,EAAM,KAAK,SAAW,EACnD,MAAM,IAAI,MAAM,wBAAwB,EAG1C,GAAIC,EAAK,KAAK,SAAW,GAAKA,EAAK,KAAK,SAAW,EACjD,MAAM,IAAI,MAAM,uBAAuB,EAGzC,IAAME,EAAaH,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EAC7CI,EAAiBJ,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,EACvD,GAAIC,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAME,EACtC,MAAM,IAAI,MAAM,8CAA8C,EAEhE,GAAIF,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMG,EACtC,MAAM,IAAI,MAAM,kDAAkD,EAGpE,GAAIF,EAAM,KAAK,SAAW,EACxB,MAAM,IAAI,MAAM,kBAAkB,EAEpC,GAAIA,EAAM,KAAKA,EAAM,KAAK,OAAS,CAAC,IAAMC,EACxC,MAAM,IAAI,MAAM,+CAA+C,EAEjE,GAAIJ,EAAO,OAAS,EAAG,CACrB,IAAMM,EAAmBN,EAAO,CAAC,EACjC,GAAIM,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,iBAAiB,EAEnC,GAAIA,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMF,EACtC,MAAM,IAAI,MAAM,8CAA8C,CAElE,CAEA,GAAIJ,EAAO,OAAS,EAAG,CACrB,IAAMO,EAAmBP,EAAO,CAAC,EACjC,GAAIO,EAAK,KAAK,SAAW,EACvB,MAAM,IAAI,MAAM,iBAAiB,EAEnC,GAAIA,EAAK,KAAKA,EAAK,KAAK,OAAS,CAAC,IAAMH,EACtC,MAAM,IAAI,MAAM,8CAA8C,CAElE,CACF,EAEMX,GACF,CAACO,EAA+BQ,EAAqCC,EAAqBC,IACvE,CACb,IAAMC,EAAaH,EAAW,WAExBI,EAAaZ,EAAO,CAAC,EAAE,KACvBa,EAAYC,EAAU,KAAKF,CAAU,EACrCG,EAAcH,EACdI,EAAaH,EACbT,EAAaQ,EAAW,MAAM,EAAE,EAAE,CAAC,EACnCK,EAAmBP,EAAaE,EAAW,MAAM,EAAG,EAAE,EAAE,OAAO,CAAC,EAAI,CAAC,EACrEM,EAAe,CAACP,GAAcX,EAAO,OAAS,EAC9CmB,EAAenB,EAAO,OAAS,EAC/BoB,EAAgBV,GAAcD,EAAc,EAC5CY,EAAqBX,GAAcD,EAAc,EACjDa,EAA4Bb,EAAc,EAE1Cc,EAAaC,GAAiBpB,CAAU,EAExCqB,EAAoC,CACxC,CAAC,QAAuB,KAAMT,CAAU,EACxC,CAAC,QAAuB,KAAMO,CAAU,EACxC,CAAC,QAAuB,KAAMnB,CAAU,EACxC,CAAC,OAAsB,KAAMI,EAAW,OAAO,CACjD,EACMkB,EAAmBC,GAA+B,CACtD,IAAMC,EAAmC,CACvC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,aAAc,KAAM,KAAK,EAChC,CAAC,KAAM,cAAe,KAAM,KAAK,EACjC,CAAC,KAAM,UAAW,KAAM,KAAK,CAC/B,EACMC,EAAY,CAChBC,EAAc,IAAK9B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMuB,CAAU,EACjEO,EAAc,OAAQ9B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMuB,CAAU,EACpEO,EAAc,QAAS9B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMuB,CAAU,CACvE,EACIL,GACFW,EAAU,KAAKC,EAAc,OAAQ9B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMuB,CAAU,CAAC,EAElFJ,GACFU,EAAU,KAAKC,EAAc,OAAQ9B,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAMuB,CAAU,CAAC,EAEtFM,EAAU,KAAKE,GAAe,SAAU/B,EAAO,CAAC,EAAE,SAAUe,EAAaQ,CAAU,CAAC,EAChFH,GACFS,EAAU,KAAKE,GAAe,gBAA+Bd,CAAgB,CAAC,EAE5EI,GACFQ,EAAU,KAAKE,GAAe,mBAAkCd,CAAgB,CAAC,EAE/EK,GACFO,EAAU,KAAKE,GAAe,sBAAuB/B,EAAO,CAAC,EAAE,SAAUe,EAAaQ,CAAU,CAAC,EAEnG,IAAMS,GAAWC,GAA4BjC,EAAO,CAAC,EAAE,QAAQ,EAC/D,MAAO;AAAA;AAAA,QAEX2B,EAAa,iBAAiBC,CAAa,EAAE,iBAAiB,GAAGC,CAAS,CAAC;AAAA;AAAA,QAE3EF,EAAa,UAAU,CAAC;AAAA,UACtBA,EAAa,sCAAsC,6CAA6C,CAAC;AAAA;AAAA;AAAA,oBAGvFO,GAAW,MAAOX,CAAU,CAAC;AAAA,0BACvBW,GAAW,MAAOX,CAAU,CAAC;AAAA;AAAA;AAAA,6BAG1BJ,EAAe,UAAYa,GAAW,OAAO;AAAA;AAAA;AAAA,YAG9DV,EAA4B,2CAA6C,EAAE;AAAA;AAAA,4BAE3Da,GAAUH,GAAUT,EAAY,OAAO,CAAC;AAAA;AAAA;AAAA;AAAA,qBAI/Ca,GAAU,MAAOb,CAAU,CAAC;AAAA,wCACTa,GAAU,YAAab,CAAU,CAAC,gCAC1DZ,EAAa,GAAK,eAAe;AAAA,UACvCS,EAAgB,kCAAoC,EAAE;AAAA,UACtDC,EAAqB,4CAA8C,EAAE;AAAA;AAAA,qDAE1BV,EAAa,GAAK,KAAKqB,EAAQ,QAAQ,OAC5EA,EAAQ,4BAA4Bd,EAAe,YAAc,EAAE;AAAA;AAAA,QAGzE,EACMmB,EAAU,CAAC,CAAC,KAAMtB,EAAa,SAAUf,EAAO,CAAC,EAAE,QAAQ,CAAC,EAClE,OAAIS,EAAc,GAChB4B,EAAQ,KAAK,CAAC,KAAMpB,EAAkB,UAAwB,CAAC,EAE7DR,EAAc,GAChB4B,EAAQ,KAAK,CAAC,KAAMpB,EAAkB,UAAwB,CAAC,EAE7DR,EAAc,GAChB4B,EAAQ,KAAK,CAAC,KAAMzB,EAAY,SAAUZ,EAAO,CAAC,EAAE,QAAQ,CAAC,EAExD,CACL
,KAAM,yBACN,YAAa,CACX,KAAM,GAAGuB,CAAU,IAAIH,CAAa,IAAIC,CAAkB,IAAIC,CAAyB,GACvF,kBAAmBtB,EAAO,IAAI,CAACsC,EAAQC,IAAW,MAAM,CAC1D,EACA,gBAAAb,EACA,WAAY,KAAO,CAAC,QAAAW,EAAS,cAAe,CAAC,EAAG,KAAK,KAAKrB,EAAaZ,EAAa,EAAE,CAAC,EAAG,gBAAAqB,CAAe,EAC3G,CACF,EAEK/B,GAAgB,CAAC8C,EAAyBhC,IAA8C,CAGnGhB,GAAegD,EAAQ,MAAM,EAG7B,IAAMH,EAAU,CAAC,CAAC,EACdG,EAAQ,YAAc,GACxBH,EAAQ,KAAsB,EAAE,EAE9BG,EAAQ,YAAc,GACxBH,EAAQ,KAAsB,EAAE,EAE9BG,EAAQ,YAAc,GACxBH,EAAQ,KAAK,CAAC,EAEhBG,EAAQ,QACJ/C,GAA+B+C,EAAQ,OAAQhC,EAAYgC,EAAQ,YAAa,EAAU,EAAG,CAAC,QAAAH,CAAO,CAAC,CAC5G,ICrMA,IAiBMI,GAkBAC,GAcAC,GAeAC,GAcAC,GAsBAC,GAmFOC,GAYAC,GAnMbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAQMb,GAAiB,CAACc,EAA+BC,IAAsC,CAC3F,GAAI,CAACD,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,EAElC,GAAIC,EAAW,KAAK,SAAW,GAC7B,GAAIA,EAAW,KAAK,SAAWA,EAAW,OAAO,QAAUA,EAAW,KAAK,SAAWA,EAAW,KAAK,OACpG,MAAM,IAAI,MAAM,iDAAiD,UAE1DA,EAAW,OAAO,SAAWA,EAAW,KAAK,OACtD,MAAM,IAAI,MAAM,2CAA2C,EAE7DD,EAAO,MAAM,CAAC,EAAE,QAAQ,CAACE,EAAGC,IAAQ,CAClC,GAAIH,EAAOG,EAAM,CAAC,EAAE,WAAa,GAAkBH,EAAOG,EAAM,CAAC,EAAE,WAAa,EAC9E,MAAM,IAAI,MAAM,SAASA,CAAG,qCAAqC,CAErE,CAAC,CACH,EAEMhB,GAAY,CAACa,EAA+BG,IAA0B,CAC1E,IAAMC,EAAkB,CAAC,EACzB,GAAIJ,EAAO,OAASG,EAClB,GAAIH,EAAOG,CAAG,EAAE,WAAa,EAC3BH,EAAOG,CAAG,EAAE,iBAAiB,EAAE,QAAQE,GAAKD,EAAM,KAAK,OAAOC,CAAC,CAAC,CAAC,UACxDL,EAAOG,CAAG,EAAE,WAAa,EAClCH,EAAOG,CAAG,EAAE,cAAc,EAAE,QAAQE,GAAKD,EAAM,KAAK,OAAOC,CAAC,CAAC,CAAC,MAE9D,OAAM,IAAI,MAAM,SAASF,CAAG,qCAAqC,EAGrE,OAAOC,CACT,EAEMhB,GACF,CAACY,EAA+BC,IAAiD,CAC/E,GAAID,EAAO,OAAS,EAAG,CACrB,IAAMM,EAAmBnB,GAAUa,EAAQ,CAAC,EACtCO,EAAiBpB,GAAUa,EAAQ,CAAC,EACtCQ,EAAiBrB,GAAUa,EAAQ,CAAC,EACxC,OAAIQ,EAAK,SAAW,IAClBA,EAAO,CAAC,GAAG,MAAMR,EAAO,CAAC,EAAE,KAAK,MAAM,EAAE,KAAK,CAAC,GAEzCS,GAA4B,CAAC,OAAAH,EAAQ,KAAAC,EAAM,KAAAC,CAAI,CAAC,CACzD,KACE,QAAOP,CAEX,EAEEZ,GACF,CAACqB,EAAeC,EAAeC,EAA+BJ,EAAyBK,IACzE,CACR,IAAIC,EAAWJ,EAIf,OAHIA,EAAQ,IACVI,GAAYF,EAAWJ,EAAKG,CAAK,CAAC,GAEhCE,EAAMF,CAAK,EAAI,EACV,KAAK,IAAI,EAAG,KAAK,IAAIG,EAAUF,EAAWJ,EAAKG,CAAK,CAAC,EAAI,CAAC,CAAC,EAE3D,KAAK,IAAI,EAAG,KAAK,IAAIG,EAAUF,EAAWJ,EAAKG,CAAK,CAAC,CAAC,CAAC,CAElE,EAEFrB,GACF,CAACc,EAAsBW,EAAuBH,IAC1C,4CAA4CG,EAAO,KAAK,OAAO,QAAQX,EAAM,KAAK,OAAO;AAAA,+BAClEA,EAAM,KAAK,OAAO;AAAA;AAAA,yBAExBQ,EAAW,MAAM;AAAA,kCACRI,GAAa,uBAAwB,IAAKJ,EAAW,MAAM,CAAC;AAAA,4BAClEI,GAAa,iBAAkB,IAAKJ,EAAW,MAAM,CAAC;AAAA,4BACtDI,GAAa,iBAAkB,IAAKJ,EAAW,MAAM,CAAC;AAAA,6BACrDI,GAAa,kBAAmB,IAAKJ,EAAW,MAAM,CAAC;AAAA,iCACnDG,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,cAO3DX,EAAM,WAAW,gBAAiB,IAAK,aAAa,CAAC;AAAA;AAAA;AAAA,SAK7Db,GAAyB,CAACS,EAA+BC,IAA6C,CAC1G,IAAMW,EAAaZ,EAAO,CAAC,EAAE,KACvBiB,EAAYC,EAAU,KAAKN,CAAU,EACrCJ,EAAQP,EAAW,KAAK,OAAS,EAAKiB,EAAU,cAAcjB,EAAW,KAAMW,EAAW,MAAM,EAC1D,CAAC,GAAG,MAAMA,EAAW,MAAM,EAAE,KAAK,CAAC,EAC3EC,EAAQ1B,GAAUa,EAAQ,CAAC,EAC/Ba,EAAM,QAASM,GAASA,IAAS,IAAM,IAAM,CACnB,MAAM,IAAI,MAAM,kBAAkB,CACpC,EAAE,EACtBN,EAAM,SAAW,IACnBA,EAAQ,MAAML,EAAK,MAAM,EAAE,KAAK,CAAC,GAEnC,IAAMF,EAASL,EAAW,OAAO,IAAI,CAACmB,EAAOC,IAAMhC,GAAkB+B,EAAOC,EAAGT,EAAYJ,EAAMK,CAAK,CAAC,EAEjGN,EAAON,EAAW,KAAK,IAAI,CAACqB,EAAKD,IAAMhC,GAAkBiC,EAAKD,EAAGT,EAAYJ,EAAMK,CAAK,CAAC,EAE/F,GAAIL,EAAK,SAAWF,EAAO,QAAUE,EAAK,SAAWD,EAAK,OACxD,MAAM,IAAI,MAAM,8DAA8D,EAGhF,GAAIC,EAAK,SAAWI,EAAW,OAC7B,QAASS,EAAI,EAAGA,EAAIT,EAAW,OAAQ,EAAES,EAClCb,EAAK,SAASa,CAAC,IAClBf,EAAO,OAAOe,EAAG,EAAG,CAAC,EACrBd,EAAK,OAAOc,EAAG,EAAGT,EAAWS,CAAC,CAAC,EAC/BR,EAAM,OAAOQ,EAAG,EAAG,CAAC,GAI1B,IAAME,EAAQV,EAAM,IAAIM,GAAQ,KAAK,KAAKA,CAAI,CAAC,EAE/CN,EAAM,QAAQ,CAACM,EAAME,EAAGG,IAAU,CAChC,GAAIL,EAAO,EAAG,CACZ,IAAMM,GAAYlB,EAAKc,CAAC,EAAIf,EAAOe,CAAC,GAAKF,EACnCO,EAASpB,EAAOe,CAAC,EA
CjBM,EAAWD,EAASD,EAAWZ,EAAMQ,CAAC,EAC5Cf,EAAOe,CAAC,EAAIM,EACZpB,EAAKc,CAAC,EAAIK,EACVF,EAAMH,CAAC,EAAI,CAACF,CACd,CACF,CAAC,EAED,IAAMS,EAAchB,EAAW,MAAM,CAAC,EACtCJ,EAAK,QAAQ,CAACqB,EAAM3B,IAAM,CACxB0B,EAAYC,CAAI,EAAI,KAAK,MAAMtB,EAAKsB,CAAI,EAAIvB,EAAOuB,CAAI,GAAKhB,EAAMgB,CAAI,CAAC,CACzE,CAAC,EACD,IAAMC,EAA+B,CAAC,KAAMF,EAAa,SAAU5B,EAAO,CAAC,EAAE,QAAQ,EAE/Ee,EAASgB,GAAe,SAAU/B,EAAO,CAAC,EAAE,SAAU4B,EAAY,MAAM,EACxExB,EAAQ4B,EAAc,QAAShC,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,MAAM,EACxEiC,EAAaf,EAAU,KAAKU,CAAW,EACvCM,EAA8B,CAClC,CAAC,KAAM,aAAc,KAAM,KAAK,EAAG,CAAC,KAAM,SAAU,KAAM,MAAO,OAAQ5B,EAAO,MAAM,EACtF,CAAC,KAAM,QAAS,KAAM,MAAO,OAAQiB,EAAM,MAAM,EAAG,CAAC,KAAM,QAAS,KAAM,MAAO,OAAQV,EAAM,MAAM,CACvG,EAEMsB,EAAoC,CACxC,CAAC,QAAuB,KAAMF,CAAU,EAAG,CAAC,QAAuB,KAAM3B,CAAM,EAC/E,CAAC,OAAsB,KAAMiB,CAAK,EAAG,CAAC,QAAuB,KAAMV,CAAK,EACxE,GAAGuB,GAA2BpC,EAAO,CAAC,EAAE,KAAM4B,CAAW,CAC3D,EAEMS,EAAmBC,GAA+B;AAAA,QAClDA,EAAa,iBAAiBJ,CAAQ,EAAE,iBAAiB9B,EAAOW,CAAM,CAAC;AAAA,UACrEzB,GAA0Bc,EAAOW,EAAQH,CAAU,CAAC;AAAA,UACpD0B,EAAa,UAAU,CAAC;AAAA,YACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA,iCACpDvB,EAAO,gBAAgB,YAAY,CAAC;AAAA;AAAA,YAEzDA,EAAO,YAAY,aAAcX,EAAM,aAAa,eAAe,CAAC,CAAC;AAAA,SAE/E,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAM,GAAGmB,EAAM,MAAM,IAAIjB,EAAO,MAAM,IAAIO,EAAM,MAAM,GAAI,kBAAmB,CAAC,MAAM,CAAC,EACnG,gBAAAwB,EACA,WAAY,KAAO,CACjB,QAAS,CAACP,CAAgB,EAC1B,cAAe,CAAC,EAAG,KAAK,KAAKb,EAAY,EAAuB,CAAC,EACjE,gBAAAkB,CACF,EACF,CACF,EAEa3C,GAAQ,CAAC+C,EAAyBtC,IAAsC,CACnFf,GAAeqD,EAAQ,OAAQtC,CAAU,EACzC,IAAMuC,EAAoBpD,GAAgCmD,EAAQ,OAAQtC,CAAU,EACpFsC,EAAQ,QAAQhD,GAAuBgD,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAO1F,EAEa/C,GAAwBQ,GAAyD,CAC5F,IAAMK,EAASL,EAAW,OACpBM,EAAON,EAAW,KAClBO,EAAOP,EAAW,KACxB,OAAOQ,GAA4B,CAAC,OAAAH,EAAQ,KAAAC,EAAM,KAAAC,CAAI,CAAC,CACzD,ICxMA,IAeMiC,GAUAC,GAwHOC,GAKAC,GAtJbC,GAAAC,GAAA,kBAOAC,KAEAC,KACAC,KAGAC,KAEMT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,8BAA8B,CAElD,EAMMT,GAA2B,CAACU,EAAmBC,IAA+C,CAClG,IAAMC,EAAQF,EAAM,KACdG,EAAaC,EAAU,KAAKF,CAAK,EACjCG,EAAK,GACPC,EAAOL,EAAW,KAItB,GAHIK,EAAO,IACTA,EAAOJ,EAAM,OAASI,GAEpBA,EAAOJ,EAAM,OAAS,EACxB,MAAM,IAAI,MAAM,0CAA0C,EAG5D,IAAMK,EAAOL,EAAMI,CAAI,EACjBE,EAAOL,EAAaI,EACpBE,EAAaC,GAAiBH,CAAI,EAClCI,EAAaJ,EAAOE,EAEpBG,EAAY,CAACC,EAAcJ,IAC3BA,IAAe,EACV,WAAWI,CAAI,OAAOA,CAAI,YAAYA,CAAI,OAAOA,CAAI,OACnDJ,IAAe,EACjB,OAAOI,CAAI,OAAOA,CAAI,MACpBJ,IAAe,EACjB,WAAWI,CAAI,OAAOA,CAAI,QAAQA,CAAI,MAGxCA,EAEHC,EAAIC,EAAc,IAAKf,EAAM,SAAUA,EAAM,KAAMS,CAAU,EAC7DO,EAASC,GAAe,SAAUjB,EAAM,SAAUA,EAAM,KAAMS,CAAU,EACxES,EAAYJ,EAAE,KAAK,MAEnBK,EAAgBC,GAA4BpB,EAAM,QAAQ,IAAM,MAClE,mBAAmBkB,CAAS,oBAC5B,mBAAmBA,CAAS,eAC1BG,EAAmBC,GAA+B;AAAA,sCACpBJ,CAAS;AAAA,sCACTA,CAAS;AAAA,4CACHA,CAAS,KAAKb,CAAE;AAAA;AAAA,4DAEAa,CAAS;AAAA;AAAA;AAAA;AAAA;AAAA,gEAKLA,CAAS;AAAA;AAAA;AAAA;AAAA,QAIjEI,EAAa,gBAAgB,aAAc,KAAK,EAAE,iBAAiBR,EAAGE,CAAM,CAAC;AAAA,QAC7EM,EAAa,UAAU,CAAC;AAAA;AAAA;AAAA,qBAGXjB,CAAE;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,UAMbc,CAAa;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAmBID,CAAS,IAAIN,EAAU,kBAAmBH,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA,0BAKtDS,CAAS;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,2BAeRA,CAAS,IAAIK,GAAU,kBAAmBd,CAAU,CAAC;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA;AAAA,SAU9E,MAAO,CACL,KAAM,UACN,YAAa,CAAC,KAAM,GAAGA,CAAU,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAChE,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMP,EAAO,SAAUF,EAAM,QAAQ,CAAC,EACjD,cAAe,CAAC,EAAGQ,CAAI,EACvB,gBAAiB,CAAC,CAAC,OAAsB,KAAMG,CAAU,CAAC,CAC5D,GACA,gBAAAU,CACF,CACF,EAEa9B,GAAU,CAACiC,EAAyBvB,IAAwC,CACvFZ,GAAemC,EAAQ,MAA
M,EAC7BA,EAAQ,QAAQlC,GAAyBkC,EAAQ,OAAO,CAAC,EAAGvB,CAAU,CAAC,CACzE,EAEaT,GAA0BS,GACnCwB,GAA4B,CAAC,KAAMxB,EAAW,IAAc,CAAC,ICvJjE,IAiBMyB,GAMAC,GAWAC,GASAC,GAqBAC,GAuDOC,GAOAC,GA9HbC,GAAAC,GAAA,kBAGAC,KAEAC,KACAC,KAGAC,KAQMZ,GAAkBa,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,OAAS,EAC7B,MAAM,IAAI,MAAM,gBAAgB,CAEpC,EAEMZ,GACF,CAACY,EAA+BC,IAAiD,CAC/E,IAAMC,EAAuB,CAAC,EAC1BC,EAAqBF,EAAW,WACpC,OAAID,EAAO,CAAC,EAAE,KAAK,CAAC,EAAI,IACtBA,EAAO,CAAC,EAAE,iBAAiB,EAAE,QAAQI,GAAKF,EAAW,KAAK,OAAOE,CAAC,CAAC,CAAC,EACpED,EAAaD,EAAW,QAEnBG,GAA4B,CAAC,WAAAF,EAAY,KAAMF,EAAW,KAAM,WAAAC,CAAU,CAAC,CACpF,EAEEb,GAA4BiB,GAAoC;AAAA;AAAA,gCAEtCA,CAAe;AAAA,kBAC7BC,GAAa,8BAA+B,IAAKD,CAAe,CAAC;AAAA;AAAA;AAAA;AAAA,aAItEA,CAAe;AAAA,GAEtBhB,GAAuBkB,GAAsC,CACjE,IAAMF,EAAkBE,EAAQ,OAC1BC,EAAsB,CAAC,EAC7B,QAASC,EAAI,EAAGA,EAAIJ,EAAiB,EAAEI,EAAG,CACxC,IAAMC,EAAgBH,EAAQE,CAAC,EAAE,aAAa,UAAW,mBAAmB,EACxEJ,IAAoB,EACtBG,EAAU,KAAKE,CAAa,EACnBD,IAAM,EACfD,EAAU,KAAK,wBAAwBC,CAAC,QAAQC,CAAa,IAAI,EACxDD,IAAMJ,EAAkB,EACjCG,EAAU,KAAK,UAAUE,CAAa,IAAI,EAE1CF,EAAU,KAAK,6BAA6BC,CAAC,OAAOC,CAAa,IAAI,CAEzE,CACA,MAAO;AAAA,wDAC+CH,EAAQ,CAAC,EAAE,KAAK,OAAO;AAAA,UACrEC,EAAU,KAAK;AAAA,CAAI,CAAC;AAAA,QAE9B,EAEMlB,GAAyB,CAACS,EAA+BC,IAA6C,CAC1G,IAAMW,EAAaZ,EAAO,CAAC,EAAE,KACvBa,EAAYC,EAAU,KAAKF,CAAU,EACrCG,EAAWf,EAAO,CAAC,EAAE,SACrBgB,EAAOF,EAAU,cAAcb,EAAW,KAAMW,EAAW,MAAM,EACjEJ,EAAU,IAAI,MAAqBP,EAAW,UAAU,EACxDgB,EAAQC,EAAc,QAASH,EAAUH,EAAW,MAAM,EAC1DO,EAAkB,IAAI,MAAclB,EAAW,UAAU,EACzDmB,EAAkC,CAAC,EACnCC,EAA2B,CAAC,EAC9BC,EAAc,EACZC,EAAoC,CAAC,CAAC,QAAuB,KAAMV,CAAS,CAAC,EACnF,QAASH,EAAI,EAAGA,EAAIT,EAAW,WAAYS,IAAK,CAC9CY,GAAerB,EAAW,WAAWS,CAAC,EACtCS,EAAgBT,CAAC,EAAIY,EACrB,IAAME,EAAcZ,EAAW,MAAM,EACrCY,EAAYvB,EAAW,IAAI,EAAIA,EAAW,WAAWS,CAAC,EACtDW,EAAa,KAAKG,CAAW,EAC7BhB,EAAQE,CAAC,EAAIe,GAAe,SAASf,CAAC,GAAIK,EAAUS,EAAY,MAAM,EACtEJ,EAAkB,KAAK,CAAC,KAAMC,EAAaX,CAAC,EAAG,SAAUV,EAAO,CAAC,EAAE,QAAQ,CAAC,CAC9E,CACAuB,EAAgB,KACZ,CAAC,QAAuB,KAAMJ,CAAe,EAAG,GAAGO,GAA2Bd,EAAY,GAAGS,CAAY,CAAC,EAC9G,IAAMM,EAAmBC,GAA+B;AAAA,IAEpDA,EAAa,gBAAgB,aAAc,KAAK,EAC3C,gBAAgB,qBAAsB,MAAOT,EAAgB,MAAM,EACnE,iBAAiBF,EAAO,GAAGT,CAAO,CAAC;AAAA,IAC1CnB,GAAyB8B,EAAgB,MAAM,CAAC;AAAA,IAChD7B,GAAoBkB,CAAO,CAAC;AAAA;AAAA,IAE5BoB,EAAa,UAAU,CAAC;AAAA,MACtBA,EAAa,sCAAsC,qBAAqB,CAAC;AAAA;AAAA,oBAE3DX,EAAM,gBAAgB,YAAY,CAAC;AAAA,kBACrCA,EAAM,WAAW,UAAWD,CAAI,CAAC;AAAA;AAAA;AAAA,iBAGlCT,GAAa,8BAA+B,qBAAsBY,EAAgB,MAAM,CAAC;AAAA,QAClGF,EAAM,WAAW,UAAWD,EAAM,OAAO,CAAC;AAAA;AAAA;AAAA,KAIhD,MAAO,CACL,KAAM,QACN,YAAa,CAAC,KAAMf,EAAW,SAAU,kBAAmB,CAAC,MAAM,CAAC,EACpE,gBAAA0B,EACA,WAAY,KAAO,CACjB,QAASP,EACT,cAAe,CAAC,EAAG,KAAK,KAAKP,EAAY,EAAuB,CAAC,EACjE,gBAAAU,CACF,EACF,CACF,EAEa/B,GAAQ,CAACqC,EAAyB5B,IAAsC,CACnFd,GAAe0C,EAAQ,MAAM,EAC7B,IAAMC,EACFD,EAAQ,OAAO,SAAW,EAAI5B,EAAab,GAAgCyC,EAAQ,OAAQ5B,CAAU,EACzG4B,EAAQ,QAAQtC,GAAuBsC,EAAQ,OAAQC,CAAiB,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CAC1F,EAEarC,GAAwBQ,GAAyD,CAC5F,IAAMe,EAAOf,EAAW,KAClBC,EAAuBD,EAAW,WAClCE,EAAaF,EAAW,WAAuB,EAAIC,EAAW,OAASD,EAAW,WACxF,GAAIE,IAAeD,EAAW,OAC5B,MAAM,IAAI,MAAM,+CAA+C,EAEjE,OAAOG,GAA4B,CAAC,KAAAW,EAAM,WAAAb,EAAY,WAAAD,CAAU,CAAC,CACnE,ICtIA,IAUM6B,GAIAC,GAyBAC,GAUOC,GAuCAC,GAxFbC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KAEMT,GAAcU,GAChB,MAAM,KAAKA,EAAkB,iBAAiB,EAAG,MAAM,EAGrDT,GAAkBU,GAAwC,CAC9D,GAAI,CAACA,GAAUA,EAAO,SAAW,EAC/B,MAAM,IAAI,MAAM,yBAAyB,EAG3C,GAAIA,EAAO,CAAC,EAAE,WAAa,GAAkBA,EAAO,CAAC,EAAE,WAAa,GAChEA,EAAO,CAAC,EAAE,WAAa,GACzB,MAAM,IAAI,MAAM,uDAAuD,EAGzE,GAAIA,EAAO,CAAC,EAAE,WAAa,EACzB,MAAM,IAAI,MAAM,mDAAmD,EAGrE,GAAIA,EAAO,CAAC,EAAE,KAAK,SAAW,EAC5B,MAAM,IAAI,MAAM,oCAAoC,EAKtD,GAFmCX,GAAWW,EAAO,CAAC,CAAC,EAE3C,SAAWA,EAAO,CAAC,EAAE,KAAK,
OACpC,MAAM,IAAI,MAAM,uFAAuF,CAE3G,EAEMT,GAAiB,CAACU,EAA+BC,IAAkD,CACvG,IAAMC,EAAwB,CAAC,EAE/B,QAASC,EAAI,EAAGA,EAAIH,EAAW,OAAQ,EAAEG,EACvCD,EAAY,KAAKF,EAAWG,CAAC,EAAIF,EAAQE,CAAC,CAAC,EAG7C,OAAOD,CACT,EAEaX,GAAyBQ,GAA+C,CACnF,IAAMC,EAAaD,EAAO,CAAC,EAAE,KACvBE,EAA6Bb,GAAWW,EAAO,CAAC,CAAC,EACjDG,EAAcZ,GAAeU,EAAYC,CAAO,EAChDG,EAAaC,EAAU,KAAKH,CAAW,EAEvCI,EAAWP,EAAO,CAAC,EAAE,SACrBQ,EAAQC,EAAc,QAASF,EAAUN,EAAW,MAAM,EAC1DS,EAASC,GAAe,SAAUJ,EAAUJ,EAAY,MAAM,EAE9DS,EAAmBC,GAA+B;AAAA,2BAC/BL,EAAM,QAAQ,GAAGP,CAAU,CAAC;AAAA,QAC/CY,EAAa,gBAAgB,cAAe,KAAK,EAAE,iBAAiBL,EAAOE,CAAM,CAAC;AAAA,QAClFG,EAAa,UAAU,CAAC;AAAA,QACxBA,EAAa,sCAAsC,sBAAsB,CAAC;AAAA,6BACrDH,EAAO,gBAAgB,YAAY,CAAC;AAAA,2BACtCF,EAAM,KAAK,OAAO;AAAA,4BACjBP,EAAW,MAAM;AAAA,4BACjBO,EAAM,WAAW,uBAAwB,GAAG,CAAC;AAAA,gCACzCE,EAAO,WAAW,iBAAkB,GAAG,CAAC;AAAA;AAAA,UAE9DF,EAAM,WAAW,gBAAiB,IAAK,iBAAiB,CAAC;AAAA;AAAA,QAE3DE,EAAO,YAAY,aAAcF,EAAM,aAAa,eAAe,CAAC,CAAC;AAAA,OAG3E,MAAO,CACL,KAAM,OACN,YAAa,CAAC,KAAM,GAAGN,CAAO,GAAI,kBAAmB,CAAC,MAAM,CAAC,EAC7D,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAMC,EAAa,SAAUH,EAAO,CAAC,EAAE,QAAQ,CAAC,EAC3D,cAAe,CAAC,EAAG,KAAK,KAAKK,EAAa,EAAuB,CAAC,EAClE,gBACI,CAAC,CAAC,QAAuB,KAAMA,CAAU,EAAG,GAAGS,GAA2Bd,EAAO,CAAC,EAAE,KAAMG,CAAW,CAAC,CAC5G,GACA,gBAAAS,CACF,CACF,EAEanB,GAAQsB,GAAkC,CACrDzB,GAAeyB,EAAQ,MAAM,EAC7BA,EAAQ,QAAQvB,GAAsBuB,EAAQ,MAAM,EAAG,CAAC,OAAQ,CAAC,CAAC,CAAC,CAAC,CACtE,IC3FA,IAUMC,GA4DAC,GAoCOC,GA1GbC,GAAAC,GAAA,kBAGAC,KAEAC,KAGAC,KAEMP,GACF,CAACQ,EAA4BC,EAA+BC,EAA+BC,EAC1FC,IAAuB,CACtB,IAAMC,EAASC,GAAe,cAAeF,EAAYF,EAAW,OAAQ,CAAC,EACvEK,EAAIC,EAAc,SAAUP,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACxEQ,EAAID,EAAc,SAAUP,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EACxES,EAAIF,EAAc,SAAUP,EAAO,CAAC,EAAE,SAAUA,EAAO,CAAC,EAAE,KAAK,OAAQ,CAAC,EAE1EU,EACEC,EAAa,CAACL,EAAWE,EAAWC,IAAc,UAAUD,CAAC,KAAKF,CAAC,KAAKG,CAAC,IAC/E,GAAI,CAACP,EACHQ,EAAaN,EAAO,YAChB,aACAO,EAAWL,EAAE,YAAY,YAAY,EAAGE,EAAE,YAAY,YAAY,EAAGC,EAAE,YAAY,YAAY,CAAC,CAAC,MAChG,CACL,IAAMG,EAAmB,CAACC,EAAgBC,EAAWC,EAAW,KAAO,CACrE,IAAMC,EAAc,iBAAiBF,CAAC,gBAAgBA,CAAC,IACjDG,EAAc,iBAAiBH,CAAC,gBAAgBA,CAAC,IAEjDI,EAAc,sBAAsBJ,CAAC,6BAA6BA,CAAC,UACzE,MAAO;AAAA,gCACeA,CAAC,MAAMV,EAAO,gBAAgB,qBAAqBU,CAAC,GAAG,CAAC;AAAA,0BAC9DA,CAAC,MAAMR,EAAE,2BAA2B,iBAAiBQ,CAAC,GAAIV,CAAM,CAAC;AAAA,0BACjEU,CAAC,MAAMN,EAAE,2BAA2B,iBAAiBM,CAAC,GAAIV,CAAM,CAAC;AAAA,0BACjEU,CAAC,MAAML,EAAE,2BAA2B,iBAAiBK,CAAC,GAAIV,CAAM,CAAC;AAAA,yBAClEU,CAAC,cAAcA,CAAC;AAAA,yBAChBA,CAAC,cAAcA,CAAC;AAAA,yBAChBA,CAAC,cAAcA,CAAC;AAAA,6BACZA,CAAC,cAAcA,CAAC;AAAA,6BAChBA,CAAC,cAAcA,CAAC;AAAA,6BAChBA,CAAC,cAAcA,CAAC;AAAA,cAC/BD,CAAM,IAAIC,CAAC,OAAOC,CAAQ,IAAIJ,EAAWK,EAAaC,EAAaC,CAAW,CAAC;AAAA,WAErF,EACIf,IAAe,EACjBO,EAAa;AAAA;AAAA,cAETE,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,cAClCA,EAAiB,OAAQ,EAAG,KAAK,CAAC;AAAA,wGAGtCF,EAAa;AAAA,cACTE,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,cAC9CA,EAAiB,0BAA2B,CAAC,CAAC;AAAA,WAGtD,CAEA,MAAO;AAAA,UACHb,EAAa,gBAAgB,WAAY,KAAK,EAAE,iBAAiBU,EAAGH,EAAGE,EAAGJ,CAAM,CAAC;AAAA,UACjFL,EAAa,UAAU,CAAC;AAAA,UACxBA,EAAa,sCAAsC,mBAAmB,CAAC;AAAA,UACvEW,CAAU;AAAA,QAEhB,EAEElB,GAA4BQ,GAA+C,CAC/E,IAAMmB,EAAQnB,EAAO,CAAC,EAAE,KAClBoB,EAAQpB,EAAO,CAAC,EAAE,KAClBqB,EAAQrB,EAAO,CAAC,EAAE,KAClBsB,EAAiBtB,EAAO,CAAC,EAAE,SAE3BE,EAAc,EAAEqB,EAAU,SAASJ,EAAOC,CAAK,GAAKG,EAAU,SAASH,EAAOC,CAAK,GACrFG,EAAcL,EACdM,EAAaF,EAAU,KAAKJ,CAAK,EAGrC,GAAIjB,EAAa,CACf,IAAMwB,EAAkBC,GAAc,UAAUA,GAAc,UAAUR,EAAOC,EAAO,EAAK,EAAIC,EAAO,EAAK,EAC3G,GAAI,CAACK,EACH,MAAM,IAAI,MAAM,6CAA8C,EAE
hEF,EAAcE,EACdD,EAAaF,EAAU,KAAKC,CAAW,CACzC,CAEA,IAAMI,EAAU,KAAK,KAAKH,EAAa,CAAC,EAExC,MAAO,CACL,KAAM,QACN,YAAa,CAAC,kBAAmB,CAAC,OAAQ,OAAQ,MAAM,CAAC,EACzD,gBAAkB1B,GACdR,GAA2BQ,EAAcC,EAAQwB,EAAatB,EAAaoB,CAAc,EAC7F,WAAY,KAAO,CACjB,QAAS,CAAC,CAAC,KAAME,EAAa,SAAUF,CAAc,CAAC,EACvD,cAAe,CAAC,EAAG,KAAK,KAAKG,EAAa,GAA0B,CAAgB,CAAC,EACrF,gBACI,CAAC,CAAC,QAAuB,KAAMG,CAAO,EAAG,GAAGC,GAA2BR,EAAOF,EAAOC,EAAOI,CAAW,CAAC,CAC9G,EACF,CACF,EAEa/B,GAASqC,GAAkC,CACtDA,EAAQ,QAAQtC,GAAyBsC,EAAQ,MAAM,CAAC,CAC1D,IC5GA,IA6CaC,GA7CbC,GAAAC,GAAA,kBAGAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAOatC,GAA+D,IAAI,IAAI,CAClF,CAAC,MAAO,CAAUuC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,SAAU,CAACC,GAAQC,EAAwB,CAAC,EAC7C,CAAC,SAAU,CAACC,GAAQD,EAAwB,CAAC,EAC7C,CAAC,OAAQ,CAAUE,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,YAAa,CAACC,EAAS,CAAC,EAEzB,CAAC,cAAe,CAAMC,GAAkBC,EAA0B,CAAC,EACnE,CAAC,qBAAsB,CAACC,EAAS,CAAC,EAClC,CAAC,UAAW,CAACC,EAAO,CAAC,EACrB,CAAC,gBAAiB,CAACC,EAAa,CAAC,EACjC,CAAC,OAAQ,CAAUC,GAAeC,EAAmB,CAAC,EACtD,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,OAAQ,CAACC,GAAMC,EAAmB,CAAC,EACpC,CAAC,gBAAiB,CAACC,GAAeC,EAA4B,CAAC,EAC/D,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,eAAgB,CAACC,GAAcC,EAA2B,CAAC,EAC5D,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,MAAO,CAAUC,GAAcC,EAAoB,CAAC,EACrD,CAAC,QAAS,CAAWC,EAAK,CAAC,EAC3B,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,SAAU,CAACC,EAAM,CAAC,EACnB,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,QAAS,CAAUC,EAAK,CAAC,EAC1B,CAAC,YAAa,CAACpB,GAAMC,EAAmB,CAAC,EACzC,CAAC,SAAU,CAACoB,GAAQC,EAAqB,CAAC,EAC1C,CAAC,iBAAkB,CAACC,GAAgBC,EAA6B,CAAC,EAClE,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,OAAQ,CAACC,GAAMC,EAAmB,CAAC,EACpC,CAAC,oBAAqB,CAAMC,GAAwBC,EAAgC,CAAC,EACrF,CAAC,gBAAiB,CAAMC,GAAoBC,EAA4B,CAAC,EACzE,CAAC,UAAW,CAAWC,EAAO,CAAC,EAC/B,CAAC,iBAAkB,CAAWC,EAAc,CAAC,EAC7C,CAAC,cAAe,CAAUC,GAAsBC,EAA0B,CAAC,EAC3E,CAAC,wBAAyB,CAACC,EAAY,CAAC,EACxC,CAAC,qBAAsB,CAACC,EAAS,CAAC,EAClC,CAAC,YAAa,CAAUC,GAAoBxB,EAAoB,CAAC,EACjE,CAAC,OAAQ,CAAWyB,EAAI,CAAC,EACzB,CAAC,cAAe,CAAWC,EAAW,CAAC,EACvC,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,SAAU,CAACC,EAAM,CAAC,EACnB,CAAC,cAAe,CAACC,GAAaC,EAA0B,CAAC,EAEzD,CAAC,UAAW,CAAMC,GAAcC,EAAsB,CAAC,EACvD,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,qBAAsB,CAACC,GAAoBC,EAAiC,CAAC,EAC9E,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,MAAO,CAACC,EAAG,CAAC,EACb,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,QAAS,CAACC,EAAK,CAAC,EACjB,CAAC,aAAc,CAAUC,EAAU,CAAC,EACpC,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,aAAc,CAACC,EAAU,CAAC,EAC3B,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,YAAa,CAACC,EAAS,CAAC,EACzB,CAAC,aAAc,CAACC,EAAU,CAAC,EAC3B,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,WAAY,CAACC,EAAQ,CAAC,EACvB,CAAC,eAAgB,CAACC,EAAY,CAAC,EAC/B,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,SAAU,CAACC,GAAQC,EAAqB,CAAC,EAC1C,CAAC,kBAAmB,CAACC,EAAe,CAAC,EACrC,CAAC,UAAW,CAAUC,EAAO,CAAC,EAC9B,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,QAAS,CAACC,GAAOC,EAAoB,CAAC,EACvC,CAAC,yBAA0B,CAACC,EAAa,CAAC,EAC1C,CAAC,QAAS,CAACC,GAAOC,EAAoB,CAAC,EACvC,CAAC,O
AAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,UAAW,CAACC,GAASC,EAAsB,CAAC,EAC7C,CAAC,MAAO,CAAWC,EAAG,CAAC,EACvB,CAAC,MAAO,CAAUC,EAAG,CAAC,EACtB,CAAC,OAAQ,CAAUC,EAAI,CAAC,EACxB,CAAC,kBAAmB,CAAUC,GAA0BtE,EAAoB,CAAC,EAC7E,CAAC,OAAQ,CAACuE,EAAI,CAAC,EACf,CAAC,YAAa,CAACC,GAAWC,EAAwB,CAAC,EACnD,CAAC,QAAS,CAACC,EAAK,CAAC,CACnB,CAAC,ICzID,IAoBaC,GApBbC,GAAAC,GAAA,kBAGAC,KAGAC,KAEAC,KAYaL,GAAN,KAAqB,CAI1B,YAAoBM,EAAwB,CAAxB,aAAAA,EAClB,KAAK,KAAO,IAAI,IAChB,KAAK,gBAAkB,EACzB,CACA,YAAYC,EAAkC,CAC5C,OAAO,KAAK,KAAK,IAAIA,CAAG,CAC1B,CACA,YAAYA,EAAcC,EAA0B,CAClD,KAAK,KAAK,IAAID,EAAKC,CAAQ,CAC7B,CACA,IAAIC,EAAyBC,EAAmBC,EAAoBC,EAChEC,EAA0D,CAC5DC,GAAiBL,EAAc,YAAY,IAAI,EAC/C,IAAMM,EAAS,KAAK,QAAQ,OACtBC,EAAqB,KAAK,QAAQ,sBAAsB,EAC9D,KAAK,QAAQ,eAAe,KAAK,QAAQ,sBAAwB,CAAC,EAClE,IAAMC,EAAU,CAAC,EACjB,QAAWC,KAASR,EAClBO,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAU,CAAC,OAAQC,EAAM,MAAM,CAAC,CAAC,EAE1E,QAAWC,KAAUR,EACnBM,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAU,CAAC,OAAQE,EAAO,MAAM,CAAC,CAAC,EAEvEN,GACFI,EAAQ,KAAK,CAAC,QAASA,EAAQ,OAAQ,SAAUJ,CAAoB,CAAC,EAExE,IAAMO,EAAYL,EAAO,gBACrB,CAAC,OAAQN,EAAc,gBAAgB,mBAAmB,CAAC,EAAG,QAAAQ,EAAS,MAAOR,EAAc,YAAY,IAAI,CAAC,EAEjH,GAAI,KAAK,QAAQ,gBAAkB,YAAa,CAC9C,IAAMY,EAAc,CAClB,SAAU,KAAK,QAAQ,gBACvB,gBAAiBZ,EAAc,gBAC/B,UAAAW,EACA,cAAAR,CACF,EAC2B,KAAK,QAAQ,oBAAoB,IAAI,KAAK,QAAQ,gBAAiB,EAC1E,KAAKS,CAAW,CACtC,CAEAL,EAAmB,YAAYP,EAAc,eAAe,EAC5DO,EAAmB,aAAa,EAAGI,CAAS,EAC5CJ,EAAmB,mBAAmB,GAAGJ,CAAa,EACtD,KAAK,QAAQ,eAAe,KAAK,QAAQ,sBAAwB,EAAI,CAAC,EACtE,KAAK,QAAQ,yBAET,KAAK,QAAQ,uBAAyB,KAAK,QAAQ,mBACnD,KAAK,QAAQ,YAAc,cAC7B,KAAK,QAAQ,eAAe,EAE1B,KAAK,QAAQ,uBAAyB,KAAK,QAAQ,mBACrD,KAAK,QAAQ,MAAM,EAErBU,GAAeb,EAAc,YAAY,IAAI,CAC/C,CACA,SAAgB,CAEhB,CACA,MAAMc,EAA0BC,EAAiE,CAC/FV,GAAiBS,EAAY,IAAI,EACjC,IAAMR,EAAS,KAAK,QAAQ,OACtBU,EAAuB,CAAC,EAC1BV,EAAO,SAAS,IAAI,YAAY,GAClCU,EAAW,KAAK,aAAa,EAE/B,IAAMC,EAAeC,GAAmBH,EAA6B,KAAK,QAAQ,OAAO,MAAM,EACzFI,EAAWL,EAAY,gBAAgBG,CAAY,EACnDG,EAAO,GAAGJ,EAAW,KAAK;AAAA,CAAI,CAAC;AAAA,EAAKC,EAAa,yBAAyB;AAAA,EAAKE,CAAQ,GACvFE,EAAef,EAAO,mBAAmB,CAAC,KAAAc,EAAM,MAAON,EAAY,IAAI,CAAC,EAC9EQ,GAAU,UAAW,IAAM,YAAYR,EAAY,IAAI,iBAAiBM,CAAI,EAAE,EAE9E,IAAMG,EAAkBjB,EAAO,sBAC3B,CAAC,QAAS,CAAC,OAAQe,EAAc,WAAY,MAAM,EAAG,OAAQ,OAAQ,MAAOP,EAAY,IAAI,CAAC,EAElG,OAAAD,GAAeC,EAAY,IAAI,EACxB,CAAC,YAAAA,EAAa,gBAAAS,EAAiB,qBAAsBN,EAAa,aAAa,CACxF,CAEA,2BAA2Bd,EACE,CAC3B,IAAMqB,EAAI,OAAOrB,GAAkB,SAAWA,EAAgBA,EAAc,EACtEsB,EAAI,OAAOtB,GAAkB,SAAW,EAAKA,EAAc,GAAK,EAChEuB,EAAI,OAAOvB,GAAkB,SAAW,EAAKA,EAAc,GAAK,EAChEwB,EAAoB,KAAK,QAAQ,OAAO,OAAO,iCACrD,GAAIH,GAAKG,GAAqBF,GAAKE,GAAqBD,GAAKC,EAC3D,MAAO,CAACH,EAAGC,EAAGC,CAAC,EAEjB,IAAME,EAAOJ,EAAIC,EAAIC,EACjBG,EAAkB,KAAK,KAAK,KAAK,KAAKD,CAAI,CAAC,EAC/C,GAAIC,EAAkBF,EAAmB,CAEvC,GADAE,EAAkB,KAAK,KAAK,KAAK,KAAKD,CAAI,CAAC,EACvCC,EAAkBF,EACpB,MAAM,IAAI,MAAM,6CAA6C,EAE/D,MAAO,CAACE,EAAiBA,EAAiBA,CAAe,CAC3D,KACE,OAAO,CAACA,EAAiBA,EAAiB,CAAC,CAE/C,CACF,IC3HA,IAmCMC,GA4CAC,GAiBAC,GAwBOC,GAxHbC,GAAAC,GAAA,kBAGAC,KAEAC,KAEAC,KACAC,KACAC,KACAC,KACAC,KAwBMZ,GACF,CAACa,EAAqCC,IAA2E,CAC/G,GAAIA,EAAkB,SAAWD,EAAa,OAC5C,MAAM,IAAI,MAAM,4BAA4BC,EAAkB,MAAM,wCAChED,EAAa,MAAM,GAAG,EAG5B,IAAME,EAAuB,CAAC,EAC9B,QAASC,EAAI,EAAGA,EAAIH,EAAa,OAAQ,EAAEG,EAAG,CAC5C,IAAMC,EAAOJ,EAAaG,CAAC,EAAE,SAC7B,OAAQF,EAAkBE,CAAC,EAAG,CAC5B,IAAK,OAAQ,CACXD,EAAW,KAAK,EAAE,EAClB,KACF,CACA,IAAK,OAAQ,CACXA,EAAW,KAAK,GAAGE,CAAI,EAAE,EACzB,KACF,CACA,IAAK,OAAQ,CACX,IAAMC,EAAOL,EAAaG,CAAC,EAAE,KAAK,OAClCD,EAAW,KAAK,GAAGE,CAAI,IAAIC,CAAI,EAAE,EACjC,KACF,CACA,IAAK,OAAQ,CACX,IAAMC,EAAON,EAAaG,CAAC,EAAE,KAAK,KAAK,GAAG,EAC1CD,EAAW,KAAK,GAAGE,CAAI,IAAIE,CAAI,EAAE,EACjC,KACF,CACA,QACE,MAAM,IAAI,MAAM,iCAAiCL,EAAkBE,CAAC,CAAC,EAAE,CAC3E,
CACF,CAEA,OAAOD,EAAW,KAAK,GAAG,CAC5B,EASEd,GACF,CAACmB,EAA0BP,EAAqCQ,IAA0C,CAGxG,IAAIC,EAAMF,EAAY,KACtB,OAAIA,EAAY,aAAa,OAC3BE,GAAO,IAAMF,EAAY,YAAY,KAAO,KAE9CE,GAAO,IAAMD,EACT,IACOrB,GACIa,EACAO,EAAY,aAAa,mBACrB,IAAI,MAAwCP,EAAa,MAAM,EAAE,KAAK,MAAM,CAAC,CAAC,GAC1FS,CACT,EAEEpB,GAAN,KAA6C,CAI3C,YAAYqB,EAA6B,CACnCA,IACF,KAAK,aAAeA,EAAY,aAChC,KAAK,OAASA,EAAY,OAE9B,CAEA,eAAeC,EAAwC,CACrD,OAAO,KAAK,eAAiBA,CAC/B,CAEA,SAASC,EAA4B,CACnC,OAAO,KAAK,SAAWA,CACzB,CACF,EAMatB,GAAN,KAAoB,CAApB,cAkBL,sBAAgC,KAOhC,qBAA+B,KAgC/B,KAAQ,eAAyC,KACjD,KAAQ,mBAAiD,KACzD,uBAAoB,GACpB,2BAAwB,EAGxB,KAAQ,eAAsC,CAAC,EAE/C,KAAQ,eAAsD,IAAI,IAOlE,mBAA8B,UAI9B,yBAAkD,IAAI,IAKtD,KAAQ,uBAA2D,IAAI,IAKvE,gCAA4E,IAAI,IA7ChF,IAAI,yBAAoD,CACtD,GAAI,KAAK,kBAAoB,KAC3B,MAAM,IAAI,MAAM,yEAAyE,EAG3F,IAAIuB,EAAO,KAAK,iBAAiB,IAAI,KAAK,eAAe,EACzD,OAAKA,IACHA,EAAO,CAAC,EACR,KAAK,iBAAiB,IAAI,KAAK,gBAAiBA,CAAI,GAG/CA,CACT,CAmCA,MAAM,WAAWC,EAAUC,EAAoC,CAC7D,KAAK,IAAMD,EACX,IAAME,EAAqC,CAAC,EACtCC,EAAwC,CAC5C,eAAgB,CACd,+BAAgCF,EAAQ,OAAO,+BAC/C,iCAAkCA,EAAQ,OAAO,iCACjD,4BAA6BA,EAAQ,OAAO,4BAC5C,cAAeA,EAAQ,OAAO,cAC9B,kCAAmCA,EAAQ,OAAO,kCAClD,yBAA0BA,EAAQ,OAAO,yBACzC,yBAA0BA,EAAQ,OAAO,yBACzC,yBAA0BA,EAAQ,OAAO,wBAC3C,EACA,iBAAAC,CACF,EAEID,EAAQ,SAAS,IAAI,qDAAqD,EAC5EC,EAAiB,KAAK,qDAAuE,EACpFD,EAAQ,SAAS,IAAI,iBAAiB,GAC/CC,EAAiB,KAAK,iBAAiB,EAErCD,EAAQ,SAAS,IAAI,YAAY,GACnCC,EAAiB,KAAK,YAAY,EAGpC,KAAK,OAAS,MAAMD,EAAQ,cAAcE,CAAgB,EAC1D,KAAK,YAAc,IAAI5B,GAAgB,MAAM0B,EAAQ,mBAAmB,CAAC,EACzE,KAAK,eAAiBG,GAAqB,IAAI,EAC/C,KAAK,eAAiB,IAAIC,GAAe,IAAI,EAC7C,KAAK,QAAU,IAAI,IACnB,KAAK,qBAAuB,IAAI,IAChC,KAAK,iBAAmB,IAAI,IAG5BC,GAAgBN,EAAI,SAAW,CAAC,CAACA,EAAI,KAAK,EAI1C,KAAK,OAAO,kBAAoBO,GAAM,CAChCA,EAAG,iBAAiB,oBAEtB,QAAQ,MAAM,mDAAmDA,EAAG,MAAM,OAAO,EAAE,CAEvF,EAEA,OAAO,eACH,KAAK,IAAI,OAAQ,SAAU,CAAC,MAAO,KAAK,OAAQ,SAAU,GAAO,WAAY,GAAM,aAAc,EAAK,CAAC,EAC3G,OAAO,eACH,KAAK,IAAI,OAAQ,UAAW,CAAC,MAAON,EAAS,SAAU,GAAO,WAAY,GAAM,aAAc,EAAK,CAAC,EAGxG,KAAK,aAAa,CACpB,CAEA,SAAgB,CACV,OAAO,KAAK,SAAa,KAC3B,KAAK,SAAS,QAAQ,EAExB,KAAK,eAAe,QAAQ,CAC9B,CAEA,mBAAuC,CACrC,OAAK,KAAK,iBACR,KAAK,eAAiB,KAAK,OAAO,qBAAqB,GAElD,KAAK,cACd,CAEA,uBAA+C,CAC7C,GAAI,CAAC,KAAK,mBAAoB,CAC5B,IAAMO,EAAiB,KAAK,kBAAkB,EACxCC,EAAkD,CAAC,EAErD,KAAK,YAAc,cACrBA,EAAsB,gBAAkB,CACtC,SAAU,KAAK,SACf,0BAA2B,KAAK,sBAAwB,EACxD,oBAAqB,KAAK,sBAAwB,EAAI,CACxD,GAGF,KAAK,mBAAqBD,EAAe,iBAAiBC,CAAqB,CACjF,CACA,OAAO,KAAK,kBACd,CAEA,gBAAuB,CACjB,KAAK,qBACP,KAAK,mBAAmB,IAAI,EAC5B,KAAK,mBAAqB,KAE9B,CAEA,OAAc,CACZ,GAAI,CAAC,KAAK,eACR,OAGFC,GAAiB,EAEjB,KAAK,eAAe,EACpB,IAAIC,EACA,KAAK,YAAc,SACrB,KAAK,eAAe,gBAChB,KAAK,SAAW,EAAG,KAAK,sBAAwB,EAAG,KAAK,mBAAqB,CAAC,EAElFA,EAAkB,KAAK,OAAO,aAE1B,CAAC,KAAM,KAAK,sBAAwB,EAAI,EAAG,MAAO,eAAe,SAAW,eAAe,QAAQ,CAAC,EAExG,KAAK,eAAe,IAAIA,EAAiB,KAAK,cAAc,EAC5D,KAAK,eAAiB,CAAC,EACvB,KAAK,eAAe,mBAChB,KAAK,mBAAqB,EAAGA,EAAiB,EAAG,KAAK,sBAAwB,EAAI,CAAC,GAGzF,KAAK,OAAO,MAAM,OAAO,CAAC,KAAK,eAAe,OAAO,CAAC,CAAC,EACvD,KAAK,eAAe,sBAAsB,EAC1C,KAAK,eAAiB,KACtB,KAAK,sBAAwB,EAEzB,KAAK,YAAc,QAChBA,EAAiB,SAAS,WAAW,IAAI,EAAE,KAAK,IAAM,CACzD,IAAMC,EAAa,IAAI,eAAeD,EAAgB,eAAe,CAAC,EAChEE,EAAiB,KAAK,eAAe,IAAIF,CAAe,EAC9D,QAAStB,EAAI,EAAGA,EAAIuB,EAAW,OAAS,EAAGvB,IAAK,CAC9C,IAAMyB,EAAoBD,EAAexB,CAAC,EACpC0B,EAAWD,EAAkB,SAC7BE,EAAa,KAAK,QAAQ,IAAID,CAAQ,EACtCE,EAAaD,EAAW,WACxBE,EAAaF,EAAW,WACxBG,EAAcL,EAAkB,YAChCM,EAAmBN,EAAkB,iBACrCO,EAAoBP,EAAkB,kBACtCQ,EAAeV,EAAWvB,EAAI,CAAC,EAC/BkC,EAAaX,EAAWvB,EAAI,EAAI,CAAC,EAEnC,OAAO,KAAK,cAAkB,MAChC,KAAK,cAAgBiC,GAGvB,IAAME,EAAY,OAAOF,EAAe,KAAK,aAAa,EACpDG,EAAU,OAAOF,EAAa,KAAK,aAAa,EAEtD,GAAI,CAAC,OAAO,cAAcC,CAAS,GAAK,CAAC,OAAO,cAAcC,CAAO,EACnE,MAAM,IAAI,WAAW,2BAA2B,EAGlD,GAAI,
KAAK,IAAI,OAAO,WAAW,OAC7B,KAAK,IAAI,OAAO,UAAU,OAAO,CAC/B,QAAS,EACT,eAAgBL,EAAiB,IAC7BM,IAAU,CAAC,KAAMA,EAAM,KAAM,SAAUC,GAA2BD,EAAM,QAAQ,CAAC,EAAE,EACvF,gBAAiBL,EAAkB,IAC/BK,IAAU,CAAC,KAAMA,EAAM,KAAM,SAAUC,GAA2BD,EAAM,QAAQ,CAAC,EAAE,EACvF,SAAAX,EACA,WAAAE,EACA,WAAAC,EACA,YAAAC,EACA,UAAAK,EACA,QAAAC,CACF,CAAC,MACI,CAEL,IAAIG,EAAc,GAClBR,EAAiB,QAAQ,CAACM,EAAOrC,IAAM,CACrCuC,GAAe,SAASvC,CAAC,OAAOqC,EAAM,IAAI,OAAOC,GAA2BD,EAAM,QAAQ,CAAC,IAC7F,CAAC,EACD,IAAIG,EAAe,GACnBR,EAAkB,QAAQ,CAACK,EAAOrC,IAAM,CACtCwC,GAAgB,UAAUxC,CAAC,OAAOqC,EAAM,IAAI,OAAOC,GAA2BD,EAAM,QAAQ,CAAC,IAC/F,CAAC,EAED,QAAQ,IAAI,uBAAuBX,CAAQ,IAAIE,CAAU,IAAIC,CAAU,IAAIC,CAAW,KAAKS,CAAW,GAClGC,CAAY,mBAAmBJ,EAAUD,CAAS,KAAK,CAC7D,CACAM,GAAM,MAAO,GAAGX,CAAW,KAAKG,CAAY,KAAKC,CAAU,EAAE,CAC/D,CACAZ,EAAgB,MAAM,EACtB,KAAK,eAAe,OAAOA,CAAe,CAC5C,CAAC,EAEHoB,GAAe,CACjB,CAaA,IAAIC,EAAsBZ,EAAyCa,EAC/DC,EACAC,EACAC,EAAmC,CACrC1B,GAAiBsB,EAAQ,IAAI,EAE7B,IAAMK,EAAwB,CAAC,EAC/B,QAAShD,EAAI,EAAGA,EAAI+B,EAAiB,OAAQ,EAAE/B,EAAG,CAChD,IAAMU,EAAOqB,EAAiB/B,CAAC,EAAE,KAEjC,GAAIU,IAAS,EACX,SAEF,IAAMuC,EAAU,KAAK,eAAe,IAAIvC,CAAI,EAC5C,GAAI,CAACuC,EACH,MAAM,IAAI,MAAM,0BAA0BvC,CAAI,EAAE,EAElDsC,EAAW,KAAKC,CAAO,CACzB,CAEA,GAAM,CAAC,QAAAC,EAAS,cAAAC,EAAe,gBAAAC,CAAe,EAAIT,EAAQ,WAAWZ,CAAgB,EAG/EsB,EAAyBT,EAAc,SAAW,EAAIM,EAAQ,IAAI,CAACI,EAAGtD,IAAMA,CAAC,EAAI4C,EACvF,GAAIS,EAAuB,SAAWH,EAAQ,OAC5C,MAAM,IAAI,MAAM,eAAeG,EAAuB,MAAM,qBAAqBH,EAAQ,MAAM,GAAG,EAIpG,IAAMlB,EAAkC,CAAC,EACnCuB,EAAyB,CAAC,EAChC,QAASvD,EAAI,EAAGA,EAAIkD,EAAQ,OAAQ,EAAElD,EAAG,CAIvC,GAAI,CAAC,OAAO,UAAUqD,EAAuBrD,CAAC,CAAC,GAAKqD,EAAuBrD,CAAC,EAAI,IAC5EqD,EAAuBrD,CAAC,GAAK+C,EAC/B,MAAM,IAAI,MAAM,yBAAyBM,EAAuBrD,CAAC,CAAC,EAAE,EAEtE,GAAIqD,EAAuBrD,CAAC,IAAM,GAChC,SAEF,IAAMwD,EAAcH,EAAuBrD,CAAC,IAAM,GAC5CyD,EAAeJ,EAAuBrD,CAAC,IAAM,GAC7C0D,EAAcF,GAAeC,EAC/BX,EAAyBI,EAAQlD,CAAC,EAAE,SAAUkD,EAAQlD,CAAC,EAAE,IAAI,EAC7D6C,EAAmBQ,EAAuBrD,CAAC,EAAGkD,EAAQlD,CAAC,EAAE,SAAUkD,EAAQlD,CAAC,EAAE,IAAI,EAGtF,GAFAgC,EAAkB,KAAK0B,CAAU,EAE7BA,EAAW,OAAS,EACtB,SAEF,IAAMT,GAAU,KAAK,eAAe,IAAIS,EAAW,IAAI,EACvD,GAAI,CAACT,GACH,MAAM,IAAI,MAAM,2BAA2BS,EAAW,IAAI,EAAE,EAK9D,GAHIF,GACF,KAAK,cAAc,KAAKP,EAAO,EAE7BQ,EAAc,CAChB,IAAIE,EAAiB,KAAK,qBAAqB,IAAI,KAAK,eAAgB,EACnEA,IACHA,EAAiB,CAAC,EAClB,KAAK,qBAAqB,IAAI,KAAK,gBAAkBA,CAAc,GAErEA,EAAe,KAAKV,EAAO,CAC7B,CACAM,EAAY,KAAKN,EAAO,CAC1B,CAIA,GAAID,EAAW,SAAWjB,EAAiB,QAAUwB,EAAY,SAAWvB,EAAkB,OAAQ,CAEpG,GAAIuB,EAAY,SAAW,EACzB,OAAAb,GAAeC,EAAQ,IAAI,EACpBX,EAMT,MAAM,IAAI,MACN,WAAWW,EAAQ,IAAI,4EAA4E,CACzG,CAKA,IAAIiB,EACJ,GAAIR,EAAiB,CACnB,IAAIS,EAAgB,EACdC,EAAoB,CAAC,EAE3BV,EAAgB,QAAQW,GAAK,CAC3B,IAAMrD,GAAO,OAAOqD,EAAE,MAAS,SAAW,CAACA,EAAE,IAAI,EAAIA,EAAE,KACvD,GAAIrD,GAAK,SAAW,EAClB,OAGF,IAAMsD,GAAgBD,EAAE,OAAS,GAAmB,EAAI,EACpDE,GACAC,EACAH,EAAE,OAAS,IACbG,EAAgBxD,GAAK,OAAS,EAAI,GAAMA,GAAK,OAAS,EAAI,EAAIA,GAAK,OAASsD,GAC5EC,GAAiBvD,GAAK,OAAS,EAAI,GAAKsD,GAAgBtD,GAAK,SAE7DwD,EAAgBxD,GAAK,QAAU,EAAIA,GAAK,OAASsD,GAAgB,GACjEC,GAAiB,IAEnBJ,EAAgB,KAAK,KAAKA,EAAgBK,CAAa,EAAIA,EAC3DJ,EAAQ,KAAKD,CAAa,EAM1B,IAAMM,GAAqBJ,EAAE,OAAS,GAAmB,EAAI,EAC7DF,GAAiBnD,GAAK,OAAS,EAAI,KAAK,KAAKA,GAAK,OAASyD,EAAkB,EAAIF,GAC9CvD,GAAK,OAASsD,EACnD,CAAC,EAID,IAAMI,EAAsB,GAC5BP,EAAgB,KAAK,KAAKA,EAAgBO,CAAmB,EAAIA,EACjE,IAAMC,EAAc,IAAI,YAAYR,CAAa,EACjDT,EAAgB,QAAQ,CAACW,EAAG/D,KAAM,CAChC,IAAMsE,GAASR,EAAQ9D,EAAC,EAClBU,GAAO,OAAOqD,EAAE,MAAS,SAAW,CAACA,EAAE,IAAI,EAAIA,EAAE,KACvD,GAAIA,EAAE,OAAS,EACb,IAAI,WAAWM,EAAaC,GAAQ5D,GAAK,MAAM,EAAE,IAAIA,EAAI,UAChDqD,EAAE,OAAS,GACpB,IAAI,YAAYM,EAAaC,GAAQ5D,GAAK,MAAM,EAAE,IAAIA,EAAI,UACjDqD,EAAE,OAAS,GAEpB,IAAI,YAAYM,EAAaC,GAAQ5D,GAAK,MAAM,EAAE,IAAIA,EAAI,UACjDqD,EAAE,OAAS,EAC
pB,IAAI,aAAaM,EAAaC,GAAQ5D,GAAK,MAAM,EAAE,IAAIA,EAAI,MAE3D,OAAM,IAAI,MAAM,6BAA6B4B,GAA2ByB,EAAE,IAAI,CAAC,EAAE,CAErF,CAAC,EAED,IAAMQ,GAEF,KAAK,eAAe,OAAOV,EAAe,eAAe,SAAW,eAAe,OAAO,EAC9F,KAAK,OAAO,MAAM,YAAYU,GAAkB,OAAQ,EAAGF,EAAa,EAAGR,CAAa,EACxF,KAAK,eAAe,QAAQU,GAAkB,EAAE,EAChDX,EAAuB,CAAC,OAAQ,EAAG,KAAMC,EAAe,OAAQU,GAAkB,MAAM,CAC1F,CAEA,IAAMC,EAA0B,KAAK,eAAe,2BAA2BrB,CAAa,EACtF9C,EAAuBmE,EAAwB,CAAC,IAAM,GAAKA,EAAwB,CAAC,IAAM,EAE1FlE,EAAMrB,GAAwB0D,EAASZ,EAAkB1B,CAAoB,EAC/EoE,EAAW,KAAK,eAAe,YAAYnE,CAAG,EAQlD,GAPKmE,IACHA,EAAW,KAAK,eAAe,MAAM9B,EAAS6B,CAAuB,EACrE,KAAK,eAAe,YAAYlE,EAAKmE,CAAQ,EAC7CC,GAAU,OAAQ,IAAM,mBAAmBpE,CAAG,kBAAkBqC,EAAQ,IAAI,EAAE,GAI5ES,GAAmBqB,EAAS,qBAAsB,CACpD,GAAIrB,EAAgB,SAAWqB,EAAS,qBAAqB,OAC3D,MAAM,IAAI,MAAM,4CAA4CA,EAAS,qBAAqB,MAAM,SAC5FrB,EAAgB,MAAM,gBAAgBqB,EAAS,YAAY,IAAI,IAAI,EAEzE,QAASzE,EAAI,EAAGA,EAAIoD,EAAgB,OAAQpD,IAAK,CAC/C,IAAM2E,EAAUvB,EAAgBpD,CAAC,EAC3B4E,EAAaD,EAAQ,KACrBE,EAAe,OAAOF,EAAQ,MAAS,SAAW,EAAIA,EAAQ,KAAK,OACnE,CAAC1E,GAAM6E,CAAM,EAAIL,EAAS,qBAAqBzE,CAAC,EACtD,GAAI4E,IAAe3E,IAAQ4E,IAAiBC,EAC1C,MAAM,IAAI,MAAM,oBAAoB9E,CAAC,0BAA0BC,EAAI,cAAc6E,CAAM,cACnFF,CAAU,cAAcC,CAAY,gBAAgBJ,EAAS,YAAY,IAAI,IAAI,CAEzF,CACF,CAOA,GALAC,GACI,OACA,IAAM,yBAAyB/B,EAAQ,IAAI,UAAUrC,CAAG,UAAUkE,EAAwB,CAAC,CAAC,IACxFA,EAAwB,CAAC,CAAC,IAAIA,EAAwB,CAAC,CAAC,EAAE,EAE9D,KAAK,YAAc,QAAU,KAAK,gBAAkB,YAAa,CACnE,IAAM/C,EAAuC,CAC3C,SAAU,KAAK,gBACf,YAAagD,EAAS,YAAY,KAClC,iBAAA1C,EACA,kBAAAC,CACF,EACA,KAAK,eAAe,KAAKP,CAAiB,EAEtC,KAAK,gBAAkB,aACK,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,EAC7D,KAAKA,CAAiB,CAEjD,CAEA,YAAK,eAAe,IAAIgD,EAAUzB,EAAYO,EAAaiB,EAAyBZ,CAAoB,EAExGlB,GAAeC,EAAQ,IAAI,EACpBX,CACT,CAEA,OAAO+C,EAAmBrE,EAAwB,CAChD,KAAK,eAAe,OAAOqE,EAAWrE,CAAI,CAC5C,CAEA,OAAOsE,EAAaC,EAAmB,CACrC,KAAK,eAAe,OAAOD,EAAKC,CAAG,CACrC,CAEA,MAAM,SAASF,EAAmBG,EAAkD,CAGlF,MAAM,KAAK,eAAe,SAASH,EAAWG,CAAe,CAC/D,CAEA,MAAMC,EAAsB,CAC1B,OAAO,KAAK,eAAe,OAAOA,CAAI,EAAE,EAC1C,CAEA,KAAKC,EAAqB,CACxB,OAAO,KAAK,eAAe,QAAQA,CAAG,CACxC,CAEA,aAAaxD,EAAoBF,EAAkB2D,EAAoBxD,EAA0B,CAC/F,IAAMyD,EAAKC,GAAwB,IAAI3D,CAAU,EACjD,GAAI,CAAC0D,EACH,MAAM,IAAI,MAAM,2BAA2B1D,CAAU,EAAE,EAGzD,IAAMD,EAAyB,CAC7B,WAAAC,EACA,WAAAC,EACA,YAAayD,EAAG,CAAC,EACjB,WAAY,CAACA,EAAG,CAAC,EAAGD,CAAS,CAC/B,EACA,KAAK,QAAQ,IAAI3D,EAAUC,CAAU,CACvC,CAEA,cAAcD,EAAwB,CACpC,IAAMiC,EAAiB,KAAK,qBAAqB,IAAIjC,CAAQ,EAC7D,GAAIiC,EAAgB,CAClB,QAAWjD,KAAQiD,EACjB,KAAK,eAAe,QAAQjD,EAAK,EAAE,EAErC,KAAK,qBAAqB,OAAOgB,CAAQ,CAC3C,CAEA,KAAK,iBAAiB,OAAOA,CAAQ,EACrC,KAAK,QAAQ,OAAOA,CAAQ,CAC9B,CAEA,cAAcA,EAAkB8D,EAAyBC,EAA6C,CACpG,IAAMC,EAAS,KAAK,QAAQ,IAAIhE,CAAQ,EACxC,GAAI,CAACgE,EACH,MAAM,IAAI,MAAM,uBAAuBhE,CAAQ,EAAE,EAEnD,IAAME,EAAa8D,EAAO,WACpB7D,EAAa6D,EAAO,WACpBC,EAAcD,EAAO,YACrBE,EAAaF,EAAO,WAC1B,GAAI,KAAK,kBAAoB,KAC3B,MAAM,IAAI,MAAM,YAAY9D,CAAU,KAAKC,CAAU,2CAA2C,EAElG,KAAK,gBAAkBH,EAGnBkE,EAAW,CAAC,IACdA,EAAW,CAAC,EAAIA,EAAW,CAAC,EAAEA,EAAW,CAAC,CAAC,EAC3CA,EAAW,CAAC,EAAI,QAGlBlB,GAAU,OAAQ,IAAM,kCAAkC9C,CAAU,KAAKC,CAAU,MAAM,EAEzF,IAAMgE,EAAgB,KAAK,IAAI,MAE/B,KAAK,cAAgB,CAAC,EACtB,GAAI,CACF,OAAIA,GACF,KAAK,OAAO,eAAe,YAAY,EAGzCF,EAAYH,EAASI,EAAW,CAAC,CAAC,EAC3B,CACT,OAASE,EAAG,CACV,OAAAL,EAAO,KAAK,QAAQ,QAAQ,qBAAqB7D,CAAU,KAAKC,CAAU,aAAaiE,CAAC,EAAE,CAAC,EACpF,CACT,QAAE,CACID,GACFJ,EAAO,KAAK,KAAK,OAAO,cAAc,EAAE,KACpCM,GAAOA,EAAM,qCAAqCnE,CAAU,KAAKC,CAAU,MAAMkE,EAAI,OAAO,GAAK,IAAI,CAAC,EAG5G,QAAWrF,KAAQ,KAAK,cACtB,KAAK,eAAe,QAAQA,EAAK,EAAE,EAErC,KAAK,cAAgB,CAAC,EACtB,KAAK,gBAAkB,IACzB,CACF,CAGA,eAAesF,EAAmBC,EAAeC,EAAmBf,EAAsB,CACxF,IAAIgB,EAA4B,KAAK,2BAA2B,IAAIH,CAAS,EACxEG,IACHA,EAA4B,IAAI,IAChC,KAAK,2BAA2B,IAAIH,EAAWG,CAAyB,GAG1E,IAAMC,EAAiBD,EAA0B,IAAIF,CAAK,EACpDI,EAAK
,KAAK,eAAe,uBAAuBH,EAAQf,EAAMiB,IAAiB,CAAC,CAAC,EACvF,OAAAD,EAA0B,IAAIF,EAAO,CAACI,EAAIH,CAAM,CAAC,EAC1CG,CACT,CACA,kBAAkBL,EAAyB,CACzC,IAAMG,EAA4B,KAAK,2BAA2B,IAAIH,CAAS,EAC3EG,IACFA,EAA0B,QAAQG,GAAc,KAAK,eAAe,yBAAyBA,EAAW,CAAC,CAAC,CAAC,EAC3G,KAAK,2BAA2B,OAAON,CAAS,EAEpD,CACA,UAAUjB,EAA8B,CACtC,IAAM9B,EAAU,KAAK,eAAe,IAAI8B,CAAS,EACjD,GAAI,CAAC9B,EACH,MAAM,IAAI,MAAM,2BAA2B8B,CAAS,EAAE,EAExD,OAAO9B,EAAQ,MACjB,CACA,iBAAiBsD,EAAsBpB,EAAclF,EAClB,CACjC,MAAO,UAAY,CACjB,IAAMS,EAAO,MAAM8F,GAAgB,KAAMD,EAAWpB,CAAI,EACxD,OAAOsB,GAAW/F,EAAK,OAAQT,CAAI,CACrC,CACF,CAEA,eAAegG,EAAqB,CAC9B,KAAK,YAAc,iBAKtB,KAAK,mBAA2B,eAAe,KAAK,SAAUA,CAAK,CACtE,CACA,cAAqB,CACnB,KAAK,UAAY,QACb,KAAK,IAAI,OAAO,WAAW,OAAS,YACnC,OAAO,KAAK,IAAI,MAAU,IAAc,KAAK,IAAI,KAAK,MAAQ,KAAK,IAAI,UACtE,KAAK,OAAO,SAAS,IAAI,qDAAqD,EAChF,KAAK,UAAY,gBACR,KAAK,OAAO,SAAS,IAAI,iBAAiB,IACnD,KAAK,UAAY,aAGf,KAAK,YAAc,QAAU,OAAO,KAAK,SAAa,MACxD,KAAK,SAAW,KAAK,OAAO,eAAe,CACzC,KAAM,YACN,MAAO,KAAK,kBAAoB,CAClC,CAAC,EACD,KAAK,mBAAqB,KAAK,OAAO,aAElC,CAAC,KAAM,KAAK,kBAAoB,EAAI,EAAG,MAAO,eAAe,SAAW,eAAe,aAAa,CAAC,GAG/G,CAEA,cAAqB,CACnBvB,GAAU,OAAQ,cAAc,EAC3B,KAAK,oBAAoB,IAAI,KAAK,gBAAiB,GACtD,KAAK,oBAAoB,IAAI,KAAK,iBAAmB,CAAC,CAAC,EAEpD,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,GACzD,KAAK,uBAAuB,IAAI,KAAK,iBAAmB,CAAC,CAAC,EAG5D,KAAK,MAAM,EACX,KAAK,cAAgB,WACvB,CACA,YAAmB,CACjBA,GAAU,OAAQ,YAAY,EAE9B,KAAK,MAAM,EACX,KAAK,cAAgB,SACvB,CACA,QAAe,CACbA,GAAU,OAAQ,QAAQ,EAC1B,KAAK,cAAgB,YACrB,IAAMgC,EAAqB,KAAK,oBAAoB,IAAI,KAAK,gBAAiB,EACxEC,EAAwB,KAAK,uBAAuB,IAAI,KAAK,gBAAiB,EAC9E7B,EAAS4B,EAAoB,OACnC,KAAK,eAAiB,CAAC,EACvB,QAAS1G,EAAI,EAAGA,EAAI8E,EAAQ9E,IAAK,CAC/B,IAAM4G,EAAqB,KAAK,sBAAsB,EAChDC,EAAUH,EAAoB1G,CAAC,EACrC,KAAK,eAAe,KAAK,sBAAwB,CAAC,EAClD4G,EAAmB,YAAYC,EAAQ,eAAe,EACtDD,EAAmB,aAAa,EAAGC,EAAQ,SAAS,EACpDD,EAAmB,mBAAmB,GAAGC,EAAQ,aAAa,EAC9D,KAAK,eAAe,KAAK,sBAAwB,EAAI,CAAC,EACtD,KAAK,wBACD,KAAK,YAAc,QACrB,KAAK,eAAe,KAAKF,EAAuB3G,CAAC,CAAC,GAEhD,KAAK,uBAAyB,KAAK,mBAAqB,KAAK,YAAc,cAC7E,KAAK,eAAe,EAElB,KAAK,uBAAyB,KAAK,mBACrC,KAAK,MAAM,CAEf,CAEA,KAAK,MAAM,EACX,KAAK,cAAgB,SACvB,CAEA,iBAAiBgG,EAAyB,CACxC,KAAK,kBAAkBA,CAAS,EAC5B,KAAK,oBAAoB,IAAIA,CAAS,GACxC,KAAK,oBAAoB,OAAOA,CAAS,EAEvC,KAAK,uBAAuB,IAAIA,CAAS,GAC3C,KAAK,uBAAuB,OAAOA,CAAS,EAE9C,KAAK,eAAe,iBAAiBA,CAAS,CAChD,CAEA,WAAWA,EAAyB,CAClC,KAAK,iBAAmBA,EACxB,KAAK,aAAa,CACpB,CACF,ICx0BA,IAAAc,GAAA,GAAAC,GAAAD,GAAA,UAAAE,KAAA,IAgBMC,GAuCAC,GAoHOF,GA3KbG,GAAAC,GAAA,kBAMAC,KAEAC,KACAC,KAEAC,KAKMP,GAAN,MAAMQ,CAAqC,CACzC,YACYC,EAAuCC,EAAkCC,EACjEC,EAAyB,CADjC,YAAAH,EAAuC,cAAAC,EAAkC,UAAAC,EACjE,UAAAC,CAA0B,CAE9C,iBAAgC,CAC9B,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMC,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,aACJ,IAAI,aAAa,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CAChG,CAEA,kBAAkC,CAChC,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMA,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,cACJ,IAAI,cAAc,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CACjG,CAEA,eAA4B,CAC1B,GAAI,KAAK,WAAa,EACpB,MAAM,IAAI,MAAM,mBAAmB,EAErC,IAAMA,EAAeC,EAAU,KAAK,KAAK,IAAI,EAC7C,OAAOD,IAAiB,EAAI,IAAI,WAAe,IAAI,WAAW,KAAK,OAAO,MAAM,OAAQ,KAAK,KAAMA,CAAY,CACjH,CAEA,QAAQE,EAAwC,CAC9C,GAAID,EAAU,KAAKC,CAAO,IAAMD,EAAU,KAAK,KAAK,IAAI,EACtD,MAAM,IAAI,MAAM,mBAAmB,EAErC,OAAO,IAAIN,EAAe,KAAK,OAAQ,KAAK,SAAU,KAAK,KAAMO,CAAO,CAC1E,CACF,EAEMd,GAAN,KAAmD,CAajD,YAAoBQ,EAA+BO,EAAwBC,EAA2B,CAAlF,YAAAR,EAA+B,aAAAO,EAFnD,KAAQ,iBAAmB,EAC3B,KAAQ,eAAiB,EAEvB,KAAK,YAAcA,EAAQ,YAC3B,IAAME,EAAUT,EAAO,QAGnBU,EAAaF,IAAsB,EACvC,KAAK,gBAAkBC,EAAQC,GAAW,EAC1C,IAAMC,EAAaF,EAAQC,GAAW,EACtC,KAAK,YAAcD,EAAQC,GAAW,EACtC,KAAK,iBAAmBD,EAAQC,GAAW,EAC3C,KAAK,eAAiBD,EAAQC,GAAW,EAEzC,
IAAME,EAAuB,CAAC,EAC9B,QAASC,EAAI,EAAGA,EAAIF,EAAYE,IAAK,CACnC,IAAMZ,EAAWQ,EAAQC,GAAW,EAC9BR,EAAOO,EAAQC,GAAW,EAC1BI,EAAML,EAAQC,GAAW,EACzBP,EAAiB,CAAC,EACxB,QAASY,EAAI,EAAGA,EAAID,EAAKC,IACvBZ,EAAK,KAAKM,EAAQC,GAAW,CAAC,EAEhCE,EAAO,KAAK,IAAIrB,GAAeS,EAAQC,EAAUC,EAAMC,CAAI,CAAC,CAC9D,CACA,KAAK,OAASS,CAChB,CAhCA,IAAI,kBAA6C,CAC/C,OAAO,KAAK,QAAQ,uBACtB,CACA,IAAI,kBAA+B,CACjC,OAAO,KAAK,OAAO,OAAO,SAAS,KAAK,iBAAkB,KAAK,iBAAmB,KAAK,cAAc,CACvG,CA6BA,6BAAwD,CACtD,MAAO,CACL,KAAK,QAAQ,OAAO,OAAO,yBAA0B,KAAK,QAAQ,OAAO,OAAO,yBAChF,KAAK,QAAQ,OAAO,OAAO,wBAC7B,CACF,CAEA,mCAA4C,CAC1C,OAAO,KAAK,QAAQ,OAAO,OAAO,8BACpC,CAEA,QAAQI,EAAsBC,EAAyE,CAErG,IAAMC,EACFD,GAAsB,QAAQ,IAAIJ,GAAK,OAAOA,GAAM,SAAW,KAAK,OAAOA,CAAC,EAAIA,CAAC,GAAK,KAAK,OAEzFM,EAAgBF,GAAsB,SAAW,CAAC,EAClDG,EAAqB,CAACC,EAAepB,EAAkBE,IACzD,IAAIZ,GAAe,KAAK,OAAQU,EAAU,KAAK,OAAOoB,EAAOlB,CAAI,EAAGA,CAAI,EACtEmB,EAAwB,CAACrB,EAAkBE,IAAwC,CACvF,IAAMoB,EAAcC,GAAqBvB,CAAQ,EACjD,GAAI,CAACsB,EACH,MAAM,IAAI,MAAM,0BAA0BtB,CAAQ,EAAE,EAEtD,IAAMwB,EAAaF,EAAclB,EAAU,KAAKF,CAAI,EAC9CuB,EAAYD,EAAa,EAAI,KAAK,QAAQ,eAAe,OAAOA,CAAU,EAAE,GAAK,EACvF,OAAO,IAAIlC,GAAe,KAAK,OAAQU,EAAUyB,EAAWvB,CAAI,CAClE,EACA,OAAO,KAAK,QAAQ,IAChBa,EAASE,EAAcC,EAAeC,EAAoBE,EAAuB,KAAK,WAAW,CACvG,CAEA,OAAOD,EAAelB,EAAiC,CACrD,IAAMwB,EAAQ,KAAK,OAAO,UAAU,EACpC,GAAI,CACF,IAAMzB,EAAO,KAAK,OAAO,YAAY,EAAIC,EAAK,QAAU,CAAsB,EAC1EyB,EAAS1B,GAAQ,EACrB,KAAK,OAAO,QAAQ0B,GAAQ,EAAIzB,EAAK,OACrC,QAASU,EAAI,EAAGA,EAAIV,EAAK,OAAQU,IAC/B,KAAK,OAAO,QAAQe,GAAQ,EAAIzB,EAAKU,CAAC,EAExC,OAAO,KAAK,OAAO,YAAa,KAAK,gBAAiBQ,EAAOnB,CAAI,CACnE,OAAS2B,EAAG,CACV,MAAM,IAAI,MACN,sCAAsCR,CAAK,gBAAgBlB,CAAI,8GAErD0B,CAAC,EAAE,CACnB,QAAE,CACA,KAAK,OAAO,aAAaF,CAAK,CAChC,CACF,CACF,EA0BarC,GACT,MAAMwC,EAAwB9B,EAAuB+B,EAAUC,IAA2C,CAC5G,IAAMC,EAAWjC,EAAO,SACxB,GAAI,CAACiC,EACH,MAAM,IAAI,MAAM,mFAAmF,EAGrG,GAAIH,IAAS,SAAU,CACrB,IAAMvB,EAAU,IAAI2B,GACpB,MAAM3B,EAAQ,WAAWwB,EAAKC,CAAW,EAEzCC,EAAS,SAAU,CAEjB1B,EAGC4B,GAAiB5B,EAAQ,MAAM4B,CAAI,EAGnCC,GAAgB7B,EAAQ,KAAK6B,CAAG,EAGjC,CAACC,EAAaC,EAAaH,EAAcI,EAAc,KAAU,CAC/D,GAAIA,EACFC,GAAU,UAAW,IAAM,kCAAkCH,CAAG,SAASC,CAAG,UAAUH,CAAI,EAAE,EAC5F5B,EAAQ,OAAO8B,EAAKC,CAAG,MAClB,CACLE,GAAU,UAAW,IAAM,yCAAyCH,CAAG,eAAeC,CAAG,UAAUH,CAAI,EAAE,EACzG,IAAMjC,EAAOF,EAAO,OAAO,SAASqC,IAAQ,GAAIA,IAAQ,GAAKF,CAAI,EACjE5B,EAAQ,OAAO+B,EAAKpC,CAAI,CAC1B,CACF,EAGA,MAAMwB,EAAmBe,EAAoBN,IACxB,CACfK,GACI,UACA,IAAM,wCAAwCd,CAAS,gBAAgBe,CAAU,UAAUN,CAAI,EAAE,EAErG,MAAM5B,EAAQ,SACVmB,EAAW,IAAM1B,EAAO,OAAO,SAASyC,IAAe,GAAIA,IAAe,GAAKN,CAAI,CAAC,CAC1F,EAGJ,CAACO,EAAoBC,EAAkBC,IAAuBrC,EAAQ,aAClEmC,EAAYC,EAAUC,EAAW5C,EAAO,aAAaA,EAAO,iBAAkB2C,CAAQ,CAAC,CAAC,EAG3FE,GAAmBtC,EAAQ,cAAcsC,CAAM,EAGhD,CAACA,EAAgBrC,EAA2BsC,EAAuBC,IAAwC,CACzGP,GACI,UACA,IAAM,mCAAmCM,CAAa,YAAYD,CAAM,uBACpErC,CAAiB,EAAE,EAC3B,IAAMwC,EAAU,IAAIxD,GAAmBQ,EAAQO,EAASC,CAAiB,EACzE,OAAOD,EAAQ,cAAcsC,EAAQG,EAASD,CAAM,CACtD,EAEA,IAAMxC,EAAQ,aAAa,EAE3B,IAAMA,EAAQ,WAAW,EAEzB,IAAMA,EAAQ,OAAO,CACvB,CAAC,CACH,MACE0B,EAAS,OAAO,CAEpB,ICjPA,IA+DMgB,GAWOC,GAWAC,GAyFPC,GAOAC,GAqBOC,GAkBAC,GAmIAC,GAuBAC,GA+EAC,GA6OAC,GAgBAC,GAlsBbC,GAAAC,GAAA,kBAMAC,KACAC,KACAC,KACAC,KACAC,KACAC,KAoDMnB,GAAU,CAACoB,EAAoBC,IAA+B,CAChDC,GAAY,EAAE,SAASF,EAAYC,CAAY,IAC/C,GAChBE,GAAe,+BAAgC,CAEnD,EAMatB,GAAc,MAAMuB,GAA4B,CAE3DxB,GAAQwB,EAAI,KAAK,WAAaC,GAAqBD,EAAI,QAAQ,CAAC,CAClE,EAQatB,GAAS,MAAMsB,EAAUE,IAAkC,CACtC,CAE9B,IAAMC,EAAW,cAAuB,KAExC,GAAID,IAAW,SAAU,CAEvB,GAAI,OAAO,UAAc,KAAe,CAAC,UAAU,IACjD,MAAM,IAAI,MAAM,gDAAgD,EAGlE,IAAIE,EAAUJ,EAAI,OAAO,QACzB,GAAKI,GAmBH,GAAI,OAAOA,EAAQ,QAAW,UAAY,OAAOA,EAAQ,UAAa,UAClE,OAAOA,EAAQ,eAAkB,WACnC,MAAM,IAAI,MAAM,kFAAkF,MArBxF,CAEZ,IAAMC,EAAkBL,EAAI,OAAO,gBA
CnC,GAAIK,IAAoB,QAAaA,IAAoB,aACrDA,IAAoB,mBACtB,MAAM,IAAI,MAAM,qCAAqCA,CAAe,GAAG,EAEzE,IAAMC,EAAuBN,EAAI,OAAO,qBACxC,GAAIM,IAAyB,QAAa,OAAOA,GAAyB,UACxE,MAAM,IAAI,MAAM,0CAA0CA,CAAoB,GAAG,EAGnF,GADAF,EAAU,MAAM,UAAU,IAAI,eAAe,CAAC,gBAAAC,EAAiB,qBAAAC,CAAoB,CAAC,EAChF,CAACF,EACH,MAAM,IAAI,MACN,0GAC+E,CAEvF,CAQA,GAAI,CAACJ,EAAI,KAAK,KACZ,MAAM,IAAI,MACN,qGAAqG,EAG3G,MAAMG,EAAS,SAAUL,GAAY,EAAGE,EAAKI,CAAO,CACtD,CACA,GAAIF,IAAW,QAAS,CAEtB,GAAI,OAAO,UAAc,KAAe,CAAE,UAAuC,GAC/E,MAAM,IAAI,MAAM,+CAA+C,EAGjE,MAAMC,EAAS,QAASL,GAAY,EAAGE,CAAG,CAC5C,CACF,CACF,EAoCMrB,GAAiB,IAAI,IAOrBC,GAA8B2B,GAA4C,CAC9E,IAAMC,EAAOV,GAAY,EACnBW,EAAQD,EAAK,UAAU,EAC7B,GAAI,CACF,IAAME,EAAaF,EAAK,WAAW,CAAC,EAEpC,OADkBA,EAAK,wBAAwBD,EAAeG,EAAYA,EAAa,CAAC,IACtE,GAChBX,GAAe,uCAAwC,EAElD,CAACS,EAAK,OAAOE,EAAa,CAAC,EAAGF,EAAK,OAAOE,EAAa,EAAI,CAAC,CAAC,CACtE,QAAE,CACAF,EAAK,aAAaC,CAAK,CACzB,CACF,EAQa5B,GAA0B8B,GAAwC,CAC7E,IAAMH,EAAOV,GAAY,EACnBc,EAAkBJ,EAAK,QAAQG,EAAM,UAAU,EACrD,GAAIC,IAAoB,EACtB,MAAM,IAAI,MAAM,+DAA+DD,EAAM,UAAU,GAAG,EAEpG,OAAAH,EAAK,OAAO,IAAIG,EAAOC,CAAe,EAC/B,CAACA,EAAiBD,EAAM,UAAU,CAC3C,EAUa7B,GAAgB,MACzB+B,EACAC,IAAoF,CACtF,IAAIF,EAAyBG,EACvBP,EAAOV,GAAY,EAErB,MAAM,QAAQe,CAAS,EAEzB,CAACD,EAAiBG,CAAe,EAAIF,EAC5BA,EAAU,SAAWL,EAAK,OAAO,OAE1C,CAACI,EAAiBG,CAAe,EAAI,CAACF,EAAU,WAAYA,EAAU,UAAU,EAGhF,CAACD,EAAiBG,CAAe,EAAIlC,GAAuBgC,CAAS,EAGvE,IAAIN,EAAgB,EAChBS,EAAuB,EACvBC,EAAkB,EAClBC,EAAmB,CAAC,EAClBC,EAAwB,CAAC,EACzBC,EAAyB,CAAC,EAEhC,GAAI,CAGF,GAFA,CAACJ,EAAsBE,CAAM,EAAIG,GAAkBP,CAAO,EAEtDA,GAAS,cAAgBN,EAAK,kBAAmB,CACnD,IAAMc,EAAkB,CAAC,EACzB,QAAWC,KAAQT,EAAQ,aAAc,CACvC,IAAMU,EAAO,OAAOD,GAAS,SAAWA,EAAOA,EAAK,KACpDD,EAAgB,KAAKG,GAAS,OAAOF,GAAS,SAAWA,EAAOA,EAAK,IAAI,EAAE,KAAKG,GAAQ,CACtFlB,EAAK,kBAAmBgB,EAAME,CAAI,CACpC,CAAC,CAAC,CACJ,CAGA,MAAM,QAAQ,IAAIJ,CAAe,CACnC,CAEAf,EAAgB,MAAMC,EAAK,kBAAkBI,EAAiBG,EAAiBC,CAAoB,EAC/FT,IAAkB,GACpBR,GAAe,yBAA0B,EAG3C,GAAM,CAAC4B,EAAYC,CAAW,EAAIhD,GAA2B2B,CAAa,EAEpEsB,EAAqB,CAAC,CAACf,GAAS,mBAEhCgB,EAAa,CAAC,EACdC,EAAc,CAAC,EACfC,EAAwE,CAAC,EAC/E,QAASC,EAAI,EAAGA,EAAIN,EAAYM,IAAK,CACnC,IAAMC,EAAO1B,EAAK,iBAAiBD,EAAe0B,CAAC,EAC/CC,IAAS,GACXnC,GAAe,0BAA2B,EAE5CoB,EAAsB,KAAKe,CAAI,EAC/BJ,EAAW,KAAKtB,EAAK,aAAa0B,CAAI,CAAC,CACzC,CACA,QAASD,EAAI,EAAGA,EAAIL,EAAaK,IAAK,CACpC,IAAMC,EAAO1B,EAAK,kBAAkBD,EAAe0B,CAAC,EAChDC,IAAS,GACXnC,GAAe,2BAA4B,EAE7CqB,EAAuB,KAAKc,CAAI,EAChC,IAAMC,EAAa3B,EAAK,aAAa0B,CAAI,EACzCH,EAAY,KAAKI,CAAU,EAEK,CAC9B,GAAIN,GAAsBf,GAAS,0BAA4B,OAAW,CACxEkB,EAAyB,KAAK,YAAY,EAC1C,QACF,CACA,IAAMI,EAAW,OAAOtB,GAAS,yBAA4B,SACzDA,EAAQ,wBACRA,GAAS,0BAA0BqB,CAAU,GAAK,MACtD,GAAIC,IAAa,OAASA,IAAa,cAAgBA,IAAa,aAClE,MAAM,IAAI,MAAM,4CAA4CA,CAAQ,GAAG,EAEzE,GAAIP,GAAsBO,IAAa,aACrC,MAAM,IAAI,MAAM,4CACZA,CAAQ,4EAA4E,EAE1FJ,EAAyB,KAAKI,CAAQ,CACxC,CACF,CAGA,IAAIC,EAAoC,KACxC,OAAkCL,EAAyB,KAAKM,GAAKA,IAAM,YAAY,IACrFrB,EAAkBT,EAAK,kBAAkBD,CAAa,EAClDU,IAAoB,GACtBlB,GAAe,0BAA2B,EAG5CsC,EAAe,CACb,OAAQpB,EACR,yBAAAe,EACA,gCAAiCA,EAAyB,IAAIM,GAAKC,GAAyBD,CAAC,CAAC,CAChG,GAGF3D,GAAe,IACX4B,EACA,CAACA,EAAeY,EAAuBC,EAAwBiB,EAAcR,EAAoB,EAAK,CAAC,EACpG,CAACtB,EAAeuB,EAAYC,CAAW,CAChD,OAASS,EAAG,CACV,MAAArB,EAAsB,QAAQsB,GAAOjC,EAAK,SAASiC,CAAG,CAAC,EACvDrB,EAAuB,QAAQqB,GAAOjC,EAAK,SAASiC,CAAG,CAAC,EAEpDxB,IAAoB,GACtBT,EAAK,mBAAmBS,CAAe,EAGrCV,IAAkB,GACpBC,EAAK,mBAAmBD,CAAa,EAEjCiC,CACR,QAAE,CACAhC,EAAK,MAAMI,CAAe,EACtBI,IAAyB,GAC3BR,EAAK,0BAA0BQ,CAAoB,EAErDE,EAAO,QAAQwB,GAASlC,EAAK,MAAMkC,CAAK,CAAC,EAGzClC,EAAK,sBAAsB,CAC7B,CACF,EAEazB,GAAkB4D,GAA4B,CACzD,IAAMnC,EAAOV,GAAY,EACnB8C,EAAUjE,GAAe,IAAIgE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,+CAA+CD,CAAS,EAAE,EAE5E,GAAM,CAACpC,EAAeY,EAAuBC,EAAwByB,EAAgBh
B,CAAkB,EAAIe,EAEvGC,IACEhB,GACFrB,EAAK,sBAAsBqC,EAAe,MAAM,EAElDrC,EAAK,mBAAmBqC,EAAe,MAAM,GAG/CrC,EAAK,uBAAuBmC,CAAS,EAErCxB,EAAsB,QAAQsB,GAAOjC,EAAK,SAASiC,CAAG,CAAC,EACvDrB,EAAuB,QAAQqB,GAAOjC,EAAK,SAASiC,CAAG,CAAC,EACxDjC,EAAK,mBAAmBD,CAAa,EACrC5B,GAAe,OAAOgE,CAAS,CACjC,EAEa3D,GACT,CAAC8D,EAA6BC,EAAyB7B,EAAkByB,EAAmBK,EAC3FnB,EAAqB,KAAgB,CACpC,GAAI,CAACiB,EAAQ,CACXC,EAAc,KAAK,CAAC,EACpB,MACF,CAEA,IAAMvC,EAAOV,GAAY,EAEnBmD,EAAWH,EAAO,CAAC,EACnBI,EAAOJ,EAAO,CAAC,EACfV,EAAWU,EAAO,CAAC,EAErBK,EACAC,EAEJ,GAAIH,IAAa,UAAYb,IAAa,aACxC,MAAM,IAAI,MAAM,wCAAwC,EAG1D,GAAIP,GAAsBO,IAAa,aACrC,MAAM,IAAI,MACN,2DAA2DY,CAAK,mCAAmC,EAGzG,GAAIZ,IAAa,aAAc,CAC7B,IAAMiB,EAAYP,EAAO,CAAC,EAAE,UACtBQ,EAAqBC,GAAqBC,GAA2BP,CAAQ,CAAC,EACpFG,EAAiBF,EAAK,OAAO,CAACO,EAAGC,IAAMD,EAAIC,EAAG,CAAC,EAAIJ,EAEnD,IAAMK,EAAiBnD,EAAK,mBAC5B,GAAI,CAACmD,EACH,MAAM,IAAI,MAAM,qEAAqE,EAEvFR,EAAUQ,EAAehB,EAAWK,EAAOK,EAAWD,CAAc,CACtE,KAAO,CACL,IAAM1B,EAAOoB,EAAO,CAAC,EAErB,GAAI,MAAM,QAAQpB,CAAI,EAAG,CAEvB0B,EAAiB,EAAI1B,EAAK,OAC1ByB,EAAU3C,EAAK,QAAQ4C,CAAc,EACrClC,EAAO,KAAKiC,CAAO,EACnB,IAAIS,EAAYT,EAAU,EAC1B,QAASlB,EAAI,EAAGA,EAAIP,EAAK,OAAQO,IAAK,CACpC,GAAI,OAAOP,EAAKO,CAAC,GAAM,SACrB,MAAM,IAAI,UAAU,wBAAwBA,CAAC,kBAAkB,EAEjEzB,EAAK,QAAQoD,GAAW,EAAIC,GAAgBnC,EAAKO,CAAC,EAAGf,CAAM,CAC7D,CACF,MACEkC,EAAiB1B,EAAK,WACtByB,EAAU3C,EAAK,QAAQ4C,CAAc,EACrClC,EAAO,KAAKiC,CAAO,EACnB3C,EAAK,OAAO,IAAI,IAAI,WAAWkB,EAAK,OAAQA,EAAK,WAAY0B,CAAc,EAAGD,CAAO,CAEzF,CAEA,IAAM1C,EAAQD,EAAK,UAAU,EACvBsD,EAAatD,EAAK,WAAW,EAAI0C,EAAK,MAAM,EAClD,GAAI,CACF,IAAIa,EAAWD,EAAa,EAC5BZ,EAAK,QAAQc,GAAKxD,EAAK,OAAOuD,GAAU,EAAIC,CAAC,EAC7C,IAAMlB,EAAStC,EAAK,iBAChBgD,GAA2BP,CAAQ,EAAGE,EAASC,EAAgBU,EAAYZ,EAAK,OAChFX,GAAyBH,CAAQ,CAAC,EAClCU,IAAW,GACb/C,GAAe,iDAAiD4C,CAAS,WAAWK,CAAK,GAAG,EAE9FD,EAAc,KAAKD,CAAM,CAC3B,QAAE,CACAtC,EAAK,aAAaC,CAAK,CACzB,CACF,EAKSxB,GAAM,MACf0D,EAAmBsB,EAAwBC,EAAgCC,EAC3EC,EAA2CtD,IAAoE,CACjH,IAAMN,EAAOV,GAAY,EACnB8C,EAAUjE,GAAe,IAAIgE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,6CAA6CD,CAAS,EAAE,EAE1E,IAAMpC,EAAgBqC,EAAQ,CAAC,EACzBzB,EAAwByB,EAAQ,CAAC,EACjCxB,EAAyBwB,EAAQ,CAAC,EAClCC,EAAiBD,EAAQ,CAAC,EAC1Bf,EAAqBe,EAAQ,CAAC,EAC9ByB,EAAmBzB,EAAQ,CAAC,EAE5BjB,EAAasC,EAAa,OAC1BrC,EAAcuC,EAAc,OAE9BG,EAAmB,EACnBC,EAA6B,CAAC,EAE5BC,EAA+B,CAAC,EAChCC,EAAgC,CAAC,EACjCC,EAA8B,CAAC,EAE/BC,EAAiBnE,EAAK,UAAU,EAChCoE,EAAoBpE,EAAK,WAAWmB,EAAa,CAAC,EAClDkD,GAAmBrE,EAAK,WAAWmB,EAAa,CAAC,EACjDmD,EAAqBtE,EAAK,WAAWoB,EAAc,CAAC,EACpDmD,GAAoBvE,EAAK,WAAWoB,EAAc,CAAC,EAEzD,GAAI,CACF,CAAC0C,EAAkBC,CAAgB,EAAIS,GAAclE,CAAO,EAG5D,QAASmB,GAAI,EAAGA,GAAIN,EAAYM,KAC9BjD,GACIkF,EAAajC,EAAC,EAAGuC,EAAoBE,EAAmB/B,EAAWsB,EAAahC,EAAC,EAAGJ,CAAkB,EAI5G,QAASI,GAAI,EAAGA,GAAIL,EAAaK,KAC/BjD,GACIoF,EAAcnC,EAAC,EAAGwC,EAAqBC,EAAmB/B,EAAWhB,EAAawC,EAAclC,EAAC,EACjGJ,CAAkB,EAGxB,IAAIoD,GAAmBL,EAAoB,EACvCM,GAAkBL,GAAmB,EACrCM,EAAoBL,EAAqB,EACzCM,GAAmBL,GAAoB,EAC3C,QAAS9C,GAAI,EAAGA,GAAIN,EAAYM,KAC9BzB,EAAK,QAAQyE,IAAkB,EAAIT,EAAmBvC,EAAC,EACvDzB,EAAK,QAAQ0E,IAAiB,EAAI/D,EAAsB8C,EAAahC,EAAC,CAAC,EAEzE,QAASA,GAAI,EAAGA,GAAIL,EAAaK,KAC/BzB,EAAK,QAAQ2E,GAAmB,EAAIV,EAAoBxC,EAAC,EACzDzB,EAAK,QAAQ4E,IAAkB,EAAIhE,EAAuB+C,EAAclC,EAAC,CAAC,EAG5E,GAAkCY,GAAkB,CAACwB,EAAkB,CACrE,GAAM,CAAC,OAAAgB,GAAQ,yBAAArD,GAA0B,gCAAAsD,EAA+B,EAAIzC,EAE5E,GAAI1B,EAAsB,SAAWQ,EACnC,MAAM,IAAI,MAAM,2BACZA,CAAU,4DAA4DR,EAAsB,MAAM,IAAI,EAI5G,QAASc,GAAI,EAAGA,GAAIN,EAAYM,KAAK,CACnC,IAAMe,GAAQiB,EAAahC,EAAC,EACV,MAAMzB,EAAK,cAAc6E,GAAQlE,EAAsB6B,EAAK,EAAGwB,EAAmBvC,EAAC,CAAC,IACpF,GAChBlC,GAAe,oBAAoBkC,EAAC,iBAAiBU,CAAS,GAAG,CAErE,CAGA,QAASV,GAAI,EAAGA,GAAIL,EAAaK,KAAK,CACpC,IAAMe,GAAQmB,EAAclC,EAAC,EACZmC,EAAcnC,EAAC,IAA
I,CAAC,EAIjBzB,EAAK,eAAe6E,GAAQjE,EAAuB4B,EAAK,EAAGyB,EAAoBxC,EAAC,EAAG,CAAC,IACpF,GAChBlC,GAAe,mCAAmCkC,EAAC,iBAAiBU,CAAS,GAAG,EAK9EnC,EAAK,eAAe6E,GAAQjE,EAAuB4B,EAAK,EAAG,EAAGsC,GAAgCtC,EAAK,CAAC,IACtF,GAChBjD,GAAe,qBAAqBkC,EAAC,QAAQD,GAAyBC,EAAC,CAAC,gBAAgBU,CAAS,GAAG,CAG1G,CACAhE,GAAe,IACXgE,EACA,CAACpC,EAAeY,EAAuBC,EAAwByB,EAAgBhB,EAAoB,EAAI,CAAC,CAC9G,CAEArB,EAAK,iBAAiBD,CAAa,EACnC,IAAIgF,GAC8B1C,EAChC0C,GAAY,MAAM/E,EAAK,mBACnBD,EAAesC,EAAe,OAAQjB,EAAakD,EAAoBR,CAAgB,EAE3FiB,GAAY,MAAM/E,EAAK,QACnBD,EAAesE,GAAkBD,EAAmBjD,EAAYoD,GAAmBnD,EACnFkD,EAAoBR,CAAgB,EAGtCiB,KAAc,GAChBxF,GAAe,0BAA0B,EAG3C,IAAMyF,GAA2B,CAAC,EAElC,QAASvD,GAAI,EAAGA,GAAIL,EAAaK,KAAK,CACpC,IAAMa,GAAStC,EAAK,QAAQsE,EAAqB,EAAI7C,EAAC,EACtD,GAAIa,KAAW2B,EAAoBxC,EAAC,EAAG,CAErCuD,GAAO,KAAKpB,EAAcnC,EAAC,CAAE,EAC7B,QACF,CAEA,IAAMwD,GAA2BjF,EAAK,UAAU,EAE1CkF,GAAmBlF,EAAK,WAAW,EAAI,CAAC,EAE1CmF,GAAmB,GACnBC,GAA6BlF,GAAa,EAC9C,GAAI,CACgBF,EAAK,kBACnBsC,GAAQ4C,GAAkBA,GAAmB,EAAGA,GAAmB,EAAGA,GAAmB,EAAE,IAC7E,GAChB3F,GAAe,4CAA4CkC,EAAC,GAAG,EAEjE,IAAI4D,GAAkBH,GAAmB,EACnCzC,GAAWzC,EAAK,QAAQqF,IAAiB,EAC/CnF,GAAaF,EAAK,QAAQqF,IAAiB,EAC3C,IAAM/B,GAAatD,EAAK,QAAQqF,IAAiB,EAC3CC,GAAatF,EAAK,QAAQqF,IAAiB,EAC3C3C,GAAO,CAAC,EACd,QAASjB,GAAI,EAAGA,GAAI6D,GAAY7D,KAC9BiB,GAAK,KAAK1C,EAAK,QAAQsD,GAAa,EAAI7B,EAAC,CAAC,EAE5CzB,EAAK,SAASsD,EAAU,EAExB,IAAMiC,GAAO7C,GAAK,OAAO,CAACO,GAAGC,KAAMD,GAAIC,GAAG,CAAC,EAC3CkC,GAAOI,GAA2B/C,EAAQ,EAE1C,IAAMgD,GAAoBpD,GAAgB,yBAAyBsB,EAAclC,EAAC,CAAC,EAEnF,GAAI2D,KAAS,SAAU,CACrB,GAAIK,KAAsB,aACxB,MAAM,IAAI,MAAM,wCAAwC,EAE1D,IAAMC,GAAuB,CAAC,EAC1BtC,GAAYlD,GAAa,EAC7B,QAASuB,GAAI,EAAGA,GAAI8D,GAAM9D,KAAK,CAC7B,IAAMkE,GAAS3F,EAAK,QAAQoD,IAAW,EACjCwC,GAAiBnE,KAAM8D,GAAO,EAAI,OAAYvF,EAAK,QAAQoD,EAAS,EAAIuC,GAC9ED,GAAW,KAAK1F,EAAK,aAAa2F,GAAQC,EAAc,CAAC,CAC3D,CACAZ,GAAO,KAAK,CAACI,GAAM1C,GAAMgD,GAAY,KAAK,CAAC,CAC7C,SAGMD,KAAsB,cAAgBF,GAAO,EAAG,CAClD,IAAMM,GAAY7F,EAAK,cACvB,GAAI,CAAC6F,GACH,MAAM,IAAI,MAAM,uEAAuE,EAEzF,IAAMhD,GAAYgD,GAAU3F,EAAU,EAChC4F,GAAc/C,GAAqBN,EAAQ,EACjD,GAAIqD,KAAgB,QAAa,CAACC,GAAyBX,EAAI,EAC7D,MAAM,IAAI,MAAM,0BAA0BA,EAAI,EAAE,EAIlDD,GAAmB,GAEnBH,GAAO,KAAK,CACVI,GAAM1C,GAAM,CACV,UAAAG,GACA,SAAU7C,EAAK,qBAAsB6C,GAAW0C,GAAOO,GAAaV,EAAI,EACxE,QAAS,IAAM,CACbpF,EAAK,kBAAkBsC,EAAM,CAC/B,CACF,EACA,YACF,CAAC,CACH,KAAO,CACL,IAAM0D,GAAwBC,GAAkCb,EAAI,EAC9DlE,GAAO,IAAI8E,GAAsBT,EAAI,EAC3C,IAAI,WAAWrE,GAAK,OAAQA,GAAK,WAAYA,GAAK,UAAU,EACvD,IAAIlB,EAAK,OAAO,SAASE,GAAYA,GAAagB,GAAK,UAAU,CAAC,EACvE8D,GAAO,KAAK,CAACI,GAAM1C,GAAMxB,GAAM,KAAK,CAAC,CACvC,CAEJ,QAAE,CACAlB,EAAK,aAAaiF,EAAwB,EACtCG,KAAS,UAAYlF,IACvBF,EAAK,MAAME,EAAU,EAElBiF,IACHnF,EAAK,kBAAkBsC,EAAM,CAEjC,CACF,CAEA,OAAID,GAAkB,CAAChB,IACrBrB,EAAK,sBAAsBqC,EAAe,MAAM,EAChDlE,GAAe,IACXgE,EACA,CAACpC,EAAeY,EAAuBC,EAAwByB,EAAgBhB,EAAoB,EAAK,CAAC,GAExG2D,EACT,QAAE,CACAhF,EAAK,aAAamE,CAAc,EAEhCH,EAAmB,QAAQkC,IAAKlG,EAAK,kBAAkBkG,EAAC,CAAC,EACzDjC,EAAoB,QAAQiC,IAAKlG,EAAK,kBAAkBkG,EAAC,CAAC,EAC1DhC,EAAkB,QAAQiC,IAAKnG,EAAK,MAAMmG,EAAC,CAAC,EAExCrC,IAAqB,GACvB9D,EAAK,sBAAsB8D,CAAgB,EAE7CC,EAAiB,QAAQoC,IAAKnG,EAAK,MAAMmG,EAAC,CAAC,CAC7C,CACF,EAKazH,GAAgByD,GAA4B,CACvD,IAAMnC,EAAOV,GAAY,EACnB8C,EAAUjE,GAAe,IAAIgE,CAAS,EAC5C,GAAI,CAACC,EACH,MAAM,IAAI,MAAM,oBAAoB,EAEtC,IAAMrC,EAAgBqC,EAAQ,CAAC,EAGzBgE,EAAkBpG,EAAK,iBAAiBD,CAAa,EACvDqG,IAAoB,GACtB7G,GAAe,iCAAkC,EAEnDS,EAAK,SAASoG,CAAe,CAC/B,EAEazH,GAA8B0H,GAAsE,CAC/G,IAAMC,EAA6B,CAAC,EACpC,QAAWhE,KAAU+D,EAAS,CAC5B,IAAMnF,EAAOoB,EAAO,CAAC,EACjB,CAAC,MAAM,QAAQpB,CAAI,GAAK,WAAYA,GACtCoF,EAAQ,KAAKpF,EAAK,MAAM,CAE5B,CACA,OAAOoF,CACT,IC3sBA,IAAAC,GAAAC,GAAA,CAAAC,GAAAC,KAAA,CAAAA,GAAA,4jmXCAA,IASMC,GACFC,GACAC,GACAC,
GACAC,GAGAC,GACEC,GAEAC,GASAC,GAMAC,GA8BAC,GAEOC,GAsDAC,GAaAC,GAaAC,GAuBAC,GAaAC,GAyBAC,GA/MbC,GAAAC,GAAA,kBAGAC,KAGAC,KACAC,KAEMtB,GAAU,IAAe,CAAC,CAACuB,GAAI,KAAK,OAAS,OAAO,SAAa,IAEnErB,GAAe,GACfC,GAAc,GACdC,GAAU,GAIRE,GAAiF,IAAI,IAErFC,GAAmB,CAACiB,EAA8BC,IAA+C,CACrG,IAAMC,EAAQpB,GAAgB,IAAIkB,CAAI,EAClCE,EACFA,EAAM,KAAKD,CAAS,EAEpBnB,GAAgB,IAAIkB,EAAM,CAACC,CAAS,CAAC,CAEzC,EAEMjB,GAAe,IAAY,CAC/B,GAAIN,IAAgB,CAACC,IAAeC,IAAW,CAACH,GAC9C,MAAM,IAAI,MAAM,kBAAkB,CAEtC,EAEMQ,GAAwBkB,GAA2C,CACvE,OAAQA,EAAG,KAAK,KAAM,CACpB,IAAK,YACHzB,GAAe,GACXyB,EAAG,KAAK,KACVvB,GAAU,GACVC,GAAkB,CAAC,EAAEsB,EAAG,KAAK,GAAG,IAEhCxB,GAAc,GACdE,GAAkB,CAAC,EAAE,GAEvB,MACF,IAAK,UACL,IAAK,YACL,IAAK,SACL,IAAK,UACL,IAAK,MACL,IAAK,gBAAiB,CACpB,IAAMoB,EAAYnB,GAAgB,IAAIqB,EAAG,KAAK,IAAI,EAC9CA,EAAG,KAAK,IACVF,EAAU,MAAM,EAAG,CAAC,EAAEE,EAAG,KAAK,GAAG,EAEjCF,EAAU,MAAM,EAAG,CAAC,EAAEE,EAAG,KAAK,GAAI,EAEpC,KACF,CACA,QACF,CACF,EAEMjB,GAAY,OAAO,SAAa,IAAe,UAAU,eAAqC,IAAM,OAE7FC,GAAqC,SAA0B,CAC1E,GAAI,CAAAR,GAGJ,IAAID,GACF,MAAM,IAAI,MAAM,0CAA4C,EAE9D,GAAIE,GACF,MAAM,IAAI,MAAM,uCAAyC,EAK3D,GAFAF,GAAe,GAEuBF,GAAQ,EAE5C,OAAIuB,GAAI,KAAK,YAAc,QACrBb,IAAaA,GAAU,QAAQ,OAAO,IAAM,IAC9Ca,GAAI,KAAK,UAAYb,GAAU,OAAO,EAAG,CAAEA,GAAW,YAAY,GAAG,EAAI,CAAC,GAIvE,IAAI,QAAc,CAACkB,EAASC,IAAW,CAC5C5B,IAAa,UAAU,EAEvB,IAAM6B,EAAY,IAAI,gBAAgB,IAAI,KACtC,CAGE,IACF,EACA,CAAC,KAAM,iBAAiB,CAAC,CAAC,EAC9B7B,GAAc,IAAI,OAAO6B,EAAW,CAAC,KAAM,uBAAuB,CAAC,EACnE7B,GAAY,QAAW0B,GAAmBE,EAAOF,CAAE,EACnD1B,GAAY,UAAYQ,GACxB,IAAI,gBAAgBqB,CAAS,EAC7BzB,GAAoB,CAACuB,EAASC,CAAM,EACpC,IAAME,EAA0B,CAAC,KAAM,YAAa,GAAKR,EAAG,EAC5DtB,GAAY,YAAY8B,CAAO,CACjC,CAAC,EAGD,GAAI,CACF,MAAMC,GAAsBT,GAAI,IAAI,EACpC,MAAWU,GAAYV,EAAG,EAC1BpB,GAAc,EAChB,OAAS,EAAG,CACV,MAAAC,GAAU,GACJ,CACR,QAAE,CACAF,GAAe,EACjB,EAEJ,EAEaU,GAAkB,MAAMsB,GAAkC,CACrE,GAAsClC,GAAQ,EAC5C,OAAAQ,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,UAAW,CAACqB,EAASC,CAAM,CAAC,EAC7C,IAAME,EAA0B,CAAC,KAAM,UAAW,GAAK,CAAC,OAAAG,EAAQ,IAAAX,EAAG,CAAC,EACpEtB,GAAa,YAAY8B,CAAO,CAClC,CAAC,EAED,MAAWI,GAAOZ,GAAKW,CAAM,CAEjC,EAEarB,GAAyB,MAAMuB,GACJpC,GAAQ,GAC5CQ,GAAa,EACN,IAAI,QAAoC,CAACoB,EAASC,IAAW,CAClEtB,GAAiB,YAAa,CAACqB,EAASC,CAAM,CAAC,EAC/C,IAAME,EAA0B,CAAC,KAAM,YAAa,GAAK,CAAC,OAAAK,CAAM,CAAC,EACjEnC,GAAa,YAAY8B,EAAS,CAACK,EAAO,MAAM,CAAC,CACnD,CAAC,GAEWvB,GAAuBuB,CAAM,EAIhCtB,GACT,MAAMuB,EAA8CC,IACR,CACtC,GAAsCtC,GAAQ,EAAG,CAE/C,GAAIsC,GAAS,wBACX,MAAM,IAAI,MAAM,sEAAsE,EAExF,OAAA9B,GAAa,EACN,IAAI,QAAqC,CAACoB,EAASC,IAAW,CACnEtB,GAAiB,SAAU,CAACqB,EAASC,CAAM,CAAC,EAC5C,IAAME,EAA0B,CAAC,KAAM,SAAU,GAAK,CAAC,MAAAM,EAAO,QAAS,CAAC,GAAGC,CAAO,CAAC,CAAC,EAC9EC,EAA+B,CAAC,EAClCF,aAAiB,YACnBE,EAAa,KAAKF,EAAM,MAAM,EAEhCpC,GAAa,YAAY8B,EAASQ,CAAY,CAChD,CAAC,CACH,KACE,QAAYzB,GAAcuB,EAAOC,CAAO,CAE5C,EAEKvB,GAAiB,MAAMyB,GAAqC,CACvE,GAAsCxC,GAAQ,EAC5C,OAAAQ,GAAa,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,UAAW,CAACqB,EAASC,CAAM,CAAC,EAC7C,IAAME,EAA0B,CAAC,KAAM,UAAW,GAAKS,CAAS,EAChEvC,GAAa,YAAY8B,CAAO,CAClC,CAAC,EAEIhB,GAAeyB,CAAS,CAEjC,EAEaxB,GAAM,MACfwB,EAAmBC,EAAwBC,EAA0BC,EACrEC,EAAqCN,IAAoE,CAC3G,GAAsCtC,GAAQ,EAAG,CAE/C,GAAI0C,EAAO,KAAKG,GAAKA,EAAE,CAAC,IAAM,KAAK,EACjC,MAAM,IAAI,MAAM,iDAAiD,EAGnE,GAAID,EAAQ,KAAKC,GAAKA,CAAC,EACrB,MAAM,IAAI,MAAM,yDAAyD,EAE3E,OAAArC,GAAa,EACN,IAAI,QAAsC,CAACoB,EAASC,IAAW,CACpEtB,GAAiB,MAAO,CAACqB,EAASC,CAAM,CAAC,EACzC,IAAMiB,EAAqBJ,EACrBX,EACF,CAAC,KAAM,MAAO,GAAK,CAAC,UAAAS,EAAW,aAAAC,EAAc,OAAQK,EAAoB,cAAAH,EAAe,QAAAL,CAAO,CAAC,EACpGrC,GAAa,YAAY8B,EAAcgB,GAA2BD,CAAkB,CAAC,CACvF,CAAC,CACH,KACE,QAAY9B,GAAIwB,EAAWC,EAAcC,EAAQC,EAAeC,EAASN,CAAO,CAEpF,EAEarB,GAAe,MAAMuB,GAAqC,CACrE,GAAsCxC,GAAQ,EAC5C,OAAAQ,GAAa
,EACN,IAAI,QAAc,CAACoB,EAASC,IAAW,CAC5CtB,GAAiB,gBAAiB,CAACqB,EAASC,CAAM,CAAC,EACnD,IAAME,EAA0B,CAAC,KAAM,gBAAiB,GAAKS,CAAS,EACtEvC,GAAa,YAAY8B,CAAO,CAClC,CAAC,EAEId,GAAauB,CAAS,CAE/B,IC1NA,IAUaQ,GAWAC,GAiBAC,GAtCbC,GAAAC,GAAA,kBAGAC,KAGAC,KACAC,KACAC,KAEaR,GAAuB,CAACS,EAAgBC,IAA0C,CAC7F,OAAQD,EAAO,SAAU,CACvB,IAAK,MACH,MAAO,CAACA,EAAO,KAAMA,EAAO,KAAMA,EAAO,KAAM,KAAK,EACtD,IAAK,aACH,MAAO,CAACA,EAAO,KAAMA,EAAO,KAAM,CAAC,UAAWA,EAAO,SAAS,EAAG,YAAY,EAC/E,QACE,MAAM,IAAI,MAAM,0BAA0BA,EAAO,QAAQ,QAAQC,EAAQ,CAAC,EAAE,CAChF,CACF,EAEaT,GAAwBQ,GAAmC,CACtE,OAAQA,EAAO,CAAC,EAAG,CACjB,IAAK,MACH,OAAO,IAAIE,GAAOF,EAAO,CAAC,EAAGA,EAAO,CAAC,EAAGA,EAAO,CAAC,CAAC,EACnD,IAAK,aAAc,CACjB,IAAMG,EAAWH,EAAO,CAAC,EACzB,GAAI,CAACI,GAAyBD,CAAQ,EACpC,MAAM,IAAI,MAAM,4BAA4BA,CAAQ,+BAA+B,EAErF,GAAM,CAAC,UAAAE,EAAW,SAAAC,EAAU,QAAAC,CAAO,EAAIP,EAAO,CAAC,EAC/C,OAAOE,GAAO,cAAcG,EAAW,CAAC,SAAAF,EAAU,KAAMH,EAAO,CAAC,EAAG,SAAAM,EAAU,QAAAC,CAAO,CAAC,CACvF,CACA,QACE,MAAM,IAAI,MAAM,0BAA0BP,EAAO,CAAC,CAAC,EAAE,CACzD,CACF,EAEaP,GAAN,KAA8E,CAMnF,MAAM,8BAA8Be,EAAmD,CAErF,OAAOC,GAAuB,MAAMC,GAASF,CAAI,CAAC,CACpD,CAEA,MAAM,UAAUG,EAAiCC,EAA0D,CACzGC,GAAiB,EACjB,IAAIC,EAEA,OAAOH,GAAiB,SACtB,OAAO,QAAY,KAAe,QAAQ,UAAY,QAAQ,SAAS,KAEzEG,EAAQ,MAAMJ,GAASC,CAAY,EAInCG,EAAQ,MAAM,KAAK,8BAA8BH,CAAY,EAG/DG,EAAQH,EAGV,CAAC,KAAK,UAAW,KAAK,WAAY,KAAK,WAAW,EAAI,MAAMI,GAAcD,EAAOF,CAAO,EACxFI,GAAe,CACjB,CAEA,MAAM,SAAyB,CAC7B,OAAOC,GAAe,KAAK,SAAS,CACtC,CAEA,MAAM,IAAIC,EAAiCC,EAAqCP,EACzC,CACrCC,GAAiB,EACjB,IAAMO,EAAuB,CAAC,EACxBC,EAAyB,CAAC,EAChC,OAAO,QAAQH,CAAK,EAAE,QAAQI,GAAO,CACnC,IAAMC,EAAOD,EAAI,CAAC,EACZtB,EAASsB,EAAI,CAAC,EACdE,EAAQ,KAAK,WAAW,QAAQD,CAAI,EAC1C,GAAIC,IAAU,GACZ,MAAM,IAAI,MAAM,kBAAkBD,CAAI,GAAG,EAE3CH,EAAW,KAAKpB,CAAM,EACtBqB,EAAa,KAAKG,CAAK,CACzB,CAAC,EAED,IAAMC,EAAkC,CAAC,EACnCC,EAA0B,CAAC,EACjC,OAAO,QAAQP,CAAO,EAAE,QAAQG,GAAO,CACrC,IAAMC,EAAOD,EAAI,CAAC,EACZtB,EAASsB,EAAI,CAAC,EACdE,EAAQ,KAAK,YAAY,QAAQD,CAAI,EAC3C,GAAIC,IAAU,GACZ,MAAM,IAAI,MAAM,mBAAmBD,CAAI,GAAG,EAE5CE,EAAY,KAAKzB,CAAM,EACvB0B,EAAc,KAAKF,CAAK,CAC1B,CAAC,EAED,IAAMG,EACFP,EAAW,IAAI,CAACQ,EAAGC,IAAMtC,GAAqBqC,EAAG,IAAM,UAAU,KAAK,WAAWP,EAAaQ,CAAC,CAAC,CAAC,GAAG,CAAC,EACnGC,EAAUL,EAAY,IACxB,CAACG,EAAGC,IAAMD,EAAIrC,GAAqBqC,EAAG,IAAM,WAAW,KAAK,YAAYF,EAAcG,CAAC,CAAC,CAAC,GAAG,EAAI,IAAI,EAElGE,EAAU,MAAMC,GAAI,KAAK,UAAWX,EAAcM,EAAQD,EAAeI,EAASlB,CAAO,EAEzFqB,EAAuC,CAAC,EAC9C,QAASJ,EAAI,EAAGA,EAAIE,EAAQ,OAAQF,IAClCI,EAAU,KAAK,YAAYP,EAAcG,CAAC,CAAC,CAAC,EAAIJ,EAAYI,CAAC,GAAKrC,GAAqBuC,EAAQF,CAAC,CAAC,EAEnG,OAAAb,GAAe,EACRiB,CACT,CAEA,gBAAuB,CAEvB,CAEA,cAAqB,CACdC,GAAa,KAAK,SAAS,CAClC,CACF,IC7HA,IAeaC,GA6BAC,GA5CbC,GAAAC,GAAA,kBAIAC,KAEAC,KACAC,KAQaN,GAAkB,IAAY,CAiBzC,IAhBI,OAAOO,GAAI,KAAK,aAAgB,UAAYA,GAAI,KAAK,YAAc,KACrEA,GAAI,KAAK,YAAc,GAGrB,OAAOA,GAAI,KAAK,MAAS,YAC3BA,GAAI,KAAK,KAAO,IAGd,OAAOA,GAAI,KAAK,OAAU,YAC5BA,GAAI,KAAK,MAAQ,IAGf,OAAOA,GAAI,KAAK,OAAU,YAC5BA,GAAI,KAAK,MAAQ,IAGf,OAAOA,GAAI,KAAK,YAAe,UAAY,CAAC,OAAO,UAAUA,GAAI,KAAK,UAAU,GAAKA,GAAI,KAAK,YAAc,EAAG,EAG5G,OAAO,KAAS,KAAe,CAAC,KAAK,qBACrC,OAAO,QAAY,KAAe,QAAQ,UAAY,QAAQ,SAAS,QAC1EA,GAAI,KAAK,WAAa,GAExB,IAAMC,EAAqB,OAAO,UAAc,IAAc,SAAK,EAAE,OAAS,UAAU,oBACxFD,GAAI,KAAK,WAAa,KAAK,IAAI,EAAG,KAAK,MAAMC,GAAsB,GAAK,CAAC,CAAC,CAC5E,CACF,EAEaP,GAAN,KAAuD,CAS5D,MAAM,KAAKQ,EAAoC,CAE7CT,GAAgB,EAGhB,MAAMU,GAAmC,EAGzC,MAAMC,GAAgBF,CAAW,CACnC,CAKA,MAAM,8BAA8BG,EAAiCC,EAChC,CACnC,IAAMC,EAAU,IAAIC,GACpB,aAAMD,EAAQ,UAAUF,EAAcC,CAAO,EACtC,QAAQ,QAAQC,CAAO,CAChC,CACF,ICzEA,IAAAE,GAAA,GAAAC,GAAAD,GAAA,iBAAAE,KAAA,IAIaA,GAJbC,GAAAC,GAAA,kBAGAC,KACaH,GAAc,IAAII,KCI/BC,KACAA,KAGAA,KCNO,IAAMC,GAAU,SDIvB,IAAOC,GAAQC,GAUe,CAC5B,IAAMC,EAA4C,cAAoC,YAGpF
C,GAAgB,SAAUD,EAAa,CAAC,EACxCC,GAAgB,QAASD,EAAa,CAAC,EAEzCC,GAAgB,MAAOD,EAAa,EAAE,EACtCC,GAAgB,OAAQD,EAAa,EAAE,CACzC,CAEA,OAAO,eAAeE,GAAI,SAAU,MAAO,CAAC,MAAOC,GAAS,WAAY,EAAI,CAAC", + "names": ["backends", "backendsSortedByPriority", "registerBackend", "tryResolveAndInitializeBackend", "resolveBackendAndExecutionProviders", "init_backend_impl", "__esmMin", "name", "backend", "priority", "currentBackend", "i", "backendName", "backendInfo", "isInitializing", "e", "options", "eps", "backendHints", "backendNames", "errors", "availableBackendNames", "resolveResult", "err", "filteredEps", "target", "prop", "init_backend", "__esmMin", "init_backend_impl", "version", "init_version", "__esmMin", "logLevelValue", "env", "init_env_impl", "__esmMin", "init_version", "version", "value", "env", "init_env", "__esmMin", "init_env_impl", "tensorToDataURL", "tensorToImageData", "init_tensor_conversion_impl", "__esmMin", "tensor", "options", "canvas", "pixels2DContext", "width", "height", "inputformat", "norm", "normMean", "normBias", "stride", "rTensorPointer", "gTensorPointer", "bTensorPointer", "aTensorPointer", "i", "j", "G", "B", "A", "image", "channels", "step", "rImagePointer", "gImagePointer", "bImagePointer", "aImagePointer", "bufferToTensor", "tensorFromImage", "tensorFromTexture", "tensorFromGpuBuffer", "tensorFromPinnedBuffer", "init_tensor_factory_impl", "__esmMin", "init_tensor_impl", "buffer", "options", "height", "width", "norm", "normMean", "normBias", "inputformat", "outputformat", "stride", "float32Data", "step", "rImagePointer", "gImagePointer", "bImagePointer", "aImagePointer", "rTensorPointer", "gTensorPointer", "bTensorPointer", "aTensorPointer", "i", "Tensor", "image", "isHTMLImageEle", "isImageDataEle", "isImageBitmap", "isString", "data", "bufferToTensorOptions", "createCanvas", "createCanvasContext", "canvas", "pixels2DContext", "tempCanvas", "resolve", "reject", "context", "newImage", "img", "texture", "download", "dispose", "dims", "gpuBuffer", "dataType", "type", "NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP", "NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP", "isTypedArrayChecked", "checkTypedArray", "init_tensor_impl_type_mapping", "__esmMin", "isBigInt64ArrayAvailable", "isBigUint64ArrayAvailable", "isFloat16ArrayAvailable", "calculateSize", "tensorReshape", "init_tensor_utils_impl", "__esmMin", "init_tensor_impl", "dims", "size", "i", "dim", "tensor", "Tensor", "Tensor", "init_tensor_impl", "__esmMin", "init_tensor_conversion_impl", "init_tensor_factory_impl", "init_tensor_impl_type_mapping", "init_tensor_utils_impl", "arg0", "arg1", "arg2", "checkTypedArray", "type", "dims", "expectedTypedArrayConstructor", "NUMERIC_TENSOR_TYPE_TO_TYPEDARRAY_MAP", "data", "maybeDims", "typedArrayConstructor", "firstElementType", "mappedType", "NUMERIC_TENSOR_TYPEDARRAY_TO_TYPE_MAP", "size", "calculateSize", "image", "options", "tensorFromImage", "texture", "tensorFromTexture", "gpuBuffer", "tensorFromGpuBuffer", "buffer", "tensorFromPinnedBuffer", "tensorToDataURL", "tensorToImageData", "releaseData", "tensorReshape", "Tensor", "init_tensor", "__esmMin", "init_tensor_impl", "TRACE", "TRACE_FUNC", "TRACE_FUNC_BEGIN", "TRACE_FUNC_END", "init_trace", "__esmMin", "init_env_impl", "deviceType", "label", "env", "msg", "extraMsg", "stack", "hasTraceFunc", "i", "InferenceSession", "init_inference_session_impl", "__esmMin", "init_backend_impl", "init_tensor", "init_trace", "_InferenceSession", "handler", "feeds", "arg1", "arg2", "TRACE_FUNC_BEGIN", "fetches", "options", "Tensor", "isFetchesEmpty", "name", 
"isFetches", "arg1Keys", "v", "results", "returnValue", "key", "result", "TRACE_FUNC_END", "arg0", "arg3", "filePathOrUint8Array", "buffer", "byteOffset", "byteLength", "backend", "optionsWithValidatedEPs", "resolveBackendAndExecutionProviders", "InferenceSession", "init_inference_session", "__esmMin", "init_inference_session_impl", "init_tensor_conversion", "__esmMin", "init_tensor_factory", "__esmMin", "init_onnx_model", "__esmMin", "init_onnx_value", "__esmMin", "noBackendErrMsg", "TrainingSession", "init_training_session_impl", "__esmMin", "init_backend_impl", "init_tensor", "_TrainingSession", "handler", "hasOptimizerModel", "hasEvalModel", "trainingOptions", "sessionOptions", "evalModel", "optimizerModel", "options", "backend", "optionsWithValidatedEPs", "resolveBackendAndExecutionProviders", "inputNames", "outputNames", "feeds", "arg1", "arg2", "fetches", "Tensor", "isFetchesEmpty", "name", "isFetches", "arg1Keys", "v", "results", "returnValue", "key", "result", "trainableOnly", "array", "paramsSize", "TrainingSession", "init_training_session", "__esmMin", "init_training_session_impl", "esm_exports", "__export", "InferenceSession", "TRACE", "TRACE_FUNC_BEGIN", "TRACE_FUNC_END", "Tensor", "TrainingSession", "env", "registerBackend", "init_esm", "__esmMin", "init_backend", "init_env", "init_inference_session", "init_tensor", "init_tensor_conversion", "init_tensor_factory", "init_trace", "init_onnx_model", "init_onnx_value", "init_training_session", "fs_exports", "__export", "createReadStream", "readFile", "readFileSync", "init_fs", "__esmMin", "path_exports", "__export", "join", "init_path", "__esmMin", "require_ort_wasm_simd_jsep", "__commonJSMin", "exports", "module", "ortWasm", "_scriptDir", "moduleArg", "g", "aa", "ba", "readyPromise", "a", "b", "da", "c", "d", "e", "h", "t", "k", "l", "ca", "ea", "fa", "ha", "ia", "ja", "ka", "v", "la", "ma", "na", "fs", "oa", "pa", "qa", "w", "ra", "x", "sa", "z", "E", "ta", "ua", "G", "I", "va", "wa", "xa", "ya", "za", "Aa", "Ba", "Ca", "Da", "Ea", "Fa", "Ga", "Ha", "Ia", "Ja", "Ka", "La", "Ma", "Na", "Oa", "J", "m", "n", "q", "r", "p", "u", "y", "Pa", "Qa", "Ra", "L", "Sa", "Za", "Ta", "Ua", "$a", "ab", "bb", "M", "cb", "N", "db", "eb", "fb", "O", "gb", "P", "hb", "ib", "Q", "jb", "R", "S", "kb", "lb", "mb", "nb", "ob", "pb", "qb", "rb", "sb", "tb", "vb", "ub", "T", "wb", "xb", "yb", "zb", "U", "Ab", "V", "Bb", "Cb", "Db", "Eb", "Fb", "Gb", "Hb", "Ib", "Jb", "Kb", "Lb", "Mb", "Nb", "Ob", "Pb", "Qb", "Rb", "Sb", "Tb", "Ub", "Vb", "Wb", "W", "Xb", "Yb", "Zb", "$b", "ac", "cc", "bc", "dc", "ec", "fc", "gc", "hc", "B", "A", "ic", "jc", "Cf", "kc", "lc", "X", "mc", "nc", "oc", "pc", "qc", "rc", "sc", "tc", "uc", "vc", "wc", "xc", "yc", "zc", "Ac", "Bc", "Cc", "Dc", "Ec", "Fc", "Gc", "Hc", "Ic", "Jc", "Kc", "Lc", "Mc", "Nc", "Oc", "Pc", "Qc", "Rc", "Sc", "Tc", "Uc", "Vc", "Wc", "Xc", "Yc", "Zc", "$c", "ad", "bd", "cd", "dd", "ed", "fd", "gd", "hd", "jd", "kd", "ld", "md", "nd", "od", "pd", "qd", "rd", "sd", "td", "ud", "vd", "wd", "xd", "yd", "zd", "Ad", "Bd", "Cd", "Dd", "Ed", "Fd", "Gd", "Hd", "Id", "Jd", "Kd", "Ld", "Md", "Nd", "Od", "Pd", "Qd", "Rd", "Sd", "Td", "Ud", "Vd", "Wd", "Xd", "Yd", "Zd", "$d", "ae", "be", "ce", "de", "ee", "fe", "ge", "he", "ie", "je", "ke", "le", "me", "ne", "oe", "pe", "qe", "re", "se", "te", "ue", "ve", "we", "xe", "ye", "ze", "Ae", "Be", "Ce", "De", "Ee", "Fe", "Ge", "He", "Ie", "Je", "Ke", "Le", "Me", "Ne", "Oe", "Pe", "Qe", "Re", "Se", "Te", "Ue", "Ve", "We", "Xe", "Ye", "Ze", "$e", "af", "bf", "cf", "df", "ef", 
"ff", "gf", "hf", "jf", "kf", "lf", "mf", "nf", "of", "pf", "qf", "rf", "sf", "tf", "uf", "vf", "wf", "xf", "yf", "zf", "Af", "Bf", "Df", "Y", "Ef", "Z", "dynCall_vii", "Ff", "dynCall_iii", "Gf", "Hf", "dynCall_vi", "dynCall_v", "If", "Jf", "Kf", "Lf", "Mf", "Nf", "Of", "Pf", "Qf", "Rf", "Sf", "Tf", "Uf", "Vf", "Wf", "Xf", "Yf", "Zf", "$f", "ag", "bg", "cg", "dg", "eg", "fg", "gg", "hg", "ig", "jg", "kg", "lg", "mg", "ng", "og", "pg", "qg", "rg", "sg", "tg", "ug", "vg", "wg", "xg", "yg", "zg", "Ag", "Bg", "Cg", "Dg", "Eg", "Fg", "Gg", "Hg", "Ig", "Jg", "Kg", "Lg", "Mg", "Ng", "Og", "Pg", "Qg", "Rg", "Sg", "Tg", "Ug", "Vg", "Wg", "C", "D", "Xg", "Yg", "Zg", "$g", "ah", "bh", "ch", "dh", "eh", "fh", "gh", "hh", "F", "ih", "jh", "kh", "lh", "mh", "nh", "oh", "ph", "qh", "rh", "sh", "th", "uh", "vh", "wh", "xh", "yh", "zh", "Ah", "Bh", "Ch", "Dh", "Eh", "Fh", "Gh", "Hh", "Ih", "Jh", "Kh", "Lh", "Mh", "Nh", "Oh", "Ph", "Qh", "Rh", "H", "K", "Va", "Wa", "Xa", "Sh", "Th", "Uh", "Vh", "Wh", "Xh", "Yh", "Zh", "$h", "ai", "bi", "ci", "di", "ei", "fi", "gi", "hi", "ii", "ji", "ki", "li", "mi", "ni", "oi", "pi", "qi", "ri", "si", "ti", "ui", "vi", "wi", "xi", "yi", "zi", "Ai", "Bi", "Ci", "Di", "Ei", "Fi", "Gi", "Hi", "Ii", "Ji", "Ki", "Li", "Mi", "Ni", "Qi", "Ya", "Oi", "Pi", "Ri", "require_worker_threads", "__commonJSMin", "require_perf_hooks", "__commonJSMin", "os_exports", "__export", "cpus", "init_os", "__esmMin", "require_ort_wasm_simd_threaded_jsep", "__commonJSMin", "exports", "module", "ortWasmThreaded", "_scriptDir", "moduleArg", "d", "l", "p", "t", "v", "aa", "z", "ba", "A", "ca", "da", "ea", "B", "fa", "C", "a", "b", "c", "e", "f", "h", "k", "q", "n", "m", "r", "w", "x", "D", "g", "u", "ha", "ia", "ja", "E", "ka", "F", "G", "H", "I", "la", "ma", "J", "na", "fs", "oa", "pa", "qa", "ra", "K", "L", "noExitRuntime", "M", "N", "sa", "P", "Q", "ta", "ua", "va", "wa", "xa", "ya", "R", "za", "S", "Aa", "Ba", "Ca", "T", "Da", "Ea", "Fa", "Ga", "U", "Ha", "y", "V", "Ia", "Ja", "Ka", "W", "La", "Ma", "Na", "Oa", "X", "Qa", "Pa", "Ra", "Sa", "Ta", "Ua", "Va", "Wa", "Xa", "Ya", "Za", "$a", "ab", "bb", "cb", "db", "eb", "fb", "gb", "hb", "ib", "jb", "kb", "lb", "mb", "nb", "ob", "pb", "qb", "rb", "sb", "tb", "ub", "vb", "Y", "wb", "xb", "yb", "zb", "Bb", "Ab", "Cb", "Db", "Fb", "Eb", "Gb", "Hb", "Ib", "Jb", "Lb", "Kb", "Mb", "Nb", "Ob", "Pb", "Qb", "Rb", "Tb", "Ub", "Vb", "Wb", "Xb", "Yb", "Sb", "O", "Zb", "$b", "ac", "Z", "bc", "cc", "dc", "ec", "fc", "gc", "hc", "ic", "jc", "kc", "lc", "mc", "nc", "oc", "pc", "qc", "tc", "rc", "sc", "uc", "vc", "wc", "xc", "require_ort_wasm_threaded_worker", "__commonJSMin", "exports", "module", "ortWasmFactory", "ortWasmFactoryThreaded", "wasm", "initialized", "initializing", "aborted", "isMultiThreadSupported", "isSimdSupported", "getWasmFileName", "initializeWebAssembly", "getInstance", "init_wasm_factory", "__esmMin", "numThreads", "useSimd", "useThreads", "flags", "timeout", "simd", "wasmPaths", "wasmPrefixOverride", "wasmFileName", "wasmPathOverride", "isTimeout", "tasks", "resolve", "reject", "factory", "config", "fileName", "scriptDirectory", "prefix", "scriptSourceCode", "module", "what", "allocWasmString", "iterateExtraOptions", "checkLastError", "init_wasm_utils", "__esmMin", "init_wasm_factory", "data", "allocs", "wasm", "getInstance", "dataLength", "dataOffset", "options", "prefix", "seen", "handler", "key", "value", "name", "message", "stack", "paramsOffset", "errorCode", "errorMessagePointer", "errorMessage", "setRunOptions", "init_run_options", 
"__esmMin", "init_wasm_factory", "init_wasm_utils", "options", "wasm", "getInstance", "runOptionsHandle", "allocs", "runOptions", "tagDataOffset", "allocWasmString", "checkLastError", "iterateExtraOptions", "key", "value", "keyDataOffset", "valueDataOffset", "e", "alloc", "getGraphOptimzationLevel", "getExecutionMode", "appendDefaultOptions", "setExecutionProviders", "setSessionOptions", "init_session_options", "__esmMin", "init_wasm_factory", "init_wasm_utils", "graphOptimizationLevel", "executionMode", "options", "session", "ep", "sessionOptionsHandle", "executionProviders", "allocs", "epName", "webnnOptions", "keyDataOffset", "allocWasmString", "valueDataOffset", "getInstance", "checkLastError", "numThreads", "webgpuOptions", "epNameDataOffset", "wasm", "sessionOptions", "logIdDataOffset", "logSeverityLevel", "logVerbosityLevel", "optimizedModelFilePathOffset", "name", "value", "nameOffset", "iterateExtraOptions", "key", "e", "alloc", "tensorDataTypeStringToEnum", "tensorDataTypeEnumToString", "getTensorElementSize", "tensorTypeToTypedArrayConstructor", "logLevelStringToEnum", "isGpuBufferSupportedType", "dataLocationStringToEnum", "init_wasm_common", "__esmMin", "type", "typeProto", "dateType", "logLevel", "location", "loadFile", "init_wasm_utils_load_file", "__esmMin", "file", "e", "stream", "chunks", "chunk", "response", "contentLengthHeader", "fileSize", "reader", "buffer", "pages", "offset", "done", "value", "chunkSize", "logLevelPrefix", "doLog", "configLogLevel", "debug", "configureLogger", "LOG", "LOG_DEBUG", "init_log", "__esmMin", "init_wasm_common", "level", "message", "$configLogLevel", "$debug", "logLevel", "msg", "messageLevel", "logLevelStringToEnum", "configLevel", "args", "createView", "init_tensor_view", "__esmMin", "init_wasm_common", "dataBuffer", "type", "tensorTypeToTypedArrayConstructor", "init_types", "__esmMin", "bucketFreelist", "bucketArr", "calcNormalizedBufferSize", "calcBucketBufferSize", "guid", "createNewGpuDataId", "downloadGpuData", "GpuDataManagerImpl", "createGpuDataManager", "init_gpu_data_manager", "__esmMin", "init_log", "init_types", "size", "idx", "sizeForBucket", "backend", "gpuBuffer", "originalSize", "getTargetBuffer", "bufferSize", "gpuReadBuffer", "commandEncoder", "arrayBuffer", "targetBuffer", "key", "id", "data", "srcArrayBuffer", "srcOffset", "srcLength", "gpuDataCache", "gpuBufferForUploading", "LOG_DEBUG", "sourceId", "destinationId", "sourceGpuDataCache", "destinationGpuDataCache", "buffer", "previousBuffer", "usage", "isStorage", "isUniform", "buffers", "gpuData", "cachedData", "maxInFreeList", "freelist", "capturedBuffers", "storage", "sessionId", "pendingBuffers", "args", "AttributeWithCacheKeyImpl", "createAttributeWithCacheKey", "init_attribute_with_cache_key", "__esmMin", "attribute", "name", "MatMulUtil", "BroadcastUtil", "ShapeUtil", "PoolConvUtil", "GemmUtil", "MIN_CLIP", "MAX_CLIP", "init_util", "__esmMin", "a", "b", "adims", "bdims", "isMatMul", "arank", "brank", "crank", "cdims", "cShapeMatMul", "i", "aLen", "bLen", "max", "shape", "finalShape", "inputRank", "finalRank", "_ShapeUtil", "dims", "size", "rank", "newDims", "axis", "start", "end", "strides", "tensorRank", "axes", "x", "perm", "v", "pad", "shape1", "shape2", "_PoolConvUtil", "isGlobalOperator", "inputDims", "kernelShape", "dilations", "pads", "dim", "isChannelLast", "autoPad", "outputDims", "filterDims", "inSize", "stride", "dilation", "kernel", "padHeadIndex", "padTailIndex", "dkernel", "padNeeded", "leftShape", "transLeft", "rightShape", "transRight", 
"biasShape", "M", "K", "N", "kDim", "WORKGROUP_SIZE", "getWgslMappedType", "tensorTypeToWsglStorageType", "tensorTypeToWsglValueType", "createTensorShapeVariables", "getMaxComponents", "fillVector", "castToF32", "sumVector", "getElementAt", "createIndicesHelper", "inputVariable", "outputVariable", "internalVariable", "ShaderHelperImpl", "createShaderHelper", "getBroadcastDims", "init_common", "__esmMin", "init_wasm_common", "init_util", "type", "components", "mappedType", "dims", "programUniforms", "dim", "ShapeUtil", "size", "dataType", "value", "name", "index", "length", "tensorType", "shapeOrRank", "usage", "useUniform", "rank", "rankIdentity", "indicesType", "valueType", "storageType", "normalizeDim", "implementationUsed", "uniformPrefix", "shape", "strides", "o2iSnippet", "i", "offsetToIndicesImplementation", "offsetToIndices", "varOffset", "offsets", "indicesToOffsetImplementation", "indicesToOffset", "varIndices", "indices", "init", "indicesGet", "idx", "indicesSet", "broadcastedIndicesToOffsetImplementation", "broadcastedIndicesToOffset", "output", "implKey", "setByOffset", "offset", "getByOffset", "getByIndicesImplementation", "getImplementation", "functionParams", "dimsParams", "get", "normalizedIndices", "getByIndices", "setByIndicesImplementation", "setImplementation", "impls", "needShapeStrides", "impl", "indicesAndValue", "normalizedDispatchGroup", "limits", "workgroupSize", "workgroupSizeX", "workgroupSizeY", "workgroupSizeZ", "is1DimensionDispatch", "paramList", "globalIdxDefinition", "variable", "bindingIndex", "access", "variables", "v", "additionalUniforms", "uniformSnippets", "typeTemp", "uniformWgslTypeToDataType", "u", "dispatchGroup", "inShape", "outShape", "inRank", "a", "validateInputs", "getAdjustedPerm", "getOutputShape", "permFunctionBody", "createTransposeProgramInfo", "transpose", "parseTransposeAttributes", "init_transpose", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "inputRank", "perm", "inputShape", "ShapeUtil", "rank", "input", "output", "reverseFunc", "i", "inputTensor", "permAttr", "inputDataType", "outputShape", "outputVariable", "inputVariable", "getShaderSource", "shaderHelper", "outputSize", "createTensorShapeVariables", "context", "attributes", "createAttributeWithCacheKey", "reduceOps", "reduceSharedOps", "reduceInitValues", "reduceOutputValues", "getInnerMostAxes", "computeOutAndReduceShapes", "expandShapeToKeepDim", "areAxesInnerMostDims", "getAxesPermutation", "createReduceSharedProgramInfo", "reduceCommon", "reduceMeanShared", "reduceL1Shared", "reduceL2Shared", "reduceLogSumExpShared", "reduceMaxShared", "reduceMinShared", "reduceProdShared", "reduceSumShared", "reduceSumSquareShared", "reduceLogSumShared", "init_reduce_shared", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_reduce", "init_transpose", "numInnerAxes", "rank", "res", "i", "shape", "axes", "outputShape", "dim", "reduceShape", "expandShape", "shapeIdx", "axis", "name", "shaderCache", "inputs", "reduceType", "outputDataType", "inputShape", "outputSize", "ShapeUtil", "reduceSize", "input", "inputVariable", "output", "outputVariable", "workgroupSize", "sharedMemorySnippet", "shaderHelper", "context", "attributes", "updatedAttributes", "createReduceAttributesFromInputs", "updatedAxes", "_dim", "normalizeAxes", "permutedAxes", "createTransposeProgramInfo", "finalOutputShape", "validateInputs", "noOp", "createReduceProgramInfo", "createReduceAttributesFromInputs", "runReduceProgram", "reduceLogSumNaive", 
"reduceL1Naive", "reduceL2Naive", "reduceLogSumExpNaive", "reduceMaxNaive", "reduceMeanNaive", "reduceMinNaive", "reduceProdNaive", "reduceSumNaive", "reduceSumSquareNaive", "useNaiveReduceMethod", "reduceMean", "reduceL1", "reduceL2", "reduceLogSumExp", "reduceMax", "reduceMin", "reduceProd", "reduceSum", "reduceSumSquare", "reduceLogSum", "init_reduce", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "init_reduce_shared", "inputs", "input", "name", "shaderCache", "reduceOp", "axesInput", "outputDataType", "keepDims", "noopWithEmptyAxes", "outputShape", "inputShape", "inputRank", "axes", "ShapeUtil", "reduceOnAllAxes", "d", "i", "outputRank", "outputSize", "shaderHelper", "idxCopy", "inputVariable", "output", "outputVariable", "ops", "reduceOps", "k", "l", "createTensorShapeVariables", "attributes", "v", "createAttributeWithCacheKey", "context", "updatedAttributes", "_output", "idxZero", "size", "shape", "reduceSize", "dim", "reduceMeanShared", "reduceL1Shared", "reduceL2Shared", "reduceLogSumExpShared", "reduceMaxShared", "reduceMinShared", "reduceProdShared", "reduceSumShared", "reduceSumSquareShared", "reduceLogSumShared", "validateInputs", "argMin", "argMax", "parseArgMinMaxAttributes", "init_argminmax", "__esmMin", "init_wasm_common", "init_attribute_with_cache_key", "init_reduce", "inputs", "context", "attributes", "argMinMaxOp", "input", "output", "axes", "idxZero", "k", "createReduceProgramInfo", "createAttributeWithCacheKey", "validateInputs", "calculateInputIndexImpl", "assignOutputData", "createConcatProgramInfo", "concat", "parseConcatAttributes", "init_concat", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "axis", "referenceIndex", "referenceInput", "inputType", "inputRank", "input", "i", "dim", "numberOfTensors", "sizeInConcatAxisStr", "output", "codeLines", "returnSnippet", "adjustedAxis", "outputShape", "dataType", "outputSize", "ShapeUtil", "sizeInConcatAxis", "inputVars", "previousSum", "inputDependencies", "inputRanks", "programUniforms", "inputVariable", "createTensorShapeVariables", "outputVariable", "indicesAxis", "getShaderSource", "shaderHelper", "context", "attributes", "inputShape", "sum", "nonEmptyInputs", "createAttributeWithCacheKey", "validateAttentionInputs", "createInPlaceSoftmaxProgramInfo", "createAttentionProbsProgramInfo", "createVxAttentionScoreProgramInfo", "applyAttention", "prepare", "attention", "init_attention", "__esmMin", "init_wasm_common", "init_types", "init_common", "init_concat", "inputs", "attributes", "input", "weights", "bias", "maskIndex", "past", "relativePositionBias", "batchSize", "sequenceLength", "inputHiddenSize", "qHiddenSize", "kHiddenSize", "vHiddenSize", "sz", "kvSequenceLength", "pastSequenceLength", "totalSequenceLength", "maxSequenceLength", "maskType", "_context", "n", "d", "components", "getMaxComponents", "WG", "dComp", "elementsPerThread", "programUniforms", "dataType", "tensorTypeToWsglStorageType", "f32Type", "tensorTypeToWsglValueType", "getShaderSource", "shaderHelper", "inputHelper", "outputVariable", "uniforms", "q", "key", "parameters", "probsShape", "alpha", "vectorizedHeadSize", "TILE_SIZE", "dispatch", "inputDependencies", "createTensorShapeVariables", "qInput", "inputVariable", "kInput", "inputVars", "relativePositionBiasInput", "output", "probs", "v", "params", "outputShape", "probsHelper", "vHelper", "context", "k", "_maskIndex", "_past", "pastKey", "pastValue", "outputPresentKey", "outputPresentValue", 
"presentKeyShape", "concatKeyInputs", "createConcatProgramInfo", "presentValueShape", "concatValueInputs", "value", "inputsK", "inputsV", "M", "K", "N", "outputQ", "outputK", "outputV", "weight", "validateInputs", "createBatchNormInferenceProgramInfo", "parseBatchNormAttributes", "batchNorm", "init_batch_norm", "__esmMin", "init_esm", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "checkShapeEqual", "actual", "expected", "message", "r", "v", "i", "shape", "epsilon", "spatial", "format", "yShape", "components", "getMaxComponents", "cComponents", "outputSize", "ShapeUtil", "useShapesUniforms", "shapeOrRank", "x", "inputVariable", "scale", "bias", "inputMean", "inputVar", "y", "outputVariable", "calcCOffset", "cOffset", "getInferenceModeShaderSource", "helper", "createTensorShapeVariables", "createAttributeWithCacheKey", "context", "outputCount", "updatedAttributes", "env", "validateInputs", "createBiasAddProgramInfo", "biasAdd", "init_bias_add", "__esmMin", "init_util", "init_common", "inputs", "outputShape", "channels", "outputSize", "ShapeUtil", "dataType", "input", "inputVariable", "bias", "residual", "output", "outputVariable", "shaderHelper", "context", "createElementwiseProgramShader", "createElementwiseProgramInfo", "abs", "acos", "acosh", "asin", "asinh", "atan", "atanh", "parseCastAttributes", "cast", "generateClipAttributesFromInputs", "clip", "ceil", "cos", "cosh", "parseAlphaAttributes", "elu", "erfImpl", "erf", "exp", "floor", "gelu", "leakyRelu", "not", "neg", "reciprocal", "relu", "sigmoid", "parseHardSigmoidAttributes", "hardSigmoid", "sin", "sinh", "sqrt", "tan", "tanhExpression", "tanh", "fastGeluImpl", "fastGeluExpression", "fastGelu", "thresholdedRelu", "log", "init_unary_op", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "shaderHelper", "datasize", "inputDataType", "outputDataType", "funcCall", "additionalImplementation", "vecSize", "expression", "input", "inputVariable", "output", "outputVariable", "name", "cacheKey", "ShapeUtil", "inputTensors", "context", "attributes", "createAttributeWithCacheKey", "func", "inputs", "min", "MIN_CLIP", "max", "MAX_CLIP", "clipAttributes", "dataType", "tensorTypeToWsglValueType", "a", "varType", "x", "validateInputs", "createBiasSplitGeluProgramInfo", "biasSplitGelu", "init_bias_split_gelu", "__esmMin", "init_util", "init_common", "init_unary_op", "inputs", "outputShape", "input", "inputVariable", "bias", "output", "outputVariable", "outputSize", "ShapeUtil", "dataType", "tensorTypeToWsglStorageType", "shaderHelper", "erfImpl", "context", "createBinaryOpProgramShader", "createBinaryOpProgramInfo", "runBinaryOp", "add", "div", "equal", "mul", "pow", "sub", "greater", "less", "greaterOrEqual", "lessOrEqual", "init_binary_op", "__esmMin", "init_wasm_common", "init_util", "init_common", "shaderHelper", "dimsA", "dimsB", "dimsOutput", "vectorize", "doBroadcast", "sharedDimensionDivisibleBy4", "funcCall", "typeA", "typeB", "typeOutput", "additionalImplementation", "expressionScalar", "expressionVector", "a", "b", "output", "outputVariable", "inputVariable", "assignment", "isAOneElement", "ShapeUtil", "isBOneElement", "aLastDimDivisibleBy4", "bLastDimDivisibleBy4", "singleAssignment", "resStr", "x", "typeCast", "expressionA", "expressionB", "name", "cacheKey", "outputDataType", "isBroadcast", "outputShape", "outputSize", "cacheKeyAux", "calculatedShape", "BroadcastUtil", "sharedDimension", "i", "dimA", "dimB", "createTensorShapeVariables", 
"context", "type", "getActivationSnippet", "appendActivationUniformsData", "appendActivationUniforms", "parseInternalActivationAttributes", "init_fuse_utils", "__esmMin", "init_wasm_common", "init_util", "attributes", "valueType", "baseType", "programUniform", "uniforms", "activation", "alpha", "beta", "clipMin", "clipMax", "MIN_CLIP", "MAX_CLIP", "typeSnippet", "biasSnippet", "init_activation_util", "__esmMin", "component", "dataType", "hasBias", "utilFunctions", "init_conv_util", "__esmMin", "strideStr", "writeDataToSubAVec4Snippet", "calculateResultSnippet", "makeMatMulPackedVec4Source", "writeDataToSubASnippet", "readDataFromSubASnippet", "makeMatMulPackedSource", "matMulReadWriteFnSource", "createMatmulProgramInfo", "init_matmul_packed_webgpu", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_fuse_utils", "init_activation_util", "transpose", "batchDims", "transposeA", "innerElementSize", "workPerThread", "workgroupSize", "type", "tileInner", "splitK", "splitedDimInner", "tileAOuter", "tileBOuter", "tileAWidth", "tileAHight", "rowPerThreadB", "sequentialAccessByThreads", "rowPerThreadA", "colPerThreadA", "matmulSnippet", "component", "hasBias", "applyActivation", "variables", "batchShapes", "isChannelsLast", "batchAShape", "batchBShape", "batchShape", "batchVariable", "aVariable", "bVariable", "outputVariable", "broadCastADims", "getBroadcastDims", "broadCastBDims", "dataType", "tensorTypeToWsglStorageType", "getAIndices", "aRank", "batchRank", "resStr", "i", "j", "getBIndices", "bRank", "typeSnippet", "inputs", "activationAttributes", "outputShape", "reshapedOutputShape", "aShape", "bShape", "outerDimsA", "outerDimsB", "outerDims", "batchSize", "ShapeUtil", "dimAOuter", "dimInner", "dimBOuter", "isVec4", "elementsPerThread", "dispatch", "components", "aShapeTemp", "bShapeTemp", "outputShapeTemp", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "internalVariable", "A", "inputVariable", "B", "output", "inputVariables", "biasComponents", "uniforms", "appendActivationUniforms", "baseType", "getActivationSnippet", "declareFunctions", "conv2dCommonSnippet", "createConv2DMatMulProgramInfo", "init_conv2d_mm_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_common", "init_fuse_utils", "init_activation_util", "init_conv_util", "init_matmul_packed_webgpu", "isChannelsLast", "fitAOuter", "fitBOuter", "fitInner", "addBias", "attributes", "innerElementSizeX", "innerElementSizeW", "innerElementSize", "dataType", "getXSnippet", "getWSnippet", "coordASnippet", "coordResSnippet", "xHeight", "xWidth", "row", "col", "readXSnippet", "typeSnippet", "sampleX", "sampleW", "resType", "aType", "bType", "applyActivation", "getActivationSnippet", "biasSnippet", "inputs", "outputShape", "dimAOuter", "dimBOuter", "dimInner", "hasBias", "sequentialAccessByThreads", "inChannels", "batchSize", "outWidth", "outHeight", "outChannels", "isVec4", "dispatchX", "dispatchY", "workGroupSize", "elementsPerThread", "dispatch", "LOG_DEBUG", "tileAOuter", "tileBOuter", "tileInner", "elementsSize", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "uniforms", "appendActivationUniforms", "components", "t", "tensorTypeToWsglStorageType", "declareFunctions", "x", "inputVariable", "w", "inputVariables", "output", "outputVariable", "bias", "utilFunctions", "makeMatMulPackedVec4Source", "makeMatMulPackedSource", 
"createGroupedConvProgramInfo", "createGroupedConvVectorizeProgramInfo", "init_conv_grouped", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_conv", "init_fuse_utils", "inputs", "attributes", "squeezeOutputShapeFunction", "hasBias", "processBias", "xShape", "wShape", "outputChannelsPerGroup", "isChannelLast", "outputShape", "calculateOutputShape", "outputSize", "ShapeUtil", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "output", "outputVariable", "baseType", "tensorTypeToWsglStorageType", "applyActivation", "getActivationSnippet", "x", "inputVariable", "w", "inputVars", "uniforms", "appendActivationUniforms", "components", "getMaxComponents", "outputNumber", "outputShapeInShader", "xNumber", "createNaiveMatmulProgramInfo", "validateInputs", "matMul", "init_matmul", "__esmMin", "init_wasm_common", "init_util", "init_matmul_packed_webgpu", "init_common", "init_fuse_utils", "inputs", "activationAttributes", "outputShape", "reshapedOutputShape", "isChannelsLast", "aShape", "bShape", "M", "N", "K", "components", "getMaxComponents", "aComponents", "outputNumber", "outputSize", "ShapeUtil", "hasBias", "outerDims", "outputShapeInShader", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "batchDims", "internalVariable", "a", "inputVariable", "b", "output", "outputVariable", "baseType", "tensorTypeToWsglStorageType", "applyActivation", "getActivationSnippet", "inputVariables", "processBias", "biasComponents", "outerDimsA", "outerDimsB", "broadCastADims", "getBroadcastDims", "broadCastBDims", "uniforms", "appendActivationUniforms", "getIndices", "variable", "broadCastDims", "rank", "name", "batchRank", "resStr", "i", "j", "calcResult", "calcStr", "context", "BroadcastUtil", "createMatmulProgramInfo", "calculateOutputShape", "weightTransposeAttribute", "validateInputs", "getAdjustedConvAttributes", "parseConvAttributes", "conv2d", "conv1d", "conv", "init_conv", "__esmMin", "init_util", "init_conv2d_mm_webgpu", "init_matmul_packed_webgpu", "init_conv_grouped", "init_fuse_utils", "init_matmul", "init_transpose", "inputShape", "kernelShape", "dilations", "adjustPads", "strides", "isChannelLast", "batchSize", "inputSpatialShape", "spatialRank", "outChannels", "dilatedKernelShape", "v", "i", "outputShape", "inputs", "attributes", "dataChannel", "filterInChannel", "pads", "PoolConvUtil", "newAttributes", "activationAttributes", "parseInternalActivationAttributes", "format", "autoPad", "group", "wIsConst", "context", "adjustedAttributes", "isChannelsLast", "transposedWeight", "createTransposeProgramInfo", "convInputs", "createGroupedConvVectorizeProgramInfo", "createGroupedConvProgramInfo", "hasBias", "inputHeight", "inputWidth", "inputChannels", "weightHeight", "weightWidth", "outHeight", "outWidth", "sameSize", "batch", "xReshaped", "wReshaped", "matmulOutputShape", "matmulInputs", "sharedDim", "N", "K", "createNaiveMatmulProgramInfo", "createMatmulProgramInfo", "sequentialAccessByThreads", "dimAOuter", "dimBOuter", "dimInner", "createConv2DMatMulProgramInfo", "conv2dTransposeCommonSnippet", "createConv2DTransposeMatMulProgramInfo", "init_conv_backprop_mm_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_common", "init_fuse_utils", "init_activation_util", "init_conv_util", "init_matmul_packed_webgpu", "isChannelsLast", "addBias", "attributes", "type", "innerElementSize", "getWSnippet", "coordASnippet", "coordResSnippet", 
"xHeight", "xWidth", "row", "col", "readASnippet", "sampleA", "sampleW", "applyActivation", "getActivationSnippet", "biasSnippet", "inputs", "outputShape", "dimAOuter", "dimBOuter", "dimInner", "hasBias", "sequentialAccessByThreads", "inChannels", "batchSize", "outWidth", "outHeight", "outChannels", "isVec4", "dispatchX", "dispatchY", "workGroupSize", "elementsPerThread", "dispatch", "LOG_DEBUG", "tileInner", "components", "filterDims", "effectiveFilterDims", "pads", "programUniforms", "appendActivationUniformsData", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "x", "inputVariable", "w", "output", "outputVariable", "inputVariables", "declareFunctions", "bias", "uniforms", "appendActivationUniforms", "elemType", "tensorTypeToWsglStorageType", "utilFunctions", "makeMatMulPackedVec4Source", "makeMatMulPackedSource", "createConvTranspose2DOpProgramShaderSource", "createConvTranspose2DProgramInfo", "init_conv_backprop_webgpu", "__esmMin", "init_wasm_common", "init_log", "init_util", "init_common", "shaderHelper", "inputs", "outputShape", "hasBias", "is1DimensionDispatch", "isVec4", "dataType", "uniforms", "isChannelsLast", "rowDim", "colDim", "channelDim", "workPerThread", "declareFunctions", "components", "w", "inputVariable", "dy", "inputVariables", "output", "outputVariable", "codeSnippet4", "codeSnippet", "attributes", "squeezeOutputShapeFunction", "outputSize", "ShapeUtil", "dispatch", "LOG_DEBUG", "inputDependencies", "strides", "filterDims", "dilations", "effectiveFilterDims", "pads", "group", "wShape", "inputChannelsPerGroup", "outputChannelsPerGroup", "programUniforms", "createTensorShapeVariables", "getShaderSource", "tensorTypeToWsglStorageType", "computeTotalPad", "distributePadding", "calculateOutputShapeAndPads", "getAdjustedConvTransposeAttributes", "parseConvTransposeAttributes", "validateInputs", "weightTransposePerm", "convTranspose2d", "convTranspose1d", "convTranspose", "init_conv_transpose", "__esmMin", "init_conv_backprop_mm_webgpu", "init_conv_backprop_webgpu", "init_fuse_utils", "init_transpose", "inDim", "stride", "adj", "kernel", "dilation", "outSize", "totalPad", "autoPad", "pads", "head", "tail", "smallPad", "inputShape", "kernelShape", "dilations", "group", "strides", "isChannelLast", "outputPadding", "outputShape", "spatialRank", "updateOutputShape", "i", "batchSize", "outChannels", "j", "inSize", "attributes", "inputs", "a", "b", "isChannelsLast", "newAttributes", "activationAttributes", "parseInternalActivationAttributes", "format", "wIsConst", "dataChannel", "filterInChannel", "featureMaps", "context", "adjustedAttributes", "inputChannels", "createConvTranspose2DProgramInfo", "outHeight", "outWidth", "weightHeight", "weightWidth", "dimAOuter", "dimBOuter", "dimInner", "sequentialAccessByThreads", "transposedWeight", "createTransposeProgramInfo", "convTransposeInputs", "hasBias", "createConv2DTransposeMatMulProgramInfo", "createCumsumProgramInfo", "cumsum", "parseCumSumAttributes", "init_cumsum", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputType", "inputShape", "axisInput", "attributes", "outputSize", "ShapeUtil", "rank", "input", "inputVariable", "output", "outputVariable", "axisValue", "axis", "getShaderSource", "shaderHelper", "index", "max", "getElementAt", "lowerLimit", "upperLimit", "createTensorShapeVariables", "context", "exclusive", "reverse", "createAttributeWithCacheKey", "validateInputs", "permFunctionBody", "createDepthToSpaceProgramInfo", "depthToSpace", 
"parseDepthToSpaceAttributes", "init_depth_to_space", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "perm", "rank", "input", "output", "reverseFunc", "i", "inputTensor", "attributes", "n", "h", "w", "shape", "isChannelLast", "blocksize", "isDCRmode", "reshapedInputTensor", "reshapedInputRank", "inputDataType", "reshapedInput", "inputVariable", "permedOutput", "outputVariable", "getShaderSource", "shaderHelper", "outputShape", "outputSize", "ShapeUtil", "shapeBeforePerm", "shapeAfterPerm", "createTensorShapeVariables", "context", "createAttributeWithCacheKey", "symbolPattern", "termPattern", "termPatternOnly", "lhsPattern", "lhsPatternOnly", "EinsumTerm", "EinsumEquation", "appendMax", "createEinsumProgramInfo", "einsum", "parseEinsumAttributes", "init_einsum", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputIndex", "symbol", "index", "value", "inputs", "equation", "lhs", "rhs", "inputTerm", "dims", "einsumTerm", "sym", "info", "dimValue", "term", "isInput", "rank", "ellipsis", "ellipsisDims", "nextDim", "indexSymbols", "i", "ellipsisDimLength", "j", "name", "inputShapes", "dataType", "einsumEquation", "outputShape", "inputVars", "inputVariable", "outputSize", "ShapeUtil", "output", "outputVariable", "uniformsSymbols", "getShaderSource", "shaderHelper", "idxCopy", "initProd", "initSum", "updateSum", "reduceOpsSetIndices", "reduceOpsLoopHeaders", "reduceOpsLoopFooters", "reduceOpCompute", "isReduceOpsWithoutLoop", "outputIndex", "indices", "reduceOps", "inputVar", "_var", "programUniformsInit", "programUniforms", "_", "createTensorShapeVariables", "acc", "inputProgramUniforms", "context", "attributes", "input", "createAttributeWithCacheKey", "validateInputs", "getAdjustedShape", "calculateOutputShape", "createExpandProgramInfo", "expand", "init_expand", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "inputShape", "shape", "shapeIndex", "inputShapeIndex", "shape1", "shape2", "diff", "i", "outputShape", "dataType", "components", "outputSize", "ShapeUtil", "getShaderSource", "shaderHelper", "input", "inputVariable", "output", "outputVariable", "assignment", "singleAssignment", "resStr", "x", "typeCast", "programUniforms", "createTensorShapeVariables", "context", "createFastGeluProgramInfo", "fastGelu", "init_fast_gelu", "__esmMin", "init_wasm_common", "init_util", "init_common", "init_unary_op", "inputTensors", "dataType", "outputSize", "ShapeUtil", "biasLength", "useVec4", "getShaderSource", "shaderHelper", "x", "inputVariable", "bias", "outputVariable", "uniforms", "singleElementBias", "i", "biasGetExpression", "fastGeluImpl", "tensorTypeToWsglValueType", "WORKGROUP_SIZE", "fastGeluExpression", "inputs", "context", "validateInputs", "createGatherProgramInfo", "parseGatherAttributes", "gather", "init_gather", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "inputShape", "indicesShape", "inputRank", "axis", "ShapeUtil", "outputShape", "axisDimLimit", "components", "outputSize", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "data", "inputVariable", "indices", "output", "outputVariable", "calcDataIndices", "x", "indicesRank", "calcStr", "i", "j", "assignment", "singleAssignment", "resStr", "typeCast", "createAttributeWithCacheKey", "context", "validateInputs", "createGatherElementsProgramInfo", "parseGatherElementsAttributes", "gatherElements", 
"init_gather_elements", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "inputShape", "inputOutputDataType", "inputRank", "indicesShape", "indicesDataType", "axis", "ShapeUtil", "axisDimLimit", "outputShape", "outputSize", "input", "inputVariable", "indices", "output", "outputVariable", "programUniforms", "createTensorShapeVariables", "shaderHelper", "createAttributeWithCacheKey", "context", "validateInputs", "createGemmProgramInfo", "parseGemmAttributes", "gemm", "init_gemm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "aShape", "bShape", "M", "N", "K", "GemmUtil", "outputShape", "outputSize", "ShapeUtil", "programUniforms", "inputDependencies", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "line", "calculateAlpha", "a", "inputVariable", "b", "dataType", "c", "variables", "output", "outputVariable", "uniforms", "transA", "transB", "alpha", "beta", "context", "createInstanceNormProgramInfo", "computeMean", "createInstanceNormNHWCProgramInfo", "instanceNorm", "init_instance_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "xShape", "outputShape", "axis", "normCount", "ShapeUtil", "normSize", "components", "getMaxComponents", "normPackedSize", "inputShape", "inputDependencies", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "x", "inputVariable", "scale", "bias", "output", "outputVariable", "variables", "dataType", "f32Type", "workgroupSize", "uniforms", "sumVector", "context", "input", "n", "h", "c", "epsilon", "WG", "outputType", "sumCastType", "setOutputValue", "var1", "var2", "unitsOfWork", "wgSize", "meanInputDependencies", "meanProgramUniforms", "getMeanShaderSource", "inputHelper", "fillVector", "meanValues", "scaleHelper", "biasHelper", "N", "C", "H", "outputSize", "channelScaleShift", "tensorTypeToWsglStorageType", "scaleType", "scaleCastType", "outputHelper", "validateInputs", "createLayerNormProgramInfo", "layerNorm", "init_layer_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "attributes", "outputCount", "simplified", "xShape", "scale", "bias", "outputShape", "axis", "ShapeUtil", "normCount", "normSize", "scaleSize", "biasSize", "meanInvStdDevDim", "i", "components", "getMaxComponents", "inputDependencies", "programUniforms", "hasMeanDataOutput", "hasInvStdOutput", "getShaderSource", "shaderHelper", "dataType", "tensorTypeToWsglStorageType", "variables", "inputVariable", "outputVariable", "uniforms", "fillVector", "castToF32", "sumVector", "outputs", "context", "validateInputs", "createMatMulNBitsProgramInfo", "matMulNBits", "parseMatMulNBitsAttributes", "init_matmulnbits", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "a", "aRank", "nBlocksPerCol", "blobSize", "b", "ShapeUtil", "scalesShape", "zeroPointsShape", "expectedZeroPointsSize", "maxComputeWorkgroupSizes", "maxComputeWorkgroupStorageSize", "inputShape", "dimAOuter", "dimInner", "dimBOuter", "batchDims", "batchSize", "blobSizeInWords", "dataType", "outputNumber", "getMaxComponents", "aComponents", "bComponents", "elementSize", "getTensorElementSize", "workgroupOutputSize", "maxNumberOfComponents", "useBlockwiseMatMulNBits", "components", "outputShape", "outputSize", "programUniforms", "inputShapeTemp", "bShape", "createTensorShapeVariables", "outputShapeTemp", "getShaderSource", "shaderHelper", "inputRank", 
"inputVariable", "scales", "inputVariables", "zeroPoints", "outputRank", "output", "outputVariable", "uniforms", "tensorTypeToWsglStorageType", "qDqDataType", "processOneBlock", "_", "i", "updateZeroPointIndex", "context", "createAttributeWithCacheKey", "getInput", "validateInputs", "parseMultiHeadAttentionAttributes", "weightTransposeAttribute", "addBiasTranspose", "maybeTransposeToBNSHAndAddBias", "multiHeadAttention", "init_multihead_attentiion", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_types", "init_attention", "init_common", "init_transpose", "inputs", "i", "ShapeUtil", "attributes", "query", "key", "value", "bias", "keyPaddingMask", "relativePositionBias", "pastKey", "pastValue", "dmmhaPacking", "batchSize", "sequenceLength", "hiddenSize", "kvSequenceLength", "pastSequenceLength", "maxSequenceLength", "headSize", "qkvFormat", "maskType", "maskDims", "passPastInKv", "vHiddenSize", "totalSequenceLength", "broadcastResPosBias", "createAttributeWithCacheKey", "context", "qkv", "biasOffset", "outputShape", "outputSize", "programUniforms", "getShaderSource", "shaderHelper", "output", "outputVariable", "qkvInput", "inputVariable", "biasInput", "uniforms", "numHeads", "input", "reshapedInput", "createTransposeProgramInfo", "params", "kvBNSH", "Q", "applyAttention", "K", "V", "validateInputs", "getPadConstant", "getPadReflect", "getPadEdge", "getPadWrap", "getPadSnippet", "createPadProgramInfo", "createPadAttributesFromInputs", "pad", "init_pad", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "validPads", "output", "inputRank", "padsLength", "block", "i", "getElementAt", "attributes", "outputShape", "ShapeUtil", "inputDims", "outputSize", "programUniforms", "createTensorShapeVariables", "inputDependencies", "getShaderSource", "shaderHelper", "outputVariable", "input", "inputVariable", "dataType", "padSnippet", "uniforms", "bigInt64Pads", "value", "updatePads", "axes", "v", "pads", "context", "updatedAttributes", "validateInputs", "getAdjustedPoolAttributesAndOutputShape", "getUniformAndPadInfo", "generatePoolingCode", "createShaderKeyFromAttributes", "createAveragePoolShaderKeyFromAttributes", "createMaxPoolShaderKeyFromAttributes", "parsePoolCommonAttributes", "createAveragePoolProgramInfo", "parseAveragePoolAttributes", "averagePool", "globalPoolAttributes", "parseGlobalAveragePoolAttributes", "globalAveragePool", "createMaxPoolProgramInfo", "maxPool", "parseMaxPoolAttributes", "parseGlobalMaxPoolAttributes", "globalMaxPool", "init_pool", "__esmMin", "init_esm", "init_wasm_common", "init_util", "init_common", "inputs", "env", "input", "attributes", "isGlobalOperator", "isChannelsLast", "inputShapeAsChannelFirst", "hasDilations", "kernelShape", "strides", "dilations", "pads", "PoolConvUtil", "outputShapeAsChannelFirst", "newAttributes", "outputShapeAsChannelLast", "outputShape", "outputSize", "ShapeUtil", "kernelSize", "programUniforms", "uniforms", "kw", "sw", "pwStart", "pwEnd", "pwStartEndNotZero", "phStartEndNotZero", "kh", "sh", "phStart", "phEnd", "kernelStrides", "hasPads", "sum", "cur", "shaderHelper", "x", "rank", "outputShapeRank", "op1", "op2", "start", "dataType", "output", "outputVariable", "codeW", "codeH", "codeHEnd", "dimIdxW", "dimIdxH", "stridesRank", "padsRank", "padCode", "getElementAt", "name", "adjustedAttributes", "inputVariable", "createTensorShapeVariables", "inputDependencies", "countIncludePad", "attr", "averagePoolAttributes", "context", "format", "storageOrder", "maxPoolAttributes", 
"validateInputsContent", "createRangeProgramInfo", "range", "init_range", "__esmMin", "init_esm", "init_wasm_common", "init_common", "start", "limit", "delta", "sameStartLimit", "increasingRangeNegativeStep", "decreasingRangePositiveStep", "dataType", "numElements", "outputShape", "outputSize", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "output", "outputVariable", "wgslType", "uniforms", "context", "env", "validateScales", "updateScales", "validateInputs", "getOriginalCoordinateFromResizedCoordinate", "getNearestPixelFromOriginal", "updateRoI", "initOutputShape", "adjustOutputShape", "calculateOriginalIndicesFromOutputIndices", "calculateInputIndicesFromOutputIndices", "checkInputIndices", "setChannelAndBatchIndices", "bilinearInterpolation", "bicubicInterpolation", "trilinearInterpolation", "createResizeProgramInfo", "getOpsetVersionFromCustomDataBuffer", "resize", "parseResizeAttributes", "init_resize", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "scales", "attributes", "value", "axes", "rank", "newScales", "index", "inputs", "opsetVersion", "sizes", "roi", "roiInputIndex", "scalesInputIndex", "sizesInputIndex", "coordinateTransferMode", "dType", "nearestMode", "roiTmp", "roiLocal", "v", "i", "inputShape", "outputShape", "scaleInPolicy", "adjustedOutputShape", "output", "scalesLength", "roiLength", "getElementAt", "input", "useExtrapolation", "channelIdx", "batchIdx", "spacialDims", "extrapolationValue", "heightIdx", "widthIdx", "cubicCoeffA", "excludeOutside", "is2D", "isNchw", "createCubicInterpolationFunction", "idx", "direction", "depthIdx", "inputTensor", "scalesInput", "roiInput", "outputVariable", "inputVariable", "outputSize", "ShapeUtil", "noScale", "d", "dataType", "getShaderSource", "shaderHelper", "createTensorShapeVariables", "context", "customDataBuffer", "antialias", "coordinateTransformMode", "keepAspectRatioPolicy", "mode", "createAttributeWithCacheKey", "validateInputs", "createRotaryEmbeddingProgramInfo", "rotaryEmbedding", "init_rotary_embedding", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "input", "positionIds", "cosCache", "sinCache", "numHeads", "rotaryEmbeddingDim", "ShapeUtil", "batchSize", "sequenceLength", "maxSequenceLength", "hiddenSize", "headSize", "interleaved", "scale", "batchStride", "halfRotaryEmbeddingDim", "globalShape", "globalStrides", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "inputVariable", "output", "outputVariable", "WORKGROUP_SIZE", "createAttributeWithCacheKey", "context", "validateInputs", "createSkipLayerNormProgramInfo", "skipLayerNorm", "init_skip_layer_norm", "__esmMin", "init_wasm_common", "init_util", "init_common", "inputs", "input", "skip", "gamma", "hiddenSize", "sequenceLength", "beta", "bias", "attributes", "outputCount", "isTraining", "simplified", "inputShape", "inputSize", "ShapeUtil", "outputShape", "outputSize", "meanInvStdDevDim", "hasBetaInput", "hasBiasInput", "hasMeanOutput", "hasInvStdDevOutput", "hasInputSkipBiasSumOutput", "components", "getMaxComponents", "programUniforms", "getShaderSource", "shaderHelper", "uniformsArray", "variables", "inputVariable", "outputVariable", "dataType", "tensorTypeToWsglStorageType", "fillVector", "castToF32", "sumVector", "outputs", "_input", "_index", "context", "validateInputs", "readInput", "createSliceAttributesFromInputs", "fixStartEndValues", "calculateInputIndicesImpl", 
"createSliceProgramInfo", "slice", "parseSliceAttributes", "init_slice", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "_", "idx", "input", "v", "starts", "ends", "axes", "createAttributeWithCacheKey", "value", "index", "inputShape", "steps", "newValue", "output", "getElementAt", "inputSize", "ShapeUtil", "step", "start", "i", "end", "signs", "array", "numSteps", "newEnd", "newStart", "outputShape", "axis", "outputTensorInfo", "outputVariable", "inputVariable", "outputSize", "uniforms", "programUniforms", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "context", "updatedAttributes", "validateInputs", "createSoftmaxProgramInfo", "softmax", "parseSoftmaxAttributes", "init_softmax", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "input", "attributes", "shape", "outputSize", "ShapeUtil", "WG", "axis", "cols", "rows", "components", "getMaxComponents", "packedCols", "maxVector", "name", "x", "inputVariable", "output", "outputVariable", "valueType", "threadMaxDecl", "tensorTypeToWsglStorageType", "getShaderSource", "shaderHelper", "sumVector", "context", "createAttributeWithCacheKey", "validateInputs", "createSplitAttributesFromInputs", "calculateOutputIndexImpl", "writeBufferDataImpl", "createSplitProgramInfo", "split", "parseSplitAttributes", "init_split", "__esmMin", "init_wasm_common", "init_util", "init_attribute_with_cache_key", "init_common", "inputs", "attributes", "splitSizes", "numOutputs", "v", "createAttributeWithCacheKey", "numberOfTensors", "getElementAt", "outputs", "codeLines", "i", "returnSnippet", "inputShape", "inputSize", "ShapeUtil", "dataType", "axis", "input", "inputVariable", "sizeInSplitAxis", "outputsTensorInfo", "outputShapes", "previousSum", "programUniforms", "outputShape", "outputVariable", "createTensorShapeVariables", "getShaderSource", "shaderHelper", "context", "updatedAttributes", "getRepeats", "validateInputs", "getOutputShape", "createTileProgramInfo", "tile", "init_tile", "__esmMin", "init_wasm_common", "init_util", "init_common", "repeatsTensorView", "inputs", "inputShape", "repeats", "outputShape", "i", "outputSize", "ShapeUtil", "dataType", "input", "inputVariable", "output", "outputVariable", "getShaderSource", "shaderHelper", "createTensorShapeVariables", "context", "createWhereOpProgramShader", "createWhereOpProgramInfo", "where", "init_where", "__esmMin", "init_wasm_common", "init_util", "init_common", "shaderHelper", "inputs", "dimsOutput", "isBroadcast", "typeOutput", "output", "outputVariable", "a", "inputVariable", "b", "c", "assignment", "expression", "singleAssignment", "resStr", "x", "typeCast", "expressionA", "expressionB", "expressionC", "dimsA", "dimsB", "dimsC", "outputDataType", "ShapeUtil", "outputShape", "outputSize", "calculatedShape", "BroadcastUtil", "vecSize", "createTensorShapeVariables", "context", "WEBGPU_OP_RESOLVE_RULES", "init_op_resolve_rules", "__esmMin", "init_argminmax", "init_attention", "init_batch_norm", "init_bias_add", "init_bias_split_gelu", "init_binary_op", "init_concat", "init_conv", "init_conv_transpose", "init_cumsum", "init_depth_to_space", "init_einsum", "init_expand", "init_fast_gelu", "init_gather", "init_gather_elements", "init_gemm", "init_instance_norm", "init_layer_norm", "init_matmul", "init_matmulnbits", "init_multihead_attentiion", "init_pad", "init_pool", "init_range", "init_reduce", "init_resize", "init_rotary_embedding", "init_skip_layer_norm", "init_slice", 
"init_softmax", "init_split", "init_tile", "init_transpose", "init_unary_op", "init_where", "abs", "acos", "acosh", "add", "argMax", "parseArgMinMaxAttributes", "argMin", "asin", "asinh", "atan", "atanh", "attention", "averagePool", "parseAveragePoolAttributes", "batchNorm", "biasAdd", "biasSplitGelu", "cast", "parseCastAttributes", "ceil", "clip", "concat", "parseConcatAttributes", "conv", "parseConvAttributes", "convTranspose", "parseConvTransposeAttributes", "cos", "cosh", "cumsum", "parseCumSumAttributes", "depthToSpace", "parseDepthToSpaceAttributes", "div", "einsum", "parseEinsumAttributes", "elu", "parseAlphaAttributes", "equal", "erf", "exp", "expand", "fastGelu", "floor", "gather", "parseGatherAttributes", "gatherElements", "parseGatherElementsAttributes", "gelu", "gemm", "parseGemmAttributes", "globalAveragePool", "parseGlobalAveragePoolAttributes", "globalMaxPool", "parseGlobalMaxPoolAttributes", "greater", "greaterOrEqual", "hardSigmoid", "parseHardSigmoidAttributes", "instanceNorm", "layerNorm", "leakyRelu", "less", "lessOrEqual", "log", "matMul", "matMulNBits", "parseMatMulNBitsAttributes", "maxPool", "parseMaxPoolAttributes", "mul", "multiHeadAttention", "parseMultiHeadAttentionAttributes", "neg", "not", "pad", "pow", "range", "reciprocal", "reduceMin", "reduceMean", "reduceMax", "reduceSum", "reduceProd", "reduceL1", "reduceL2", "reduceLogSum", "reduceLogSumExp", "reduceSumSquare", "relu", "resize", "parseResizeAttributes", "rotaryEmbedding", "sigmoid", "sin", "sinh", "slice", "parseSliceAttributes", "skipLayerNorm", "split", "parseSplitAttributes", "sqrt", "softmax", "parseSoftmaxAttributes", "sub", "tan", "tanh", "thresholdedRelu", "tile", "transpose", "parseTransposeAttributes", "where", "ProgramManager", "init_program_manager", "__esmMin", "init_esm", "init_log", "init_common", "backend", "key", "artifact", "buildArtifact", "inputs", "outputs", "dispatchGroup", "uniformBufferBinding", "TRACE_FUNC_BEGIN", "device", "computePassEncoder", "entries", "input", "output", "bindGroup", "commandInfo", "TRACE_FUNC_END", "programInfo", "normalizedDispatchGroupSize", "extensions", "shaderHelper", "createShaderHelper", "userCode", "code", "shaderModule", "LOG_DEBUG", "computePipeline", "x", "y", "z", "limitPerDimension", "size", "dispatchAverage", "getProgramInputTensorInfoDependencyKey", "getProgramInfoUniqueKey", "AdapterInfoImpl", "WebGpuBackend", "init_backend_webgpu", "__esmMin", "init_esm", "init_wasm_common", "init_log", "init_tensor_view", "init_gpu_data_manager", "init_op_resolve_rules", "init_program_manager", "inputTensors", "inputDependencies", "inputInfos", "i", "type", "rank", "dims", "programInfo", "is1DimensionDispatch", "key", "adapterInfo", "architecture", "vendor", "data", "env", "adapter", "requiredFeatures", "deviceDescriptor", "createGpuDataManager", "ProgramManager", "configureLogger", "ev", "commandEncoder", "computePassDescriptor", "TRACE_FUNC_BEGIN", "queryReadBuffer", "mappedData", "pendingKernels", "pendingKernelInfo", "kernelId", "kernelInfo", "kernelType", "kernelName", "programName", "inputTensorViews", "outputTensorViews", "startTimeU64", "endTimeU64", "startTime", "endTime", "value", "tensorDataTypeEnumToString", "inputShapes", "outputShapes", "TRACE", "TRACE_FUNC_END", "program", "outputIndices", "createKernelOutput", "createIntermediateOutput", "outputCount", "inputDatas", "gpuData", "outputs", "dispatchGroup", "programUniforms", "validatedOutputIndices", "_", "outputDatas", "isTemporary", "isPersistent", "tensorView", "persistentData", 
"uniformBufferBinding", "currentOffset", "offsets", "v", "sizeOfElement", "sizeOfVecOrMat", "baseAlignment", "elementPerVecOrMat", "maxAlignmentOfField", "arrayBuffer", "offset", "uniformBufferData", "normalizedDispatchGroup", "artifact", "LOG_DEBUG", "uniform", "actualType", "actualLength", "length", "gpuDataId", "src", "dst", "getTargetBuffer", "size", "ptr", "attribute", "op", "WEBGPU_OP_RESOLVE_RULES", "context", "errors", "kernel", "kernelEntry", "attributes", "useErrorScope", "e", "err", "sessionId", "index", "buffer", "sessionInputOutputMapping", "previousBuffer", "id", "bufferInfo", "gpuBuffer", "downloadGpuData", "createView", "sessionCommandList", "sessionPendingKernels", "computePassEncoder", "command", "init_exports", "__export", "init", "TensorViewImpl", "ComputeContextImpl", "init_init", "__esmMin", "init_wasm_common", "init_backend_webgpu", "init_log", "init_util", "_TensorViewImpl", "module", "dataType", "data", "dims", "elementCount", "ShapeUtil", "newDims", "backend", "contextDataOffset", "heapU32", "dataIndex", "inputCount", "inputs", "i", "dim", "d", "program", "inputsOutputsMapping", "mappedInputs", "outputIndices", "createKernelOutput", "index", "createTemporaryOutput", "elementSize", "getTensorElementSize", "bufferSize", "gpuDataId", "stack", "offset", "e", "name", "env", "gpuAdapter", "jsepInit", "WebGpuBackend", "size", "ptr", "src", "dst", "isSourceGpu", "LOG_DEBUG", "dataOffset", "kernelType", "kernelId", "attribute", "kernel", "sessionHandle", "errors", "context", "initOrt", "initRuntime", "initEp", "activeSessions", "getSessionInputOutputCount", "copyFromExternalBuffer", "createSession", "releaseSession", "prepareInputOutputTensor", "run", "endProfiling", "extractTransferableBuffers", "init_wasm_core_impl", "__esmMin", "init_run_options", "init_session_options", "init_wasm_common", "init_wasm_factory", "init_wasm_utils", "init_wasm_utils_load_file", "numThreads", "loggingLevel", "getInstance", "checkLastError", "env", "logLevelStringToEnum", "epName", "initJsep", "adapter", "powerPreference", "forceFallbackAdapter", "sessionHandle", "wasm", "stack", "dataOffset", "model", "modelDataOffset", "modelData", "options", "modelDataLength", "sessionOptionsHandle", "ioBindingHandle", "allocs", "inputNamesUTF8Encoded", "outputNamesUTF8Encoded", "setSessionOptions", "loadingPromises", "file", "path", "loadFile", "data", "inputCount", "outputCount", "enableGraphCapture", "inputNames", "outputNames", "outputPreferredLocations", "i", "name", "nameString", "location", "bindingState", "l", "dataLocationStringToEnum", "e", "buf", "alloc", "sessionId", "session", "ioBindingState", "tensor", "tensorHandles", "index", "dataType", "dims", "rawData", "dataByteLength", "gpuBuffer", "elementSizeInBytes", "getTensorElementSize", "tensorDataTypeStringToEnum", "a", "b", "registerBuffer", "dataIndex", "allocWasmString", "dimsOffset", "dimIndex", "d", "inputIndices", "inputTensors", "outputIndices", "outputTensors", "inputOutputBound", "runOptionsHandle", "runOptionsAllocs", "inputTensorHandles", "outputTensorHandles", "inputOutputAllocs", "beforeRunStack", "inputValuesOffset", "inputNamesOffset", "outputValuesOffset", "outputNamesOffset", "setRunOptions", "inputValuesIndex", "inputNamesIndex", "outputValuesIndex", "outputNamesIndex", "handle", "outputPreferredLocationsEncoded", "errorCode", "output", "beforeGetTensorDataStack", "tensorDataOffset", "keepOutputTensor", "type", "tensorDataIndex", "dimsLength", "size", "tensorDataTypeEnumToString", "preferredLocation", "stringData", "offset", 
"maxBytesToRead", "getBuffer", "elementSize", "isGpuBufferSupportedType", "typedArrayConstructor", "tensorTypeToTypedArrayConstructor", "v", "p", "profileFileName", "tensors", "buffers", "require_main", "__commonJSMin", "exports", "module", "isProxy", "proxyWorker", "initializing", "initialized", "aborted", "initWasmCallbacks", "queuedCallbacks", "enqueueCallbacks", "ensureWorker", "onProxyWorkerMessage", "scriptSrc", "initializeWebAssemblyAndOrtRuntime", "initializeOrtEp", "copyFromExternalBuffer", "createSession", "releaseSession", "run", "endProfiling", "init_proxy_wrapper", "__esmMin", "init_esm", "init_wasm_core_impl", "init_wasm_factory", "env", "type", "callbacks", "queue", "ev", "resolve", "reject", "workerUrl", "message", "initializeWebAssembly", "initRuntime", "epName", "initEp", "buffer", "model", "options", "transferable", "sessionId", "inputIndices", "inputs", "outputIndices", "outputs", "t", "serializableInputs", "extractTransferableBuffers", "encodeTensorMetadata", "decodeTensorMetadata", "OnnxruntimeWebAssemblySessionHandler", "init_session_handler_inference", "__esmMin", "init_esm", "init_proxy_wrapper", "init_wasm_common", "init_wasm_utils_load_file", "tensor", "getName", "Tensor", "dataType", "isGpuBufferSupportedType", "gpuBuffer", "download", "dispose", "path", "copyFromExternalBuffer", "loadFile", "pathOrBuffer", "options", "TRACE_FUNC_BEGIN", "model", "createSession", "TRACE_FUNC_END", "releaseSession", "feeds", "fetches", "inputArray", "inputIndices", "kvp", "name", "index", "outputArray", "outputIndices", "inputs", "t", "i", "outputs", "results", "run", "resultMap", "endProfiling", "initializeFlags", "OnnxruntimeWebAssemblyBackend", "init_backend_wasm", "__esmMin", "init_esm", "init_proxy_wrapper", "init_session_handler_inference", "env", "numCpuLogicalCores", "backendName", "initializeWebAssemblyAndOrtRuntime", "initializeOrtEp", "pathOrBuffer", "options", "handler", "OnnxruntimeWebAssemblySessionHandler", "backend_wasm_inference_exports", "__export", "wasmBackend", "init_backend_wasm_inference", "__esmMin", "init_backend_wasm", "OnnxruntimeWebAssemblyBackend", "init_esm", "version", "lib_default", "esm_exports", "wasmBackend", "registerBackend", "env", "version"] +}