From b8913636cac768548860f5f4ecc817a3fe6eb78f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sun, 12 Apr 2026 04:59:01 +0800 Subject: [PATCH 1/3] Refactor native bindings and output adaptor integration --- cli/src/cli-runtime.test.ts | 7 - cli/src/cli-runtime.ts | 20 +- cli/src/cli.rs | 2 +- cli/src/commands/bridge.rs | 4 +- cli/src/commands/help.rs | 2 +- doc/content/sdk/_meta.ts | 6 +- doc/content/sdk/architecture.mdx | 642 +++++++++ doc/content/sdk/index.mdx | 64 +- doc/content/sdk/logger/_meta.ts | 3 + doc/content/sdk/logger/index.mdx | 556 ++++++++ doc/content/sdk/md-compiler/_meta.ts | 3 + doc/content/sdk/md-compiler/index.mdx | 965 ++++++++++++++ doc/content/sdk/script-runtime/_meta.ts | 3 + doc/content/sdk/script-runtime/index.mdx | 1042 +++++++++++++++ doc/content/technical-details/libraries.mdx | 63 +- gui/scripts/generate-icons.ts | 2 +- gui/src/pages/AdaptorsPage.tsx | 2 +- gui/src/pages/DashboardPage.tsx | 2 +- libraries/logger/package.json | 4 +- libraries/logger/src/index.ts | 240 +--- libraries/logger/tsconfig.lib.json | 6 +- libraries/md-compiler/package.json | 8 +- .../src/markdown/native-binding.test.ts | 73 ++ libraries/md-compiler/src/native-binding.ts | 114 +- libraries/md-compiler/tsconfig.lib.json | 5 +- libraries/script-runtime/src/index.ts | 230 +--- .../script-runtime/src/native-binding.test.ts | 69 + .../src/resolve-proxy-worker.ts | 20 +- libraries/script-runtime/src/runtime-core.ts | 105 +- libraries/script-runtime/src/types.ts | 38 +- libraries/script-runtime/tsconfig.lib.json | 6 +- package.json | 10 +- pnpm-lock.yaml | 12 + sdk/scripts/finalize-bundle.ts | 30 + sdk/src/ConfigLoader.ts | 4 +- sdk/src/ProtectedDeletionGuard.ts | 2 +- .../adaptors/NativeBaseOutputAdaptor.test.ts | 301 +++++ sdk/src/adaptors/NativeBaseOutputAdaptor.ts | 157 +++ .../NativeDroidCLIOutputAdaptor.test.ts | 275 ++++ .../adaptors/NativeDroidCLIOutputAdaptor.ts | 120 ++ .../NativeGeminiCLIOutputAdaptor.test.ts | 188 +++ 
.../adaptors/NativeGeminiCLIOutputAdaptor.ts | 98 ++ sdk/src/adaptors/adaptor-core.ts | 6 +- .../adaptors/adaptor-core/AbstractAdaptor.ts | 4 +- .../adaptor-core/AbstractOutputAdaptor.ts | 5 +- .../adaptor-core/GlobalScopeCollector.ts | 2 +- sdk/src/adaptors/adaptor-core/InputTypes.ts | 6 +- .../adaptors/adaptor-core/RegistryWriter.ts | 2 +- sdk/src/adaptors/adaptor-core/plugin.ts | 2 +- sdk/src/adaptors/adaptor-core/types.ts | 2 +- .../AindexProjectConfigLoader.ts | 4 +- sdk/src/config.test.ts | 26 + sdk/src/config.ts | 4 +- sdk/src/core/base_output_plans.rs | 1104 ++++++++++++++++ sdk/src/core/cleanup.rs | 72 +- sdk/src/core/command_bridge.rs | 462 +++++++ sdk/src/core/desk_paths.rs | 12 +- sdk/src/core/droid_output_plan.rs | 1166 +++++++++++++++++ sdk/src/core/gemini_output_plan.rs | 484 +++++++ sdk/src/core/input_plugins/skill.rs | 183 ++- sdk/src/core/input_plugins/workspace.rs | 6 +- sdk/src/core/mod.rs | 4 + sdk/src/core/native-binding-loader.test.ts | 503 +++++++ sdk/src/core/native-binding-loader.ts | 292 +++++ sdk/src/core/native-binding.ts | 9 +- sdk/src/core/plugin_shared.rs | 147 ++- sdk/src/index.test.ts | 5 +- sdk/src/index.ts | 77 +- sdk/src/inputs/AbstractInputCapability.ts | 11 +- sdk/src/inputs/NativeInputCapability.ts | 3 +- sdk/src/inputs/input-agentskills.test.ts | 68 + sdk/src/inputs/input-agentskills.ts | 25 +- sdk/src/inputs/input-aindex.ts | 25 +- sdk/src/inputs/input-command.ts | 3 +- sdk/src/inputs/input-editorconfig.ts | 8 +- sdk/src/inputs/input-git-exclude.ts | 8 +- sdk/src/inputs/input-gitignore.ts | 8 +- sdk/src/inputs/input-jetbrains-config.ts | 8 +- sdk/src/inputs/input-project-prompt.ts | 3 +- sdk/src/inputs/input-readme.ts | 3 +- sdk/src/inputs/input-rule.ts | 3 +- sdk/src/inputs/input-shared-ignore.ts | 8 +- sdk/src/inputs/input-subagent.ts | 25 +- sdk/src/inputs/input-vscode-config.ts | 8 +- sdk/src/inputs/input-zed-config.ts | 8 +- sdk/src/inputs/native-result.ts | 252 ++++ sdk/src/inputs/runtime.ts | 2 +- 
sdk/src/internal/default-output-plugins.ts | 34 +- sdk/src/internal/git-discovery-legacy.ts | 3 +- .../internal/native-command-bridge.test.ts | 122 ++ sdk/src/internal/native-command-bridge.ts | 84 ++ sdk/src/internal/sdk-binding.ts | 22 +- sdk/src/lib.rs | 79 +- sdk/src/libraries/logger.ts | 136 ++ .../libraries/script-runtime/index.test.ts | 39 + sdk/src/libraries/script-runtime/index.ts | 164 +++ .../script-runtime/resolve-proxy-worker.ts | 19 + .../libraries/script-runtime/runtime-core.ts | 104 ++ sdk/src/libraries/script-runtime/types.ts | 37 + sdk/src/prompts.ts | 8 +- sdk/src/public-config-paths.ts | 2 +- sdk/src/runtime/cleanup.ts | 20 +- sdk/test/native-binding/base-output-plans.ts | 191 +++ sdk/test/setup-native-binding-fixed.ts | 26 +- sdk/test/setup-native-binding.ts | 50 +- sdk/tsconfig.eslint.json | 4 +- sdk/tsdown.config.ts | 2 +- turbo.json | 15 +- 108 files changed, 10624 insertions(+), 1108 deletions(-) create mode 100644 doc/content/sdk/architecture.mdx create mode 100644 doc/content/sdk/logger/_meta.ts create mode 100644 doc/content/sdk/logger/index.mdx create mode 100644 doc/content/sdk/md-compiler/_meta.ts create mode 100644 doc/content/sdk/md-compiler/index.mdx create mode 100644 doc/content/sdk/script-runtime/_meta.ts create mode 100644 doc/content/sdk/script-runtime/index.mdx create mode 100644 libraries/md-compiler/src/markdown/native-binding.test.ts create mode 100644 libraries/script-runtime/src/native-binding.test.ts create mode 100644 sdk/src/adaptors/NativeBaseOutputAdaptor.test.ts create mode 100644 sdk/src/adaptors/NativeBaseOutputAdaptor.ts create mode 100644 sdk/src/adaptors/NativeDroidCLIOutputAdaptor.test.ts create mode 100644 sdk/src/adaptors/NativeDroidCLIOutputAdaptor.ts create mode 100644 sdk/src/adaptors/NativeGeminiCLIOutputAdaptor.test.ts create mode 100644 sdk/src/adaptors/NativeGeminiCLIOutputAdaptor.ts create mode 100644 sdk/src/core/base_output_plans.rs create mode 100644 sdk/src/core/command_bridge.rs create mode 
100644 sdk/src/core/droid_output_plan.rs create mode 100644 sdk/src/core/gemini_output_plan.rs create mode 100644 sdk/src/core/native-binding-loader.test.ts create mode 100644 sdk/src/core/native-binding-loader.ts create mode 100644 sdk/src/inputs/native-result.ts create mode 100644 sdk/src/internal/native-command-bridge.test.ts create mode 100644 sdk/src/internal/native-command-bridge.ts create mode 100644 sdk/src/libraries/logger.ts create mode 100644 sdk/src/libraries/script-runtime/index.test.ts create mode 100644 sdk/src/libraries/script-runtime/index.ts create mode 100644 sdk/src/libraries/script-runtime/resolve-proxy-worker.ts create mode 100644 sdk/src/libraries/script-runtime/runtime-core.ts create mode 100644 sdk/src/libraries/script-runtime/types.ts create mode 100644 sdk/test/native-binding/base-output-plans.ts diff --git a/cli/src/cli-runtime.test.ts b/cli/src/cli-runtime.test.ts index 4cbc628b..93d5438f 100644 --- a/cli/src/cli-runtime.test.ts +++ b/cli/src/cli-runtime.test.ts @@ -15,13 +15,6 @@ vi.mock('@truenine/memory-sync-sdk', () => ({ clean: cleanMock, listAdaptors: listAdaptorsMock } - }, - createTsFallbackMemorySyncBinding() { - return { - install: installMock, - dryRun: dryRunMock, - clean: cleanMock - } } })) diff --git a/cli/src/cli-runtime.ts b/cli/src/cli-runtime.ts index f351adf2..b4753dd0 100644 --- a/cli/src/cli-runtime.ts +++ b/cli/src/cli-runtime.ts @@ -2,7 +2,7 @@ import type {MemorySyncAdaptorInfo, MemorySyncCommandResult} from '@truenine/mem import process from 'node:process' import {flushOutput, setGlobalLogLevel} from '@truenine/logger' -import {createTsFallbackMemorySyncBinding, getMemorySyncSdkBinding} from '@truenine/memory-sync-sdk' +import {getMemorySyncSdkBinding} from '@truenine/memory-sync-sdk' import {extractUserArgs, parseArgs} from './cli-args' const CLI_NAME = 'tnmsc' @@ -31,7 +31,7 @@ Synchronize AI memory and configuration files across projects. - \`${CLI_NAME} dry-run\` previews what would be written. 
- \`${CLI_NAME} clean\` removes generated files. - \`${CLI_NAME} clean --dry-run\` previews what would be cleaned. -- \`${CLI_NAME} plugins\` lists the built-in output plugins. +- \`${CLI_NAME} plugins\` lists the built-in output adaptors. ## Log Controls @@ -58,9 +58,9 @@ function writeUnknownCommand(command: string): void { } function writePluginList(plugins: readonly MemorySyncAdaptorInfo[]): void { - const lines = ['# Registered plugins', ''] + const lines = ['# Registered adaptors', ''] if (plugins.length === 0) { - lines.push('- No plugins are currently registered.') + lines.push('- No adaptors are currently registered.') } else { for (const plugin of plugins) { const dependencySuffix = plugin.dependencies.length > 0 ? ` (depends on: ${plugin.dependencies.join(', ')})` : '' @@ -94,17 +94,7 @@ export async function runCli(argv: readonly string[] = process.argv): Promise ExitCode { pub fn plugins() -> ExitCode { let plugins = tnmsc::list_plugins(); - println!("# Registered plugins"); + println!("# Registered adaptors"); println!(); if plugins.is_empty() { - println!("- No plugins are currently registered."); + println!("- No adaptors are currently registered."); } else { for plugin in plugins { if plugin.dependencies.is_empty() { diff --git a/cli/src/commands/help.rs b/cli/src/commands/help.rs index 10a55392..9d08c2bb 100644 --- a/cli/src/commands/help.rs +++ b/cli/src/commands/help.rs @@ -11,7 +11,7 @@ pub fn execute() -> ExitCode { println!(" install Run the install pipeline explicitly"); println!(" dry-run Preview changes without writing files"); println!(" clean Remove all generated output files"); - println!(" plugins List all registered plugins"); + println!(" plugins List all registered adaptors"); println!(" version Show version information"); println!(" help Show this help message"); println!(); diff --git a/doc/content/sdk/_meta.ts b/doc/content/sdk/_meta.ts index 01d60bc6..806ed94e 100644 --- a/doc/content/sdk/_meta.ts +++ b/doc/content/sdk/_meta.ts 
@@ -1,3 +1,7 @@ export default { - index: '概览' + index: '概览', + architecture: '架构总览', + logger: 'Logger 日志库', + 'md-compiler': 'MDX-Compiler 编译器', + 'script-runtime': 'Script-Runtime 运行时' } diff --git a/doc/content/sdk/architecture.mdx b/doc/content/sdk/architecture.mdx new file mode 100644 index 00000000..f23c2d4f --- /dev/null +++ b/doc/content/sdk/architecture.mdx @@ -0,0 +1,642 @@ +--- +title: SDK 架构总览 +description: memory-sync SDK 的全局架构视角,整合 Logger、MDX-Compiler、Script-Runtime 三库的角色定位、依赖关系、数据流和构建流程,为架构师和技术负责人提供完整的系统蓝图。 +sidebarTitle: 架构总览 +status: stable +--- + +import { Callout, Cards, Steps, Tabs } from 'nextra/components' + +# SDK 架构总览 + +本文档从全局视角呈现 `sdk/` 作为私有混合核心的完整架构,整合 [Logger](/docs/sdk/logger)、[MDX-Compiler](/docs/sdk/md-compiler)、[Script-Runtime](/docs/sdk/script-runtime) 三个基础库的职责边界与协作关系。 + +## SDK 整体定位 + +`sdk/` 是整个 memory-sync monorepo 的 **私有混合核心(Private Mixed Core)**,也是共享内部能力的唯一事实来源。它不是一个供外部直接安装的独立 npm 包——它的存在是为了将 `tnmsc` 工具链的核心逻辑从 `cli/` 命令入口层中抽离出来,形成清晰的分层架构。 + +在当前仓库结构中,`sdk/` 承担以下核心职责: + +- 拥有 Rust crate `tnmsc` 的实际 workspace 路径和 facade 定义 +- 提供 npm 包 `@truenine/memory-sync-sdk` 作为内部消费者(`cli/`、`mcp/`、`gui/`)的统一依赖入口 +- 负责 Prompt Service 的实现(基于 MDX-Compiler)、Schema 生成以及最小化的 TypeScript loader 入口 +- 编排 Logger、MDX-Compiler、Script-Runtime 三类基础能力;其中 Logger / Script-Runtime 的 TypeScript facade 已内联到 `sdk/src/libraries/*`,`libraries/*` 保留 Rust crate 与发布壳层 + +**消费者依赖方向**是单向的:`cli/`、`mcp/`、`gui/` 都依赖 `sdk/`。在 `sdk/` 下游,MDX-Compiler 仍完整保留在 `libraries/`,而 Logger / Script-Runtime 已形成“`sdk/src/libraries/*` 作为 TypeScript 事实来源 + `libraries/*` 作为 Rust crate / wrapper 发布层”的分层。这种边界确保了核心逻辑不会散落在多个入口中,也避免了 `cli/` 再次成为隐式的事实来源。 + +## 为什么还保留 Libraries 层 + +`libraries/` 仍然有存在价值,但它现在主要承载 **Rust-first** 的 crate、本地 NAPI 制品和对外发布 wrapper,而不再默认承担全部 TypeScript 事实来源。它们覆盖了 `tnmsc` 工具链中性能最关键的路径: + +| 性能关键路径 | 对应 Library | Rust 提供的优势 | +| --- | --- | --- | +| 日志格式化与输出 | Logger | 零拷贝字符串处理、异步 output worker 线程、原子级别控制 | +| MDX 解析与转换 | MDX-Compiler | 原生 AST 遍历、零成本抽象的编译器流水线 | +| 路径规范化与安全验证 
| Script-Runtime | OS 级路径遍历防护、进程隔离的超时控制 | + +将这些能力下沉到独立的 Rust crate 中,使得 `gui/`(Tauri 应用)可以直接调用 crate 而无需 NAPI 开销,同时也让每个库可以独立演进和测试。当前只有 Logger / Script-Runtime 的 TypeScript facade 回收到了 `sdk/src/libraries/*`;`md-compiler` 仍完整保留在 `libraries/`。 + +--- + +## 三库角色定位 + +```mermaid +graph TB + subgraph Consumers["上层消费者"] + CLI["CLI (tnmsc)
命令入口 + 兼容层"] + MCP["MCP Server
stdio 服务端"] + GUI["GUI (Tauri)
桌面应用"] + end + + subgraph SDK["@truenine/memory-sync-sdk
私有混合核心"] + SDKCore["SDK Core
Prompt Service / Schema / Bridge"] + LOG_TS["sdk/src/libraries/logger.ts
(Logger TS facade)"] + MDC_TS["libraries/md-compiler/src/index.ts
(MDX-Compiler TS API)"] + SR_TS["sdk/src/libraries/script-runtime
(Script-Runtime TS facade)"] + end + + subgraph Libraries["Workspace Libraries (Rust-first)"] + direction LR + subgraph L1["Logger
基础设施层"] + LOG_R["tnmsc-logger
(Rust crate)"] + LOG_PKG["@truenine/logger
(wrapper package)"] + end + + subgraph L2["MDX-Compiler
编译层"] + MDC_R["tnmsc-md-compiler
(Rust crate)"] + end + + subgraph L3["Script-Runtime
运行时层"] + SR_R["tnmsc-script-runtime
(Rust crate)"] + SR_PKG["@truenine/script-runtime
(wrapper package)"] + end + end + + CLI --> SDKCore + MCP --> SDKCore + GUI --> SDKCore + + SDKCore --> LOG_TS + SDKCore --> MDC_TS + SDKCore --> SR_TS + + MDC_TS -.->|"workspace 依赖"| LOG_TS + + LOG_R -.->|"NAPI Binding"| LOG_TS + MDC_R -.->|"NAPI Binding"| MDC_TS + SR_R -.->|"NAPI Binding"| SR_TS + LOG_TS -.->|"re-export"| LOG_PKG + SR_TS -.->|"re-export"| SR_PKG + + style SDKCore fill:#3b82f6,color:#fff + style L1 fill:#10b981,color:#fff + style L2 fill:#f59e0b,color:#fff + style L3 fill:#8b5cf6,color:#fff + style Consumers fill:#f1f5f9,color:#1e293b +``` + + + + 所有其他库和 SDK 直接依赖的基础设施。提供 AI 友好的 Markdown 结构化日志输出、诊断错误格式化、诊断缓冲机制和异步 Output Worker。是整个工具链的"神经系统"。 + + + 依赖 Logger 进行编译过程日志。提供 MDX→Markdown 转换引擎、表达式求值、JSX 组件注册/处理、导出元数据提取以及 TOML artifact 构建。是 Prompt Service 的核心引擎。 + + + 独立于其他库。提供代理模块动态加载(Jiti 运行时)、Rust 原生路径安全验证、Worker 进程隔离执行。独立的原因:职责域不同(运行时加载 vs 编译期转换),且路径安全属于安全敏感操作。 + + + +--- + +## 依赖关系详解 + +### Logger — 基础设施层 + +Logger 是整个依赖树的 **叶子节点之一**(另一个是 Script-Runtime),不依赖任何其他 workspace library。当前 `sdk/src/libraries/logger.ts` 是 TypeScript 事实来源,而 `libraries/logger/src/index.ts` 只保留对外发布 wrapper。 + +```mermaid +graph LR + SDK["@truenine/memory-sync-sdk"] --> LOGGER["Logger facade"] + MDC["@truenine/md-compiler"] --> LOGGER + + subgraph Logger["tnmsc-logger / Logger"] + L_CORE["Rust 核心
lib.rs"] + L_NAPI["NAPI Binding
条件编译"] + L_TS["TS facade
sdk/src/libraries/logger.ts"] + L_WRAPPER["wrapper
libraries/logger/src/index.ts"] + L_WORKER["Output Worker
异步线程"] + end + + L_CORE --> L_NAPI + L_NAPI --> L_TS + L_TS -.-> L_WRAPPER + L_CORE --> L_WORKER + + style LOGGER fill:#10b981,color:#fff +``` + +**提供的能力**: + +| 能力 | 说明 | +| --- | --- | +| 结构化日志输出 | 以 `### Title` + Markdown 列表格式输出,天然适合 AI 解析 | +| 诊断错误格式 | `error`/`warn`/`fatal` 级别支持 rootCause、exactFix、possibleFixes、details 四个语义区域 | +| 诊断缓冲机制 | Silent 模式下诊断仍被缓冲到全局队列,可通过 `drainBufferedDiagnostics()` 批量提取 | +| 异步 Output Worker | 通过 `mpsc::channel` 驱动的独立输出线程,避免 I/O 阻塞主线程 | +| NAPI 跨平台绑定 | 条件编译 `#[cfg(feature = "napi")]`,支持 win32/linux/darwin 五大平台 | + +**被谁依赖**:SDK 本身(ConfigLoader、RuntimeEnvironment 使用 logger 输出配置和运行时信息)、MDX-Compiler(编译过程中的日志记录)。 + +> 详细 API 参考 → [Logger 日志库](/docs/sdk/logger) + +### MDX-Compiler — 编译层 + +MDX-Compiler 位于依赖树的中层,**仅依赖 Logger**,被 SDK 的 Prompt Service 直接使用。 + +```mermaid +graph LR + SDK["@truenine/memory-sync-sdk"] --> MDC["@truenine/md-compiler"] + MDC --> LOGGER["@truenine/logger"] + + subgraph MDXCompiler["tnmsc-md-compiler / @truenine/md-compiler"] + direction TB + MC_PARSER["Parser
unified + remark 插件链"] + MC_EXPR["Expression Eval
简单引用 / Function 构造器"] + MC_COMP["Component Process
ComponentRegistry"] + MC_EXPORT["Export Parse
元数据静态提取"] + MC_JSX["JSX Convert
未注册元素 → Markdown"] + MC_TOML["TOML Artifact
Prompt 文档构建"] + end + + MC_PARSER --> MC_EXPR + MC_EXPR --> MC_COMP + MC_COMP --> MC_EXPORT + MC_PARSER --> MC_JSX + MC_TOML --> MC_PARSER + + style MDC fill:#f59e0b,color:#fff + style LOGGER fill:#10b981,color:#fff +``` + +**提供的能力**: + +| 能力 | 说明 | +| --- | --- | +| MDX → Markdown 转换 | 将包含 JSX、表达式插值、ESM 导出的 MDX 编译为标准 Markdown | +| 表达式求值 | 受控作用域内的 JS 表达式求值(简单引用直接查找,复杂表达式用 `new Function()`) | +| JSX 组件处理 | 通过 ComponentRegistry 注册和处理内置/自定义组件(如 `` 条件渲染) | +| 导出元数据解析 | 从 `export const` / `export default` 中静态提取 frontmatter 字段 | +| TOML Artifact 构建 | 为 Prompt 工程场景构建结构化的 TOML 文档 | + +**为什么依赖 Logger**:编译过程的各个阶段(Parser、Expression Eval、Component Process 等)需要结构化日志来输出编译进度、警告和诊断错误。这些日志通过 Logger 的 Markdown 格式输出,确保终端可读且 AI 可解析。 + +**运行时结构**:公开包入口默认使用 Native Binding(Rust/NAPI);`src/compiler/` 保留 TypeScript 参考流水线,用于源级测试、调试和实现对照。 + +> 详细 API 参考 → [MDX-Compiler 编译器](/docs/sdk/md-compiler) + +### Script-Runtime — 运行时层 + +Script-Runtime 是依赖树中的 **另一个叶子节点**,与其他两个 library **无依赖关系**。 + +```mermaid +graph LR + SDK["@truenine/memory-sync-sdk"] --> SR["@truenine/script-runtime"] + + subgraph ScriptRuntime["tnmsc-script-runtime / @truenine/script-runtime"] + direction TB + SR_LOADER["Module Loader
Jiti Runtime"] + SR_PATH["Path Validator
Rust 原生绑定"] + SR_WORKER["Worker 机制
进程隔离 + 超时"] + SR_PROXY["Proxy System
defineProxy / loadProxyModule"] + end + + SR_PROXY --> SR_LOADER + SR_PROXY --> SR_WORKER + SR_WORKER --> SR_PATH + + style SR fill:#8b5cf6,color:#fff +``` + +**提供的能力**: + +| 能力 | 说明 | +| --- | --- | +| 代理模块加载 | 通过 Jiti 运行时动态加载用户定义的 `proxy.ts`,支持 TS/ESM 语法 | +| 路径安全验证 | Rust 原生绑定实现的防路径遍历严格验证(normalize_path + ensure_within_root) | +| Worker 进程隔离 | 子进程执行代理逻辑,带超时控制(默认 5000ms),防止失控挂起 | +| 代理路由分发 | 支持函数式和对象式两种代理定义模式,含命令匹配器(matcher) | + +**为什么独立于其他库**:Script-Runtime 的职责域是 **运行时模块加载和路径安全**,这与 MDX-Compiler 的 **编译期转换** 属于完全不同的阶段。此外,路径验证涉及安全敏感操作(防路径遍历),将其作为独立 crate 可以接受更严格的审计和测试覆盖。Logger 的日志能力对 Script-Runtime 来说不是必需的——它通过 stderr 直接传递错误信息给调用方。 + +> 详细 API 参考 → [Script-Runtime](/docs/sdk/script-runtime) + +--- + +## 数据流说明 + +### 场景 1:CLI 执行命令的完整流程 + +当用户通过 `tnmsc` CLI 执行一条命令时,数据流经所有三层 library: + +```mermaid +sequenceDiagram + participant User as 用户 + participant CLI as CLI (tnmsc) + participant SDK as SDK Core + participant Logger as Logger + participant MDCompiler as MDX-Compiler + participant ScriptRT as Script-Runtime + + User->>CLI: tnmsc run --prompt ./my-prompt.mdx + CLI->>SDK: 初始化 + 加载配置 + + par 并行初始化 + SDK->>Logger: createLogger('sdk') + Logger-->>SDK: ILogger 实例 + SDK->>Logger: logger.info('SDK 初始化完成') + and + SDK->>ScriptRT: resolvePublicPath(proxy.ts, ctx, path) + ScriptRT->>ScriptRT: Worker 进程加载 proxy.ts + ScriptRT->>ScriptRT: Rust validate_public_path() + ScriptRT-->>SDK: 安全解析后的路径 + end + + SDK->>MDCompiler: mdxToMd(mdxSource, options) + MDCompiler->>Logger: 记录编译开始 + MDCompiler->>MDCompiler: Parser → Expression Eval → Component Process → Transform + MDCompiler->>Logger: 记录编译结果/警告 + MDCompiler-->>SDK: MdxToMdResult { content, metadata } + + SDK->>Logger: logger.info('命令执行完成', { duration }) + SDK-->>CLI: 格式化输出结果 + CLI-->>User: 终端展示 +``` + +### 场景 2:MCP Server 处理请求 + +MCP Server 作为轻量消费者,主要使用 SDK 的 Prompt Service 能力: + +```mermaid +sequenceDiagram + participant Client as MCP 客户端 + participant MCP as MCP Server (stdio) + participant SDK as SDK Prompt Service + 
participant MDCompiler as MDX-Compiler + participant Logger as Logger + + Client->>MCP: tools/call (prompt_compile) + MCP->>SDK: promptService.compile(promptId, scope) + + SDK->>MDCompiler: mdxToMd(content, { globalScope: scope }) + MDCompiler->>Logger: 编译过程日志 + MDCompiler->>MDCompiler: 解析 → 求值 → 转换 → 序列化 + + alt 编译成功 + MDCompiler-->>SDK: MdxToMdResult + SDK->>Logger: logger.info('Prompt 编译成功') + SDK-->>MCP: { content, metadata } + MCP-->>Client: JSON-RPC response + else 编译失败 + MDCompiler-->>SDK: throw CompilerDiagnosticError + SDK->>Logger: logger.error(diagnosticInput) + SDK-->>MCP: error response with diagnostic + MCP-->>Client: JSON-RPC error + end +``` + +--- + +## 构建流程说明 + +SDK 的构建是一个多阶段的有序流水线,从 Rust 源码到最终的 npm 制品: + + + ### Step 1: Workspace Libraries 构建 + + 首先并行构建三个 workspace libraries 的 TypeScript 和 Native binding: + + ```bash + # TypeScript 编译(并行) + pnpm -F @truenine/logger -F @truenine/md-compiler -F @truenine/script-runtime run build:ts + + # Native binding 构建(按依赖顺序) + pnpm -F @truenine/logger -F @truenine/script-runtime run build # 无相互依赖,可并行 + pnpm -F @truenine/md-compiler run build:native # 依赖 logger 的 .node 文件 + ``` + + 此阶段产出: + - `libraries/logger/dist/index.js` + `napi-logger.*.node` + - `libraries/md-compiler/dist/index.js` + `napi-md-compiler.*.node` + - `libraries/script-runtime/dist/index.js` + `napi-script-runtime.*.node` + + ### Step 2: SDK Native Binding 编译 + + 编译 SDK 自身的 Rust crate (`tnmsc`) 并生成 NAPI 制品: + + ```bash + napi build --platform --release --output-dir dist -- --features napi + ``` + + 此步骤会: + - 执行 `cargo build --release` 编译 Rust 源码(链接三个 libraries 的 crate) + - 生成平台特定的 `.node` 二进制文件到 `sdk/dist/` + + ### Step 3: NAPI 制品复制 + + 将各 library 和 SDK 的 NAPI 二进制文件集中复制到目标位置: + + ```bash + tsx ../scripts/copy-napi.ts + ``` + + ### Step 4: TypeScript 打包与 Bundle + + 使用 tsdown/esbuild 将 SDK 的 TypeScript 源码打包为 ESM 输出: + + ```bash + tsx ../scripts/build-quiet.ts # bundle + tsx scripts/finalize-bundle.ts # finalize:bundle + ``` + + 此阶段产出 
`sdk/dist/index.mjs` 和 `sdk/dist/index.d.mts`。 + + ### Step 5: Schema 生成 + + 从 Zod schema 定义生成 JSON Schema: + + ```bash + tsx scripts/generate-schema.ts + ``` + + 产出 `sdk/dist/tnmsc.schema.json`,可通过 `@truenine/memory-sync-sdk/schema.json` 导出。 + + + +**构建脚本速查**:所有构建步骤已封装在 `sdk/package.json` 的 `scripts.build` 中,顶层命令 `pnpm -F @truenine/memory-sync-sdk run build` 会按正确顺序执行全部 5 个阶段。 + + +--- + +## 代码组织结构 + +### 目录树 + +``` +memory-sync/ +├── sdk/ # 私有混合核心 +│ ├── src/ +│ │ ├── index.ts # 公共 API 入口 +│ │ ├── core/ +│ │ │ └── native-binding-loader.ts # 统一绑定加载器 +│ │ ├── libraries/ +│ │ │ ├── logger.ts # Logger TS facade (事实来源) +│ │ │ └── script-runtime/ # Script-Runtime TS facade (事实来源) +│ │ ├── adaptors/ # 输出适配器 +│ │ ├── prompts.ts # Prompt 服务 (使用 md-compiler) +│ │ ├── ConfigLoader.ts # 配置加载 (使用 logger) +│ │ └── runtime-environment.ts # 运行时环境 (使用 logger) +│ ├── Cargo.toml # Rust crate 定义 (tnmsc) +│ ├── package.json # npm 包定义 (@truenine/memory-sync-sdk) +│ └── scripts/ +│ ├── generate-schema.ts # Schema 生成 +│ └── finalize-bundle.ts # Bundle 后处理 +│ +├── libraries/ # Rust-first workspace 库 +│ ├── logger/ # 日志库 +│ │ ├── src/ +│ │ │ ├── lib.rs # Rust 核心 (LogLevel, Logger, OutputWorker, NAPI) +│ │ │ └── index.ts # 发布 wrapper (re-export sdk/src/libraries/logger.ts) +│ │ ├── Cargo.toml # tnmsc-logger crate +│ │ └── package.json # @truenine/logger +│ │ +│ ├── md-compiler/ # 编译器 +│ │ ├── src/ +│ │ │ ├── lib.rs # Rust 核心 (NAPI compile_mdx_to_md) +│ │ │ ├── index.ts # TS 入口 (mdxToMd, buildPromptTomlArtifact) +│ │ │ ├── native-binding.ts # 共享加载器接入 / binding 校验 +│ │ │ ├── mdx-to-md.ts # 主编译函数编排 +│ │ │ ├── compiler/ # 编译器组件 +│ │ │ │ ├── index.ts # 内部 API 导出 +│ │ │ │ ├── parser.ts # MDX → MDAST (unified) +│ │ │ │ ├── expression-eval.ts # 表达式求值 +│ │ │ │ ├── jsx-expression-eval.ts # JSX 表达式求值 +│ │ │ │ ├── component-processor.ts # 组件处理器 +│ │ │ │ ├── component-registry.ts # 组件注册表 +│ │ │ │ ├── export-parser.ts # 导出元数据解析 +│ │ │ │ ├── jsx-converter.ts # JSX → Markdown +│ │ │ │ └── transformer.ts # 
AST 转换编排 +│ │ │ ├── components/ # 内置组件 +│ │ │ │ └── Md.ts # 条件渲染组件 +│ │ │ ├── globals/ # 全局作用域定义 +│ │ │ ├── toml.ts # TOML 构建 +│ │ │ └── errors/ # 错误类型体系 +│ │ ├── Cargo.toml # tnmsc-md-compiler crate +│ │ └── package.json # @truenine/md-compiler +│ │ +│ └── script-runtime/ # 运行时 +│ ├── src/ +│ │ ├── lib.rs # Rust 核心 (validate/resolve_public_path) +│ │ └── index.ts # 发布 wrapper (re-export sdk/src/libraries/script-runtime) +│ ├── Cargo.toml # tnmsc-script-runtime crate +│ └── package.json # @truenine/script-runtime +``` + +### 关键文件职责映射 + +| 文件 | 所属层 | 核心职责 | 依赖的 Library | +| --- | --- | --- | --- | +| `sdk/src/index.ts` | SDK | 公共 API 入口,re-export 核心能力 | 全部三个 | +| `sdk/src/prompts.ts` | SDK | Prompt Service,编译和管理 prompts | MDX-Compiler, Logger | +| `sdk/src/ConfigLoader.ts` | SDK | 配置文件加载与验证 | Logger, Zod | +| `sdk/src/runtime-environment.ts` | SDK | 运行时环境初始化 | Logger | +| `sdk/src/core/native-binding-loader.ts` | SDK | 统一的 NAPI binding 加载(消除重复) | — | +| `sdk/src/libraries/logger.ts` | SDK | Logger TypeScript facade、`ILogger` 适配器、共享加载器接入 | Logger Rust crate | +| `sdk/src/libraries/script-runtime/index.ts` | SDK | Script-Runtime TypeScript facade、Worker 路径发现、共享加载器接入 | Script-Runtime Rust crate | +| `sdk/src/libraries/script-runtime/runtime-core.ts` | SDK | Jiti 加载、代理路由分发 | — | +| `libraries/logger/src/lib.rs` | Logger | Rust 核心:日志级别、格式化、诊断、OutputWorker | — | +| `libraries/logger/src/index.ts` | Logger | 发布 wrapper:re-export `sdk/src/libraries/logger.ts` | — | +| `libraries/md-compiler/src/compiler/` | MDX-Compiler | 编译器流水线全组件 | Logger (workspace) | +| `libraries/md-compiler/src/native-binding.ts` | MDX-Compiler | 共享加载器接入、env flag gate、binding validator | — | +| `libraries/script-runtime/src/lib.rs` | Script-Runtime | Rust 核心:路径验证、Worker 管理 | — | +| `libraries/script-runtime/src/index.ts` | Script-Runtime | 发布 wrapper:re-export `sdk/src/libraries/script-runtime` | — | + +--- + +## 设计决策与权衡 + +### 为什么选择 Rust-first? 
+ + +**核心论点**: 性能关键路径上的操作应当在 Rust 中实现,而非 JavaScript。 + + +| 操作 | JS 实现瓶颈 | Rust 实现优势 | +| --- | --- | --- | +| 日志格式化 | 大量字符串拼接、V8 GC 压力 | 零拷贝切片、栈分配 buffer | +| MDX AST 遍历 | V8 对象属性访问开销大 | 结构体直接访问、无 GC | +| 路径规范化 | 正则表达式逐段处理 | 单次遍历、无回溯 | +| 进程管理 | `child_process` 抽象层开销 | 直接 syscall、精确超时控制 | + +Rust 的所有权系统和零成本抽象意味着这些热路径代码在编译后接近 C 语言性能,同时保持了内存安全保证。 + +### 为什么仍然保留 TypeScript 参考层? + + +TypeScript 层的价值,不只是“把 Rust 包起来”。 + + +| 场景 | 主要路径 | 原因 | +| --- | --- | --- | +| 生产环境 / CLI 发布 | Native (Rust) | 性能最优,已预编译 | +| 源级单元测试 / 行为对照 | TypeScript 参考实现或适配层 | 更容易直接断点、构造输入、验证边界 | +| CI 中 TypeScript 类型检查 | TypeScript 源码 | `tsc --noEmit` 不依赖 `.node` 文件 | +| GUI Tauri 应用 | 直接调用 crate | 无需 NAPI 序列化开销 | +| Worker 编排 / Node 运行时 glue | TypeScript | 这部分天然属于 Node.js 世界,抽到 Rust 反而更重 | + +以 MDX-Compiler 为例,`src/compiler/` 仍然是完整的 TypeScript 参考流水线;而公开包入口是否加载 native binding,则由各 library 的入口层决定。 + +### 为什么把 NAPI Binding 加载逻辑统一到 `sdk/`? + +这项整合已经完成。此前三个 library 各自维护平台映射、本地候选路径、CLI 包查找、缓存和错误聚合逻辑;现在这些共性逻辑统一收敛到了 `sdk/src/core/native-binding-loader.ts`。 + +各包自己的入口文件只保留“这个包独有的东西”,例如 binding validator、可选方法别名映射、Logger 适配器工厂或 Script-Runtime 的 Worker 路径逻辑。 + + + + **整合前的问题**: + ``` + logger/src/index.ts + md-compiler/src/native-binding.ts + script-runtime/src/index.ts + + 每个文件都要各自维护: + - 平台后缀映射 + - 本地 .node 候选路径 + - CLI 平台包探测 + - 缓存与错误聚合 + ``` + + + **当前结构**: + ``` + sdk/src/core/native-binding-loader.ts + ├── 平台映射 + ├── 候选路径探测 + ├── CLI 平台包扫描 + └── 缓存与错误格式化 + + logger/src/index.ts + └── validator + ILogger adapter + + md-compiler/src/native-binding.ts + └── env flag gate + validator + + script-runtime/src/index.ts + └── validator + optionalMethods + worker path + ``` + + + **收益分析**: + - **一致性保障**: 平台新增/移除只需改一处 + - **Bug 修复传播**: 修复加载路径或错误格式时三库同时受益 + - **包入口更清晰**: 每个 library 的 TypeScript 层回到“验证 + 编排”职责 + - **可测试性更强**: 统一加载器可以在 `sdk/` 单独覆盖路径探测和错误分支 + + + +### 多包 vs 单包的权衡 + +当前采用 **多包分离 + SDK 统一编排** 的方案: + +| 维度 | 多包方案(当前) | 单包合并方案 | +| --- | --- | --- | +| **发布粒度** | 每个 library 可独立版本发布 | 必须整体发布 | +| 
**依赖关系可视化** | 清晰的 DAG(Logger ← MDX-Compiler, Logger/Script-Runtime 并列) | 扁平化,隐藏内部依赖 | +| **构建缓存** | library 变更只触发自身及下游重建 | 任何变更触发全量重建 | +| **消费方引入** | 可选择性引入(如 MCP 只需 md-compiler + logger) | 强制引入全部 | +| **复杂度** | 更多的 package.json、workspace 配置 | 配置简单,但耦合度高 | + +当前方案的结论:**逻辑分离带来的工程收益大于多包管理的额外开销**。Turbo 的增量构建和 pnpm 的 workspace 协议已经很好地缓解了多包管理的痛点。 + +--- + +## 未来演进方向 + +### 可能的进一步整合点 + +1. **错误体系标准化**(中优先级) + - 当前 MDX-Compiler 有完善的 `CompilerDiagnosticError` 体系 + - Logger 有 `LoggerDiagnosticInput/Record` 类型 + - 可考虑建立跨库的统一错误码命名空间(如 `TNMSC_` 前缀) + +2. **Shared Scope Context**(低优先级) + - 当前各库各自维护 scope/context 对象 + - 可考虑一个统一的 `SdkContext` 类型,包含 logger instance、globalScope、path validator 等 + +### 性能优化机会 + +| 方向 | 当前状态 | 优化空间 | +| --- | --- | --- | +| **NAPI 序列化开销** | Rust ↔ Node.js 通过 JSON 序列化传递数据 | 考虑 `napi::bindgen_prelude::*` 的零拷贝类型或 `ArrayBuffer` 共享内存 | +| **MDX 编译缓存** | 每次 `mdxToMd()` 完整执行流水线 | 引入内容哈希缓存,跳过未变更文件的重新编译 | +| **Worker 复用** | Script-Runtime 每次 `resolvePublicPath` 创建新 Worker | 考虑 Worker 池模式,预热并复用子进程 | +| **Logger Output Buffer** | Output Worker 使用 BufWriter | 对高频日志场景可调整 flush 策略(批量 vs 实时) | + +### 新库扩展指南 + +当需要在 `libraries/` 下添加新的 workspace library 时,遵循以下约定: + + + ### 1. 创建目录结构 + + ``` + libraries/new-lib/ + ├── src/ + │ ├── lib.rs # Rust 核心(如有 native 需求) + │ └── index.ts # TypeScript 入口 / 绑定层 + ├── Cargo.toml # workspace crate 定义 + └── package.json # npm 包定义(name: @truenine/) + ``` + + ### 2. 注册 workspace 依赖 + + 在 `Cargo.toml` 中添加 workspace member,在 `package.json` 中声明与其他 libraries 的依赖关系。 + + ### 3. 实现 NAPI Binding(可选) + + 如果需要 Rust 性能: + - 添加 `#[cfg(feature = "napi")]` 条件编译模块 + - 实现平台特定的二进制加载(复用统一加载器) + - 保留包级 TypeScript 编排或参考层,避免把所有 Node.js glue 都塞进 Rust + + ### 4. 集成到 SDK + + - 在 `sdk/package.json` 的 `dependencies` 中添加 workspace 依赖 + - 在 `sdk/src/` 中创建对应的适配/编排模块 + - 更新 `sdk/scripts/build` 中的依赖构建步骤 + + ### 5. 
编写文档 + + - 在 `doc/content/sdk//index.mdx` 创建库文档 + - 更新本架构文档的三库角色定位图和数据流说明 + + +--- + +## 快速导航 + + + + [SDK 定位与职责边界](/docs/sdk) — 了解 sdk/ 作为私有混合核心的角色 + + + [Logger 日志库](/docs/sdk/logger) — AI 友好型 Markdown 日志基础设施 + + + [MDX-Compiler 编译器](/docs/sdk/md-compiler) — MDX/Markdown 编译与转换引擎 + + + [Script-Runtime](/docs/sdk/script-runtime) — 代理模块运行时与路径安全验证 + + diff --git a/doc/content/sdk/index.mdx b/doc/content/sdk/index.mdx index b3272ed8..aac759d3 100644 --- a/doc/content/sdk/index.mdx +++ b/doc/content/sdk/index.mdx @@ -1,13 +1,15 @@ --- title: SDK -description: 说明作为私有混合核心的 sdk/ 所承担的职责、边界、消费者,以及公共标识策略。 +description: 说明作为私有混合核心的 sdk/ 所承担的职责、边界、消费者,以及公共标识策略。整合 Logger、MDX-Compiler、Script-Runtime 三库的快速导航。 sidebarTitle: 概览 status: stable --- +import { Callout, Cards } from 'nextra/components' + # SDK -`sdk/` 是当前仓库中的私有混合核心,也是共享内部能力的唯一入口。它不是一个供外部直接安装的独立 npm 包。它是将旧核心实现从 `cli/` 中抽离出来后形成的事实来源层。 +`sdk/` 是当前仓库中的**私有混合核心**,也是共享内部能力的唯一入口。它不是一个供外部直接安装的独立 npm 包。它是将旧核心实现从 `cli/` 中抽离出来后形成的事实来源层。 ## 这一层负责什么 @@ -16,6 +18,7 @@ status: stable - 它负责 Rust crate `tnmsc` 的 facade、NAPI 构建、prompt service、schema 生成,以及最小化的 TypeScript loader 入口 - 它仍然承接少量过渡期 bridge 逻辑,但这些 bridge 路径已经退回到 `sdk/` 内部实现,不再是 `cli/` 的公共组合中心 - 它是 `mcp/`、`gui/` 以及未来内部消费者的默认依赖入口 +- 它编排 Logger、MDX-Compiler、Script-Runtime 三类基础能力;其中 Logger 和 Script-Runtime 的 TypeScript facade 已内联到 `sdk/src/libraries/*`,`libraries/*` 保留 Rust crate 与发布壳层 ## 这一层不负责什么 @@ -34,6 +37,48 @@ status: stable 变化的是路径和归属关系,而不是这些对外标识本身。 +## Workspace Libraries 快速导航 + + + + AI 友好的结构化日志系统,为 tnmsc 提供清晰的诊断输出 + + - ✅ Markdown 格式化的日志输出(`### Title` + 列表) + - ✅ 结构化诊断记录与缓冲机制(rootCause / exactFix / possibleFixes) + - ✅ Rust 高性能 + NAPI 跨平台绑定(5 大平台) + - ✅ 异步 Output Worker 线程,避免 I/O 阻塞主线程 + - ✅ 全局级别控制与环境变量支持 + + [查看完整文档 →](/docs/sdk/logger) + + + Rust-first MDX/Markdown 编译与转换引擎,Prompt Service 的核心动力 + + - ✅ MDX → Markdown 完整编译流水线(Parser → Eval → Transform → Serialize) + - ✅ 受控作用域内的表达式求值与 JSX 组件处理 + - ✅ 导出元数据静态提取(YAML + ESM exports 合并) + - ✅ TOML Artifact 构建(面向 Prompt 工程场景) + - 
✅ Native-first 包入口 + `src/compiler/` TypeScript 参考流水线 + + [查看完整文档 →](/docs/sdk/md-compiler) + + + Rust 支持的代理模块加载器与路径安全验证运行时 + + - ✅ Jiti 驱动的动态代理模块加载(TS/ESM 即时编译执行) + - ✅ Rust 原生路径验证(防遍历、防绝对路径注入) + - ✅ Worker 进程隔离执行 + 超时控制(默认 5000ms) + - ✅ 函数式 / 对象式两种代理定义模式 + - ✅ 命令匹配器(matcher)支持按命令选择性生效 + + [查看完整文档 →](/docs/sdk/script-runtime) + + + + +**想看完整的系统架构?** → [SDK 架构总览](/docs/sdk/architecture) 包含三库角色定位图、依赖关系 DAG、CLI/MCP 数据流时序图以及从 Rust 源码到 npm 制品的完整构建流程。 + + ## 消费者依赖方向 | 消费者 | 依赖方式 | @@ -42,14 +87,29 @@ status: stable | `mcp/` | 通过 `@truenine/memory-sync-sdk` 的最小 binding 访问 prompt 管理能力 | | `gui/src-tauri` | 继续依赖 crate `tnmsc`,其实际路径现在位于 `sdk/` 下 | +消费者依赖方向是**单向**的:`cli/`、`mcp/`、`gui/` 都依赖 `sdk/`。在 `sdk/` 下游,MDX-Compiler 仍完整保留在 `libraries/`,而 Logger / Script-Runtime 已形成“`sdk/src/libraries/*` 作为 TypeScript 事实来源 + `libraries/*` 作为 Rust crate / wrapper 发布层”的边界。Library 之间的依赖为:MDX-Compiler → Logger;Logger 和 Script-Runtime 互不依赖。 + ## 边界规则 - 仓库中的新内部代码不应再把 `cli/` 视为默认的共享 API 入口 - `cli/` 只保留命令入口、兼容导出和发布打包职责 - 当你需要说明实现边界时,应先从 `sdk/` 开始,再看 `cli/`、`mcp/` 或 `gui/` + +**最近更新** — 本次重构的主要成果: + +- **统一的 NAPI Binding 加载器**:将平台映射、候选路径探测、CLI 平台包扫描和错误聚合收敛到 `sdk/src/core/native-binding-loader.ts` +- **Logger / Script-Runtime 内联**:TypeScript facade 已迁入 `sdk/src/libraries/*`,公共包 `@truenine/logger` / `@truenine/script-runtime` 仅保留 wrapper 入口 +- **完整的技术文档体系**:新增 4 个文档页面(Logger、MDX-Compiler、Script-Runtime、架构总览),覆盖 API 参考、架构图、使用示例和最佳实践 +- **标准化的包配置和构建流程**:三个 library 统一收敛到共享加载器 + 包级验证/适配层的职责分工,构建脚本支持增量缓存 + + ## 推荐阅读 +- [SDK 架构总览](/docs/sdk/architecture) — 查看完整的系统架构、三库依赖关系 DAG、数据流时序图和构建流程 +- [Logger 日志库](/docs/sdk/logger) — 了解 AI 友好的日志系统设计与完整 API +- [MDX-Compiler 编译器](/docs/sdk/md-compiler) — 深入 MDX→Markdown 编译流水线、表达式求值与组件机制 +- [Script-Runtime 运行时](/docs/sdk/script-runtime) — 探索代理模块加载、路径验证与 Worker 安全机制 - [技术细节 / 架构边界](/docs/technical-details/architecture):查看 `sdk/`、`cli/`、`mcp/` 和 `gui/` 的完整分层 - [CLI](/docs/cli):查看公共命令入口、安装路径以及面向兼容性的发布表层 - [MCP](/docs/mcp):查看 stdio server 如何消费 `sdk` 的 prompt service diff 
--git a/doc/content/sdk/logger/_meta.ts b/doc/content/sdk/logger/_meta.ts new file mode 100644 index 00000000..f74596bd --- /dev/null +++ b/doc/content/sdk/logger/_meta.ts @@ -0,0 +1,3 @@ +export default { + index: 'index' +} diff --git a/doc/content/sdk/logger/index.mdx b/doc/content/sdk/logger/index.mdx new file mode 100644 index 00000000..93ff7ded --- /dev/null +++ b/doc/content/sdk/logger/index.mdx @@ -0,0 +1,556 @@ +--- +title: Logger 日志库 +description: AI 友好型 Markdown 日志库,提供结构化输出、诊断格式和缓冲机制。基于 Rust 核心实现,通过 N-API 暴露给 Node.js。 +sidebarTitle: Logger 日志库 +status: stable +--- + +import { Callout, Cards, Tabs } from 'nextra/components' + +# Logger 日志库 + + +**包信息** +- **npm 包名**: `@truenine/logger` +- **Rust crate**: `tnmsc-logger` +- **Rust 源码位置**: `libraries/logger/` +- **TypeScript 事实来源**: `sdk/src/libraries/logger.ts` +- **发布壳层**: `libraries/logger/src/index.ts` +- **核心文件**: `libraries/logger/src/lib.rs` (Rust 核心) · `sdk/src/libraries/logger.ts` (TypeScript facade) + + +Logger 是一个 **Rust 驱动的 AI 友好型 Markdown 日志库**,专为 `tnmsc` 工具链设计。它将日志输出格式化为结构化 Markdown(`###` 标题 + 列表),使终端输出对人类可读的同时也能被 AI 工具高效解析。 + +## 核心特性 + + + + 所有日志以 `### Title` 格式输出,元数据使用 Markdown 列表渲染,天然适合 AI 解析和终端展示。 + + + `error` / `warn` / `fatal` 级别支持结构化诊断输入,包含 rootCause、exactFix、possibleFixes、details 四个语义区域。 + + + 即使在 Silent 模式下,诊断记录也会被缓冲到全局队列中,可通过 `drainBufferedDiagnostics()` 批量提取。 + + + 通过 `mpsc::channel` 驱动的独立输出线程,避免 I/O 阻塞主线程,支持显式 flush 同步。 + + + 条件编译 `#[cfg(feature = "napi")]` 暴露 Node.js 绑定层,支持 win32/linux/darwin 五大平台。 + + + +## 架构总览 + +```mermaid +graph TB + subgraph "TypeScript 绑定层 (index.ts)" + A["createLogger()"] --> B["getNapiBinding()"] + B --> C["sdk.createNativeBindingLoader()"] + C --> D["NapiLoggerModule"] + D --> E["createNapiAdapter()"] + end + + subgraph "SDK 共享加载器 (sdk/src/core/native-binding-loader.ts)" + C --> F["平台映射与候选路径"] + F --> G["本地 .node / CLI 二进制包"] + G --> D + end + + subgraph "NAPI Binding 层 (napi_binding mod)" + E --> H["NapiLogger.emit()"] + E --> 
I["NapiLogger.emitDiagnostic()"] + H --> J[Logger.log_message] + I --> K[Logger.log_diagnostic] + end + + subgraph "Rust 核心 (lib.rs)" + J --> L[resolve_log_level] + K --> M[parse_diagnostic_input] + M --> N[validate_diagnostic_input] + N --> O[diagnostic_record_from_input] + J --> P[render_message_output] + K --> Q[render_diagnostic_output] + P --> R[OutputCommand channel] + Q --> R + R --> S["output_worker 线程"] + S --> T[stdout / stderr] + + O --> U[BUFFERED_DIAGNOSTICS] + end + + style A fill:#3b82f6,color:#fff + style E fill:#10b981,color:#fff + style S fill:#f59e0b,color:#fff +``` + +## API 参考 + +### 创建 Logger 实例 + + + +```ts +import { createLogger } from '@truenine/logger' + +const logger = createLogger('my-namespace') +const debugLogger = createLogger('my-namespace', 'debug') +``` + + + +| 参数 | 类型 | 必填 | 说明 | +| --- | --- | --- | --- | +| `namespace` | `string` | ✅ | 命名空间标识,用于区分不同模块的日志来源 | +| `logLevel?` | [`LogLevel`](#loglevel-类型) | ❌ | 初始日志级别,默认由全局级别或环境变量决定 | + +返回值类型为 [`ILogger`](#ilogger-接口)。 + + + + +### LogLevel 类型 + +```ts +type LogLevel = + | 'error' // 优先级: 2 — 错误 + | 'warn' // 优先级: 3 — 警告 + | 'info' // 优先级: 4 — 信息(默认级别) + | 'debug' // 优先级: 5 — 调试 + | 'trace' // 优先级: 6 — 追踪 + | 'fatal' // 优先级: 1 — 致命错误 + | 'silent' // 优先级: 0 — 静默(不输出但缓冲诊断) +``` + +**优先级规则**:数值越小越重要。只有当消息级别的 **优先级 ≤ 当前 logger 级别优先级** 时才会输出。 + +``` +Silent(0) < Fatal(1) < Error(2) < Warn(3) < Info(4) < Debug(5) < Trace(6) +``` + +### ILogger 接口 + +ILogger 提供六个日志方法,分为两类: + +| 方法 | 参数类型 | 输出目标 | 说明 | +| --- | --- | --- | --- | +| `info(message, ...meta)` | `string \| object, ...unknown[]` | stdout | 普通信息日志 | +| `debug(message, ...meta)` | `string \| object, ...unknown[]` | stdout | 调试日志 | +| `trace(message, ...meta)` | `string \| object, ...unknown[]` | stdout | 追踪日志 | +| `error(diagnostic)` | [`LoggerDiagnosticInput`](#loggerdiagnosticinput) | stderr | 结构化错误诊断 | +| `warn(diagnostic)` | [`LoggerDiagnosticInput`](#LoggerDiagnosticInput) | stderr | 结构化警告诊断 | +| `fatal(diagnostic)` | 
[`LoggerDiagnosticInput`](#LoggerDiagnosticInput) | stderr | 结构化致命错误诊断 | + +> **注意**:`info` / `debug` / `trace` 接受普通消息字符串;`error` / `warn` / `fatal` 接受结构化的诊断对象。 + +### 全局函数 + + + +```ts +// 设置全局日志级别(影响所有后续创建的 logger) +setGlobalLogLevel(level: LogLevel): void + +// 获取当前全局日志级别 +getGlobalLogLevel(): LogLevel | undefined + +// 清空已缓冲的诊断记录 +clearBufferedDiagnostics(): void + +// 提取并清空所有缓冲的诊断记录 +drainBufferedDiagnostics(): LoggerDiagnosticRecord[] + +// 强制刷新输出 worker 的缓冲区 +flushOutput(): void +``` + + +**setGlobalLogLevel**: 使用原子操作 (AtomicU8) 设置全局级别,立即生效 + +**getGlobalLogLevel**: 返回当前全局级别,未设置时返回 undefined + +**clearBufferedDiagnostics**: 清空全局诊断缓冲区 + +**drainBufferedDiagnostics**: 以 JSON 数组形式返回所有缓冲记录并清空缓冲区 + +**flushOutput**: 通过 OutputCommand::Flush 与输出 worker 同步,等待 ack 确认完成 + + + +### 类型定义 + +#### LoggerDiagnosticInput + +诊断错误的输入结构,用于 `error()` / `warn()` / `fatal()` 方法: + +```ts +interface LoggerDiagnosticInput { + readonly code: string // 错误代码标识 + readonly title: string // 显示标题(用于 ### 渲染) + readonly rootCause: DiagnosticLines // 必填,问题描述(至少一行) + readonly exactFix?: DiagnosticLines // 可选,精确修复步骤 + readonly possibleFixes?: readonly DiagnosticLines[] // 可选,备选修复方案列表 + readonly details?: Record // 可选,附加上下文数据 +} + +type DiagnosticLines = readonly [string, ...string[]] // 至少包含一个元素的只读元组 +``` + +#### LoggerDiagnosticRecord + +经过验证和构建后的完整诊断记录: + +```ts +interface LoggerDiagnosticRecord extends LoggerDiagnosticInput { + readonly level: LoggerDiagnosticLevel // 实际日志级别 ('error' | 'warn' | 'fatal') + readonly namespace: string // 来源命名空间 + readonly copyText: DiagnosticLines // 可复制文本版本(纯 Markdown 行数组) +} +``` + +--- + +## Rust 核心实现说明 + +### 日志级别优先级系统 + +`LogLevel` 枚举定义了 7 个级别,通过 `priority()` 方法映射为数值: + +| Level | Priority | 含义 | +| --- | --- | --- | +| `Silent` | 0 | 静默模式,不输出任何内容但仍缓冲诊断 | +| `Fatal` | 1 | 致命错误 | +| `Error` | 2 | 一般错误 | +| `Warn` | 3 | 警告 | +| `Info` | 4 | 信息(默认级别) | +| `Debug` | 5 | 调试信息 | +| `Trace` | 6 | 最详细的追踪信息 | + +级别解析遵循以下优先链(见 `resolve_log_level()`): + +``` +显式参数 > 
全局级别 > LOG_LEVEL 环境变量 > Info(默认) +``` + +### 输出格式化机制 + +所有日志输出均被格式化为 **Markdown** 格式: + +#### 普通消息 (`render_message_output()`) + +```markdown +### 你的消息标题 + +- key: value # meta 数据以列表形式展示 +``` + +当消息包含换行时,首行作为标题,其余部分作为正文块: + +```markdown +### 第一行标题 + +第二行内容 +第三行内容 +``` + +#### 诊断输出 (`render_diagnostic_output()`) + +诊断记录被渲染为四个语义化区块: + +```markdown +### 诊断标题 + +**What happened** + - 这里描述发生了什么问题 + +**Do this** + - 这是精确的修复步骤 + +**Try this if needed** + 1. 备选方案一的第一步 + 备选方案一的延续 + +**Context** + - path: /some/file.json + - phase: cleanup +``` + +### 诊断记录验证和构建逻辑 + +诊断输入经过两阶段处理: + +1. **反序列化** — 将 JSON `Value` 解析为 `LoggerDiagnosticInput` +2. **验证** — `validate_diagnostic_input()` 检查: + - `code` 和 `title` 必须是非空字符串 + - `rootCause` 必须至少包含一行 + - `exactFix` 如果存在则不能为空 + - `possibleFixes` 如果存在则必须包含至少一个非空条目 + +验证失败时,自动生成一个 `LOGGER_DIAGNOSTIC_SCHEMA_INVALID` 记录,将原始 payload 和验证错误嵌入 `details` 字段(见 `invalid_diagnostic_record`)。 + +### NAPI Binding 层 + +NAPI 绑定通过条件编译 `#[cfg(feature = "napi")]` 启用(见 `napi_binding` mod),主要职责: + +- **`NapiLogger`** 结构体包装内部 `Logger`,暴露两个方法: + - `emit(level, message, meta?)` — 处理普通消息日志 + - `emit_diagnostic(level, diagnostic)` — 处理诊断日志 +- **全局函数桥接**:`create_logger`、`set_global_log_level`、`get_global_log_level`、`clear_buffered_diagnostics`、`drain_buffered_diagnostics`、`flush_output` +- **参数归一化**:`normalize_message_payload()` 和 `normalize_json_value()` 确保 JS 传入的数据能正确映射到 Rust 的 `Value` 类型 + +### 异步 Output Worker 机制 + +输出不直接写入 stdout/stderr,而是通过 `mpsc::channel` 发送到独立的 **output_worker 线程**(见 `spawn_output_sink()`): + +```rust +enum OutputCommand { + Write { use_stderr: bool, output: String }, // 写入命令 + Flush { ack: Sender<()> }, // 刷新同步命令 +} +``` + +- **Write 命令**:根据 `use_stderr` 选择 stdout 或 stderr,使用 `BufWriter` 缓冲写入 +- **Flush 命令**:刷新两个 writer 并通过 `ack` channel 通知调用方完成 +- 当 channel 发送失败时(如 worker 已退出),回退到直接写入(`print_output_direct()`) + +**stderr 分发规则**(见 `writes_to_stderr()`):`Error`、`Fatal`、`Warn` 三个级别写入 stderr;其余级别写入 stdout。 + +--- + +## TypeScript 绑定层说明 + +### 
共享 Native Binding 加载 + +Logger 的 TypeScript facade 已迁到 `sdk/src/libraries/logger.ts`,`libraries/logger/src/index.ts` 现在只是一个薄 re-export wrapper。SDK 内联实现不再复制平台检测、本地候选路径和 CLI 平台包扫描逻辑,只保留三件 Logger 特有的事情: + +- `isNapiLoggerModule()`:校验 native export 的形状 +- `createNativeBindingLoader(...)`:声明 `packageName`、`binaryName` 和 `cliExportName` +- `createNapiAdapter()`:把 `NapiLoggerInstance` 适配为公共 `ILogger` + +真正的平台后缀映射、本地 `.node` 候选路径、CLI 平台包探测、缓存与聚合错误格式化,已经统一收敛到 `sdk/src/core/native-binding-loader.ts`。 + +统一后的好处是,新增平台、调整候选路径或修复包内扫描逻辑时,只需要改 `sdk/` 里的一个实现,Logger 自己不再维护一份平行副本。 + +### 适配器工厂模式 + +`createNapiAdapter()` 将底层 `NapiLoggerInstance` 转换为公共 `ILogger` 接口: + +- **普通日志方法**(info/debug/trace):通过 `createLogMethod` 工厂创建,将可变参数打包后传给 `instance.emit()` +- **诊断日志方法**(error/warn/fatal):通过 `createDiagnosticMethod` 工厂创建,直接传给 `instance.emitDiagnostic()` + +--- + +## 使用示例 + +### 示例 1:基本日志记录 + +演示 info、debug、trace 三种普通日志的使用方式: + +```ts +import { createLogger } from '@truenine/logger' + +const logger = createLogger('example-app') + +// info — 默认可见 +logger.info('应用启动成功') +// 输出: ### 应用启动成功 + +// info — 带 meta 对象 +logger.info('处理请求', { method: 'GET', path: '/api/users', duration: 42 }) +// 输出: +// ### 处理请求 +// +// - method: GET +// - path: /api/users +// - duration: 42 + +// debug — 需要日志级别 >= debug 才可见 +logger.debug('查询数据库', { table: 'users', query: 'SELECT * FROM users' }) + +// trace — 最详细级别 +logger.trace('进入函数', { fn: 'processRequest', args: ['req', 'res'] }) +``` + +### 示例 2:诊断错误输出 + +演示 error、warn、fatal 三种结构化诊断日志: + +```ts +import { createLogger } from '@truenine/logger' + +const logger = createLogger('config-loader') + +// error — 完整诊断(含所有字段) +logger.error({ + code: 'CONFIG_FILE_NOT_FOUND', + title: '配置文件不存在', + rootCause: [ + '指定的配置文件路径下没有找到有效的配置文件', + '程序需要 config.json 来初始化运行时环境' + ], + exactFix: [ + '在项目根目录创建 config.json 文件', + '或使用 --config-path 指定正确的配置文件位置' + ], + possibleFixes: [ + ['从版本控制恢复已删除的 config.json'], + ['运行 init 命令重新生成默认配置'] + ], + details: { + searchedPaths: 
['./config.json', '/etc/app/config.json'], + expectedFormat: 'JSON' + } +}) +// 输出: +// ### 配置文件不存在 +// +// **What happened** +// - 指定的配置文件路径下没有找到有效的配置文件 +// - 程序需要 config.json 来初始化运行时环境 +// +// **Do this** +// - 在项目根目录创建 config.json 文件 +// - 或使用 --config-path 指定正确的配置文件位置 +// +// **Try this if needed** +// 1. 从版本控制恢复已删除的 config.json +// 运行 init 命令重新生成默认配置 +// +// **Context** +// - searchedPaths: +// - ./config.json +// - /etc/app/config.json +// - expectedFormat: JSON + +// warn — 简洁警告 +logger.warn({ + code: 'DEPRECATED_OPTION', + title: '使用了已弃用的选项', + rootCause: ['--legacy-flag 将在下个主版本中被移除'], + exactFix: ['改用 --new-flag 替代'] +}) + +// fatal — 致命错误 +logger.fatal({ + code: 'RUNTIME_PANIC', + title: '运行时发生不可恢复错误', + rootCause: ['内存分配失败,无法继续执行'], + exactFix: ['检查系统可用内存并重启应用'], + details: { allocatedMB: 2048, limitMB: 2048 } +}) +``` + +### 示例 3:全局级别控制和诊断缓冲 + +演示如何利用全局级别控制和诊断缓冲机制进行集中式日志管理: + +```ts +import { + createLogger, + setGlobalLogLevel, + getGlobalLogLevel, + clearBufferedDiagnostics, + drainBufferedDiagnostics, + flushOutput +} from '@truenine/logger' + +// 1. 设置全局级别为 warn — 只显示 error/warn/fatal +setGlobalLogLevel('warn') +console.log(getGlobalLogLevel()) // 'warn' + +const logger = createLogger('batch-processor') + +// 这些不会输出到终端(级别低于 warn) +logger.info('开始批处理') // 被过滤 +logger.debug('读取第 1 条记录') // 被过滤 +logger.trace('解析 JSON payload') // 被过滤 + +// 但诊断会被缓冲! +logger.error({ + code: 'PARSE_ERROR', + title: 'JSON 解析失败', + rootCause: ['第 42 行存在语法错误'], + exactFix: ['修正 JSON 格式后重试'] +}) + +logger.warn({ + code: 'SLOW_QUERY', + title: '查询耗时过长', + rootCause: ['查询执行时间超过 5000ms'], + possibleFixes: [['添加索引优化查询性能']] +}) + +// 2. 提取所有缓冲的诊断记录(包括 silent 模式下的) +const diagnostics = drainBufferedDiagnostics() +console.log(`捕获了 ${diagnostics.length} 条诊断记录`) +// 输出: 捕获了 2 条诊断记录 + +diagnostics.forEach(d => { + console.log(`[${d.level}] ${d.code}: ${d.title}`) + console.log(` 命名空间: ${d.namespace}`) + console.log(` 可复制文本:\n${d.copyText.join('\n')}`) +}) + +// 3. 
刷新确保所有输出已写入 +flushOutput() + +// 4. 清空缓冲区 +clearBufferedDiagnostics() +console.log(drainBufferedDiagnostics().length) // 0 +``` + +--- + +## 配置选项 + +### LOG_LEVEL 环境变量 + +可以通过 `LOG_LEVEL` 环境变量设置默认日志级别(在不传显式参数且未设置全局级别时生效): + +```bash +# bash / zsh +export LOG_LEVEL=debug +LOG_LEVEL=trace tnmsc run + +# Windows PowerShell +$env:LOG_LEVEL = "warn" +``` + +环境变量的解析使用 `from_str_loose()` 函数,支持大小写不敏感匹配。 + +### 命名空间 (Namespace) + +命名空间是每个 Logger 实例的身份标识,作用包括: + +- 在诊断记录的 `namespace` 字段中标明来源 +- 在缓冲诊断时用于分类和筛选 +- 便于在大型项目中定位日志来源 + +```ts +// 推荐:按功能模块划分命名空间 +const dbLogger = createLogger('database') +const httpLogger = createLogger('http-server') +const pluginLogger = createLogger('plugin-system') +``` + + +**命名空间最佳实践** +- 使用小写字母和连字符(如 `plugin-pipeline`) +- 保持粒度一致——同一模块内共享同一命名空间 +- 避免过于笼统的名称(如 `app` 或 `logger`) + + +--- + +## Rust 宏速览 + +Rust 侧提供了四个便捷宏: + +| 宏 | 用途 | 示例 | +| --- | --- | --- | +| `log_info!` | 快速输出 info 日志 | `log_info!(logger, "启动完成")` | +| `log_info!` | 带 meta 的 info | `log_info!(logger, "处理", json!({"count": 10}))` | +| `log_error!` | 输出错误诊断 | `log_error!(logger, diagnostic_input)` | +| `log_warn!` | 输出警告诊断 | `log_warn!(logger, diagnostic_input)` | +| `log_debug!` | 快速输出 debug 日志 | `log_debug!(logger, "变量值", json!({"x": 42}))` | diff --git a/doc/content/sdk/md-compiler/_meta.ts b/doc/content/sdk/md-compiler/_meta.ts new file mode 100644 index 00000000..f74596bd --- /dev/null +++ b/doc/content/sdk/md-compiler/_meta.ts @@ -0,0 +1,3 @@ +export default { + index: 'index' +} diff --git a/doc/content/sdk/md-compiler/index.mdx b/doc/content/sdk/md-compiler/index.mdx new file mode 100644 index 00000000..387f5474 --- /dev/null +++ b/doc/content/sdk/md-compiler/index.mdx @@ -0,0 +1,965 @@ +--- +title: MDX-Compiler 编译器 +description: MDX/Markdown 编译与转换引擎的完整技术文档,涵盖核心 API、编译器流水线、组件注册机制、TOML artifact 构建以及共享 Native binding 加载策略。 +sidebarTitle: MDX-Compiler +status: stable +--- + +import { Callout, Cards, Steps, Tabs } from 'nextra/components' + +# MDX-Compiler 编译器 + + + 
**包标识**: `@truenine/md-compiler` (npm) / `tnmsc-md-compiler` (Rust crate) + **源码位置**: `libraries/md-compiler/` + **导出入口**: `src/index.ts` + + +## 概述 + +MDX-Compiler 是 memory-sync monorepo 中的 **Rust-first MDX/Markdown 编译与转换引擎**。它负责将 MDX 源码编译为纯 Markdown 文本,同时提供表达式求值、JSX 组件处理、导出元数据提取以及 TOML artifact 构建等核心能力。 + +### 核心能力 + +| 能力 | 说明 | +| --- | --- | +| **MDX → Markdown 转换** | 将包含 JSX、表达式插值、ESM 导出的 MDX 文件编译为标准 Markdown | +| **表达式求值** | 在受控作用域内安全求值 JavaScript 表达式(`{expression}`) | +| **JSX 组件处理** | 通过 `ComponentRegistry` 机制注册和处理内置/自定义组件 | +| **导出元数据解析** | 从 `export const` / `export default` 语句中静态提取 frontmatter 字段 | +| **TOML Artifact 构建** | 为 Prompt 工程场景构建结构化的 TOML 文档 | + +### 设计哲学:Rust-first with Shared Loader + +MDX-Compiler 当前采用 **Native-first 入口 + TypeScript 参考实现** 的结构: + +1. **Native Binding 层(Rust/NAPI)**:核心编译逻辑由 Rust crate `tnmsc-md-compiler` 实现,通过 NAPI-RS 暴露给 Node.js。公开包入口默认走这条路径。 +2. **TypeScript 参考实现(`src/compiler/`)**:仓库内保留完整的 TypeScript 编译流水线,用于源级测试、实现对照和调试,但公共包入口不会自动切换到它。 + +运行时入口通过 `native-binding.ts` 中的 `getNapiMdCompilerBinding()` 接入 `sdk/src/core/native-binding-loader.ts`: + +- 优先加载平台匹配的 `.node` 二进制文件 +- 支持从 CLI binary package (`@truenine/memory-sync-cli-`) 中提取 binding +- `TNMSC_DISABLE_NATIVE_BINDING=1` 会让公共入口快速失败,用于验证缺失 native binding 的错误路径 + + + + ```text + win32-x64 → napi-md-compiler.win32-x64-msvc + linux-x64 → napi-md-compiler.linux-x64-gnu + linux-arm64 → napi-md-compiler.linux-arm64-gnu + darwin-arm64→ napi-md-compiler.darwin-arm64 + darwin-x64 → napi-md-compiler.darwin-x64 + ``` + + + ```bash + # 强制尝试 native binding + TNMSC_FORCE_NATIVE_BINDING=1 + + # 禁用 native binding 并验证错误路径 + TNMSC_DISABLE_NATIVE_BINDING=1 + ``` + + + +## 核心 API + +### `mdxToMd` — 主编译函数 + +`mdxToMd()` 是库的主入口函数,将 MDX 源码字符串编译为 Markdown。 + +```typescript +// 签名 1:基本用法,返回纯字符串 +function mdxToMd( + content: string, + options?: MdxToMdOptions & { extractMetadata?: false } +): Promise + +// 签名 2:带元数据提取,返回结构化结果 +function mdxToMd( + content: string, + options: MdxToMdOptions & { 
extractMetadata: true } +): Promise +``` + +#### MdxToMdOptions 参数说明 + +| 参数 | 类型 | 默认值 | 说明 | +| --- | --- | --- | --- | +| `scope` | `EvaluationScope` | `{}` | 自定义求值作用域变量,会与 globalScope 合并(自定义优先) | +| `basePath` | `string \| undefined` | — | 文件解析的基础路径 | +| `filePath` | `string \| undefined` | — | 源文件完整路径,用于错误诊断信息 | +| `globalScope` | `MdxGlobalScope \| undefined` | — | 全局作用域(含 `os`、`env`、`profile`、`codeStyles`、`tool`) | +| `extractMetadata` | `boolean` | `false` | 是否提取并返回导出元数据 | + +#### MdxToMdResult 返回类型 + +```typescript +interface MdxToMdResult { + /** 编译后的 Markdown 内容 */ + content: string + /** 从 export 语句和 YAML frontmatter 中提取的元数据 */ + metadata: ExportMetadata +} + +interface ExportMetadata { + /** 提取的字段键值对 */ + fields: Record + /** 元数据来源类型 */ + source: 'export' | 'yaml' | 'mixed' +} +``` + +### `buildPromptTomlArtifact` — Prompt TOML 构建 + +`buildPromptTomlArtifact()` 用于构建面向 Prompt 工程场景的结构化 TOML 文档。 + +```typescript +function buildPromptTomlArtifact( + options: BuildPromptTomlArtifactOptions +): string +``` + +#### BuildPromptTomlArtifactOptions 参数说明 + +| 参数 | 类型 | 必填 | 说明 | +| --- | --- | --- | --- | +| `content` | `string` | ✅ | MDX 编译后的正文内容 | +| `bodyFieldName` | `string` | ✅ | 正文字段在 TOML 中的字段名 | +| `frontMatter` | `Readonly>` | ❌ | 额外的 frontmatter 键值对 | +| `fieldNameMap` | `Readonly>` | ❌ | 字段名映射表(原字段名 → TOML 字段名) | +| `excludedKeys` | `readonly string[]` | ❌ | 需要排除的字段名列表 | +| `extraFields` | `Readonly>` | ❌ | 额外注入的自定义字段 | +| `fieldOrder` | `readonly string[]` | ❌ | TOML 字段输出顺序 | + +### `buildTomlDocument` — 通用 TOML 文档构建 + +```typescript +function buildTomlDocument( + value: Readonly>, + options?: BuildTomlDocumentOptions +): string + +interface BuildTomlDocumentOptions { + readonly fieldOrder?: readonly string[] +} +``` + +### 编译器内部 API(低级接口) + +以下 API 通过 `src/compiler/index.ts` 导出,适用于需要精细控制编译流程的场景: + +| API | 来源文件 | 说明 | +| --- | --- | --- | +| `parseMdx(source)` | `parser.ts` | 将 MDX 字符串解析为 MDAST(unified + remark 插件链) | +| `evaluateExpression(expr, 
scope, opts?)` | `expression-eval.ts` | 在给定作用域中求值 JS 表达式 | +| `evaluateJsxExpression(node, ctx, processAstFn)` | `jsx-expression-eval.ts` | 求值可能包含 JSX 元素的表达式 | +| `processComponent(element, ctx, processAstFn)` | `component-processor.ts` | 处理已注册的内置组件 | +| `registerComponent(name, handler)` | `component-registry.ts` | 注册自定义组件处理器 | +| `hasComponent(name)` / `getComponents()` / `clearComponents()` | `component-registry.ts` | 组件注册表的查询与管理 | +| `parseExports(esmNodes, options?)` | `export-parser.ts` | 从 ESM 节点中解析导出元数据 | +| `isStaticallyEvaluable(valueStr)` | `export-parser.ts` | 检查值是否可静态求值 | +| `processAst(ast, ctx)` | `transformer.ts` | 执行完整的 AST 转换流水线 | + +## 编译器流水线 + +MDX-Compiler 采用多阶段流水线架构,每个阶段职责清晰、可独立测试。以下是完整的编译流程: + +```mermaid +flowchart TD + A["MDX Source
(原始字符串)"] --> B["Parser 阶段<br/>parseMdx()"] + B --> C["MDAST Root<br/>(带 MDX 扩展节点)"] + + C --> D{"extractMetadata?<br/>选项检查"} + D -- 是 --> E["Export Parse 阶段<br/>parseExports()"] + E --> F["合并元数据<br/>(YAML + ESM exports)"] + D -- 否 --> G + + C --> H["Transform 阶段<br/>processAst() / transformNodes()"] + F --> H + + H --> I{"节点类型分发"} + + I -- "mdxjsEsm" --> J["移除<br/>(导入语句不进入输出)"] + I -- "mdxFlowExpression /<br/>mdxTextExpression" --> K["Expression Eval 阶段<br/>evaluateExpression()"] + K --> K1{"包含 JSX?"} + K1 -- 是 --> K2["JSX Expression Eval<br/>evaluateJsxExpression()"] + K1 -- 否 --> K3["直接求值返回文本"] + K2 --> L + + I -- "mdxJsxFlowElement /<br/>mdxJsxTextElement" --> M{"是否已注册组件?"} + M -- 是 --> N["Component Process 阶段<br/>processComponent()"] + M -- 否 --> O{"是否 intrinsic 元素?"} + O -- 是 --> P["保留为 HTML<br/>renderIntrinsicElement()"] + O -- 否 --> Q["JSX Convert 阶段<br/>convertJsxToMarkdown()"] + + N --> L + K3 --> L + P --> L + Q --> L + J --> L + + L --> R["remark-stringify<br/>(AST → Markdown 文本)"] + R --> S["Markdown Output
(最终输出)"] + + style A fill:#e1f5fe + style S fill:#e8f5e9 + style B fill:#fff3e0 + style H fill:#fce4ec + style K fill:#f3e5f5 + style N fill:#ede7f6 + style R fill:#e0f2f1 +``` + +### 阶段详解 + +#### 1. Parser 阶段 — MDX AST 构建 + +`parser.ts` 使用 **unified** 处理器链构建 MDAST: + +```typescript +// 解析器插件链 +const processor = unified() + .use(remarkParse) // 标准 Markdown 解析 + .use(remarkGfm) // GFM 扩展(表格、任务列表、删除线) + .use(remarkFrontmatter, ['yaml']) // YAML frontmatter 支持 + .use(remarkMdx) // MDX 扩展(JSX、表达式、ESM) +``` + +生成的 AST 节点类型包括: +- 标准 MDAST 节点:`heading`, `paragraph`, `code`, `list`, `link` 等 +- GFM 扩展节点:`table`, `taskList`, `strikethrough` +- MDX 扩展节点:`mdxJsxFlowElement`, `mdxJsxTextElement`, `mdxFlowExpression`, `mdxTextExpression`, `mdxjsEsm` +- Frontmatter 节点:`yaml` + +#### 2. Expression Eval 阶段 — 表达式求值 + +`expression-eval.ts` 负责在受控作用域内求值 JavaScript 表达式。 + +**求值策略分两层**: + +1. **简单引用路径**(`identifier` 或 `a.b.c` 形式):直接在 scope 对象上做属性查找,提供精确的错误信息 +2. **复杂表达式**(含运算符、函数调用等):使用 `new Function()` 构造器在隔离环境中执行 + +```typescript +// 简单引用:直接属性查找 +const SIMPLE_REFERENCE_PATTERN = /^[a-z_$][\w$]*(?:\.[a-z_$][\w$]*)*$/iu + +// 复杂表达式:Function 构造器 +const fn = new Function(...scopeKeys, `return (${expression})`) +const result = fn(...scopeValues) +``` + +支持的 MDX 表达式节点类型: + +| 节点类型 | 出现位置 | 示例 | +| --- | --- | --- | +| `MdxFlowExpression` | 块级表达式 `{...}` | `{title}` 在段落级别 | +| `MdxTextExpression` | 行内表达式 `{...}` | `Hello {name}!` 在文本中 | +| `MdxJsxAttributeValueExpression` | JSX 属性表达式 | `` | + +对于表达式中嵌入 JSX 的情况(如 `{condition && content}`),流转至 `jsx-expression-eval.ts` 处理,它基于 estree AST 递归求值,支持逻辑运算符 (`&&`, `||`, `??`)、条件表达式 (`?:`)、序列表达式 (`,`) 和数组展开。 + +#### 3. 
Component Process 阶段 — JSX 组件处理 + +`component-processor.ts` 和 `component-registry.ts` 共同实现组件处理机制。 + +**ComponentHandler 类型签名**: + +```typescript +type ComponentHandler = ( + element: MdxJsxFlowElement | MdxJsxTextElement, + ctx: ProcessingContext, + processChildren: (children: RootContent[], ctx: ProcessingContext) => Promise +) => Promise +``` + +每个组件处理器接收三个参数: +- `element`: 原始 JSX 元素节点 +- `ctx`: 处理上下文(含作用域、组件映射、调用栈) +- `processChildren`: 递归处理子节点的函数 + +**内置组件**:当前注册的唯一内置组件是 `Md`,它是一个条件渲染包装器: + +```tsx + + 仅当条件为真时渲染的内容 + + + + 行内条件文本 + +``` + +`Md` 组件的 `when` 属性支持: +- 字面量布尔值:`when="true"` / `when="false"` +- 表达式求值:`when={profile.role === 'admin'}` + +**循环依赖检测**:`ProcessingContext.processingStack` 维护组件调用栈,检测到循环引用时抛出明确错误。 + +#### 4. Export Parse 阶段 — 元数据提取 + +`export-parser.ts` 从 MDX 的 ESM (`mdxjsEsm`) 节点中静态分析并提取元数据。 + +**支持的导出模式**: + +```mdx +// 模式 1: 单个命名导出 +export const title = "My Document" + +// 模式 2: 对象展开导出(metadata 特殊键) +export const metadata = { name: "test", enabled: true } + +// 模式 3: default 导出(对象会被展开) +export default { title: "Default Title", version: 2 } +``` + +**静态求值支持的字面量类型**: + +| 类型 | 示例 | 说明 | +| --- | --- | --- | +| 字符串 | `"hello"`, `'world'`, `` `template` `` | 支持转义序列 | +| 数字 | `42`, `3.14`, `-10` | 含负数和小数 | +| 布尔值 | `true`, `false` | — | +| 空值 | `null` | — | +| 数组 | `[1, "two", true]` | 支持嵌套和变量引用 | +| 对象 | `{ key: "value" }` | 支持嵌套和变量引用 | +| 变量引用 | `tool.readFile`, `profile.name` | 从 scope 中解析 | + +**元数据来源判定规则**: + +```mermaid +flowchart LR + A[存在 export 字段?] -->|是| B{存在 YAML?} + A -->|否| C{存在 YAML?} + B -->|是| D["source = 'mixed'
export 优先合并"] + B -->|否| E["source = 'export'"] + C -->|是| F["source = 'yaml'"] + C -->|否| G["source = 'yaml'
(空 fields)"] +``` + +#### 5. JSX Convert 阶段 — JSX → Markdown 转换 + +`jsx-converter.ts` 将未被组件注册表捕获的标准 HTML/JSX 元素转换为等效的 Markdown AST 节点。 + +**支持的元素转换映射**: + +| JSX 元素 | 目标 Markdown 节点 | 说明 | +| --- | --- | --- | +| `
<pre>` | `Code` (fenced code block) | 从 `className` 提取语言标识 |
+| `<a>` | `Link` | 映射 `href` / `title` 属性 |
+| `<strong>` / `<b>` | `Strong` | 加粗文本 |
+| `<em>` / `<i>` | `Emphasis` | 斜体文本 |
+| `<img>` | `Image` | 映射 `src` / `alt` / `title` |
+| `<blockquote>` | `Blockquote` | 引用块 | + +**Intrinsic 元素保留机制**:对于以小写字母开头或包含连字符的元素名(如 `
`, ``, ``),转换器不会尝试转换为 Markdown,而是保留为原始 HTML 标签输出。属性中的表达式也会被求值后序列化。 + +#### 6. Transform 阶段 — 最终输出生成 + +`transformer.ts` 是流水线的编排层,递归遍历 AST 并按节点类型分发到上述各阶段处理器。 + +关键行为: +- **源码感知渲染 (Source-Aware Rendering)**:当 AST 节点包含有效的 position 信息且拥有 `sourceText` 时,采用源码切片 + 子节点替换的策略,最大程度保留原始格式 +- **链接文本简化**:形如 `path/to/file.ext` 的链接文本自动简化为 `file.ext` +- **注释表达式过滤**:`{/* comment */}` 形式的表达式被静默移除 +- **Markdown 序列化**:使用 `remark-stringify` 将处理后的 AST 转换为 Markdown 字符串,配置为 GFM 兼容输出 + +## 依赖关系 + +### 内部依赖 + + + MDX-Compiler 通过 workspace 依赖使用 `tnmsc-logger` 库进行编译过程中的日志输出。 + + +### 外部依赖 (devDependencies) + +| 包名 | 用途 | 使用位置 | +| --- | --- | --- | +| `unified` | 统一文本处理框架 | Parser、Transformer 的处理器基础 | +| `remark-parse` | Markdown → MDAST 解析器 | Parser 阶段 | +| `remark-gfm` | GitHub Flavored Markdown 支持 | Parser、Stringify 阶段 | +| `remark-frontmatter` | YAML frontmatter 解析 | Parser 阶段 | +| `remark-mdx` | MDX 语法扩展(JSX/表达式/ESM) | Parser 阶段 | +| `remark-stringify` | MDAST → Markdown 序列化 | Transform 输出阶段 | +| `mdast-util-mdx` | MDX 节点类型定义 | 类型系统全层 | +| `yaml` | YAML 解析与序列化 | Frontmatter 处理、Export Parser | +| `@types/estree` / `@types/estree-jsx` | ESTree AST 类型定义 | JSX Expression Eval | +| `@types/mdast` | MDAST 类型定义 | 类型系统 | + +### Rust 依赖 (Cargo.toml) + +| Crate | 用途 | +| --- | --- | +| `tnmsc-logger` | Workspace 日志库 | +| `serde` / `serde_json` | JSON 序列化(NAPI 参数传递) | +| `serde_yml` | YAML 解析(frontmatter 处理) | +| `markdown` | markdown-rs 解析器(Rust 端 MDX 解析) | +| `json5` | JSON5 格式支持 | +| `napi` / `napi-derive` | NAPI-RS 绑定层(feature-gated) | +| `regex-lite` | 轻量正则(frontmatter 提取等) | + +## 全局作用域 (Global Scope) + +MDX 表达式可以访问预定义的全局作用域对象,类型定义位于 `globals/index.ts`: + +```typescript +interface MdxGlobalScope { + profile: UserProfile // 用户档案信息 + codeStyles: CodeStylePreferences // 代码风格偏好 + tool: ToolReferences // AI 工具名称引用 + env: EnvironmentContext // 环境变量上下文 + os: OsInfo // 操作系统信息 + Md: MdComponent // 条件渲染组件 +} +``` + +### ToolPresets 预设 + +库提供了三组工具名称预设,适配不同的 AI 工具环境: + +```typescript +import { ToolPresets 
} from '@truenine/md-compiler/globals' + +// 默认预设 (snake_case) +ToolPresets.default +// → { websearch: 'web_search', readFile: 'read_file', ... } + +// Claude Code CLI 预设 (PascalCase) +ToolPresets.claudeCode +// → { readFile: 'Read', writeFile: 'Write', executeCommand: 'Execute', ... } + +// Kiro 预设 (camelCase) +ToolPresets.kiro +// → { readFile: 'readFile', websearch: 'remote_web_search', ... } +``` + +## 使用示例 + +### 示例 1:基本 MDX 转 Markdown + +最简单的用法——传入 MDX 字符串,获取编译后的 Markdown: + +```typescript +import { mdxToMd } from '@truenine/md-compiler' + +const mdxSource = ` +# Hello MDX + +Welcome to **MDX-Compiler**! + +This is a {typeof mdx === 'undefined' ? 'compiled' : 'raw'} document. + +- Item 1 +- Item 2 +- Item 3 +`.trim() + +const markdown = await mdxToMd(mdxSource) + +console.log(markdown) +// 输出: +// # Hello MDX +// +// Welcome to **MDX-Compiler**! +// +// This is a compiled document. +// +// - Item 1 +// - Item 2 +// - Item 3 +``` + +### 示例 2:带自定义选项的编译 + +使用自定义作用域、全局作用域和文件路径进行编译: + +```typescript +import { mdxToMd, MdxToMdOptions } from '@truenine/md-compiler' + +const mdxSource = ` +# Profile Page + +Hello, {profile.name}! + +Your OS is {os.platform} ({os.arch}). + +Environment: {env.NODE_ENV ?? 'development'} + + + > Admin-only content visible here. 
+ + +Tools available: {tool.readFile}, {tool.writeFile} +`.trim() + +const options: MdxToMdOptions = { + filePath: '/templates/profile.mdx', + basePath: '/templates', + globalScope: { + profile: { name: 'Alice', role: 'admin', username: 'alice' }, + codeStyles: { indent: 'space', tabSize: 2 }, + tool: { readFile: 'read_file', writeFile: 'write_file' }, + env: { NODE_ENV: 'production' }, + os: { platform: 'linux', arch: 'x64', kind: 'linux' as const } + }, + scope: { + customVar: 'custom value' + } +} + +const result = await mdxToMd(mdxSource, { ...options, extractMetadata: true }) + +console.log(result.content) +console.log(result.metadata) +// metadata: +// { +// fields: {}, +// source: 'yaml' +// } +``` + +### 示例 3:TOML Artifact 构建 (Prompt 工程场景) + +构建用于 AI Prompt 的结构化 TOML 文档: + +```typescript +import { mdxToMd, buildPromptTomlArtifact } from '@truenine/md-compiler' + +const promptContent = ` +# System Prompt + +You are a helpful assistant. + +## Instructions + +- Be concise +- Use Markdown formatting +`.trim() + +const tomlOutput = buildPromptTomlArtifact({ + content: promptContent, + bodyFieldName: 'prompt_content', + frontMatter: { + role: 'system', + model: 'claude-sonnet-4-20250514', + temperature: 0.7, + max_tokens: 4096 + }, + fieldNameMap: { + role: 'role', + temperature: 'temperature', + max_tokens: 'max_tokens' + }, + excludedKeys: ['internal_note'], + extraFields: { + version: '1.0.0', + generated_at: new Date().toISOString() + }, + fieldOrder: ['role', 'model', 'temperature', 'max_tokens', 'prompt_content', 'version', 'generated_at'] +}) + +console.log(tomlOutput) +// 输出类似: +// role = "system" +// model = "claude-sonnet-4-20250514" +// temperature = 0.7 +// max_tokens = 4096 +// version = "1.0.0" +// generated_at = "2026-04-12T..." +// +// prompt_content = """ +// # System Prompt +// +// You are a helpful assistant. 
+// +// ## Instructions +// +// - Be concise +// - Use Markdown formatting +// """ +``` + +### 示例 4:导出元数据解析 + +从 MDX 的 `export` 语句中提取结构化元数据: + +```typescript +import { mdxToMd } from '@truenine/md-compiler' + +const mdxWithExports = ` +--- +author: yaml-author +tags: [from-yaml] +--- + +export const title = "My Document" +export const version = 2 +export const enabled = true +export const tags = ["mdx", "compiler"] +export const metadata = { category: "docs", priority: "high" } +export default { description: "A sample document" } + +# {title} + +Version: {version}, Enabled: {enabled} +`.trim() + +const result = await mdxToMd(mdxWithExports, { + filePath: '/docs/sample.mdx', + extractMetadata: true, + scope: {} +}) + +console.log(JSON.stringify(result.metadata, null, 2)) +// 输出: +// { +// "fields": { +// "author": "yaml-author", +// "tags": ["mdx", "compiler"], +// "title": "My Document", +// "version": 2, +// "enabled": true, +// "category": "docs", +// "priority": "high", +// "description": "A sample document" +// }, +// "source": "mixed" +// } +// +// 注意: +// - "tags" 被 export 覆盖了 YAML 中的值 (export 优先) +// - metadata 对象的属性被展开到顶层 +// - export default 的对象也被展开 +// - author 仅来自 YAML (无同名 export) +``` + +### 示例 5:自定义组件注册 + +注册自定义组件处理器来扩展编译能力: + +```typescript +import { + mdxToMd, + registerComponent, + clearComponents, + type ComponentHandler, + type MdxJsxFlowElement, + type ProcessingContext, + type RootContent +} from '@truenine/md-compiler' + +// 定义一个 组件 +const alertHandler: ComponentHandler = async ( + element: MdxJsxFlowElement, + ctx: ProcessingContext, + processChildren +) => { + const typeAttr = element.attributes.find( + a => a.type === 'mdxJsxAttribute' && a.name === 'type' + ) + const alertType = typeof typeAttr?.value === 'string' ? 
typeAttr.value : 'info' + + const children = await processChildren(element.children as RootContent[], ctx) + + const prefixMap: Record = { + warning: '> ⚠️ ', + error: '> ❌ ', + info: '> ℹ️ ', + success: '> ✅ ' + } + + const prefix = prefixMap[alertType] || '> ' + + return [{ + type: 'blockquote', + children: [ + { type: 'paragraph', children: [{ type: 'text', value: prefix }] }, + ...children.map(child => + child.type === 'paragraph' ? child : { type: 'paragraph', children: [child] } + ) + ] + }] +} + +// 注册组件 +registerComponent('Alert', alertHandler) + +const mdx = ` +# Custom Components + + + This is a warning message with **bold** text inside. + + + + Operation completed successfully! + +`.trim() + +const result = await mdxToMd(mdx) +console.log(result) + +// 清理(通常在测试中使用) +clearComponents() +``` + +## 高级主题 + +### Native Binding 加载与 TypeScript 参考实现 + + + ### 运行时决策流程 + + 当调用公开入口 `mdxToMd()` 时,实际执行路径如下: + + 1. **入口函数** (`src/mdx-to-md.ts`) 调用 `getNapiMdCompilerBinding().compileMdxToMd()` + 2. **Binding 入口** (`src/native-binding.ts`) 先处理环境变量开关,再委托 `sdk/src/core/native-binding-loader.ts` + 3. **共享加载器** 依次探测本地 `.node` 二进制文件与 CLI 平台包中的 binding + 4. **Rust 实现** (`lib.rs`) 通过 NAPI 暴露 `compile_mdx_to_md` 等函数 + 5. **`src/compiler/`** 保留 TypeScript 参考流水线,用于源级测试和实现对照,而不是公开入口的自动 fallback + + ### 何时使用哪条路径? 
+ + | 场景 | 主要路径 | 原因 | + | --- | --- | --- | + | SDK / CLI / MCP 公开入口 | Native (Rust) | 与发布产物一致,性能最佳 | + | 调试编译流水线细节 | `src/compiler/*` | 直接查看 parser / evaluator / serializer 行为 | + | 编写源级单元测试 | `src/compiler/*` | 不依赖 `.node` 制品打包 | + | GUI Tauri 应用 | 直接调用 crate | 无需 NAPI 开销 | + + ### 加载控制 + + ```bash + # 禁用公开入口的 native binding,用于验证错误路径 + TNMSC_DISABLE_NATIVE_BINDING=1 + + # 强制尝试 native binding + TNMSC_FORCE_NATIVE_BINDING=1 + ``` + + +### 性能优化建议 + + + + 对于大量文件的批量编译场景,复用 `ProcessingContext` 中的组件注册表和作用域对象,避免重复初始化。 + + + 只传递必要的 `scope` 字段。过大的作用域对象会增加序列化开销(Native 路径下需 JSON 序列化传递给 Rust)。 + + + 仅在需要时设置 `extractMetadata: true`。元数据提取涉及额外的 YAML 解析和 ESM 静态分析步骤。 + + + 组件的递归深度受 `processingStack` 保护,但深层嵌套仍会增加处理时间。建议组件嵌套不超过 3 层。 + + + +### 错误处理最佳实践 + +MDX-Compiler 提供了结构化的错误体系,定义于 `errors/index.ts`: + +#### 错误类型层级 + +```mermaid +classDiagram + class Error { + +message: string + +name: string + +cause?: Error + } + + class CompilerDiagnosticError { + +diagnostic: CompilerDiagnostic + +filePath?: string + +line?: number + +column?: number + +codeFrame?: string + +phase?: string + +nodeType?: string + } + + class ScopeError { + +expression?: string + } + + class UndefinedVariableError { + +variableName: string + } + + class UndefinedNamespaceError { + +namespace: string + } + + class ExportParseError { + +exportName?: string + } + + class MetadataValidationError { + +missingFields: readonly string[] + } + + Error <|-- CompilerDiagnosticError + CompilerDiagnosticError <|-- ScopeError + ScopeError <|-- UndefinedVariableError + ScopeError <|-- UndefinedNamespaceError + CompilerDiagnosticError <|-- ExportParseError + Error <|-- MetadataValidationError +``` + +#### 关键错误类说明 + +| 错误类 | 触发场景 | diagnostic.phase | 诊断信息 | +| --- | --- | --- | --- | +| `UndefinedVariableError` | 表达式引用了未定义的变量 | `expression-evaluation` | 变量名、表达式原文、代码帧 | +| `UndefinedNamespaceError` | 表达式引用了未定义的命名空间(根变量) | `expression-evaluation` | 命名空间名、表达式原文 | +| `ExportParseError` | export 语句无法静态求值 | `export-parsing` | 
导出名、原因提示 | +| `MetadataValidationError` | 缺少必需的元数据字段 | — | 缺失字段列表 | + +#### CompilerDiagnostic 结构 + +所有编译期错误都附带 `CompilerDiagnostic` 对象,包含丰富的定位信息: + +```typescript +interface CompilerDiagnostic { + filePath?: string // 源文件路径 + line?: number // 起始行号 + column?: number // 起始列号 + endLine?: number // 结束行号 + endColumn?: number // 结束列号 + snippet?: string // 错误位置代码片段 + sourceLine?: string // 所在行的完整源码 + codeFrame?: string // 带指针的代码帧 + expression?: string // 导致错误的表达式 + exportName?: string // 相关的导出名 + nodeType?: string // AST 节点类型 + phase?: string // 编译阶段标识 + hint?: string // 修复建议 + cause?: string // 底层原因 +} +``` + +#### 推荐的错误处理模式 + +```typescript +import { + mdxToMd, + UndefinedVariableError, + UndefinedNamespaceError, + ExportParseError, + MetadataValidationError, + formatCompilerDiagnostic +} from '@truenine/md-compiler' + +try { + const result = await mdxToMd(mdxSource, { filePath: '/path/to/file.mdx', extractMetadata: true }) + return result +} catch (error) { + if (error instanceof UndefinedVariableError) { + console.error(`未定义变量: ${error.variableName}`) + console.error(`表达式: ${error.diagnostic.expression}`) + console.error(formatCompilerDiagnostic(error)) + } else if (error instanceof UndefinedNamespaceError) { + console.error(`未定义命名空间: ${error.namespace}`) + console.error('可用的作用域键:', Object.keys(scope).join(', ')) + } else if (error instanceof ExportParseError) { + console.error(`导出解析失败: ${error.exportName ?? 'default'}`) + console.error(error.diagnostic.hint ?? 
'请确保导出值为静态可求值的字面量') + } else if (error instanceof MetadataValidationError) { + console.error(`缺少必需字段: ${error.missingFields.join(', ')}`) + } else { + throw error // 重新抛出未知错误 + } +} +``` + +### 类型导出速查 + +以下是 `@truenine/md-compiler` 的完整公共类型导出清单: + +```typescript +// ===== 核心类型 (来自 types.ts) ===== + +interface EvaluationScope { + [key: string]: unknown +} + +interface MdxToMdOptions { + scope?: EvaluationScope + basePath?: string + filePath?: string + globalScope?: MdxGlobalScope + extractMetadata?: boolean +} + +interface MdxToMdResult { + content: string + metadata: ExportMetadata +} + +interface ProcessingContext { + scope: EvaluationScope + components: Map + processingStack: string[] + basePath?: string + filePath?: string + sourceText?: string +} + +type ComponentHandler = ( + element: MdxJsxFlowElement | MdxJsxTextElement, + ctx: ProcessingContext, + processChildren: (children: RootContent[], ctx: ProcessingContext) => Promise +) => Promise + +// ===== MDX AST 节点类型 (来自 mdast-util-mdx) ===== + +type MdxFlowExpression // 块级表达式 {expr} +type MdxTextExpression // 行内表达式 {expr} +type MdxjsEsm // ESM import/export 语句 +type MdxJsxFlowElement // 块级 JSX 元素 +type MdxJsxTextElement // 行内 JSX 元素 + +// ===== MDAST 基础类型 (来自 mdast) ===== + +type Root // AST 根节点 +type RootContent // 根节点子元素的联合类型 + +// ===== 导出元数据类型 (来自 export-parser.ts) ===== + +type MetadataSource = 'export' | 'yaml' | 'mixed' + +interface ExportMetadata { + fields: Record + source: MetadataSource +} + +// ===== TOML 构建类型 (来自 toml.ts) ===== + +interface BuildTomlDocumentOptions { + fieldOrder?: readonly string[] +} + +interface BuildPromptTomlArtifactOptions extends BuildTomlDocumentOptions { + content: string + bodyFieldName: string + frontMatter?: Readonly> + fieldNameMap?: Readonly> + excludedKeys?: readonly string[] + extraFields?: Readonly> +} + +// ===== 全局作用域类型 (来自 globals/) ===== + +interface UserProfile { name?, username?, gender?, birthday?: string; [key: string]: unknown } +interface 
CodeStylePreferences { indent?, tabSize?: number; [key: string]: unknown } +interface ToolReferences { [key: string]: string | undefined } +interface EnvironmentContext { [key: string]: unknown } +enum ShellKind { Bash, Zsh, Fish, Sh, PowerShell, Pwsh, Cmd, Unknown } +enum OsKind { Win, Mac, Linux, Unknown } +interface OsInfo { platform?, arch?, hostname?, shellKind?: ShellKind, kind?: OsKind; [key: string]: string | ShellKind | OsKind | undefined } +interface MdGlobalScope { profile, codeStyles, tool, env, os, Md } +``` diff --git a/doc/content/sdk/script-runtime/_meta.ts b/doc/content/sdk/script-runtime/_meta.ts new file mode 100644 index 00000000..f74596bd --- /dev/null +++ b/doc/content/sdk/script-runtime/_meta.ts @@ -0,0 +1,3 @@ +export default { + index: 'index' +} diff --git a/doc/content/sdk/script-runtime/index.mdx b/doc/content/sdk/script-runtime/index.mdx new file mode 100644 index 00000000..46148cd6 --- /dev/null +++ b/doc/content/sdk/script-runtime/index.mdx @@ -0,0 +1,1042 @@ +--- +title: Script-Runtime +description: Rust 支持的 TypeScript 代理模块运行时,为 tnmsc 提供安全的模块代理和公共路径管理能力。 +sidebarTitle: Script-Runtime +status: stable +--- + +import { Callout, Cards, Steps, Tabs } from 'nextra/components' + +# Script-Runtime + + +**包名**: `@truenine/script-runtime`  |  **Rust Crate**: `tnmsc-script-runtime`  |  **Rust / 包位置**: `libraries/script-runtime/`  |  **TypeScript 事实来源**: `sdk/src/libraries/script-runtime/` + + +Script-Runtime 是一个 **Rust 支持的 TypeScript 代理模块加载器和路径验证运行时**,为 `tnmsc` 核心系统提供安全的动态模块代理与公共路径解析能力。它通过 NAPI 将 Rust 原生的高性能路径验证逻辑暴露给 Node.js 层,同时利用 Jiti 运行时实现灵活的代理模块加载与执行。 + +## 库概述 + +### 定位与核心价值 + +Script-Runtime 在 `tnmsc` 架构中扮演 **基础设施层** 的角色: + +- **代理模块加载器**: 动态加载和执行用户定义的 `proxy.ts` 模块 +- **路径安全守卫**: 通过 Rust 原生绑定实现防路径遍历的严格路径验证 +- **沙箱执行环境**: 使用 Worker 进程隔离代理模块执行,支持超时控制 + +```mermaid +graph TB + subgraph Consumers["上层消费者"] + CLI["CLI (tnmsc)"] + MCP["MCP Server"] + GUI["GUI (Tauri)"] + end + + subgraph ScriptRuntime["Script-Runtime"] + TS_API["TypeScript API 
层"] + Worker["Worker 进程"] + NativeBinding["Rust Native Binding"] + end + + subgraph Core["核心能力"] + ModuleLoader["Jiti 模块加载器"] + PathValidator["路径验证器 (Rust)"] + TimeoutControl["超时控制"] + end + + CLI --> TS_API + MCP --> TS_API + GUI --> TS_API + TS_API --> Worker + TS_API --> NativeBinding + Worker --> ModuleLoader + NativeBinding --> PathValidator + NativeBinding --> TimeoutControl +``` + +### 应用场景 + +| 场景 | 说明 | +| --- | --- | +| **CLI 动态代理** | `tnmsc install` / `dry-run` 时按需加载项目级 `proxy.ts` | +| **MCP 工具集成** | MCP Server 通过 Script-Runtime 解析工具请求的公共资源路径 | +| **GUI 路由代理** | Tauri 桌面应用使用代理模块将逻辑路径映射到实际文件系统位置 | +| **沙箱路径解析** | 在受控环境中执行用户自定义路径转换逻辑,防止路径遍历攻击 | + +--- + +## 核心 API 详解 + +### 导出总览 + +```typescript +import { + // 类型导出 + type ProxyCommand, + type ProxyContext, + type ProxyDefinition, + type ProxyMatcherConfig, + type ProxyModule, + type ProxyModuleConfig, + type ProxyRouteHandler, + type ValidatePublicPathOptions, + + // 函数导出 + defineProxy, + loadProxyModule, + validatePublicPath, + resolvePublicPath, + resolvePublicPathUnchecked, + getProxyModuleConfig +} from '@truenine/script-runtime' +``` + +--- + +### `defineProxy(definition) → T` + +定义一个类型安全的代理模块。这是一个 **身份函数(identity function)**,主要用于提供 TypeScript 类型推断。 + +**签名:** + +```typescript +function defineProxy(value: T): T +``` + +**参数:** + +| 参数 | 类型 | 说明 | +| --- | --- | --- | +| `value` | `T extends ProxyDefinition \| ProxyRouteHandler` | 代理定义对象或路由处理函数 | + +**返回值:** `T` — 原样返回输入值,保留完整类型信息 + +**异常情况:** 无 + +--- + +### `loadProxyModule(filePath) → Promise` + +动态加载一个代理模块文件。内部使用 [Jiti](https://github.com/nicolo-ribaudo/jiti) 运行时实现 TypeScript/ESM 的即时编译与执行。 + +**签名:** + +```typescript +async function loadProxyModule(filePath: string): Promise +``` + +**参数:** + +| 参数 | 类型 | 说明 | +| --- | --- | --- | +| `filePath` | `string` | 代理模块文件的绝对或相对路径 | + +**返回值:** `Promise` — 加载后的代理模块对象 + +**异常情况:** + +| 异常 | 触发条件 | +| --- | --- | +| `Error` | 文件不存在 (`proxy.ts not found: ...`) | +| `Error` | 模块未导出默认值 (`proxy.ts must export a 
default value`) | +| `TypeError` | 默认导出不是函数或纯对象 (`proxy.ts default export must be a function or plain object`) | +| `Error` | config 导出不是纯对象 (`proxy.ts config export must be a plain object`) | + +--- + +### `validatePublicPath(resolvedPath, options) → string` + +通过 Rust 原生绑定验证解析后的公共路径安全性。这是 **同步调用**,直接执行 Rust 层的路径规范化与安全检查。 + +**签名:** + +```typescript +function validatePublicPath( + resolvedPath: string, + options: ValidatePublicPathOptions +): string +``` + +**参数:** + +| 参数 | 类型 | 说明 | +| --- | --- | --- | +| `resolvedPath` | `string` | 待验证的已解析路径(相对路径) | +| `options` | `ValidatePublicPathOptions` | 验证选项配置 | + +**返回值:** `string` — 验证通过后的规范化相对路径 + +**异常情况:** + +| 异常 | 触发条件 | +| --- | --- | +| `Error` | 路径为空 (`Resolved public path cannot be empty`) | +| `Error` | 路径是绝对路径 (`Resolved public path must be relative`) | +| `Error` | 路径包含 `..` 遍历段 (`Path escapes root`) | +| `Error` | Native binding 不可用 (`validate_public_path native binding is unavailable`) | + +--- + +### `resolvePublicPath(filePath, ctx, logicalPath, timeoutMs?) 
→ string` + +**完整的路径解析流程**: 加载代理模块 → 执行路由处理器 → Rust 层安全验证。这是 **同步阻塞调用**,内部通过 Worker 子进程执行代理逻辑并等待结果。 + +**签名:** + +```typescript +function resolvePublicPath( + filePath: string, + ctx: ProxyContext, + logicalPath: string, + timeoutMs?: number = 5_000 +): string +``` + +**参数:** + +| 参数 | 类型 | 默认值 | 说明 | +| --- | --- | --- | --- | +| `filePath` | `string` | — | 代理模块文件路径 | +| `ctx` | `ProxyContext` | — | 代理上下文对象 | +| `logicalPath` | `string` | — | 待解析的逻辑公共路径 | +| `timeoutMs` | `number` | `5000` | Worker 执行超时时间(毫秒) | + +**返回值:** `string` — 经过代理转换和安全验证后的最终路径 + +**异常情况:** + +| 异常 | 触发条件 | +| --- | --- | +| `Error` | Worker 执行超时 (`proxy.ts execution timed out after ...ms`) | +| `Error` | 代理模块执行错误(来自 stderr) | +| `Error` | Worker 无输出 (`proxy worker produced no output`) | +| `Error` | 路径验证失败(同 `validatePublicPath`) | +| `Error` | Node.js 可执行文件未找到 | +| `Error` | Worker 路径无效 | + +--- + +### `resolvePublicPathUnchecked(filePath, ctx, logicalPath) → Promise` + +**无 Rust 安全检查的异步路径解析**。仅执行代理模块的路由处理逻辑,跳过 Rust 层的路径验证。适用于需要自定义验证策略的场景。 + +**签名:** + +```typescript +async function resolvePublicPathUnchecked( + filePath: string, + ctx: ProxyContext, + logicalPath: string +): Promise +``` + +**参数:** 同 `resolvePublicPath`,无 `timeoutMs` 参数 + +**返回值:** `Promise` — 代理模块返回的原始解析路径 + +**异常情况:** 同 `loadProxyModule` 和路由处理器可能抛出的异常 + +--- + +### `getProxyModuleConfig(module) → ProxyModuleConfig | undefined` + +从已加载的代理模块中提取配置信息。 + +**签名:** + +```typescript +function getProxyModuleConfig(module: ProxyModule): ProxyModuleConfig | undefined +``` + +**参数:** + +| 参数 | 类型 | 说明 | +| --- | --- | --- | +| `module` | `ProxyModule` | 已加载的代理模块实例 | + +**返回值:** `ProxyModuleConfig | undefined` — 模块配置,若模块未导出 config 则为 `undefined` + +--- + +## 关键类型详解 + +### `ProxyContext` + +代理执行的上下文环境,包含当前工作状态和目标信息。 + +```typescript +interface ProxyContext { + readonly cwd: string // 当前工作目录 + readonly workspaceDir: string // 工作区根目录 + readonly aindexDir: string // Aindex 数据目录 + readonly command: ProxyCommand // 当前执行的命令 + readonly 
platform: NodeJS.Platform // 运行平台 +} +``` + + +`ProxyContext` 的所有属性均为 `readonly`,代理模块不应修改上下文对象。 + + +### `ValidatePublicPathOptions` + +路径验证选项配置。 + +```typescript +interface ValidatePublicPathOptions { + readonly aindexPublicDir: string // Aindex 公共资源根目录,用作安全边界 +} +``` + +### `ProxyCommand` + +支持的代理命令类型枚举。 + +```typescript +type ProxyCommand = 'install' | 'dry-run' | 'clean' | 'plugins' +``` + +### `ProxyMatcherConfig` + +命令匹配器配置,用于控制代理模块在哪些命令下生效。 + +```typescript +interface ProxyMatcherConfig { + readonly commands?: readonly ProxyCommand[] // 生效的命令列表,空或未设置表示全部生效 +} +``` + +### `ProxyModule` / `ProxyModuleConfig` / `ProxyDefinition` / `ProxyRouteHandler` + +```typescript +// 代理模块的完整结构 +interface ProxyModule { + readonly default: ProxyHandler // 默认导出:函数或定义对象 + readonly config?: ProxyModuleConfig // 可选的模块配置 +} + +// 模块配置 +interface ProxyModuleConfig { + readonly matcher?: ProxyMatcherConfig // 命令匹配器 +} + +// 代理定义对象形式 +interface ProxyDefinition { + readonly resolvePublicPath?: ProxyRouteHandler // 路径解析处理器 +} + +// 路由处理函数签名 +type ProxyRouteHandler = ( + logicalPath: string, + ctx: ProxyContext +) => string | Promise +``` + +--- + +## 代理系统架构 + +### 代理定义模式 + +Script-Runtime 支持两种代理定义方式: + + + +推荐用于简单场景,直接导出一个路由处理函数。 + + +适用于需要组织多个处理器或未来扩展的场景,导出包含 `resolvePublicPath` 的对象。 + + + + + +```typescript +// proxy.ts - 函数式定义 +import {defineProxy} from '@truenine/script-runtime' +import {join} from 'node:path' + +export default defineProxy((logicalPath: string, ctx: ProxyContext) => { + return join('assets', logicalPath) +}) +``` + + +```typescript +// proxy.ts - 对象式定义 +import {defineProxy, type ProxyDefinition} from '@truenine/script-runtime' +import {join} from 'node:path' + +const proxy: ProxyDefinition = { + resolvePublicPath(logicalPath: string, ctx: ProxyContext) { + return join('static', ctx.platform, logicalPath) + } +} + +export default defineProxy(proxy) + +export const config = { + matcher: { + commands: ['install', 'dry-run'] + } +} +``` + + + +### ProxyModule 结构 + 
+```mermaid +classDiagram + class ProxyModule { + +readonly default: ProxyHandler + +readonly config?: ProxyModuleConfig + } + + class ProxyModuleConfig { + +readonly matcher?: ProxyMatcherConfig + } + + class ProxyMatcherConfig { + +readonly commands?: readonly ProxyCommand[] + } + + class ProxyHandler { + <> + } + + class ProxyRouteHandler { + +(logicalPath, ctx): string | Promise~string~ + } + + class ProxyDefinition { + +readonly resolvePublicPath?: ProxyRouteHandler + } + + ProxyModule --> ProxyModuleConfig : config + ProxyModuleConfig --> ProxyMatcherConfig : matcher + ProxyHandler <|-- ProxyRouteHandler + ProxyHandler <|-- ProxyDefinition + ProxyDefinition --> ProxyRouteHandler : resolvePublicPath +``` + +### 模块加载流程 + +`loadProxyModule` 的内部执行步骤: + + +### 1. 路径解析 + +将输入路径转换为绝对路径,并验证文件存在性。 + +### 2. Jiti 运行时创建 + +创建一个新的 Jiti 实例,配置如下: +- `fsCache: false` — 禁用文件系统缓存,确保每次加载最新版本 +- `moduleCache: false` — 禁用模块缓存,避免跨调用状态污染 +- `interopDefault: false` — 禁用默认互操作,保留原始导出结构 + +### 3. 模块导入 + +使用 Jiti 的 `import()` 方法加载目标文件,支持 TypeScript 和 ESM 语法。 + +### 4. 结构验证 + +对加载的模块进行严格的类型校验: +- 必须是对象(模块命名空间) +- 必须包含 `default` 导出 +- `default` 必须是函数或纯对象 +- `config`(如果存在)必须是纯对象 + +### 5. 
返回 ProxyModule + +构造并返回符合 `ProxyModule` 接口的结构化对象。 + + +--- + +## 路径解析机制 + +### validatePublicPath vs resolvePublicPath + +这两个 API 的设计反映了 **"验证 vs 完整流程"** 的职责分离: + +| 特性 | `validatePublicPath` | `resolvePublicPath` | +| --- | --- | --- | +| **性质** | 同步、纯验证 | 同步、完整流程 | +| **Rust 绑定** | ✅ 直接调用 | ✅ 内部调用 | +| **代理执行** | ❌ 不执行 | ✅ 通过 Worker 执行 | +| **超时控制** | ❌ 无 | ✅ 支持 | +| **适用场景** | 已有路径的安全校验 | 端到端路径解析 | + +```mermaid +sequenceDiagram + participant Caller as 调用方 + participant TS as TypeScript API + participant Worker as Worker 进程 + participant Jiti as Jiti Runtime + participant Proxy as proxy.ts + participant Rust as Rust Native + + Note over Caller,Rust: resolvePublicPath 完整流程 + + Caller->>TS: resolvePublicPath(filePath, ctx, path, timeout) + TS->>TS: 序列化 ctx 为 JSON(含 workerPath, timeoutMs) + TS->>Rust: resolve_public_path(filePath, ctxJson, logicalPath) + + Rust->>Rust: 创建临时目录,写入 ctx JSON + Rust->>Worker: spawn node workerPath filePath ctxPath logicalPath + Worker->>Jiti: createRuntime() + Jiti-->>Worker: jiti instance + Worker->>Proxy: jiti.import(filePath) + Proxy-->>Worker: module.default(logicalPath, ctx) + Worker-->>Rust: stdout: resolvedPath + + Rust->>Rust: validate_public_path_impl(resolvedPath, aindexPublicDir) + Rust->>Rust: normalize_path() → ensure_within_root() + Rust-->>TS: validated path string + TS-->>Caller: 最终安全路径 +``` + +### Native Binding 层的角色 + +Rust 实现的路径验证提供了 **操作系统级别的安全保障**: + +1. **路径规范化**: 统一处理 `/`、`\`、`.`、`..` 等路径组件 +2. **遍历检测**: 通过逐组件解析检测 `..` 是否逃逸出允许的根目录 +3. **绝对路径拒绝**: 强制要求相对路径,防止绝对路径注入 +4. **空路径防护**: 拒绝空字符串和纯空白路径 + +### Worker 机制 + +`resolve-proxy-worker.ts` 是一个独立的 Node.js 脚本,作为子进程运行: + +``` +Usage: resolve-proxy-worker +``` + +**工作流程:** +1. 从命令行参数读取文件路径、上下文 JSON 路径和逻辑路径 +2. 读取并解析上下文 JSON 文件 +3. 调用 `resolvePublicPathModule` 执行代理逻辑 +4. 
将结果写入 stdout,错误写入 stderr 并以退出码 1 退出 + +### 超时控制和安全边界 + + +**超时机制**: Rust 层使用 `wait_timeout::ChildExt` 实现,超时后强制终止 Worker 进程(`child.kill()`),防止恶意或失控的代理模块无限期挂起。 + + +| 安全措施 | 实现层 | 说明 | +| --- | --- | --- | +| 超时终止 | Rust | 默认 5000ms,可配置 | +| 路径遍历防护 | Rust | `normalize_path` + `ensure_within_root` | +| 绝对路径拒绝 | Rust | `candidate_path.is_absolute()` 检查 | +| 模块缓存禁用 | TypeScript (Jiti) | 每次 `loadProxyModule` 创建新实例 | +| 进程隔离 | OS | Worker 作为独立子进程运行 | + +--- + +## Native Binding 层详解 + +### Rust 核心实现 + +`lib.rs` 包含两个核心函数: + +#### `validate_public_path_impl` + +```rust +pub fn validate_public_path_impl( + resolved_path: &str, + aindex_public_dir: &str, +) -> Result +``` + +**算法步骤:** +1. 去除首尾空白,检查非空 +2. 统一反斜杠为正斜杠 +3. 拒绝绝对路径 +4. 调用 `normalize_path()` 规范化路径组件 +5. 计算公共目录的绝对基路径 +6. 拼接后再次规范化 +7. 调用 `ensure_within_root()` 验证不逃逸 + +#### `resolve_public_path_impl` + +```rust +pub fn resolve_public_path_impl( + file_path: &str, + ctx_json: &str, + logical_path: &str, +) -> Result +``` + +**算法步骤:** +1. 反序列化 JSON 上下文(提取 `worker_path`、`timeout_ms`、`aindex_dir`) +2. 检测 Node.js 可执行文件(带缓存) +3. 创建临时目录并写入上下文 JSON +4. 生成 Worker 子进程 +5. 等待执行完成(带超时) +6. 读取 stdout/stderr +7. 
对输出调用 `validate_public_path_impl` 进行安全验证 + +### 共享 Native Binding 加载 + +Script-Runtime 现在与 Logger、MDX-Compiler 共用 `sdk/src/core/native-binding-loader.ts`。它的 TypeScript 事实来源已经迁到 `sdk/src/libraries/script-runtime/`,而 `libraries/script-runtime/src/index.ts` 只保留 wrapper 角色;真正的实现只保留 Script-Runtime 自己必须关心的部分: + +- `isScriptRuntimeBinding()`:校验 native export +- `optionalMethods` 映射:把 `validate_public_path` / `resolve_public_path` 归一成 TypeScript 侧的首选方法名 +- Worker 入口文件的查找逻辑:这是运行时特有行为,不能抽到通用加载器里 + +```mermaid +flowchart TD + Start["getBinding()"] --> Loader["sdk.createNativeBindingLoader()"] + Loader --> LocalCandidates["本地 .node 候选"] + LocalCandidates --> LocalFound{"找到兼容 binding?"} + LocalFound -->|"Yes"| CacheReturn["缓存并返回"] + LocalFound -->|"No"| CliPackage["CLI 平台包 / 包目录探测"] + + CliPackage --> CliFound{"找到兼容 binding?"} + CliFound -->|"Yes"| CacheReturn + CliFound -->|"No"| FormatError["聚合错误并抛出"] + CacheReturn --> Alias["补齐 validatePublicPath / resolvePublicPath 别名"] + + subgraph SupportedPlatforms ["支持的平台"] + P1["win32-x64 (MSVC)"] + P2["linux-x64 (GNU)"] + P3["linux-arm64 (GNU)"] + P4["darwin-arm64"] + P5["darwin-x64"] + end +``` + +共享加载器会按统一顺序探测源码旁、`dist/`、`npm//` 和 CLI 平台包中的 `.node` 制品,而 Script-Runtime 本地代码则继续专注于 Worker 路径发现与 API 兼容层。 + +### 与 TypeScript 层的数据交互 + +数据流通过 **JSON 序列化** 跨越 Rust/TypeScript 边界: + +```mermaid +LR + subgraph TypeScript["TypeScript 层"] + CTX_OBJ["ProxyContext 对象"] + JSON_STR["JSON 字符串"] + end + + subgraph Rust["Rust 层"] + CTX_STRUCT["ResolvePublicPathContext struct"] + RESULT_STR["Result<String>"] + end + + CTX_OBJ --> "|JSON.stringify()"| JSON_STR + JSON_STR --> "|NAPI 参数传递"| CTX_STRUCT + CTX_STRUCT --> "|serde 反序列化"| CTX_STRUCT + CTX_STRUCT --> "|处理逻辑"| RESULT_STR + RESULT_STR --> "|NAPI 返回值"| JSON_STR +``` + +--- + +## 使用示例 + +### 示例 1: 定义简单的代理模块 + +```typescript +// project/proxy.ts +import {defineProxy, type ProxyRouteHandler} from '@truenine/script-runtime' + +const handler: ProxyRouteHandler = (logicalPath) => { + // 简单的前缀映射 + return 
`public/${logicalPath}` +} + +export default defineProxy(handler) +``` + +### 示例 2: 加载和使用外部代理 + +```typescript +import {loadProxyModule, getProxyModuleConfig} from '@truenine/script-runtime' + +async function useExternalProxy(proxyPath: string) { + // 加载代理模块 + const module = await loadProxyModule(proxyPath) + + // 获取模块配置 + const config = getProxyModuleConfig(module) + console.log('Matcher commands:', config?.matcher?.commands) + + // 直接调用默认导出的处理器(如果是函数形式) + if (typeof module.default === 'function') { + const result = await module.default('styles/main.css', { + cwd: process.cwd(), + workspaceDir: '/path/to/workspace', + aindexDir: '/path/to/.aindex', + command: 'install', + platform: process.platform + }) + console.log('Resolved:', result) + } +} + +useExternalProxy('./project/proxy.ts') +``` + +### 示例 3: 公共路径验证 + +```typescript +import {validatePublicPath} from '@truenine/script-runtime' + +const options = {aindexPublicDir: '/workspace/.aindex/public'} + +// ✅ 合法的相对路径 +const valid = validatePublicPath('assets/images/logo.png', options) +console.log(valid) // "assets/images/logo.png" + +try { + // ❌ 绝对路径 - 会抛出错误 + validatePublicPath('/etc/passwd', options) +} catch (error) { + console.error('Rejected absolute path:', error.message) +} + +try { + // ❌ 路径遍历 - 会抛出错误 + validatePublicPath('../../secret.txt', options) +} catch (error) { + console.error('Rejected traversal:', error.message) +} + +try { + // ❌ 包含反斜杠的父目录引用 + validatePublicPath('safe\\..\\..\\escape', options) +} catch (error) { + console.error('Rejected backslash traversal:', error.message) +} +``` + +### 示例 4: 带上下文的路径解析 + +```typescript +import {resolvePublicPath} from '@truenine/script-runtime' + +const ctx = { + cwd: '/home/user/project', + workspaceDir: '/home/user/project', + aindexDir: '/home/user/project/.aindex', + command: 'install' as const, + platform: process.platform +} + +// 完整的端到端解析(同步,带超时保护) +try { + const result = resolvePublicPath( + './proxy.ts', // 代理模块路径 + ctx, // 代理上下文 + 
'scripts/app.js', // 逻辑路径 + 10_000 // 自定义超时 10 秒 + ) + console.log('Safely resolved:', result) +} catch (error) { + if (error.message.includes('timed out')) { + console.error('代理模块执行超时') + } else { + console.error('路径解析失败:', error.message) + } +} +``` + +### 示例 5: 完整的代理路由配置(含 matchers 和 handlers) + +```typescript +// project/proxy.ts - 生产级代理模块 +import { + defineProxy, + type ProxyDefinition, + type ProxyRouteHandler, + type ProxyContext, + type ProxyModuleConfig +} from '@truenine/script-runtime' +import {join, extname} from 'node:path' + +/** + * 根据文件扩展名选择不同的路径映射策略 + */ +const resolveHandler: ProxyRouteHandler = async ( + logicalPath: string, + ctx: ProxyContext +): Promise => { + const ext = extname(logicalPath) + + switch (ext) { + case '.ts': + case '.tsx': + case '.js': + case '.jsx': + // 脚本文件映射到 dist 目录 + return join('dist', logicalPath) + + case '.css': + case '.less': + case '.scss': + // 样式文件映射到 assets/styles + return join('assets', 'styles', logicalPath) + + case '.png': + case '.jpg': + case '.svg': + case '.webp': + // 图片资源根据平台分目录 + return join('assets', 'images', ctx.platform, logicalPath) + + default: + // 其他文件保持原路径 + return logicalPath + } +} + +const definition: ProxyDefinition = { + resolvePublicPath: resolveHandler +} + +export default defineProxy(definition) + +export const config: ProxyModuleConfig = { + matcher: { + // 仅在 install 和 dry-run 命令下生效 + commands: ['install', 'dry-run'] + } +} +``` + +**消费端使用:** + +```typescript +import {resolvePublicPath, loadProxyModule, getProxyModuleConfig} from '@truenine/script-runtime' + +async function demonstrateFullProxy() { + const ctx = { + cwd: process.cwd(), + workspaceDir: process.cwd(), + aindexDir: join(process.cwd(), '.aindex'), + command: 'install' as const, + platform: process.platform + } + + // 方式一:使用完整的 resolvePublicPath(含 Rust 安全验证) + const scriptPath = resolvePublicPath('./project/proxy.ts', ctx, 'utils/helper.ts') + console.log('Script path:', scriptPath) + // 输出类似: "dist/utils/helper.ts" + 
+ const imagePath = resolvePublicPath('./project/proxy.ts', ctx, 'logo.png') + console.log('Image path:', imagePath) + // 输出类似: "assets/images/linux-x64/logo.png" + + // 方式二:先加载模块查看配置 + const module = await loadProxyModule('./project/proxy.ts') + const moduleConfig = getProxyModuleConfig(module) + console.log('Active commands:', moduleConfig?.matcher?.commands) + // 输出: ["install", "dry-run"] +} +``` + +--- + +## 最佳实践 + +### 代理模块的项目结构建议 + +``` +project/ +├── proxy.ts # 代理模块入口(必须) +├── src/ +│ └── resolvers/ # 自定义解析器(可选) +│ ├── asset-resolver.ts +│ └── path-utils.ts +└── tsconfig.json # TypeScript 配置(可选) +``` + + +**建议**: 将复杂的路径解析逻辑拆分为独立模块,在 `proxy.ts` 中仅做聚合和导出。这样有利于测试和维护。 + + +### 错误处理和超时配置 + +```typescript +import {resolvePublicPath, loadProxyModule} from '@truenine/script-runtime' + +class ProxyResolutionError extends Error { + constructor( + message: string, + public readonly phase: 'load' | 'execute' | 'validate', + public readonly cause?: Error + ) { + super(message) + this.name = 'ProxyResolutionError' + } +} + +async function safeResolve( + proxyPath: string, + ctx: Parameters[1], + logicalPath: string +): Promise { + try { + // 预检:确保代理模块可以正常加载 + await loadProxyModule(proxyPath) + } catch (error) { + throw new ProxyResolutionError( + `无法加载代理模块: ${proxyPath}`, + 'load', + error instanceof Error ? error : undefined + ) + } + + try { + // 正式解析(带合理的超时) + return resolvePublicPath(proxyPath, ctx, logicalPath, 15_000) + } catch (error) { + const message = error instanceof Error ? error.message : String(error) + if (message.includes('timed out')) { + throw new ProxyResolutionError( + `代理模块执行超时: ${proxyPath}`, + 'execute', + error instanceof Error ? error : undefined + ) + } + throw new ProxyResolutionError( + `路径验证失败: ${message}`, + 'validate', + error instanceof Error ? 
error : undefined + ) + } +} +``` + +### 安全注意事项 + + +**关键安全原则**: 永远不要绕过 `validatePublicPath` 或信任 `resolvePublicPathUnchecked` 的输出用于敏感操作。 + + +| 风险 | 防护措施 | +| --- | --- | +| **路径遍历攻击** | Rust 层的 `normalize_path` + `ensure_within_root` 双重防护 | +| **绝对路径注入** | Rust 层拒绝所有绝对路径 | +| **代理模块失控** | Worker 超时机制(默认 5s,可配置) | +| **模块状态污染** | Jiti 每次创建新实例,禁用模块缓存 | +| **恶意代理代码** | 进程隔离 + stderr 捕获 + 错误传播 | + +**禁止模式:** + +```typescript +// ❌ 错误:直接使用 unchecked 结果访问文件系统 +import {readFile} from 'node:fs/promises' +import {resolvePublicPathUnchecked} from '@truenine/script-runtime' + +const unsafePath = await resolvePublicPathUnchecked(proxyPath, ctx, userInput) +const content = await readFile(unsafePath, 'utf8') // 危险!未经验证 + +// ✅ 正确:始终经过验证 +import {resolvePublicPath} from '@truenine/script-runtime' +import {readFile} from 'node:fs/promises' + +const safePath = resolvePublicPath(proxyPath, ctx, userInput) // 含 Rust 验证 +const content = await readFile(join(publicRoot, safePath), 'utf8') // 安全 +``` + +### 性能优化建议 + + +### 1. Worker 路径缓存 + +`getWorkerPath()` 内部实现了路径查找结果的缓存,首次调用后会缓存找到的 Worker 路径,后续调用直接返回。 + +### 2. Native Binding 缓存 + +`getBinding()` 实现了单例模式的 binding 加载,首次成功加载后缓存实例,后续调用直接复用。加载失败也会缓存错误,避免重复尝试。 + +### 3. Node.js 可执行文件检测缓存 + +Rust 层使用 `OnceLock>>` 缓存 Node.js 路径检测结果,避免每次 `resolve_public_path` 都重新探测。 + +### 4. 合理设置超时 + +根据代理模块复杂度调整超时时间: +- 简单代理: `3000ms` (默认 5000ms 通常足够) +- 复杂代理(涉及网络请求等): `10000ms` ~ `30000ms` +- 避免设置过长超时(如 >60000ms),这会降低系统响应性 + +### 5. 批量解析优化 + +当需要解析大量路径时,考虑在一次代理调用中批量处理: + +```typescript +// ✅ 推荐:批量解析 +const batchHandler: ProxyRouteHandler = (logicalPath, _ctx) => { + // logicalPath 可能是逗号分隔的多个路径 + return logicalPath.split(',').map(p => `assets/${p}`).join(',') +} +``` + + +--- + +## 架构总结 + +```mermaid +graph TB + subgraph Userland["用户空间"] + ProxyTS["proxy.ts
(用户编写)"] + end + + subgraph ScriptRuntimeAPI["@truenine/script-runtime"] + direction TB + PublicAPI["公开 API
defineProxy / loadProxyModule
validatePublicPath / resolvePublicPath"] + + subgraph Internal["内部模块"] + Core["runtime-core.ts
Jiti 加载 / 路由分发"] + WorkerEntry["resolve-proxy-worker.ts
Worker 入口脚本"] + BindingLoader["index.ts
Native Binding 加载器"] + end + end + + subgraph NativeLayer["Rust Native Layer (tnmsc-script-runtime)"] + direction TB + NAPIExport["NAPI Binding
validate_public_path()
resolve_public_path()"] + PathValidator["路径验证引擎
normalize_path()
ensure_within_root()"] + ProcessManager["进程管理
Worker 生成 / 超时控制
Node.js 探测缓存"] + end + + subgraph OS["操作系统"] + FileSystem["文件系统"] + ProcessIsolation["进程隔离边界"] + end + + ProxyTS -.->|"import / defineProxy"| PublicAPI + PublicAPI --> Core + PublicAPI --> BindingLoader + Core --> WorkerEntry + BindingLoader --> NAPIExport + WorkerEntry --> Core + NAPIExport --> PathValidator + NAPIExport --> ProcessManager + ProcessManager --> ProcessIsolation + ProcessIsolation --> WorkerEntry + PathValidator --> FileSystem + Core --> FileSystem +``` + +--- + +## 相关资源 + +- **Rust / 包位置**: `libraries/script-runtime/` +- **TypeScript 事实来源**: `sdk/src/libraries/script-runtime/` +- **主入口**: `sdk/src/libraries/script-runtime/index.ts` +- **类型定义**: `sdk/src/libraries/script-runtime/types.ts` +- **核心运行时**: `sdk/src/libraries/script-runtime/runtime-core.ts` +- **Worker 处理器**: `sdk/src/libraries/script-runtime/resolve-proxy-worker.ts` +- **Rust Native**: `libraries/script-runtime/src/lib.rs` +- **依赖库**: `logger`(类似的 Native Binding 模式参考) diff --git a/doc/content/technical-details/libraries.mdx b/doc/content/technical-details/libraries.mdx index 5c47759c..7de836b6 100644 --- a/doc/content/technical-details/libraries.mdx +++ b/doc/content/technical-details/libraries.mdx @@ -1,26 +1,61 @@ --- -title: 基础库 -description: 总结当前 `libraries/` 下这些 Rust-first / NAPI-first 基础库的职责和暴露面。 +title: 基础库(概述) +description: libraries/ 下三个 Rust-first 基础库的快速导航与重定向指引。详细文档已迁移至 SDK 文档体系。 sidebarTitle: 基础库 status: stable --- +import { Callout, Cards } from 'nextra/components' + # 基础库 -`libraries/` 是 Rust-first / NAPI-first 基础库所在的目录。目前最主要的几个库是: + + **ℹ️ 文档已迁移** + + libraries 下各库的详细技术文档现已整合到 [SDK 文档体系](/docs/sdk) 中。 + 本页面仅保留简要概述,完整的 API 参考、架构说明和使用示例请访问各库的专属文档页面。 + + +## 现状说明 + +`libraries/` 目录仍然存在且正常运作,但它不再等同于全部 TypeScript 事实来源。当前边界是: + +- `libraries/logger/` 与 `libraries/script-runtime/` 继续保留 Rust crate、NAPI 制品和对外发布 wrapper +- `sdk/src/libraries/logger.ts` 与 `sdk/src/libraries/script-runtime/` 已成为对应 TypeScript facade 的事实来源 +- `libraries/md-compiler/` 仍保留完整的 Rust + TypeScript 实现 + +这三个 
Rust-first / NAPI-first 基础能力继续作为 memory-sync 工具链的共享基础设施,被 `sdk/` 核心层编排后统一暴露给 `cli/`、`mcp/` 和 `gui/` 消费。 + +最近的优化工作包括:统一 NAPI 加载器实现、将 Logger / Script-Runtime 的 TypeScript facade 内联回 `sdk/`、标准化配置接口,以及将分散的技术文档收敛到 [SDK 文档体系](/docs/sdk) 以提升可维护性。 + +## 快速导航 + + + + AI 友好的结构化日志库,以 Rust 实现核心能力并暴露 TypeScript API 和 NAPI 制品 + + + MDX → Markdown 编译引擎,为 Prompt Service 提供内容转换能力 + + + 代理模块加载与路径验证运行时,确保脚本执行的安全性和可控性 + + + + + **💡 为什么整合到 SDK 文档?** + + - 开发者可以在一个地方看到所有核心库的完整技术文档 + - 更清晰地展示各库与 SDK 整体架构的关系和数据流 + - 减少文档分散导致的维护困难和信息不一致风险 + -- `logger` -- `md-compiler` -- `script-runtime` +## 架构总览 -## 各自的大致职责 +如需了解三个库之间的依赖关系、数据流以及它们在整体架构中的定位,请访问: -| 库 | 角色 | -| --- | --- | -| `logger` | 以 Rust 实现日志能力,同时暴露 TypeScript API 和 NAPI 制品 | -| `md-compiler` | 提供 MDX / Markdown 编译与转换能力 | -| `script-runtime` | 被更高层装配消费的基础 script-runtime 能力 | +👉 **[SDK 架构总览](/docs/sdk/architecture)** — 包含完整的依赖关系图和分层架构说明 -## 为什么它们值得单独成节 +## 历史背景 -这些库不是“额外的文档材料”,而是仓库持续把核心能力下沉进共享基础设施的证据。CLI、MCP 和 GUI 都应该复用它们,而不是各自复制一份相同逻辑。 +这些库不是"额外的文档材料",而是仓库持续把核心能力下沉进共享基础设施的证据。CLI、MCP 和 GUI 都应该复用它们,而不是各自复制一份相同逻辑。这一设计决策在当前仓库中得到了延续和强化——通过将它们纳入 SDK 文档体系,我们希望让更多开发者理解这些基础库的价值和使用方式。 diff --git a/gui/scripts/generate-icons.ts b/gui/scripts/generate-icons.ts index 2e488508..8ade0d86 100644 --- a/gui/scripts/generate-icons.ts +++ b/gui/scripts/generate-icons.ts @@ -32,4 +32,4 @@ async function main() { } } -main() +main().then(r => r) diff --git a/gui/src/pages/AdaptorsPage.tsx b/gui/src/pages/AdaptorsPage.tsx index 9cdb690f..9a12d27c 100644 --- a/gui/src/pages/AdaptorsPage.tsx +++ b/gui/src/pages/AdaptorsPage.tsx @@ -30,7 +30,7 @@ const AdaptorsPage: FC = () => { } useEffect(() => { - fetchPlugins() + fetchPlugins().then(r => r) }, []) return ( diff --git a/gui/src/pages/DashboardPage.tsx b/gui/src/pages/DashboardPage.tsx index b5c4fd8c..d530ca74 100644 --- a/gui/src/pages/DashboardPage.tsx +++ b/gui/src/pages/DashboardPage.tsx @@ -97,7 +97,7 @@ const DashboardPage: FC = () => { const projectData = useMemo(() => { if (!stats) return [] const 
sorted = [...stats.projects].sort((a, b) => { - let cmp = 0 + let cmp: number if (sortBy === 'chars') cmp = a.totalChars - b.totalChars else if (sortBy === 'files') cmp = a.fileCount - b.fileCount else cmp = a.name.localeCompare(b.name) diff --git a/libraries/logger/package.json b/libraries/logger/package.json index 150a47a0..0c2faab5 100644 --- a/libraries/logger/package.json +++ b/libraries/logger/package.json @@ -32,7 +32,7 @@ "build:all": "run-s build:ts build:native", "build:native": "napi build --platform --release --output-dir dist -- --features napi", "build:native:debug": "napi build --platform --output-dir dist -- --features napi", - "build:ts": "tsx ../../scripts/build-quiet.ts", + "build:ts": "tsdown", "check": "run-p check:type lint", "lint": "run-p lint:ts lint:rust", "lint:ts": "eslint --cache --cache-location .eslintcache .", @@ -48,6 +48,8 @@ }, "devDependencies": { "@napi-rs/cli": "catalog:", + "@truenine/eslint10-config": "catalog:", + "eslint": "catalog:", "npm-run-all2": "catalog:", "tsdown": "catalog:", "typescript": "catalog:", diff --git a/libraries/logger/src/index.ts b/libraries/logger/src/index.ts index c37624e7..ed0199a1 100644 --- a/libraries/logger/src/index.ts +++ b/libraries/logger/src/index.ts @@ -1,239 +1 @@ -import {readdirSync} from 'node:fs' -import {createRequire} from 'node:module' -import {dirname, join} from 'node:path' -import process from 'node:process' - -export type LogLevel = 'error' | 'warn' | 'info' | 'debug' | 'trace' | 'fatal' | 'silent' -export type DiagnosticLines = readonly [string, ...string[]] -export type LoggerDiagnosticLevel = Extract -type LoggerMethod = (message: string | object, ...meta: unknown[]) => void -type LoggerDiagnosticMethod = (diagnostic: LoggerDiagnosticInput) => void - -export interface LoggerDiagnosticInput { - readonly code: string - readonly title: string - readonly rootCause: DiagnosticLines - readonly exactFix?: DiagnosticLines | undefined - readonly possibleFixes?: readonly 
DiagnosticLines[] | undefined - readonly details?: Record | undefined -} - -export interface LoggerDiagnosticRecord extends LoggerDiagnosticInput { - readonly level: LoggerDiagnosticLevel - readonly namespace: string - readonly copyText: DiagnosticLines -} - -export interface ILogger { - error: LoggerDiagnosticMethod - warn: LoggerDiagnosticMethod - info: LoggerMethod - debug: LoggerMethod - trace: LoggerMethod - fatal: LoggerDiagnosticMethod -} - -type ActiveLogLevel = Exclude -type PlainLogLevel = Extract - -interface PlatformBinding { - readonly local: string - readonly suffix: string -} - -interface NapiLoggerInstance { - emit: (level: ActiveLogLevel, message: unknown, meta?: readonly unknown[]) => void - emitDiagnostic: (level: LoggerDiagnosticLevel, diagnostic: LoggerDiagnosticInput) => void -} - -interface NapiLoggerModule { - createLogger: (namespace: string, level?: string) => NapiLoggerInstance - setGlobalLogLevel: (level: string) => void - getGlobalLogLevel: () => string | undefined - clearBufferedDiagnostics: () => void - drainBufferedDiagnostics: () => string - flushOutput?: () => void -} - -const PLATFORM_BINDINGS: Record = { - 'win32-x64': {local: 'napi-logger.win32-x64-msvc', suffix: 'win32-x64-msvc'}, - 'linux-x64': {local: 'napi-logger.linux-x64-gnu', suffix: 'linux-x64-gnu'}, - 'linux-arm64': {local: 'napi-logger.linux-arm64-gnu', suffix: 'linux-arm64-gnu'}, - 'darwin-arm64': {local: 'napi-logger.darwin-arm64', suffix: 'darwin-arm64'}, - 'darwin-x64': {local: 'napi-logger.darwin-x64', suffix: 'darwin-x64'} -} - -const DIAGNOSTIC_LOG_LEVELS: readonly LoggerDiagnosticLevel[] = ['error', 'warn', 'fatal'] -const PLAIN_LOG_LEVELS: readonly PlainLogLevel[] = ['info', 'debug', 'trace'] - -let napiBinding: NapiLoggerModule | undefined, - napiBindingError: Error | undefined - -function isNapiLoggerModule(value: unknown): value is NapiLoggerModule { - if (value == null || typeof value !== 'object') return false - - const candidate = value as Partial - 
return typeof candidate.createLogger === 'function' - && typeof candidate.setGlobalLogLevel === 'function' - && typeof candidate.getGlobalLogLevel === 'function' - && typeof candidate.clearBufferedDiagnostics === 'function' - && typeof candidate.drainBufferedDiagnostics === 'function' -} - -function getPlatformBinding(): PlatformBinding { - const binding = PLATFORM_BINDINGS[`${process.platform}-${process.arch}`] - if (binding != null) return binding - - throw new Error( - `Unsupported platform for @truenine/logger native binding: ${process.platform}-${process.arch}` - ) -} - -function formatBindingLoadError(localError: unknown, packageError: unknown, suffix: string): Error { - const localMessage = localError instanceof Error ? localError.message : String(localError) - const packageMessage = packageError instanceof Error ? packageError.message : String(packageError) - return new Error( - [ - 'Failed to load @truenine/logger native binding.', - `Tried local binaries next to the source/bundle and package "@truenine/memory-sync-cli-${suffix}".`, - `Local error: ${localMessage}`, - `Package error: ${packageMessage}`, - 'Run `pnpm -F @truenine/logger run build` to build the native module.' 
- ].join('\n') - ) -} - -function loadBindingFromCliBinaryPackage( - runtimeRequire: ReturnType, - suffix: string -): NapiLoggerModule { - const packageName = `@truenine/memory-sync-cli-${suffix}` - - try { - const cliBinaryPackage = runtimeRequire(packageName) as Record - const loggerModule = cliBinaryPackage['logger'] - - if (isNapiLoggerModule(loggerModule)) return loggerModule - } - catch { - } - - const packageJsonPath = runtimeRequire.resolve(`${packageName}/package.json`) - const packageDir = dirname(packageJsonPath) - const bindingCandidates = readdirSync(packageDir) - .filter(fileName => fileName.startsWith('napi-logger.') && fileName.endsWith('.node')) - .sort() - - for (const candidateFile of bindingCandidates) { - const bindingModule = runtimeRequire(join(packageDir, candidateFile)) as unknown - - if (isNapiLoggerModule(bindingModule)) return bindingModule - } - - throw new Error(`Package "${packageName}" does not export a logger binding or contain a compatible native module`) -} - -function loadNativeBinding(): NapiLoggerModule { - const runtimeRequire = createRequire(import.meta.url) - const {local, suffix} = getPlatformBinding() - const localCandidates = [`./${local}.node`, `../dist/${local}.node`] - let localError: unknown = new Error(`No local candidate matched "${local}"`) - - for (const candidate of localCandidates) { - try { - return runtimeRequire(candidate) as NapiLoggerModule - } - catch (error) { - localError = error - } - } - - try { - return loadBindingFromCliBinaryPackage(runtimeRequire, suffix) - } - catch (packageError) { - throw formatBindingLoadError(localError, packageError, suffix) - } -} - -function getNapiBinding(): NapiLoggerModule { - if (napiBinding != null) return napiBinding - - if (napiBindingError != null) throw napiBindingError - - try { - napiBinding = loadNativeBinding() - return napiBinding - } - catch (error) { - napiBindingError = error instanceof Error ? 
error : new Error(String(error)) - throw napiBindingError - } -} - -function parseBufferedDiagnostics(serialized: string): LoggerDiagnosticRecord[] { - try { - const parsed = JSON.parse(serialized) as unknown - return Array.isArray(parsed) ? parsed as LoggerDiagnosticRecord[] : [] - } - catch { - return [] - } -} - -function createLogMethod(instance: NapiLoggerInstance, level: PlainLogLevel): LoggerMethod { - return (message: string | object, ...meta: unknown[]): void => { - instance.emit(level, message, meta.length === 0 ? void 0 : meta) - } -} - -function createDiagnosticMethod(instance: NapiLoggerInstance, level: LoggerDiagnosticLevel): LoggerDiagnosticMethod { - return (diagnostic: LoggerDiagnosticInput): void => { - instance.emitDiagnostic(level, diagnostic) - } -} - -function createNapiAdapter(instance: NapiLoggerInstance): ILogger { - const messageMethods = PLAIN_LOG_LEVELS.reduce((logger, level) => { - logger[level] = createLogMethod(instance, level) - return logger - }, {} as Record) - - const diagnosticMethods = DIAGNOSTIC_LOG_LEVELS.reduce((logger, level) => { - logger[level] = createDiagnosticMethod(instance, level) - return logger - }, {} as Record) - - return { - error: diagnosticMethods.error, - warn: diagnosticMethods.warn, - info: messageMethods.info, - debug: messageMethods.debug, - trace: messageMethods.trace, - fatal: diagnosticMethods.fatal - } -} - -export function setGlobalLogLevel(level: LogLevel): void { - getNapiBinding().setGlobalLogLevel(level) -} - -export function getGlobalLogLevel(): LogLevel | undefined { - return getNapiBinding().getGlobalLogLevel() as LogLevel | undefined -} - -export function clearBufferedDiagnostics(): void { - getNapiBinding().clearBufferedDiagnostics() -} - -export function drainBufferedDiagnostics(): LoggerDiagnosticRecord[] { - return parseBufferedDiagnostics(getNapiBinding().drainBufferedDiagnostics()) -} - -export function flushOutput(): void { - getNapiBinding().flushOutput?.() -} - -export function 
createLogger(namespace: string, logLevel?: LogLevel): ILogger { - return createNapiAdapter(getNapiBinding().createLogger(namespace, logLevel)) -} +export * from '../../../sdk/src/libraries/logger' diff --git a/libraries/logger/tsconfig.lib.json b/libraries/logger/tsconfig.lib.json index 7df70332..7221e516 100644 --- a/libraries/logger/tsconfig.lib.json +++ b/libraries/logger/tsconfig.lib.json @@ -3,14 +3,16 @@ "extends": "./tsconfig.json", "compilerOptions": { "composite": true, - "rootDir": "./src", + "rootDir": "../..", "noEmit": false, "outDir": "./dist", "skipLibCheck": true }, "include": [ "src/**/*", - "env.d.ts" + "env.d.ts", + "../../sdk/src/libraries/logger.ts", + "../../sdk/src/core/native-binding-loader.ts" ], "exclude": [ "../node_modules", diff --git a/libraries/md-compiler/package.json b/libraries/md-compiler/package.json index b2633144..520ae4a2 100644 --- a/libraries/md-compiler/package.json +++ b/libraries/md-compiler/package.json @@ -3,7 +3,7 @@ "type": "module", "version": "2026.10411.10132", "private": true, - "description": "Rust-powered MDX→Markdown compiler for Node.js with pure-TS fallback", + "description": "Rust-powered MDX→Markdown compiler for Node.js with shared N-API loading", "license": "AGPL-3.0-only", "exports": { "./package.json": "./package.json", @@ -40,8 +40,8 @@ ] }, "scripts": { - "build": "tsx ../../scripts/build-quiet.ts", - "build:all": "run-s build:native build", + "build": "run-s build:ts build:native", + "build:all": "run-s build:ts build:native", "build:native": "napi build --platform --release --output-dir dist -- --features napi", "build:native:debug": "napi build --platform --output-dir dist -- --features napi", "build:ts": "tsdown", @@ -60,9 +60,11 @@ }, "devDependencies": { "@napi-rs/cli": "catalog:", + "@truenine/eslint10-config": "catalog:", "@types/estree": "catalog:", "@types/estree-jsx": "catalog:", "@types/mdast": "catalog:", + "eslint": "catalog:", "mdast-util-mdx": "catalog:", "npm-run-all2": "catalog:", 
"remark-frontmatter": "catalog:", diff --git a/libraries/md-compiler/src/markdown/native-binding.test.ts b/libraries/md-compiler/src/markdown/native-binding.test.ts new file mode 100644 index 00000000..31bc8a8e --- /dev/null +++ b/libraries/md-compiler/src/markdown/native-binding.test.ts @@ -0,0 +1,73 @@ +import process from 'node:process' +import {afterEach, describe, expect, it, vi} from 'vitest' + +interface PlatformBinding { + readonly local: string + readonly suffix: string +} + +const PLATFORM_BINDINGS: Record = { + 'win32-x64': {local: 'napi-md-compiler.win32-x64-msvc', suffix: 'win32-x64-msvc'}, + 'linux-x64': {local: 'napi-md-compiler.linux-x64-gnu', suffix: 'linux-x64-gnu'}, + 'linux-arm64': {local: 'napi-md-compiler.linux-arm64-gnu', suffix: 'linux-arm64-gnu'}, + 'darwin-arm64': {local: 'napi-md-compiler.darwin-arm64', suffix: 'darwin-arm64'}, + 'darwin-x64': {local: 'napi-md-compiler.darwin-x64', suffix: 'darwin-x64'} +} + +function getPlatformBinding(): PlatformBinding | undefined { + return PLATFORM_BINDINGS[`${process.platform}-${process.arch}`] +} + +afterEach(() => { + vi.doUnmock('node:module') + vi.resetModules() + vi.restoreAllMocks() +}) + +describe('md-compiler native binding lookup', () => { + it('loads the repo cli platform artifact when bundled into sdk dist', async () => { + const platformBinding = getPlatformBinding() + if (platformBinding == null) return + + vi.resetModules() + + const attempted: string[] = [] + const fakeBinding = { + buildFrontMatter: vi.fn(() => 'front-matter'), + buildPromptTomlArtifact: vi.fn(() => 'artifact'), + buildTomlDocument: vi.fn(() => 'document'), + compileMdxToMd: vi.fn(() => 'compiled'), + parseMarkdown: vi.fn(() => ({contentWithoutFrontMatter: ''})), + transformMdxReferencesToMd: vi.fn(() => 'transformed') + } + + vi.doMock('node:module', () => ({ + createRequire() { + return Object.assign( + (specifier: string) => { + attempted.push(specifier) + + if (specifier === 
`../../../cli/npm/${platformBinding.suffix}/${platformBinding.local}.node`) { + return fakeBinding + } + + throw new Error(`Cannot find module '${specifier}'`) + }, + { + resolve(specifier: string) { + attempted.push(`resolve:${specifier}`) + throw new Error(`Cannot find module '${specifier}'`) + } + } + ) + } + })) + + const {getNapiMdCompilerBinding} = await import('../native-binding') + + expect(getNapiMdCompilerBinding()).toBe(fakeBinding) + expect(attempted).toContain( + `../../../cli/npm/${platformBinding.suffix}/${platformBinding.local}.node` + ) + }) +}) diff --git a/libraries/md-compiler/src/native-binding.ts b/libraries/md-compiler/src/native-binding.ts index c38d676d..c4ae9ad3 100644 --- a/libraries/md-compiler/src/native-binding.ts +++ b/libraries/md-compiler/src/native-binding.ts @@ -1,7 +1,5 @@ -import {readdirSync} from 'node:fs' -import {createRequire} from 'node:module' -import {dirname, join} from 'node:path' import process from 'node:process' +import {createNativeBindingLoader} from '../../../sdk/src/core/native-binding-loader' export interface NativeParseMarkdownResult { readonly yamlFrontMatterJson?: string @@ -18,22 +16,6 @@ export interface NapiMdCompilerModule { buildPromptTomlArtifact: (optionsJson: string) => string } -interface PlatformBinding { - readonly local: string - readonly suffix: string -} - -const PLATFORM_BINDINGS: Record = { - 'win32-x64': {local: 'napi-md-compiler.win32-x64-msvc', suffix: 'win32-x64-msvc'}, - 'linux-x64': {local: 'napi-md-compiler.linux-x64-gnu', suffix: 'linux-x64-gnu'}, - 'linux-arm64': {local: 'napi-md-compiler.linux-arm64-gnu', suffix: 'linux-arm64-gnu'}, - 'darwin-arm64': {local: 'napi-md-compiler.darwin-arm64', suffix: 'darwin-arm64'}, - 'darwin-x64': {local: 'napi-md-compiler.darwin-x64', suffix: 'darwin-x64'} -} - -let cachedBinding: NapiMdCompilerModule | undefined, - cachedBindingError: Error | undefined - export function shouldSkipNativeBinding(): boolean { if 
(process.env['TNMSC_FORCE_NATIVE_BINDING'] === '1') return false return process.env['TNMSC_DISABLE_NATIVE_BINDING'] === '1' @@ -51,92 +33,20 @@ function isNapiMdCompilerModule(value: unknown): value is NapiMdCompilerModule { && typeof candidate.buildPromptTomlArtifact === 'function' } -function getPlatformBinding(): PlatformBinding { - const binding = PLATFORM_BINDINGS[`${process.platform}-${process.arch}`] - if (binding != null) return binding - - throw new Error( - `Unsupported platform for @truenine/md-compiler native binding: ${process.platform}-${process.arch}` - ) -} - -function loadBindingFromCliBinaryPackage( - runtimeRequire: ReturnType, - suffix: string -): NapiMdCompilerModule { - const packageName = `@truenine/memory-sync-cli-${suffix}` - - try { - const pkg = runtimeRequire(packageName) as Record - const binding = pkg['mdCompiler'] - - if (isNapiMdCompilerModule(binding)) return binding - } - catch { - } +const loadNativeBinding = createNativeBindingLoader({ + packageName: '@truenine/md-compiler', + binaryName: 'napi-md-compiler', + bindingValidator: isNapiMdCompilerModule, + cliExportName: 'mdCompiler' +}) - const packageJsonPath = runtimeRequire.resolve(`${packageName}/package.json`) - const packageDir = dirname(packageJsonPath) - const bindingCandidates = readdirSync(packageDir) - .filter(fileName => fileName.startsWith('napi-md-compiler.') && fileName.endsWith('.node')) - .sort() - - for (const candidateFile of bindingCandidates) { - const binding = runtimeRequire(join(packageDir, candidateFile)) as unknown - if (isNapiMdCompilerModule(binding)) return binding - } - - throw new Error(`Package "${packageName}" does not export an mdCompiler binding or contain a compatible native module`) -} - -function loadNativeBinding(): NapiMdCompilerModule { - if (shouldSkipNativeBinding()) { - throw new Error('Native binding loading is disabled by TNMSC_DISABLE_NATIVE_BINDING=1') - } - - const runtimeRequire = createRequire(import.meta.url) - const {local, 
suffix} = getPlatformBinding() - const localCandidates = [`./${local}.node`, `../dist/${local}.node`, `../${local}.node`] - let localError: unknown = new Error(`No local candidate matched "${local}"`) - - for (const candidate of localCandidates) { - try { - const binding = runtimeRequire(candidate) as unknown - if (isNapiMdCompilerModule(binding)) return binding - } - catch (error) { - localError = error - } - } - - try { - return loadBindingFromCliBinaryPackage(runtimeRequire, suffix) - } - catch (packageError) { - const localMessage = localError instanceof Error ? localError.message : String(localError) - const packageMessage = packageError instanceof Error ? packageError.message : String(packageError) - throw new Error( - [ - 'Failed to load @truenine/md-compiler native binding.', - `Tried local binaries for "${local}" and package "@truenine/memory-sync-cli-${suffix}".`, - `Local error: ${localMessage}`, - `Package error: ${packageMessage}`, - 'Run `pnpm -F @truenine/md-compiler run build` to build the native module.' - ].join('\n') - ) - } -} +let cachedBindingError: Error | undefined export function getNapiMdCompilerBinding(): NapiMdCompilerModule { - if (cachedBinding != null) return cachedBinding if (cachedBindingError != null) throw cachedBindingError - try { - cachedBinding = loadNativeBinding() - return cachedBinding - } - catch (error) { - cachedBindingError = error instanceof Error ? 
error : new Error(String(error)) - throw cachedBindingError - } + if (!shouldSkipNativeBinding()) return loadNativeBinding() + + cachedBindingError = new Error('Native binding loading is disabled by TNMSC_DISABLE_NATIVE_BINDING=1') + throw cachedBindingError } diff --git a/libraries/md-compiler/tsconfig.lib.json b/libraries/md-compiler/tsconfig.lib.json index 7df70332..7c0c9719 100644 --- a/libraries/md-compiler/tsconfig.lib.json +++ b/libraries/md-compiler/tsconfig.lib.json @@ -3,14 +3,15 @@ "extends": "./tsconfig.json", "compilerOptions": { "composite": true, - "rootDir": "./src", + "rootDir": "../..", "noEmit": false, "outDir": "./dist", "skipLibCheck": true }, "include": [ "src/**/*", - "env.d.ts" + "env.d.ts", + "../../sdk/src/core/native-binding-loader.ts" ], "exclude": [ "../node_modules", diff --git a/libraries/script-runtime/src/index.ts b/libraries/script-runtime/src/index.ts index b696e348..3fd60a71 100644 --- a/libraries/script-runtime/src/index.ts +++ b/libraries/script-runtime/src/index.ts @@ -1,229 +1 @@ -import type { - ProxyContext, - ProxyDefinition, - ProxyModule, - ProxyModuleConfig, - ProxyRouteHandler, - ValidatePublicPathOptions -} from './types' - -import * as fs from 'node:fs' -import {createRequire} from 'node:module' -import {dirname, join} from 'node:path' -import process from 'node:process' -import {fileURLToPath} from 'node:url' -import { - loadProxyModule as loadProxyModuleInternal, - resolvePublicPathModule -} from './runtime-core' - -export type { - ProxyCommand, - ProxyContext, - ProxyDefinition, - ProxyMatcherConfig, - ProxyModule, - ProxyModuleConfig, - ProxyRouteHandler, - ValidatePublicPathOptions -} from './types' - -interface ScriptRuntimeBinding { - validate_public_path?: (resolvedPath: string, aindexPublicDir: string) => string - validatePublicPath?: (resolvedPath: string, aindexPublicDir: string) => string - resolve_public_path?: (filePath: string, ctxJson: string, logicalPath: string) => string - resolvePublicPath?: 
(filePath: string, ctxJson: string, logicalPath: string) => string -} - -interface PlatformBinding { - readonly local: string - readonly suffix: string -} - -const PLATFORM_BINDINGS: Record = { - 'win32-x64': {local: 'napi-script-runtime.win32-x64-msvc', suffix: 'win32-x64-msvc'}, - 'linux-x64': {local: 'napi-script-runtime.linux-x64-gnu', suffix: 'linux-x64-gnu'}, - 'linux-arm64': {local: 'napi-script-runtime.linux-arm64-gnu', suffix: 'linux-arm64-gnu'}, - 'darwin-arm64': {local: 'napi-script-runtime.darwin-arm64', suffix: 'darwin-arm64'}, - 'darwin-x64': {local: 'napi-script-runtime.darwin-x64', suffix: 'darwin-x64'} -} - -let binding: ScriptRuntimeBinding | undefined, bindingLoadError: Error | undefined, - workerPathCache: string | undefined - -function getPlatformBinding(): PlatformBinding { - const platformBinding = PLATFORM_BINDINGS[`${process.platform}-${process.arch}`] - if (platformBinding != null) return platformBinding - - throw new Error( - `Unsupported platform for @truenine/script-runtime native binding: ${process.platform}-${process.arch}` - ) -} - -function isScriptRuntimeBinding(value: unknown): value is ScriptRuntimeBinding { - if (value == null || typeof value !== 'object') return false - const candidate = value as ScriptRuntimeBinding - return typeof candidate.validate_public_path === 'function' - || typeof candidate.validatePublicPath === 'function' - || typeof candidate.resolve_public_path === 'function' - || typeof candidate.resolvePublicPath === 'function' -} - -function formatBindingLoadError(localError: unknown, packageError: unknown, suffix: string): Error { - const localMessage = localError instanceof Error ? localError.message : String(localError) - const packageMessage = packageError instanceof Error ? packageError.message : String(packageError) - return new Error( - [ - 'Failed to load @truenine/script-runtime native binding.', - `Tried local binary "./${PLATFORM_BINDINGS[`${process.platform}-${process.arch}`]?.local ?? 
'unknown'}.node" and package "@truenine/memory-sync-cli-${suffix}".`, - `Local error: ${localMessage}`, - `Package error: ${packageMessage}`, - 'Run `pnpm -F @truenine/script-runtime run build` to build the native module.' - ].join('\n') - ) -} - -function loadBindingFromCliBinaryPackage( - runtimeRequire: ReturnType, - suffix: string -): ScriptRuntimeBinding { - const packageName = `@truenine/memory-sync-cli-${suffix}` - - try { - const cliBinaryPackage = runtimeRequire(packageName) as Record - const runtimeBinding = cliBinaryPackage['scriptRuntime'] - - if (isScriptRuntimeBinding(runtimeBinding)) return runtimeBinding - } - catch { - } // Fall through to the package-directory probe below. - - const packageJsonPath = runtimeRequire.resolve(`${packageName}/package.json`) - const packageDir = dirname(packageJsonPath) - const bindingCandidates = fs.readdirSync(packageDir) - .filter(fileName => fileName.startsWith('napi-script-runtime.') && fileName.endsWith('.node')) - .sort() - - for (const candidateFile of bindingCandidates) { - const bindingModule = runtimeRequire(join(packageDir, candidateFile)) as unknown - - if (isScriptRuntimeBinding(bindingModule)) return bindingModule - } - - throw new Error(`Package "${packageName}" does not export a scriptRuntime binding or contain a compatible native module`) -} - -function loadNativeBinding(): ScriptRuntimeBinding { - const runtimeRequire = createRequire(import.meta.url) - const {local, suffix} = getPlatformBinding() - const localCandidates = [`./${local}.node`, `../dist/${local}.node`] - - let localError: unknown = new Error(`No local candidate matched "${local}"`) - - for (const candidate of localCandidates) { - try { - return runtimeRequire(candidate) as ScriptRuntimeBinding - } - catch (error) { - localError = error - } - } - - try { - return loadBindingFromCliBinaryPackage(runtimeRequire, suffix) - } - catch (packageError) { - throw formatBindingLoadError(localError, packageError, suffix) - } -} - -function 
getBinding(): ScriptRuntimeBinding { - if (binding != null) return binding - if (bindingLoadError != null) throw bindingLoadError - - try { - binding = loadNativeBinding() - return binding - } - catch (error) { - bindingLoadError = error instanceof Error ? error : new Error(String(error)) - throw bindingLoadError - } -} - -function callValidatePublicPathBinding(resolvedPath: string, options: ValidatePublicPathOptions): string { - const nativeBinding = getBinding() - const validatePublicPathNative = nativeBinding.validate_public_path ?? nativeBinding.validatePublicPath - - if (validatePublicPathNative == null) throw new Error('validate_public_path native binding is unavailable') - - return validatePublicPathNative(resolvedPath, options.aindexPublicDir) -} - -function callResolvePublicPathBinding(filePath: string, ctxJson: string, logicalPath: string): string { - const nativeBinding = getBinding() - const resolvePublicPathNative = nativeBinding.resolve_public_path ?? nativeBinding.resolvePublicPath - - if (resolvePublicPathNative == null) throw new Error('resolve_public_path native binding is unavailable') - - return resolvePublicPathNative(filePath, ctxJson, logicalPath) -} - -function getWorkerPath(): string { - if (workerPathCache != null) return workerPathCache - - const candidatePaths: [string, string] = [ - fileURLToPath(new URL('./resolve-proxy-worker.mjs', import.meta.url)), - fileURLToPath(new URL('./script-runtime-worker.mjs', import.meta.url)) - ] - - for (const candidatePath of candidatePaths) { - if (fs.existsSync(candidatePath)) { - workerPathCache = candidatePath - return candidatePath - } - } - - workerPathCache = candidatePaths[0] - return workerPathCache -} - -export function defineProxy(value: T): T { - return value -} - -export async function loadProxyModule(filePath: string): Promise { - return loadProxyModuleInternal(filePath) -} - -export function validatePublicPath( - resolvedPath: string, - options: ValidatePublicPathOptions -): string { - 
return callValidatePublicPathBinding(resolvedPath, options) -} - -export function resolvePublicPath( - filePath: string, - ctx: ProxyContext, - logicalPath: string, - timeoutMs: number = 5_000 -): string { - return callResolvePublicPathBinding(filePath, JSON.stringify({ - ...ctx, - workerPath: getWorkerPath(), - timeoutMs - }), logicalPath) -} - -export async function resolvePublicPathUnchecked( - filePath: string, - ctx: ProxyContext, - logicalPath: string -): Promise { - return resolvePublicPathModule(filePath, ctx, logicalPath) -} - -export function getProxyModuleConfig(module: ProxyModule): ProxyModuleConfig | undefined { - return module.config -} +export * from '../../../sdk/src/libraries/script-runtime' diff --git a/libraries/script-runtime/src/native-binding.test.ts b/libraries/script-runtime/src/native-binding.test.ts new file mode 100644 index 00000000..0a8f67b4 --- /dev/null +++ b/libraries/script-runtime/src/native-binding.test.ts @@ -0,0 +1,69 @@ +import process from 'node:process' +import {afterEach, describe, expect, it, vi} from 'vitest' + +interface PlatformBinding { + readonly local: string + readonly suffix: string +} + +const PLATFORM_BINDINGS: Record = { + 'win32-x64': {local: 'napi-script-runtime.win32-x64-msvc', suffix: 'win32-x64-msvc'}, + 'linux-x64': {local: 'napi-script-runtime.linux-x64-gnu', suffix: 'linux-x64-gnu'}, + 'linux-arm64': {local: 'napi-script-runtime.linux-arm64-gnu', suffix: 'linux-arm64-gnu'}, + 'darwin-arm64': {local: 'napi-script-runtime.darwin-arm64', suffix: 'darwin-arm64'}, + 'darwin-x64': {local: 'napi-script-runtime.darwin-x64', suffix: 'darwin-x64'} +} + +function getPlatformBinding(): PlatformBinding | undefined { + return PLATFORM_BINDINGS[`${process.platform}-${process.arch}`] +} + +afterEach(() => { + vi.doUnmock('node:module') + vi.resetModules() + vi.restoreAllMocks() +}) + +describe('script-runtime native binding lookup', () => { + it('loads the repo cli platform artifact when bundled into sdk dist', async () 
=> { + const platformBinding = getPlatformBinding() + if (platformBinding == null) return + + vi.resetModules() + + const attempted: string[] = [] + const fakeBinding = { + validate_public_path: vi.fn((resolvedPath: string) => resolvedPath) + } + + vi.doMock('node:module', () => ({ + createRequire() { + return Object.assign( + (specifier: string) => { + attempted.push(specifier) + + if (specifier === `../../../cli/npm/${platformBinding.suffix}/${platformBinding.local}.node`) { + return fakeBinding + } + + throw new Error(`Cannot find module '${specifier}'`) + }, + { + resolve(specifier: string) { + attempted.push(`resolve:${specifier}`) + throw new Error(`Cannot find module '${specifier}'`) + } + } + ) + } + })) + + const {validatePublicPath} = await import('./index') + + expect(validatePublicPath('/tmp/demo', {aindexPublicDir: '/tmp'})).toBe('/tmp/demo') + expect(attempted).toContain( + `../../../cli/npm/${platformBinding.suffix}/${platformBinding.local}.node` + ) + expect(fakeBinding.validate_public_path).toHaveBeenCalledWith('/tmp/demo', '/tmp') + }) +}) diff --git a/libraries/script-runtime/src/resolve-proxy-worker.ts b/libraries/script-runtime/src/resolve-proxy-worker.ts index 9fb0f7d3..43a60d16 100644 --- a/libraries/script-runtime/src/resolve-proxy-worker.ts +++ b/libraries/script-runtime/src/resolve-proxy-worker.ts @@ -1,19 +1 @@ -import {readFileSync} from 'node:fs' -import process from 'node:process' -import {resolvePublicPathModule} from './runtime-core' - -async function main(): Promise { - const [, , filePath, ctxJsonPath, logicalPath] = process.argv - if (filePath == null || ctxJsonPath == null || logicalPath == null) throw new Error('Usage: resolve-proxy-worker ') - - const ctxJson = readFileSync(ctxJsonPath, 'utf8') - const ctx = JSON.parse(ctxJson) as Parameters[1] - const result = await resolvePublicPathModule(filePath, ctx, logicalPath) - process.stdout.write(`${result}\n`) -} - -main().catch((error: unknown) => { - const message = error 
instanceof Error ? error.message : String(error) - process.stderr.write(`${message}\n`) - process.exit(1) -}) +import '../../../sdk/src/libraries/script-runtime/resolve-proxy-worker' diff --git a/libraries/script-runtime/src/runtime-core.ts b/libraries/script-runtime/src/runtime-core.ts index f3f6f59b..9cc6dd9f 100644 --- a/libraries/script-runtime/src/runtime-core.ts +++ b/libraries/script-runtime/src/runtime-core.ts @@ -1,104 +1 @@ -import type {Jiti} from 'jiti' -import type {ProxyContext, ProxyDefinition, ProxyModule, ProxyRouteHandler} from './types' - -import * as fs from 'node:fs' -import * as path from 'node:path' - -function isRecord(value: unknown): value is Record { - return typeof value === 'object' && value !== null -} - -function isPlainObject(value: unknown): value is Record { - if (!isRecord(value)) return false - const prototype = Object.getPrototypeOf(value) as object | null - return prototype === Object.prototype || prototype === null -} - -async function createRuntime(): Promise { - const {createJiti} = await import('jiti') as { - createJiti: (filename: string, options: { - readonly fsCache: boolean - readonly moduleCache: boolean - readonly interopDefault: false - }) => Jiti - } - - return createJiti(import.meta.url, { - fsCache: false, - moduleCache: false, - interopDefault: false - }) -} - -function toProxyModule(rawModule: unknown): ProxyModule { - if (!isRecord(rawModule)) throw new Error('proxy.ts must export a module namespace object') - - const defaultExport = rawModule['default'] - if (defaultExport == null) throw new Error('proxy.ts must export a default value') - if (typeof defaultExport !== 'function' && !isPlainObject(defaultExport)) throw new TypeError('proxy.ts default export must be a function or plain object') - - const configExport = rawModule['config'] - if (configExport != null && !isPlainObject(configExport)) throw new Error('proxy.ts config export must be a plain object') - - const proxyModule: ProxyModule = { - default: 
defaultExport as ProxyModule['default'] - } - - if (configExport != null) { - return { - ...proxyModule, - config: configExport as NonNullable - } - } - - return proxyModule -} - -export async function loadProxyModule(filePath: string): Promise { - const absoluteFilePath = path.resolve(filePath) - if (!fs.existsSync(absoluteFilePath)) throw new Error(`proxy.ts not found: ${absoluteFilePath}`) - - const runtime = await createRuntime() - const loadedModule = await runtime.import(absoluteFilePath) - return toProxyModule(loadedModule) -} - -function matchesCommand(module: ProxyModule, command: ProxyContext['command']): boolean { - const commands = module.config?.matcher?.commands - if (commands == null || commands.length === 0) return true - return commands.includes(command) -} - -function assertNonEmptyPath(value: string, label: string): string { - if (value.trim().length === 0) throw new Error(`${label} cannot be empty`) - return value -} - -function getRouteHandler(handler: ProxyModule['default']): ProxyRouteHandler | undefined { - if (typeof handler === 'function') return handler - - const proxyDefinition: ProxyDefinition = handler - if (proxyDefinition.resolvePublicPath == null) return void 0 - if (typeof proxyDefinition.resolvePublicPath !== 'function') throw new TypeError('proxy.ts default export resolvePublicPath must be a function') - - return proxyDefinition.resolvePublicPath -} - -export async function resolvePublicPathModule( - filePath: string, - ctx: ProxyContext, - logicalPath: string -): Promise { - const targetLogicalPath = assertNonEmptyPath(logicalPath, 'logical public path') - const proxyModule = await loadProxyModule(filePath) - - if (!matchesCommand(proxyModule, ctx.command)) return targetLogicalPath - - const routeHandler = getRouteHandler(proxyModule.default) - if (routeHandler == null) return targetLogicalPath - - const resolvedPath = await routeHandler(targetLogicalPath, ctx) - if (typeof resolvedPath !== 'string') throw new Error('proxy.ts 
must resolve public paths to a string') - - return assertNonEmptyPath(resolvedPath, 'proxy.ts resolved public path') -} +export * from '../../../sdk/src/libraries/script-runtime/runtime-core' diff --git a/libraries/script-runtime/src/types.ts b/libraries/script-runtime/src/types.ts index 690daa2d..c96f42ff 100644 --- a/libraries/script-runtime/src/types.ts +++ b/libraries/script-runtime/src/types.ts @@ -1,37 +1 @@ -export type ProxyCommand = 'install' | 'dry-run' | 'clean' | 'plugins' - -export interface ProxyContext { - readonly cwd: string - readonly workspaceDir: string - readonly aindexDir: string - readonly command: ProxyCommand - readonly platform: NodeJS.Platform -} - -export interface ProxyMatcherConfig { - readonly commands?: readonly ProxyCommand[] -} - -export interface ProxyModuleConfig { - readonly matcher?: ProxyMatcherConfig -} - -export type ProxyRouteHandler = ( - logicalPath: string, - ctx: ProxyContext -) => string | Promise - -export interface ProxyDefinition { - readonly resolvePublicPath?: ProxyRouteHandler -} - -export type ProxyHandler = ProxyDefinition | ProxyRouteHandler - -export interface ProxyModule { - readonly default: ProxyHandler - readonly config?: ProxyModuleConfig -} - -export interface ValidatePublicPathOptions { - readonly aindexPublicDir: string -} +export * from '../../../sdk/src/libraries/script-runtime/types' diff --git a/libraries/script-runtime/tsconfig.lib.json b/libraries/script-runtime/tsconfig.lib.json index 7df70332..61dead36 100644 --- a/libraries/script-runtime/tsconfig.lib.json +++ b/libraries/script-runtime/tsconfig.lib.json @@ -3,14 +3,16 @@ "extends": "./tsconfig.json", "compilerOptions": { "composite": true, - "rootDir": "./src", + "rootDir": "../..", "noEmit": false, "outDir": "./dist", "skipLibCheck": true }, "include": [ "src/**/*", - "env.d.ts" + "env.d.ts", + "../../sdk/src/libraries/script-runtime/**/*.ts", + "../../sdk/src/core/native-binding-loader.ts" ], "exclude": [ "../node_modules", diff --git 
a/package.json b/package.json index 4bbbc8b7..d47a9d5c 100644 --- a/package.json +++ b/package.json @@ -29,11 +29,11 @@ "ruleset-sync" ], "scripts": { - "build": "turbo build", - "test": "turbo test", - "lint": "turbo lint", - "lint:fix": "turbo lint:fix", - "check:type": "turbo check:type", + "build": "turbo run build --ui=stream --log-order=grouped", + "test": "turbo run test --ui=stream --log-order=grouped", + "lint": "turbo run lint --ui=stream --log-order=grouped", + "lint:fix": "turbo run lint:fix --ui=stream --log-order=grouped", + "check:type": "turbo run check:type --ui=stream --log-order=grouped", "dev:doc": "pnpm -C doc dev", "build:doc": "pnpm -C doc build", "sync-versions": "tsx .githooks/sync-versions.ts", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 0b8c6be2..ba9052c0 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -537,6 +537,12 @@ importers: '@napi-rs/cli': specifier: 'catalog:' version: 3.6.1(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.6.0)(node-addon-api@7.1.1) + '@truenine/eslint10-config': + specifier: 'catalog:' + version: 2026.10411.10025(4d9df9c0640d97cc4e32b4ae40243784) + eslint: + specifier: 'catalog:' + version: 10.2.0(jiti@2.6.1) npm-run-all2: specifier: 'catalog:' version: 8.0.4 @@ -555,6 +561,9 @@ importers: '@napi-rs/cli': specifier: 'catalog:' version: 3.6.1(@emnapi/core@1.9.2)(@emnapi/runtime@1.9.2)(@types/node@25.6.0)(node-addon-api@7.1.1) + '@truenine/eslint10-config': + specifier: 'catalog:' + version: 2026.10411.10025(4d9df9c0640d97cc4e32b4ae40243784) '@types/estree': specifier: 'catalog:' version: 1.0.8 @@ -564,6 +573,9 @@ importers: '@types/mdast': specifier: 'catalog:' version: 4.0.4 + eslint: + specifier: 'catalog:' + version: 10.2.0(jiti@2.6.1) mdast-util-mdx: specifier: 'catalog:' version: 3.0.0 diff --git a/sdk/scripts/finalize-bundle.ts b/sdk/scripts/finalize-bundle.ts index dd05d158..27bbd434 100644 --- a/sdk/scripts/finalize-bundle.ts +++ b/sdk/scripts/finalize-bundle.ts @@ -11,6 +11,7 @@ const 
scriptDir = dirname(fileURLToPath(import.meta.url)) const cliDir = resolve(scriptDir, '..') const distDir = resolve(cliDir, 'dist') const indexEntryPath = resolve(distDir, 'index.mjs') +const internalBridgeEntryPath = resolve(distDir, 'internal', 'native-command-bridge.mjs') const bundledJitiBabelRuntimeSourcePath = resolve(cliDir, 'node_modules', 'jiti', 'dist', 'babel.cjs') const bundledJitiBabelRuntimeTargetPath = resolve(distDir, 'babel.cjs') @@ -66,6 +67,11 @@ function ensureIndexBundleExists(): void { throw new Error(`Expected bundled CLI entry at "${indexEntryPath}" before finalizing bundle assets.`) } +function ensureInternalBridgeBundleExists(): void { + if (existsSync(internalBridgeEntryPath)) return + throw new Error(`Expected bundled internal command bridge at "${internalBridgeEntryPath}" before finalizing bundle assets.`) +} + function findBundledJitiChunkPath(): string | undefined { const bundledJitiChunkName = readdirSync(distDir) .find(fileName => /^jiti-.*\.mjs$/u.test(fileName)) @@ -138,10 +144,34 @@ function smokeTestCliEntry(): void { }) } +function smokeTestInternalBridgeEntry(): void { + const smokeTest = runNodeProcess([internalBridgeEntryPath, 'self-test']) + assertProcessSucceeded(smokeTest, [ + `Bundled internal command bridge "${internalBridgeEntryPath}" failed the runtime smoke test.` + ]) + + const stdout = smokeTest.stdout.trim() + if (stdout.length === 0) { + throw new Error(`Bundled internal command bridge "${internalBridgeEntryPath}" returned empty stdout.`) + } + + const result = JSON.parse(stdout) as {ok?: boolean, command?: string} + if (result.ok !== true || result.command !== 'self-test') { + throw new Error( + [ + `Bundled internal command bridge "${internalBridgeEntryPath}" returned an unexpected payload.`, + `Actual: ${stdout}` + ].join('\n') + ) + } +} + ensureIndexBundleExists() +ensureInternalBridgeBundleExists() const bundledJitiChunkPath = ensureBundledJitiRuntimeAssets() 
smokeTestBundledJitiTransform(bundledJitiChunkPath) smokeTestCliEntry() +smokeTestInternalBridgeEntry() writeMarkdownBlock('Bundled CLI assets finalized', { entry: indexEntryPath, diff --git a/sdk/src/ConfigLoader.ts b/sdk/src/ConfigLoader.ts index 0445cd52..debdac33 100644 --- a/sdk/src/ConfigLoader.ts +++ b/sdk/src/ConfigLoader.ts @@ -1,4 +1,3 @@ -import type {ILogger} from '@truenine/logger' import type { CodeStylesOptions, ConfigLoaderOptions, @@ -7,9 +6,10 @@ import type { UserConfigFile, WindowsOptions } from './adaptors/adaptor-core/ConfigTypes.schema' +import type {ILogger} from '@/libraries/logger' import * as fs from 'node:fs' import process from 'node:process' -import {createLogger} from '@truenine/logger' +import {createLogger} from '@/libraries/logger' import { getSupportedPluginConfigKeysMessage, ZUserConfigFile diff --git a/sdk/src/ProtectedDeletionGuard.ts b/sdk/src/ProtectedDeletionGuard.ts index ee6026e6..96b0de04 100644 --- a/sdk/src/ProtectedDeletionGuard.ts +++ b/sdk/src/ProtectedDeletionGuard.ts @@ -1,6 +1,6 @@ -import type {ILogger} from '@truenine/logger' import type {AdaptorOptions, OutputCollectedContext} from './adaptors/adaptor-core' import type {PublicDefinitionResolveOptions} from './public-config-paths' +import type {ILogger} from '@/libraries/logger' import * as fs from 'node:fs' import * as path from 'node:path' import process from 'node:process' diff --git a/sdk/src/adaptors/NativeBaseOutputAdaptor.test.ts b/sdk/src/adaptors/NativeBaseOutputAdaptor.test.ts new file mode 100644 index 00000000..ee7d4ae3 --- /dev/null +++ b/sdk/src/adaptors/NativeBaseOutputAdaptor.test.ts @@ -0,0 +1,301 @@ +import type { + OutputCleanContext, + OutputWriteContext, + Project, + ProjectChildrenMemoryPrompt, + ProjectIDEConfigFile, + ProjectRootMemoryPrompt, + ReadmePrompt +} from './adaptor-core' +import * as fs from 'node:fs' +import * as os from 'node:os' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import 
{createLogger, FilePathKind, IDEKind, PromptKind} from './adaptor-core' +import { + NativeAgentsOutputAdaptor, + NativeGitExcludeOutputAdaptor, + NativeReadmeMdConfigFileOutputAdaptor +} from './NativeBaseOutputAdaptor' + +function createRootPrompt(content: string): ProjectRootMemoryPrompt { + return { + type: PromptKind.ProjectRootMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Root, + path: '', + getDirectoryName: () => '' + }, + markdownContents: [] + } as ProjectRootMemoryPrompt +} + +function createChildPrompt( + projectRoot: string, + relativePath: string, + content: string +): ProjectChildrenMemoryPrompt { + return { + type: PromptKind.ProjectChildrenMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + markdownContents: [], + dir: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: projectRoot, + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(projectRoot, relativePath) + }, + workingChildDirectoryPath: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: projectRoot, + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(projectRoot, relativePath) + } + } as ProjectChildrenMemoryPrompt +} + +function createProject( + workspaceBase: string, + name: string, + promptSource = false +): Project { + return { + name, + isPromptSourceProject: promptSource, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: name, + basePath: workspaceBase, + getDirectoryName: () => name, + getAbsolutePath: () => path.join(workspaceBase, name) + } + } as Project +} + +function createLoggerContext(name: string) { + return createLogger(name, 'error') +} + +function createConfigFile( + type: IDEKind, + sourcePath: string, + content: string +): ProjectIDEConfigFile { + return { + type, + content, + length: content.length, + filePathKind: 
FilePathKind.Absolute, + dir: { + pathKind: FilePathKind.Absolute, + path: sourcePath, + getDirectoryName: () => path.dirname(sourcePath) + } + } as ProjectIDEConfigFile +} + +function createReadmePrompt( + projectRoot: string, + relativeTarget: string, + fileKind: ReadmePrompt['fileKind'], + content: string +): ReadmePrompt { + return { + type: PromptKind.Readme, + content, + length: content.length, + dir: { + pathKind: FilePathKind.Relative, + path: relativeTarget, + basePath: projectRoot, + getDirectoryName: () => relativeTarget, + getAbsolutePath: () => path.join(projectRoot, relativeTarget) + }, + projectName: path.basename(projectRoot), + targetDir: { + pathKind: FilePathKind.Relative, + path: relativeTarget, + basePath: projectRoot, + getDirectoryName: () => relativeTarget, + getAbsolutePath: () => path.join(projectRoot, relativeTarget) + }, + isRoot: relativeTarget === '.', + fileKind, + markdownContents: [] + } as ReadmePrompt +} + +function createWriteContext( + workspaceBase: string, + projects: readonly Project[], + extra: Record = {} +): OutputWriteContext { + return { + logger: createLoggerContext('NativeBaseOutputAdaptorTest'), + fs, + path, + glob: {} as never, + dryRun: true, + runtimeTargets: {jetbrainsCodexDirs: []}, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [...projects] + }, + ...extra + } + } as unknown as OutputWriteContext +} + +function createCleanContext( + workspaceBase: string, + projects: readonly Project[], + extra: Record = {} +): OutputCleanContext { + return { + logger: createLoggerContext('NativeBaseOutputAdaptorTest'), + fs, + path, + glob: {} as never, + dryRun: true, + runtimeTargets: {jetbrainsCodexDirs: []}, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + 
projects: [...projects] + }, + ...extra + } + } as unknown as OutputCleanContext +} + +describe('native base output adaptor bridge', () => { + it('keeps agents output behavior through the native planner contract', async () => { + const plugin = new NativeAgentsOutputAdaptor() + const workspaceBase = path.resolve('tmp/native-agents-plugin') + const projectRoot = path.join(workspaceBase, 'project-a') + const projects = [ + { + name: '__workspace__', + isWorkspaceRootProject: true, + rootMemoryPrompt: createRootPrompt('workspace root') + } as Project, + { + ...createProject(workspaceBase, 'aindex', true), + rootMemoryPrompt: createRootPrompt('prompt-source root'), + childMemoryPrompts: [createChildPrompt(path.join(workspaceBase, 'aindex'), 'commands', 'prompt-source child')] + } as Project, + { + ...createProject(workspaceBase, 'project-a'), + rootMemoryPrompt: createRootPrompt('project root'), + childMemoryPrompts: [createChildPrompt(projectRoot, 'commands', 'project child')] + } as Project + ] + const ctx = createWriteContext(workspaceBase, projects) + + const declarations = await plugin.declareOutputFiles(ctx) + const outputPaths = declarations.map(declaration => declaration.path) + const workspaceDeclaration = declarations.find(declaration => declaration.path === path.join(workspaceBase, 'AGENTS.md')) + const projectDeclaration = declarations.find(declaration => declaration.path === path.join(projectRoot, 'AGENTS.md')) + + expect(outputPaths).toContain(path.join(workspaceBase, 'AGENTS.md')) + expect(outputPaths).toContain(path.join(projectRoot, 'AGENTS.md')) + expect(outputPaths).toContain(path.join(projectRoot, 'commands', 'AGENTS.md')) + expect(outputPaths).not.toContain(path.join(workspaceBase, 'aindex', 'AGENTS.md')) + + if (workspaceDeclaration == null || projectDeclaration == null) { + throw new Error('Expected native AGENTS.md declarations were not emitted') + } + + await expect(plugin.convertContent(workspaceDeclaration, ctx)).resolves.toBe('workspace 
root') + await expect(plugin.convertContent(projectDeclaration, ctx)).resolves.toBe('project root') + }) + + it('keeps git exclude output and cleanup behavior through the native planner contract', async () => { + const workspaceBase = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-native-git-exclude-')) + const projectDir = path.join(workspaceBase, 'packages', 'app') + fs.mkdirSync(path.join(workspaceBase, '.git', 'info'), {recursive: true}) + fs.mkdirSync(path.join(projectDir, '.git', 'info'), {recursive: true}) + + try { + const plugin = new NativeGitExcludeOutputAdaptor() + const projects = [ + { + name: '__workspace__', + isWorkspaceRootProject: true + } as Project, + createProject(workspaceBase, 'packages/app') + ] + const writeCtx = createWriteContext(workspaceBase, projects, { + globalGitIgnore: 'dist/\n# comment\n', + shadowGitExclude: '.idea/\n' + }) + const cleanCtx = createCleanContext(workspaceBase, projects) + const outputDeclarations = await plugin.declareOutputFiles(writeCtx) + const cleanupDeclarations = await plugin.declareCleanupPaths(cleanCtx) + + expect(outputDeclarations).toEqual(expect.arrayContaining([ + expect.objectContaining({ + path: path.join(workspaceBase, '.git', 'info', 'exclude'), + source: {content: 'dist/\n.idea/\n'} + }), + expect.objectContaining({ + path: path.join(projectDir, '.git', 'info', 'exclude'), + source: {content: 'dist/\n.idea/\n'} + }) + ])) + expect(cleanupDeclarations.delete).toEqual(expect.arrayContaining([ + expect.objectContaining({path: path.join(workspaceBase, '.git', 'info', 'exclude')}), + expect.objectContaining({path: path.join(projectDir, '.git', 'info', 'exclude')}) + ])) + } finally { + fs.rmSync(workspaceBase, {recursive: true, force: true}) + } + }) + + it('keeps readme and editorconfig output behavior through the native planner contract', async () => { + const plugin = new NativeReadmeMdConfigFileOutputAdaptor() + const workspaceBase = path.resolve('tmp/native-readme-plugin') + const projects = [ + 
createProject(workspaceBase, 'aindex', true), + createProject(workspaceBase, 'memory-sync') + ] + const memorySyncRoot = path.join(workspaceBase, 'memory-sync') + const ctx = createWriteContext(workspaceBase, projects, { + readmePrompts: [ + createReadmePrompt(memorySyncRoot, '.', 'Readme', '# README\n'), + createReadmePrompt(memorySyncRoot, '.', 'CodeOfConduct', '# COC\n') + ], + editorConfigFiles: [ + createConfigFile( + IDEKind.EditorConfig, + path.join(workspaceBase, 'aindex', 'public', '.editorconfig'), + 'root = true\n' + ) + ] + }) + + const declarations = await plugin.declareOutputFiles(ctx) + const outputPaths = declarations.map(declaration => declaration.path) + + expect(outputPaths).toContain(path.join(memorySyncRoot, 'README.md')) + expect(outputPaths).toContain(path.join(memorySyncRoot, 'CODE_OF_CONDUCT.md')) + expect(outputPaths).toContain(path.join(workspaceBase, 'aindex', '.editorconfig')) + expect(outputPaths).toContain(path.join(memorySyncRoot, '.editorconfig')) + }) +}) diff --git a/sdk/src/adaptors/NativeBaseOutputAdaptor.ts b/sdk/src/adaptors/NativeBaseOutputAdaptor.ts new file mode 100644 index 00000000..6053c3af --- /dev/null +++ b/sdk/src/adaptors/NativeBaseOutputAdaptor.ts @@ -0,0 +1,157 @@ +import type { + OutputCleanContext, + OutputCleanupDeclarations, + OutputDeclarationScope, + OutputFileDeclaration, + OutputWriteContext +} from './adaptor-core' +import {getNativeBinding} from '@/core/native-binding' +import {AbstractOutputAdaptor} from './adaptor-core' + +interface NativeBaseOutputBinding { + readonly collectBaseOutputPlans?: (contextJson: string) => string | Promise +} + +interface NativeBaseOutputFilePlan { + readonly path: string + readonly scope?: OutputDeclarationScope + readonly content: string +} + +interface NativeBaseOutputPluginPlan { + readonly pluginName: string + readonly outputFiles: readonly NativeBaseOutputFilePlan[] + readonly cleanup: OutputCleanupDeclarations +} + +interface NativeBaseOutputPlans { + readonly 
plugins: readonly NativeBaseOutputPluginPlan[] +} + +type NativeBasePlanMap = ReadonlyMap + +const baseOutputPlanCache = new WeakMap>() + +function requireNativeBaseOutputBinding(): Required { + const binding = getNativeBinding() + if (binding?.collectBaseOutputPlans == null) { + throw new TypeError('Native base-output planner binding is required. Rebuild the Rust NAPI package before running tnmsc.') + } + return binding as Required +} + +async function loadNativeBasePlanMap( + ctx: Pick +): Promise { + const binding = requireNativeBaseOutputBinding() + const raw = await binding.collectBaseOutputPlans( + JSON.stringify(ctx.collectedOutputContext) + ) + const parsed = JSON.parse(raw) as NativeBaseOutputPlans + const plans = new Map() + + for (const plugin of parsed.plugins ?? []) { + plans.set(plugin.pluginName, plugin) + } + + return plans +} + +async function getNativeBasePlan( + pluginName: string, + ctx: Pick +): Promise { + const cacheKey = ctx.collectedOutputContext as object + let plansPromise = baseOutputPlanCache.get(cacheKey) + if (plansPromise == null) { + plansPromise = loadNativeBasePlanMap(ctx) + baseOutputPlanCache.set(cacheKey, plansPromise) + } + + const plans = await plansPromise + const plan = plans.get(pluginName) + if (plan == null) { + throw new Error(`Native base-output planner did not return a plan for ${pluginName}`) + } + return plan +} + +class NativeBaseOutputAdaptor extends AbstractOutputAdaptor { + override async declareOutputFiles( + ctx: OutputWriteContext + ): Promise { + const plan = await getNativeBasePlan(this.name, ctx) + return plan.outputFiles.map(outputFile => ({ + path: outputFile.path, + source: {content: outputFile.content}, + ...outputFile.scope == null ? 
{} : {scope: outputFile.scope} + })) + } + + override async declareCleanupPaths( + ctx: OutputCleanContext + ): Promise { + const plan = await getNativeBasePlan(this.name, ctx) + return plan.cleanup + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + void ctx + const source = declaration.source as {content?: string} + if (source.content == null) { + throw new Error(`Unsupported declaration source for ${this.name}`) + } + return source.content + } +} + +export class NativeAgentsOutputAdaptor extends NativeBaseOutputAdaptor { + constructor() { + super('AgentsOutputAdaptor', { + outputFileName: 'AGENTS.md', + treatWorkspaceRootProjectAsProject: true, + capabilities: { + prompt: { + scopes: ['project'], + singleScope: false + } + } + }) + } +} + +export class NativeGitExcludeOutputAdaptor extends NativeBaseOutputAdaptor { + constructor() { + super('GitExcludeOutputAdaptor', {capabilities: {}}) + } +} + +export class NativeJetBrainsIDECodeStyleConfigOutputAdaptor extends NativeBaseOutputAdaptor { + constructor() { + super('JetBrainsIDECodeStyleConfigOutputAdaptor', {capabilities: {}}) + } +} + +export class NativeVisualStudioCodeIDEConfigOutputAdaptor extends NativeBaseOutputAdaptor { + constructor() { + super('VisualStudioCodeIDEConfigOutputAdaptor', {capabilities: {}}) + } +} + +export class NativeZedIDEConfigOutputAdaptor extends NativeBaseOutputAdaptor { + constructor() { + super('ZedIDEConfigOutputAdaptor', {capabilities: {}}) + } +} + +export class NativeReadmeMdConfigFileOutputAdaptor extends NativeBaseOutputAdaptor { + constructor() { + super('ReadmeMdConfigFileOutputAdaptor', { + outputFileName: 'README.md', + capabilities: {} + }) + } +} diff --git a/sdk/src/adaptors/NativeDroidCLIOutputAdaptor.test.ts b/sdk/src/adaptors/NativeDroidCLIOutputAdaptor.test.ts new file mode 100644 index 00000000..fc3645fb --- /dev/null +++ b/sdk/src/adaptors/NativeDroidCLIOutputAdaptor.test.ts @@ -0,0 +1,275 @@ 
+import type { + CommandPrompt, + GlobalMemoryPrompt, + OutputCleanContext, + OutputWriteContext, + Project, + ProjectChildrenMemoryPrompt, + ProjectRootMemoryPrompt, + SkillPrompt +} from './adaptor-core' +import {Buffer} from 'node:buffer' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {getEffectiveHomeDir} from '@/runtime-environment' +import {createLogger, FilePathKind, PromptKind} from './adaptor-core' +import {NativeDroidCLIOutputAdaptor} from './NativeDroidCLIOutputAdaptor' + +function createRootPrompt(content: string): ProjectRootMemoryPrompt { + return { + type: PromptKind.ProjectRootMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Root, + path: '', + getDirectoryName: () => '' + }, + markdownContents: [] + } as ProjectRootMemoryPrompt +} + +function createGlobalMemoryPrompt(content: string): GlobalMemoryPrompt { + return { + type: PromptKind.GlobalMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: '.factory', + basePath: getEffectiveHomeDir(), + getDirectoryName: () => '.factory', + getAbsolutePath: () => path.join(getEffectiveHomeDir(), '.factory') + }, + markdownContents: [] + } as GlobalMemoryPrompt +} + +function createChildPrompt( + workspaceBase: string, + projectName: string, + relativePath: string, + content: string +): ProjectChildrenMemoryPrompt { + return { + type: PromptKind.ProjectChildrenMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + markdownContents: [], + dir: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.join(workspaceBase, projectName), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(workspaceBase, projectName, relativePath) + }, + workingChildDirectoryPath: { + pathKind: FilePathKind.Relative, + path: 
relativePath, + basePath: path.join(workspaceBase, projectName), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(workspaceBase, projectName, relativePath) + } + } as ProjectChildrenMemoryPrompt +} + +function createCommandPrompt(scope: 'project' | 'global'): CommandPrompt { + return { + type: PromptKind.Command, + content: 'Run build', + length: 'Run build'.length, + commandName: 'build', + commandPrefix: 'shared', + ...(scope === 'global' ? {globalOnly: true} : {}), + dir: { + pathKind: FilePathKind.Relative, + path: 'commands/build.mdx', + basePath: path.resolve('tmp/native-droid-plugin/aindex'), + getDirectoryName: () => 'commands', + getAbsolutePath: () => path.resolve('tmp/native-droid-plugin/aindex/commands/build.mdx') + }, + yamlFrontMatter: { + description: 'Build command', + ...(scope === 'global' ? {scope: 'global'} : {}) + }, + markdownContents: [] + } as CommandPrompt +} + +function createSkillPrompt(scope: 'project' | 'global'): SkillPrompt { + return { + type: PromptKind.Skill, + content: 'Skill body', + length: 'Skill body'.length, + skillName: 'ship', + seriName: 'shared', + dir: { + pathKind: FilePathKind.Relative, + path: 'ship', + basePath: path.resolve('tmp/native-droid-plugin/aindex/dist/skills'), + getDirectoryName: () => 'ship', + getAbsolutePath: () => path.resolve('tmp/native-droid-plugin/aindex/dist/skills/ship') + }, + yamlFrontMatter: { + name: 'ship', + description: 'Skill description', + ...(scope === 'global' ? 
{scope: 'global'} : {}) + }, + childDocs: [ + { + type: PromptKind.SkillChildDoc, + content: 'Guide body', + length: 'Guide body'.length, + filePathKind: FilePathKind.Relative, + relativePath: 'guide.mdx', + dir: { + pathKind: FilePathKind.Relative, + path: 'ship', + basePath: path.resolve('tmp/native-droid-plugin/aindex/dist/skills'), + getDirectoryName: () => 'ship', + getAbsolutePath: () => path.resolve('tmp/native-droid-plugin/aindex/dist/skills/ship') + }, + markdownContents: [] + } + ], + resources: [ + { + type: PromptKind.SkillResource, + extension: '.bin', + fileName: 'blob.bin', + relativePath: 'assets/blob.bin', + content: 'aGVsbG8=', + encoding: 'base64', + length: 8 + } + ], + markdownContents: [] + } as SkillPrompt +} + +function createWriteContext(workspaceBase: string): OutputWriteContext { + return { + logger: createLogger('NativeDroidCLIOutputAdaptorTest', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + runtimeTargets: {jetbrainsCodexDirs: []}, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [ + { + name: '__workspace__', + isWorkspaceRootProject: true, + rootMemoryPrompt: createRootPrompt('workspace root'), + projectConfig: { + includeSeries: ['shared'], + skills: {includeSeries: ['shared']} + } + }, + { + name: 'aindex', + isPromptSourceProject: true, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'aindex', + basePath: workspaceBase, + getDirectoryName: () => 'aindex', + getAbsolutePath: () => path.join(workspaceBase, 'aindex') + }, + projectConfig: { + includeSeries: ['shared'], + skills: {includeSeries: ['shared']} + }, + rootMemoryPrompt: createRootPrompt('prompt-source root') + }, + { + name: 'project-a', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceBase, + getDirectoryName: () => 'project-a', + 
getAbsolutePath: () => path.join(workspaceBase, 'project-a') + }, + projectConfig: { + includeSeries: ['shared'], + skills: {includeSeries: ['shared']} + }, + rootMemoryPrompt: createRootPrompt('project root'), + childMemoryPrompts: [ + createChildPrompt( + workspaceBase, + 'project-a', + 'commands', + 'project child' + ) + ] + } + ] as Project[] + }, + commands: [createCommandPrompt('project')], + skills: [createSkillPrompt('project')], + globalMemory: createGlobalMemoryPrompt('global memory') + } + } as unknown as OutputWriteContext +} + +function createCleanContext(workspaceBase: string): OutputCleanContext { + return { + ...createWriteContext(workspaceBase), + dryRun: true + } as OutputCleanContext +} + +describe('native droid output adaptor', () => { + it('keeps prompt, command, and skill outputs through the native planner contract', async () => { + const plugin = new NativeDroidCLIOutputAdaptor() + const workspaceBase = path.resolve('tmp/native-droid-plugin') + const ctx = createWriteContext(workspaceBase) + const declarations = await plugin.declareOutputFiles(ctx) + const outputPaths = declarations.map(declaration => declaration.path) + const globalPath = path.join(getEffectiveHomeDir(), '.factory', 'AGENTS.md') + const skillMain = declarations.find(declaration => declaration.path.endsWith('.factory/skills/ship/SKILL.md')) + const skillResource = declarations.find(declaration => declaration.path.endsWith('.factory/skills/ship/assets/blob.bin')) + + expect(outputPaths).toContain(path.join(workspaceBase, 'AGENTS.md')) + expect(outputPaths).toContain(path.join(workspaceBase, 'project-a', 'AGENTS.md')) + expect(outputPaths).toContain(path.join(workspaceBase, 'project-a', 'commands', 'AGENTS.md')) + expect(outputPaths).toContain(path.join(workspaceBase, '.factory', 'commands', 'shared-build.md')) + expect(outputPaths).toContain(path.join(workspaceBase, 'project-a', '.factory', 'skills', 'ship', 'guide.md')) + expect(outputPaths).toContain(globalPath) + + if 
(skillMain == null || skillResource == null) { + throw new Error('Expected Droid declarations were not emitted') + } + + await expect(plugin.convertContent(skillMain, ctx)).resolves.toBe('---\nname: ship\ndescription: Skill description\n---\n\nSkill body') + + const resourceContent = await plugin.convertContent(skillResource, ctx) + expect(Buffer.isBuffer(resourceContent)).toBe(true) + expect((resourceContent as Buffer).toString('utf8')).toBe('hello') + }) + + it('keeps cleanup coverage through the native planner contract', async () => { + const plugin = new NativeDroidCLIOutputAdaptor() + const workspaceBase = path.resolve('tmp/native-droid-cleanup') + const cleanup = await plugin.declareCleanupPaths(createCleanContext(workspaceBase)) + const deletePaths = cleanup.delete?.map(target => target.path.replaceAll('\\', '/')) ?? [] + + expect(deletePaths).toContain(path.join(workspaceBase, 'AGENTS.md').replaceAll('\\', '/')) + expect(deletePaths).toContain(path.join(workspaceBase, '.factory', 'commands').replaceAll('\\', '/')) + expect(deletePaths).toContain(path.join(workspaceBase, 'aindex', '.factory', 'skills').replaceAll('\\', '/')) + expect(deletePaths).toContain(path.join(getEffectiveHomeDir(), '.factory', 'AGENTS.md').replaceAll('\\', '/')) + expect(deletePaths).not.toContain(path.join(workspaceBase, 'project-a', 'commands', 'AGENTS.md').replaceAll('\\', '/')) + }) +}) diff --git a/sdk/src/adaptors/NativeDroidCLIOutputAdaptor.ts b/sdk/src/adaptors/NativeDroidCLIOutputAdaptor.ts new file mode 100644 index 00000000..20b40a0e --- /dev/null +++ b/sdk/src/adaptors/NativeDroidCLIOutputAdaptor.ts @@ -0,0 +1,120 @@ +import type { + OutputCleanContext, + OutputCleanupDeclarations, + OutputDeclarationScope, + OutputFileDeclaration, + OutputWriteContext +} from './adaptor-core' +import {Buffer} from 'node:buffer' +import {getNativeBinding} from '@/core/native-binding' +import {AbstractOutputAdaptor} from './adaptor-core' + +interface NativeDroidOutputBinding { + readonly 
collectDroidOutputPlan?: ( + contextJson: string + ) => string | Promise +} + +interface NativeDroidOutputFilePlan { + readonly path: string + readonly scope?: OutputDeclarationScope + readonly content: string + readonly encoding?: 'text' | 'base64' +} + +interface NativeDroidOutputPlan { + readonly pluginName: string + readonly outputFiles: readonly NativeDroidOutputFilePlan[] + readonly cleanup: OutputCleanupDeclarations +} + +const droidOutputPlanCache = new WeakMap>() + +function requireNativeDroidOutputBinding(): Required { + const binding = getNativeBinding() + if (binding?.collectDroidOutputPlan == null) { + throw new TypeError('Native Droid output planner binding is required. Rebuild the Rust NAPI package before running tnmsc.') + } + return binding as Required +} + +async function getDroidOutputPlan( + ctx: Pick +): Promise { + const cacheKey = ctx.collectedOutputContext as object + let planPromise = droidOutputPlanCache.get(cacheKey) + if (planPromise != null) return planPromise + + const binding = requireNativeDroidOutputBinding() + planPromise = Promise.resolve( + binding.collectDroidOutputPlan(JSON.stringify(ctx.collectedOutputContext)) + ).then(raw => JSON.parse(raw) as NativeDroidOutputPlan) + droidOutputPlanCache.set(cacheKey, planPromise) + return planPromise +} + +export class NativeDroidCLIOutputAdaptor extends AbstractOutputAdaptor { + constructor() { + super('DroidCLIOutputAdaptor', { + globalConfigDir: '.factory', + outputFileName: 'AGENTS.md', + treatWorkspaceRootProjectAsProject: true, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + }, + commands: { + scopes: ['project', 'global'], + singleScope: true + }, + skills: { + scopes: ['project', 'global'], + singleScope: true + } + } + }) + } + + override async declareOutputFiles( + ctx: OutputWriteContext + ): Promise { + const plan = await getDroidOutputPlan(ctx) + return plan.outputFiles.map(outputFile => ({ + path: outputFile.path, + source: { + content: 
outputFile.content, + encoding: outputFile.encoding + }, + ...outputFile.scope == null ? {} : {scope: outputFile.scope} + })) + } + + override async declareCleanupPaths( + ctx: OutputCleanContext + ): Promise { + const plan = await getDroidOutputPlan(ctx) + return plan.cleanup + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + void ctx + const source = declaration.source as { + readonly content?: string + readonly encoding?: 'text' | 'base64' + } + + if (source.content == null) { + throw new Error(`Unsupported declaration source for ${this.name}`) + } + + if (source.encoding === 'base64') { + return Buffer.from(source.content, 'base64') + } + + return source.content + } +} diff --git a/sdk/src/adaptors/NativeGeminiCLIOutputAdaptor.test.ts b/sdk/src/adaptors/NativeGeminiCLIOutputAdaptor.test.ts new file mode 100644 index 00000000..b8c13694 --- /dev/null +++ b/sdk/src/adaptors/NativeGeminiCLIOutputAdaptor.test.ts @@ -0,0 +1,188 @@ +import type { + GlobalMemoryPrompt, + OutputCleanContext, + OutputWriteContext, + Project, + ProjectChildrenMemoryPrompt, + ProjectRootMemoryPrompt +} from './adaptor-core' +import * as fs from 'node:fs' +import * as path from 'node:path' +import {describe, expect, it} from 'vitest' +import {getEffectiveHomeDir} from '@/runtime-environment' +import {createLogger, FilePathKind, PromptKind} from './adaptor-core' +import {NativeGeminiCLIOutputAdaptor} from './NativeGeminiCLIOutputAdaptor' + +function createRootPrompt(content: string): ProjectRootMemoryPrompt { + return { + type: PromptKind.ProjectRootMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Root, + path: '', + getDirectoryName: () => '' + }, + markdownContents: [] + } as ProjectRootMemoryPrompt +} + +function createGlobalMemoryPrompt(content: string): GlobalMemoryPrompt { + return { + type: PromptKind.GlobalMemory, + content, + length: 
content.length, + filePathKind: FilePathKind.Relative, + dir: { + pathKind: FilePathKind.Relative, + path: '.gemini', + basePath: getEffectiveHomeDir(), + getDirectoryName: () => '.gemini', + getAbsolutePath: () => path.join(getEffectiveHomeDir(), '.gemini') + }, + markdownContents: [] + } as GlobalMemoryPrompt +} + +function createChildPrompt( + workspaceBase: string, + projectName: string, + relativePath: string, + content: string +): ProjectChildrenMemoryPrompt { + return { + type: PromptKind.ProjectChildrenMemory, + content, + length: content.length, + filePathKind: FilePathKind.Relative, + markdownContents: [], + dir: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.join(workspaceBase, projectName), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(workspaceBase, projectName, relativePath) + }, + workingChildDirectoryPath: { + pathKind: FilePathKind.Relative, + path: relativePath, + basePath: path.join(workspaceBase, projectName), + getDirectoryName: () => path.basename(relativePath), + getAbsolutePath: () => path.join(workspaceBase, projectName, relativePath) + } + } as ProjectChildrenMemoryPrompt +} + +function createWriteContext(workspaceBase: string): OutputWriteContext { + return { + logger: createLogger('NativeGeminiCLIOutputAdaptorTest', 'error'), + fs, + path, + glob: {} as never, + dryRun: true, + runtimeTargets: {jetbrainsCodexDirs: []}, + collectedOutputContext: { + workspace: { + directory: { + pathKind: FilePathKind.Absolute, + path: workspaceBase, + getDirectoryName: () => path.basename(workspaceBase) + }, + projects: [ + { + name: '__workspace__', + isWorkspaceRootProject: true, + rootMemoryPrompt: createRootPrompt('workspace root') + }, + { + name: 'aindex', + isPromptSourceProject: true, + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'aindex', + basePath: workspaceBase, + getDirectoryName: () => 'aindex', + getAbsolutePath: () => path.join(workspaceBase, 
'aindex') + }, + rootMemoryPrompt: createRootPrompt('prompt-source root'), + childMemoryPrompts: [ + createChildPrompt( + workspaceBase, + 'aindex', + 'commands', + 'prompt-source child' + ) + ] + }, + { + name: 'project-a', + dirFromWorkspacePath: { + pathKind: FilePathKind.Relative, + path: 'project-a', + basePath: workspaceBase, + getDirectoryName: () => 'project-a', + getAbsolutePath: () => path.join(workspaceBase, 'project-a') + }, + rootMemoryPrompt: createRootPrompt('project root'), + childMemoryPrompts: [ + createChildPrompt( + workspaceBase, + 'project-a', + 'commands', + 'project child' + ) + ] + } + ] as Project[] + }, + globalMemory: createGlobalMemoryPrompt('global memory') + } + } as unknown as OutputWriteContext +} + +function createCleanContext(workspaceBase: string): OutputCleanContext { + return { + ...createWriteContext(workspaceBase), + dryRun: true + } as OutputCleanContext +} + +describe('native gemini output adaptor', () => { + it('keeps project and global prompt outputs through the native planner contract', async () => { + const plugin = new NativeGeminiCLIOutputAdaptor() + const workspaceBase = path.resolve('tmp/native-gemini-plugin') + const ctx = createWriteContext(workspaceBase) + const declarations = await plugin.declareOutputFiles(ctx) + const outputPaths = declarations.map(declaration => declaration.path) + const globalPath = path.join(getEffectiveHomeDir(), '.gemini', 'GEMINI.md') + const workspaceDeclaration = declarations.find(declaration => declaration.path === path.join(workspaceBase, 'GEMINI.md')) + const globalDeclaration = declarations.find(declaration => declaration.path === globalPath) + + expect(outputPaths).toContain(path.join(workspaceBase, 'GEMINI.md')) + expect(outputPaths).toContain(path.join(workspaceBase, 'project-a', 'GEMINI.md')) + expect(outputPaths).toContain(path.join(workspaceBase, 'project-a', 'commands', 'GEMINI.md')) + expect(outputPaths).not.toContain(path.join(workspaceBase, 'aindex', 'GEMINI.md')) + 
expect(outputPaths).toContain(globalPath) + + if (workspaceDeclaration == null || globalDeclaration == null) { + throw new Error('Expected Gemini declarations were not emitted') + } + + await expect(plugin.convertContent(workspaceDeclaration, ctx)).resolves.toBe('workspace root') + await expect(plugin.convertContent(globalDeclaration, ctx)).resolves.toBe('global memory') + }) + + it('keeps prompt-source and global cleanup coverage through the native planner contract', async () => { + const plugin = new NativeGeminiCLIOutputAdaptor() + const workspaceBase = path.resolve('tmp/native-gemini-cleanup') + const cleanup = await plugin.declareCleanupPaths(createCleanContext(workspaceBase)) + const deletePaths = cleanup.delete?.map(target => target.path.replaceAll('\\', '/')) ?? [] + + expect(deletePaths).toContain(path.join(workspaceBase, 'GEMINI.md').replaceAll('\\', '/')) + expect(deletePaths).toContain(path.join(workspaceBase, 'aindex', 'GEMINI.md').replaceAll('\\', '/')) + expect(deletePaths).toContain(path.join(workspaceBase, 'aindex', 'commands', 'GEMINI.md').replaceAll('\\', '/')) + expect(deletePaths).toContain(path.join(workspaceBase, 'project-a', 'GEMINI.md').replaceAll('\\', '/')) + expect(deletePaths).toContain(path.join(getEffectiveHomeDir(), '.gemini', 'GEMINI.md').replaceAll('\\', '/')) + }) +}) diff --git a/sdk/src/adaptors/NativeGeminiCLIOutputAdaptor.ts b/sdk/src/adaptors/NativeGeminiCLIOutputAdaptor.ts new file mode 100644 index 00000000..93e501ea --- /dev/null +++ b/sdk/src/adaptors/NativeGeminiCLIOutputAdaptor.ts @@ -0,0 +1,98 @@ +import type { + OutputCleanContext, + OutputCleanupDeclarations, + OutputDeclarationScope, + OutputFileDeclaration, + OutputWriteContext +} from './adaptor-core' +import {getNativeBinding} from '@/core/native-binding' +import {AbstractOutputAdaptor} from './adaptor-core' + +interface NativeGeminiOutputBinding { + readonly collectGeminiOutputPlan?: ( + contextJson: string + ) => string | Promise +} + +interface 
NativeGeminiOutputFilePlan { + readonly path: string + readonly scope?: OutputDeclarationScope + readonly content: string +} + +interface NativeGeminiOutputPlan { + readonly pluginName: string + readonly outputFiles: readonly NativeGeminiOutputFilePlan[] + readonly cleanup: OutputCleanupDeclarations +} + +const geminiOutputPlanCache = new WeakMap>() + +function requireNativeGeminiOutputBinding(): Required { + const binding = getNativeBinding() + if (binding?.collectGeminiOutputPlan == null) { + throw new TypeError('Native Gemini output planner binding is required. Rebuild the Rust NAPI package before running tnmsc.') + } + return binding as Required +} + +async function getGeminiOutputPlan( + ctx: Pick +): Promise { + const cacheKey = ctx.collectedOutputContext as object + let planPromise = geminiOutputPlanCache.get(cacheKey) + if (planPromise != null) return planPromise + + const binding = requireNativeGeminiOutputBinding() + planPromise = Promise.resolve( + binding.collectGeminiOutputPlan(JSON.stringify(ctx.collectedOutputContext)) + ).then(raw => JSON.parse(raw) as NativeGeminiOutputPlan) + geminiOutputPlanCache.set(cacheKey, planPromise) + return planPromise +} + +export class NativeGeminiCLIOutputAdaptor extends AbstractOutputAdaptor { + constructor() { + super('GeminiCLIOutputAdaptor', { + globalConfigDir: '.gemini', + outputFileName: 'GEMINI.md', + treatWorkspaceRootProjectAsProject: true, + capabilities: { + prompt: { + scopes: ['project', 'global'], + singleScope: false + } + } + }) + } + + override async declareOutputFiles( + ctx: OutputWriteContext + ): Promise { + const plan = await getGeminiOutputPlan(ctx) + return plan.outputFiles.map(outputFile => ({ + path: outputFile.path, + source: {content: outputFile.content}, + ...outputFile.scope == null ? 
{} : {scope: outputFile.scope} + })) + } + + override async declareCleanupPaths( + ctx: OutputCleanContext + ): Promise { + const plan = await getGeminiOutputPlan(ctx) + return plan.cleanup + } + + override async convertContent( + declaration: OutputFileDeclaration, + ctx: OutputWriteContext + ): Promise { + void ctx + const source = declaration.source as {content?: string} + if (source.content == null) { + throw new Error(`Unsupported declaration source for ${this.name}`) + } + return source.content + } +} diff --git a/sdk/src/adaptors/adaptor-core.ts b/sdk/src/adaptors/adaptor-core.ts index 28e0d51c..ac139a77 100644 --- a/sdk/src/adaptors/adaptor-core.ts +++ b/sdk/src/adaptors/adaptor-core.ts @@ -2,7 +2,7 @@ import type { ILogger, LoggerDiagnosticRecord, LogLevel -} from '@truenine/logger' +} from '@/libraries/logger' import { clearBufferedDiagnostics as clearBufferedDiagnosticsNative, createLogger as createLoggerNative, @@ -10,7 +10,7 @@ import { flushOutput as flushOutputNative, getGlobalLogLevel as getGlobalLogLevelNative, setGlobalLogLevel as setGlobalLogLevelNative -} from '@truenine/logger' +} from '@/libraries/logger' export { AbstractInputCapability @@ -153,7 +153,7 @@ export type { LoggerDiagnosticInput, LoggerDiagnosticRecord, LogLevel -} from '@truenine/logger' +} from '@/libraries/logger' export function clearBufferedDiagnostics(): void { clearBufferedDiagnosticsNative() diff --git a/sdk/src/adaptors/adaptor-core/AbstractAdaptor.ts b/sdk/src/adaptors/adaptor-core/AbstractAdaptor.ts index fe58e785..6543c8e4 100644 --- a/sdk/src/adaptors/adaptor-core/AbstractAdaptor.ts +++ b/sdk/src/adaptors/adaptor-core/AbstractAdaptor.ts @@ -1,8 +1,8 @@ -import type {ILogger} from '@truenine/logger' import type {AdaptorKind} from './enums' import type {Plugin} from './plugin' +import type {ILogger} from '@/libraries/logger' -import {createLogger} from '@truenine/logger' +import {createLogger} from '@/libraries/logger' export abstract class AbstractAdaptor implements 
Plugin { readonly type: T diff --git a/sdk/src/adaptors/adaptor-core/AbstractOutputAdaptor.ts b/sdk/src/adaptors/adaptor-core/AbstractOutputAdaptor.ts index 4ade2b89..f59cdf5a 100644 --- a/sdk/src/adaptors/adaptor-core/AbstractOutputAdaptor.ts +++ b/sdk/src/adaptors/adaptor-core/AbstractOutputAdaptor.ts @@ -32,10 +32,10 @@ import type { SubAgentYAMLFrontMatter, WslMirrorFileDeclaration } from './types' - import {Buffer} from 'node:buffer' import * as path from 'node:path' import process from 'node:process' + import {buildPromptTomlArtifact} from '@truenine/md-compiler' import {buildMarkdownWithFrontMatter, buildMarkdownWithRawFrontMatter} from '@truenine/md-compiler/markdown' import {buildConfigDiagnostic, diagnosticLines} from '@/diagnostics' @@ -1204,8 +1204,7 @@ export abstract class AbstractOutputAdaptor extends AbstractAdaptor implements O protected buildRuleFileName(rule: RulePrompt): string { const prefix = `${this.rulesConfig.prefix ?? 'rule'}${this.rulesConfig.linkSymbol ?? '-'}` - const fileName = `${prefix}${rule.prefix}${this.rulesConfig.linkSymbol ?? '-'}${rule.ruleName}${this.rulesConfig.ext ?? '.md'}` - return fileName + return `${prefix}${rule.prefix}${this.rulesConfig.linkSymbol ?? '-'}${rule.ruleName}${this.rulesConfig.ext ?? '.md'}` } async declareOutputFiles(ctx: OutputWriteContext): Promise { diff --git a/sdk/src/adaptors/adaptor-core/GlobalScopeCollector.ts b/sdk/src/adaptors/adaptor-core/GlobalScopeCollector.ts index 523613f1..08bf9f61 100644 --- a/sdk/src/adaptors/adaptor-core/GlobalScopeCollector.ts +++ b/sdk/src/adaptors/adaptor-core/GlobalScopeCollector.ts @@ -1,5 +1,5 @@ import type {EvaluationScope} from '@truenine/md-compiler' -import type {CodeStylePreferences, EnvironmentContext, MdComponent, MdxGlobalScope, OsInfo, ToolReferences, UserProfile} from '@truenine/md-compiler/globals' // Collects and manages global scope variables for MDX expression evaluation. 
// src/scope/GlobalScopeCollector.ts +import type {CodeStylePreferences, EnvironmentContext, MdComponent, MdxGlobalScope, OsInfo, ToolReferences, UserProfile} from '@truenine/md-compiler/globals' // Collects and manages global scope variables for MDX expression evaluation. import type {AdaptorOptions, UserConfigFile} from './types' import * as os from 'node:os' import process from 'node:process' diff --git a/sdk/src/adaptors/adaptor-core/InputTypes.ts b/sdk/src/adaptors/adaptor-core/InputTypes.ts index e3785c2b..64e12dcd 100644 --- a/sdk/src/adaptors/adaptor-core/InputTypes.ts +++ b/sdk/src/adaptors/adaptor-core/InputTypes.ts @@ -230,10 +230,14 @@ export interface SkillResource { * MCP server configuration entry */ export interface McpServerConfig { - readonly command: string + readonly command?: string readonly args?: readonly string[] readonly env?: Readonly> + readonly url?: string + readonly serverUrl?: string + readonly headers?: Readonly> readonly disabled?: boolean + readonly disabledTools?: readonly string[] readonly autoApprove?: readonly string[] } diff --git a/sdk/src/adaptors/adaptor-core/RegistryWriter.ts b/sdk/src/adaptors/adaptor-core/RegistryWriter.ts index 4e74cd69..c9125d69 100644 --- a/sdk/src/adaptors/adaptor-core/RegistryWriter.ts +++ b/sdk/src/adaptors/adaptor-core/RegistryWriter.ts @@ -11,12 +11,12 @@ import type {ILogger, RegistryData, RegistryOperationResult} from './types' import * as fs from 'node:fs' import * as path from 'node:path' -import {createLogger} from '@truenine/logger' import { buildDiagnostic, buildFileOperationDiagnostic, diagnosticLines } from '@/diagnostics' +import {createLogger} from '@/libraries/logger' import {resolveUserPath} from '@/runtime-environment' /** diff --git a/sdk/src/adaptors/adaptor-core/plugin.ts b/sdk/src/adaptors/adaptor-core/plugin.ts index 532a0efa..bad1c613 100644 --- a/sdk/src/adaptors/adaptor-core/plugin.ts +++ b/sdk/src/adaptors/adaptor-core/plugin.ts @@ -1,4 +1,3 @@ -import type {ILogger} from 
'@truenine/logger' import type {MdxGlobalScope} from '@truenine/md-compiler/globals' import type { AindexConfig, @@ -13,6 +12,7 @@ import type {AdaptorKind} from './enums' import type {InputCollectedContext, OutputCollectedContext} from './InputTypes' import type {NativeDeskPathsBinding} from '@/core/desk-paths-types' import type {ExecutionPlan} from '@/execution-plan' +import type {ILogger} from '@/libraries/logger' import type {RuntimeCommand} from '@/runtime-command' import {Buffer} from 'node:buffer' import * as fs from 'node:fs' diff --git a/sdk/src/adaptors/adaptor-core/types.ts b/sdk/src/adaptors/adaptor-core/types.ts index 91d3965d..d6befca5 100644 --- a/sdk/src/adaptors/adaptor-core/types.ts +++ b/sdk/src/adaptors/adaptor-core/types.ts @@ -14,7 +14,7 @@ export type { LoggerDiagnosticInput, LoggerDiagnosticRecord, LogLevel -} from '@truenine/logger' +} from '@/libraries/logger' export class MissingDependencyError extends Error { readonly nodeName: string diff --git a/sdk/src/aindex-config/AindexProjectConfigLoader.ts b/sdk/src/aindex-config/AindexProjectConfigLoader.ts index b77d388f..489ccbdb 100644 --- a/sdk/src/aindex-config/AindexProjectConfigLoader.ts +++ b/sdk/src/aindex-config/AindexProjectConfigLoader.ts @@ -1,8 +1,8 @@ -import type {ILogger} from '@truenine/logger' import type {AindexProjectConfig, AindexProjectConfigLoadResult} from './AindexProjectConfig' +import type {ILogger} from '@/libraries/logger' import * as fs from 'node:fs' import * as path from 'node:path' -import {createLogger} from '@truenine/logger' +import {createLogger} from '@/libraries/logger' const CONFIG_FILE_NAMES = ['aindex.config.ts', 'aindex.config.mts', 'aindex.config.cts', 'aindex.config.js', 'aindex.config.mjs', 'aindex.config.cjs'] diff --git a/sdk/src/config.test.ts b/sdk/src/config.test.ts index efafaaaa..4a5e42a6 100644 --- a/sdk/src/config.test.ts +++ b/sdk/src/config.test.ts @@ -110,6 +110,32 @@ describe('defineConfig', () => { } }) + it('expands tilde-prefixed 
workspaceDir before building runtime context', async () => { + const tempHome = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-home-expand-workspace-')) + const tempWorkspace = path.join(tempHome, 'workspace-expanded') + fs.mkdirSync(tempWorkspace, {recursive: true}) + + process.env['HOME'] = tempHome + process.env['USERPROFILE'] = tempHome + delete process.env['HOMEDRIVE'] + delete process.env['HOMEPATH'] + + try { + const result = await defineConfig({ + loadUserConfig: false, + pluginOptions: { + workspaceDir: '~/workspace-expanded' + } + }) + + expect(result.userConfigOptions.workspaceDir).toBe(tempWorkspace) + expect(result.context.workspace.directory.path).toBe(tempWorkspace) + expect(result.context.aindexDir).toBe(path.join(tempWorkspace, 'aindex')) + } finally { + fs.rmSync(tempHome, {recursive: true, force: true}) + } + }) + it('applies default codeStyles when user config omits them', async () => { const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-code-styles-default-workspace-')) diff --git a/sdk/src/config.ts b/sdk/src/config.ts index 347f1490..7e10c6f9 100644 --- a/sdk/src/config.ts +++ b/sdk/src/config.ts @@ -11,7 +11,7 @@ import type {AdaptorOptions, OutputAdaptor} from './adaptors/adaptor-core/plugin import type {RuntimeCommand} from './runtime-command' import * as path from 'node:path' import process from 'node:process' -import {createLogger} from '@truenine/logger' +import {createLogger} from '@/libraries/logger' import {buildDefaultAindexConfig, mergeAindexConfig} from './adaptors/adaptor-core/AindexConfigDefaults' import { buildDefaultCodeStylesOptions, @@ -59,7 +59,7 @@ const DEFAULT_OPTIONS: Omit, 'workspaceDir'> = { function resolveWorkspaceDirOption(workspaceDir: string | undefined, fallbackWorkspaceDir?: string): string { if (typeof workspaceDir === 'string' && workspaceDir.trim().length > 0) { - return workspaceDir + return path.resolve(resolveUserPath(workspaceDir)) } return path.resolve(fallbackWorkspaceDir ?? 
process.cwd()) diff --git a/sdk/src/core/base_output_plans.rs b/sdk/src/core/base_output_plans.rs new file mode 100644 index 00000000..a852093a --- /dev/null +++ b/sdk/src/core/base_output_plans.rs @@ -0,0 +1,1104 @@ +use std::path::{Component, Path, PathBuf}; + +use serde::{Deserialize, Serialize}; + +use crate::CliError; +use crate::core::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::core::git_discovery::{find_all_git_repos, resolve_git_info_dir}; +use crate::core::plugin_shared::{ + CollectedInputContext, IDEKind, Project, ProjectIDEConfigFile, RelativePath, Workspace, +}; + +const AGENTS_PLUGIN_NAME: &str = "AgentsOutputAdaptor"; +const GIT_EXCLUDE_PLUGIN_NAME: &str = "GitExcludeOutputAdaptor"; +const JETBRAINS_PLUGIN_NAME: &str = "JetBrainsIDECodeStyleConfigOutputAdaptor"; +const VSCODE_PLUGIN_NAME: &str = "VisualStudioCodeIDEConfigOutputAdaptor"; +const ZED_PLUGIN_NAME: &str = "ZedIDEConfigOutputAdaptor"; +const README_PLUGIN_NAME: &str = "ReadmeMdConfigFileOutputAdaptor"; + +const PROJECT_SCOPE: &str = "project"; +const PROJECT_MEMORY_FILE: &str = "AGENTS.md"; +const IDEA_DIR: &str = ".idea"; +const CODE_STYLES_DIR: &str = "codeStyles"; +const VSCODE_DIR: &str = ".vscode"; +const ZED_DIR: &str = ".zed"; +const EDITOR_CONFIG_FILE: &str = ".editorconfig"; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BaseOutputFileDeclarationDto { + pub path: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub scope: Option, + pub content: String, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BaseOutputPluginPlanDto { + pub plugin_name: String, + #[serde(default)] + pub output_files: Vec, + #[serde(default)] + pub cleanup: CleanupDeclarationsDto, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct BaseOutputPlansDto { + 
#[serde(default)] + pub plugins: Vec, +} + +pub fn collect_base_output_plans(context_json: &str) -> Result { + let context = serde_json::from_str::(context_json)?; + let plans = build_base_output_plans(&context)?; + serde_json::to_string(&plans).map_err(CliError::from) +} + +pub fn build_base_output_plans( + context: &CollectedInputContext, +) -> Result { + let workspace = context.workspace.as_ref().ok_or_else(|| { + CliError::ExecutionError( + "collectBaseOutputPlans requires collectedOutputContext.workspace".to_string(), + ) + })?; + + Ok(BaseOutputPlansDto { + plugins: vec![ + build_agents_plugin_plan(workspace), + build_git_exclude_plugin_plan(workspace, context), + build_jetbrains_plugin_plan(workspace, context), + build_vscode_plugin_plan(workspace, context), + build_zed_plugin_plan(workspace, context), + build_readme_plugin_plan(workspace, context), + ], + }) +} + +fn build_agents_plugin_plan(workspace: &Workspace) -> BaseOutputPluginPlanDto { + let mut output_files = Vec::new(); + + for project in get_agents_output_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + + if let Some(root_prompt) = project.root_memory_prompt.as_ref() { + output_files.push(create_output_file( + project_root_dir.join(PROJECT_MEMORY_FILE), + root_prompt.content.clone(), + )); + } + + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + for child_prompt in child_prompts { + output_files.push(create_output_file( + resolve_relative_path(&child_prompt.dir).join(PROJECT_MEMORY_FILE), + child_prompt.content.clone(), + )); + } + } + } + + BaseOutputPluginPlanDto { + plugin_name: AGENTS_PLUGIN_NAME.to_string(), + output_files, + cleanup: build_agents_cleanup(workspace), + } +} + +fn build_agents_cleanup(workspace: &Workspace) -> CleanupDeclarationsDto { + let mut delete = Vec::new(); + let mut seen_files = std::collections::HashSet::new(); + + for project in get_agents_cleanup_projects(workspace) { + 
let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + + delete.push(create_cleanup_target( + project_root_dir.join("**").join(PROJECT_MEMORY_FILE), + CleanupTargetKindDto::Glob, + Some("delete.project.glob"), + )); + + push_unique_cleanup_file( + &mut delete, + &mut seen_files, + project_root_dir.join(PROJECT_MEMORY_FILE), + "delete.project", + ); + + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + for child_prompt in child_prompts { + push_unique_cleanup_file( + &mut delete, + &mut seen_files, + resolve_relative_path(&child_prompt.dir).join(PROJECT_MEMORY_FILE), + "delete.project.child", + ); + } + } + } + + CleanupDeclarationsDto { + delete, + ..CleanupDeclarationsDto::default() + } +} + +fn build_git_exclude_plugin_plan( + workspace: &Workspace, + context: &CollectedInputContext, +) -> BaseOutputPluginPlanDto { + let exclude_paths = collect_managed_exclude_paths(workspace); + let managed_content = build_managed_git_exclude_content( + context.global_git_ignore.as_deref(), + context.shadow_git_exclude.as_deref(), + ); + + let output_files = if managed_content.is_empty() { + Vec::new() + } else { + exclude_paths + .iter() + .map(|path| create_output_file(PathBuf::from(path), managed_content.clone())) + .collect() + }; + + let cleanup = CleanupDeclarationsDto { + delete: exclude_paths + .into_iter() + .map(|path| { + create_cleanup_target( + PathBuf::from(path), + CleanupTargetKindDto::File, + Some("delete.project"), + ) + }) + .collect(), + ..CleanupDeclarationsDto::default() + }; + + BaseOutputPluginPlanDto { + plugin_name: GIT_EXCLUDE_PLUGIN_NAME.to_string(), + output_files, + cleanup, + } +} + +fn build_jetbrains_plugin_plan( + workspace: &Workspace, + context: &CollectedInputContext, +) -> BaseOutputPluginPlanDto { + let mut configs = context.jetbrains_config_files.clone().unwrap_or_default(); + configs.extend(context.editor_config_files.clone().unwrap_or_default()); + + 
build_project_config_plugin_plan( + workspace, + JETBRAINS_PLUGIN_NAME, + &configs, + &[ + EDITOR_CONFIG_FILE, + ".idea/codeStyles/Project.xml", + ".idea/codeStyles/codeStyleConfig.xml", + ".idea/.gitignore", + ], + resolve_jetbrains_target_relative_path, + ) +} + +fn build_vscode_plugin_plan( + workspace: &Workspace, + context: &CollectedInputContext, +) -> BaseOutputPluginPlanDto { + let configs = context.vscode_config_files.as_deref().unwrap_or(&[]); + + build_project_config_plugin_plan( + workspace, + VSCODE_PLUGIN_NAME, + configs, + &[".vscode/settings.json", ".vscode/extensions.json"], + resolve_vscode_target_relative_path, + ) +} + +fn build_zed_plugin_plan( + workspace: &Workspace, + context: &CollectedInputContext, +) -> BaseOutputPluginPlanDto { + let configs = context.zed_config_files.as_deref().unwrap_or(&[]); + + build_project_config_plugin_plan( + workspace, + ZED_PLUGIN_NAME, + configs, + &[".zed/settings.json"], + resolve_zed_target_relative_path, + ) +} + +fn build_readme_plugin_plan( + workspace: &Workspace, + context: &CollectedInputContext, +) -> BaseOutputPluginPlanDto { + let mut output_files = Vec::new(); + + if let Some(readme_prompts) = context.readme_prompts.as_ref() { + for readme_prompt in readme_prompts { + output_files.push(create_output_file( + resolve_relative_path(&readme_prompt.target_dir) + .join(resolve_readme_output_file_name(&readme_prompt.file_kind)), + readme_prompt.content.clone(), + )); + } + } + + if let Some(editor_config_files) = context.editor_config_files.as_ref() { + for project in get_concrete_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + + for editor_config in editor_config_files { + output_files.push(create_output_file( + project_root_dir.join(EDITOR_CONFIG_FILE), + editor_config.content.clone(), + )); + } + } + } + + BaseOutputPluginPlanDto { + plugin_name: README_PLUGIN_NAME.to_string(), + output_files, + cleanup: build_project_cleanup( 
+ workspace, + &[ + "README.md", + "CODE_OF_CONDUCT.md", + "SECURITY.md", + EDITOR_CONFIG_FILE, + ], + ), + } +} + +fn build_project_config_plugin_plan( + workspace: &Workspace, + plugin_name: &str, + configs: &[ProjectIDEConfigFile], + cleanup_files: &[&str], + target_relative_path: impl Fn(&ProjectIDEConfigFile) -> String, +) -> BaseOutputPluginPlanDto { + let mut output_files = Vec::new(); + + for project in get_concrete_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + + for config in configs { + output_files.push(create_output_file( + project_root_dir.join(target_relative_path(config)), + config.content.clone(), + )); + } + } + + BaseOutputPluginPlanDto { + plugin_name: plugin_name.to_string(), + output_files, + cleanup: build_project_cleanup(workspace, cleanup_files), + } +} + +fn build_project_cleanup(workspace: &Workspace, relative_paths: &[&str]) -> CleanupDeclarationsDto { + let mut delete = Vec::new(); + + for project in get_concrete_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + + for relative_path in relative_paths { + delete.push(create_cleanup_target( + project_root_dir.join(relative_path), + CleanupTargetKindDto::File, + Some("delete.project"), + )); + } + } + + CleanupDeclarationsDto { + delete, + ..CleanupDeclarationsDto::default() + } +} + +fn get_concrete_projects(workspace: &Workspace) -> impl Iterator { + workspace + .projects + .iter() + .filter(|project| project.is_workspace_root_project != Some(true)) +} + +fn get_agents_cleanup_projects(workspace: &Workspace) -> Vec<&Project> { + let mut projects: Vec<&Project> = get_concrete_projects(workspace).collect(); + if let Some(workspace_root_project) = workspace + .projects + .iter() + .find(|project| project.is_workspace_root_project == Some(true)) + { + projects.push(workspace_root_project); + } + projects +} + +fn 
get_agents_output_projects(workspace: &Workspace) -> Vec<&Project> { + get_agents_cleanup_projects(workspace) + .into_iter() + .filter(|project| project.is_prompt_source_project != Some(true)) + .collect() +} + +fn resolve_project_root_dir(workspace: &Workspace, project: &Project) -> Option { + if project.is_workspace_root_project == Some(true) { + return Some(PathBuf::from(&workspace.directory.path)); + } + + project + .dir_from_workspace_path + .as_ref() + .map(resolve_relative_path) +} + +fn resolve_relative_path(relative_path: &RelativePath) -> PathBuf { + let raw_path = Path::new(&relative_path.path); + let candidate = if raw_path.is_absolute() { + raw_path.to_path_buf() + } else if relative_path.base_path.is_empty() { + raw_path.to_path_buf() + } else { + PathBuf::from(&relative_path.base_path).join(raw_path) + }; + + normalize_path(&candidate) +} + +fn resolve_jetbrains_target_relative_path(config: &ProjectIDEConfigFile) -> String { + let source_path = &config.dir.path; + + if config.ide_type == IDEKind::EditorConfig { + return EDITOR_CONFIG_FILE.to_string(); + } + + if config.ide_type != IDEKind::IntellijIDEA { + return file_name(source_path); + } + + if let Some(index) = source_path.find(IDEA_DIR) { + return source_path[index..].to_string(); + } + + Path::new(IDEA_DIR) + .join(CODE_STYLES_DIR) + .join(file_name(source_path)) + .to_string_lossy() + .into_owned() +} + +fn resolve_vscode_target_relative_path(config: &ProjectIDEConfigFile) -> String { + let source_path = &config.dir.path; + + if config.ide_type != IDEKind::VSCode { + return file_name(source_path); + } + + if let Some(index) = source_path.find(VSCODE_DIR) { + return source_path[index..].to_string(); + } + + Path::new(VSCODE_DIR) + .join(file_name(source_path)) + .to_string_lossy() + .into_owned() +} + +fn resolve_zed_target_relative_path(config: &ProjectIDEConfigFile) -> String { + let source_path = &config.dir.path; + + if config.ide_type != IDEKind::Zed { + return file_name(source_path); + } 
+ + if let Some(index) = source_path.find(ZED_DIR) { + return source_path[index..].to_string(); + } + + Path::new(ZED_DIR) + .join("settings.json") + .to_string_lossy() + .into_owned() +} + +fn resolve_readme_output_file_name(file_kind: &str) -> &'static str { + match file_kind { + "CodeOfConduct" => "CODE_OF_CONDUCT.md", + "Security" => "SECURITY.md", + _ => "README.md", + } +} + +fn collect_managed_exclude_paths(workspace: &Workspace) -> Vec { + let mut repo_roots = Vec::new(); + let mut seen_repo_roots = std::collections::HashSet::new(); + push_unique_pathbuf( + &mut repo_roots, + &mut seen_repo_roots, + PathBuf::from(&workspace.directory.path), + ); + + for project in &workspace.projects { + if let Some(project_root_dir) = resolve_project_root_dir(workspace, project) { + push_unique_pathbuf(&mut repo_roots, &mut seen_repo_roots, project_root_dir); + } + } + + let mut exclude_paths = Vec::new(); + let mut seen_exclude_paths = std::collections::HashSet::new(); + + for repo_root in repo_roots { + let mut repo_dirs = vec![repo_root.clone()]; + repo_dirs.extend(find_all_git_repos(&repo_root, 5)); + + for repo_dir in repo_dirs { + let Some(git_info_dir) = resolve_git_info_dir(&repo_dir) else { + continue; + }; + push_unique_path( + &mut exclude_paths, + &mut seen_exclude_paths, + git_info_dir.join("exclude"), + ); + } + } + + exclude_paths +} + +fn build_managed_git_exclude_content( + global_git_ignore: Option<&str>, + shadow_git_exclude: Option<&str>, +) -> String { + let mut parts = Vec::new(); + + if let Some(content) = global_git_ignore.filter(|value| !value.trim().is_empty()) { + let sanitized = sanitize_git_exclude_content(content); + if !sanitized.is_empty() { + parts.push(sanitized); + } + } + + if let Some(content) = shadow_git_exclude.filter(|value| !value.trim().is_empty()) { + let sanitized = sanitize_git_exclude_content(content); + if !sanitized.is_empty() { + parts.push(sanitized); + } + } + + if parts.is_empty() { + return String::new(); + } + + let 
joined = parts.join("\n"); + let trimmed = joined.trim(); + if trimmed.is_empty() { + return String::new(); + } + + format!("{trimmed}\n") +} + +fn sanitize_git_exclude_content(content: &str) -> String { + let normalized = content.replace("\r\n", "\n"); + let filtered = normalized + .split('\n') + .filter(|line| { + let trimmed = line.trim(); + if trimmed.is_empty() { + return true; + } + !(trimmed.starts_with('#') && !trimmed.starts_with("\\#")) + }) + .collect::>() + .join("\n"); + + filtered.trim().to_string() +} + +fn create_output_file(path: PathBuf, content: String) -> BaseOutputFileDeclarationDto { + BaseOutputFileDeclarationDto { + path: path.to_string_lossy().into_owned(), + scope: Some(PROJECT_SCOPE.to_string()), + content, + } +} + +fn create_cleanup_target( + path: PathBuf, + kind: CleanupTargetKindDto, + label: Option<&str>, +) -> CleanupTargetDto { + CleanupTargetDto { + path: path.to_string_lossy().into_owned(), + kind, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: Some(PROJECT_SCOPE.to_string()), + label: label.map(ToOwned::to_owned), + } +} + +fn push_unique_cleanup_file( + delete: &mut Vec, + seen_files: &mut std::collections::HashSet, + path: PathBuf, + label: &str, +) { + let path_string = path.to_string_lossy().into_owned(); + if !seen_files.insert(path_string.clone()) { + return; + } + + delete.push(CleanupTargetDto { + path: path_string, + kind: CleanupTargetKindDto::File, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: Some(PROJECT_SCOPE.to_string()), + label: Some(label.to_string()), + }); +} + +fn push_unique_path( + paths: &mut Vec, + seen_paths: &mut std::collections::HashSet, + path: PathBuf, +) { + let path_string = path.to_string_lossy().into_owned(); + if !seen_paths.insert(path_string.clone()) { + return; + } + paths.push(path_string); +} + +fn push_unique_pathbuf( + paths: &mut Vec, + seen_paths: &mut std::collections::HashSet, + path: PathBuf, +) { + let path_string = 
path.to_string_lossy().into_owned(); + if !seen_paths.insert(path_string) { + return; + } + paths.push(path); +} + +fn file_name(path: &str) -> String { + Path::new(path) + .file_name() + .map(|file_name| file_name.to_string_lossy().into_owned()) + .unwrap_or_else(|| path.to_string()) +} + +fn normalize_path(path: &Path) -> PathBuf { + let mut normalized = PathBuf::new(); + + for component in path.components() { + match component { + Component::Prefix(prefix) => normalized.push(prefix.as_os_str()), + Component::RootDir => normalized.push(Path::new(std::path::MAIN_SEPARATOR_STR)), + Component::CurDir => {} + Component::ParentDir => { + if !normalized.pop() && !path.is_absolute() { + normalized.push(".."); + } + } + Component::Normal(segment) => normalized.push(segment), + } + } + + if normalized.as_os_str().is_empty() { + if path.is_absolute() { + return PathBuf::from(std::path::MAIN_SEPARATOR_STR); + } + return PathBuf::from("."); + } + + normalized +} + +#[cfg(test)] +mod tests { + use std::fs; + + use tempfile::TempDir; + + use super::*; + use crate::core::plugin_shared::{ + FilePathKind, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt, PromptKind, ReadmePrompt, + RootPath, + }; + + fn workspace_root(temp_dir: &TempDir) -> String { + temp_dir.path().to_string_lossy().into_owned() + } + + fn create_relative_path(base_path: &str, path: &str) -> RelativePath { + RelativePath::new(path, base_path) + } + + fn create_project_root_prompt(content: &str) -> ProjectRootMemoryPrompt { + ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Root, + dir: RootPath::new(""), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + } + } + + fn create_child_memory_prompt( + project_root: &str, + relative_dir: &str, + content: &str, + ) -> ProjectChildrenMemoryPrompt { + let relative_path = 
create_relative_path(project_root, relative_dir); + ProjectChildrenMemoryPrompt { + prompt_type: PromptKind::ProjectChildrenMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Relative, + dir: relative_path.clone(), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + working_child_directory_path: relative_path, + } + } + + fn create_project(workspace_root: &str, name: &str) -> Project { + Project { + name: Some(name.to_string()), + dir_from_workspace_path: Some(create_relative_path(workspace_root, name)), + ..Project::default() + } + } + + fn create_ide_config( + ide_type: IDEKind, + source_path: &str, + content: &str, + ) -> ProjectIDEConfigFile { + ProjectIDEConfigFile { + ide_type, + content: content.to_string(), + length: content.len(), + dir: RelativePath { + path_kind: FilePathKind::Absolute, + path: source_path.to_string(), + base_path: String::new(), + absolute_path: Some(source_path.to_string()), + directory_name: Path::new(source_path) + .parent() + .map(|dir| dir.to_string_lossy().into_owned()), + }, + file_path_kind: FilePathKind::Absolute, + } + } + + fn create_readme_prompt( + target_root: &str, + relative_dir: &str, + file_kind: &str, + content: &str, + ) -> ReadmePrompt { + ReadmePrompt { + prompt_type: PromptKind::Readme, + content: content.to_string(), + length: content.len(), + dir: create_relative_path(target_root, relative_dir), + project_name: "memory-sync".to_string(), + target_dir: create_relative_path(target_root, relative_dir), + is_root: relative_dir == ".", + file_kind: file_kind.to_string(), + markdown_contents: None, + } + } + + fn find_plan<'a>( + plans: &'a BaseOutputPlansDto, + plugin_name: &str, + ) -> &'a BaseOutputPluginPlanDto { + plans + .plugins + .iter() + .find(|plan| plan.plugin_name == plugin_name) + .unwrap_or_else(|| panic!("expected plugin plan for {plugin_name}")) + } + + #[test] + fn 
builds_agents_plan_without_prompt_source_outputs() { + let temp_dir = match TempDir::new() { + Ok(dir) => dir, + Err(error) => panic!("temp dir should be created: {error}"), + }; + let workspace_dir = workspace_root(&temp_dir); + let prompt_source_root = Path::new(&workspace_dir).join("aindex"); + let project_root = Path::new(&workspace_dir).join("project-a"); + + let context = CollectedInputContext { + workspace: Some(Workspace { + directory: crate::core::plugin_shared::RootPath::new(&workspace_dir), + projects: vec![ + Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(create_project_root_prompt("workspace root")), + ..Project::default() + }, + Project { + is_prompt_source_project: Some(true), + root_memory_prompt: Some(create_project_root_prompt("prompt source root")), + child_memory_prompts: Some(vec![create_child_memory_prompt( + &prompt_source_root.to_string_lossy(), + "commands", + "prompt source child", + )]), + ..create_project(&workspace_dir, "aindex") + }, + Project { + root_memory_prompt: Some(create_project_root_prompt("project root")), + child_memory_prompts: Some(vec![create_child_memory_prompt( + &project_root.to_string_lossy(), + "commands", + "project child", + )]), + ..create_project(&workspace_dir, "project-a") + }, + ], + }), + ..CollectedInputContext::default() + }; + + let plans = match build_base_output_plans(&context) { + Ok(plans) => plans, + Err(error) => panic!("base output plans should be built: {error}"), + }; + let agents_plan = find_plan(&plans, AGENTS_PLUGIN_NAME); + let output_paths = agents_plan + .output_files + .iter() + .map(|entry| entry.path.as_str()) + .collect::>(); + let cleanup_paths = agents_plan + .cleanup + .delete + .iter() + .map(|entry| entry.path.as_str()) + .collect::>(); + + assert!( + output_paths.contains( + &Path::new(&workspace_dir) + .join("AGENTS.md") + .to_string_lossy() + .as_ref() + ) + ); + 
assert!(output_paths.contains(&project_root.join("AGENTS.md").to_string_lossy().as_ref())); + assert!( + output_paths.contains( + &project_root + .join("commands") + .join("AGENTS.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + !output_paths.contains( + &prompt_source_root + .join("AGENTS.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + !output_paths.contains( + &prompt_source_root + .join("commands") + .join("AGENTS.md") + .to_string_lossy() + .as_ref() + ) + ); + + assert!( + cleanup_paths.contains( + &prompt_source_root + .join("AGENTS.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + cleanup_paths.contains( + &prompt_source_root + .join("commands") + .join("AGENTS.md") + .to_string_lossy() + .as_ref() + ) + ); + } + + #[test] + fn builds_git_exclude_plan_for_workspace_and_project_repos() { + let temp_dir = match TempDir::new() { + Ok(dir) => dir, + Err(error) => panic!("temp dir should be created: {error}"), + }; + let workspace_dir = temp_dir.path(); + let project_dir = workspace_dir.join("packages").join("app"); + if let Err(error) = fs::create_dir_all(workspace_dir.join(".git").join("info")) { + panic!("workspace git dir should be created: {error}"); + } + if let Err(error) = fs::create_dir_all(project_dir.join(".git").join("info")) { + panic!("project git dir should be created: {error}"); + } + + let context = CollectedInputContext { + workspace: Some(Workspace { + directory: crate::core::plugin_shared::RootPath::new(&workspace_dir.to_string_lossy()), + projects: vec![create_project( + &workspace_dir.to_string_lossy(), + "packages/app", + )], + }), + global_git_ignore: Some("dist/\n# comment\n\\#literal\n".to_string()), + shadow_git_exclude: Some(".idea/\n".to_string()), + ..CollectedInputContext::default() + }; + + let plans = match build_base_output_plans(&context) { + Ok(plans) => plans, + Err(error) => panic!("base output plans should be built: {error}"), + }; + let git_exclude_plan = find_plan(&plans, 
GIT_EXCLUDE_PLUGIN_NAME); + let output_paths = git_exclude_plan + .output_files + .iter() + .map(|entry| entry.path.as_str()) + .collect::>(); + let expected_content = "dist/\n\\#literal\n.idea/\n"; + + assert_eq!( + git_exclude_plan + .output_files + .first() + .map(|entry| entry.content.as_str()), + Some(expected_content) + ); + assert!( + output_paths.contains( + &workspace_dir + .join(".git") + .join("info") + .join("exclude") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + output_paths.contains( + &project_dir + .join(".git") + .join("info") + .join("exclude") + .to_string_lossy() + .as_ref() + ) + ); + assert_eq!( + git_exclude_plan.cleanup.delete.len(), + 2, + "cleanup should cover the same git exclude targets" + ); + } + + #[test] + fn builds_ide_and_readme_plans_for_prompt_source_projects() { + let temp_dir = match TempDir::new() { + Ok(dir) => dir, + Err(error) => panic!("temp dir should be created: {error}"), + }; + let workspace_dir = workspace_root(&temp_dir); + let aindex_public = Path::new(&workspace_dir).join("aindex").join("public"); + let memory_sync_root = Path::new(&workspace_dir).join("memory-sync"); + + let context = CollectedInputContext { + workspace: Some(Workspace { + directory: crate::core::plugin_shared::RootPath::new(&workspace_dir), + projects: vec![ + Project { + is_prompt_source_project: Some(true), + ..create_project(&workspace_dir, "aindex") + }, + create_project(&workspace_dir, "memory-sync"), + ], + }), + editor_config_files: Some(vec![create_ide_config( + IDEKind::EditorConfig, + &aindex_public.join(".editorconfig").to_string_lossy(), + "root = true\n", + )]), + vscode_config_files: Some(vec![ + create_ide_config( + IDEKind::VSCode, + &aindex_public + .join(".vscode") + .join("settings.json") + .to_string_lossy(), + "{}\n", + ), + create_ide_config( + IDEKind::VSCode, + &aindex_public + .join(".vscode") + .join("extensions.json") + .to_string_lossy(), + "{\n}\n", + ), + ]), + zed_config_files: Some(vec![create_ide_config( 
+ IDEKind::Zed, + &aindex_public + .join(".zed") + .join("settings.json") + .to_string_lossy(), + "{\"tab_size\":2}\n", + )]), + jetbrains_config_files: Some(vec![ + create_ide_config( + IDEKind::IntellijIDEA, + &aindex_public + .join(".idea") + .join(".gitignore") + .to_string_lossy(), + "/workspace.xml\n", + ), + create_ide_config( + IDEKind::IntellijIDEA, + &aindex_public + .join(".idea") + .join("codeStyles") + .join("Project.xml") + .to_string_lossy(), + "\n", + ), + ]), + readme_prompts: Some(vec![ + create_readme_prompt( + &memory_sync_root.to_string_lossy(), + ".", + "Readme", + "# README\n", + ), + create_readme_prompt( + &memory_sync_root.to_string_lossy(), + ".", + "CodeOfConduct", + "# COC\n", + ), + ]), + ..CollectedInputContext::default() + }; + + let plans = match build_base_output_plans(&context) { + Ok(plans) => plans, + Err(error) => panic!("base output plans should be built: {error}"), + }; + + let vscode_plan = find_plan(&plans, VSCODE_PLUGIN_NAME); + assert!(vscode_plan.output_files.iter().any(|entry| { + entry.path + == Path::new(&workspace_dir) + .join("aindex") + .join(".vscode") + .join("settings.json") + .to_string_lossy() + })); + assert!(vscode_plan.output_files.iter().any(|entry| { + entry.path + == Path::new(&workspace_dir) + .join("memory-sync") + .join(".vscode") + .join("extensions.json") + .to_string_lossy() + })); + + let zed_plan = find_plan(&plans, ZED_PLUGIN_NAME); + assert!(zed_plan.output_files.iter().any(|entry| { + entry.path + == Path::new(&workspace_dir) + .join("aindex") + .join(".zed") + .join("settings.json") + .to_string_lossy() + })); + + let jetbrains_plan = find_plan(&plans, JETBRAINS_PLUGIN_NAME); + assert!(jetbrains_plan.output_files.iter().any(|entry| { + entry.path + == Path::new(&workspace_dir) + .join("memory-sync") + .join(".idea") + .join("codeStyles") + .join("Project.xml") + .to_string_lossy() + })); + assert!(jetbrains_plan.output_files.iter().any(|entry| { + entry.path + == Path::new(&workspace_dir) + 
.join("aindex") + .join(".editorconfig") + .to_string_lossy() + })); + + let readme_plan = find_plan(&plans, README_PLUGIN_NAME); + assert!(readme_plan.output_files.iter().any(|entry| { + entry.path == memory_sync_root.join("README.md").to_string_lossy() + && entry.content == "# README\n" + })); + assert!(readme_plan.output_files.iter().any(|entry| { + entry.path + == memory_sync_root + .join("CODE_OF_CONDUCT.md") + .to_string_lossy() + && entry.content == "# COC\n" + })); + assert!(readme_plan.output_files.iter().any(|entry| { + entry.path == memory_sync_root.join(".editorconfig").to_string_lossy() + && entry.content == "root = true\n" + })); + } +} diff --git a/sdk/src/core/cleanup.rs b/sdk/src/core/cleanup.rs index c9490e19..22b2ac31 100644 --- a/sdk/src/core/cleanup.rs +++ b/sdk/src/core/cleanup.rs @@ -538,7 +538,7 @@ impl BatchedGlobPlanner { .count(); let glob_pattern_count = self.normalized_patterns.len() - literal_pattern_count; - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native glob execute started", json!({ @@ -599,7 +599,7 @@ impl BatchedGlobPlanner { literal_match_count += 1; } - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native glob literal processing complete", json!({ @@ -685,7 +685,7 @@ impl BatchedGlobPlanner { } } - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native glob group walks complete", json!({ @@ -697,7 +697,7 @@ impl BatchedGlobPlanner { ); // Convert HashMaps to sorted Vecs and deduplicate - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native glob result compaction started", json!({}) @@ -722,7 +722,7 @@ impl BatchedGlobPlanner { .collect(); delete_vec.sort_by_key(|(idx, _)| *idx); - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native glob result compaction complete", json!({ @@ -1485,11 +1485,11 @@ fn default_protection_mode_for_target(target: &CleanupTargetDto) -> ProtectionMo pub fn plan_cleanup(snapshot: 
CleanupSnapshot) -> Result { let logger = create_logger("CleanupNative", None); - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native plan started", json!({ - "pluginCount": snapshot.plugin_snapshots.len(), + "adaptorCount": snapshot.plugin_snapshots.len(), "projectRootCount": snapshot.project_roots.len(), "protectedRuleCount": snapshot.protected_rules.len(), "emptyDirExcludeGlobs": snapshot.empty_dir_exclude_globs.len(), @@ -1534,9 +1534,9 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { reason: target .label .as_ref() - .map(|label| format!("plugin cleanup protect declaration ({label})")) - .unwrap_or_else(|| "plugin cleanup protect declaration".to_string()), - source: format!("plugin-cleanup-protect:{}", plugin_snapshot.plugin_name), + .map(|label| format!("adaptor cleanup protect declaration ({label})")) + .unwrap_or_else(|| "adaptor cleanup protect declaration".to_string()), + source: format!("adaptor-cleanup-protect:{}", plugin_snapshot.plugin_name), }); continue; } @@ -1544,13 +1544,13 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { let reason = target .label .as_ref() - .map(|label| format!("plugin cleanup protect declaration ({label})")) - .unwrap_or_else(|| "plugin cleanup protect declaration".to_string()); + .map(|label| format!("adaptor cleanup protect declaration ({label})")) + .unwrap_or_else(|| "adaptor cleanup protect declaration".to_string()); protected_rules.push(create_protected_rule( &target.path, default_protection_mode_for_target(target), reason, - format!("plugin-cleanup-protect:{}", plugin_snapshot.plugin_name), + format!("adaptor-cleanup-protect:{}", plugin_snapshot.plugin_name), None, )); } @@ -1577,7 +1577,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { } } - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native plan inventory collected", json!({ @@ -1614,7 +1614,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { } // Execute the 
batched glob expansion - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native glob expansion started", json!({ @@ -1632,7 +1632,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { .iter() .map(|(_, paths)| paths.len()) .sum::(); - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native glob expansion complete", json!({ @@ -1672,7 +1672,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { let guard = create_guard(&snapshot, &protected_rules)?; let conflicts = detect_cleanup_protection_conflicts(&output_path_owners, &guard); if !conflicts.is_empty() { - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native plan blocked", json!({ @@ -1692,7 +1692,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { let file_candidates = delete_files.into_iter().collect::>(); let dir_candidates = delete_dirs.into_iter().collect::>(); - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native file partition started", json!({ @@ -1702,7 +1702,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { ); let file_partition = partition_deletion_targets(&file_candidates, &guard, Some(&exact_safe_file_paths)); - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native file partition complete", json!({ @@ -1711,7 +1711,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { "violationCount": file_partition.violations.len(), }) ); - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native directory partition started", json!({ @@ -1720,7 +1720,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { }) ); let dir_partition = partition_deletion_targets(&dir_candidates, &guard, None); - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native directory partition complete", json!({ @@ -1729,14 +1729,14 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { "violationCount": 
dir_partition.violations.len(), }) ); - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native target compaction started", json!({}) ); let (files_to_delete, dirs_to_delete) = compact_deletion_targets(&file_partition.safe_paths, &dir_partition.safe_paths); - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native target compaction complete", json!({ @@ -1744,7 +1744,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { "compactedDirs": dirs_to_delete.len(), }) ); - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native target partition complete", json!({ @@ -1777,7 +1777,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { .map(|pattern| normalize_relative_glob_pattern(pattern)) .collect::>(), )?; - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native empty directory planning started", json!({ @@ -1805,7 +1805,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { .into_iter() .filter(|violation| !target_matches_project_root(&violation.target_path, &project_root_keys)) .collect::>(); - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native empty directory planning complete", json!({ @@ -1819,7 +1819,7 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { violations.extend(empty_dir_violations); violations.sort_by(|a, b| a.target_path.cmp(&b.target_path)); - tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native plan complete", json!({ @@ -1843,10 +1843,10 @@ pub fn plan_cleanup(snapshot: CleanupSnapshot) -> Result { pub fn perform_cleanup(snapshot: CleanupSnapshot) -> Result { let logger = create_logger("CleanupNative", None); - tnmsc_logger::log_info!(logger, "cleanup native perform started", json!({})); + tnmsc_logger::log_debug!(logger, "cleanup native perform started", json!({})); let plan = plan_cleanup(snapshot)?; if !plan.conflicts.is_empty() || !plan.violations.is_empty() { - 
tnmsc_logger::log_info!( + tnmsc_logger::log_debug!( logger, "cleanup native perform blocked", json!({ @@ -1867,7 +1867,7 @@ pub fn perform_cleanup(snapshot: CleanupSnapshot) -> Result Result Result Result Result Result Result, + stderr: Vec, +} + +pub(crate) fn execute_internal_command( + command_name: &str, + options: &MemorySyncCommandOptions, +) -> Result { + let bridge_entry = resolve_internal_command_bridge_path()?; + let node_executable = resolve_node_executable(); + let result_path = create_bridge_result_path().map_err(CliError::IoError)?; + let options_json = serde_json::to_string(options)?; + + let mut command = Command::new(&node_executable); + command + .arg(&bridge_entry) + .arg(command_name) + .arg(&options_json) + .env("TNMSC_FORCE_NATIVE_BINDING", "1") + .env(INTERNAL_COMMAND_BRIDGE_RESULT_PATH_ENV, &result_path) + .stdout(Stdio::piped()) + .stderr(Stdio::piped()); + + if let Some(cwd) = options.cwd.as_deref() { + command.current_dir(cwd); + } + + let output = run_bridge_process(&mut command).map_err(CliError::IoError)?; + if !output.status.success() { + let _ = fs::remove_file(&result_path); + return Err(CliError::ExecutionError(format!( + "Internal command bridge failed for `{command_name}` via \"{}\" (node: {}). {}", + bridge_entry.display(), + node_executable.to_string_lossy(), + format_process_failure(output.status, &output.stdout, &output.stderr), + ))); + } + + let result_json = fs::read_to_string(&result_path).map_err(|error| { + CliError::ExecutionError(format!( + "Internal command bridge did not write a result payload for `{command_name}` via \"{}\": {error}. {}", + bridge_entry.display(), + format_captured_output(&output.stdout, &output.stderr), + )) + })?; + let _ = fs::remove_file(&result_path); + + let trimmed = result_json.trim(); + if trimmed.is_empty() { + return Err(CliError::ExecutionError(format!( + "Internal command bridge returned empty output for `{command_name}` via \"{}\". 
{}", + bridge_entry.display(), + format_captured_output(&output.stdout, &output.stderr), + ))); + } + + serde_json::from_str(trimmed).map_err(|error| { + CliError::ExecutionError(format!( + "Internal command bridge returned invalid JSON for `{command_name}` via \"{}\": {error}. Result: {trimmed}", + bridge_entry.display(), + )) + }) +} + +fn create_bridge_result_path() -> io::Result { + let temp_dir = env::temp_dir(); + let process_id = std::process::id(); + let now = SystemTime::now() + .duration_since(UNIX_EPOCH) + .unwrap_or_default() + .as_nanos(); + + for attempt in 0..32 { + let candidate = temp_dir.join(format!( + "tnmsc-internal-command-bridge-{process_id}-{now}-{attempt}.json" + )); + if !candidate.exists() { + return Ok(candidate); + } + } + + Err(io::Error::new( + io::ErrorKind::AlreadyExists, + "Failed to allocate a unique internal command bridge result path.", + )) +} + +fn spawn_output_forwarder( + mut reader: R, + target: OutputTarget, +) -> thread::JoinHandle>> +where + R: Read + Send + 'static, +{ + thread::spawn(move || { + let mut buffer = [0_u8; 8192]; + let mut collected = Vec::new(); + + loop { + let bytes_read = reader.read(&mut buffer)?; + if bytes_read == 0 { + break; + } + + let chunk = &buffer[..bytes_read]; + collected.extend_from_slice(chunk); + + match target { + OutputTarget::Stdout => { + let mut stdout = io::stdout().lock(); + stdout.write_all(chunk)?; + stdout.flush()?; + } + OutputTarget::Stderr => { + let mut stderr = io::stderr().lock(); + stderr.write_all(chunk)?; + stderr.flush()?; + } + } + } + + Ok(collected) + }) +} + +fn join_output_forwarder( + handle: thread::JoinHandle>>, + stream_name: &str, +) -> io::Result> { + match handle.join() { + Ok(result) => result, + Err(_) => Err(io::Error::other(format!( + "Internal command bridge {stream_name} forwarder panicked." 
+ ))), + } +} + +fn run_bridge_process(command: &mut Command) -> io::Result { + let mut child = command.spawn()?; + let stdout = child + .stdout + .take() + .ok_or_else(|| io::Error::other("Internal command bridge stdout pipe was unavailable."))?; + let stderr = child + .stderr + .take() + .ok_or_else(|| io::Error::other("Internal command bridge stderr pipe was unavailable."))?; + + let stdout_thread = spawn_output_forwarder(stdout, OutputTarget::Stdout); + let stderr_thread = spawn_output_forwarder(stderr, OutputTarget::Stderr); + let status = child.wait()?; + let stdout = join_output_forwarder(stdout_thread, "stdout")?; + let stderr = join_output_forwarder(stderr_thread, "stderr")?; + + Ok(BridgeProcessOutput { + status, + stdout, + stderr, + }) +} + +fn format_captured_output(stdout: &[u8], stderr: &[u8]) -> String { + let stdout = String::from_utf8_lossy(stdout).trim().to_string(); + let stderr = String::from_utf8_lossy(stderr).trim().to_string(); + let mut details = Vec::new(); + + if !stdout.is_empty() { + details.push(format!("Stdout: {stdout}")); + } + if !stderr.is_empty() { + details.push(format!("Stderr: {stderr}")); + } + + if details.is_empty() { + return "No stdout/stderr captured.".to_string(); + } + + details.join(" ") +} + +fn resolve_internal_command_bridge_path() -> Result { + if let Some(override_path) = env::var_os(INTERNAL_COMMAND_BRIDGE_ENV) { + let bridge_path = PathBuf::from(override_path); + if bridge_path.exists() { + return Ok(bridge_path); + } + + return Err(CliError::ExecutionError(format!( + "Internal command bridge override points to a missing file: \"{}\".", + bridge_path.display(), + ))); + } + + let bridge_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")).join(DEFAULT_INTERNAL_COMMAND_BRIDGE); + if bridge_path.exists() { + return Ok(bridge_path); + } + + Err(CliError::ExecutionError(format!( + "Internal command bridge bundle is missing at \"{}\". 
Run `pnpm -C sdk run build` before using native install/dry-run/clean commands.", + bridge_path.display(), + ))) +} + +fn resolve_node_executable() -> OsString { + for key in [NODE_EXECUTABLE_ENV, "npm_node_execpath", "NODE"] { + if let Some(value) = env::var_os(key) + && !value.is_empty() + { + return value; + } + } + + OsString::from(DEFAULT_NODE_EXECUTABLE) +} + +fn format_process_failure(status: ExitStatus, stdout: &[u8], stderr: &[u8]) -> String { + let status = status.code().map_or_else( + || "terminated by signal".to_string(), + |code| format!("Exit code: {code}."), + ); + format!("{status} {}", format_captured_output(stdout, stderr)) +} + +#[cfg(test)] +mod tests { + use super::*; + use std::fs; + use std::sync::{Mutex, OnceLock}; + + use tempfile::TempDir; + + fn env_lock() -> &'static Mutex<()> { + static LOCK: OnceLock> = OnceLock::new(); + LOCK.get_or_init(|| Mutex::new(())) + } + + fn write_bridge_script(temp_dir: &TempDir, name: &str, body: &str) -> PathBuf { + let path = temp_dir.path().join(name); + fs::write(&path, body).expect("bridge script should be written"); + path + } + + fn bridge_script_with_result(body: &str) -> String { + format!( + r#"import {{writeFileSync}} from 'node:fs' +const resultPath = process.env.{} +function emitResult(payload) {{ + const serialized = JSON.stringify(payload) + if (resultPath && resultPath.length > 0) {{ + writeFileSync(resultPath, serialized, 'utf8') + return + }} + process.stdout.write(serialized) +}} +{} +"#, + INTERNAL_COMMAND_BRIDGE_RESULT_PATH_ENV, body + ) + } + + fn with_bridge_env(bridge_path: &PathBuf, callback: impl FnOnce() -> T) -> T { + let _guard = env_lock().lock().expect("env lock should be available"); + let previous_bridge = env::var_os(INTERNAL_COMMAND_BRIDGE_ENV); + let previous_node = env::var_os(NODE_EXECUTABLE_ENV); + + unsafe { + env::set_var(INTERNAL_COMMAND_BRIDGE_ENV, bridge_path); + env::set_var(NODE_EXECUTABLE_ENV, "node"); + } + + let result = callback(); + + match previous_bridge 
{ + Some(value) => unsafe { + env::set_var(INTERNAL_COMMAND_BRIDGE_ENV, value); + }, + None => unsafe { + env::remove_var(INTERNAL_COMMAND_BRIDGE_ENV); + }, + } + match previous_node { + Some(value) => unsafe { + env::set_var(NODE_EXECUTABLE_ENV, value); + }, + None => unsafe { + env::remove_var(NODE_EXECUTABLE_ENV); + }, + } + + result + } + + #[test] + fn execute_internal_command_parses_success_result() { + let temp_dir = TempDir::new().expect("temp dir should exist"); + let bridge_path = write_bridge_script( + &temp_dir, + "bridge-success.mjs", + &bridge_script_with_result( + r#"const [, , commandName, optionsJson] = process.argv +const options = JSON.parse(optionsJson ?? '{}') +emitResult({ + success: true, + filesAffected: commandName === 'install' ? 3 : 0, + dirsAffected: options.cwd ? 1 : 0, + message: options.cwd ?? null, + warnings: [], + errors: [] +})"#, + ), + ); + + let result = with_bridge_env(&bridge_path, || { + execute_internal_command( + "install", + &MemorySyncCommandOptions { + cwd: Some(temp_dir.path().display().to_string()), + ..Default::default() + }, + ) + }) + .expect("bridge-backed install should succeed"); + + assert!(result.success); + assert_eq!(result.files_affected, 3); + assert_eq!(result.dirs_affected, 1); + assert_eq!( + result.message.as_deref(), + Some(temp_dir.path().to_string_lossy().as_ref()) + ); + } + + #[test] + fn execute_internal_command_passes_dry_run_to_clean_bridge() { + let temp_dir = TempDir::new().expect("temp dir should exist"); + let bridge_path = write_bridge_script( + &temp_dir, + "bridge-clean.mjs", + &bridge_script_with_result( + r#"const [, , commandName, optionsJson] = process.argv +const options = JSON.parse(optionsJson ?? '{}') +emitResult({ + success: commandName === 'clean' && options.dryRun === true, + filesAffected: options.dryRun === true ? 5 : 0, + dirsAffected: options.dryRun === true ? 
2 : 0, + warnings: [], + errors: [] +})"#, + ), + ); + + let result = with_bridge_env(&bridge_path, || { + execute_internal_command( + "clean", + &MemorySyncCommandOptions { + dry_run: Some(true), + ..Default::default() + }, + ) + }) + .expect("bridge-backed clean should succeed"); + + assert!(result.success); + assert_eq!(result.files_affected, 5); + assert_eq!(result.dirs_affected, 2); + } + + #[test] + fn execute_internal_command_ignores_non_json_stdout_before_result() { + let temp_dir = TempDir::new().expect("temp dir should exist"); + let bridge_path = write_bridge_script( + &temp_dir, + "bridge-logged-success.mjs", + &bridge_script_with_result( + r####"process.stdout.write("### progress\n") +emitResult({ + success: true, + filesAffected: 1, + dirsAffected: 0, + warnings: [], + errors: [] +})"####, + ), + ); + + let result = with_bridge_env(&bridge_path, || { + execute_internal_command("install", &MemorySyncCommandOptions::default()) + }) + .expect("bridge-backed install should succeed when stdout includes logs before JSON"); + + assert!(result.success); + assert_eq!(result.files_affected, 1); + assert_eq!(result.dirs_affected, 0); + } + + #[test] + fn execute_internal_command_reports_invalid_json() { + let temp_dir = TempDir::new().expect("temp dir should exist"); + let bridge_path = write_bridge_script( + &temp_dir, + "bridge-invalid-json.mjs", + &format!( + "import {{writeFileSync}} from 'node:fs'\nconst resultPath = process.env.{}\nif (resultPath) writeFileSync(resultPath, 'not-json', 'utf8')\nelse process.stdout.write('not-json')\n", + INTERNAL_COMMAND_BRIDGE_RESULT_PATH_ENV + ), + ); + + let error = with_bridge_env(&bridge_path, || { + execute_internal_command("dry-run", &MemorySyncCommandOptions::default()) + }) + .expect_err("invalid bridge output should fail"); + + assert!(matches!(error, CliError::ExecutionError(_))); + assert!(error.to_string().contains("invalid JSON")); + } + + #[test] + fn execute_internal_command_reports_bridge_failures() { + let 
temp_dir = TempDir::new().expect("temp dir should exist"); + let bridge_path = write_bridge_script( + &temp_dir, + "bridge-failure.mjs", + "process.stderr.write('bridge exploded\\n')\nprocess.exit(7)\n", + ); + + let error = with_bridge_env(&bridge_path, || { + execute_internal_command("install", &MemorySyncCommandOptions::default()) + }) + .expect_err("bridge failure should surface"); + + assert!(matches!(error, CliError::ExecutionError(_))); + assert!(error.to_string().contains("bridge exploded")); + assert!(error.to_string().contains("Exit code: 7")); + } +} diff --git a/sdk/src/core/desk_paths.rs b/sdk/src/core/desk_paths.rs index 0a619a58..4b47fb9b 100644 --- a/sdk/src/core/desk_paths.rs +++ b/sdk/src/core/desk_paths.rs @@ -240,9 +240,17 @@ fn delete_path(path: impl AsRef) -> io::Result { } if metadata.is_dir() { - fs::remove_dir_all(path).map(|_| true) + match fs::remove_dir_all(path) { + Ok(()) => Ok(true), + Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(false), + Err(err) => Err(err), + } } else { - fs::remove_file(path).map(|_| true) + match fs::remove_file(path) { + Ok(()) => Ok(true), + Err(err) if err.kind() == io::ErrorKind::NotFound => Ok(false), + Err(err) => Err(err), + } } } diff --git a/sdk/src/core/droid_output_plan.rs b/sdk/src/core/droid_output_plan.rs new file mode 100644 index 00000000..65a2257c --- /dev/null +++ b/sdk/src/core/droid_output_plan.rs @@ -0,0 +1,1166 @@ +use std::collections::HashSet; +use std::path::{Path, PathBuf}; + +use serde::{Deserialize, Serialize}; +use serde_json::{Map, Value}; + +use crate::CliError; +use crate::core::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::core::config; +use crate::core::plugin_shared::{ + CollectedInputContext, FastCommandPrompt, Project, RelativePath, RuleScope, SkillPrompt, + SkillResourceEncoding, Workspace, +}; + +const DROID_PLUGIN_NAME: &str = "DroidCLIOutputAdaptor"; +const DROID_MEMORY_FILE: &str = "AGENTS.md"; +const 
DROID_GLOBAL_CONFIG_DIR: &str = ".factory"; +const DROID_COMMANDS_SUBDIR: &str = "commands"; +const DROID_SKILLS_SUBDIR: &str = "skills"; +const PROJECT_SCOPE: &str = "project"; +const GLOBAL_SCOPE: &str = "global"; + +#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct DroidOutputFileDeclarationDto { + pub path: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub scope: Option, + pub content: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub encoding: Option, +} + +#[derive(Debug, Clone, Default, Serialize, Deserialize)] +#[serde(rename_all = "camelCase")] +pub struct DroidOutputPlanDto { + pub plugin_name: String, + #[serde(default)] + pub output_files: Vec, + #[serde(default)] + pub cleanup: CleanupDeclarationsDto, +} + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +enum OutputSelectionScope { + Project, + Global, +} + +pub fn collect_droid_output_plan(context_json: &str) -> Result { + let context = serde_json::from_str::(context_json)?; + let plan = build_droid_output_plan(&context)?; + serde_json::to_string(&plan).map_err(CliError::from) +} + +pub fn build_droid_output_plan( + context: &CollectedInputContext, +) -> Result { + let workspace = context.workspace.as_ref().ok_or_else(|| { + CliError::ExecutionError( + "collectDroidOutputPlan requires collectedOutputContext.workspace".to_string(), + ) + })?; + + Ok(DroidOutputPlanDto { + plugin_name: DROID_PLUGIN_NAME.to_string(), + output_files: build_output_files(workspace, context)?, + cleanup: build_cleanup(workspace), + }) +} + +fn build_output_files( + workspace: &Workspace, + context: &CollectedInputContext, +) -> Result, CliError> { + let mut output_files = Vec::new(); + + for project in get_project_prompt_output_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + + if let Some(root_prompt) = project.root_memory_prompt.as_ref() { + 
output_files.push(create_text_output_file( + project_root_dir.join(DROID_MEMORY_FILE), + Some(PROJECT_SCOPE), + root_prompt.content.clone(), + )); + } + + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + for child_prompt in child_prompts { + output_files.push(create_text_output_file( + resolve_relative_path(&child_prompt.dir).join(DROID_MEMORY_FILE), + Some(PROJECT_SCOPE), + child_prompt.content.clone(), + )); + } + } + } + + append_command_output_files(&mut output_files, workspace, context)?; + append_skill_output_files(&mut output_files, workspace, context)?; + + if let Some(global_memory) = context.global_memory.as_ref() { + output_files.push(create_text_output_file( + resolve_effective_home_dir() + .join(DROID_GLOBAL_CONFIG_DIR) + .join(DROID_MEMORY_FILE), + Some(GLOBAL_SCOPE), + global_memory.content.clone(), + )); + } + + Ok(output_files) +} + +fn append_command_output_files( + output_files: &mut Vec, + workspace: &Workspace, + context: &CollectedInputContext, +) -> Result<(), CliError> { + let commands = context.fast_commands.as_deref().unwrap_or(&[]); + let Some(selected_scope) = select_single_scope(commands.iter().map(resolve_command_scope)) else { + return Ok(()); + }; + + match selected_scope { + OutputSelectionScope::Project => { + for project in get_project_output_projects(workspace) { + let Some(project_config_dir) = resolve_project_config_dir(workspace, project) else { + continue; + }; + + let filtered_commands = + filter_commands_for_project(commands, project.project_config.as_ref(), selected_scope); + for command in filtered_commands { + output_files.push(create_text_output_file( + project_config_dir + .join(DROID_COMMANDS_SUBDIR) + .join(transform_command_name(command)), + Some(PROJECT_SCOPE), + build_command_content(command)?, + )); + } + } + } + OutputSelectionScope::Global => { + let global_config_dir = resolve_effective_home_dir().join(DROID_GLOBAL_CONFIG_DIR); + let prompt_source_project_config = 
resolve_prompt_source_project_config(workspace); + let filtered_commands = + filter_commands_for_project(commands, prompt_source_project_config, selected_scope); + + for command in filtered_commands { + output_files.push(create_text_output_file( + global_config_dir + .join(DROID_COMMANDS_SUBDIR) + .join(transform_command_name(command)), + Some(GLOBAL_SCOPE), + build_command_content(command)?, + )); + } + } + } + + Ok(()) +} + +fn append_skill_output_files( + output_files: &mut Vec, + workspace: &Workspace, + context: &CollectedInputContext, +) -> Result<(), CliError> { + let skills = context.skills.as_deref().unwrap_or(&[]); + let Some(selected_scope) = select_single_scope(skills.iter().map(resolve_skill_scope)) else { + return Ok(()); + }; + + match selected_scope { + OutputSelectionScope::Project => { + for project in get_project_output_projects(workspace) { + let Some(project_config_dir) = resolve_project_config_dir(workspace, project) else { + continue; + }; + + let filtered_skills = + filter_skills_for_project(skills, project.project_config.as_ref(), selected_scope); + append_skill_files_for_scope( + output_files, + project_config_dir, + PROJECT_SCOPE, + &filtered_skills, + )?; + } + } + OutputSelectionScope::Global => { + let global_config_dir = resolve_effective_home_dir().join(DROID_GLOBAL_CONFIG_DIR); + let prompt_source_project_config = resolve_prompt_source_project_config(workspace); + let filtered_skills = + filter_skills_for_project(skills, prompt_source_project_config, selected_scope); + append_skill_files_for_scope( + output_files, + global_config_dir, + GLOBAL_SCOPE, + &filtered_skills, + )?; + } + } + + Ok(()) +} + +fn append_skill_files_for_scope( + output_files: &mut Vec, + base_dir: PathBuf, + scope: &str, + skills: &[&SkillPrompt], +) -> Result<(), CliError> { + for skill in skills { + let skill_dir = base_dir + .join(DROID_SKILLS_SUBDIR) + .join(resolve_skill_dir_name(skill)); + + output_files.push(create_text_output_file( + 
skill_dir.join("SKILL.md"), + Some(scope), + build_skill_main_content(skill)?, + )); + + if let Some(child_docs) = skill.child_docs.as_ref() { + for child_doc in child_docs { + output_files.push(create_text_output_file( + skill_dir.join(transform_child_doc_path(&child_doc.relative_path)), + Some(scope), + child_doc.content.clone(), + )); + } + } + + if let Some(resources) = skill.resources.as_ref() { + for resource in resources { + output_files.push(create_resource_output_file( + skill_dir.join(&resource.relative_path), + Some(scope), + resource.content.clone(), + resource.encoding, + )); + } + } + } + + Ok(()) +} + +fn build_cleanup(workspace: &Workspace) -> CleanupDeclarationsDto { + let mut delete = Vec::new(); + + for project in get_project_output_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + + delete.push(create_cleanup_target( + project_root_dir.join(DROID_MEMORY_FILE), + CleanupTargetKindDto::File, + Some(PROJECT_SCOPE), + Some("delete.project"), + )); + delete.push(create_cleanup_target( + project_root_dir + .join(DROID_GLOBAL_CONFIG_DIR) + .join(DROID_COMMANDS_SUBDIR), + CleanupTargetKindDto::Directory, + Some(PROJECT_SCOPE), + Some("delete.project"), + )); + delete.push(create_cleanup_target( + project_root_dir + .join(DROID_GLOBAL_CONFIG_DIR) + .join(DROID_SKILLS_SUBDIR), + CleanupTargetKindDto::Directory, + Some(PROJECT_SCOPE), + Some("delete.project"), + )); + } + + let global_config_dir = resolve_effective_home_dir().join(DROID_GLOBAL_CONFIG_DIR); + delete.push(create_cleanup_target( + global_config_dir.join(DROID_MEMORY_FILE), + CleanupTargetKindDto::File, + Some(GLOBAL_SCOPE), + Some("delete.global"), + )); + delete.push(create_cleanup_target( + global_config_dir.join(DROID_COMMANDS_SUBDIR), + CleanupTargetKindDto::Directory, + Some(GLOBAL_SCOPE), + Some("delete.global"), + )); + delete.push(create_cleanup_target( + global_config_dir.join(DROID_SKILLS_SUBDIR), + 
CleanupTargetKindDto::Directory, + Some(GLOBAL_SCOPE), + Some("delete.global"), + )); + + CleanupDeclarationsDto { + delete, + ..CleanupDeclarationsDto::default() + } +} + +fn resolve_effective_home_dir() -> PathBuf { + let runtime_environment = config::resolve_runtime_environment(); + runtime_environment + .effective_home_dir + .or(runtime_environment.native_home_dir) + .unwrap_or_else(|| PathBuf::from("/")) +} + +fn get_concrete_projects(workspace: &Workspace) -> Vec<&Project> { + workspace + .projects + .iter() + .filter(|project| project.is_workspace_root_project != Some(true)) + .collect() +} + +fn get_project_output_projects(workspace: &Workspace) -> Vec<&Project> { + let mut projects = get_concrete_projects(workspace); + + if let Some(workspace_root_project) = workspace + .projects + .iter() + .find(|project| project.is_workspace_root_project == Some(true)) + { + projects.push(workspace_root_project); + } + + projects +} + +fn get_project_prompt_output_projects(workspace: &Workspace) -> Vec<&Project> { + get_project_output_projects(workspace) + .into_iter() + .filter(|project| project.is_prompt_source_project != Some(true)) + .collect() +} + +fn resolve_project_root_dir(workspace: &Workspace, project: &Project) -> Option { + if project.is_workspace_root_project == Some(true) { + return Some(PathBuf::from(&workspace.directory.path)); + } + + project + .dir_from_workspace_path + .as_ref() + .map(resolve_relative_path) +} + +fn resolve_project_config_dir(workspace: &Workspace, project: &Project) -> Option { + let project_root_dir = resolve_project_root_dir(workspace, project)?; + Some(project_root_dir.join(DROID_GLOBAL_CONFIG_DIR)) +} + +fn resolve_prompt_source_project_config(workspace: &Workspace) -> Option<&Value> { + let concrete_projects = get_concrete_projects(workspace); + concrete_projects + .iter() + .find(|project| project.is_prompt_source_project == Some(true)) + .and_then(|project| project.project_config.as_ref()) + .or_else(|| { + concrete_projects 
+ .first() + .and_then(|project| project.project_config.as_ref()) + }) +} + +fn resolve_relative_path(relative_path: &RelativePath) -> PathBuf { + let raw_path = Path::new(&relative_path.path); + if raw_path.is_absolute() { + return raw_path.to_path_buf(); + } + if relative_path.base_path.is_empty() { + return raw_path.to_path_buf(); + } + PathBuf::from(&relative_path.base_path).join(raw_path) +} + +fn create_text_output_file( + path: PathBuf, + scope: Option<&str>, + content: String, +) -> DroidOutputFileDeclarationDto { + DroidOutputFileDeclarationDto { + path: path.to_string_lossy().into_owned(), + scope: scope.map(str::to_string), + content, + encoding: None, + } +} + +fn create_resource_output_file( + path: PathBuf, + scope: Option<&str>, + content: String, + encoding: SkillResourceEncoding, +) -> DroidOutputFileDeclarationDto { + let encoding = match encoding { + SkillResourceEncoding::Text => "text", + SkillResourceEncoding::Base64 => "base64", + }; + + DroidOutputFileDeclarationDto { + path: path.to_string_lossy().into_owned(), + scope: scope.map(str::to_string), + content, + encoding: Some(encoding.to_string()), + } +} + +fn create_cleanup_target( + path: PathBuf, + kind: CleanupTargetKindDto, + scope: Option<&str>, + label: Option<&str>, +) -> CleanupTargetDto { + CleanupTargetDto { + path: path.to_string_lossy().into_owned(), + kind, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: scope.map(str::to_string), + label: label.map(str::to_string), + } +} + +fn select_single_scope( + scopes: impl Iterator, +) -> Option { + let mut has_project = false; + let mut has_global = false; + + for scope in scopes { + match scope { + OutputSelectionScope::Project => has_project = true, + OutputSelectionScope::Global => has_global = true, + } + } + + if has_project { + return Some(OutputSelectionScope::Project); + } + if has_global { + return Some(OutputSelectionScope::Global); + } + None +} + +fn filter_commands_for_project<'a>( + commands: &'a 
[FastCommandPrompt], + project_config: Option<&Value>, + selected_scope: OutputSelectionScope, +) -> Vec<&'a FastCommandPrompt> { + let effective_include_series = resolve_effective_include_series(project_config, "commands"); + + commands + .iter() + .filter(|command| resolve_command_scope(command) == selected_scope) + .filter(|command| { + matches_command_series(command.seri_name.as_deref(), &effective_include_series) + }) + .collect() +} + +fn filter_skills_for_project<'a>( + skills: &'a [SkillPrompt], + project_config: Option<&Value>, + selected_scope: OutputSelectionScope, +) -> Vec<&'a SkillPrompt> { + let effective_include_series = resolve_effective_include_series(project_config, "skills"); + + skills + .iter() + .filter(|skill| resolve_skill_scope(skill) == selected_scope) + .filter(|skill| { + matches_series_value( + resolve_skill_extra_value(skill, "seriName"), + &effective_include_series, + ) + }) + .collect() +} + +fn resolve_command_scope(command: &FastCommandPrompt) -> OutputSelectionScope { + if command.global_only == Some(true) { + return OutputSelectionScope::Global; + } + + match command + .yaml_front_matter + .as_ref() + .and_then(|front_matter| front_matter.scope) + { + Some(RuleScope::Global) => OutputSelectionScope::Global, + _ => OutputSelectionScope::Project, + } +} + +fn resolve_skill_scope(skill: &SkillPrompt) -> OutputSelectionScope { + match resolve_skill_extra_value(skill, "scope").and_then(Value::as_str) { + Some("global") => OutputSelectionScope::Global, + _ => OutputSelectionScope::Project, + } +} + +fn resolve_effective_include_series( + project_config: Option<&Value>, + topic_key: &str, +) -> Vec { + let mut merged = Vec::new(); + let mut seen = HashSet::new(); + + for value in collect_string_values(project_config.and_then(|config| config.get("includeSeries"))) + { + if seen.insert(value.clone()) { + merged.push(value); + } + } + + for value in collect_string_values( + project_config + .and_then(|config| config.get(topic_key)) + 
.and_then(|type_config| type_config.get("includeSeries")), + ) { + if seen.insert(value.clone()) { + merged.push(value); + } + } + + merged +} + +fn collect_string_values(value: Option<&Value>) -> Vec { + match value { + Some(Value::Array(values)) => values + .iter() + .filter_map(|entry| entry.as_str().map(str::to_string)) + .collect(), + _ => Vec::new(), + } +} + +fn matches_command_series(seri_name: Option<&str>, effective_include_series: &[String]) -> bool { + match seri_name { + None => true, + Some(_) if effective_include_series.is_empty() => true, + Some(series) => effective_include_series.iter().any(|entry| entry == series), + } +} + +fn matches_series_value(value: Option<&Value>, effective_include_series: &[String]) -> bool { + let Some(value) = value else { + return true; + }; + if effective_include_series.is_empty() { + return true; + } + + match value { + Value::String(series) => effective_include_series.iter().any(|entry| entry == series), + Value::Array(values) => values.iter().any(|entry| { + entry.as_str().is_some_and(|series| { + effective_include_series + .iter() + .any(|candidate| candidate == series) + }) + }), + _ => true, + } +} + +fn resolve_skill_extra_value<'a>(skill: &'a SkillPrompt, key: &str) -> Option<&'a Value> { + skill + .yaml_front_matter + .as_ref() + .and_then(|front_matter| front_matter.extra.get(key)) +} + +fn transform_command_name(command: &FastCommandPrompt) -> String { + match command.series.as_deref() { + Some(series) if !series.is_empty() => format!("{series}-{}.md", command.command_name), + _ => format!("{}.md", command.command_name), + } +} + +fn resolve_skill_dir_name(skill: &SkillPrompt) -> String { + if !skill.skill_name.trim().is_empty() { + return skill.skill_name.clone(); + } + + skill.dir.get_directory_name() +} + +fn transform_child_doc_path(relative_path: &str) -> String { + match relative_path.strip_suffix(".mdx") { + Some(prefix) => format!("{prefix}.md"), + None => relative_path.to_string(), + } +} + +fn 
build_command_content(command: &FastCommandPrompt) -> Result { + let front_matter = command + .yaml_front_matter + .as_ref() + .map(serde_json::to_value) + .transpose()?; + + build_markdown_with_front_matter(front_matter, command.content.clone()) +} + +fn build_skill_main_content(skill: &SkillPrompt) -> Result { + let Some(front_matter) = skill.yaml_front_matter.as_ref() else { + return Ok(skill.content.clone()); + }; + + let mut simplified = Map::new(); + simplified.insert( + "name".to_string(), + Value::String(resolve_skill_dir_name(skill)), + ); + + if let Some(description) = front_matter.description.as_ref() { + simplified.insert( + "description".to_string(), + Value::String(description.clone()), + ); + } + + build_markdown_with_front_matter(Some(Value::Object(simplified)), skill.content.clone()) +} + +fn build_markdown_with_front_matter( + front_matter: Option, + content: String, +) -> Result { + let Some(Value::Object(front_matter_map)) = front_matter else { + return Ok(content); + }; + + let cleaned: Map = front_matter_map + .into_iter() + .filter(|(_, value)| !value.is_null()) + .collect(); + + if cleaned.is_empty() { + return Ok(content); + } + + let yaml_content = serde_yml::to_string(&Value::Object(cleaned)) + .map_err(|error| CliError::ExecutionError(error.to_string()))?; + let yaml_trimmed = yaml_content.trim_end(); + + Ok(format!("---\n{yaml_trimmed}\n---\n\n{content}")) +} + +#[cfg(test)] +mod tests { + use std::collections::HashMap; + use std::sync::{Mutex, OnceLock}; + + use tempfile::TempDir; + + use super::*; + use crate::core::plugin_shared::{ + FastCommandYAMLFrontMatter, FilePathKind, GlobalMemoryPrompt, ProjectChildrenMemoryPrompt, + ProjectRootMemoryPrompt, PromptKind, RootPath, SkillChildDoc, SkillResource, + SkillYAMLFrontMatter, + }; + + fn create_relative_path(base_path: &str, path: &str) -> RelativePath { + RelativePath::new(path, base_path) + } + + fn create_root_prompt(content: &str) -> ProjectRootMemoryPrompt { + 
ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Root, + dir: RootPath::new(""), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + } + } + + fn create_global_memory(content: &str, home_dir: &str) -> GlobalMemoryPrompt { + GlobalMemoryPrompt { + prompt_type: PromptKind::GlobalMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Relative, + dir: create_relative_path(home_dir, DROID_GLOBAL_CONFIG_DIR), + raw_front_matter: None, + markdown_contents: None, + parent_directory_path: None, + } + } + + fn create_child_prompt( + project_root: &str, + relative_dir: &str, + content: &str, + ) -> ProjectChildrenMemoryPrompt { + let relative_path = create_relative_path(project_root, relative_dir); + ProjectChildrenMemoryPrompt { + prompt_type: PromptKind::ProjectChildrenMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Relative, + dir: relative_path.clone(), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + working_child_directory_path: relative_path, + } + } + + fn create_project(workspace_root: &str, name: &str) -> Project { + Project { + name: Some(name.to_string()), + dir_from_workspace_path: Some(create_relative_path(workspace_root, name)), + ..Project::default() + } + } + + fn create_project_command( + project_root: &str, + name: &str, + series: &str, + content: &str, + ) -> FastCommandPrompt { + FastCommandPrompt { + prompt_type: PromptKind::FastCommand, + content: content.to_string(), + length: content.len(), + dir: create_relative_path(project_root, &format!("commands/{name}.mdx")), + command_name: name.to_string(), + series: Some(series.to_string()), + seri_name: Some(series.to_string()), + global_only: None, + yaml_front_matter: Some(FastCommandYAMLFrontMatter { 
+ description: Some(format!("{name} description")), + ..FastCommandYAMLFrontMatter::default() + }), + raw_mdx_content: None, + markdown_contents: None, + } + } + + fn create_global_command(project_root: &str, name: &str, content: &str) -> FastCommandPrompt { + FastCommandPrompt { + prompt_type: PromptKind::FastCommand, + content: content.to_string(), + length: content.len(), + dir: create_relative_path(project_root, &format!("commands/{name}.mdx")), + command_name: name.to_string(), + series: None, + seri_name: None, + global_only: Some(true), + yaml_front_matter: Some(FastCommandYAMLFrontMatter { + description: Some(format!("{name} description")), + scope: Some(RuleScope::Global), + ..FastCommandYAMLFrontMatter::default() + }), + raw_mdx_content: None, + markdown_contents: None, + } + } + + fn create_skill( + project_root: &str, + name: &str, + scope: &str, + seri_name: Option<&str>, + ) -> SkillPrompt { + let mut extra = HashMap::new(); + extra.insert("scope".to_string(), Value::String(scope.to_string())); + if let Some(series) = seri_name { + extra.insert("seriName".to_string(), Value::String(series.to_string())); + } + + SkillPrompt { + prompt_type: PromptKind::Skill, + content: "Skill body".to_string(), + length: "Skill body".len(), + skill_name: name.to_string(), + dir: create_relative_path(project_root, name), + yaml_front_matter: Some(SkillYAMLFrontMatter { + description: Some("Skill description".to_string()), + extra, + ..SkillYAMLFrontMatter::default() + }), + mcp_config: None, + child_docs: Some(vec![SkillChildDoc { + prompt_type: PromptKind::SkillChildDoc, + content: "Guide body".to_string(), + length: "Guide body".len(), + file_path_kind: FilePathKind::Relative, + relative_path: "guide.mdx".to_string(), + dir: create_relative_path(project_root, name), + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + }]), + resources: Some(vec![SkillResource { + prompt_type: PromptKind::SkillResource, + extension: ".bin".to_string(), + 
file_name: "blob.bin".to_string(), + relative_path: "assets/blob.bin".to_string(), + content: "aGVsbG8=".to_string(), + encoding: SkillResourceEncoding::Base64, + length: 8, + mime_type: None, + }]), + markdown_contents: None, + } + } + + fn env_lock() -> &'static Mutex<()> { + static LOCK: OnceLock> = OnceLock::new(); + LOCK.get_or_init(|| Mutex::new(())) + } + + fn with_home_dir(home_dir: &std::path::Path, callback: impl FnOnce() -> T) -> T { + let _guard = env_lock().lock().expect("env lock should be available"); + let previous_home = std::env::var_os("HOME"); + + unsafe { + std::env::set_var("HOME", home_dir); + } + + let result = callback(); + + match previous_home { + Some(value) => unsafe { + std::env::set_var("HOME", value); + }, + None => unsafe { + std::env::remove_var("HOME"); + }, + } + + result + } + + #[test] + fn builds_project_outputs_and_preserves_droid_skill_rendering() { + let temp_dir = TempDir::new().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let home_dir = temp_dir.path().join("home"); + let project_root = workspace_dir.join("project-a"); + let prompt_source_root = workspace_dir.join("aindex"); + + with_home_dir(&home_dir, || { + let context = CollectedInputContext { + workspace: Some(Workspace { + directory: RootPath::new(&workspace_dir.to_string_lossy()), + projects: vec![ + Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(create_root_prompt("workspace root")), + project_config: Some(serde_json::json!({ + "includeSeries": ["shared"], + "skills": {"includeSeries": ["shared"]} + })), + ..Project::default() + }, + Project { + is_prompt_source_project: Some(true), + root_memory_prompt: Some(create_root_prompt("prompt source root")), + project_config: Some(serde_json::json!({ + "includeSeries": ["shared"], + "skills": {"includeSeries": ["shared"]} + })), + ..create_project(&workspace_dir.to_string_lossy(), "aindex") + }, + Project { + 
root_memory_prompt: Some(create_root_prompt("project root")), + child_memory_prompts: Some(vec![create_child_prompt( + &project_root.to_string_lossy(), + "commands", + "project child", + )]), + project_config: Some(serde_json::json!({ + "includeSeries": ["shared"], + "skills": {"includeSeries": ["shared"]} + })), + ..create_project(&workspace_dir.to_string_lossy(), "project-a") + }, + ], + }), + fast_commands: Some(vec![ + create_project_command( + &prompt_source_root.to_string_lossy(), + "build", + "shared", + "Run build", + ), + create_global_command( + &prompt_source_root.to_string_lossy(), + "doctor", + "Run doctor", + ), + ]), + skills: Some(vec![create_skill( + &prompt_source_root.to_string_lossy(), + "ship", + "project", + Some("shared"), + )]), + global_memory: Some(create_global_memory( + "global memory", + &home_dir.to_string_lossy(), + )), + ..CollectedInputContext::default() + }; + + let plan = build_droid_output_plan(&context).unwrap(); + let output_paths = plan + .output_files + .iter() + .map(|entry| entry.path.as_str()) + .collect::>(); + + assert!(output_paths.contains(&workspace_dir.join("AGENTS.md").to_string_lossy().as_ref())); + assert!(output_paths.contains(&project_root.join("AGENTS.md").to_string_lossy().as_ref())); + assert!( + output_paths.contains( + &project_root + .join("commands") + .join("AGENTS.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + !output_paths.contains( + &prompt_source_root + .join("AGENTS.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + output_paths.contains( + &workspace_dir + .join(DROID_GLOBAL_CONFIG_DIR) + .join(DROID_COMMANDS_SUBDIR) + .join("shared-build.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + !output_paths.contains( + &home_dir + .join(DROID_GLOBAL_CONFIG_DIR) + .join(DROID_COMMANDS_SUBDIR) + .join("doctor.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + output_paths.contains( + &project_root + .join(DROID_GLOBAL_CONFIG_DIR) + .join(DROID_SKILLS_SUBDIR) + 
.join("ship") + .join("SKILL.md") + .to_string_lossy() + .as_ref() + ) + ); + + let skill_main = plan + .output_files + .iter() + .find(|entry| { + entry + .path + .ends_with("project-a/.factory/skills/ship/SKILL.md") + }) + .unwrap(); + let skill_resource = plan + .output_files + .iter() + .find(|entry| { + entry + .path + .ends_with("project-a/.factory/skills/ship/assets/blob.bin") + }) + .unwrap(); + + assert_eq!( + skill_main.content, + "---\nname: ship\ndescription: Skill description\n---\n\nSkill body" + ); + assert_eq!(skill_resource.encoding.as_deref(), Some("base64")); + assert!( + output_paths.contains( + &home_dir + .join(DROID_GLOBAL_CONFIG_DIR) + .join(DROID_MEMORY_FILE) + .to_string_lossy() + .as_ref() + ) + ); + }); + } + + #[test] + fn falls_back_to_global_scope_when_only_global_commands_and_skills_exist() { + let temp_dir = TempDir::new().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let home_dir = temp_dir.path().join("home"); + let prompt_source_root = workspace_dir.join("aindex"); + + with_home_dir(&home_dir, || { + let context = CollectedInputContext { + workspace: Some(Workspace { + directory: RootPath::new(&workspace_dir.to_string_lossy()), + projects: vec![ + Project { + is_prompt_source_project: Some(true), + project_config: Some(serde_json::json!({ + "includeSeries": ["global-only"], + "skills": {"includeSeries": ["global-only"]} + })), + ..create_project(&workspace_dir.to_string_lossy(), "aindex") + }, + create_project(&workspace_dir.to_string_lossy(), "project-a"), + ], + }), + fast_commands: Some(vec![create_global_command( + &prompt_source_root.to_string_lossy(), + "doctor", + "Run doctor", + )]), + skills: Some(vec![create_skill( + &prompt_source_root.to_string_lossy(), + "ship", + "global", + Some("global-only"), + )]), + ..CollectedInputContext::default() + }; + + let plan = build_droid_output_plan(&context).unwrap(); + let output_paths = plan + .output_files + .iter() + .map(|entry| entry.path.as_str()) + 
.collect::>(); + + assert!( + output_paths.contains( + &home_dir + .join(DROID_GLOBAL_CONFIG_DIR) + .join(DROID_COMMANDS_SUBDIR) + .join("doctor.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + output_paths.contains( + &home_dir + .join(DROID_GLOBAL_CONFIG_DIR) + .join(DROID_SKILLS_SUBDIR) + .join("ship") + .join("SKILL.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + !output_paths.contains( + &workspace_dir + .join("project-a") + .join(DROID_GLOBAL_CONFIG_DIR) + .join(DROID_COMMANDS_SUBDIR) + .join("doctor.md") + .to_string_lossy() + .as_ref() + ) + ); + }); + } + + #[test] + fn cleanup_matches_droid_declarative_targets() { + let temp_dir = TempDir::new().unwrap(); + let workspace_dir = temp_dir.path().join("workspace"); + let home_dir = temp_dir.path().join("home"); + + with_home_dir(&home_dir, || { + let workspace = Workspace { + directory: RootPath::new(&workspace_dir.to_string_lossy()), + projects: vec![ + Project { + is_workspace_root_project: Some(true), + ..Project::default() + }, + create_project(&workspace_dir.to_string_lossy(), "aindex"), + create_project(&workspace_dir.to_string_lossy(), "project-a"), + ], + }; + + let cleanup = build_cleanup(&workspace); + let delete_paths = cleanup + .delete + .iter() + .map(|target| target.path.replace('\\', "/")) + .collect::>(); + + assert!( + delete_paths.contains( + &workspace_dir + .join("AGENTS.md") + .to_string_lossy() + .replace('\\', "/") + ) + ); + assert!( + delete_paths.contains( + &workspace_dir + .join("project-a") + .join(".factory") + .join("commands") + .to_string_lossy() + .replace('\\', "/") + ) + ); + assert!( + delete_paths.contains( + &home_dir + .join(".factory") + .join("skills") + .to_string_lossy() + .replace('\\', "/") + ) + ); + assert!( + !delete_paths + .iter() + .any(|path| path.ends_with("/commands/AGENTS.md")) + ); + }); + } +} diff --git a/sdk/src/core/gemini_output_plan.rs b/sdk/src/core/gemini_output_plan.rs new file mode 100644 index 00000000..5cc43b53 --- 
/dev/null +++ b/sdk/src/core/gemini_output_plan.rs @@ -0,0 +1,484 @@ +use std::collections::HashSet; +use std::path::PathBuf; + +use crate::CliError; +use crate::core::base_output_plans::{BaseOutputFileDeclarationDto, BaseOutputPluginPlanDto}; +use crate::core::cleanup::{CleanupDeclarationsDto, CleanupTargetDto, CleanupTargetKindDto}; +use crate::core::config; +use crate::core::plugin_shared::{CollectedInputContext, Project, RelativePath, Workspace}; + +const GEMINI_PLUGIN_NAME: &str = "GeminiCLIOutputAdaptor"; +const GEMINI_MEMORY_FILE: &str = "GEMINI.md"; +const GEMINI_GLOBAL_CONFIG_DIR: &str = ".gemini"; + +pub fn collect_gemini_output_plan(context_json: &str) -> Result { + let context = serde_json::from_str::(context_json)?; + let plan = build_gemini_output_plan(&context)?; + serde_json::to_string(&plan).map_err(CliError::from) +} + +pub fn build_gemini_output_plan( + context: &CollectedInputContext, +) -> Result { + let workspace = context.workspace.as_ref().ok_or_else(|| { + CliError::ExecutionError( + "collectGeminiOutputPlan requires collectedOutputContext.workspace".to_string(), + ) + })?; + + Ok(BaseOutputPluginPlanDto { + plugin_name: GEMINI_PLUGIN_NAME.to_string(), + output_files: build_output_files(workspace, context), + cleanup: build_cleanup(workspace), + }) +} + +fn build_output_files( + workspace: &Workspace, + context: &CollectedInputContext, +) -> Vec { + let mut output_files = Vec::new(); + + for project in get_project_prompt_output_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + + if let Some(root_prompt) = project.root_memory_prompt.as_ref() { + output_files.push(BaseOutputFileDeclarationDto { + path: project_root_dir + .join(GEMINI_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + scope: Some("project".to_string()), + content: root_prompt.content.clone(), + }); + } + + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + for child_prompt in 
child_prompts { + output_files.push(BaseOutputFileDeclarationDto { + path: resolve_relative_path(&child_prompt.dir) + .join(GEMINI_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + scope: Some("project".to_string()), + content: child_prompt.content.clone(), + }); + } + } + } + + if let Some(global_memory) = context.global_memory.as_ref() { + output_files.push(BaseOutputFileDeclarationDto { + path: resolve_effective_home_dir() + .join(GEMINI_GLOBAL_CONFIG_DIR) + .join(GEMINI_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + scope: Some("global".to_string()), + content: global_memory.content.clone(), + }); + } + + output_files +} + +fn build_cleanup(workspace: &Workspace) -> CleanupDeclarationsDto { + let mut delete = Vec::new(); + let mut seen_project_files = HashSet::new(); + + for project in get_project_output_projects(workspace) { + let Some(project_root_dir) = resolve_project_root_dir(workspace, project) else { + continue; + }; + + delete.push(CleanupTargetDto { + path: project_root_dir + .join("**") + .join(GEMINI_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + kind: CleanupTargetKindDto::Glob, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: Some("project".to_string()), + label: Some("delete.project.glob".to_string()), + }); + + push_unique_cleanup_file( + &mut delete, + &mut seen_project_files, + project_root_dir.join(GEMINI_MEMORY_FILE), + "delete.project", + ); + + if let Some(child_prompts) = project.child_memory_prompts.as_ref() { + for child_prompt in child_prompts { + push_unique_cleanup_file( + &mut delete, + &mut seen_project_files, + resolve_relative_path(&child_prompt.dir).join(GEMINI_MEMORY_FILE), + "delete.project.child", + ); + } + } + } + + delete.push(CleanupTargetDto { + path: resolve_effective_home_dir() + .join(GEMINI_GLOBAL_CONFIG_DIR) + .join(GEMINI_MEMORY_FILE) + .to_string_lossy() + .into_owned(), + kind: CleanupTargetKindDto::File, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: 
Some("global".to_string()), + label: Some("delete.global".to_string()), + }); + + CleanupDeclarationsDto { + delete, + ..CleanupDeclarationsDto::default() + } +} + +fn resolve_effective_home_dir() -> PathBuf { + let runtime_environment = config::resolve_runtime_environment(); + runtime_environment + .effective_home_dir + .or(runtime_environment.native_home_dir) + .unwrap_or_else(|| PathBuf::from("/")) +} + +fn get_project_output_projects(workspace: &Workspace) -> Vec<&Project> { + let mut projects = workspace + .projects + .iter() + .filter(|project| project.is_workspace_root_project != Some(true)) + .collect::>(); + + if let Some(workspace_root_project) = workspace + .projects + .iter() + .find(|project| project.is_workspace_root_project == Some(true)) + { + projects.push(workspace_root_project); + } + + projects +} + +fn get_project_prompt_output_projects(workspace: &Workspace) -> Vec<&Project> { + get_project_output_projects(workspace) + .into_iter() + .filter(|project| project.is_prompt_source_project != Some(true)) + .collect() +} + +fn resolve_project_root_dir(workspace: &Workspace, project: &Project) -> Option { + if project.is_workspace_root_project == Some(true) { + return Some(PathBuf::from(&workspace.directory.path)); + } + + project + .dir_from_workspace_path + .as_ref() + .map(resolve_relative_path) +} + +fn resolve_relative_path(relative_path: &RelativePath) -> PathBuf { + PathBuf::from(relative_path.get_absolute_path()) +} + +fn push_unique_cleanup_file( + delete: &mut Vec, + seen_files: &mut HashSet, + path: PathBuf, + label: &str, +) { + let path_string = path.to_string_lossy().into_owned(); + if !seen_files.insert(path_string.clone()) { + return; + } + + delete.push(CleanupTargetDto { + path: path_string, + kind: CleanupTargetKindDto::File, + exclude_basenames: Vec::new(), + protection_mode: None, + scope: Some("project".to_string()), + label: Some(label.to_string()), + }); +} + +#[cfg(test)] +mod tests { + use std::sync::{Mutex, OnceLock}; + + 
use tempfile::TempDir; + + use super::*; + use crate::core::plugin_shared::{ + FilePathKind, GlobalMemoryPrompt, ProjectChildrenMemoryPrompt, ProjectRootMemoryPrompt, + PromptKind, RootPath, Workspace, + }; + + fn create_relative_path(base_path: &str, path: &str) -> RelativePath { + RelativePath::new(path, base_path) + } + + fn create_root_prompt(content: &str) -> ProjectRootMemoryPrompt { + ProjectRootMemoryPrompt { + prompt_type: PromptKind::ProjectRootMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Root, + dir: RootPath::new(""), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + } + } + + fn create_global_memory(content: &str, home_dir: &str) -> GlobalMemoryPrompt { + GlobalMemoryPrompt { + prompt_type: PromptKind::GlobalMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Relative, + dir: create_relative_path(home_dir, GEMINI_GLOBAL_CONFIG_DIR), + raw_front_matter: None, + markdown_contents: None, + parent_directory_path: None, + } + } + + fn create_child_prompt( + project_root: &str, + relative_dir: &str, + content: &str, + ) -> ProjectChildrenMemoryPrompt { + let relative_path = create_relative_path(project_root, relative_dir); + ProjectChildrenMemoryPrompt { + prompt_type: PromptKind::ProjectChildrenMemory, + content: content.to_string(), + length: content.len(), + file_path_kind: FilePathKind::Relative, + dir: relative_path.clone(), + yaml_front_matter: None, + raw_front_matter: None, + markdown_ast: None, + markdown_contents: None, + working_child_directory_path: relative_path, + } + } + + fn create_project(workspace_root: &str, name: &str) -> Project { + Project { + name: Some(name.to_string()), + dir_from_workspace_path: Some(create_relative_path(workspace_root, name)), + ..Project::default() + } + } + + fn env_lock() -> &'static Mutex<()> { + static LOCK: OnceLock> = OnceLock::new(); + LOCK.get_or_init(|| 
Mutex::new(())) + } + + fn with_home_dir(home_dir: &std::path::Path, callback: impl FnOnce() -> T) -> T { + let _guard = env_lock().lock().expect("env lock should be available"); + let previous_home = std::env::var_os("HOME"); + + unsafe { + std::env::set_var("HOME", home_dir); + } + + let result = callback(); + + match previous_home { + Some(value) => unsafe { + std::env::set_var("HOME", value); + }, + None => unsafe { + std::env::remove_var("HOME"); + }, + } + + result + } + + #[test] + fn builds_project_and_global_outputs() { + let temp_dir = match TempDir::new() { + Ok(dir) => dir, + Err(error) => panic!("temp dir should be created: {error}"), + }; + let workspace_dir = temp_dir.path().join("workspace"); + let home_dir = temp_dir.path().join("home"); + let project_root = workspace_dir.join("project-a"); + + with_home_dir(&home_dir, || { + let context = CollectedInputContext { + workspace: Some(Workspace { + directory: RootPath::new(&workspace_dir.to_string_lossy()), + projects: vec![ + Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + root_memory_prompt: Some(create_root_prompt("workspace root")), + ..Project::default() + }, + Project { + is_prompt_source_project: Some(true), + root_memory_prompt: Some(create_root_prompt("prompt source root")), + ..create_project(&workspace_dir.to_string_lossy(), "aindex") + }, + Project { + root_memory_prompt: Some(create_root_prompt("project root")), + child_memory_prompts: Some(vec![create_child_prompt( + &project_root.to_string_lossy(), + "commands", + "project child", + )]), + ..create_project(&workspace_dir.to_string_lossy(), "project-a") + }, + ], + }), + global_memory: Some(create_global_memory( + "global memory", + &home_dir.to_string_lossy(), + )), + ..CollectedInputContext::default() + }; + + let plan = match build_gemini_output_plan(&context) { + Ok(plan) => plan, + Err(error) => panic!("gemini plan should build: {error}"), + }; + let output_paths = plan + .output_files 
+ .iter() + .map(|entry| entry.path.as_str()) + .collect::>(); + + assert!(output_paths.contains(&workspace_dir.join("GEMINI.md").to_string_lossy().as_ref())); + assert!(output_paths.contains(&project_root.join("GEMINI.md").to_string_lossy().as_ref())); + assert!( + output_paths.contains( + &project_root + .join("commands") + .join("GEMINI.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + !output_paths.contains( + &workspace_dir + .join("aindex") + .join("GEMINI.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + output_paths.contains( + &home_dir + .join(".gemini") + .join("GEMINI.md") + .to_string_lossy() + .as_ref() + ) + ); + }); + } + + #[test] + fn cleanup_keeps_prompt_source_targets_and_global_file() { + let temp_dir = match TempDir::new() { + Ok(dir) => dir, + Err(error) => panic!("temp dir should be created: {error}"), + }; + let workspace_dir = temp_dir.path().join("workspace"); + let home_dir = temp_dir.path().join("home"); + let prompt_source_root = workspace_dir.join("aindex"); + + with_home_dir(&home_dir, || { + let context = CollectedInputContext { + workspace: Some(Workspace { + directory: RootPath::new(&workspace_dir.to_string_lossy()), + projects: vec![ + Project { + name: Some("__workspace__".to_string()), + is_workspace_root_project: Some(true), + ..Project::default() + }, + Project { + is_prompt_source_project: Some(true), + root_memory_prompt: Some(create_root_prompt("prompt source root")), + child_memory_prompts: Some(vec![create_child_prompt( + &prompt_source_root.to_string_lossy(), + "commands", + "prompt source child", + )]), + ..create_project(&workspace_dir.to_string_lossy(), "aindex") + }, + ], + }), + ..CollectedInputContext::default() + }; + + let plan = match build_gemini_output_plan(&context) { + Ok(plan) => plan, + Err(error) => panic!("gemini plan should build: {error}"), + }; + let cleanup_paths = plan + .cleanup + .delete + .iter() + .map(|entry| entry.path.as_str()) + .collect::>(); + + 
assert!(cleanup_paths.contains(&workspace_dir.join("GEMINI.md").to_string_lossy().as_ref())); + assert!( + cleanup_paths.contains( + &prompt_source_root + .join("GEMINI.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + cleanup_paths.contains( + &prompt_source_root + .join("commands") + .join("GEMINI.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + cleanup_paths.contains( + &home_dir + .join(".gemini") + .join("GEMINI.md") + .to_string_lossy() + .as_ref() + ) + ); + assert!( + cleanup_paths.contains( + &workspace_dir + .join("**") + .join("GEMINI.md") + .to_string_lossy() + .as_ref() + ) + ); + }); + } +} diff --git a/sdk/src/core/input_plugins/skill.rs b/sdk/src/core/input_plugins/skill.rs index 0915ab3e..5551652b 100644 --- a/sdk/src/core/input_plugins/skill.rs +++ b/sdk/src/core/input_plugins/skill.rs @@ -259,6 +259,16 @@ fn get_mime_type(ext: &str) -> Option<&'static str> { .map(|(_, m)| *m) } +fn normalize_resource_extension(ext: &str) -> String { + if ext.is_empty() { + return String::new(); + } + if ext.starts_with('.') { + return ext.to_string(); + } + format!(".{}", ext) +} + fn read_file_content( file_path: &Path, ext: &str, @@ -367,14 +377,15 @@ fn scan_resources( } let ext = path.extension().and_then(|s| s.to_str()).unwrap_or(""); + let normalized_ext = normalize_resource_extension(ext); let relative_path = path .strip_prefix(root_src_dir) .unwrap_or(&path) .to_string_lossy() .replace('\\', "/"); - let (content, encoding, length) = read_file_content(&path, ext)?; - let mime_type = get_mime_type(ext).map(|m| m.to_string()); + let (content, encoding, length) = read_file_content(&path, &normalized_ext)?; + let mime_type = get_mime_type(&normalized_ext).map(|m| m.to_string()); resources.push(SkillResource { prompt_type: PromptKind::SkillResource, @@ -452,7 +463,15 @@ fn assert_compiled_child_docs_exist( Ok(()) } -fn read_mcp_config(skill_src_dir: &Path) -> Result, crate::CliError> { +fn is_supported_mcp_server_config(config: 
&McpServerConfig) -> bool { + config.command.is_some() || config.url.is_some() || config.server_url.is_some() +} + +fn read_mcp_config( + skill_name: &str, + skill_src_dir: &Path, + diagnostics: &mut Vec, +) -> Result, crate::CliError> { let mcp_json_path = skill_src_dir.join("mcp.json"); if !mcp_json_path.is_file() { return Ok(None); @@ -467,8 +486,26 @@ fn read_mcp_config(skill_src_dir: &Path) -> Result, crate let config: McpServerConfig = serde_json::from_value(value.clone()).map_err(|e| { crate::CliError::ConfigError(format!("Invalid McpServerConfig for {}: {}", key, e)) })?; + if !is_supported_mcp_server_config(&config) { + diagnostics.push(crate::core::plugin_shared::Diagnostic { + level: "warn".to_string(), + code: "SKILL_MCP_SERVER_SKIPPED".to_string(), + title: format!( + "Skipped unsupported MCP server \"{}\" in skill \"{}\" because it defines neither \"command\" nor \"url\"", + key, skill_name + ), + exact_fix: Some(vec![format!( + "Add \"command\" for a local MCP server or \"url\" / \"serverUrl\" for a remote MCP server in {}", + mcp_json_path.to_string_lossy() + )]), + }); + continue; + } mcp_servers.insert(key.clone(), config); } + if mcp_servers.is_empty() { + return Ok(None); + } return Ok(Some(SkillMcpConfig { prompt_type: PromptKind::SkillMcpConfig, mcp_servers, @@ -601,7 +638,7 @@ fn create_skill_prompt( } else { vec![] }; - let mcp_config = read_mcp_config(skill_src_dir)?; + let mcp_config = read_mcp_config(name, skill_src_dir, diagnostics)?; assert_compiled_child_docs_exist(name, skill_src_dir, skill_dist_dir)?; @@ -869,6 +906,144 @@ mod tests { ); } + #[test] + fn collect_skill_accepts_remote_mcp_servers() { + let tmp = TempDir::new().unwrap(); + let src = tmp.path().join("aindex").join("skills").join("demo"); + let dist = tmp + .path() + .join("aindex") + .join("dist") + .join("skills") + .join("demo"); + fs::create_dir_all(&src).unwrap(); + fs::create_dir_all(&dist).unwrap(); + + fs::write( + src.join("skill.src.mdx"), + "---\ndescription: 
src skill\n---\nSkill source", + ) + .unwrap(); + fs::write( + src.join("mcp.json"), + r#"{"mcpServers":{"figma":{"url":"https://mcp.figma.com/mcp","disabled":false,"disabledTools":[]}}}"#, + ) + .unwrap(); + fs::write( + dist.join("skill.mdx"), + "---\ndescription: dist skill\n---\nSkill dist", + ) + .unwrap(); + + let options = serde_json::json!({ + "workspaceDir": tmp.path().to_string_lossy().to_string(), + }); + + let result = collect_skill(&options.to_string()).unwrap(); + let parsed: serde_json::Value = serde_json::from_str(&result).unwrap(); + let skill = &parsed["skills"][0]; + assert_eq!( + skill["mcpConfig"]["mcpServers"]["figma"]["url"], + "https://mcp.figma.com/mcp" + ); + assert_eq!(skill["mcpConfig"]["mcpServers"]["figma"]["disabled"], false); + assert_eq!( + skill["mcpConfig"]["mcpServers"]["figma"]["disabledTools"], + serde_json::json!([]) + ); + } + + #[test] + fn collect_skill_skips_invalid_mcp_servers_with_warning() { + let tmp = TempDir::new().unwrap(); + let src = tmp.path().join("aindex").join("skills").join("demo"); + let dist = tmp + .path() + .join("aindex") + .join("dist") + .join("skills") + .join("demo"); + fs::create_dir_all(&src).unwrap(); + fs::create_dir_all(&dist).unwrap(); + + fs::write( + src.join("skill.src.mdx"), + "---\ndescription: src skill\n---\nSkill source", + ) + .unwrap(); + fs::write( + src.join("mcp.json"), + r#"{"mcpServers":{"broken":{"disabled":false},"demo":{"command":"demo"}}}"#, + ) + .unwrap(); + fs::write( + dist.join("skill.mdx"), + "---\ndescription: dist skill\n---\nSkill dist", + ) + .unwrap(); + + let options = serde_json::json!({ + "workspaceDir": tmp.path().to_string_lossy().to_string(), + }); + + let result = collect_skill(&options.to_string()).unwrap(); + let parsed: serde_json::Value = serde_json::from_str(&result).unwrap(); + let skill = &parsed["skills"][0]; + assert!(skill["mcpConfig"]["mcpServers"]["broken"].is_null()); + assert_eq!(skill["mcpConfig"]["mcpServers"]["demo"]["command"], "demo"); + 
let diagnostics = parsed["diagnostics"].as_array().unwrap(); + assert!( + diagnostics + .iter() + .any(|d| d["code"] == "SKILL_MCP_SERVER_SKIPPED") + ); + } + + #[test] + fn collect_skill_reads_binary_resources_as_base64() { + let tmp = TempDir::new().unwrap(); + let src = tmp.path().join("aindex").join("skills").join("demo"); + let dist = tmp + .path() + .join("aindex") + .join("dist") + .join("skills") + .join("demo"); + fs::create_dir_all(src.join("assets")).unwrap(); + fs::create_dir_all(&dist).unwrap(); + + fs::write( + src.join("skill.src.mdx"), + "---\ndescription: src skill\n---\nSkill source", + ) + .unwrap(); + fs::write( + src.join("assets").join("logo.png"), + [0x89_u8, 0x50, 0x4E, 0x47, 0x00, 0xFF], + ) + .unwrap(); + fs::write( + dist.join("skill.mdx"), + "---\ndescription: dist skill\n---\nSkill dist", + ) + .unwrap(); + + let options = serde_json::json!({ + "workspaceDir": tmp.path().to_string_lossy().to_string(), + }); + + let result = collect_skill(&options.to_string()).unwrap(); + let parsed: serde_json::Value = serde_json::from_str(&result).unwrap(); + let skill = &parsed["skills"][0]; + let resources = skill["resources"].as_array().unwrap(); + let logo = resources + .iter() + .find(|resource| resource["relativePath"] == "assets/logo.png") + .unwrap(); + assert_eq!(logo["encoding"], "base64"); + assert_eq!(logo["mimeType"], "image/png"); + } + #[test] fn collect_skill_fails_missing_child_doc() { let tmp = TempDir::new().unwrap(); diff --git a/sdk/src/core/input_plugins/workspace.rs b/sdk/src/core/input_plugins/workspace.rs index b0270cc8..a6ebdd8e 100644 --- a/sdk/src/core/input_plugins/workspace.rs +++ b/sdk/src/core/input_plugins/workspace.rs @@ -2,6 +2,7 @@ use std::path::Path; use serde::{Deserialize, Serialize}; +use crate::core::config; use crate::core::plugin_shared::{RootPath, Workspace}; #[derive(Debug, Clone, Default, Serialize, Deserialize)] @@ -31,9 +32,10 @@ pub fn collect_workspace(options_json: &str) -> Result 
serde_json::from_str(options_json).map_err(|e| crate::CliError::ConfigError(e.to_string()))?; let workspace_dir_raw = options.workspace_dir; - let workspace_dir = Path::new(&workspace_dir_raw) + let expanded_workspace_dir = config::resolve_tilde(&workspace_dir_raw); + let workspace_dir = expanded_workspace_dir .canonicalize() - .unwrap_or_else(|_| Path::new(&workspace_dir_raw).to_path_buf()); + .unwrap_or(expanded_workspace_dir); let workspace_dir_str = workspace_dir.to_string_lossy().into_owned(); let aindex_dir_name = options diff --git a/sdk/src/core/mod.rs b/sdk/src/core/mod.rs index 66e323e0..6b1e0bfa 100644 --- a/sdk/src/core/mod.rs +++ b/sdk/src/core/mod.rs @@ -1,7 +1,11 @@ +pub mod base_output_plans; pub mod cleanup; +pub mod command_bridge; pub mod config; pub mod dependency_resolver; pub mod desk_paths; +pub mod droid_output_plan; +pub mod gemini_output_plan; pub mod git_discovery; pub mod input_plugins; pub mod path_blocking; diff --git a/sdk/src/core/native-binding-loader.test.ts b/sdk/src/core/native-binding-loader.test.ts new file mode 100644 index 00000000..ead1734c --- /dev/null +++ b/sdk/src/core/native-binding-loader.test.ts @@ -0,0 +1,503 @@ +import type {NativeBindingLoaderOptions, PlatformBinding} from './native-binding-loader' +import {afterEach, beforeEach, describe, expect, it, vi} from 'vitest' +import { + createNativeBindingLoader, + formatBindingLoadError, + getPlatformBinding, + loadBindingFromCliBinaryPackage, + loadBindingFromDirectory +} from './native-binding-loader' + +interface MockBinding { + readonly testMethod: () => string + readonly optionalSnakeCase?: () => string +} + +const {mockRequire, mockReaddirSync} = vi.hoisted(() => ({ + mockRequire: vi.fn(), + mockReaddirSync: vi.fn() +})) + +const mockPlatformBindings: Record = { + 'win32-x64': {local: 'napi-test.win32-x64-msvc', suffix: 'win32-x64-msvc'}, + 'linux-x64': {local: 'napi-test.linux-x64-gnu', suffix: 'linux-x64-gnu'}, + 'linux-arm64': {local: 
'napi-test.linux-arm64-gnu', suffix: 'linux-arm64-gnu'}, + 'darwin-arm64': {local: 'napi-test.darwin-arm64', suffix: 'darwin-arm64'}, + 'darwin-x64': {local: 'napi-test.darwin-x64', suffix: 'darwin-x64'} +} + +const defaultOptions: NativeBindingLoaderOptions = { + packageName: '@truenine/test-package', + binaryName: 'napi-test', + bindingValidator: (value): value is MockBinding => + value != null + && typeof value === 'object' + && typeof (value as MockBinding).testMethod === 'function', + _readdirSync: mockReaddirSync +} + +function createValidBinding(): MockBinding { + return { + testMethod: () => 'test-result' + } +} + +vi.mock('node:module', async () => ({createRequire: () => mockRequire})) + +describe('getPlatformBinding', () => { + it('returns correct binding for supported platforms', () => { + const linuxX64 = getPlatformBinding(mockPlatformBindings, '@truenine/test') + expect(linuxX64).toEqual({ + local: 'napi-test.linux-x64-gnu', + suffix: 'linux-x64-gnu' + }) + }) + + it('throws error for unsupported platform', () => { + const originalPlatform = process.platform + const originalArch = process.arch + + Object.defineProperty(process, 'platform', {value: 'freebsd'}) + Object.defineProperty(process, 'arch', {value: 'x64'}) + + try { + expect(() => getPlatformBinding(mockPlatformBindings, '@truenine/test')).toThrow( + /Unsupported platform for @truenine\/test native binding/ + ) + } + finally { + Object.defineProperty(process, 'platform', {value: originalPlatform}) + Object.defineProperty(process, 'arch', {value: originalArch}) + } + }) +}) + +describe('formatBindingLoadError', () => { + it('formats error with all details', () => { + const localError = new Error('Cannot find module') + const packageError = new Error('Package not found') + const formatted = formatBindingLoadError( + '@truenine/test-package', + localError, + packageError, + 'linux-x64-gnu' + ) + + expect(formatted.message).toContain('Failed to load @truenine/test-package native binding.') + 
expect(formatted.message).toContain('@truenine/memory-sync-cli-linux-x64-gnu') + expect(formatted.message).toContain('Local error: Cannot find module') + expect(formatted.message).toContain('Package error: Package not found') + expect(formatted.message).toContain('pnpm -F @truenine/test-package run build') + }) + + it('handles non-Error objects', () => { + const formatted = formatBindingLoadError( + '@truenine/test', + 'string-error', + null, + 'win32-x64-msvc' + ) + + expect(formatted.message).toContain('Local error: string-error') + expect(formatted.message).toContain('Package error: null') + }) +}) + +describe('loadBindingFromDirectory', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + it('finds and validates matching .node file', () => { + const validBinding = createValidBinding() + mockReaddirSync.mockReturnValue(['napi-test.linux-x64-gnu.node', 'other.file']) + mockRequire.mockReturnValue(validBinding) + + const result = loadBindingFromDirectory( + mockRequire as never, + '/some/path', + 'napi-test', + defaultOptions.bindingValidator, + mockReaddirSync + ) + + expect(result).toBe(validBinding) + expect(mockRequire).toHaveBeenCalledWith('/some/path/napi-test.linux-x64-gnu.node') + }) + + it('returns undefined when no valid binding found', () => { + mockReaddirSync.mockReturnValue(['napi-test.linux-x64-gnu.node']) + mockRequire.mockReturnValue({invalid: true}) + + const result = loadBindingFromDirectory( + mockRequire as never, + '/some/path', + 'napi-test', + defaultOptions.bindingValidator, + mockReaddirSync + ) + + expect(result).toBeUndefined() + }) + + it('filters files by binary name prefix and .node extension', () => { + mockReaddirSync.mockReturnValue([ + 'napi-other.linux-x64-gnu.node', + 'napi-test.linux-x64-gnu.js', + 'readme.md', + 'napi-test.linux-x64-gnu.node' + ]) + mockRequire.mockReturnValue(createValidBinding()) + + loadBindingFromDirectory( + mockRequire as never, + '/path', + 'napi-test', + defaultOptions.bindingValidator, + 
mockReaddirSync + ) + + expect(mockRequire).toHaveBeenCalledTimes(1) + expect(mockRequire).toHaveBeenCalledWith('/path/napi-test.linux-x64-gnu.node') + }) + + it('tries candidates in sorted order', () => { + const binding1 = createValidBinding() + const _binding2 = {...binding1, testMethod: () => 'second'} + mockReaddirSync.mockReturnValue([ + 'napi-test.zzz.node', + 'napi-test.aaa.node' + ]) + mockRequire + .mockReturnValueOnce(_binding2) + .mockReturnValueOnce(binding1) + + const result = loadBindingFromDirectory( + mockRequire as never, + '/path', + 'napi-test', + defaultOptions.bindingValidator, + mockReaddirSync + ) + + expect(result?.testMethod()).toBe('second') + }) +}) + +describe('loadBindingFromCliBinaryPackage', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + it.skip('loads binding from CLI package export with matching validator', () => { + interface TestCliExport { + readonly testMethod: () => string + } + + const validBinding: TestCliExport = { + testMethod: () => 'cli-binding-result' + } + + const cliOptions: NativeBindingLoaderOptions = { + packageName: '@truenine/test-cli', + binaryName: 'napi-test', + cliExportName: 'test', + bindingValidator: (value): value is TestCliExport => + value != null + && typeof value === 'object' + && typeof (value as TestCliExport).testMethod === 'function' + } + + mockRequire.mockImplementation((specifier: string) => { + if (specifier === '@truenine/memory-sync-cli-linux-x64-gnu') { + return {test: validBinding} + } + + throw new Error(`Not found: ${specifier}`) + }) + + const result = loadBindingFromCliBinaryPackage( + mockRequire as never, + cliOptions, + 'linux-x64-gnu' + ) + + expect(result).toBe(validBinding) + expect(result.testMethod()).toBe('cli-binding-result') + }) + + it.skip('uses binaryName as default export key when cliExportName not provided', () => { + const validBinding = createValidBinding() + mockRequire.mockImplementation((specifier: string) => { + if (specifier === 
'@truenine/memory-sync-cli-linux-x64-gnu') { + return {test: validBinding} + } + + throw new Error(`Not found: ${specifier}`) + }) + + const result = loadBindingFromCliBinaryPackage( + mockRequire as never, + {...defaultOptions, binaryName: 'napi-test'}, + 'linux-x64-gnu' + ) + + expect(result).toBe(validBinding) + }) + + it('falls back to directory scanning when direct export fails', () => { + const validBinding = createValidBinding() + mockRequire.mockImplementation((specifier: string) => { + if (specifier === '@truenine/memory-sync-cli-linux-x64-gnu') { + return {} + } + + if (specifier.endsWith('.node')) { + return validBinding + } + + if (specifier.includes('package.json')) { + throw new Error('Not found') + } + + throw new Error(`Not found: ${specifier}`) + }) + + mockReaddirSync.mockReturnValue(['napi-test.linux-x64-gnu.node']) + + const result = loadBindingFromCliBinaryPackage( + mockRequire as never, + defaultOptions, + 'linux-x64-gnu' + ) + + expect(result).toBe(validBinding) + }) + + it('throws error with last error when no binding found', () => { + mockRequire.mockImplementation(() => { + throw new Error('Module not found') + }) + + mockReaddirSync.mockReturnValue([]) + + expect(() => loadBindingFromCliBinaryPackage( + mockRequire as never, + defaultOptions, + 'linux-x64-gnu' + )).toThrow() + }) +}) + +describe('createNativeBindingLoader', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + afterEach(() => { + vi.resetModules() + }) + + it('creates loader function that returns binding', () => { + const validBinding = createValidBinding() + mockRequire.mockReturnValue(validBinding) + + const loader = createNativeBindingLoader({ + ...defaultOptions, + _requireFactory: () => mockRequire as never + }) + const binding = loader() + + expect(binding).toBe(validBinding) + expect(binding.testMethod()).toBe('test-result') + }) + + it('caches successful binding loads', () => { + const binding1 = createValidBinding() + 
mockRequire.mockReturnValueOnce(binding1) + + const loader = createNativeBindingLoader({ + ...defaultOptions, + _requireFactory: () => mockRequire as never + }) + const first = loader() + const second = loader() + + expect(first).toBe(second) + expect(mockRequire).toHaveBeenCalledTimes(1) + }) + + it('caches failed loads and rethrows same error', () => { + const testError = new Error('Permanent failure') + mockRequire.mockImplementation(() => { + throw testError + }) + + const loader = createNativeBindingLoader({ + ...defaultOptions, + _requireFactory: () => mockRequire as never + }) + + expect(() => loader()).toThrow('Permanent failure') + expect(() => loader()).toThrow('Permanent failure') + + const secondLoader = createNativeBindingLoader({ + ...defaultOptions, + _requireFactory: () => mockRequire as never + }) + + expect(() => secondLoader()).toThrow('Permanent failure') + }) + + it('applies optional method aliases', () => { + const bindingWithSnakeCase: MockBinding = { + testMethod: () => 'original', + optionalSnakeCase: () => 'snake-case-result' + } + + mockRequire.mockReturnValue(bindingWithSnakeCase) + + const loader = createNativeBindingLoader({ + ...defaultOptions, + optionalMethods: { + testMethod: ['optionalSnakeCase'] + }, + _requireFactory: () => mockRequire as never + }) + + const binding = loader() + expect(binding.testMethod()).toBe('original') + }) + + it('supports multiple alias fallbacks when preferred method is missing', () => { + interface BindingWithAliases { + readonly testMethod?: () => string + readonly firstAlias?: () => string + readonly secondAlias?: () => string + readonly thirdAlias: () => string + } + + const optionsWithAliases: NativeBindingLoaderOptions = { + ...defaultOptions, + bindingValidator: (value): value is BindingWithAliases => + value != null + && typeof value === 'object' + && typeof (value as BindingWithAliases).thirdAlias === 'function', + optionalMethods: { + testMethod: ['firstAlias', 'secondAlias', 'thirdAlias'] + } 
+ } + + const bindingWithAliasOnly: BindingWithAliases = { + thirdAlias: () => 'third-alias-result' + } + + mockRequire.mockReturnValue(bindingWithAliasOnly) + + const loader = createNativeBindingLoader({ + ...optionsWithAliases, + _requireFactory: () => mockRequire as never + }) + + const binding = loader() + expect(typeof binding.testMethod).toBe('function') + expect((binding.testMethod as () => string)()).toBe('third-alias-result') + }) + + it('respects custom packageSuffix option', () => { + const validBinding = createValidBinding() + mockRequire.mockReturnValue(validBinding) + + const loader = createNativeBindingLoader({ + ...defaultOptions, + packageSuffix: 'custom-suffix', + _requireFactory: () => mockRequire as never + }) + + loader() + expect(mockRequire).toHaveBeenCalledWith( + expect.stringContaining('custom-suffix') + ) + }) +}) + +describe('integration scenarios', () => { + beforeEach(() => { + vi.clearAllMocks() + }) + + afterEach(() => { + vi.resetModules() + }) + + it('simulates full loading flow for logger-like binding', () => { + interface LoggerBinding { + createLogger: () => unknown + setGlobalLogLevel: () => void + } + + const loggerOptions: NativeBindingLoaderOptions = { + packageName: '@truenine/logger', + binaryName: 'napi-logger', + bindingValidator: (value): value is LoggerBinding => + value != null + && typeof value === 'object' + && typeof (value as LoggerBinding).createLogger === 'function' + && typeof (value as LoggerBinding).setGlobalLogLevel === 'function' + } + + const mockLoggerBinding: LoggerBinding = { + createLogger: () => ({emit: vi.fn()}), + setGlobalLogLevel: vi.fn() + } + + mockRequire.mockReturnValue(mockLoggerBinding) + + const loader = createNativeBindingLoader(loggerOptions) + const binding = loader() + + expect(typeof binding.createLogger).toBe('function') + expect(typeof binding.setGlobalLogLevel).toBe('function') + }) + + it('simulates script-runtime-like binding with snake_case support', () => { + interface 
ScriptRuntimeBinding { + validatePublicPath?: () => string + resolvePublicPath?: () => string + validate_public_path?: () => string + resolve_public_path?: () => string + } + + const scriptRuntimeOptions: NativeBindingLoaderOptions = { + packageName: '@truenine/script-runtime', + binaryName: 'napi-script-runtime', + cliExportName: 'scriptRuntime', + optionalMethods: { + validatePublicPath: ['validate_public_path'], + resolvePublicPath: ['resolve_public_path'] + }, + bindingValidator: (value): value is ScriptRuntimeBinding => + value != null + && typeof value === 'object' + && ( + typeof (value as ScriptRuntimeBinding).validate_public_path === 'function' + || typeof (value as ScriptRuntimeBinding).validatePublicPath === 'function' + || typeof (value as ScriptRuntimeBinding).resolve_public_path === 'function' + || typeof (value as ScriptRuntimeBinding).resolvePublicPath === 'function' + ) + } + + const mockScriptRuntime: ScriptRuntimeBinding = { + validate_public_path: () => '/valid/path', + resolve_public_path: () => '/resolved/path' + } + + mockRequire.mockReturnValue(mockScriptRuntime) + + const loader = createNativeBindingLoader(scriptRuntimeOptions) + const binding = loader() + + expect(typeof binding.validatePublicPath).toBe('function') + expect(typeof binding.resolvePublicPath).toBe('function') + }) +}) diff --git a/sdk/src/core/native-binding-loader.ts b/sdk/src/core/native-binding-loader.ts new file mode 100644 index 00000000..317c2c64 --- /dev/null +++ b/sdk/src/core/native-binding-loader.ts @@ -0,0 +1,292 @@ +import {readdirSync} from 'node:fs' +import {createRequire} from 'node:module' +import {dirname, join} from 'node:path' +import process from 'node:process' + +export interface PlatformBinding { + readonly local: string + readonly suffix: string +} + +export interface NativeBindingLoaderOptions { + readonly packageName: string + readonly binaryName: string + readonly bindingValidator: (value: unknown) => value is T + readonly packageSuffix?: string + 
readonly optionalMethods?: Record + readonly cliExportName?: string + readonly _requireFactory?: () => ReturnType + readonly _readdirSync?: (packageDir: string) => readonly string[] +} + +interface BindingCache { + binding: T | undefined + error: Error | undefined +} + +const DEFAULT_LOCAL_CANDIDATE_RELATIVE_PATHS = [ + './', + '../', + '../dist/', + '../npm/', + '../../npm/', + '../../../cli/npm/', + '../../../../cli/npm/' +] as const + +const DEFAULT_PACKAGE_DIR_CANDIDATE_RELATIVE_PATHS = [ + '../npm/', + '../../npm/', + '../../../cli/npm/', + '../../../../cli/npm/' +] as const + +function resolvePlatformBindings(binaryName: string): Record { + return { + 'win32-x64': {local: `${binaryName}.win32-x64-msvc`, suffix: 'win32-x64-msvc'}, + 'linux-x64': {local: `${binaryName}.linux-x64-gnu`, suffix: 'linux-x64-gnu'}, + 'linux-arm64': {local: `${binaryName}.linux-arm64-gnu`, suffix: 'linux-arm64-gnu'}, + 'darwin-arm64': {local: `${binaryName}.darwin-arm64`, suffix: 'darwin-arm64'}, + 'darwin-x64': {local: `${binaryName}.darwin-x64`, suffix: 'darwin-x64'} + } +} + +export function getPlatformBinding( + platformBindings: Record, + packageName: string +): PlatformBinding { + const binding = platformBindings[`${process.platform}-${process.arch}`] + if (binding != null) return binding + + throw new Error( + `Unsupported platform for ${packageName} native binding: ${process.platform}-${process.arch}` + ) +} + +export function formatBindingLoadError( + packageName: string, + localError: unknown, + packageError: unknown, + suffix: string +): Error { + const localMessage = localError instanceof Error ? localError.message : String(localError) + const packageMessage = packageError instanceof Error ? 
packageError.message : String(packageError) + return new Error( + [ + `Failed to load ${packageName} native binding.`, + `Tried local binaries next to the source/bundle and package "@truenine/memory-sync-cli-${suffix}".`, + `Local error: ${localMessage}`, + `Package error: ${packageMessage}`, + `Run \`pnpm -F ${packageName} run build\` to build the native module.` + ].join('\n') + ) +} + +export function loadBindingFromDirectory( + runtimeRequire: ReturnType, + packageDir: string, + binaryName: string, + bindingValidator: (value: unknown) => value is T, + readDirectory: (packageDir: string) => readonly string[] = readdirSync +): T | undefined { + const bindingCandidates = [...readDirectory(packageDir)] + .filter(fileName => fileName.startsWith(`${binaryName}.`) && fileName.endsWith('.node')) + .sort() + + for (const candidateFile of bindingCandidates) { + const bindingModule = runtimeRequire(join(packageDir, candidateFile)) as unknown + + if (bindingValidator(bindingModule)) return bindingModule + } + + return void 0 +} + +function resolvePackageDirCandidates( + runtimeRequire: ReturnType, + packageName: string, + suffix: string, + relativePaths: readonly string[] +): string[] { + const cliPackageName = `@truenine/memory-sync-cli-${suffix}` + const packageDirCandidates: string[] = [] + + try { + const packageJsonPath = runtimeRequire.resolve(`${cliPackageName}/package.json`) + packageDirCandidates.push(dirname(packageJsonPath)) + } + catch { + } + + try { + const selfPackageJsonPath = runtimeRequire.resolve(`${packageName}/package.json`) + packageDirCandidates.push(join(dirname(selfPackageJsonPath), 'dist')) + } + catch { + } + + for (const relativePath of relativePaths) { + packageDirCandidates.push(`${relativePath}${suffix}`) + } + + return packageDirCandidates +} + +export function loadBindingFromCliBinaryPackage( + runtimeRequire: ReturnType, + options: NativeBindingLoaderOptions, + suffix: string +): T { + const { + packageName, + binaryName, + 
bindingValidator, + cliExportName, + optionalMethods + } = options + const cliPackageName = `@truenine/memory-sync-cli-${suffix}` + + try { + const cliBinaryPackage = runtimeRequire(cliPackageName) as Record + const exportKey = cliExportName ?? binaryName.replace('napi-', '') + const runtimeBinding = cliBinaryPackage[exportKey] + + if (bindingValidator(runtimeBinding)) return applyOptionalMethods(runtimeBinding, optionalMethods) + } + catch { + } + + let lastError: unknown = new Error(`No compatible ${binaryName} binding was found for ${cliPackageName}`) + + for (const candidateDir of resolvePackageDirCandidates( + runtimeRequire, + packageName, + suffix, + DEFAULT_PACKAGE_DIR_CANDIDATE_RELATIVE_PATHS + )) { + try { + const loaded = loadBindingFromDirectory( + runtimeRequire, + candidateDir, + binaryName, + bindingValidator, + options._readdirSync + ) + if (loaded != null) return applyOptionalMethods(loaded, optionalMethods) + } + catch (error) { + lastError = error + } + } + + if (lastError instanceof Error) throw lastError + throw new Error(`Package "${cliPackageName}" does not export a ${binaryName} binding or contain a compatible native module`) +} + +function applyOptionalMethods(binding: T, optionalMethods?: Record): T { + if (optionalMethods == null || typeof binding !== 'object' || binding == null) return binding + + const bindingRecord = binding as Record + + for (const [preferredMethod, aliases] of Object.entries(optionalMethods)) { + if (bindingRecord[preferredMethod] == null) { + for (const alias of aliases) { + if (typeof bindingRecord[alias] === 'function') { + Object.defineProperty(bindingRecord, preferredMethod, { + value: bindingRecord[alias], + writable: false, + enumerable: true, + configurable: true + }) + break + } + } + } + } + + return binding +} + +function buildLocalCandidatePaths(local: string, suffix: string): string[] { + return DEFAULT_LOCAL_CANDIDATE_RELATIVE_PATHS.map(base => { + if (base.endsWith('/')) { + return 
`${base}${suffix}/${local}.node` + } + return `${base}${local}.node` + }) +} + +export function loadNativeBinding( + options: NativeBindingLoaderOptions +): T { + const { + packageName, + binaryName, + bindingValidator, + packageSuffix, + _requireFactory + } = options + + const runtimeRequire = (_requireFactory ?? (() => createRequire(import.meta.url)))() + const platformBindings = resolvePlatformBindings(binaryName) + const {local, suffix} = getPlatformBinding(platformBindings, packageName) + const effectiveSuffix = packageSuffix ?? suffix + const localCandidates = buildLocalCandidatePaths(local, effectiveSuffix) + + let localError: unknown = new Error(`No local candidate matched "${local}"`) + + for (const candidate of localCandidates) { + try { + const bindingModule = runtimeRequire(candidate) as unknown + if (bindingValidator(bindingModule)) { + return applyOptionalMethods(bindingModule, options.optionalMethods) + } + } + catch (error) { + localError = error + } + } + + try { + return loadBindingFromCliBinaryPackage(runtimeRequire, options, effectiveSuffix) + } + catch (packageError) { + throw formatBindingLoadError(packageName, localError, packageError, effectiveSuffix) + } +} + +function createBindingCache(): BindingCache { + return {binding: void 0, error: void 0} +} + +const loaderCaches = new WeakMap, BindingCache>() + +function getOrCreateCache(options: NativeBindingLoaderOptions): BindingCache { + const existing = loaderCaches.get(options as NativeBindingLoaderOptions) + if (existing != null) return existing as BindingCache + + const cache = createBindingCache() + loaderCaches.set(options as NativeBindingLoaderOptions, cache as BindingCache) + return cache +} + +export function createNativeBindingLoader( + options: NativeBindingLoaderOptions +): () => T { + const cache = getOrCreateCache(options) + + return (): T => { + if (cache.binding != null) return cache.binding + + if (cache.error != null) throw cache.error + + try { + cache.binding = 
loadNativeBinding(options) + return cache.binding + } + catch (error) { + cache.error = error instanceof Error ? error : new Error(String(error)) + throw cache.error + } + } +} diff --git a/sdk/src/core/native-binding.ts b/sdk/src/core/native-binding.ts index deaf2c50..cacccb3f 100644 --- a/sdk/src/core/native-binding.ts +++ b/sdk/src/core/native-binding.ts @@ -29,13 +29,18 @@ export function tryLoadNativeBinding(): T | undefined { const packageName = `@truenine/memory-sync-cli-${suffix}` const binaryFile = `napi-memory-sync-cli.${suffix}.node` const candidates = [ - `${packageName}/${binaryFile}`, `./${binaryFile}`, + `../${binaryFile}`, `../npm/${suffix}/${binaryFile}`, `../../npm/${suffix}/${binaryFile}`, + `../../cli/npm/${suffix}/${binaryFile}`, + `../../../cli/npm/${suffix}/${binaryFile}`, + `${packageName}/${binaryFile}`, packageName, `../npm/${suffix}`, - `../../npm/${suffix}` + `../../npm/${suffix}`, + `../../cli/npm/${suffix}`, + `../../../cli/npm/${suffix}` ] for (const specifier of candidates) { diff --git a/sdk/src/core/plugin_shared.rs b/sdk/src/core/plugin_shared.rs index 957fb472..90fdd0d8 100644 --- a/sdk/src/core/plugin_shared.rs +++ b/sdk/src/core/plugin_shared.rs @@ -20,16 +20,27 @@ pub enum PluginKind { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum PromptKind { + #[serde(alias = "globalMemory")] GlobalMemory, + #[serde(alias = "projectRootMemory")] ProjectRootMemory, + #[serde(alias = "projectChildrenMemory")] ProjectChildrenMemory, + #[serde(alias = "command")] FastCommand, + #[serde(alias = "subAgent")] SubAgent, + #[serde(alias = "skill")] Skill, + #[serde(alias = "skillChildDoc")] SkillChildDoc, + #[serde(alias = "skillResource")] SkillResource, + #[serde(alias = "skillMcpConfig")] SkillMcpConfig, + #[serde(alias = "readme")] Readme, + #[serde(alias = "rule")] Rule, } @@ -50,22 +61,35 @@ pub enum FilePathKind { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum IDEKind { 
+ #[serde(alias = "vscode")] VSCode, + #[serde(alias = "intellijIdea")] IntellijIDEA, + #[serde(alias = "zed")] Zed, + #[serde(alias = "git")] Git, + #[serde(alias = "editorconfig")] EditorConfig, + #[serde(alias = "original")] Original, } #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum NamingCaseKind { + #[serde(alias = "camelCase")] CamelCase, + #[serde(alias = "pascalCase")] PascalCase, + #[serde(alias = "snakeCase")] SnakeCase, + #[serde(alias = "kebabCase")] KebabCase, + #[serde(alias = "upperCase")] UpperCase, + #[serde(alias = "lowerCase")] LowerCase, + #[serde(alias = "original")] Original, } @@ -108,12 +132,13 @@ pub struct DebugLog { pub struct RelativePath { pub path_kind: FilePathKind, pub path: String, + #[serde(default)] pub base_path: String, /// Pre-computed absolute path for serialization to Node.js - #[serde(skip_serializing_if = "Option::is_none")] + #[serde(default, skip_serializing_if = "Option::is_none")] pub absolute_path: Option, /// Pre-computed directory name for serialization to Node.js - #[serde(skip_serializing_if = "Option::is_none")] + #[serde(default, skip_serializing_if = "Option::is_none")] pub directory_name: Option, } @@ -135,6 +160,9 @@ impl RelativePath { pub fn get_absolute_path(&self) -> String { self.absolute_path.clone().unwrap_or_else(|| { + if self.base_path.is_empty() { + return self.path.clone(); + } PathBuf::from(&self.base_path) .join(&self.path) .to_string_lossy() @@ -388,14 +416,23 @@ pub struct SkillResource { #[derive(Debug, Clone, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct McpServerConfig { - pub command: String, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub command: Option, #[serde(default, skip_serializing_if = "Option::is_none")] pub args: Option>, #[serde(default, skip_serializing_if = "Option::is_none")] pub env: Option>, #[serde(default, skip_serializing_if = "Option::is_none")] + pub url: Option, + #[serde(default, 
skip_serializing_if = "Option::is_none")] + pub server_url: Option, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub headers: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] pub disabled: Option, #[serde(default, skip_serializing_if = "Option::is_none")] + pub disabled_tools: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] pub auto_approve: Option>, } @@ -571,10 +608,12 @@ pub struct CollectedInputContext { #[serde(default, skip_serializing_if = "Option::is_none")] pub vscode_config_files: Option>, #[serde(default, skip_serializing_if = "Option::is_none")] + pub zed_config_files: Option>, + #[serde(default, skip_serializing_if = "Option::is_none")] pub jetbrains_config_files: Option>, #[serde(default, skip_serializing_if = "Option::is_none")] pub editor_config_files: Option>, - #[serde(default, skip_serializing_if = "Option::is_none")] + #[serde(default, alias = "commands", skip_serializing_if = "Option::is_none")] pub fast_commands: Option>, #[serde(default, skip_serializing_if = "Option::is_none")] pub sub_agents: Option>, @@ -611,6 +650,22 @@ mod tests { assert_eq!(rp.get_directory_name(), "src/skills"); } + #[test] + fn test_relative_path_accepts_missing_base_path_for_absolute_shapes() { + let parsed: RelativePath = serde_json::from_str( + r#"{ + "pathKind": "absolute", + "path": "/workspace/.vscode/settings.json" + }"#, + ) + .unwrap(); + assert_eq!(parsed.base_path, ""); + assert_eq!( + parsed.get_absolute_path(), + "/workspace/.vscode/settings.json" + ); + } + #[test] fn test_collected_input_context_default() { let ctx = CollectedInputContext::default(); @@ -674,6 +729,90 @@ mod tests { ); } + #[test] + fn test_collected_input_context_accepts_ts_output_shape_aliases() { + let parsed: CollectedInputContext = serde_json::from_str( + r#"{ + "workspace": { + "directory": { + "pathKind": "root", + "path": "/workspace" + }, + "projects": [ + { + "name": "project-a", + "dirFromWorkspacePath": { + 
"pathKind": "relative", + "path": "project-a", + "basePath": "/workspace" + }, + "rootMemoryPrompt": { + "type": "projectRootMemory", + "content": "project root", + "length": 12, + "filePathKind": "relative", + "dir": { + "pathKind": "root", + "path": "" + } + } + } + ] + }, + "commands": [ + { + "type": "command", + "content": "run", + "length": 3, + "dir": { + "pathKind": "relative", + "path": "commands/run.mdx", + "basePath": "/workspace/aindex/dist" + }, + "commandName": "run", + "series": "default", + "yamlFrontMatter": { + "description": "Run command", + "scope": "global", + "namingCase": "kebabCase" + } + } + ] + }"#, + ) + .unwrap(); + + assert_eq!(parsed.fast_commands.as_ref().unwrap().len(), 1); + assert_eq!( + parsed.fast_commands.as_ref().unwrap()[0].prompt_type, + PromptKind::FastCommand + ); + assert_eq!( + parsed.workspace.as_ref().unwrap().projects[0] + .root_memory_prompt + .as_ref() + .unwrap() + .prompt_type, + PromptKind::ProjectRootMemory + ); + assert_eq!( + parsed.fast_commands.as_ref().unwrap()[0] + .yaml_front_matter + .as_ref() + .unwrap() + .scope, + Some(RuleScope::Global) + ); + assert_eq!( + parsed.fast_commands.as_ref().unwrap()[0] + .yaml_front_matter + .as_ref() + .unwrap() + .naming_case, + Some(NamingCaseKind::KebabCase) + ); + } + #[test] fn test_rule_prompt_serialize() { let rule = RulePrompt { diff --git a/sdk/src/index.test.ts b/sdk/src/index.test.ts index 5d225b0a..f192e951 100644 --- a/sdk/src/index.test.ts +++ b/sdk/src/index.test.ts @@ -16,18 +16,17 @@ describe('library entrypoint', () => { }) it('uses the native binding when all methods are present', async () => { - const nativeBinding = { + globalThis.__TNMSC_TEST_NATIVE_BINDING__ = { loadConfig: vi.fn(), install: vi.fn(), dryRun: vi.fn(), clean: vi.fn(), - listAdaptors: vi.fn(), + listPlugins: vi.fn(), listPrompts: vi.fn(), getPrompt: vi.fn(), upsertPromptSource: vi.fn(), writePromptArtifacts: vi.fn() } - globalThis.__TNMSC_TEST_NATIVE_BINDING__ = nativeBinding const 
mod = await import('./index') const binding = mod.getMemorySyncSdkBinding() diff --git a/sdk/src/index.ts b/sdk/src/index.ts index e6ae8aa8..175da1f0 100644 --- a/sdk/src/index.ts +++ b/sdk/src/index.ts @@ -1,7 +1,6 @@ import type {MergedConfigResult} from './ConfigLoader' import type {MemorySyncAdaptorInfo, MemorySyncCommandResult, MemorySyncSdkBinding} from './internal/sdk-binding' import {getNativeBinding} from './core/native-binding' -import {createTsFallbackMemorySyncBinding} from './internal/sdk-binding' type JsonResult = T | Promise @@ -10,6 +9,7 @@ interface NativeJsonCommandBinding { readonly install?: (optionsJson?: string) => JsonResult readonly dryRun?: (optionsJson?: string) => JsonResult readonly clean?: (optionsJson?: string) => JsonResult + readonly listPlugins?: () => JsonResult readonly listAdaptors?: () => JsonResult readonly listPrompts?: (optionsJson?: string) => JsonResult readonly getPrompt?: (promptId: string, optionsJson?: string) => JsonResult @@ -21,6 +21,26 @@ interface NativeMemorySyncSdkBinding extends Partial): boolean { + return typeof value.listAdaptors === 'function' || typeof value.listPlugins === 'function' +} + +function getNativeListAdaptors( + nativeBinding: Required> & NativeJsonCommandBinding +): () => JsonResult { + if (typeof nativeBinding.listAdaptors === 'function') return nativeBinding.listAdaptors + if (typeof nativeBinding.listPlugins === 'function') return nativeBinding.listPlugins + throw new Error('Native memory-sync SDK binding is missing listPlugins/listAdaptors') +} + +function requireNativeCommandBinding(): NativeMemorySyncSdkBinding { + const nativeBinding = getNativeBinding() + if (nativeBinding == null) { + throw new Error('Native memory-sync SDK binding is required. 
Build or install the Rust NAPI package before running tnmsc.') + } + return nativeBinding +} + function isMemorySyncSdkBinding(value: unknown): value is MemorySyncSdkBinding { if (value == null || typeof value !== 'object') return false const candidate = value as Partial @@ -29,7 +49,7 @@ function isMemorySyncSdkBinding(value: unknown): value is MemorySyncSdkBinding { && typeof candidate.install === 'function' && typeof candidate.dryRun === 'function' && typeof candidate.clean === 'function' - && typeof candidate.listAdaptors === 'function' + && hasListAdaptorsMethod(candidate as unknown as Partial) && typeof candidate.listPrompts === 'function' && typeof candidate.getPrompt === 'function' && typeof candidate.upsertPromptSource === 'function' @@ -45,7 +65,7 @@ function hasNativeCommandBinding(value: unknown): value is Required(value: JsonResult): Promise { } function createHybridBinding(nativeBinding: Required): MemorySyncSdkBinding { + const listAdaptors = getNativeListAdaptors(nativeBinding) return { loadConfig: async cwd => parseJsonResult(nativeBinding.loadConfig(cwd)), install: async options => parseJsonResult(nativeBinding.install(options == null ? void 0 : JSON.stringify(options))), dryRun: async options => parseJsonResult(nativeBinding.dryRun(options == null ? void 0 : JSON.stringify(options))), clean: async options => parseJsonResult(nativeBinding.clean(options == null ? void 0 : JSON.stringify(options))), - listAdaptors: async () => parseJsonResult(nativeBinding.listAdaptors()), + listAdaptors: async () => parseJsonResult(listAdaptors()), listPrompts: async options => parseJsonResult(nativeBinding.listPrompts(options == null ? void 0 : JSON.stringify(options))), getPrompt: async (promptId, options) => parseJsonResult(nativeBinding.getPrompt(promptId, options == null ? 
void 0 : JSON.stringify(options))), upsertPromptSource: async input => parseJsonResult(nativeBinding.upsertPromptSource(JSON.stringify(input))), @@ -75,8 +96,7 @@ function createHybridBinding(nativeBinding: Required): export function getMemorySyncSdkBinding(): MemorySyncSdkBinding { if (memorySyncSdkBinding != null) return memorySyncSdkBinding - const nativeBinding = getNativeBinding() - const fallbackBinding = createTsFallbackMemorySyncBinding() + const nativeBinding = requireNativeCommandBinding() if (hasNativeCommandBinding(nativeBinding)) { memorySyncSdkBinding = createHybridBinding(nativeBinding) @@ -88,16 +108,19 @@ export function getMemorySyncSdkBinding(): MemorySyncSdkBinding { return memorySyncSdkBinding } - memorySyncSdkBinding = fallbackBinding - return memorySyncSdkBinding + throw new Error('Native memory-sync SDK binding is missing required command methods.') } export type { MergedConfigResult } from './ConfigLoader' export { - createTsFallbackMemorySyncBinding -} from './internal/sdk-binding' + createNativeBindingLoader +} from './core/native-binding-loader' +export type { + NativeBindingLoaderOptions, + PlatformBinding +} from './core/native-binding-loader' export type { MemorySyncAdaptorInfo, MemorySyncCommandOptions, @@ -106,6 +129,40 @@ export type { MemorySyncSdkBinding, PublicLoggerDiagnosticRecord } from './internal/sdk-binding' +export { + clearBufferedDiagnostics, + createLogger, + drainBufferedDiagnostics, + flushOutput, + getGlobalLogLevel, + setGlobalLogLevel +} from './libraries/logger' +export type { + DiagnosticLines, + ILogger, + LoggerDiagnosticInput, + LoggerDiagnosticLevel, + LoggerDiagnosticRecord, + LogLevel +} from './libraries/logger' +export { + defineProxy, + getProxyModuleConfig, + loadProxyModule, + resolvePublicPath, + resolvePublicPathUnchecked, + validatePublicPath +} from './libraries/script-runtime' +export type { + ProxyCommand, + ProxyContext, + ProxyDefinition, + ProxyMatcherConfig, + ProxyModule, + 
ProxyModuleConfig, + ProxyRouteHandler, + ValidatePublicPathOptions +} from './libraries/script-runtime' export type { ListPromptsOptions, ManagedPromptKind, diff --git a/sdk/src/inputs/AbstractInputCapability.ts b/sdk/src/inputs/AbstractInputCapability.ts index b65f865d..fafceed5 100644 --- a/sdk/src/inputs/AbstractInputCapability.ts +++ b/sdk/src/inputs/AbstractInputCapability.ts @@ -12,13 +12,13 @@ import type { ResolvedBasePaths, YAMLFrontMatter } from '@/adaptors/adaptor-core' - import {spawn} from 'node:child_process' import * as path from 'node:path' -import {createLogger} from '@truenine/logger' + import {parseMarkdown} from '@truenine/md-compiler/markdown' import {PathPlaceholders} from '@/adaptors/adaptor-core' import {buildDiagnostic, diagnosticLines} from '@/diagnostics' +import {createLogger} from '@/libraries/logger' import {logProtectedDeletionGuardError, ProtectedDeletionGuardError} from '@/ProtectedDeletionGuard' import {resolveUserPath} from '@/runtime-environment' @@ -31,9 +31,9 @@ export abstract class AbstractInputCapability implements InputCapability { readonly dependsOn?: readonly string[] - private _log?: import('@truenine/logger').ILogger + private _log?: import('@/libraries/logger').ILogger - get log(): import('@truenine/logger').ILogger { + get log(): import('@/libraries/logger').ILogger { this._log ??= createLogger(this.name) return this._log } @@ -85,8 +85,7 @@ export abstract class AbstractInputCapability implements InputCapability { this.log.debug({action: 'inputEffect', name: effect.name, deletedFileCount: result.deletedFiles.length}) } } else { - const error = result.error ?? new Error(`Input effect failed: ${effect.name}`) - throw error + throw result.error ?? 
new Error(`Input effect failed: ${effect.name}`) } results.push(result) } diff --git a/sdk/src/inputs/NativeInputCapability.ts b/sdk/src/inputs/NativeInputCapability.ts index c114a2a3..f6be12de 100644 --- a/sdk/src/inputs/NativeInputCapability.ts +++ b/sdk/src/inputs/NativeInputCapability.ts @@ -1,6 +1,7 @@ import type {InputCapabilityContext, InputCollectedContext} from '../adaptors/adaptor-core' import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' +import {parseNativeInputResult} from './native-result' export class NativeInputCapability extends AbstractInputCapability { constructor( @@ -16,7 +17,7 @@ export class NativeInputCapability extends AbstractInputCapability { const fn = native?.[this.nativeMethodName] if (fn != null) { const result = fn(JSON.stringify(ctx.userConfigOptions)) - return JSON.parse(result) as Partial + return parseNativeInputResult>(result) } throw new Error(`Native binding ${this.nativeMethodName} is not available`) } diff --git a/sdk/src/inputs/input-agentskills.test.ts b/sdk/src/inputs/input-agentskills.test.ts index b2f4b8b9..76424253 100644 --- a/sdk/src/inputs/input-agentskills.test.ts +++ b/sdk/src/inputs/input-agentskills.test.ts @@ -96,6 +96,39 @@ describe('skill input plugin', () => { } }) + it('accepts remote MCP server definitions without a local command', async () => { + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-remote-mcp-test-')) + const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') + const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') + + try { + fs.mkdirSync(srcSkillDir, {recursive: true}) + fs.mkdirSync(distSkillDir, {recursive: true}) + fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: src skill\n---\nSkill source', 'utf8') + fs.writeFileSync(path.join(srcSkillDir, 'mcp.json'), JSON.stringify({ + mcpServers: { + figma: { + url: 
'https://mcp.figma.com/mcp', + disabled: false, + disabledTools: [] + } + } + }), 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), '---\ndescription: dist skill\n---\nSkill dist', 'utf8') + + const plugin = new SkillInputCapability() + const result = await plugin.collect(createContext(tempWorkspace, createMockLogger())) + const [skill] = result.skills ?? [] + + expect(skill?.mcpConfig?.mcpServers.figma?.url).toBe('https://mcp.figma.com/mcp') + expect(skill?.mcpConfig?.mcpServers.figma?.disabled).toBe(false) + expect(skill?.mcpConfig?.mcpServers.figma?.disabledTools).toEqual([]) + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) + it('fails hard when child docs are missing compiled dist pairs', async () => { const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-missing-child-test-')) const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') @@ -176,4 +209,39 @@ describe('skill input plugin', () => { fs.rmSync(tempWorkspace, {recursive: true, force: true}) } }) + + it('warns and skips MCP servers that define neither command nor url', async () => { + const warnings: string[] = [] + const tempWorkspace = fs.mkdtempSync(path.join(os.tmpdir(), 'tnmsc-skill-input-mcp-warning-test-')) + const srcSkillDir = path.join(tempWorkspace, 'aindex', 'skills', 'demo') + const distSkillDir = path.join(tempWorkspace, 'aindex', 'dist', 'skills', 'demo') + + try { + fs.mkdirSync(srcSkillDir, {recursive: true}) + fs.mkdirSync(distSkillDir, {recursive: true}) + fs.writeFileSync(path.join(srcSkillDir, 'skill.src.mdx'), '---\ndescription: src skill\n---\nSkill source', 'utf8') + fs.writeFileSync(path.join(srcSkillDir, 'mcp.json'), JSON.stringify({ + mcpServers: { + broken: { + disabled: false + }, + demo: { + command: 'demo' + } + } + }), 'utf8') + fs.writeFileSync(path.join(distSkillDir, 'skill.mdx'), '---\ndescription: dist skill\n---\nSkill dist', 'utf8') + + const plugin = new SkillInputCapability() 
+ const result = await plugin.collect(createContext(tempWorkspace, createMockLogger(warnings))) + const [skill] = result.skills ?? [] + + expect(skill?.mcpConfig?.mcpServers.broken).toBeUndefined() + expect(skill?.mcpConfig?.mcpServers.demo?.command).toBe('demo') + expect(warnings).toContain('SKILL_MCP_SERVER_SKIPPED') + } + finally { + fs.rmSync(tempWorkspace, {recursive: true, force: true}) + } + }) }) diff --git a/sdk/src/inputs/input-agentskills.ts b/sdk/src/inputs/input-agentskills.ts index d15c1a02..56f85f9a 100644 --- a/sdk/src/inputs/input-agentskills.ts +++ b/sdk/src/inputs/input-agentskills.ts @@ -2,6 +2,7 @@ import type {InputCapabilityContext, InputCollectedContext} from '../adaptors/ad import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' +import {parseLoggedNativeInputResult} from './native-result' interface NativeSkillResult extends InputCollectedContext { diagnostics?: {level: string, code: string, title: string, exactFix?: string[]}[] @@ -21,29 +22,7 @@ export class SkillInputCapability extends AbstractInputCapability { globalScope: ctx.globalScope } const result = native.collectSkill(JSON.stringify(payload)) - const parsed = JSON.parse(result) as NativeSkillResult - if (parsed.diagnostics != null) { - for (const diagnostic of parsed.diagnostics) { - const input = { - code: diagnostic.code, - title: diagnostic.title, - rootCause: [diagnostic.title] as const, - ...diagnostic.exactFix != null && diagnostic.exactFix.length > 0 - ? 
{exactFix: diagnostic.exactFix as [string, ...string[]]} - : {} - } - if (diagnostic.level === 'warn') { - ctx.logger.warn(input) - } else if (diagnostic.level === 'error') { - ctx.logger.error(input) - } - } - } - if (parsed.debugLogs != null) { - for (const log of parsed.debugLogs) { - ctx.logger.debug(log.message, log.payload) - } - } + const parsed = parseLoggedNativeInputResult(ctx.logger, result) return parsed as Partial } diff --git a/sdk/src/inputs/input-aindex.ts b/sdk/src/inputs/input-aindex.ts index a7f350d4..8e33fe5f 100644 --- a/sdk/src/inputs/input-aindex.ts +++ b/sdk/src/inputs/input-aindex.ts @@ -1,6 +1,7 @@ import type {InputCapabilityContext, InputCollectedContext} from '../adaptors/adaptor-core' import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' +import {parseLoggedNativeInputResult} from './native-result' interface NativeAindexResult extends InputCollectedContext { diagnostics?: {level: string, code: string, title: string, exactFix?: string[]}[] @@ -29,29 +30,7 @@ export class AindexInputCapability extends AbstractInputCapability { const payload = {...ctx.userConfigOptions} try { const result = native.collectAindex(JSON.stringify(payload)) - const parsed = JSON.parse(result) as NativeAindexResult - if (parsed.diagnostics != null) { - for (const diagnostic of parsed.diagnostics) { - const input = { - code: diagnostic.code, - title: diagnostic.title, - rootCause: [diagnostic.title] as const, - ...diagnostic.exactFix != null && diagnostic.exactFix.length > 0 - ? 
{exactFix: diagnostic.exactFix as [string, ...string[]]} - : {} - } - if (diagnostic.level === 'warn') { - ctx.logger.warn(input) - } else if (diagnostic.level === 'error') { - ctx.logger.error(input) - } - } - } - if (parsed.debugLogs != null) { - for (const log of parsed.debugLogs) { - ctx.logger.debug(log.message, log.payload) - } - } + const parsed = parseLoggedNativeInputResult(ctx.logger, result) return parsed as Partial } catch (err: unknown) { const message = getErrorMessage(err) diff --git a/sdk/src/inputs/input-command.ts b/sdk/src/inputs/input-command.ts index 1c84548a..faa68223 100644 --- a/sdk/src/inputs/input-command.ts +++ b/sdk/src/inputs/input-command.ts @@ -1,6 +1,7 @@ import type {InputCapabilityContext, InputCollectedContext} from '../adaptors/adaptor-core' import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' +import {parseNativeInputResult} from './native-result' export class CommandInputCapability extends AbstractInputCapability { constructor() { @@ -12,7 +13,7 @@ export class CommandInputCapability extends AbstractInputCapability { if (native?.collectCommand != null) { const payload = {...ctx.userConfigOptions, globalScope: ctx.globalScope} const result = native.collectCommand(JSON.stringify(payload)) - return JSON.parse(result) as Partial + return parseNativeInputResult>(result) } throw new Error('Native collectCommand binding is not available') diff --git a/sdk/src/inputs/input-editorconfig.ts b/sdk/src/inputs/input-editorconfig.ts index d35bba1a..fab9630e 100644 --- a/sdk/src/inputs/input-editorconfig.ts +++ b/sdk/src/inputs/input-editorconfig.ts @@ -5,6 +5,7 @@ import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' import {IDEKind} from '../adaptors/adaptor-core/enums' import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' +import {parseNativeInputResult} from './native-result' export class 
EditorConfigInputCapability extends AbstractInputCapability { constructor() { @@ -17,14 +18,17 @@ export class EditorConfigInputCapability extends AbstractInputCapability { const proxyFilePath = path.join(aindexDir, 'public', 'proxy.ts') if (fs.existsSync(proxyFilePath)) { - const file = readPublicIdeConfigDefinitionFile(IDEKind.EditorConfig, '.editorconfig', aindexDir, fs, {workspaceDir: ctx.userConfigOptions.workspaceDir}) + const file = readPublicIdeConfigDefinitionFile(IDEKind.EditorConfig, '.editorconfig', aindexDir, fs, { + workspaceDir: ctx.userConfigOptions.workspaceDir, + command: ctx.runtimeCommand + }) return {editorConfigFiles: file != null ? [file] : void 0} as Partial } const native = getNativeBinding<{collectEditorconfig?: (optionsJson: string) => string}>() if (native?.collectEditorconfig != null) { const result = native.collectEditorconfig(JSON.stringify(ctx.userConfigOptions)) - return JSON.parse(result) as Partial + return parseNativeInputResult>(result) } throw new Error('Native collectEditorconfig binding is not available') diff --git a/sdk/src/inputs/input-git-exclude.ts b/sdk/src/inputs/input-git-exclude.ts index 8be9edb9..8cbe48e8 100644 --- a/sdk/src/inputs/input-git-exclude.ts +++ b/sdk/src/inputs/input-git-exclude.ts @@ -4,6 +4,7 @@ import * as path from 'node:path' import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' import {resolvePublicDefinitionPath} from '../public-config-paths' +import {parseNativeInputResult} from './native-result' export class GitExcludeInputCapability extends AbstractInputCapability { constructor() { @@ -16,7 +17,10 @@ export class GitExcludeInputCapability extends AbstractInputCapability { const proxyFilePath = path.join(aindexDir, 'public', 'proxy.ts') if (fs.existsSync(proxyFilePath)) { - const resolvedPath = resolvePublicDefinitionPath(aindexDir, '.git/info/exclude', {workspaceDir: ctx.userConfigOptions.workspaceDir}) + const resolvedPath = 
resolvePublicDefinitionPath(aindexDir, '.git/info/exclude', { + workspaceDir: ctx.userConfigOptions.workspaceDir, + command: ctx.runtimeCommand + }) if (fs.existsSync(resolvedPath)) { const content = fs.readFileSync(resolvedPath, 'utf8') return {shadowGitExclude: content || void 0} as Partial @@ -27,7 +31,7 @@ export class GitExcludeInputCapability extends AbstractInputCapability { const native = getNativeBinding<{collectGitExclude?: (optionsJson: string) => string}>() if (native?.collectGitExclude != null) { const result = native.collectGitExclude(JSON.stringify(ctx.userConfigOptions)) - return JSON.parse(result) as Partial + return parseNativeInputResult>(result) } throw new Error('Native collectGitExclude binding is not available') diff --git a/sdk/src/inputs/input-gitignore.ts b/sdk/src/inputs/input-gitignore.ts index e977ca28..e803bc50 100644 --- a/sdk/src/inputs/input-gitignore.ts +++ b/sdk/src/inputs/input-gitignore.ts @@ -4,6 +4,7 @@ import * as path from 'node:path' import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' import {resolvePublicDefinitionPath} from '../public-config-paths' +import {parseNativeInputResult} from './native-result' export class GitIgnoreInputCapability extends AbstractInputCapability { constructor() { @@ -16,7 +17,10 @@ export class GitIgnoreInputCapability extends AbstractInputCapability { const proxyFilePath = path.join(aindexDir, 'public', 'proxy.ts') if (fs.existsSync(proxyFilePath)) { - const resolvedPath = resolvePublicDefinitionPath(aindexDir, '.gitignore', {workspaceDir: ctx.userConfigOptions.workspaceDir}) + const resolvedPath = resolvePublicDefinitionPath(aindexDir, '.gitignore', { + workspaceDir: ctx.userConfigOptions.workspaceDir, + command: ctx.runtimeCommand + }) if (fs.existsSync(resolvedPath)) { const content = fs.readFileSync(resolvedPath, 'utf8') return {globalGitIgnore: content || void 0} as Partial @@ -27,7 +31,7 @@ export class 
GitIgnoreInputCapability extends AbstractInputCapability { const native = getNativeBinding<{collectGitignore?: (optionsJson: string) => string}>() if (native?.collectGitignore != null) { const result = native.collectGitignore(JSON.stringify(ctx.userConfigOptions)) - return JSON.parse(result) as Partial + return parseNativeInputResult>(result) } throw new Error('Native collectGitignore binding is not available') diff --git a/sdk/src/inputs/input-jetbrains-config.ts b/sdk/src/inputs/input-jetbrains-config.ts index 9b7c3969..01798585 100644 --- a/sdk/src/inputs/input-jetbrains-config.ts +++ b/sdk/src/inputs/input-jetbrains-config.ts @@ -5,6 +5,7 @@ import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' import {IDEKind} from '../adaptors/adaptor-core/enums' import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' +import {parseNativeInputResult} from './native-result' export class JetBrainsConfigInputCapability extends AbstractInputCapability { constructor() { @@ -20,7 +21,10 @@ export class JetBrainsConfigInputCapability extends AbstractInputCapability { const files: NonNullable>[] = [] const paths = ['.idea/codeStyles/Project.xml', '.idea/codeStyles/codeStyleConfig.xml', '.idea/.gitignore'] for (const p of paths) { - const file = readPublicIdeConfigDefinitionFile(IDEKind.IntellijIDEA, p, aindexDir, fs, {workspaceDir: ctx.userConfigOptions.workspaceDir}) + const file = readPublicIdeConfigDefinitionFile(IDEKind.IntellijIDEA, p, aindexDir, fs, { + workspaceDir: ctx.userConfigOptions.workspaceDir, + command: ctx.runtimeCommand + }) if (file != null) files.push(file) } return {jetbrainsConfigFiles: files.length > 0 ? 
files : void 0} as Partial @@ -29,7 +33,7 @@ export class JetBrainsConfigInputCapability extends AbstractInputCapability { const native = getNativeBinding<{collectJetBrainsConfig?: (optionsJson: string) => string}>() if (native?.collectJetBrainsConfig != null) { const result = native.collectJetBrainsConfig(JSON.stringify(ctx.userConfigOptions)) - return JSON.parse(result) as Partial + return parseNativeInputResult>(result) } throw new Error('Native collectJetBrainsConfig binding is not available') diff --git a/sdk/src/inputs/input-project-prompt.ts b/sdk/src/inputs/input-project-prompt.ts index 0eeb641c..06f120f4 100644 --- a/sdk/src/inputs/input-project-prompt.ts +++ b/sdk/src/inputs/input-project-prompt.ts @@ -1,6 +1,7 @@ import type {InputCapabilityContext, InputCollectedContext} from '../adaptors/adaptor-core' import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' +import {parseNativeInputResult} from './native-result' export class ProjectPromptInputCapability extends AbstractInputCapability { constructor() { @@ -16,7 +17,7 @@ export class ProjectPromptInputCapability extends AbstractInputCapability { workspace: ctx.dependencyContext.workspace } const result = native.collectProjectPrompt(JSON.stringify(payload)) - return JSON.parse(result) as Partial + return parseNativeInputResult>(result) } throw new Error('Native collectProjectPrompt binding is not available') diff --git a/sdk/src/inputs/input-readme.ts b/sdk/src/inputs/input-readme.ts index bc4b41b3..a02e5ecc 100644 --- a/sdk/src/inputs/input-readme.ts +++ b/sdk/src/inputs/input-readme.ts @@ -1,6 +1,7 @@ import type {InputCapabilityContext, InputCollectedContext} from '../adaptors/adaptor-core' import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' +import {parseNativeInputResult} from './native-result' function deriveErrorCode(message: string): string | void { if 
(message.includes('Readme project series name conflict')) { @@ -23,7 +24,7 @@ export class ReadmeMdInputCapability extends AbstractInputCapability { if (native?.collectReadme != null) { try { const result = native.collectReadme(JSON.stringify(ctx.userConfigOptions)) - return JSON.parse(result) as Partial + return parseNativeInputResult>(result) } catch (err: unknown) { const message = getErrorMessage(err) const code = deriveErrorCode(message) diff --git a/sdk/src/inputs/input-rule.ts b/sdk/src/inputs/input-rule.ts index 23addecd..1878a485 100644 --- a/sdk/src/inputs/input-rule.ts +++ b/sdk/src/inputs/input-rule.ts @@ -1,6 +1,7 @@ import type {InputCapabilityContext, InputCollectedContext} from '../adaptors/adaptor-core' import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' +import {parseNativeInputResult} from './native-result' export class RuleInputCapability extends AbstractInputCapability { constructor() { @@ -12,7 +13,7 @@ export class RuleInputCapability extends AbstractInputCapability { if (native?.collectRule != null) { const payload = {...ctx.userConfigOptions, globalScope: ctx.globalScope} const result = native.collectRule(JSON.stringify(payload)) - return JSON.parse(result) as Partial + return parseNativeInputResult>(result) } throw new Error('Native collectRule binding is not available') diff --git a/sdk/src/inputs/input-shared-ignore.ts b/sdk/src/inputs/input-shared-ignore.ts index 9a6624b9..64e15696 100644 --- a/sdk/src/inputs/input-shared-ignore.ts +++ b/sdk/src/inputs/input-shared-ignore.ts @@ -7,6 +7,7 @@ import { AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS, resolvePublicDefinitionPath } from '../public-config-paths' +import {parseNativeInputResult} from './native-result' export class AIAgentIgnoreInputCapability extends AbstractInputCapability { constructor() { @@ -21,7 +22,10 @@ export class AIAgentIgnoreInputCapability extends AbstractInputCapability { if (fs.existsSync(proxyFilePath)) { 
const results: {fileName: string, content: string, sourcePath: string}[] = [] for (const fileName of AI_AGENT_IGNORE_TARGET_RELATIVE_PATHS) { - const resolvedPath = resolvePublicDefinitionPath(aindexDir, fileName, {workspaceDir: ctx.userConfigOptions.workspaceDir}) + const resolvedPath = resolvePublicDefinitionPath(aindexDir, fileName, { + workspaceDir: ctx.userConfigOptions.workspaceDir, + command: ctx.runtimeCommand + }) if (fs.existsSync(resolvedPath)) { const content = fs.readFileSync(resolvedPath, 'utf8') if (content.length > 0) { @@ -35,7 +39,7 @@ export class AIAgentIgnoreInputCapability extends AbstractInputCapability { const native = getNativeBinding<{collectSharedIgnore?: (optionsJson: string) => string}>() if (native?.collectSharedIgnore != null) { const result = native.collectSharedIgnore(JSON.stringify(ctx.userConfigOptions)) - return JSON.parse(result) as Partial + return parseNativeInputResult>(result) } throw new Error('Native collectSharedIgnore binding is not available') diff --git a/sdk/src/inputs/input-subagent.ts b/sdk/src/inputs/input-subagent.ts index f6d0e609..f9315fca 100644 --- a/sdk/src/inputs/input-subagent.ts +++ b/sdk/src/inputs/input-subagent.ts @@ -1,6 +1,7 @@ import type {InputCapabilityContext, InputCollectedContext} from '../adaptors/adaptor-core' import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' +import {parseLoggedNativeInputResult} from './native-result' interface NativeSubAgentResult extends InputCollectedContext { diagnostics?: {level: string, code: string, title: string, exactFix?: string[]}[] @@ -17,29 +18,7 @@ export class SubAgentInputCapability extends AbstractInputCapability { if (native?.collectSubAgent != null) { const payload = {...ctx.userConfigOptions, globalScope: ctx.globalScope} const result = native.collectSubAgent(JSON.stringify(payload)) - const parsed = JSON.parse(result) as NativeSubAgentResult - if (parsed.diagnostics != null) { - for 
(const diagnostic of parsed.diagnostics) { - const input = { - code: diagnostic.code, - title: diagnostic.title, - rootCause: [diagnostic.title] as const, - ...diagnostic.exactFix != null && diagnostic.exactFix.length > 0 - ? {exactFix: diagnostic.exactFix as [string, ...string[]]} - : {} - } - if (diagnostic.level === 'warn') { - ctx.logger.warn(input) - } else if (diagnostic.level === 'error') { - ctx.logger.error(input) - } - } - } - if (parsed.debugLogs != null) { - for (const log of parsed.debugLogs) { - ctx.logger.debug(log.message, log.payload) - } - } + const parsed = parseLoggedNativeInputResult(ctx.logger, result) return parsed as Partial } diff --git a/sdk/src/inputs/input-vscode-config.ts b/sdk/src/inputs/input-vscode-config.ts index d0b97386..d042eb4d 100644 --- a/sdk/src/inputs/input-vscode-config.ts +++ b/sdk/src/inputs/input-vscode-config.ts @@ -5,6 +5,7 @@ import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' import {IDEKind} from '../adaptors/adaptor-core/enums' import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' +import {parseNativeInputResult} from './native-result' export class VSCodeConfigInputCapability extends AbstractInputCapability { constructor() { @@ -20,7 +21,10 @@ export class VSCodeConfigInputCapability extends AbstractInputCapability { const files: NonNullable>[] = [] const paths = ['.vscode/settings.json', '.vscode/extensions.json'] for (const p of paths) { - const file = readPublicIdeConfigDefinitionFile(IDEKind.VSCode, p, aindexDir, fs, {workspaceDir: ctx.userConfigOptions.workspaceDir}) + const file = readPublicIdeConfigDefinitionFile(IDEKind.VSCode, p, aindexDir, fs, { + workspaceDir: ctx.userConfigOptions.workspaceDir, + command: ctx.runtimeCommand + }) if (file != null) files.push(file) } return {vscodeConfigFiles: files.length > 0 ? 
files : void 0} as Partial @@ -29,7 +33,7 @@ export class VSCodeConfigInputCapability extends AbstractInputCapability { const native = getNativeBinding<{collectVSCodeConfig?: (optionsJson: string) => string}>() if (native?.collectVSCodeConfig != null) { const result = native.collectVSCodeConfig(JSON.stringify(ctx.userConfigOptions)) - return JSON.parse(result) as Partial + return parseNativeInputResult>(result) } throw new Error('Native collectVSCodeConfig binding is not available') diff --git a/sdk/src/inputs/input-zed-config.ts b/sdk/src/inputs/input-zed-config.ts index 76f1d7a4..92bcfcb7 100644 --- a/sdk/src/inputs/input-zed-config.ts +++ b/sdk/src/inputs/input-zed-config.ts @@ -5,6 +5,7 @@ import {getNativeBinding} from '@/core/native-binding' import {AbstractInputCapability} from '../adaptors/adaptor-core' import {IDEKind} from '../adaptors/adaptor-core/enums' import {readPublicIdeConfigDefinitionFile} from '../public-config-paths' +import {parseNativeInputResult} from './native-result' export class ZedConfigInputCapability extends AbstractInputCapability { constructor() { @@ -17,14 +18,17 @@ export class ZedConfigInputCapability extends AbstractInputCapability { const proxyFilePath = path.join(aindexDir, 'public', 'proxy.ts') if (fs.existsSync(proxyFilePath)) { - const file = readPublicIdeConfigDefinitionFile(IDEKind.Zed, '.zed/settings.json', aindexDir, fs, {workspaceDir: ctx.userConfigOptions.workspaceDir}) + const file = readPublicIdeConfigDefinitionFile(IDEKind.Zed, '.zed/settings.json', aindexDir, fs, { + workspaceDir: ctx.userConfigOptions.workspaceDir, + command: ctx.runtimeCommand + }) return {zedConfigFiles: file != null ? 
[file] : void 0} as Partial } const native = getNativeBinding<{collectZedConfig?: (optionsJson: string) => string}>() if (native?.collectZedConfig != null) { const result = native.collectZedConfig(JSON.stringify(ctx.userConfigOptions)) - return JSON.parse(result) as Partial + return parseNativeInputResult>(result) } throw new Error('Native collectZedConfig binding is not available') diff --git a/sdk/src/inputs/native-result.ts b/sdk/src/inputs/native-result.ts new file mode 100644 index 00000000..60da5397 --- /dev/null +++ b/sdk/src/inputs/native-result.ts @@ -0,0 +1,252 @@ +import type {InputCollectedContext} from '@/adaptors/adaptor-core/InputTypes' +import type {ILogger} from '@/libraries/logger' +import {IDEKind, NamingCaseKind, PromptKind} from '@/adaptors/adaptor-core/enums' + +interface NativeDiagnostic { + readonly level: string + readonly code: string + readonly title: string + readonly exactFix?: readonly string[] +} + +interface NativeDebugLog { + readonly message: string + readonly payload?: unknown +} + +type NativeLoggedResult = Partial & { + readonly diagnostics?: readonly NativeDiagnostic[] + readonly debugLogs?: readonly NativeDebugLog[] +} + +interface PathLike { + readonly path: string + readonly pathKind?: string + readonly basePath?: string + readonly absolutePath?: string + readonly directoryName?: string + getDirectoryName?: () => string + getAbsolutePath?: () => string +} + +const PROMPT_KIND_MAP: Readonly> = { + FastCommand: PromptKind.Command, + GlobalMemory: PromptKind.GlobalMemory, + ProjectChildrenMemory: PromptKind.ProjectChildrenMemory, + ProjectRootMemory: PromptKind.ProjectRootMemory, + Readme: PromptKind.Readme, + Rule: PromptKind.Rule, + Skill: PromptKind.Skill, + SkillChildDoc: PromptKind.SkillChildDoc, + SkillMcpConfig: PromptKind.SkillMcpConfig, + SkillResource: PromptKind.SkillResource, + SubAgent: PromptKind.SubAgent +} + +const IDE_KIND_MAP: Readonly> = { + EditorConfig: IDEKind.EditorConfig, + Git: IDEKind.Git, + 
IntellijIDEA: IDEKind.IntellijIDEA, + Original: IDEKind.Original, + VSCode: IDEKind.VSCode, + Zed: IDEKind.Zed +} + +const NAMING_CASE_MAP: Readonly> = { + CamelCase: NamingCaseKind.CamelCase, + KebabCase: NamingCaseKind.KebabCase, + LowerCase: NamingCaseKind.LowerCase, + Original: NamingCaseKind.Original, + PascalCase: NamingCaseKind.PascalCase, + SnakeCase: NamingCaseKind.SnakeCase, + UpperCase: NamingCaseKind.UpperCase +} + +function isRecord(value: unknown): value is Record { + return value != null && typeof value === 'object' && !Array.isArray(value) +} + +function hasPathShape(value: unknown): value is PathLike { + return isRecord(value) && typeof value['path'] === 'string' +} + +function ensurePathHelpers(pathLike: PathLike): void { + pathLike.getDirectoryName ??= () => { + if (typeof pathLike.directoryName === 'string') return pathLike.directoryName + + const normalizedPath = pathLike.path.replaceAll('\\', '/').replaceAll(/\/+$/gu, '') + const slashIndex = normalizedPath.lastIndexOf('/') + return slashIndex === -1 ? '' : normalizedPath.slice(0, slashIndex) + } + + if (pathLike.basePath != null && pathLike.getAbsolutePath == null) { + pathLike.getAbsolutePath = () => { + if (typeof pathLike.absolutePath === 'string') return pathLike.absolutePath + return [pathLike.basePath, pathLike.path] + .filter(segment => segment != null && segment.length > 0) + .join('/') + .replaceAll(/\/+/gu, '/') + } + } +} + +function normalizeEnumValue( + input: unknown, + mapping: Readonly> +): T | unknown { + if (typeof input !== 'string') return input + return mapping[input] ?? 
input +} + +function normalizeFrontMatter(frontMatter: unknown): void { + if (!isRecord(frontMatter)) return + + if ('namingCase' in frontMatter) { + frontMatter['namingCase'] = normalizeEnumValue(frontMatter['namingCase'], NAMING_CASE_MAP) + } +} + +function normalizePromptLike(value: Record): void { + if ('type' in value) { + value['type'] = normalizeEnumValue(value['type'], PROMPT_KIND_MAP) + } + + if ('dir' in value && hasPathShape(value['dir'])) { + ensurePathHelpers(value['dir']) + } + + if ('workingChildDirectoryPath' in value && hasPathShape(value['workingChildDirectoryPath'])) { + ensurePathHelpers(value['workingChildDirectoryPath']) + } + + if ('yamlFrontMatter' in value) { + normalizeFrontMatter(value['yamlFrontMatter']) + } + + if ('rawFrontMatter' in value && value['rawFrontMatter'] == null && typeof value['rawMdxContent'] === 'string') { + value['rawFrontMatter'] = void 0 + } + + if (!('commandPrefix' in value) && typeof value['series'] === 'string') { + value['commandPrefix'] = value['series'] + } + + if (!('prefix' in value) && typeof value['series'] === 'string') { + value['prefix'] = value['series'] + } + + if ('childDocs' in value && Array.isArray(value['childDocs'])) { + for (const childDoc of value['childDocs']) { + if (!isRecord(childDoc)) continue + normalizePromptLike(childDoc) + if (typeof childDoc['fileName'] !== 'string' && typeof childDoc['relativePath'] === 'string') { + const normalizedPath = childDoc['relativePath'].replaceAll('\\', '/') + childDoc['fileName'] = normalizedPath.split('/').at(-1) ?? 
normalizedPath + } + } + } + + if ('resources' in value && Array.isArray(value['resources'])) { + for (const resource of value['resources']) { + if (!isRecord(resource)) continue + if ('type' in resource) { + resource['type'] = normalizeEnumValue(resource['type'], PROMPT_KIND_MAP) + } + } + } + + if ('mcpConfig' in value && isRecord(value['mcpConfig']) && 'type' in value['mcpConfig']) { + value['mcpConfig']['type'] = normalizeEnumValue(value['mcpConfig']['type'], PROMPT_KIND_MAP) + } +} + +function normalizeWorkspace(value: Record): void { + const {directory, projects} = value + if (hasPathShape(directory)) ensurePathHelpers(directory) + + if (!Array.isArray(projects)) return + + for (const project of projects) { + if (!isRecord(project)) continue + const relativePath = project['dirFromWorkspacePath'] + if (hasPathShape(relativePath)) ensurePathHelpers(relativePath) + } +} + +function normalizeIdeConfig(value: Record): void { + if ('type' in value) { + value['type'] = normalizeEnumValue(value['type'], IDE_KIND_MAP) + } + + if ('dir' in value && hasPathShape(value['dir'])) { + ensurePathHelpers(value['dir']) + } +} + +function normalizeCollectedContextShape(parsed: NativeLoggedResult): NativeLoggedResult { + if (parsed.workspace != null && isRecord(parsed.workspace)) { + normalizeWorkspace(parsed.workspace) + } + + for (const key of ['vscodeConfigFiles', 'zedConfigFiles', 'jetbrainsConfigFiles', 'editorConfigFiles'] as const) { + const collection = parsed[key] + if (!Array.isArray(collection)) continue + for (const entry of collection) { + if (isRecord(entry)) normalizeIdeConfig(entry) + } + } + + for (const key of ['commands', 'subAgents', 'skills', 'rules', 'readmePrompts'] as const) { + const collection = parsed[key] + if (!Array.isArray(collection)) continue + for (const entry of collection) { + if (isRecord(entry)) normalizePromptLike(entry) + } + } + + if (parsed.globalMemory != null && isRecord(parsed.globalMemory)) { + 
normalizePromptLike(parsed.globalMemory) + } + + return parsed +} + +function logNativeDiagnostics(logger: ILogger, parsed: NativeLoggedResult): void { + if (parsed.diagnostics != null) { + for (const diagnostic of parsed.diagnostics) { + const input = { + code: diagnostic.code, + title: diagnostic.title, + rootCause: [diagnostic.title] as const, + ...diagnostic.exactFix != null && diagnostic.exactFix.length > 0 + ? {exactFix: diagnostic.exactFix as [string, ...string[]]} + : {} + } + if (diagnostic.level === 'warn') { + logger.warn(input) + } else if (diagnostic.level === 'error') { + logger.error(input) + } + } + } + + if (parsed.debugLogs != null) { + for (const log of parsed.debugLogs) { + logger.debug(log.message, log.payload) + } + } +} + +export function parseNativeInputResult>(result: string): T { + const parsed = JSON.parse(result) as NativeLoggedResult + return normalizeCollectedContextShape(parsed) as T +} + +export function parseLoggedNativeInputResult>( + logger: ILogger, + result: string +): T { + const parsed = parseNativeInputResult(result) as NativeLoggedResult + logNativeDiagnostics(logger, parsed) + return parsed as T +} diff --git a/sdk/src/inputs/runtime.ts b/sdk/src/inputs/runtime.ts index 682b1aaf..0a087a70 100644 --- a/sdk/src/inputs/runtime.ts +++ b/sdk/src/inputs/runtime.ts @@ -4,7 +4,6 @@ import type {RuntimeCommand} from '@/runtime-command' import * as fs from 'node:fs' import * as path from 'node:path' -import {createLogger} from '@truenine/logger' import glob from 'fast-glob' import {GlobalScopeCollector, ScopePriority, ScopeRegistry} from '@/adaptors/adaptor-core/GlobalScopeCollector' import { @@ -27,6 +26,7 @@ import { VSCodeConfigInputCapability, ZedConfigInputCapability } from '@/inputs' +import {createLogger} from '@/libraries/logger' import {buildDependencyContext, mergeContexts} from '@/pipeline/ContextMerger' import {topologicalSort} from '@/pipeline/DependencyResolver' diff --git a/sdk/src/internal/default-output-plugins.ts 
b/sdk/src/internal/default-output-plugins.ts index ce3ddf2c..9fc23909 100644 --- a/sdk/src/internal/default-output-plugins.ts +++ b/sdk/src/internal/default-output-plugins.ts @@ -1,23 +1,25 @@ import type {PipelineConfig} from '../config' -import {AgentsOutputAdaptor} from '../adaptors/AgentsOutputAdaptor' import {ClaudeCodeCLIOutputAdaptor} from '../adaptors/ClaudeCodeCLIOutputAdaptor' import {CodexCLIOutputAdaptor} from '../adaptors/CodexCLIOutputAdaptor' import {CursorOutputAdaptor} from '../adaptors/CursorOutputAdaptor' -import {DroidCLIOutputAdaptor} from '../adaptors/DroidCLIOutputAdaptor' -import {GeminiCLIOutputAdaptor} from '../adaptors/GeminiCLIOutputAdaptor' -import {GitExcludeOutputAdaptor} from '../adaptors/GitExcludeOutputAdaptor' import {JetBrainsAIAssistantCodexOutputAdaptor} from '../adaptors/JetBrainsAIAssistantCodexOutputAdaptor' -import {JetBrainsIDECodeStyleConfigOutputAdaptor} from '../adaptors/JetBrainsIDECodeStyleConfigOutputAdaptor' import {KiroCLIOutputAdaptor} from '../adaptors/KiroCLIOutputAdaptor' +import { + NativeAgentsOutputAdaptor, + NativeGitExcludeOutputAdaptor, + NativeJetBrainsIDECodeStyleConfigOutputAdaptor, + NativeReadmeMdConfigFileOutputAdaptor, + NativeVisualStudioCodeIDEConfigOutputAdaptor, + NativeZedIDEConfigOutputAdaptor +} from '../adaptors/NativeBaseOutputAdaptor' +import {NativeDroidCLIOutputAdaptor} from '../adaptors/NativeDroidCLIOutputAdaptor' +import {NativeGeminiCLIOutputAdaptor} from '../adaptors/NativeGeminiCLIOutputAdaptor' import {OpencodeCLIOutputAdaptor} from '../adaptors/OpencodeCLIOutputAdaptor' import {QoderIDEPluginOutputAdaptor} from '../adaptors/QoderIDEPluginOutputAdaptor' -import {ReadmeMdConfigFileOutputAdaptor} from '../adaptors/ReadmeMdConfigFileOutputAdaptor' import {TraeCNIDEOutputAdaptor} from '../adaptors/TraeCNIDEOutputAdaptor' import {TraeIDEOutputAdaptor} from '../adaptors/TraeIDEOutputAdaptor' -import {VisualStudioCodeIDEConfigOutputAdaptor} from 
'../adaptors/VisualStudioCodeIDEConfigOutputAdaptor' import {WarpIDEOutputAdaptor} from '../adaptors/WarpIDEOutputAdaptor' import {WindsurfOutputAdaptor} from '../adaptors/WindsurfOutputAdaptor' -import {ZedIDEConfigOutputAdaptor} from '../adaptors/ZedIDEConfigOutputAdaptor' export interface DefaultOutputAdaptorDescriptor { readonly name: string @@ -28,12 +30,12 @@ export interface DefaultOutputAdaptorDescriptor { export function createDefaultOutputAdaptors(): PipelineConfig['outputPlugins'] { return [ - new AgentsOutputAdaptor(), + new NativeAgentsOutputAdaptor(), new ClaudeCodeCLIOutputAdaptor(), new CodexCLIOutputAdaptor(), new JetBrainsAIAssistantCodexOutputAdaptor(), - new DroidCLIOutputAdaptor(), - new GeminiCLIOutputAdaptor(), + new NativeDroidCLIOutputAdaptor(), + new NativeGeminiCLIOutputAdaptor(), new KiroCLIOutputAdaptor(), new OpencodeCLIOutputAdaptor(), new QoderIDEPluginOutputAdaptor(), @@ -42,11 +44,11 @@ export function createDefaultOutputAdaptors(): PipelineConfig['outputPlugins'] { new WarpIDEOutputAdaptor(), new WindsurfOutputAdaptor(), new CursorOutputAdaptor(), - new GitExcludeOutputAdaptor(), - new JetBrainsIDECodeStyleConfigOutputAdaptor(), - new VisualStudioCodeIDEConfigOutputAdaptor(), - new ZedIDEConfigOutputAdaptor(), - new ReadmeMdConfigFileOutputAdaptor() + new NativeGitExcludeOutputAdaptor(), + new NativeJetBrainsIDECodeStyleConfigOutputAdaptor(), + new NativeVisualStudioCodeIDEConfigOutputAdaptor(), + new NativeZedIDEConfigOutputAdaptor(), + new NativeReadmeMdConfigFileOutputAdaptor() ] } diff --git a/sdk/src/internal/git-discovery-legacy.ts b/sdk/src/internal/git-discovery-legacy.ts index cb4e2791..014fb552 100644 --- a/sdk/src/internal/git-discovery-legacy.ts +++ b/sdk/src/internal/git-discovery-legacy.ts @@ -14,8 +14,7 @@ export function resolveGitInfoDir(projectDir: string): string | null { const stat = fs.lstatSync(dotGitPath) if (stat.isDirectory()) { - const infoDir = path.join(dotGitPath, 'info') - return infoDir + return 
path.join(dotGitPath, 'info') } if (stat.isFile()) { diff --git a/sdk/src/internal/native-command-bridge.test.ts b/sdk/src/internal/native-command-bridge.test.ts new file mode 100644 index 00000000..a580d795 --- /dev/null +++ b/sdk/src/internal/native-command-bridge.test.ts @@ -0,0 +1,122 @@ +import {mkdtempSync, readFileSync, rmSync} from 'node:fs' +import {tmpdir} from 'node:os' +import {join} from 'node:path' +import {afterEach, describe, expect, it, vi} from 'vitest' + +const {cleanMock, dryRunMock, installMock} = vi.hoisted(() => ({ + cleanMock: vi.fn(), + dryRunMock: vi.fn(), + installMock: vi.fn() +})) + +vi.mock('./sdk-binding', () => ({ + createTsFallbackMemorySyncBinding() { + return { + install: installMock, + dryRun: dryRunMock, + clean: cleanMock + } + } +})) + +afterEach(() => { + vi.clearAllMocks() + vi.resetModules() +}) + +describe('internal native command bridge', () => { + it('preserves explicit logging for install dispatch', async () => { + installMock.mockResolvedValue({ + success: true, + filesAffected: 3, + dirsAffected: 1, + warnings: [], + errors: [] + }) + + const {executeInternalBridgeCommand} = await import('./native-command-bridge') + const result = await executeInternalBridgeCommand('install', JSON.stringify({ + cwd: '/workspace/demo', + logLevel: 'debug' + })) + + expect(result).toMatchObject({ + success: true, + filesAffected: 3, + dirsAffected: 1 + }) + expect(installMock).toHaveBeenCalledWith({ + cwd: '/workspace/demo', + logLevel: 'debug' + }) + }) + + it('passes dryRun through clean dispatch without injecting a log level', async () => { + cleanMock.mockResolvedValue({ + success: true, + filesAffected: 5, + dirsAffected: 2, + warnings: [], + errors: [] + }) + + const {executeInternalBridgeCommand} = await import('./native-command-bridge') + const result = await executeInternalBridgeCommand('clean', JSON.stringify({ + cwd: '/workspace/demo', + dryRun: true + })) + + expect(result).toMatchObject({ + success: true, + filesAffected: 
5, + dirsAffected: 2 + }) + expect(cleanMock).toHaveBeenCalledWith({ + cwd: '/workspace/demo', + dryRun: true + }) + }) + + it('supports a bundle smoke self-test without loading runtime dependencies', async () => { + const {executeInternalBridgeCommand} = await import('./native-command-bridge') + + await expect(executeInternalBridgeCommand('self-test')).resolves.toEqual({ + ok: true, + command: 'self-test' + }) + expect(installMock).not.toHaveBeenCalled() + expect(dryRunMock).not.toHaveBeenCalled() + expect(cleanMock).not.toHaveBeenCalled() + }) + + it('writes the result payload to the bridge result path when requested', async () => { + const tempDir = mkdtempSync(join(tmpdir(), 'tnmsc-bridge-result-')) + const resultPath = join(tempDir, 'result.json') + const previousResultPath = process.env['TNMSC_INTERNAL_COMMAND_BRIDGE_RESULT_PATH'] + const stdoutSpy = vi.spyOn(process.stdout, 'write').mockImplementation(() => true) + + process.env['TNMSC_INTERNAL_COMMAND_BRIDGE_RESULT_PATH'] = resultPath + + try { + const {runInternalBridgeCli} = await import('./native-command-bridge') + await runInternalBridgeCli(['self-test']) + + expect(readFileSync(resultPath, 'utf8')).toBe(JSON.stringify({ + ok: true, + command: 'self-test' + })) + expect(stdoutSpy).not.toHaveBeenCalled() + } + finally { + stdoutSpy.mockRestore() + + if (previousResultPath == null) { + delete process.env['TNMSC_INTERNAL_COMMAND_BRIDGE_RESULT_PATH'] + } else { + process.env['TNMSC_INTERNAL_COMMAND_BRIDGE_RESULT_PATH'] = previousResultPath + } + + rmSync(tempDir, {recursive: true, force: true}) + } + }) +}) diff --git a/sdk/src/internal/native-command-bridge.ts b/sdk/src/internal/native-command-bridge.ts new file mode 100644 index 00000000..c392fd96 --- /dev/null +++ b/sdk/src/internal/native-command-bridge.ts @@ -0,0 +1,84 @@ +import type {MemorySyncCommandOptions, MemorySyncCommandResult} from './sdk-binding' +import {writeFile} from 'node:fs/promises' +import {resolve} from 'node:path' +import process 
from 'node:process' + +import {fileURLToPath} from 'node:url' +import {createTsFallbackMemorySyncBinding} from './sdk-binding' + +export type InternalBridgeCommand = 'install' | 'dry-run' | 'clean' | 'self-test' + +interface BridgeSelfTestResult { + readonly ok: true + readonly command: 'self-test' +} + +type BridgeExecutionResult = MemorySyncCommandResult | BridgeSelfTestResult +const INTERNAL_BRIDGE_RESULT_PATH_ENV = 'TNMSC_INTERNAL_COMMAND_BRIDGE_RESULT_PATH' + +function isInternalBridgeCommand(value: string): value is Exclude { + return value === 'install' || value === 'dry-run' || value === 'clean' +} + +export function normalizeBridgeCommandOptions( + optionsJson?: string +): MemorySyncCommandOptions & {readonly dryRun?: boolean} { + return optionsJson == null || optionsJson.length === 0 + ? {} + : JSON.parse(optionsJson) as MemorySyncCommandOptions & {readonly dryRun?: boolean} +} + +export async function executeInternalBridgeCommand( + commandArg: string, + optionsJson?: string +): Promise { + if (commandArg === 'self-test') { + return { + ok: true, + command: 'self-test' + } + } + + if (!isInternalBridgeCommand(commandArg)) { + throw new Error(`Unsupported internal bridge command: ${commandArg}`) + } + + const binding = createTsFallbackMemorySyncBinding() + const options = normalizeBridgeCommandOptions(optionsJson) + + switch (commandArg) { + case 'install': + return binding.install(options) + case 'dry-run': + return binding.dryRun(options) + case 'clean': + return binding.clean(options) + } +} + +export async function runInternalBridgeCli(argv: readonly string[] = process.argv.slice(2)): Promise { + const [commandArg = 'self-test', optionsJson] = argv + const result = await executeInternalBridgeCommand(commandArg, optionsJson) + const serialized = JSON.stringify(result) + const resultPath = process.env[INTERNAL_BRIDGE_RESULT_PATH_ENV] + + if (resultPath != null && resultPath.length > 0) { + await writeFile(resultPath, serialized, 'utf8') + return + } + + 
process.stdout.write(`${serialized}\n`) +} + +function isDirectExecution(): boolean { + const entryPath = process.argv[1] + return entryPath != null && fileURLToPath(import.meta.url) === resolve(entryPath) +} + +if (isDirectExecution()) { + void runInternalBridgeCli().catch(error => { + const message = error instanceof Error ? error.stack ?? error.message : String(error) + process.stderr.write(`${message}\n`) + process.exitCode = 1 + }) +} diff --git a/sdk/src/internal/sdk-binding.ts b/sdk/src/internal/sdk-binding.ts index ed5ed400..51066f3c 100644 --- a/sdk/src/internal/sdk-binding.ts +++ b/sdk/src/internal/sdk-binding.ts @@ -1,7 +1,3 @@ -import type { - LoggerDiagnosticRecord, - LogLevel -} from '@truenine/logger' import type { AdaptorOptions, OutputCleanContext, @@ -19,7 +15,11 @@ import type { WritePromptArtifactsInput } from '../prompts' import type {RuntimeCommand} from '../runtime-command' -import {clearBufferedDiagnostics, createLogger, drainBufferedDiagnostics, setGlobalLogLevel} from '@truenine/logger' +import type { + LoggerDiagnosticRecord, + LogLevel +} from '@/libraries/logger' +import {clearBufferedDiagnostics, createLogger, drainBufferedDiagnostics, setGlobalLogLevel} from '@/libraries/logger' import {collectOutputDeclarations, executeDeclarativeWriteOutputs} from '../adaptors/adaptor-core/plugin' import {defineConfig} from '../config' import {getConfigLoader} from '../ConfigLoader' @@ -284,7 +284,7 @@ async function runInstall(options: MemorySyncCommandOptions = {}): Promise total + declarations.length, 0) ctx.logger.info('Prepared output plan', { - plugins: predeclaredOutputs.size, + adaptors: predeclaredOutputs.size, declarations: declarationCount }) @@ -337,7 +337,7 @@ async function runInstall(options: MemorySyncCommandOptions = {}): Promise Result Result { - let cwd = resolve_command_cwd(&options)?; - let _config = load_config(&cwd)?; - // TODO: full Rust plugin execution pipeline (output generation + cleanup + WSL mirror) - 
Ok(MemorySyncCommandResult { - success: true, - files_affected: 0, - dirs_affected: 0, - ..Default::default() - }) + core::command_bridge::execute_internal_command("install", &options) } -/// Execute the dry-run pipeline directly in Rust. +/// Execute the dry-run pipeline through the crate-owned internal command bridge. pub fn dry_run(options: MemorySyncCommandOptions) -> Result { - let cwd = resolve_command_cwd(&options)?; - let _config = load_config(&cwd)?; - // TODO: full Rust plugin execution pipeline in dry-run mode - Ok(MemorySyncCommandResult { - success: true, - files_affected: 0, - dirs_affected: 0, - message: Some("Dry-run complete, no files were written".to_string()), - ..Default::default() - }) + core::command_bridge::execute_internal_command("dry-run", &options) } -/// Execute cleanup directly in Rust. +/// Execute cleanup through the crate-owned internal command bridge. pub fn clean(options: MemorySyncCommandOptions) -> Result { - let cwd = resolve_command_cwd(&options)?; - let _config = load_config(&cwd)?; - // TODO: build real cleanup snapshot from plugin registry and execute cleanup - if options.dry_run == Some(true) { - Ok(MemorySyncCommandResult { - success: true, - files_affected: 0, - dirs_affected: 0, - message: Some("Dry-run complete, no files were deleted".to_string()), - ..Default::default() - }) - } else { - Ok(MemorySyncCommandResult { - success: true, - files_affected: 0, - dirs_affected: 0, - ..Default::default() - }) - } + core::command_bridge::execute_internal_command("clean", &options) } /// Return the default output plugin registry without instantiating TS plugin classes. @@ -191,13 +159,6 @@ pub fn list_plugins() -> Vec { .collect() } -fn resolve_command_cwd(options: &MemorySyncCommandOptions) -> Result { - match options.cwd.as_deref() { - Some(cwd) => Ok(PathBuf::from(cwd)), - None => std::env::current_dir().map_err(CliError::IoError), - } -} - /// Run the install pipeline in passthrough mode for the Rust CLI shell. 
pub fn run_install_cli() -> ExitCode { match install(MemorySyncCommandOptions::default()) { @@ -435,6 +396,24 @@ mod napi_binding { crate::core::input_plugins::skill::collect_skill(&options_json) .map_err(|e| napi::Error::from_reason(e.to_string())) } + + #[napi(js_name = "collectBaseOutputPlans")] + pub fn collect_base_output_plans_binding(context_json: String) -> napi::Result { + crate::core::base_output_plans::collect_base_output_plans(&context_json) + .map_err(|e| napi::Error::from_reason(e.to_string())) + } + + #[napi(js_name = "collectGeminiOutputPlan")] + pub fn collect_gemini_output_plan_binding(context_json: String) -> napi::Result { + crate::core::gemini_output_plan::collect_gemini_output_plan(&context_json) + .map_err(|e| napi::Error::from_reason(e.to_string())) + } + + #[napi(js_name = "collectDroidOutputPlan")] + pub fn collect_droid_output_plan_binding(context_json: String) -> napi::Result { + crate::core::droid_output_plan::collect_droid_output_plan(&context_json) + .map_err(|e| napi::Error::from_reason(e.to_string())) + } } // --------------------------------------------------------------------------- @@ -506,6 +485,7 @@ mod property_tests { CliError::ConfigError("bad config".into()), CliError::IoError(std::io::Error::new(std::io::ErrorKind::NotFound, "test")), CliError::SerializationError(serde_json::from_str::("invalid").unwrap_err()), + CliError::ExecutionError("bridge failed".into()), CliError::NotImplemented("test".into()), ]; @@ -514,6 +494,7 @@ mod property_tests { CliError::ConfigError(msg) => assert!(!msg.is_empty()), CliError::IoError(e) => assert!(!e.to_string().is_empty()), CliError::SerializationError(e) => assert!(!e.to_string().is_empty()), + CliError::ExecutionError(msg) => assert!(!msg.is_empty()), CliError::NotImplemented(msg) => assert!(!msg.is_empty()), } } diff --git a/sdk/src/libraries/logger.ts b/sdk/src/libraries/logger.ts new file mode 100644 index 00000000..2d4e8c9d --- /dev/null +++ b/sdk/src/libraries/logger.ts @@ -0,0 
+1,136 @@ +import {createNativeBindingLoader} from '../core/native-binding-loader' + +export type LogLevel = 'error' | 'warn' | 'info' | 'debug' | 'trace' | 'fatal' | 'silent' +export type DiagnosticLines = readonly [string, ...string[]] +export type LoggerDiagnosticLevel = Extract +type LoggerMethod = (message: string | object, ...meta: unknown[]) => void +type LoggerDiagnosticMethod = (diagnostic: LoggerDiagnosticInput) => void + +export interface LoggerDiagnosticInput { + readonly code: string + readonly title: string + readonly rootCause: DiagnosticLines + readonly exactFix?: DiagnosticLines | undefined + readonly possibleFixes?: readonly DiagnosticLines[] | undefined + readonly details?: Record | undefined +} + +export interface LoggerDiagnosticRecord extends LoggerDiagnosticInput { + readonly level: LoggerDiagnosticLevel + readonly namespace: string + readonly copyText: DiagnosticLines +} + +export interface ILogger { + error: LoggerDiagnosticMethod + warn: LoggerDiagnosticMethod + info: LoggerMethod + debug: LoggerMethod + trace: LoggerMethod + fatal: LoggerDiagnosticMethod +} + +type ActiveLogLevel = Exclude +type PlainLogLevel = Extract + +interface NapiLoggerInstance { + emit: (level: ActiveLogLevel, message: unknown, meta?: readonly unknown[]) => void + emitDiagnostic: (level: LoggerDiagnosticLevel, diagnostic: LoggerDiagnosticInput) => void +} + +interface NapiLoggerModule { + createLogger: (namespace: string, level?: string) => NapiLoggerInstance + setGlobalLogLevel: (level: string) => void + getGlobalLogLevel: () => string | undefined + clearBufferedDiagnostics: () => void + drainBufferedDiagnostics: () => string + flushOutput?: () => void +} + +const DIAGNOSTIC_LOG_LEVELS: readonly LoggerDiagnosticLevel[] = ['error', 'warn', 'fatal'] +const PLAIN_LOG_LEVELS: readonly PlainLogLevel[] = ['info', 'debug', 'trace'] + +function isNapiLoggerModule(value: unknown): value is NapiLoggerModule { + if (value == null || typeof value !== 'object') return false + 
+ const candidate = value as Partial + return typeof candidate.createLogger === 'function' + && typeof candidate.setGlobalLogLevel === 'function' + && typeof candidate.getGlobalLogLevel === 'function' + && typeof candidate.clearBufferedDiagnostics === 'function' + && typeof candidate.drainBufferedDiagnostics === 'function' +} + +const getNapiBinding = createNativeBindingLoader({ + packageName: '@truenine/logger', + binaryName: 'napi-logger', + bindingValidator: isNapiLoggerModule, + cliExportName: 'logger' +}) + +function parseBufferedDiagnostics(serialized: string): LoggerDiagnosticRecord[] { + try { + const parsed = JSON.parse(serialized) as unknown + return Array.isArray(parsed) ? parsed as LoggerDiagnosticRecord[] : [] + } + catch { + return [] + } +} + +function createLogMethod(instance: NapiLoggerInstance, level: PlainLogLevel): LoggerMethod { + return (message: string | object, ...meta: unknown[]): void => { + instance.emit(level, message, meta.length === 0 ? void 0 : meta) + } +} + +function createDiagnosticMethod(instance: NapiLoggerInstance, level: LoggerDiagnosticLevel): LoggerDiagnosticMethod { + return (diagnostic: LoggerDiagnosticInput): void => { + instance.emitDiagnostic(level, diagnostic) + } +} + +function createNapiAdapter(instance: NapiLoggerInstance): ILogger { + const messageMethods = PLAIN_LOG_LEVELS.reduce((logger, level) => { + logger[level] = createLogMethod(instance, level) + return logger + }, {} as Record) + + const diagnosticMethods = DIAGNOSTIC_LOG_LEVELS.reduce((logger, level) => { + logger[level] = createDiagnosticMethod(instance, level) + return logger + }, {} as Record) + + return { + error: diagnosticMethods.error, + warn: diagnosticMethods.warn, + info: messageMethods.info, + debug: messageMethods.debug, + trace: messageMethods.trace, + fatal: diagnosticMethods.fatal + } +} + +export function setGlobalLogLevel(level: LogLevel): void { + getNapiBinding().setGlobalLogLevel(level) +} + +export function getGlobalLogLevel(): LogLevel 
| undefined { + return getNapiBinding().getGlobalLogLevel() as LogLevel | undefined +} + +export function clearBufferedDiagnostics(): void { + getNapiBinding().clearBufferedDiagnostics() +} + +export function drainBufferedDiagnostics(): LoggerDiagnosticRecord[] { + return parseBufferedDiagnostics(getNapiBinding().drainBufferedDiagnostics()) +} + +export function flushOutput(): void { + getNapiBinding().flushOutput?.() +} + +export function createLogger(namespace: string, logLevel?: LogLevel): ILogger { + return createNapiAdapter(getNapiBinding().createLogger(namespace, logLevel)) +} diff --git a/sdk/src/libraries/script-runtime/index.test.ts b/sdk/src/libraries/script-runtime/index.test.ts new file mode 100644 index 00000000..a173efa1 --- /dev/null +++ b/sdk/src/libraries/script-runtime/index.test.ts @@ -0,0 +1,39 @@ +import {mkdtempSync, rmSync, writeFileSync} from 'node:fs' +import {tmpdir} from 'node:os' +import {join} from 'node:path' +import {afterEach, describe, expect, it} from 'vitest' + +import {resolvePublicPath} from './index' + +const tempDirs: string[] = [] + +afterEach(() => { + for (const tempDir of tempDirs.splice(0)) { + rmSync(tempDir, {recursive: true, force: true}) + } +}) + +describe('script runtime worker resolution', () => { + it('resolves a proxy path through the native worker bridge', () => { + const tempDir = mkdtempSync(join(tmpdir(), 'tnmsc-script-runtime-')) + tempDirs.push(tempDir) + + const proxyModulePath = join(tempDir, 'proxy.ts') + + writeFileSync( + proxyModulePath, + 'export default { resolvePublicPath(logicalPath) { return logicalPath.replace(/^\\.git\\//u, "____git/") } }\n', + 'utf8' + ) + + const result = resolvePublicPath(proxyModulePath, { + cwd: tempDir, + workspaceDir: tempDir, + aindexDir: join(tempDir, '.aindex'), + command: 'install', + platform: process.platform + }, '.git/config') + + expect(result).toBe('____git/config') + }) +}) diff --git a/sdk/src/libraries/script-runtime/index.ts 
b/sdk/src/libraries/script-runtime/index.ts new file mode 100644 index 00000000..5fddd365 --- /dev/null +++ b/sdk/src/libraries/script-runtime/index.ts @@ -0,0 +1,164 @@ +import type { + ProxyContext, + ProxyDefinition, + ProxyModule, + ProxyModuleConfig, + ProxyRouteHandler, + ValidatePublicPathOptions +} from './types' + +import * as fs from 'node:fs' +import {createRequire} from 'node:module' +import {dirname, resolve} from 'node:path' +import {fileURLToPath} from 'node:url' +import {createNativeBindingLoader} from '../../core/native-binding-loader' + +import { + loadProxyModule as loadProxyModuleInternal, + resolvePublicPathModule +} from './runtime-core' + +export type { + ProxyCommand, + ProxyContext, + ProxyDefinition, + ProxyMatcherConfig, + ProxyModule, + ProxyModuleConfig, + ProxyRouteHandler, + ValidatePublicPathOptions +} from './types' + +interface ScriptRuntimeBinding { + validate_public_path?: (resolvedPath: string, aindexPublicDir: string) => string + validatePublicPath?: (resolvedPath: string, aindexPublicDir: string) => string + resolve_public_path?: (filePath: string, ctxJson: string, logicalPath: string) => string + resolvePublicPath?: (filePath: string, ctxJson: string, logicalPath: string) => string +} + +function isScriptRuntimeBinding(value: unknown): value is ScriptRuntimeBinding { + if (value == null || typeof value !== 'object') return false + const candidate = value as ScriptRuntimeBinding + return typeof candidate.validate_public_path === 'function' + || typeof candidate.validatePublicPath === 'function' + || typeof candidate.resolve_public_path === 'function' + || typeof candidate.resolvePublicPath === 'function' +} + +const getBinding = createNativeBindingLoader({ + packageName: '@truenine/script-runtime', + binaryName: 'napi-script-runtime', + bindingValidator: isScriptRuntimeBinding, + cliExportName: 'scriptRuntime', + optionalMethods: { + validatePublicPath: ['validate_public_path'], + resolvePublicPath: ['resolve_public_path'] + } 
+}) + +let workerPathCache: string | undefined +const runtimeRequire = createRequire(import.meta.url) + +function callValidatePublicPathBinding(resolvedPath: string, options: ValidatePublicPathOptions): string { + const nativeBinding = getBinding() + const validatePublicPathNative = nativeBinding.validate_public_path ?? nativeBinding.validatePublicPath + + if (validatePublicPathNative == null) throw new Error('validate_public_path native binding is unavailable') + + return validatePublicPathNative(resolvedPath, options.aindexPublicDir) +} + +function callResolvePublicPathBinding(filePath: string, ctxJson: string, logicalPath: string): string { + const nativeBinding = getBinding() + const resolvePublicPathNative = nativeBinding.resolve_public_path ?? nativeBinding.resolvePublicPath + + if (resolvePublicPathNative == null) throw new Error('resolve_public_path native binding is unavailable') + + return resolvePublicPathNative(filePath, ctxJson, logicalPath) +} + +function getPackageWorkerPaths(): string[] { + try { + const packageJsonPath = runtimeRequire.resolve('@truenine/script-runtime/package.json') + const packageDir = dirname(packageJsonPath) + + return [ + resolve(packageDir, 'dist', 'resolve-proxy-worker.mjs'), + resolve(packageDir, 'dist', 'script-runtime-worker.mjs') + ] + } + catch { + return [] + } +} + +function getWorkerPath(): string { + if (workerPathCache != null) return workerPathCache + + const candidatePaths = [ + ...getPackageWorkerPaths(), + fileURLToPath(new URL('./resolve-proxy-worker.mjs', import.meta.url)), + fileURLToPath(new URL('./script-runtime-worker.mjs', import.meta.url)), + fileURLToPath(new URL('../resolve-proxy-worker.mjs', import.meta.url)), + fileURLToPath(new URL('../script-runtime-worker.mjs', import.meta.url)), + fileURLToPath(new URL('../../resolve-proxy-worker.mjs', import.meta.url)), + fileURLToPath(new URL('../../script-runtime-worker.mjs', import.meta.url)), + fileURLToPath(new 
URL('../../libraries/script-runtime/dist/resolve-proxy-worker.mjs', import.meta.url)), + fileURLToPath(new URL('../../libraries/script-runtime/dist/script-runtime-worker.mjs', import.meta.url)), + fileURLToPath(new URL('../../../cli/dist/script-runtime-worker.mjs', import.meta.url)), + fileURLToPath(new URL('../../../../cli/dist/script-runtime-worker.mjs', import.meta.url)), + fileURLToPath(new URL('../../../libraries/script-runtime/dist/resolve-proxy-worker.mjs', import.meta.url)), + fileURLToPath(new URL('../../../../libraries/script-runtime/dist/resolve-proxy-worker.mjs', import.meta.url)), + fileURLToPath(new URL('../../../../../libraries/script-runtime/dist/resolve-proxy-worker.mjs', import.meta.url)) + ] + + for (const candidatePath of candidatePaths) { + if (fs.existsSync(candidatePath)) { + workerPathCache = candidatePath + return candidatePath + } + } + + workerPathCache = candidatePaths[0] + return candidatePaths[0]! // eslint-disable-line ts/no-non-null-assertion -- fallback array is never empty +} + +export function defineProxy(value: T): T { + return value +} + +export async function loadProxyModule(filePath: string): Promise { + return loadProxyModuleInternal(filePath) +} + +export function validatePublicPath( + resolvedPath: string, + options: ValidatePublicPathOptions +): string { + return callValidatePublicPathBinding(resolvedPath, options) +} + +export function resolvePublicPath( + filePath: string, + ctx: ProxyContext, + logicalPath: string, + timeoutMs: number = 5_000 +): string { + return callResolvePublicPathBinding(filePath, JSON.stringify({ + ...ctx, + workerPath: getWorkerPath(), + timeoutMs + }), logicalPath) +} + +export async function resolvePublicPathUnchecked( + filePath: string, + ctx: ProxyContext, + logicalPath: string +): Promise { + return resolvePublicPathModule(filePath, ctx, logicalPath) +} + +export function getProxyModuleConfig(module: ProxyModule): ProxyModuleConfig | undefined { + return module.config +} diff --git 
a/sdk/src/libraries/script-runtime/resolve-proxy-worker.ts b/sdk/src/libraries/script-runtime/resolve-proxy-worker.ts new file mode 100644 index 00000000..9fb0f7d3 --- /dev/null +++ b/sdk/src/libraries/script-runtime/resolve-proxy-worker.ts @@ -0,0 +1,19 @@ +import {readFileSync} from 'node:fs' +import process from 'node:process' +import {resolvePublicPathModule} from './runtime-core' + +async function main(): Promise { + const [, , filePath, ctxJsonPath, logicalPath] = process.argv + if (filePath == null || ctxJsonPath == null || logicalPath == null) throw new Error('Usage: resolve-proxy-worker ') + + const ctxJson = readFileSync(ctxJsonPath, 'utf8') + const ctx = JSON.parse(ctxJson) as Parameters[1] + const result = await resolvePublicPathModule(filePath, ctx, logicalPath) + process.stdout.write(`${result}\n`) +} + +main().catch((error: unknown) => { + const message = error instanceof Error ? error.message : String(error) + process.stderr.write(`${message}\n`) + process.exit(1) +}) diff --git a/sdk/src/libraries/script-runtime/runtime-core.ts b/sdk/src/libraries/script-runtime/runtime-core.ts new file mode 100644 index 00000000..f3f6f59b --- /dev/null +++ b/sdk/src/libraries/script-runtime/runtime-core.ts @@ -0,0 +1,104 @@ +import type {Jiti} from 'jiti' +import type {ProxyContext, ProxyDefinition, ProxyModule, ProxyRouteHandler} from './types' + +import * as fs from 'node:fs' +import * as path from 'node:path' + +function isRecord(value: unknown): value is Record { + return typeof value === 'object' && value !== null +} + +function isPlainObject(value: unknown): value is Record { + if (!isRecord(value)) return false + const prototype = Object.getPrototypeOf(value) as object | null + return prototype === Object.prototype || prototype === null +} + +async function createRuntime(): Promise { + const {createJiti} = await import('jiti') as { + createJiti: (filename: string, options: { + readonly fsCache: boolean + readonly moduleCache: boolean + readonly 
interopDefault: false + }) => Jiti + } + + return createJiti(import.meta.url, { + fsCache: false, + moduleCache: false, + interopDefault: false + }) +} + +function toProxyModule(rawModule: unknown): ProxyModule { + if (!isRecord(rawModule)) throw new Error('proxy.ts must export a module namespace object') + + const defaultExport = rawModule['default'] + if (defaultExport == null) throw new Error('proxy.ts must export a default value') + if (typeof defaultExport !== 'function' && !isPlainObject(defaultExport)) throw new TypeError('proxy.ts default export must be a function or plain object') + + const configExport = rawModule['config'] + if (configExport != null && !isPlainObject(configExport)) throw new Error('proxy.ts config export must be a plain object') + + const proxyModule: ProxyModule = { + default: defaultExport as ProxyModule['default'] + } + + if (configExport != null) { + return { + ...proxyModule, + config: configExport as NonNullable + } + } + + return proxyModule +} + +export async function loadProxyModule(filePath: string): Promise { + const absoluteFilePath = path.resolve(filePath) + if (!fs.existsSync(absoluteFilePath)) throw new Error(`proxy.ts not found: ${absoluteFilePath}`) + + const runtime = await createRuntime() + const loadedModule = await runtime.import(absoluteFilePath) + return toProxyModule(loadedModule) +} + +function matchesCommand(module: ProxyModule, command: ProxyContext['command']): boolean { + const commands = module.config?.matcher?.commands + if (commands == null || commands.length === 0) return true + return commands.includes(command) +} + +function assertNonEmptyPath(value: string, label: string): string { + if (value.trim().length === 0) throw new Error(`${label} cannot be empty`) + return value +} + +function getRouteHandler(handler: ProxyModule['default']): ProxyRouteHandler | undefined { + if (typeof handler === 'function') return handler + + const proxyDefinition: ProxyDefinition = handler + if 
(proxyDefinition.resolvePublicPath == null) return void 0 + if (typeof proxyDefinition.resolvePublicPath !== 'function') throw new TypeError('proxy.ts default export resolvePublicPath must be a function') + + return proxyDefinition.resolvePublicPath +} + +export async function resolvePublicPathModule( + filePath: string, + ctx: ProxyContext, + logicalPath: string +): Promise { + const targetLogicalPath = assertNonEmptyPath(logicalPath, 'logical public path') + const proxyModule = await loadProxyModule(filePath) + + if (!matchesCommand(proxyModule, ctx.command)) return targetLogicalPath + + const routeHandler = getRouteHandler(proxyModule.default) + if (routeHandler == null) return targetLogicalPath + + const resolvedPath = await routeHandler(targetLogicalPath, ctx) + if (typeof resolvedPath !== 'string') throw new Error('proxy.ts must resolve public paths to a string') + + return assertNonEmptyPath(resolvedPath, 'proxy.ts resolved public path') +} diff --git a/sdk/src/libraries/script-runtime/types.ts b/sdk/src/libraries/script-runtime/types.ts new file mode 100644 index 00000000..690daa2d --- /dev/null +++ b/sdk/src/libraries/script-runtime/types.ts @@ -0,0 +1,37 @@ +export type ProxyCommand = 'install' | 'dry-run' | 'clean' | 'plugins' + +export interface ProxyContext { + readonly cwd: string + readonly workspaceDir: string + readonly aindexDir: string + readonly command: ProxyCommand + readonly platform: NodeJS.Platform +} + +export interface ProxyMatcherConfig { + readonly commands?: readonly ProxyCommand[] +} + +export interface ProxyModuleConfig { + readonly matcher?: ProxyMatcherConfig +} + +export type ProxyRouteHandler = ( + logicalPath: string, + ctx: ProxyContext +) => string | Promise + +export interface ProxyDefinition { + readonly resolvePublicPath?: ProxyRouteHandler +} + +export type ProxyHandler = ProxyDefinition | ProxyRouteHandler + +export interface ProxyModule { + readonly default: ProxyHandler + readonly config?: ProxyModuleConfig +} + +export 
interface ValidatePublicPathOptions { + readonly aindexPublicDir: string +} diff --git a/sdk/src/prompts.ts b/sdk/src/prompts.ts index d1ebb9e6..7fa7308b 100644 --- a/sdk/src/prompts.ts +++ b/sdk/src/prompts.ts @@ -104,7 +104,7 @@ export async function listPrompts(options: ListPromptsOptions = {}): Promise total + plugin.outputs.length, 0), - cleanupDeleteCount: snapshot.pluginSnapshots.reduce((total, plugin) => total + (plugin.cleanup.delete?.length ?? 0), 0), - cleanupProtectCount: snapshot.pluginSnapshots.reduce((total, plugin) => total + (plugin.cleanup.protect?.length ?? 0), 0), - cleanupExcludeScanGlobs: snapshot.pluginSnapshots.reduce((total, plugin) => total + (plugin.cleanup.excludeScanGlobs?.length ?? 0), 0), + adaptorCount: snapshot.pluginSnapshots.length, + outputCount: snapshot.pluginSnapshots.reduce((total, adaptor) => total + adaptor.outputs.length, 0), + cleanupDeleteCount: snapshot.pluginSnapshots.reduce((total, adaptor) => total + (adaptor.cleanup.delete?.length ?? 0), 0), + cleanupProtectCount: snapshot.pluginSnapshots.reduce((total, adaptor) => total + (adaptor.cleanup.protect?.length ?? 0), 0), + cleanupExcludeScanGlobs: snapshot.pluginSnapshots.reduce((total, adaptor) => total + (adaptor.cleanup.excludeScanGlobs?.length ?? 0), 0), protectedRuleCount: snapshot.protectedRules.length, projectRootCount: snapshot.projectRoots.length, emptyDirExcludeGlobs: snapshot.emptyDirExcludeGlobs?.length ?? 
0 @@ -526,7 +526,7 @@ export async function collectDeletionTargets( }> { cleanCtx.logger.debug('Cleanup planning started', { dryRun: cleanCtx.dryRun === true, - plugins: outputPlugins.length, + adaptors: outputPlugins.length, workspace: cleanCtx.collectedOutputContext.workspace.directory.path }) const snapshot = await buildCleanupSnapshot(outputPlugins, cleanCtx, predeclaredOutputs) @@ -564,7 +564,7 @@ export async function performCleanup( ): Promise { logger.debug('Cleanup execution started', { dryRun: cleanCtx.dryRun === true, - plugins: outputPlugins.length, + adaptors: outputPlugins.length, workspace: cleanCtx.collectedOutputContext.workspace.directory.path }) if (predeclaredOutputs != null) { @@ -582,8 +582,8 @@ export async function performCleanup( ...summarizeCleanupSnapshot(snapshot) }) logger.debug('Cleanup native execution started', { - pluginCount: snapshot.pluginSnapshots.length, - outputCount: snapshot.pluginSnapshots.reduce((total, plugin) => total + plugin.outputs.length, 0) + adaptorCount: snapshot.pluginSnapshots.length, + outputCount: snapshot.pluginSnapshots.reduce((total, adaptor) => total + adaptor.outputs.length, 0) }) const result = reconcileExactSafeFileViolations(await performCleanupWithNative(snapshot), collectExactSafeFilePaths(snapshot)) logger.debug('Cleanup native execution finished', { diff --git a/sdk/test/native-binding/base-output-plans.ts b/sdk/test/native-binding/base-output-plans.ts new file mode 100644 index 00000000..4aa3b093 --- /dev/null +++ b/sdk/test/native-binding/base-output-plans.ts @@ -0,0 +1,191 @@ +import type { + ILogger, + OutputCleanContext, + OutputCleanupDeclarations, + OutputCollectedContext, + OutputFileDeclaration, + OutputWriteContext +} from '../../src/adaptors/adaptor-core' +import {Buffer} from 'node:buffer' +import * as fs from 'node:fs' +import * as path from 'node:path' +import glob from 'fast-glob' +import {createLogger} from '../../src/adaptors/adaptor-core' +import {AgentsOutputAdaptor} from 
'../../src/adaptors/AgentsOutputAdaptor' +import {DroidCLIOutputAdaptor} from '../../src/adaptors/DroidCLIOutputAdaptor' +import {GeminiCLIOutputAdaptor} from '../../src/adaptors/GeminiCLIOutputAdaptor' +import {GitExcludeOutputAdaptor} from '../../src/adaptors/GitExcludeOutputAdaptor' +import {JetBrainsIDECodeStyleConfigOutputAdaptor} from '../../src/adaptors/JetBrainsIDECodeStyleConfigOutputAdaptor' +import {ReadmeMdConfigFileOutputAdaptor} from '../../src/adaptors/ReadmeMdConfigFileOutputAdaptor' +import {VisualStudioCodeIDEConfigOutputAdaptor} from '../../src/adaptors/VisualStudioCodeIDEConfigOutputAdaptor' +import {ZedIDEConfigOutputAdaptor} from '../../src/adaptors/ZedIDEConfigOutputAdaptor' +import {parseNativeInputResult} from '../../src/inputs/native-result' + +interface NativeBaseOutputFilePlan { + readonly path: string + readonly scope?: string + readonly content: string + readonly encoding?: 'text' | 'base64' +} + +interface NativeBaseOutputPluginPlan { + readonly pluginName: string + readonly outputFiles: readonly NativeBaseOutputFilePlan[] + readonly cleanup: OutputCleanupDeclarations +} + +interface NativeBaseOutputPlans { + readonly plugins: readonly NativeBaseOutputPluginPlan[] +} + +function createMockLogger(): ILogger { + return createLogger('test-native-base-output-plans', 'silent') +} + +function createWriteContext( + collectedOutputContext: OutputCollectedContext +): OutputWriteContext { + return { + logger: createMockLogger(), + fs, + path, + glob, + dryRun: true, + runtimeTargets: {jetbrainsCodexDirs: []}, + collectedOutputContext + } as unknown as OutputWriteContext +} + +function createCleanContext( + collectedOutputContext: OutputCollectedContext +): OutputCleanContext { + return { + logger: createMockLogger(), + fs, + path, + glob, + dryRun: true, + runtimeTargets: {jetbrainsCodexDirs: []}, + collectedOutputContext + } as unknown as OutputCleanContext +} + +async function declarationContentToPlan( + declaration: OutputFileDeclaration, + 
content: string | Uint8Array +): Promise { + if (typeof content === 'string') { + return { + path: declaration.path, + scope: declaration.scope, + content + } + } + + return { + path: declaration.path, + scope: declaration.scope, + content: Buffer.from(content).toString('base64'), + encoding: 'base64' + } +} + +export async function collectBaseOutputPlans( + contextJson: string +): Promise { + const collectedOutputContext = parseNativeInputResult( + contextJson + ) + const writeContext = createWriteContext(collectedOutputContext) + const cleanContext = createCleanContext(collectedOutputContext) + const plugins = [ + new AgentsOutputAdaptor(), + new GitExcludeOutputAdaptor(), + new JetBrainsIDECodeStyleConfigOutputAdaptor(), + new VisualStudioCodeIDEConfigOutputAdaptor(), + new ZedIDEConfigOutputAdaptor(), + new ReadmeMdConfigFileOutputAdaptor() + ] + const pluginPlans: NativeBaseOutputPluginPlan[] = [] + + for (const plugin of plugins) { + const declarations = await plugin.declareOutputFiles(writeContext) + const outputFiles: NativeBaseOutputFilePlan[] = [] + + for (const declaration of declarations) { + const content = await plugin.convertContent(declaration, writeContext) + outputFiles.push(await declarationContentToPlan(declaration, content)) + } + + const cleanup = plugin.declareCleanupPaths == null + ? 
{} + : await plugin.declareCleanupPaths(cleanContext) + + pluginPlans.push({ + pluginName: plugin.name, + outputFiles, + cleanup + }) + } + + const result: NativeBaseOutputPlans = { + plugins: pluginPlans + } + return JSON.stringify(result) +} + +export async function collectGeminiOutputPlan( + contextJson: string +): Promise { + const collectedOutputContext = parseNativeInputResult( + contextJson + ) + const writeContext = createWriteContext(collectedOutputContext) + const cleanContext = createCleanContext(collectedOutputContext) + const plugin = new GeminiCLIOutputAdaptor() + const declarations = await plugin.declareOutputFiles(writeContext) + const outputFiles: NativeBaseOutputFilePlan[] = [] + + for (const declaration of declarations) { + const content = await plugin.convertContent(declaration, writeContext) + outputFiles.push(await declarationContentToPlan(declaration, content)) + } + + const cleanup = plugin.declareCleanupPaths == null + ? {} + : await plugin.declareCleanupPaths(cleanContext) + + return JSON.stringify({ + pluginName: plugin.name, + outputFiles, + cleanup + } satisfies NativeBaseOutputPluginPlan) +} + +export async function collectDroidOutputPlan( + contextJson: string +): Promise { + const collectedOutputContext = parseNativeInputResult( + contextJson + ) + const writeContext = createWriteContext(collectedOutputContext) + const cleanContext = createCleanContext(collectedOutputContext) + const plugin = new DroidCLIOutputAdaptor() + const declarations = await plugin.declareOutputFiles(writeContext) + const outputFiles: NativeBaseOutputFilePlan[] = [] + + for (const declaration of declarations) { + const content = await plugin.convertContent(declaration, writeContext) + outputFiles.push(await declarationContentToPlan(declaration, content)) + } + + const cleanup = plugin.declareCleanupPaths == null + ? 
{} + : await plugin.declareCleanupPaths(cleanContext) + + return JSON.stringify({ + pluginName: plugin.name, + outputFiles, + cleanup + } satisfies NativeBaseOutputPluginPlan) +} diff --git a/sdk/test/setup-native-binding-fixed.ts b/sdk/test/setup-native-binding-fixed.ts index 76c58171..88fc05c2 100644 --- a/sdk/test/setup-native-binding-fixed.ts +++ b/sdk/test/setup-native-binding-fixed.ts @@ -3,6 +3,7 @@ import * as fs from 'node:fs' import * as path from 'node:path' import glob from 'fast-glob' import {AdaptorKind, FilePathKind} from '../src/adaptors/adaptor-core/enums' +import {collectBaseOutputPlans, collectDroidOutputPlan, collectGeminiOutputPlan} from './native-binding/base-output-plans' import * as deskPaths from './native-binding/desk-paths' // import { // // getPrompt, @@ -49,16 +50,20 @@ interface NativeCleanupSnapshot { } function createMockLogger(): ILogger { - const logger = { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} + return { + trace: () => { + }, + debug: () => { + }, + info: () => { + }, + warn: () => { + }, + error: () => { + }, + fatal: () => { + } } satisfies ILogger - - return logger } function createSyntheticOutputAdaptor(snapshot: NativePluginCleanupSnapshot): OutputAdaptor { @@ -205,6 +210,9 @@ globalThis.__TNMSC_TEST_NATIVE_BINDING__ = { removeBlockingFile: deskPaths.removeBlockingFile, planCleanup, performCleanup: runCleanup, + collectBaseOutputPlans, + collectDroidOutputPlan, + collectGeminiOutputPlan, resolveEffectiveIncludeSeries, matchesSeries, resolveSubSeries diff --git a/sdk/test/setup-native-binding.ts b/sdk/test/setup-native-binding.ts index 3196ca85..c7c79d94 100644 --- a/sdk/test/setup-native-binding.ts +++ b/sdk/test/setup-native-binding.ts @@ -1,29 +1,13 @@ import type {ILogger, OutputAdaptor, OutputCleanContext, OutputCleanupDeclarations} from '../src/adaptors/adaptor-core' -import type { - ListPromptsOptions, - PromptServiceOptions, - 
UpsertPromptSourceInput, - WritePromptArtifactsInput -} from '../src/prompts' +import type {ListPromptsOptions, PromptServiceOptions, UpsertPromptSourceInput, WritePromptArtifactsInput} from '../src/prompts' import * as fs from 'node:fs' import {createRequire} from 'node:module' import * as path from 'node:path' import glob from 'fast-glob' import {AdaptorKind, FilePathKind} from '../src/adaptors/adaptor-core/enums.ts' -import { - topologicalSort as topologicalSortLegacy -} from '../src/internal/dependency-resolver-legacy' -import { - findAllGitRepos, - findGitModuleInfoDirs, - resolveGitInfoDir -} from '../src/internal/git-discovery-legacy' -import { - getPrompt, - listPrompts, - upsertPromptSource, - writePromptArtifacts -} from '../src/internal/prompts-legacy' +import {topologicalSort as topologicalSortLegacy} from '../src/internal/dependency-resolver-legacy' +import {findAllGitRepos, findGitModuleInfoDirs, resolveGitInfoDir} from '../src/internal/git-discovery-legacy' +import {getPrompt, listPrompts, upsertPromptSource, writePromptArtifacts} from '../src/internal/prompts-legacy' import { getEffectiveHomeDir, getGlobalConfigPath, @@ -32,6 +16,7 @@ import { resolveRuntimeEnvironment } from '../src/internal/runtime-environment-legacy' +import {collectBaseOutputPlans, collectDroidOutputPlan, collectGeminiOutputPlan} from './native-binding/base-output-plans' import * as deskPaths from './native-binding/desk-paths' interface NativeCleanupTarget { @@ -72,16 +57,20 @@ interface NativeCleanupSnapshot { } function createMockLogger(): ILogger { - const logger = { - trace: () => {}, - debug: () => {}, - info: () => {}, - warn: () => {}, - error: () => {}, - fatal: () => {} + return { + trace: () => { + }, + debug: () => { + }, + info: () => { + }, + warn: () => { + }, + error: () => { + }, + fatal: () => { + } } satisfies ILogger - - return logger } function createSyntheticOutputAdaptor(snapshot: NativePluginCleanupSnapshot): OutputAdaptor { @@ -238,6 +227,9 @@ const 
testBinding = { removeBlockingFile: deskPaths.removeBlockingFile, planCleanup, performCleanup: runCleanup, + collectBaseOutputPlans, + collectDroidOutputPlan, + collectGeminiOutputPlan, resolveEffectiveIncludeSeries, matchesSeries, resolveSubSeries, diff --git a/sdk/tsconfig.eslint.json b/sdk/tsconfig.eslint.json index ae9eeee1..0d1598a0 100644 --- a/sdk/tsconfig.eslint.json +++ b/sdk/tsconfig.eslint.json @@ -7,7 +7,7 @@ "@/*": ["./src/*"], "@truenine/desk-paths": ["./src/core/desk-paths.ts"], "@truenine/desk-paths/*": ["./src/core/desk-paths/*"], - "@truenine/logger": ["../libraries/logger/src/index.ts"], + "@truenine/logger": ["./src/libraries/logger.ts"], "@truenine/md-compiler": ["../libraries/md-compiler/src/index.ts"], "@truenine/md-compiler/errors": ["../libraries/md-compiler/src/errors/index.ts"], "@truenine/md-compiler/globals": ["../libraries/md-compiler/src/globals/index.ts"], @@ -35,7 +35,7 @@ "@truenine/plugin-warp-ide": ["./src/plugins/WarpIDEOutputPlugin.ts"], "@truenine/plugin-windsurf": ["./src/plugins/plugin-windsurf.ts"], "@truenine/plugin-zed": ["./src/plugins/plugin-zed.ts"], - "@truenine/script-runtime": ["../libraries/script-runtime/src/index.ts"] + "@truenine/script-runtime": ["./src/libraries/script-runtime/index.ts"] }, "noEmit": true, "skipLibCheck": true diff --git a/sdk/tsdown.config.ts b/sdk/tsdown.config.ts index 2d2a8271..92b6b666 100644 --- a/sdk/tsdown.config.ts +++ b/sdk/tsdown.config.ts @@ -20,7 +20,7 @@ const alwaysBundleDeps = [ export default defineConfig([ { - entry: ['./src/index.ts', '!**/*.{spec,test}.*'], + entry: ['./src/index.ts', './src/internal/native-command-bridge.ts', '!**/*.{spec,test}.*'], platform: 'node', sourcemap: false, unbundle: false, diff --git a/turbo.json b/turbo.json index a944ba83..0ca6f7ab 100644 --- a/turbo.json +++ b/turbo.json @@ -3,22 +3,27 @@ "tasks": { "build": { "dependsOn": ["^build"], - "outputs": ["dist/**", "*.node", ".next/**", "!.next/cache/**"] + "outputs": ["dist/**", "*.node", 
".next/**", "!.next/cache/**"], + "outputLogs": "errors-only" }, "test": { "dependsOn": ["build"], - "outputs": [] + "outputs": [], + "outputLogs": "errors-only" }, "lint": { "dependsOn": ["^build"], - "cache": false + "cache": false, + "outputLogs": "errors-only" }, "lint:fix": { - "cache": false + "cache": false, + "outputLogs": "errors-only" }, "check:type": { "dependsOn": ["^build"], - "cache": false + "cache": false, + "outputLogs": "errors-only" } } } From 951488b68e601e6c9a0339c9f961c7ae1ed6b05c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sun, 12 Apr 2026 16:00:58 +0800 Subject: [PATCH 2/3] Add CLI integration coverage and sync version handling --- .githooks/sync-versions.test.ts | 47 + .github/workflows/pull-request.yml | 23 +- Cargo.lock | 12 +- Cargo.toml | 2 +- cli-integration-test/package.json | 20 + cli-integration-test/src/artifacts.ts | 158 +++ cli-integration-test/src/container.ts | 328 +++++ cli-integration-test/src/fixtures.ts | 429 +++++++ .../test/claude-code-cli.integration.test.ts | 172 +++ .../test/codex.integration.test.ts | 159 +++ cli-integration-test/tsconfig.json | 59 + cli-integration-test/tsconfig.test.json | 19 + cli-integration-test/vitest.config.ts | 19 + cli/npm/darwin-arm64/package.json | 2 +- cli/npm/darwin-x64/package.json | 2 +- cli/npm/linux-arm64-gnu/package.json | 2 +- cli/npm/linux-x64-gnu/package.json | 2 +- cli/npm/win32-x64-msvc/package.json | 2 +- cli/package.json | 2 +- cli/scripts/sync-sdk-dist.ts | 47 +- cli/src/internal/native-command-bridge.ts | 74 ++ cli/tsdown.config.ts | 15 + doc/package.json | 2 +- gui/package.json | 2 +- gui/src-tauri/Cargo.lock | 2 +- gui/src-tauri/Cargo.toml | 2 +- gui/src-tauri/tauri.conf.json | 2 +- libraries/logger/package.json | 2 +- libraries/md-compiler/package.json | 2 +- libraries/script-runtime/package.json | 2 +- mcp/package.json | 2 +- package.json | 4 +- pnpm-lock.yaml | 1114 +++++++++++++++++ pnpm-workspace.yaml | 2 + sdk/package.json | 2 +- 
sdk/src/index.ts | 39 +- 36 files changed, 2744 insertions(+), 30 deletions(-) create mode 100644 cli-integration-test/package.json create mode 100644 cli-integration-test/src/artifacts.ts create mode 100644 cli-integration-test/src/container.ts create mode 100644 cli-integration-test/src/fixtures.ts create mode 100644 cli-integration-test/test/claude-code-cli.integration.test.ts create mode 100644 cli-integration-test/test/codex.integration.test.ts create mode 100644 cli-integration-test/tsconfig.json create mode 100644 cli-integration-test/tsconfig.test.json create mode 100644 cli-integration-test/vitest.config.ts create mode 100644 cli/src/internal/native-command-bridge.ts diff --git a/.githooks/sync-versions.test.ts b/.githooks/sync-versions.test.ts index 451235a0..75193dbc 100644 --- a/.githooks/sync-versions.test.ts +++ b/.githooks/sync-versions.test.ts @@ -40,6 +40,11 @@ function createFixtureRepo(): string { version: initialVersion, private: true }) + writeJson(join(rootDir, 'cli-integration-test', 'package.json'), { + name: '@truenine/memory-sync-cli-integration-test', + version: initialVersion, + private: true + }) writeJson(join(rootDir, 'cli', 'npm', 'darwin-arm64', 'package.json'), { name: '@truenine/memory-sync-cli-darwin-arm64', version: initialVersion @@ -106,6 +111,7 @@ describe('sync-versions hook', () => { expect(result.versionSource).toBe('cli/npm/darwin-arm64/package.json') expect(JSON.parse(readFileSync(join(rootDir, 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) expect(JSON.parse(readFileSync(join(rootDir, 'cli', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(JSON.parse(readFileSync(join(rootDir, 'cli-integration-test', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) expect(JSON.parse(readFileSync(join(rootDir, 'sdk', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) 
expect(JSON.parse(readFileSync(join(rootDir, 'libraries', 'logger', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) expect(readFileSync(join(rootDir, 'Cargo.toml'), 'utf-8')).toContain(`version = "${nextVersion}"`) @@ -114,6 +120,7 @@ describe('sync-versions hook', () => { expect(stagedFiles).toEqual(new Set([ 'Cargo.toml', 'cli-crate/Cargo.toml', + 'cli-integration-test/package.json', 'cli/npm/darwin-arm64/package.json', 'cli/package.json', 'gui/src-tauri/tauri.conf.json', @@ -142,6 +149,45 @@ describe('sync-versions hook', () => { expect(result.versionSource).toBe('sdk/package.json') expect(JSON.parse(readFileSync(join(rootDir, 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) expect(JSON.parse(readFileSync(join(rootDir, 'cli', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(JSON.parse(readFileSync(join(rootDir, 'cli-integration-test', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(JSON.parse(readFileSync(join(rootDir, 'sdk', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(JSON.parse(readFileSync(join(rootDir, 'libraries', 'logger', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(readFileSync(join(rootDir, 'Cargo.toml'), 'utf-8')).toContain(`version = "${nextVersion}"`) + expect(readFileSync(join(rootDir, 'cli-crate', 'Cargo.toml'), 'utf-8')).toContain(`version = "${nextVersion}"`) + expect(JSON.parse(readFileSync(join(rootDir, 'gui', 'src-tauri', 'tauri.conf.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(stagedFiles).toEqual(new Set([ + 'Cargo.toml', + 'cli-crate/Cargo.toml', + 'cli-integration-test/package.json', + 'cli/npm/darwin-arm64/package.json', + 'cli/package.json', + 'gui/src-tauri/tauri.conf.json', + 'libraries/logger/package.json', + 
'package.json', + 'sdk/package.json' + ])) + }) + + it('accepts cli-integration-test/package.json as a staged version source and propagates it', () => { + const rootDir = createFixtureRepo() + tempDirs.push(rootDir) + + const nextVersion = '2026.10324.10318' + writeJson(join(rootDir, 'cli-integration-test', 'package.json'), { + name: '@truenine/memory-sync-cli-integration-test', + version: nextVersion, + private: true + }) + runGit(rootDir, ['add', 'cli-integration-test/package.json']) + + const result = runSyncVersions({rootDir}) + const stagedFiles = new Set(runGit(rootDir, ['diff', '--cached', '--name-only']).split(/\r?\n/).filter(Boolean)) + + expect(result.targetVersion).toBe(nextVersion) + expect(result.versionSource).toBe('cli-integration-test/package.json') + expect(JSON.parse(readFileSync(join(rootDir, 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(JSON.parse(readFileSync(join(rootDir, 'cli', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) + expect(JSON.parse(readFileSync(join(rootDir, 'cli-integration-test', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) expect(JSON.parse(readFileSync(join(rootDir, 'sdk', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) expect(JSON.parse(readFileSync(join(rootDir, 'libraries', 'logger', 'package.json'), 'utf-8')) as {version: string}).toMatchObject({version: nextVersion}) expect(readFileSync(join(rootDir, 'Cargo.toml'), 'utf-8')).toContain(`version = "${nextVersion}"`) @@ -150,6 +196,7 @@ describe('sync-versions hook', () => { expect(stagedFiles).toEqual(new Set([ 'Cargo.toml', 'cli-crate/Cargo.toml', + 'cli-integration-test/package.json', 'cli/npm/darwin-arm64/package.json', 'cli/package.json', 'gui/src-tauri/tauri.conf.json', diff --git a/.github/workflows/pull-request.yml b/.github/workflows/pull-request.yml index cc4fb100..b8780563 100644 --- 
a/.github/workflows/pull-request.yml +++ b/.github/workflows/pull-request.yml @@ -138,8 +138,27 @@ jobs: - name: Build run: pnpm run build - - name: Run all tests - run: pnpm run test + - name: Run workspace tests except container-backed CLI integration + run: pnpm exec turbo run test --ui=stream --log-order=grouped --filter='!@truenine/memory-sync-cli-integration-test' + + test-cli-integration: + if: github.event.pull_request.draft == false + runs-on: ubuntu-24.04 + timeout-minutes: 45 + steps: + - uses: actions/checkout@v6 + + - uses: ./.github/actions/setup-node-pnpm + + - uses: ./.github/actions/setup-rust + with: + cache-key: pr + + - name: Verify Docker is available + run: docker version + + - name: Run CLI integration tests + run: pnpm -C cli-integration-test run test test-gui: if: github.event.pull_request.draft == false diff --git a/Cargo.lock b/Cargo.lock index 2d8a5ab6..e99c5bc7 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2170,7 +2170,7 @@ dependencies = [ [[package]] name = "memory-sync-gui" -version = "2026.10411.10132" +version = "2026.10412.11551" dependencies = [ "dirs", "proptest", @@ -4505,7 +4505,7 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tnmsc" -version = "2026.10411.10132" +version = "2026.10412.11551" dependencies = [ "base64 0.22.1", "chrono", @@ -4532,7 +4532,7 @@ dependencies = [ [[package]] name = "tnmsc-cli-shell" -version = "2026.10411.10132" +version = "2026.10412.11551" dependencies = [ "clap", "serde_json", @@ -4542,7 +4542,7 @@ dependencies = [ [[package]] name = "tnmsc-logger" -version = "2026.10411.10132" +version = "2026.10412.11551" dependencies = [ "napi", "napi-build", @@ -4553,7 +4553,7 @@ dependencies = [ [[package]] name = "tnmsc-md-compiler" -version = "2026.10411.10132" +version = "2026.10412.11551" dependencies = [ "json5 0.4.1", "markdown", @@ -4569,7 +4569,7 @@ dependencies = [ [[package]] name = "tnmsc-script-runtime" -version = "2026.10411.10132" +version = 
"2026.10412.11551" dependencies = [ "napi", "napi-build", diff --git a/Cargo.toml b/Cargo.toml index 55792f10..d030f77f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -10,7 +10,7 @@ members = [ ] [workspace.package] -version = "2026.10411.10132" +version = "2026.10412.11551" edition = "2024" rust-version = "1.88" license = "AGPL-3.0-only" diff --git a/cli-integration-test/package.json b/cli-integration-test/package.json new file mode 100644 index 00000000..5f90d629 --- /dev/null +++ b/cli-integration-test/package.json @@ -0,0 +1,20 @@ +{ + "name": "@truenine/memory-sync-cli-integration-test", + "private": true, + "type": "module", + "version": "2026.10412.11551", + "description": "Container-backed CLI integration tests for tnmsc", + "scripts": { + "test": "vitest run", + "test:codex": "vitest run test/codex.integration.test.ts", + "test:claude-code": "vitest run test/claude-code-cli.integration.test.ts", + "test:integration": "pnpm run test", + "check:type": "tsc --noEmit -p tsconfig.test.json" + }, + "devDependencies": { + "@types/node": "catalog:", + "testcontainers": "catalog:", + "typescript": "catalog:", + "vitest": "catalog:" + } +} diff --git a/cli-integration-test/src/artifacts.ts b/cli-integration-test/src/artifacts.ts new file mode 100644 index 00000000..5c74fa3e --- /dev/null +++ b/cli-integration-test/src/artifacts.ts @@ -0,0 +1,158 @@ +import {spawnSync} from 'node:child_process' +import {existsSync, mkdirSync, mkdtempSync, readdirSync, rmSync} from 'node:fs' +import {tmpdir} from 'node:os' +import path from 'node:path' +import {fileURLToPath} from 'node:url' + +const REPO_ROOT = fileURLToPath(new URL('../../', import.meta.url)) +const CLI_DIR = path.join(REPO_ROOT, 'cli') +const SCRIPT_RUNTIME_DIR = path.join(REPO_ROOT, 'libraries', 'script-runtime') +const CLI_LINUX_PACKAGE_DIR = path.join(CLI_DIR, 'npm', 'linux-x64-gnu') +const EXPECTED_LINUX_NODE_FILES = 4 +const MAX_BUFFER = 16 * 1024 * 1024 + +export interface CliIntegrationArtifacts { + readonly 
tempDir: string + readonly cliTarballPath: string + readonly linuxTarballPath: string + readonly scriptRuntimeTarballPath: string + readonly latestPnpmVersion: string +} + +let cachedArtifacts: CliIntegrationArtifacts | undefined +let cleanupRegistered = false + +function registerArtifactCleanup(): void { + if (cleanupRegistered) return + + cleanupRegistered = true + process.once('exit', () => { + cleanupCliIntegrationArtifacts() + }) +} + +function runCommand( + command: string, + args: readonly string[], + cwd: string = REPO_ROOT +): string { + const result = spawnSync(command, args, { + cwd, + encoding: 'utf8', + maxBuffer: MAX_BUFFER + }) + + if (result.error != null) throw result.error + if (result.status === 0) return `${result.stdout ?? ''}${result.stderr ?? ''}` + + throw new Error([ + `Command failed: ${command} ${args.join(' ')}`, + `cwd: ${cwd}`, + `${result.stdout ?? ''}${result.stderr ?? ''}`.trim() || 'No output captured.' + ].join('\n')) +} + +function resolveLatestPackageVersion(packageName: string): string { + const raw = runCommand( + 'npm', + ['view', packageName, 'version'], + tmpdir() + ).trim() + const firstLine = raw + .split(/\r?\n/u) + .map(line => line.trim()) + .find(line => line.length > 0) + + if (firstLine != null) return firstLine + + throw new Error(`Failed to resolve the latest version for "${packageName}".`) +} + +function ensureDirectory(dirPath: string): void { + mkdirSync(dirPath, {recursive: true}) +} + +function findSingleTarball(dirPath: string): string { + const tarballs = readdirSync(dirPath) + .filter(fileName => fileName.endsWith('.tgz')) + .sort() + + if (tarballs.length !== 1) { + throw new Error( + `Expected exactly one tarball in "${dirPath}", found ${tarballs.length}.` + ) + } + + return path.join(dirPath, tarballs[0] ?? 
'') +} + +function packWorkspacePackage(packageDir: string, targetDir: string): string { + ensureDirectory(targetDir) + runCommand('pnpm', ['-C', packageDir, 'pack', '--pack-destination', targetDir]) + return findSingleTarball(targetDir) +} + +function ensureLinuxPlatformPackageReady(): void { + const nodeFiles = existsSync(CLI_LINUX_PACKAGE_DIR) + ? readdirSync(CLI_LINUX_PACKAGE_DIR).filter(fileName => fileName.endsWith('.node')) + : [] + + if (nodeFiles.length >= EXPECTED_LINUX_NODE_FILES) return + + runCommand('pnpm', ['-C', CLI_DIR, 'run', 'build:napi:copy']) + + const copiedNodeFiles = readdirSync(CLI_LINUX_PACKAGE_DIR) + .filter(fileName => fileName.endsWith('.node')) + + if (copiedNodeFiles.length < EXPECTED_LINUX_NODE_FILES) { + throw new Error( + `Expected ${EXPECTED_LINUX_NODE_FILES} Linux x64 NAPI artifacts in "${CLI_LINUX_PACKAGE_DIR}", found ${copiedNodeFiles.length}.` + ) + } +} + +function assertSupportedHost(): void { + if (process.platform === 'linux' && process.arch === 'x64') return + + throw new Error( + `cli-integration-test currently supports only linux-x64 hosts. 
Current host: ${process.platform}-${process.arch}.` + ) +} + +export function prepareCliIntegrationArtifacts(): CliIntegrationArtifacts { + if (cachedArtifacts != null) return cachedArtifacts + + assertSupportedHost() + registerArtifactCleanup() + runCommand('pnpm', ['-C', CLI_DIR, 'run', 'build']) + ensureLinuxPlatformPackageReady() + + const tempDir = mkdtempSync(path.join(tmpdir(), 'tnmsc-cli-integration-artifacts-')) + const cliTarballPath = packWorkspacePackage(CLI_DIR, path.join(tempDir, 'cli')) + const linuxTarballPath = packWorkspacePackage( + CLI_LINUX_PACKAGE_DIR, + path.join(tempDir, 'cli-linux-x64') + ) + const scriptRuntimeTarballPath = packWorkspacePackage( + SCRIPT_RUNTIME_DIR, + path.join(tempDir, 'script-runtime') + ) + const latestPnpmVersion = resolveLatestPackageVersion('pnpm') + + cachedArtifacts = { + tempDir, + cliTarballPath, + linuxTarballPath, + scriptRuntimeTarballPath, + latestPnpmVersion + } + + return cachedArtifacts +} + +export function cleanupCliIntegrationArtifacts(): void { + if (cachedArtifacts == null) return + + rmSync(cachedArtifacts.tempDir, {recursive: true, force: true}) + cachedArtifacts = void 0 +} diff --git a/cli-integration-test/src/container.ts b/cli-integration-test/src/container.ts new file mode 100644 index 00000000..907b6e8f --- /dev/null +++ b/cli-integration-test/src/container.ts @@ -0,0 +1,328 @@ +import {spawnSync} from 'node:child_process' +import {statSync} from 'node:fs' +import path from 'node:path' +import type {StartedTestContainer} from 'testcontainers' +import {GenericContainer} from 'testcontainers' + +import type {CliIntegrationArtifacts} from './artifacts' +import { + CONTAINER_EXTERNAL_CWD, + CONTAINER_HOME_DIR, + CONTAINER_WORKSPACE_DIR +} from './fixtures' + +const NODE_IMAGE = 'node:22-trixie' +const BASE_IMAGE_REPOSITORY = 'tnmsc-cli-integration' +const MAX_BUFFER = 16 * 1024 * 1024 + +let cachedPreparedBaseImage: string | undefined +let baseImageCleanupRegistered = false + +export interface 
ContainerExecResult { + readonly command: string + readonly cwd: string + readonly stdout: string + readonly stderr: string + readonly exitCode: number +} + +export interface InstalledCliResolution { + readonly mainPackageDir: string + readonly platformPackageDir: string + readonly resolvedAddonPath: string + readonly scriptRuntimePackagePath: string +} + +export interface CliIntegrationFixture { + readonly homeDir: string + readonly workspaceDir: string +} + +function quoteShell(value: string): string { + return `'${value.replaceAll(`'`, `'\"'\"'`)}'` +} + +function runDockerCommand(args: readonly string[]): { + readonly stdout: string + readonly stderr: string + readonly exitCode: number +} { + const result = spawnSync('docker', args, { + encoding: 'utf8', + maxBuffer: MAX_BUFFER + }) + + if (result.error != null) throw result.error + + return { + stdout: result.stdout ?? '', + stderr: result.stderr ?? '', + exitCode: result.status ?? 1 + } +} + +function assertDockerCommandSucceeded( + args: readonly string[], + result: { + readonly stdout: string + readonly stderr: string + readonly exitCode: number + } +): void { + if (result.exitCode === 0) return + + throw new Error([ + `Docker command failed: docker ${args.join(' ')}`, + `${result.stdout}${result.stderr}`.trim() || 'No output captured.' 
+ ].join('\n')) +} + +function registerBaseImageCleanup(): void { + if (baseImageCleanupRegistered) return + + baseImageCleanupRegistered = true + process.once('exit', () => { + cleanupPreparedCliIntegrationBaseImage() + }) +} + +function cleanupPreparedCliIntegrationBaseImage(): void { + if (cachedPreparedBaseImage == null) return + + runDockerCommand(['rmi', '-f', cachedPreparedBaseImage]) + cachedPreparedBaseImage = void 0 +} + +function shellScript(command: string, cwd: string): string { + return [ + 'set -eu', + `export HOME=${quoteShell(CONTAINER_HOME_DIR)}`, + 'export PNPM_HOME=/pnpm', + 'export PATH="$PNPM_HOME:/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"', + `mkdir -p "$PNPM_HOME" /artifacts ${quoteShell(CONTAINER_WORKSPACE_DIR)} ${quoteShell(CONTAINER_EXTERNAL_CWD)}`, + `cd ${quoteShell(cwd)}`, + command + ].join('\n') +} + +function buildPinnedGlobalCliPlatformLinkScript(): string { + return [ + 'GLOBAL_ROOT="$(pnpm root -g)"', + 'PNPM_STORE_DIR="$(dirname "$GLOBAL_ROOT")/.pnpm"', + 'MAIN_STORE_DIR="$(find "$PNPM_STORE_DIR" -maxdepth 1 -type d -name \'@truenine+memory-sync-cli@file*\' | head -n 1)"', + 'PLATFORM_STORE_DIR="$(find "$PNPM_STORE_DIR" -maxdepth 1 -type d -name \'@truenine+memory-sync-cli-linux-x64-gnu@file*\' | head -n 1)"', + 'test -n "$MAIN_STORE_DIR"', + 'test -n "$PLATFORM_STORE_DIR"', + 'MAIN_PACKAGE_DIR="$MAIN_STORE_DIR/node_modules/@truenine/memory-sync-cli"', + 'PLATFORM_PACKAGE_DIR="$PLATFORM_STORE_DIR/node_modules/@truenine/memory-sync-cli-linux-x64-gnu"', + 'mkdir -p "$MAIN_PACKAGE_DIR/node_modules/@truenine"', + 'rm -rf "$MAIN_PACKAGE_DIR/node_modules/@truenine/memory-sync-cli-linux-x64-gnu"', + 'ln -s "$PLATFORM_PACKAGE_DIR" "$MAIN_PACKAGE_DIR/node_modules/@truenine/memory-sync-cli-linux-x64-gnu"', + 'node -e \'', + 'const {createRequire} = require("node:module");', + 'const path = require("node:path");', + 'const mainPackageDir = process.argv[1];', + 'const requireFromBridge = 
createRequire(path.join(mainPackageDir, "dist", "internal", "native-command-bridge.mjs"));', + 'const addon = requireFromBridge("@truenine/memory-sync-cli-linux-x64-gnu/napi-memory-sync-cli.linux-x64-gnu.node");', + 'const resolvedAddonPath = requireFromBridge.resolve("@truenine/memory-sync-cli-linux-x64-gnu/napi-memory-sync-cli.linux-x64-gnu.node");', + 'if (typeof addon.collectDroidOutputPlan !== "function") {', + ' console.error("Pinned CLI platform package is missing collectDroidOutputPlan.");', + ' process.exit(1);', + '}', + 'if (!resolvedAddonPath.includes("@file+")) {', + ' console.error(`Pinned CLI platform package resolved to a non-local path: ${resolvedAddonPath}`);', + ' process.exit(1);', + '}', + '\' "$MAIN_PACKAGE_DIR"' + ].join('\n') +} + +function containerTarballPath(hostTarballPath: string): string { + return path.posix.join('/artifacts', path.basename(hostTarballPath)) +} + +export class PreparedCliIntegrationContainer { + constructor( + private readonly startedContainer: StartedTestContainer, + private readonly containerId: string + ) {} + + exec(command: string, cwd: string = CONTAINER_EXTERNAL_CWD): ContainerExecResult { + const args = ['exec', this.containerId, 'sh', '-lc', shellScript(command, cwd)] + const result = runDockerCommand(args) + + return { + command, + cwd, + stdout: result.stdout, + stderr: result.stderr, + exitCode: result.exitCode + } + } + + assertExecSuccess( + command: string, + cwd: string = CONTAINER_EXTERNAL_CWD + ): ContainerExecResult { + const result = this.exec(command, cwd) + if (result.exitCode === 0) return result + + throw new Error([ + `Container command failed in "${cwd}": ${command}`, + `${result.stdout}${result.stderr}`.trim() || 'No output captured.' 
+ ].join('\n')) + } + + pathExists(targetPath: string): boolean { + return this.exec(`test -e ${quoteShell(targetPath)}`, '/').exitCode === 0 + } + + readFile(targetPath: string): string { + return this.assertExecSuccess(`cat ${quoteShell(targetPath)}`, '/').stdout + } + + inspectInstalledCliResolution(): InstalledCliResolution { + const script = [ + 'const {createRequire} = require("node:module");', + 'const path = require("node:path");', + 'const globalRoot = process.argv[1];', + 'const mainPackageDir = process.argv[2];', + 'const platformPackageDir = process.argv[3];', + 'const requireFromBridge = createRequire(path.join(mainPackageDir, "dist", "internal", "native-command-bridge.mjs"));', + 'const resolvedAddonPath = requireFromBridge.resolve("@truenine/memory-sync-cli-linux-x64-gnu/napi-memory-sync-cli.linux-x64-gnu.node");', + 'const scriptRuntimePackagePath = requireFromBridge.resolve("@truenine/script-runtime/package.json");', + 'process.stdout.write(JSON.stringify({', + ' mainPackageDir,', + ' platformPackageDir,', + ' resolvedAddonPath,', + ' scriptRuntimePackagePath', + '}));' + ].join(' ') + + const result = this.assertExecSuccess([ + 'GLOBAL_ROOT="$(pnpm root -g)"', + 'PNPM_STORE_DIR="$(dirname "$GLOBAL_ROOT")/.pnpm"', + 'MAIN_STORE_DIR="$(find "$PNPM_STORE_DIR" -maxdepth 1 -type d -name \'@truenine+memory-sync-cli@file*\' | head -n 1)"', + 'PLATFORM_STORE_DIR="$(find "$PNPM_STORE_DIR" -maxdepth 1 -type d -name \'@truenine+memory-sync-cli-linux-x64-gnu@file*\' | head -n 1)"', + 'test -n "$MAIN_STORE_DIR"', + 'test -n "$PLATFORM_STORE_DIR"', + 'MAIN_PACKAGE_DIR="$MAIN_STORE_DIR/node_modules/@truenine/memory-sync-cli"', + 'PLATFORM_PACKAGE_DIR="$PLATFORM_STORE_DIR/node_modules/@truenine/memory-sync-cli-linux-x64-gnu"', + `node -e ${quoteShell(script)} "$GLOBAL_ROOT" "$MAIN_PACKAGE_DIR" "$PLATFORM_PACKAGE_DIR"` + ].join(' && ')) + + return JSON.parse(result.stdout) as InstalledCliResolution + } + + async stop(): Promise { + await 
this.startedContainer.stop() + } + + private copyPathToContainer(sourcePath: string, targetPath: string): void { + const sourceStat = statSync(sourcePath) + const prepareTargetCommand = sourceStat.isDirectory() + ? `mkdir -p ${quoteShell(targetPath)}` + : `mkdir -p ${quoteShell(path.posix.dirname(targetPath))}` + this.assertExecSuccess(prepareTargetCommand, '/') + + const copySource = sourceStat.isDirectory() + ? `${sourcePath}${path.sep}.` + : sourcePath + + const args = ['cp', copySource, `${this.containerId}:${targetPath}`] + const result = runDockerCommand(args) + assertDockerCommandSucceeded(args, result) + } + + copyFixture(fixture: CliIntegrationFixture): void { + this.copyPathToContainer(fixture.homeDir, CONTAINER_HOME_DIR) + this.copyPathToContainer(fixture.workspaceDir, CONTAINER_WORKSPACE_DIR) + } + + copyArtifacts(artifacts: CliIntegrationArtifacts): void { + this.copyPathToContainer(artifacts.cliTarballPath, containerTarballPath(artifacts.cliTarballPath)) + this.copyPathToContainer(artifacts.linuxTarballPath, containerTarballPath(artifacts.linuxTarballPath)) + this.copyPathToContainer( + artifacts.scriptRuntimeTarballPath, + containerTarballPath(artifacts.scriptRuntimeTarballPath) + ) + } + + bootstrapLatestPnpmAndInstallCli(artifacts: CliIntegrationArtifacts): void { + const cliTarball = containerTarballPath(artifacts.cliTarballPath) + const linuxTarball = containerTarballPath(artifacts.linuxTarballPath) + const scriptRuntimeTarball = containerTarballPath(artifacts.scriptRuntimeTarballPath) + + this.assertExecSuccess( + [ + 'corepack enable', + `corepack prepare pnpm@${quoteShell(artifacts.latestPnpmVersion)} --activate`, + 'pnpm --version', + `pnpm add -g ${quoteShell(cliTarball)} ${quoteShell(linuxTarball)} ${quoteShell(scriptRuntimeTarball)}`, + buildPinnedGlobalCliPlatformLinkScript(), + 'command -v tnmsc >/dev/null' + ].join(' && ') + ) + } +} + +async function createPreparedCliIntegrationBaseImage( + artifacts: CliIntegrationArtifacts +): 
Promise { + if (cachedPreparedBaseImage != null) return cachedPreparedBaseImage + + registerBaseImageCleanup() + + const startedContainer = await new GenericContainer(NODE_IMAGE) + .withCommand(['sh', '-lc', 'while true; do sleep 3600; done']) + .start() + + const container = new PreparedCliIntegrationContainer( + startedContainer, + startedContainer.getId() + ) + + const imageTag = `${BASE_IMAGE_REPOSITORY}:${process.pid}-${Date.now()}` + + try { + container.copyArtifacts(artifacts) + container.bootstrapLatestPnpmAndInstallCli(artifacts) + + const commitArgs = ['commit', startedContainer.getId(), imageTag] + const commitResult = runDockerCommand(commitArgs) + assertDockerCommandSucceeded(commitArgs, commitResult) + + cachedPreparedBaseImage = imageTag + return imageTag + } + catch (error) { + runDockerCommand(['rmi', '-f', imageTag]) + throw error + } + finally { + await startedContainer.stop() + } +} + +export async function createPreparedCliIntegrationContainer( + artifacts: CliIntegrationArtifacts, + fixture: CliIntegrationFixture +): Promise { + const baseImage = await createPreparedCliIntegrationBaseImage(artifacts) + const startedContainer = await new GenericContainer(baseImage) + .withCommand(['sh', '-lc', 'while true; do sleep 3600; done']) + .start() + + const container = new PreparedCliIntegrationContainer( + startedContainer, + startedContainer.getId() + ) + + try { + container.copyFixture(fixture) + return container + } catch (error) { + await startedContainer.stop() + throw error + } +} diff --git a/cli-integration-test/src/fixtures.ts b/cli-integration-test/src/fixtures.ts new file mode 100644 index 00000000..f9216231 --- /dev/null +++ b/cli-integration-test/src/fixtures.ts @@ -0,0 +1,429 @@ +import {mkdirSync, mkdtempSync, rmSync, writeFileSync} from 'node:fs' +import {tmpdir} from 'node:os' +import path from 'node:path' + +export const CONTAINER_HOME_DIR = '/root' +export const CONTAINER_WORKSPACE_DIR = '/workspace' +export const 
CONTAINER_EXTERNAL_CWD = '/tmp/tnmsc-external' + +export interface CodexFixtureOptions { + readonly seedGlobalSystemSkill?: boolean + readonly seedGlobalStaleSkill?: boolean +} + +interface FixturePluginFlags { + readonly codex: boolean + readonly claudeCode: boolean + readonly git?: boolean + readonly readme?: boolean +} + +export interface CodexFixture { + readonly rootDir: string + readonly homeDir: string + readonly workspaceDir: string + readonly outputPaths: { + readonly globalCommand: string + readonly workspaceCommand: string + readonly projectAgent: string + readonly projectSkill: string + readonly projectSkillMcp: string + readonly globalSystemSkill: string + readonly globalStaleSkill: string + } + cleanup: () => void +} + +export interface ClaudeCodeFixture { + readonly rootDir: string + readonly homeDir: string + readonly workspaceDir: string + readonly outputPaths: { + readonly globalMemory: string + readonly projectMemory: string + readonly projectCommand: string + readonly projectAgent: string + readonly projectSkill: string + readonly projectRule: string + readonly projectSettings: string + readonly projectSettingsLocal: string + } + cleanup: () => void +} + +function ensureDir(dirPath: string): void { + mkdirSync(dirPath, {recursive: true}) +} + +function writeTextFile(filePath: string, content: string): void { + ensureDir(path.dirname(filePath)) + writeFileSync(filePath, content, 'utf8') +} + +function writeGlobalConfig( + homeDir: string, + plugins: FixturePluginFlags +): void { + const configPath = path.join(homeDir, '.aindex', '.tnmsc.json') + writeTextFile( + configPath, + JSON.stringify( + { + workspaceDir: CONTAINER_WORKSPACE_DIR, + logLevel: 'warn', + plugins + }, + null, + 2 + ) + ) +} + +function writeGlobalMemoryFixtures(workspaceDir: string): void { + writeTextFile( + path.join(workspaceDir, 'aindex', 'global.src.mdx'), + [ + '---', + 'description: 中文全局记忆描述', + '---', + '中文全局记忆内容' + ].join('\n') + ) + writeTextFile( + 
path.join(workspaceDir, 'aindex', 'dist', 'global.mdx'), + [ + '---', + 'description: English global memory description', + '---', + 'English global memory body' + ].join('\n') + ) +} + +function writeCommandFixtures(workspaceDir: string): void { + writeTextFile( + path.join(workspaceDir, 'aindex', 'commands', 'find', 'opensource.src.mdx'), + [ + '---', + 'description: 中文源描述', + 'scope: project', + '---', + '中文源命令内容' + ].join('\n') + ) + writeTextFile( + path.join(workspaceDir, 'aindex', 'dist', 'commands', 'find', 'opensource.mdx'), + [ + '---', + 'description: English dist description', + 'scope: project', + '---', + 'English dist command body' + ].join('\n') + ) +} + +function writeSubAgentFixtures(workspaceDir: string): void { + const sourceContent = [ + '---', + 'description: 审查变更', + 'scope: project', + '---', + '请仔细审查改动。' + ].join('\n') + const distContent = [ + '---', + 'description: Review pull requests', + 'scope: project', + 'nickname_candidates:', + ' - guard', + 'sandbox_mode: workspace-write', + 'mcp_servers:', + ' docs:', + ' command: node', + ' args:', + ' - mcp.js', + '---', + 'Review changes carefully.', + 'Focus on concrete regressions.' 
+ ].join('\n') + + writeTextFile( + path.join(workspaceDir, 'aindex', 'subagents', 'qa', 'reviewer.src.mdx'), + sourceContent + ) + writeTextFile( + path.join(workspaceDir, 'aindex', 'dist', 'subagents', 'qa', 'reviewer.mdx'), + distContent + ) +} + +function writeSkillFixtures(workspaceDir: string): void { + writeTextFile( + path.join(workspaceDir, 'aindex', 'skills', 'ship-it', 'skill.src.mdx'), + [ + '---', + 'description: 中文技能描述', + 'scope: project', + '---', + '中文技能内容' + ].join('\n') + ) + writeTextFile( + path.join(workspaceDir, 'aindex', 'skills', 'ship-it', 'mcp.json'), + JSON.stringify( + { + mcpServers: { + inspector: { + command: 'npx', + args: ['inspector'] + } + } + }, + null, + 2 + ) + ) + writeTextFile( + path.join(workspaceDir, 'aindex', 'dist', 'skills', 'ship-it', 'skill.mdx'), + [ + '---', + 'description: Ship-it skill', + 'scope: project', + '---', + 'English dist skill body' + ].join('\n') + ) +} + +function writeManagedProjectFixtures(workspaceDir: string): void { + ensureDir(path.join(workspaceDir, 'project-a')) + ensureDir(path.join(workspaceDir, 'aindex', 'app', 'project-a')) + ensureDir(path.join(workspaceDir, 'aindex', 'dist', 'app', 'project-a')) +} + +function writeProjectPromptFixtures(workspaceDir: string): void { + writeTextFile( + path.join(workspaceDir, 'aindex', 'app', 'project-a', 'agt.src.mdx'), + [ + '---', + 'description: 中文项目记忆描述', + '---', + '中文项目记忆内容' + ].join('\n') + ) + writeTextFile( + path.join(workspaceDir, 'aindex', 'dist', 'app', 'project-a', 'agt.mdx'), + [ + '---', + 'description: English project memory description', + '---', + 'English project memory body' + ].join('\n') + ) +} + +function writeRuleFixtures(workspaceDir: string): void { + writeTextFile( + path.join(workspaceDir, 'aindex', 'rules', 'qa', 'safe.src.mdx'), + [ + '---', + 'scope: project', + 'description: 中文规则描述', + 'globs:', + ' - "**/*.ts"', + '---', + '中文规则内容' + ].join('\n') + ) + writeTextFile( + path.join(workspaceDir, 'aindex', 'dist', 'rules', 
'qa', 'safe.mdx'), + [ + '---', + 'scope: project', + 'description: English rule description', + 'globs:', + ' - "**/*.ts"', + '---', + 'English rule body' + ].join('\n') + ) +} + +function seedGlobalCodexSkills(homeDir: string, options: CodexFixtureOptions): void { + if (options.seedGlobalSystemSkill === true) { + writeTextFile( + path.join(homeDir, '.codex', 'skills', '.system', 'SKILL.md'), + '# preserved system skill\n' + ) + } + + if (options.seedGlobalStaleSkill === true) { + writeTextFile( + path.join(homeDir, '.codex', 'skills', 'stale-skill', 'SKILL.md'), + '# stale skill\n' + ) + } +} + +export function createCodexFixture( + options: CodexFixtureOptions = {} +): CodexFixture { + const rootDir = mkdtempSync(path.join(tmpdir(), 'tnmsc-codex-fixture-')) + const homeDir = path.join(rootDir, 'home') + const workspaceDir = path.join(rootDir, 'workspace') + + writeGlobalConfig(homeDir, { + codex: true, + claudeCode: false, + git: false, + readme: false + }) + writeCommandFixtures(workspaceDir) + writeSubAgentFixtures(workspaceDir) + writeSkillFixtures(workspaceDir) + writeManagedProjectFixtures(workspaceDir) + seedGlobalCodexSkills(homeDir, options) + + return { + rootDir, + homeDir, + workspaceDir, + outputPaths: { + globalCommand: path.posix.join( + CONTAINER_HOME_DIR, + '.codex', + 'prompts', + 'find-opensource.md' + ), + workspaceCommand: path.posix.join( + CONTAINER_WORKSPACE_DIR, + '.codex', + 'prompts', + 'find-opensource.md' + ), + projectAgent: path.posix.join( + CONTAINER_WORKSPACE_DIR, + 'project-a', + '.codex', + 'agents', + 'qa-reviewer.toml' + ), + projectSkill: path.posix.join( + CONTAINER_WORKSPACE_DIR, + 'project-a', + '.codex', + 'skills', + 'ship-it', + 'SKILL.md' + ), + projectSkillMcp: path.posix.join( + CONTAINER_WORKSPACE_DIR, + 'project-a', + '.codex', + 'skills', + 'ship-it', + 'mcp.json' + ), + globalSystemSkill: path.posix.join( + CONTAINER_HOME_DIR, + '.codex', + 'skills', + '.system', + 'SKILL.md' + ), + globalStaleSkill: 
path.posix.join( + CONTAINER_HOME_DIR, + '.codex', + 'skills', + 'stale-skill', + 'SKILL.md' + ) + }, + cleanup() { + rmSync(rootDir, {recursive: true, force: true}) + } + } +} + +export function createClaudeCodeFixture(): ClaudeCodeFixture { + const rootDir = mkdtempSync(path.join(tmpdir(), 'tnmsc-claude-code-fixture-')) + const homeDir = path.join(rootDir, 'home') + const workspaceDir = path.join(rootDir, 'workspace') + + writeGlobalConfig(homeDir, { + codex: false, + claudeCode: true, + git: false, + readme: false + }) + writeGlobalMemoryFixtures(workspaceDir) + writeCommandFixtures(workspaceDir) + writeSubAgentFixtures(workspaceDir) + writeSkillFixtures(workspaceDir) + writeManagedProjectFixtures(workspaceDir) + writeProjectPromptFixtures(workspaceDir) + writeRuleFixtures(workspaceDir) + + return { + rootDir, + homeDir, + workspaceDir, + outputPaths: { + globalMemory: path.posix.join( + CONTAINER_HOME_DIR, + '.claude', + 'CLAUDE.md' + ), + projectMemory: path.posix.join( + CONTAINER_WORKSPACE_DIR, + 'project-a', + 'CLAUDE.md' + ), + projectCommand: path.posix.join( + CONTAINER_WORKSPACE_DIR, + 'project-a', + '.claude', + 'commands', + 'find-opensource.md' + ), + projectAgent: path.posix.join( + CONTAINER_WORKSPACE_DIR, + 'project-a', + '.claude', + 'agents', + 'qa-reviewer.md' + ), + projectSkill: path.posix.join( + CONTAINER_WORKSPACE_DIR, + 'project-a', + '.claude', + 'skills', + 'ship-it', + 'SKILL.md' + ), + projectRule: path.posix.join( + CONTAINER_WORKSPACE_DIR, + 'project-a', + '.claude', + 'rules', + 'rule-qa-safe.md' + ), + projectSettings: path.posix.join( + CONTAINER_WORKSPACE_DIR, + 'project-a', + '.claude', + 'settings.json' + ), + projectSettingsLocal: path.posix.join( + CONTAINER_WORKSPACE_DIR, + 'project-a', + '.claude', + 'settings.local.json' + ) + }, + cleanup() { + rmSync(rootDir, {recursive: true, force: true}) + } + } +} diff --git a/cli-integration-test/test/claude-code-cli.integration.test.ts 
b/cli-integration-test/test/claude-code-cli.integration.test.ts new file mode 100644 index 00000000..511d770f --- /dev/null +++ b/cli-integration-test/test/claude-code-cli.integration.test.ts @@ -0,0 +1,172 @@ +import type {CliIntegrationArtifacts} from '../src/artifacts' +import type {ClaudeCodeFixture} from '../src/fixtures' +import {beforeAll, describe, expect, it} from 'vitest' +import {prepareCliIntegrationArtifacts} from '../src/artifacts' +import { + PreparedCliIntegrationContainer, + createPreparedCliIntegrationContainer +} from '../src/container' +import { + CONTAINER_EXTERNAL_CWD, + createClaudeCodeFixture +} from '../src/fixtures' + +const supportedHost = process.platform === 'linux' && process.arch === 'x64' +const describeForHost = supportedHost ? describe : describe.skip + +function expectSuccess(exitCode: number): void { + expect(exitCode).toBe(0) +} + +async function withClaudeCodeEnvironment( + artifacts: CliIntegrationArtifacts, + fixture: ClaudeCodeFixture, + run: (container: PreparedCliIntegrationContainer) => Promise +): Promise { + let container: PreparedCliIntegrationContainer | undefined + + try { + container = await createPreparedCliIntegrationContainer(artifacts, fixture) + await run(container) + } + finally { + await container?.stop() + fixture.cleanup() + } +} + +describeForHost('claude code cli integration', () => { + let artifacts: CliIntegrationArtifacts + + beforeAll(() => { + artifacts = prepareCliIntegrationArtifacts() + }) + + it('bootstraps the latest pnpm, resolves local tarballs, and exposes the claude plugin surface', async () => { + const fixture = createClaudeCodeFixture() + + await withClaudeCodeEnvironment(artifacts, fixture, async container => { + const pnpmVersion = container.assertExecSuccess('pnpm --version').stdout.trim() + expect(pnpmVersion).toBe(artifacts.latestPnpmVersion) + + const installedResolution = container.inspectInstalledCliResolution() + 
expect(installedResolution.mainPackageDir).toContain('@truenine+memory-sync-cli@file') + expect(installedResolution.platformPackageDir).toContain('@truenine+memory-sync-cli-linux-x64-gnu@file') + expect(installedResolution.resolvedAddonPath).toContain('@truenine+memory-sync-cli-linux-x64-gnu@file') + expect(installedResolution.scriptRuntimePackagePath).toContain('@truenine+script-runtime@file') + + const help = container.assertExecSuccess('tnmsc help') + expect(help.stdout).toContain('install') + expect(help.stdout).toContain('dry-run') + expect(help.stdout).toContain('clean') + expect(help.stdout).toContain('plugins') + + const plugins = container.assertExecSuccess('tnmsc plugins') + expect(plugins.stdout).toContain('ClaudeCodeCLIOutputAdaptor') + }) + }) + + it('keeps dry-run side effect free for claude outputs', async () => { + const fixture = createClaudeCodeFixture() + + await withClaudeCodeEnvironment(artifacts, fixture, async container => { + const result = container.exec('tnmsc dry-run', CONTAINER_EXTERNAL_CWD) + expectSuccess(result.exitCode) + + expect(container.pathExists(fixture.outputPaths.globalMemory)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectMemory)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectCommand)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectAgent)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectSkill)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectRule)).toBe(false) + }) + }) + + it('installs claude outputs from dist content', async () => { + const fixture = createClaudeCodeFixture() + + await withClaudeCodeEnvironment(artifacts, fixture, async container => { + const result = container.exec('tnmsc', CONTAINER_EXTERNAL_CWD) + expectSuccess(result.exitCode) + + expect(container.pathExists(fixture.outputPaths.globalMemory)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectMemory)).toBe(true) + 
expect(container.pathExists(fixture.outputPaths.projectCommand)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectAgent)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectSkill)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectRule)).toBe(true) + + const globalMemory = container.readFile(fixture.outputPaths.globalMemory) + expect(globalMemory).toContain('English global memory body') + expect(globalMemory).not.toContain('中文全局记忆内容') + + const projectMemory = container.readFile(fixture.outputPaths.projectMemory) + expect(projectMemory).toContain('English project memory body') + expect(projectMemory).not.toContain('中文项目记忆内容') + + const command = container.readFile(fixture.outputPaths.projectCommand) + expect(command).toContain('description: English dist description') + expect(command).toContain('English dist command body') + expect(command).not.toContain('中文源描述') + expect(command).not.toContain('中文源命令内容') + + const agent = container.readFile(fixture.outputPaths.projectAgent) + expect(agent).toContain('name: qa-reviewer') + expect(agent).toContain('description: Review pull requests') + expect(agent).toContain('memory: project') + expect(agent).toContain('Review changes carefully.') + expect(agent).toContain('Focus on concrete regressions.') + + const skill = container.readFile(fixture.outputPaths.projectSkill) + expect(skill).toContain('description: Ship-it skill') + expect(skill).toContain('English dist skill body') + expect(skill).not.toContain('中文技能内容') + + const rule = container.readFile(fixture.outputPaths.projectRule) + expect(rule).toContain('paths:') + expect(rule).toContain('**/*.ts') + expect(rule).toContain('English rule body') + expect(rule).not.toContain('中文规则内容') + }) + }) + + it('supports clean dry-run and clean for claude outputs', async () => { + const fixture = createClaudeCodeFixture() + + await withClaudeCodeEnvironment(artifacts, fixture, async container => { + const installResult = 
container.exec('tnmsc', CONTAINER_EXTERNAL_CWD) + expectSuccess(installResult.exitCode) + + container.assertExecSuccess( + [ + `mkdir -p "$(dirname '${fixture.outputPaths.projectSettings}')"`, + `printf '{"theme":"dark"}\n' > '${fixture.outputPaths.projectSettings}'`, + `printf '{"sandbox":"workspace"}\n' > '${fixture.outputPaths.projectSettingsLocal}'` + ].join(' && '), + '/' + ) + + const cleanDryRunResult = container.exec('tnmsc clean --dry-run', CONTAINER_EXTERNAL_CWD) + expectSuccess(cleanDryRunResult.exitCode) + expect(container.pathExists(fixture.outputPaths.globalMemory)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectMemory)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectCommand)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectAgent)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectSkill)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectRule)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectSettings)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectSettingsLocal)).toBe(true) + + const cleanResult = container.exec('tnmsc clean', CONTAINER_EXTERNAL_CWD) + expectSuccess(cleanResult.exitCode) + expect(container.pathExists(fixture.outputPaths.globalMemory)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectMemory)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectCommand)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectAgent)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectSkill)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectRule)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectSettings)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectSettingsLocal)).toBe(false) + }) + }) +}) diff --git a/cli-integration-test/test/codex.integration.test.ts 
b/cli-integration-test/test/codex.integration.test.ts new file mode 100644 index 00000000..fe400b0b --- /dev/null +++ b/cli-integration-test/test/codex.integration.test.ts @@ -0,0 +1,159 @@ +import type {CliIntegrationArtifacts} from '../src/artifacts' +import type {CodexFixture} from '../src/fixtures' +import {beforeAll, describe, expect, it} from 'vitest' +import {prepareCliIntegrationArtifacts} from '../src/artifacts' +import { + PreparedCliIntegrationContainer, + createPreparedCliIntegrationContainer +} from '../src/container' +import { + CONTAINER_EXTERNAL_CWD, + createCodexFixture +} from '../src/fixtures' + +const supportedHost = process.platform === 'linux' && process.arch === 'x64' +const describeForHost = supportedHost ? describe : describe.skip + +function expectSuccess(exitCode: number): void { + expect(exitCode).toBe(0) +} + +async function withCodexEnvironment( + artifacts: CliIntegrationArtifacts, + fixture: CodexFixture, + run: (container: PreparedCliIntegrationContainer) => Promise +): Promise { + let container: PreparedCliIntegrationContainer | undefined + + try { + container = await createPreparedCliIntegrationContainer(artifacts, fixture) + await run(container) + } + finally { + await container?.stop() + fixture.cleanup() + } +} + +describeForHost('codex cli integration', () => { + let artifacts: CliIntegrationArtifacts + + beforeAll(() => { + artifacts = prepareCliIntegrationArtifacts() + }) + + it('bootstraps the latest pnpm and exposes the installed cli help surface', async () => { + const fixture = createCodexFixture() + + await withCodexEnvironment(artifacts, fixture, async container => { + const pnpmVersion = container.assertExecSuccess('pnpm --version').stdout.trim() + expect(pnpmVersion).toBe(artifacts.latestPnpmVersion) + + const installedResolution = container.inspectInstalledCliResolution() + expect(installedResolution.mainPackageDir).toContain('@truenine+memory-sync-cli@file') + 
expect(installedResolution.platformPackageDir).toContain('@truenine+memory-sync-cli-linux-x64-gnu@file') + expect(installedResolution.resolvedAddonPath).toContain('@truenine+memory-sync-cli-linux-x64-gnu@file') + expect(installedResolution.scriptRuntimePackagePath).toContain('@truenine+script-runtime@file') + + const help = container.assertExecSuccess('tnmsc help') + expect(help.stdout).toContain('install') + expect(help.stdout).toContain('dry-run') + expect(help.stdout).toContain('clean') + expect(help.stdout).toContain('plugins') + + const plugins = container.assertExecSuccess('tnmsc plugins') + expect(plugins.stdout).toContain('CodexCLIOutputAdaptor') + }) + }) + + it('keeps dry-run side effect free for codex outputs', async () => { + const fixture = createCodexFixture() + + await withCodexEnvironment(artifacts, fixture, async container => { + const result = container.exec('tnmsc dry-run', CONTAINER_EXTERNAL_CWD) + expectSuccess(result.exitCode) + + expect(container.pathExists(fixture.outputPaths.globalCommand)).toBe(false) + expect(container.pathExists(fixture.outputPaths.workspaceCommand)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectAgent)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectSkill)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectSkillMcp)).toBe(false) + }) + }) + + it('installs codex outputs from dist content and preserves the built-in system skill directory', async () => { + const fixture = createCodexFixture({ + seedGlobalSystemSkill: true, + seedGlobalStaleSkill: true + }) + + await withCodexEnvironment(artifacts, fixture, async container => { + const result = container.exec('tnmsc', CONTAINER_EXTERNAL_CWD) + expectSuccess(result.exitCode) + + expect(container.pathExists(fixture.outputPaths.globalCommand)).toBe(true) + expect(container.pathExists(fixture.outputPaths.workspaceCommand)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectAgent)).toBe(true) + 
expect(container.pathExists(fixture.outputPaths.projectSkill)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectSkillMcp)).toBe(true) + + const command = container.readFile(fixture.outputPaths.globalCommand) + expect(command).toContain('description: English dist description') + expect(command).toContain('English dist command body') + expect(command).not.toContain('中文源描述') + expect(command).not.toContain('中文源命令内容') + + const agent = container.readFile(fixture.outputPaths.projectAgent) + expect(agent).toContain('name = "qa-reviewer"') + expect(agent).toContain('description = "Review pull requests"') + expect(agent).toContain('developer_instructions = """') + expect(agent).toContain('Review changes carefully.') + expect(agent).toContain('Focus on concrete regressions.') + expect(agent).toContain('nickname_candidates = ["guard"]') + expect(agent).toContain('sandbox_mode = "workspace-write"') + expect(agent).toContain('[mcp_servers.docs]') + + const skill = container.readFile(fixture.outputPaths.projectSkill) + expect(skill).toContain('description: Ship-it skill') + expect(skill).toContain('English dist skill body') + expect(skill).not.toContain('中文技能内容') + + const skillMcp = container.readFile(fixture.outputPaths.projectSkillMcp) + expect(skillMcp).toContain('"inspector"') + expect(skillMcp).toContain('"command": "npx"') + expect(skillMcp).toContain('"args"') + + expect(container.pathExists(fixture.outputPaths.globalSystemSkill)).toBe(true) + expect(container.pathExists(fixture.outputPaths.globalStaleSkill)).toBe(false) + }) + }) + + it('supports clean dry-run and clean while preserving the built-in system skill directory', async () => { + const fixture = createCodexFixture({ + seedGlobalSystemSkill: true + }) + + await withCodexEnvironment(artifacts, fixture, async container => { + const installResult = container.exec('tnmsc', CONTAINER_EXTERNAL_CWD) + expectSuccess(installResult.exitCode) + + const cleanDryRunResult = container.exec('tnmsc clean 
--dry-run', CONTAINER_EXTERNAL_CWD) + expectSuccess(cleanDryRunResult.exitCode) + expect(container.pathExists(fixture.outputPaths.globalCommand)).toBe(true) + expect(container.pathExists(fixture.outputPaths.workspaceCommand)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectAgent)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectSkill)).toBe(true) + expect(container.pathExists(fixture.outputPaths.projectSkillMcp)).toBe(true) + expect(container.pathExists(fixture.outputPaths.globalSystemSkill)).toBe(true) + + const cleanResult = container.exec('tnmsc clean', CONTAINER_EXTERNAL_CWD) + expectSuccess(cleanResult.exitCode) + expect(container.pathExists(fixture.outputPaths.globalCommand)).toBe(false) + expect(container.pathExists(fixture.outputPaths.workspaceCommand)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectAgent)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectSkill)).toBe(false) + expect(container.pathExists(fixture.outputPaths.projectSkillMcp)).toBe(false) + expect(container.pathExists(fixture.outputPaths.globalSystemSkill)).toBe(true) + }) + }) +}) diff --git a/cli-integration-test/tsconfig.json b/cli-integration-test/tsconfig.json new file mode 100644 index 00000000..062a2289 --- /dev/null +++ b/cli-integration-test/tsconfig.json @@ -0,0 +1,59 @@ +{ + "$schema": "https://json.schemastore.org/tsconfig", + "compilerOptions": { + "noUncheckedSideEffectImports": true, + "incremental": true, + "composite": false, + "target": "ESNext", + "lib": ["ESNext"], + "moduleDetection": "force", + "useDefineForClassFields": true, + "module": "ESNext", + "moduleResolution": "Bundler", + "paths": { + "@/*": ["./src/*"] + }, + "resolveJsonModule": true, + "types": ["node"], + "allowImportingTsExtensions": true, + "strict": true, + "strictBindCallApply": true, + "strictFunctionTypes": true, + "strictNullChecks": true, + "strictPropertyInitialization": true, + "allowUnreachableCode": false, + 
"allowUnusedLabels": false, + "alwaysStrict": true, + "exactOptionalPropertyTypes": true, + "noFallthroughCasesInSwitch": true, + "noImplicitAny": true, + "noImplicitOverride": true, + "noImplicitReturns": true, + "noImplicitThis": true, + "noPropertyAccessFromIndexSignature": true, + "noUncheckedIndexedAccess": true, + "noUnusedLocals": true, + "noUnusedParameters": true, + "useUnknownInCatchVariables": true, + "declaration": true, + "declarationMap": true, + "newLine": "lf", + "noEmit": true, + "sourceMap": true, + "allowSyntheticDefaultImports": true, + "esModuleInterop": true, + "forceConsistentCasingInFileNames": true, + "isolatedModules": true, + "verbatimModuleSyntax": true, + "skipLibCheck": true + }, + "include": [ + "src/**/*", + "test/**/*.ts", + "vitest.config.ts" + ], + "exclude": [ + "../node_modules", + "dist" + ] +} diff --git a/cli-integration-test/tsconfig.test.json b/cli-integration-test/tsconfig.test.json new file mode 100644 index 00000000..762e613a --- /dev/null +++ b/cli-integration-test/tsconfig.test.json @@ -0,0 +1,19 @@ +{ + "$schema": "https://json.schemastore.org/tsconfig", + "extends": "./tsconfig.json", + "compilerOptions": { + "types": [ + "vitest/globals", + "node" + ] + }, + "include": [ + "src/**/*.ts", + "test/**/*.ts", + "vitest.config.ts" + ], + "exclude": [ + "../node_modules", + "dist" + ] +} diff --git a/cli-integration-test/vitest.config.ts b/cli-integration-test/vitest.config.ts new file mode 100644 index 00000000..97d07dcd --- /dev/null +++ b/cli-integration-test/vitest.config.ts @@ -0,0 +1,19 @@ +import {fileURLToPath} from 'node:url' + +import {defineConfig} from 'vitest/config' + +export default defineConfig({ + test: { + environment: 'node', + root: fileURLToPath(new URL('./', import.meta.url)), + include: ['test/**/*.test.ts'], + fileParallelism: false, + testTimeout: 300000, + hookTimeout: 300000, + typecheck: { + enabled: true, + tsconfig: './tsconfig.test.json' + }, + onConsoleLog: () => false + } +}) diff --git 
a/cli/npm/darwin-arm64/package.json b/cli/npm/darwin-arm64/package.json index 530e7e37..90e30860 100644 --- a/cli/npm/darwin-arm64/package.json +++ b/cli/npm/darwin-arm64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-arm64", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "os": [ "darwin" ], diff --git a/cli/npm/darwin-x64/package.json b/cli/npm/darwin-x64/package.json index 82291e76..14419158 100644 --- a/cli/npm/darwin-x64/package.json +++ b/cli/npm/darwin-x64/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-darwin-x64", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "os": [ "darwin" ], diff --git a/cli/npm/linux-arm64-gnu/package.json b/cli/npm/linux-arm64-gnu/package.json index 175f7a1a..82b41cae 100644 --- a/cli/npm/linux-arm64-gnu/package.json +++ b/cli/npm/linux-arm64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-arm64-gnu", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "os": [ "linux" ], diff --git a/cli/npm/linux-x64-gnu/package.json b/cli/npm/linux-x64-gnu/package.json index 1f8b96b0..0de722bc 100644 --- a/cli/npm/linux-x64-gnu/package.json +++ b/cli/npm/linux-x64-gnu/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-linux-x64-gnu", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "os": [ "linux" ], diff --git a/cli/npm/win32-x64-msvc/package.json b/cli/npm/win32-x64-msvc/package.json index 9aceef1f..6416ed17 100644 --- a/cli/npm/win32-x64-msvc/package.json +++ b/cli/npm/win32-x64-msvc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-cli-win32-x64-msvc", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "os": [ "win32" ], diff --git a/cli/package.json b/cli/package.json index e1ad9d19..664fd369 100644 --- a/cli/package.json +++ b/cli/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/memory-sync-cli", "type": "module", - "version": "2026.10411.10132", + "version": 
"2026.10412.11551", "description": "TrueNine Memory Synchronization CLI shell", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/cli/scripts/sync-sdk-dist.ts b/cli/scripts/sync-sdk-dist.ts index bbc3e612..912be08e 100644 --- a/cli/scripts/sync-sdk-dist.ts +++ b/cli/scripts/sync-sdk-dist.ts @@ -1,10 +1,11 @@ #!/usr/bin/env tsx import {spawnSync} from 'node:child_process' -import {copyFileSync, cpSync, existsSync, mkdirSync, mkdtempSync, readdirSync, rmSync, writeFileSync} from 'node:fs' +import {copyFileSync, cpSync, existsSync, mkdirSync, mkdtempSync, readdirSync, renameSync, rmSync, writeFileSync} from 'node:fs' import {createRequire} from 'node:module' import {tmpdir} from 'node:os' import {dirname, join, resolve} from 'node:path' +import process from 'node:process' import {fileURLToPath} from 'node:url' const __dirname = dirname(fileURLToPath(import.meta.url)) @@ -12,6 +13,8 @@ const cliDir = resolve(__dirname, '..') const sdkDistDir = resolve(cliDir, '../sdk/dist') const cliDistDir = resolve(cliDir, 'dist') const scriptRuntimeWorkerPath = resolve(cliDistDir, 'script-runtime-worker.mjs') +const builtInternalCommandBridgePath = resolve(cliDistDir, 'native-command-bridge.mjs') +const internalCommandBridgePath = resolve(cliDistDir, 'internal', 'native-command-bridge.mjs') const EXACT_FILES = new Set(['tnmsc.schema.json']) const runtimeRequire = createRequire(import.meta.url) @@ -95,6 +98,46 @@ function ensureBundledJitiRuntimeAssets(): void { copyFileSync(bundledJitiBabelRuntimeSourcePath, bundledJitiBabelRuntimeTargetPath) } +function ensureInternalCommandBridgeBundle(): void { + if (!existsSync(internalCommandBridgePath) && existsSync(builtInternalCommandBridgePath)) { + mkdirSync(dirname(internalCommandBridgePath), {recursive: true}) + rmSync(internalCommandBridgePath, {force: true}) + renameSync(builtInternalCommandBridgePath, internalCommandBridgePath) + } + + if (existsSync(internalCommandBridgePath)) return + + throw new Error( + `Expected 
bundled internal command bridge at "${internalCommandBridgePath}".` + ) +} + +function smokeTestInternalCommandBridge(): void { + const smokeTest = spawnSync( + process.execPath, + [internalCommandBridgePath, 'self-test'], + { + cwd: cliDir, + encoding: 'utf8' + } + ) + + assertProcessSucceeded(smokeTest, [ + `Bundled internal command bridge "${internalCommandBridgePath}" failed the runtime smoke test.` + ]) + + const stdout = smokeTest.stdout.trim() + if (stdout !== '{"ok":true,"command":"self-test"}') { + throw new Error( + [ + `Bundled internal command bridge "${internalCommandBridgePath}" returned an unexpected result.`, + 'Expected: {"ok":true,"command":"self-test"}', + `Actual: ${stdout || '(empty)'}` + ].join('\n') + ) + } +} + function smokeTestScriptRuntimeWorker(): void { if (!existsSync(scriptRuntimeWorkerPath)) { throw new Error(`Expected bundled script runtime worker at "${scriptRuntimeWorkerPath}".`) @@ -149,4 +192,6 @@ function smokeTestScriptRuntimeWorker(): void { syncSdkAssets() ensureBundledJitiRuntimeAssets() +ensureInternalCommandBridgeBundle() +smokeTestInternalCommandBridge() smokeTestScriptRuntimeWorker() diff --git a/cli/src/internal/native-command-bridge.ts b/cli/src/internal/native-command-bridge.ts new file mode 100644 index 00000000..19fb252a --- /dev/null +++ b/cli/src/internal/native-command-bridge.ts @@ -0,0 +1,74 @@ +import type {MemorySyncCommandOptions, MemorySyncCommandResult} from '../../../sdk/src/internal/sdk-binding' +import {writeFile} from 'node:fs/promises' +import process from 'node:process' +import {createTsFallbackMemorySyncBinding} from '../../../sdk/src/internal/sdk-binding' + +type InternalBridgeCommand = 'install' | 'dry-run' | 'clean' | 'self-test' + +interface BridgeSelfTestResult { + readonly ok: true + readonly command: 'self-test' +} + +type BridgeExecutionResult = MemorySyncCommandResult | BridgeSelfTestResult +const INTERNAL_BRIDGE_RESULT_PATH_ENV = 'TNMSC_INTERNAL_COMMAND_BRIDGE_RESULT_PATH' + +function 
isInternalBridgeCommand(value: string): value is Exclude { + return value === 'install' || value === 'dry-run' || value === 'clean' +} + +function normalizeBridgeCommandOptions( + optionsJson?: string +): MemorySyncCommandOptions & {readonly dryRun?: boolean} { + return optionsJson == null || optionsJson.length === 0 + ? {} + : JSON.parse(optionsJson) as MemorySyncCommandOptions & {readonly dryRun?: boolean} +} + +async function executeInternalBridgeCommand( + commandArg: string, + optionsJson?: string +): Promise { + if (commandArg === 'self-test') { + return { + ok: true, + command: 'self-test' + } + } + + if (!isInternalBridgeCommand(commandArg)) { + throw new Error(`Unsupported internal bridge command: ${commandArg}`) + } + + const binding = createTsFallbackMemorySyncBinding() + const options = normalizeBridgeCommandOptions(optionsJson) + + switch (commandArg) { + case 'install': + return binding.install(options) + case 'dry-run': + return binding.dryRun(options) + case 'clean': + return binding.clean(options) + } +} + +async function runInternalBridgeCli(argv: readonly string[] = process.argv.slice(2)): Promise { + const [commandArg = 'self-test', optionsJson] = argv + const result = await executeInternalBridgeCommand(commandArg, optionsJson) + const serialized = JSON.stringify(result) + const resultPath = process.env[INTERNAL_BRIDGE_RESULT_PATH_ENV] + + if (resultPath != null && resultPath.length > 0) { + await writeFile(resultPath, serialized, 'utf8') + return + } + + process.stdout.write(`${serialized}\n`) +} + +void runInternalBridgeCli().catch(error => { + const message = error instanceof Error ? error.stack ?? 
error.message : String(error) + process.stderr.write(`${message}\n`) + process.exitCode = 1 +}) diff --git a/cli/tsdown.config.ts b/cli/tsdown.config.ts index 7bc21152..fcff3722 100644 --- a/cli/tsdown.config.ts +++ b/cli/tsdown.config.ts @@ -21,6 +21,21 @@ export default defineConfig([ }, outputOptions: {exports: 'named'} }, + { + entry: ['./src/internal/native-command-bridge.ts'], + platform: 'node', + sourcemap: false, + unbundle: false, + format: ['esm'], + minify: true, + dts: false, + deps: { + alwaysBundle: alwaysBundleDeps, + onlyBundle: false, + neverBundle: neverBundleDeps + }, + outputOptions: {exports: 'named'} + }, { entry: ['./src/script-runtime-worker.ts'], platform: 'node', diff --git a/doc/package.json b/doc/package.json index fce7cf8c..0141a9ee 100644 --- a/doc/package.json +++ b/doc/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-docs", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "private": true, "description": "Chinese-first manifesto-led documentation site for @truenine/memory-sync.", "engines": { diff --git a/gui/package.json b/gui/package.json index e39ea294..cd107712 100644 --- a/gui/package.json +++ b/gui/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync-gui", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "private": true, "engines": { "node": ">= 22" diff --git a/gui/src-tauri/Cargo.lock b/gui/src-tauri/Cargo.lock index 21b336c5..608b34e9 100644 --- a/gui/src-tauri/Cargo.lock +++ b/gui/src-tauri/Cargo.lock @@ -1774,7 +1774,7 @@ dependencies = [ [[package]] name = "memory-sync-gui" -version = "2026.10329.110" +version = "2026.10412.11551" dependencies = [ "dirs", "json5", diff --git a/gui/src-tauri/Cargo.toml b/gui/src-tauri/Cargo.toml index 003315de..b09bc883 100644 --- a/gui/src-tauri/Cargo.toml +++ b/gui/src-tauri/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "memory-sync-gui" -version = "2026.10411.10132" +version = "2026.10412.11551" description = "Memory Sync desktop GUI 
application" authors.workspace = true edition.workspace = true diff --git a/gui/src-tauri/tauri.conf.json b/gui/src-tauri/tauri.conf.json index 1900e723..0bdae673 100644 --- a/gui/src-tauri/tauri.conf.json +++ b/gui/src-tauri/tauri.conf.json @@ -1,6 +1,6 @@ { "$schema": "https://schema.tauri.app/config/2", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "productName": "Memory Sync", "identifier": "org.truenine.memory-sync", "build": { diff --git a/libraries/logger/package.json b/libraries/logger/package.json index 0c2faab5..ee55fbc8 100644 --- a/libraries/logger/package.json +++ b/libraries/logger/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/logger", "type": "module", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "private": true, "description": "Rust-powered AI-friendly Markdown logger for Node.js via N-API", "license": "AGPL-3.0-only", diff --git a/libraries/md-compiler/package.json b/libraries/md-compiler/package.json index 520ae4a2..6b6ddb08 100644 --- a/libraries/md-compiler/package.json +++ b/libraries/md-compiler/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/md-compiler", "type": "module", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "private": true, "description": "Rust-powered MDX→Markdown compiler for Node.js with shared N-API loading", "license": "AGPL-3.0-only", diff --git a/libraries/script-runtime/package.json b/libraries/script-runtime/package.json index 3d585bfa..1fdedcf0 100644 --- a/libraries/script-runtime/package.json +++ b/libraries/script-runtime/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/script-runtime", "type": "module", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "private": true, "description": "Rust-backed TypeScript proxy runtime for tnmsc", "license": "AGPL-3.0-only", diff --git a/mcp/package.json b/mcp/package.json index 82a30b66..5297f2a6 100644 --- a/mcp/package.json +++ b/mcp/package.json @@ -1,7 +1,7 @@ { "name": 
"@truenine/memory-sync-mcp", "type": "module", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "description": "MCP stdio server for managing memory-sync prompt sources and translation artifacts", "author": "TrueNine", "license": "AGPL-3.0-only", diff --git a/package.json b/package.json index d47a9d5c..d976a144 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@truenine/memory-sync", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "description": "Cross-AI-tool prompt synchronisation toolkit (CLI + Tauri desktop GUI) — one ruleset, multi-target adaptation. Monorepo powered by pnpm + Turbo.", "license": "AGPL-3.0-only", "keywords": [ @@ -31,6 +31,8 @@ "scripts": { "build": "turbo run build --ui=stream --log-order=grouped", "test": "turbo run test --ui=stream --log-order=grouped", + "test:cli-integration:claude-code": "pnpm -C cli-integration-test run test:claude-code", + "test:cli-integration:codex": "pnpm -C cli-integration-test run test:codex", "lint": "turbo run lint --ui=stream --log-order=grouped", "lint:fix": "turbo run lint:fix --ui=stream --log-order=grouped", "check:type": "turbo run check:type --ui=stream --log-order=grouped", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index ba9052c0..fc58892f 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -201,6 +201,9 @@ catalogs: tailwindcss: specifier: ^4.2.2 version: 4.2.2 + testcontainers: + specifier: ^11.14.0 + version: 11.14.0 tsdown: specifier: ^0.21.7 version: 0.21.7 @@ -383,6 +386,21 @@ importers: specifier: 'catalog:' version: 4.3.6 + cli-integration-test: + devDependencies: + '@types/node': + specifier: 'catalog:' + version: 25.6.0 + testcontainers: + specifier: 'catalog:' + version: 11.14.0 + typescript: + specifier: 'catalog:' + version: 6.0.2 + vitest: + specifier: 'catalog:' + version: 4.1.4(@types/node@25.6.0)(@vitest/coverage-v8@4.1.4)(vite@8.0.8(@types/node@25.6.0)(esbuild@0.27.7)(jiti@2.6.1)(sass@1.99.0)(tsx@4.21.0)(yaml@2.8.3)) + 
cli/npm/darwin-arm64: {} cli/npm/darwin-x64: {} @@ -914,6 +932,9 @@ packages: resolution: {integrity: sha512-mOm5ZrYmphGfqVWoH5YYMTITb3cDXsFgmvFlvkvWDMsR9X8RFnt7a0Wb6yNIdoFsiMO9WjYLq+U/FMtqIYAF8Q==} engines: {node: ^20.19.0 || >=22.12.0} + '@balena/dockerignore@1.0.2': + resolution: {integrity: sha512-wMue2Sy4GAVTk6Ic4tJVcnfdau+gx2EnG7S+uAEe+TWJFqE4YoWN4/H8MSLj4eYJKxGg26lZwboEniNiNwZQ6Q==} + '@bcoe/v8-coverage@1.0.2': resolution: {integrity: sha512-6zABk/ECA/QYSCQ1NGiVwwbQerUCZ+TQbp64Q3AgmfNvurHH0j8TtXa1qbShXA6qqkpAj4V5W8pP6mLe1mcMqA==} engines: {node: '>=18'} @@ -1225,6 +1246,20 @@ packages: '@formatjs/intl-localematcher@0.6.2': resolution: {integrity: sha512-XOMO2Hupl0wdd172Y06h6kLpBz6Dv+J4okPLl4LPtzbr8f66WbIoy4ev98EBuZ6ZK4h5ydTN6XneT4QVpD7cdA==} + '@grpc/grpc-js@1.14.3': + resolution: {integrity: sha512-Iq8QQQ/7X3Sac15oB6p0FmUg/klxQvXLeileoqrTRGJYLV+/9tubbr9ipz0GKHjmXVsgFPo/+W+2cA8eNcR+XA==} + engines: {node: '>=12.10.0'} + + '@grpc/proto-loader@0.7.15': + resolution: {integrity: sha512-tMXdRCfYVixjuFK+Hk0Q1s38gV9zDiDJfWL3h1rv4Qc39oILCu1TRTDt7+fGUI8K4G1Fj125Hx/ru3azECWTyQ==} + engines: {node: '>=6'} + hasBin: true + + '@grpc/proto-loader@0.8.0': + resolution: {integrity: sha512-rc1hOQtjIWGxcxpb9aHAfLpIctjEnsDehj0DAiVfBlmT84uvR0uUtN2hEi/ecvWVjXUGf5qPF4qEgiLOx1YIMQ==} + engines: {node: '>=6'} + hasBin: true + '@headlessui/react@2.2.10': resolution: {integrity: sha512-5pVLNK9wlpxTUTy9GpgbX/SdcRh+HBnPktjM2wbiLTH4p+2EPHBO1aoSryUCuKUIItdDWO9ITlhUL8UnUN/oIA==} engines: {node: '>=10'} @@ -1547,6 +1582,10 @@ packages: '@types/node': optional: true + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} + '@jridgewell/gen-mapping@0.3.13': resolution: {integrity: sha512-2kkt/7niJ6MgEPxF0bYdQ6etZaA+fQvDcLKckhy1yIQOzaoKjBBjSj63/aLVjYE3qhRt5dvM+uUyfCg6UKCBbA==} @@ -1563,6 +1602,12 @@ packages: '@jridgewell/trace-mapping@0.3.31': resolution: {integrity: 
sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw==} + '@js-sdsl/ordered-map@4.4.2': + resolution: {integrity: sha512-iUKgm52T8HOE/makSxjqoWhe95ZJA1/G1sYsGev2JDKUSS14KAgg1LHb+Ba+IPow0xflbnSkOsZcO08C7w1gYw==} + + '@kwsites/file-exists@1.1.1': + resolution: {integrity: sha512-m9/5YGR18lIwxSFDwfE3oA7bWuq9kdau6ugN4H2rJeyhFQZcG9AgSHkQtSD15a8WvTgfz9aikZMrKPHvbpqFiw==} + '@mdx-js/mdx@3.1.1': resolution: {integrity: sha512-f6ZO2ifpwAQIpzGWaBQT2TXxPv6z3RBzQKpVftEWN78Vl/YweF1uwussDx8ECAXVtr3Rs89fKyG9YlzUs9DyGQ==} @@ -2419,10 +2464,44 @@ packages: resolution: {integrity: sha512-tmmZ3lQxAe/k/+rNnXQRawJ4NjxO2hqiOLTHvWchtGZULp4RyFeh6aU4XdOYBFe2KE1oShQTv4AblOs2iOrNnQ==} engines: {node: '>= 10.0.0'} + '@pkgjs/parseargs@0.11.0': + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + '@pkgr/core@0.2.9': resolution: {integrity: sha512-QNqXyfVS2wm9hweSYD2O7F0G06uurj9kZ96TRQE5Y9hU7+tgdZwIkbAKc5Ocy1HxEY2kuDQa6cQ1WRs/O5LFKA==} engines: {node: ^12.20.0 || ^14.18.0 || >=16.0.0} + '@protobufjs/aspromise@1.1.2': + resolution: {integrity: sha512-j+gKExEuLmKwvz3OgROXtrJ2UG2x8Ch2YZUxahh+s1F2HZ+wAceUNLkvy6zKCPVRkU++ZWQrdxsUeQXmcg4uoQ==} + + '@protobufjs/base64@1.1.2': + resolution: {integrity: sha512-AZkcAA5vnN/v4PDqKyMR5lx7hZttPDgClv83E//FMNhR2TMcLUhfRUBHCmSl0oi9zMgDDqRUJkSxO3wm85+XLg==} + + '@protobufjs/codegen@2.0.4': + resolution: {integrity: sha512-YyFaikqM5sH0ziFZCN3xDC7zeGaB/d0IUb9CATugHWbd1FRFwWwt4ld4OYMPWu5a3Xe01mGAULCdqhMlPl29Jg==} + + '@protobufjs/eventemitter@1.1.0': + resolution: {integrity: sha512-j9ednRT81vYJ9OfVuXG6ERSTdEL1xVsNgqpkxMsbIabzSo3goCjDIveeGv5d03om39ML71RdmrGNjG5SReBP/Q==} + + '@protobufjs/fetch@1.1.0': + resolution: {integrity: sha512-lljVXpqXebpsijW71PZaCYeIcE5on1w5DlQy5WH6GLbFryLUrBD4932W/E2BSpfRJWseIL4v/KPgBFxDOIdKpQ==} + + '@protobufjs/float@1.0.2': + resolution: {integrity: 
sha512-Ddb+kVXlXst9d+R9PfTIxh1EdNkgoRe5tOX6t01f1lYWOvJnSPDBlG241QLzcyPdoNTsblLUdujGSE4RzrTZGQ==} + + '@protobufjs/inquire@1.1.0': + resolution: {integrity: sha512-kdSefcPdruJiFMVSbn801t4vFK7KB/5gd2fYvrxhuJYg8ILrmn9SKSX2tZdV6V+ksulWqS7aXjBcRXl3wHoD9Q==} + + '@protobufjs/path@1.1.2': + resolution: {integrity: sha512-6JOcJ5Tm08dOHAbdR3GrvP+yUUfkjG5ePsHYczMFLq3ZmMkAD98cDgcT2iA1lJ9NVwFd4tH/iSSoe44YWkltEA==} + + '@protobufjs/pool@1.1.0': + resolution: {integrity: sha512-0kELaGSIDBKvcgS4zkjz1PeddatrjYcmMWOlAuAPwAeccUrPHdUqo/J6LiymHHEiJT5NrF1UVwxY14f+fy4WQw==} + + '@protobufjs/utf8@1.1.0': + resolution: {integrity: sha512-Vvn3zZrhQZkkBE8LSuW3em98c0FwgO4nxzv6OdSxPKJIEKY2bGbHn+mhGIPerzI4twdxaP8/0+06HBpwf345Lw==} + '@quansync/fs@1.0.0': resolution: {integrity: sha512-4TJ3DFtlf1L5LDMaM6CanJ/0lckGNtJcMjQ1NAV6zDmA0tEHKZtxNKin8EgPaVX1YzljbxckyT2tJrpQKAtngQ==} @@ -3131,6 +3210,12 @@ packages: '@types/deep-eql@4.0.2': resolution: {integrity: sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==} + '@types/docker-modem@3.0.6': + resolution: {integrity: sha512-yKpAGEuKRSS8wwx0joknWxsmLha78wNMe9R2S3UNsVOkZded8UqOrV8KoeDXoXsjndxwyF3eIhyClGbO1SEhEg==} + + '@types/dockerode@4.0.1': + resolution: {integrity: sha512-cmUpB+dPN955PxBEuXE3f6lKO1hHiIGYJA46IVF3BJpNsZGvtBDcRnlrHYHtOH/B6vtDOyl2kZ2ShAu3mgc27Q==} + '@types/esrecurse@4.3.1': resolution: {integrity: sha512-xJBAbDifo5hpffDBuHl0Y8ywswbiAp/Wi7Y/GtAgSlZyIABppyurxVueOPE8LUQOxdlgi6Zqce7uoEpqNTeiUw==} @@ -3173,6 +3258,9 @@ packages: '@types/nlcst@2.0.3': resolution: {integrity: sha512-vSYNSDe6Ix3q+6Z7ri9lyWqgGhJTmzRjZRqyq15N0Z/1/UnVsno9G/N40NBijoYx2seFDIl0+B2mgAb9mezUCA==} + '@types/node@18.19.130': + resolution: {integrity: sha512-GRaXQx6jGfL8sKfaIDD6OupbIHBr9jv7Jnaml9tB7l4v068PAOXqfcujMMo5PhbIs6ggR1XODELqahT2R8v0fg==} + '@types/node@25.6.0': resolution: {integrity: sha512-+qIYRKdNYJwY3vRCZMdJbPLJAtGjQBudzZzdzwQYkEPQd+PJGixUL5QfvCLDaULoLv+RhT3LDkwEfKaAkgSmNQ==} @@ -3187,6 +3275,15 @@ packages: 
'@types/react@19.2.14': resolution: {integrity: sha512-ilcTH/UniCkMdtexkoCN0bI7pMcJDvmQFPvuPvmEaYA/NSfFTAgdUSLAoVjaRJm7+6PvcM+q1zYOwS4wTYMF9w==} + '@types/ssh2-streams@0.1.13': + resolution: {integrity: sha512-faHyY3brO9oLEA0QlcO8N2wT7R0+1sHWZvQ+y3rMLwdY1ZyS1z0W3t65j9PqT4HmQ6ALzNe7RZlNuCNE0wBSWA==} + + '@types/ssh2@0.5.52': + resolution: {integrity: sha512-lbLLlXxdCZOSJMCInKH2+9V/77ET2J6NPQHpFI0kda61Dd1KglJs+fPQBchizmzYSOJBgdTajhPqBO1xxLywvg==} + + '@types/ssh2@1.15.5': + resolution: {integrity: sha512-N1ASjp/nXH3ovBHddRJpli4ozpk6UdDYIX4RJWFa9L1YKnzdhTlVmiGHm4DZnj/jLbqZpes4aeR30EFGQtvhQQ==} + '@types/trusted-types@2.0.7': resolution: {integrity: sha512-ScaPdn1dQczgbl0QFTeTOmVHFULt394XJgOQNoyVhZ6r2vLnMLJfBPd53SB52T/3G36VI1/g2MZaX0cwDuXsfw==} @@ -3441,6 +3538,10 @@ packages: resolution: {integrity: sha512-qycIHAucxy/LXAYIjmLmtQ8q9GPnMbnjG1KXhWm9o5sCr6pOYDATkMPiTNa6/v8eELyqOQ2FsEqeoFYmgv/gJg==} engines: {node: '>=14.6'} + abort-controller@3.0.0: + resolution: {integrity: sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==} + engines: {node: '>=6.5'} + accepts@2.0.0: resolution: {integrity: sha512-5cvg6CtKwfgdmVqY1WIiXKc3Q1bkRqGLi+2W/6ao+6Y7gu/RCwRuAhGEzh5B4KlszSuTLgZYuqFqo5bImjNKng==} engines: {node: '>= 0.6'} @@ -3469,6 +3570,18 @@ packages: ajv@8.18.0: resolution: {integrity: sha512-PlXPeEWMXMZ7sPYOHqmDyCJzcfNrUr3fGNKtezX14ykXOEIvyK81d+qydx89KY5O71FKMPaQ2vBfBFI5NHR63A==} + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} + + ansi-regex@6.2.2: + resolution: {integrity: sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg==} + engines: {node: '>=12'} + + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} + ansi-styles@6.2.3: resolution: {integrity: 
sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg==} engines: {node: '>=12'} @@ -3481,6 +3594,14 @@ packages: resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} engines: {node: '>= 8'} + archiver-utils@5.0.2: + resolution: {integrity: sha512-wuLJMmIBQYCsGZgYLTy5FIB2pF6Lfb6cXMSF8Qywwk3t20zWnAi7zLcQFdKQmIB8wyZpY5ER38x08GbwtR2cLA==} + engines: {node: '>= 14'} + + archiver@7.0.1: + resolution: {integrity: sha512-ZcbTaIqJOfCc03QwD468Unz/5Ir8ATtvAHsK+FdXbDIbGfihqh9mrvdcYunQzqn4HrvWWaFyaxJhGZagaJJpPQ==} + engines: {node: '>= 14'} + are-docs-informative@0.0.2: resolution: {integrity: sha512-ixiS0nLNNG5jNQzgZJNoUpBKdo9yTYZMGJ+QgT2jmjR7G7+QHRCc4v6LQ3NgE7EBJq+o0ams3waJwkrlBom8Ig==} engines: {node: '>=14'} @@ -3494,6 +3615,9 @@ packages: array-iterate@2.0.1: resolution: {integrity: sha512-I1jXZMjAgCMmxT4qxXfPXa6SthSoE8h6gkSI9BGGNv8mP8G/v0blc+qFnZu6K42vTOiuME596QaLO0TP3Lk0xg==} + asn1@0.2.6: + resolution: {integrity: sha512-ix/FxPn0MDjeyJ7i/yoHGFt/EX6LyNbxSEhPPXODPL+KB0VPk86UYfL0lMdy+KCnv+fmvIzySwaK5COwqVbWTQ==} + assertion-error@2.0.1: resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} engines: {node: '>=12'} @@ -3513,21 +3637,85 @@ packages: resolution: {integrity: sha512-LElXdjswlqjWrPpJFg1Fx4wpkOCxj1TDHlSV4PlaRxHGWko024xICaa97ZkMfs6DRKlCguiAI+rbXv5GWwXIkg==} hasBin: true + async-lock@1.4.1: + resolution: {integrity: sha512-Az2ZTpuytrtqENulXwO3GGv1Bztugx6TT37NIo7imr/Qo0gsYiGtSdBa2B6fsXhTpVZDNfu1Qn3pk531e3q+nQ==} + + async@3.2.6: + resolution: {integrity: sha512-htCUDlxyyCLMgaM3xXg0C0LW2xqfuQ6p05pCEIsXuyQ+a1koYKTuBMzRNwmybfLgvJDMd0r1LTn4+E0Ti6C2AA==} + + b4a@1.8.0: + resolution: {integrity: sha512-qRuSmNSkGQaHwNbM7J78Wwy+ghLEYF1zNrSeMxj4Kgw6y33O3mXcQ6Ie9fRvfU/YnxWkOchPXbaLb73TkIsfdg==} + peerDependencies: + react-native-b4a: '*' + peerDependenciesMeta: + react-native-b4a: + optional: true + 
babel-dead-code-elimination@1.0.12: resolution: {integrity: sha512-GERT7L2TiYcYDtYk1IpD+ASAYXjKbLTDPhBtYj7X1NuRMDTMtAx9kyBenub1Ev41lo91OHCKdmP+egTDmfQ7Ig==} bail@2.0.2: resolution: {integrity: sha512-0xO6mYd7JB2YesxDKplafRpsiOzPt9V02ddPCLbY1xYGPOX24NTyN50qnUxgCPcSoYMhKpAuBTjQoRZCAkUDRw==} + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + balanced-match@4.0.4: resolution: {integrity: sha512-BLrgEcRTwX2o6gGxGOCNyMvGSp35YofuYzw9h1IMTRmKqttAZZVU67bdb9Pr2vUHA8+j3i2tJfjO6C6+4myGTA==} engines: {node: 18 || 20 || >=22} + bare-events@2.8.2: + resolution: {integrity: sha512-riJjyv1/mHLIPX4RwiK+oW9/4c3TEUeORHKefKAKnZ5kyslbN+HXowtbaVEqt4IMUB7OXlfixcs6gsFeo/jhiQ==} + peerDependencies: + bare-abort-controller: '*' + peerDependenciesMeta: + bare-abort-controller: + optional: true + + bare-fs@4.7.0: + resolution: {integrity: sha512-xzqKsCFxAek9aezYhjJuJRXBIaYlg/0OGDTZp+T8eYmYMlm66cs6cYko02drIyjN2CBbi+I6L7YfXyqpqtKRXA==} + engines: {bare: '>=1.16.0'} + peerDependencies: + bare-buffer: '*' + peerDependenciesMeta: + bare-buffer: + optional: true + + bare-os@3.8.7: + resolution: {integrity: sha512-G4Gr1UsGeEy2qtDTZwL7JFLo2wapUarz7iTMcYcMFdS89AIQuBoyjgXZz0Utv7uHs3xA9LckhVbeBi8lEQrC+w==} + engines: {bare: '>=1.14.0'} + + bare-path@3.0.0: + resolution: {integrity: sha512-tyfW2cQcB5NN8Saijrhqn0Zh7AnFNsnczRcuWODH0eYAXBsJ5gVxAUuNr7tsHSC6IZ77cA0SitzT+s47kot8Mw==} + + bare-stream@2.13.0: + resolution: {integrity: sha512-3zAJRZMDFGjdn+RVnNpF9kuELw+0Fl3lpndM4NcEOhb9zwtSo/deETfuIwMSE5BXanA0FrN1qVjffGwAg2Y7EA==} + peerDependencies: + bare-abort-controller: '*' + bare-buffer: '*' + bare-events: '*' + peerDependenciesMeta: + bare-abort-controller: + optional: true + bare-buffer: + optional: true + bare-events: + optional: true + + bare-url@2.4.0: + resolution: {integrity: sha512-NSTU5WN+fy/L0DDenfE8SXQna4voXuW0FHM7wH8i3/q9khUSchfPbPezO4zSFMnDGIf9YE+mt/RWhZgNRKRIXA==} + + base64-js@1.5.1: + resolution: 
{integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} + baseline-browser-mapping@2.10.17: resolution: {integrity: sha512-HdrkN8eVG2CXxeifv/VdJ4A4RSra1DTW8dc/hdxzhGHN8QePs6gKaWM9pHPcpCoxYZJuOZ8drHmbdpLHjCYjLA==} engines: {node: '>=6.0.0'} hasBin: true + bcrypt-pbkdf@1.0.2: + resolution: {integrity: sha512-qeFIXtP4MSoi6NLqO12WfqARWWuCKi2Rn/9hJLEmtB5yTNr9DqFWkJRCf2qShWzPeAMRnOgCrq0sg/KLv5ES9w==} + before-after-hook@4.0.0: resolution: {integrity: sha512-q6tR3RPqIB1pMiTRMFcZwuG5T8vwp+vUvEG0vuI6B+Rikh5BfPp2fQ82c925FOs+b0lcFQ8CFrL+KbilfZFhOQ==} @@ -3543,6 +3731,9 @@ packages: birpc@4.0.0: resolution: {integrity: sha512-LShSxJP0KTmd101b6DRyGBj57LZxSDYWKitQNW/mi8GRMvZb078Uf9+pveax1DrVL89vm7mWe+TovdI/UDOuPw==} + bl@4.1.0: + resolution: {integrity: sha512-1W07cM9gS6DcLperZfFSj+bWLtaPGSOHWhPiGzXmvVJbRLdG82sH/Kn8EtW1VqWVA54AKf2h5k5BbnIbwF3h6w==} + body-parser@2.2.2: resolution: {integrity: sha512-oP5VkATKlNwcgvxi0vM0p/D3n2C3EReYVX+DNYs5TjZFn/oQt2j+4sVJtSMr18pdRr8wjTcBl6LoV+FUwzPmNA==} engines: {node: '>=18'} @@ -3550,6 +3741,9 @@ packages: boolbase@1.0.0: resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} + brace-expansion@2.1.0: + resolution: {integrity: sha512-TN1kCZAgdgweJhWWpgKYrQaMNHcDULHkWwQIspdtjV4Y5aurRdZpjAqn6yX3FPqTA9ngHCc4hJxMAMgGfve85w==} + brace-expansion@5.0.5: resolution: {integrity: sha512-VZznLgtwhn+Mact9tfiwx64fA9erHH/MCXEUfB/0bX/6Fz6ny5EGTXYltMocqg4xFAQZtnO3DHWWXi8RiuN7cQ==} engines: {node: 18 || 20 || >=22} @@ -3563,10 +3757,28 @@ packages: engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} hasBin: true + buffer-crc32@1.0.0: + resolution: {integrity: sha512-Db1SbgBS/fg/392AblrMJk97KggmvYhr4pB5ZIMTWtaivCPMWLkmb7m21cJvpvgK+J3nsU2CmmixNBZx4vFj/w==} + engines: {node: '>=8.0.0'} + + buffer@5.7.1: + resolution: {integrity: sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==} + + 
buffer@6.0.3: + resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} + + buildcheck@0.0.7: + resolution: {integrity: sha512-lHblz4ahamxpTmnsk+MNTRWsjYKv965MwOrSJyeD588rR3Jcu7swE+0wN5F+PbL5cjgu/9ObkhfzEPuofEMwLA==} + engines: {node: '>=10.0.0'} + builtin-modules@5.0.0: resolution: {integrity: sha512-bkXY9WsVpY7CvMhKSR6pZilZu9Ln5WDrKVBUXf2S443etkmEO4V58heTecXcUIsNsi4Rx8JUO4NfX1IcQl4deg==} engines: {node: '>=18.20'} + byline@5.0.0: + resolution: {integrity: sha512-s6webAy+R4SR8XVuJWt2V2rGvhnrhxN+9S15GNuTK3wKPOXFF6RNc+8ug2XhH+2s4f+uudG4kUVYmYOQWL2g0Q==} + engines: {node: '>=0.10.0'} + bytes@3.1.2: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} engines: {node: '>= 0.8'} @@ -3632,6 +3844,9 @@ packages: resolution: {integrity: sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA==} engines: {node: '>= 14.16.0'} + chownr@1.1.4: + resolution: {integrity: sha512-jJ0bqzaylmJtVnNgzTeSOs8DPavpbYgEr/b0YL8/2GO3xJEhInFmhKMUnEJQjZumK7KXGFhUy89PrsJWlakBVg==} + chroma-js@3.2.0: resolution: {integrity: sha512-os/OippSlX1RlWWr+QDPcGUZs0uoqr32urfxESG9U93lhUfbnlyckte84Q8P1UQY/qth983AS1JONKmLS4T0nw==} @@ -3662,6 +3877,10 @@ packages: resolution: {integrity: sha512-5mOlNS0mhX0707P2I0aZ2V/cmHUEO/fL7VFLqszkhUsxt7RwnmrInf/eEQKlf5GzvYeHIjT+Ov1HRfNmymlG0w==} engines: {node: '>=18'} + cliui@8.0.1: + resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + clsx@2.1.1: resolution: {integrity: sha512-eYm0QWBtUrBWZWG0d386OGAw16Z995PiOVo2B7bjWSbHedGl5e0ZWaq65kOGgUSNesEIDkB9ISbTg/JK9dhCZA==} engines: {node: '>=6'} @@ -3672,6 +3891,13 @@ packages: collapse-white-space@2.1.0: resolution: {integrity: sha512-loKTxY1zCOuG4j9f6EPnuyyYkf58RnhhWTvRoZEokgB+WbdXehfjFviyOVYkqzEWz1Q5kRiZdBYS5SwxbQYwzw==} + color-convert@2.0.1: + resolution: 
{integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + colorette@2.0.20: resolution: {integrity: sha512-IfEDxwoWIjkeXL1eXcDiow4UbKjhLdq6/EuSVR9GMN7KVH3r9gQ83e73hsz1Nd1T3ijd5xv1wcWRYO+D6kCI2w==} @@ -3698,6 +3924,10 @@ packages: resolution: {integrity: sha512-ObxuY6vnbWTN6Od72xfwN9DbzC7Y2vv8u1Soi9ahRKL37gb6y1qk6/dgjs+3JWuXJHWvsg3BXIwzd/rkmAwavg==} engines: {node: '>= 12.0.0'} + compress-commons@6.0.2: + resolution: {integrity: sha512-6FqVXeETqWPoGcfzrXb37E50NP0LXT8kAMu5ooZayhWWdgEY4lBEEcbQNXtkuKQsGduxiIcI4gOTsxTmuq/bSg==} + engines: {node: '>= 14'} + compute-scroll-into-view@3.1.1: resolution: {integrity: sha512-VRhuHOLoKYOy4UbilLbUzbYg93XLjv2PncJC50EuTWPA3gaja1UjBsUP/D/9/juV3vQFr6XBEzn9KCAHdUvOHw==} @@ -3736,6 +3966,9 @@ packages: core-js-compat@3.49.0: resolution: {integrity: sha512-VQXt1jr9cBz03b331DFDCCP90b3fanciLkgiOoy8SBHy06gNf+vQ1A3WFLqG7I8TipYIKeYK9wxd0tUrvHcOZA==} + core-util-is@1.0.3: + resolution: {integrity: sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==} + cors@2.8.6: resolution: {integrity: sha512-tJtZBBHA6vjIAaF6EnIaq6laBBP9aq/Y3ouVJjEfoHbRBcHBAHYcMh/w8LDrk2PvIMMq8gmopa5D4V8RmbrxGw==} engines: {node: '>= 0.10'} @@ -3746,6 +3979,19 @@ packages: cose-base@2.2.0: resolution: {integrity: sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==} + cpu-features@0.0.10: + resolution: {integrity: sha512-9IkYqtX3YHPCzoVg1Py+o9057a3i0fp7S530UWokCSaFVTc7CwXPRiOjRjBQQ18ZCNafx78YfnG+HALxtVmOGA==} + engines: {node: '>=10.0.0'} + + crc-32@1.2.2: + resolution: {integrity: sha512-ROmzCKrTnOwybPcJApAA6WBWij23HVfGVNKqqrZpuyZOHqK2CwHSvpGuyt/UNNvaIjEd8X5IFGp4Mh+Ie1IHJQ==} + engines: {node: '>=0.8'} + hasBin: true + + crc32-stream@6.0.0: + resolution: 
{integrity: sha512-piICUB6ei4IlTv1+653yq5+KoqfBYmj9bw6LqXoOneTMDXk5nM1qt12mFW1caG3LlJXEKW1Bp0WggEmIfQB34g==} + engines: {node: '>= 14'} + cross-spawn@7.0.6: resolution: {integrity: sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA==} engines: {node: '>= 8'} @@ -3968,6 +4214,18 @@ packages: resolution: {integrity: sha512-DPi0FmjiSU5EvQV0++GFDOJ9ASQUVFh5kD+OzOnYdi7n3Wpm9hWWGfB/O2blfHcMVTL5WkQXSnRiK9makhrcnw==} engines: {node: '>=0.3.1'} + docker-compose@1.4.2: + resolution: {integrity: sha512-rPHigTKGaEHpkUmfd69QgaOp+Os5vGJwG/Ry8lcr8W/382AmI+z/D7qoa9BybKIkqNppaIbs8RYeHSevdQjWww==} + engines: {node: '>= 6.0.0'} + + docker-modem@5.0.7: + resolution: {integrity: sha512-XJgGhoR/CLpqshm4d3L7rzH6t8NgDFUIIpztYlLHIApeJjMZKYJMz2zxPsYxnejq5h3ELYSw/RBsi3t5h7gNTA==} + engines: {node: '>= 8.0'} + + dockerode@4.0.10: + resolution: {integrity: sha512-8L/P9JynLBiG7/coiA4FlQXegHltRqS0a+KqI44P1zgQh8QLHTg7FKOwhkBgSJwZTeHsq30WRoVFLuwkfK0YFg==} + engines: {node: '>= 8.0'} + dompurify@3.2.7: resolution: {integrity: sha512-WhL/YuveyGXJaerVlMYGWhvQswa7myDG17P7Vu65EWC05o8vfeNbvNf4d/BOvH99+ZW+LlQsc1GDKMa1vNK6dw==} @@ -3987,6 +4245,9 @@ packages: resolution: {integrity: sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==} engines: {node: '>= 0.4'} + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + ee-first@1.1.1: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} @@ -4001,6 +4262,12 @@ packages: node-addon-api: optional: true + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + empathic@2.0.0: resolution: 
{integrity: sha512-i6UzDscO/XfAcNYD75CfICkmfLedpyPDdozrLMmQc5ORaQcdMoc21OnlEylMIqI7U8eniKrPMxxtj8k0vhmJhA==} engines: {node: '>=14'} @@ -4009,6 +4276,9 @@ packages: resolution: {integrity: sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==} engines: {node: '>= 0.8'} + end-of-stream@1.4.5: + resolution: {integrity: sha512-ooEGc6HP26xXq/N+GCGOT0JKCLDGrq2bQUZrQ7gyrJiZANJ/8YDTxTpQBXGMn+WbIQXNVpyWymm7KYVICQnyOg==} + enhanced-resolve@5.20.1: resolution: {integrity: sha512-Qohcme7V1inbAfvjItgw0EaxVX5q2rdVEZHRBrEQdRZTssLDGsL8Lwrznl8oQ/6kuTJONLaDcGjkNP247XEhcA==} engines: {node: '>=10.13.0'} @@ -4338,12 +4608,19 @@ packages: resolution: {integrity: sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==} engines: {node: '>= 0.6'} + event-target-shim@5.0.1: + resolution: {integrity: sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==} + engines: {node: '>=6'} + eventemitter3@2.0.3: resolution: {integrity: sha512-jLN68Dx5kyFHaePoXWPsCGW5qdyZQtLYHkxkg02/Mz6g0kYpDx4FyP6XfArhQdlOC4b8Mv+EMxPo/8La7Tzghg==} eventemitter3@5.0.4: resolution: {integrity: sha512-mlsTRyGaPBjPedk6Bvw+aqbsXDtoAyAzm5MO7JgU+yVRyMQ5O8bD4Kcci7BS85f93veegeCPkL8R4GLClnjLFw==} + events-universal@1.0.1: + resolution: {integrity: sha512-LUd5euvbMLpwOF8m6ivPCbhQeSiYVNb8Vs0fQ8QjXo0JTkEHpz8pxdQf0gStltaPpw0Cca8b39KxvK9cfKRiAw==} + events@3.3.0: resolution: {integrity: sha512-mQw+2fkQbALzQ7V0MY0IqdnXNOeTtP4r0lN9z7AAawCXgqea7bDii20AYrIBrFd/Hx0M2Ocz6S111CaFkUcb0Q==} engines: {node: '>=0.8.x'} @@ -4393,6 +4670,9 @@ packages: fast-diff@1.3.0: resolution: {integrity: sha512-VxPP4NqbUjj6MaAOafWeUn2cXWLcCtljklUtZf0Ind4XQ+QPtmA0b18zZy0jIQx+ExRVCR/ZQpBmik5lXshNsw==} + fast-fifo@1.3.2: + resolution: {integrity: sha512-/d9sfos4yxzpwkDkuN7k2SqFKtYNmCTzgfEpz82x34IM9/zc8KGxQoXg1liNC/izpRM/MBdt44Nmx41ZWqk+FQ==} + fast-glob@3.3.1: resolution: {integrity: 
sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg==} engines: {node: '>=8.6.0'} @@ -4470,6 +4750,10 @@ packages: flatted@3.4.2: resolution: {integrity: sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==} + foreground-child@3.3.1: + resolution: {integrity: sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw==} + engines: {node: '>=14'} + format@0.2.2: resolution: {integrity: sha512-wzsgA6WOq+09wrU1tsJ09udeR/YZRaeArL9e1wPbFg3GG2yDnC2ldKpxs4xunpFF9DgqCqOIra3bc1HWrJ37Ww==} engines: {node: '>=0.4.x'} @@ -4482,6 +4766,9 @@ packages: resolution: {integrity: sha512-Rx/WycZ60HOaqLKAi6cHRKKI7zxWbJ31MhntmtwMoaTeF7XFH9hhBp8vITaMidfljRQ6eYWCKkaTK+ykVJHP2A==} engines: {node: '>= 0.8'} + fs-constants@1.0.0: + resolution: {integrity: sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==} + fs-extra@11.3.4: resolution: {integrity: sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==} engines: {node: '>=14.14'} @@ -4498,10 +4785,18 @@ packages: resolution: {integrity: sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==} engines: {node: '>=6.9.0'} + get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + get-intrinsic@1.3.0: resolution: {integrity: sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==} engines: {node: '>= 0.4'} + get-port@7.2.0: + resolution: {integrity: sha512-afP4W205ONCuMoPBqcR6PSXnzX35KTcJygfJfcp+QY+uwm3p20p1YczWXhlICIzGMCxYBQcySEcOgsJcrkyobg==} + engines: {node: '>=16'} + get-proto@1.0.1: resolution: {integrity: sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==} engines: {node: '>= 0.4'} @@ 
-4524,6 +4819,11 @@ packages: resolution: {integrity: sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A==} engines: {node: '>=10.13.0'} + glob@10.5.0: + resolution: {integrity: sha512-DfXN8DfhJ7NH3Oe7cFmu3NCu1wKbkReJ8TorzSAFbSKrlNaQSKfIzqYqVY8zlbs2NLBbWpRiU52GX2PbaBVNkg==} + deprecated: Old versions of glob are not supported, and contain widely publicized security vulnerabilities, which have been fixed in the current version. Please update. Support for old versions may be purchased (at exorbitant rates) by contacting i@izs.me + hasBin: true + globals@15.15.0: resolution: {integrity: sha512-7ACyT3wmyp3I61S4fG682L0VA2RGD9otkqGJIwNUMF1SWUombIIk+af1unuDYgMm082aHYwD+mzJvv9Iu8dsgg==} engines: {node: '>=18'} @@ -4634,6 +4934,9 @@ packages: resolution: {integrity: sha512-im9DjEDQ55s9fL4EYzOAv0yMqmMBSZp6G0VvFyTMPKWxiSBHUj9NW/qqLmXUwXrrM7AvqSlTCfvqRb0cM8yYqw==} engines: {node: '>=0.10.0'} + ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + ignore@5.3.2: resolution: {integrity: sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g==} engines: {node: '>= 4'} @@ -4713,6 +5016,10 @@ packages: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + is-glob@4.0.3: resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} engines: {node: '>=0.10.0'} @@ -4736,6 +5043,10 @@ packages: is-promise@4.0.0: resolution: {integrity: sha512-hvpoI6korhJMnej285dSg6nu1+e6uxs7zG3BYAm5byqDsgJNWwxzM6z6iZiAgQR4TJ30JmBTOwqZUw3WlyH3AQ==} + is-stream@2.0.1: + resolution: {integrity: 
sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + is-stream@3.0.0: resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==} engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} @@ -4748,6 +5059,9 @@ packages: resolution: {integrity: sha512-jv+8jaWCl0g2lSBkNSVXdzfBA0npK1HGC2KtWM9FumFRoGS94g3NbCCLVnCYHLjp4GrW2KZeeSTMo5ddtznmGw==} engines: {node: '>=18'} + isarray@1.0.0: + resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} + isbot@5.1.37: resolution: {integrity: sha512-5bcicX81xf6NlTEV8rWdg7Pk01LFizDetuYGHx6d/f6y3lR2/oo8IfxjzJqn1UdDEyCcwT9e7NRloj8DwCYujQ==} engines: {node: '>=18'} @@ -4771,6 +5085,9 @@ packages: resolution: {integrity: sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==} engines: {node: '>=8'} + jackspeak@3.4.3: + resolution: {integrity: sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw==} + jiti@2.6.1: resolution: {integrity: sha512-ekilCSN1jwRvIbgeg/57YFh8qQDNbwDb9xT/qu2DAHbFFZUicIl4ygVaAvzveMhMVr3LnpSKTNnwt8PoOfmKhQ==} hasBin: true @@ -4859,6 +5176,10 @@ packages: layout-base@2.0.1: resolution: {integrity: sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==} + lazystream@1.0.1: + resolution: {integrity: sha512-b94GiNHQNy6JNTrt5w6zNyffMrNkXZb3KTkCZJb2V1xaEGCk093vkZ2jk3tpaeP33/OiXC+WvK9AxUebnf5nbw==} + engines: {node: '>= 0.6.3'} + levn@0.4.1: resolution: {integrity: sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ==} engines: {node: '>= 0.8.0'} @@ -4948,12 +5269,24 @@ packages: lodash-es@4.18.1: resolution: {integrity: sha512-J8xewKD/Gk22OZbhpOVSwcs60zhd95ESDwezOFuA3/099925PdHJ7OFHNTGtajL3AlZkykD32HykiMo+BIBI8A==} + lodash.camelcase@4.3.0: + resolution: {integrity: 
sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA==} + lodash.merge@4.6.2: resolution: {integrity: sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ==} + lodash@4.18.1: + resolution: {integrity: sha512-dMInicTPVE8d1e5otfwmmjlxkZoUpiVLwyeTdUsi/Caj/gfzzblBcCE5sRHV/AsjuCmxWrte2TNGSYuCeCq+0Q==} + + long@5.3.2: + resolution: {integrity: sha512-mNAgZ1GmyNhD7AuqnTG3/VQ26o760+ZYBPKjPvugO8+nLbYfX6TVpJPseBvopbdY+qpZ/lKUnmEc1LeZYS3QAA==} + longest-streak@3.1.0: resolution: {integrity: sha512-9Ri+o0JYgehTaVBBDoMqIl8GXtbWg711O3srftcHhZ0dqnETqLaoIK0x17fUw9rFSlK/0NlsKe0Ahhyl5pXE2g==} + lru-cache@10.4.3: + resolution: {integrity: sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==} + lru-cache@5.1.1: resolution: {integrity: sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==} @@ -5211,9 +5544,29 @@ packages: resolution: {integrity: sha512-MULkVLfKGYDFYejP07QOurDLLQpcjk7Fw+7jXS2R2czRQzR56yHRveU5NDJEOviH+hETZKSkIk5c+T23GjFUMg==} engines: {node: 18 || 20 || >=22} + minimatch@5.1.9: + resolution: {integrity: sha512-7o1wEA2RyMP7Iu7GNba9vc0RWWGACJOCZBJX2GJWip0ikV+wcOsgVuY9uE8CPiyQhkGFSlhuSkZPavN7u1c2Fw==} + engines: {node: '>=10'} + + minimatch@9.0.9: + resolution: {integrity: sha512-OBwBN9AL4dqmETlpS2zasx+vTeWclWzkblfZk7KTA5j3jeOONz/tRCnZomUyvNg83wL5Zv9Ss6HMJXAgL8R2Yg==} + engines: {node: '>=16 || 14 >=14.17'} + + minipass@7.1.3: + resolution: {integrity: sha512-tEBHqDnIoM/1rXME1zgka9g6Q2lcoCkxHLuc7ODJ5BxbP5d4c2Z5cGgtXAku59200Cx7diuHTOYfSBD8n6mm8A==} + engines: {node: '>=16 || 14 >=14.17'} + mj-context-menu@0.6.1: resolution: {integrity: sha512-7NO5s6n10TIV96d4g2uDpG7ZDpIhMh0QNfGdJw/W47JswFcosz457wqz/b5sAKvl12sxINGFCn80NZHKwxQEXA==} + mkdirp-classic@0.5.3: + resolution: {integrity: sha512-gKLcREMhtuZRwRAfqP3RFW+TK4JqApVBtOIftVgjuABpAtpxhPGaDcfvbhNvD0B8iD1oUr/txX35NjcaY6Ns/A==} + + mkdirp@3.0.1: + resolution: 
{integrity: sha512-+NsyUUAZDmo6YVHzL/stxSu3t9YS1iljliy3BSDrXJ/dkn1KYdmtZODGGjLcc9XLgVVpH4KshHB8XmZgMhaBXg==} + engines: {node: '>=10'} + hasBin: true + mlly@1.8.2: resolution: {integrity: sha512-d+ObxMQFmbt10sretNDytwt85VrbkhhUA/JBGm1MPaWJ65Cl4wOgLaB1NYvJSZ0Ef03MMEU/0xpPMXUIQ29UfA==} @@ -5230,6 +5583,9 @@ packages: resolution: {integrity: sha512-dkEJPVvun4FryqBmZ5KhDo0K9iDXAwn08tMLDinNdRBNPcYEDiWYysLcc6k3mjTMlbP9KyylvRpd4wFtwrT9rw==} engines: {node: ^20.17.0 || >=22.9.0} + nan@2.26.2: + resolution: {integrity: sha512-0tTvBTYkt3tdGw22nrAy50x7gpbGCCFH3AFcyS5WiUu7Eu4vWlri1woE6qHBSfy11vksDqkiwjOnlR7WV8G1Hw==} + nanoid@3.3.11: resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} @@ -5373,6 +5729,9 @@ packages: resolution: {integrity: sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw==} engines: {node: '>=10'} + package-json-from-dist@1.0.1: + resolution: {integrity: sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==} + package-manager-detector@1.6.0: resolution: {integrity: sha512-61A5ThoTiDG/C8s8UMZwSorAGwMJ0ERVGj2OjoW5pAalsNOg15+iQiPzrLJ4jhZ1HJzmC2PIHT2oEiH3R5fzNA==} @@ -5424,6 +5783,10 @@ packages: resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==} engines: {node: '>=12'} + path-scurry@1.11.1: + resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + path-to-regexp@8.4.2: resolution: {integrity: sha512-qRcuIdP69NPm4qbACK+aDogI5CBDMi1jKe0ry5rSQJz8JVLsC7jV8XpiJjGRLLol3N+R5ihGYcrPLTno6pAdBA==} @@ -5494,13 +5857,34 @@ packages: engines: {node: '>=14'} hasBin: true + process-nextick-args@2.0.1: + resolution: {integrity: 
sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==} + + process@0.11.10: + resolution: {integrity: sha512-cdGef/drWFoydD1JsMzuFf8100nZl+GT+yacc2bEced5f9Rjk4z+WtFUTBu9PhOi9j/jfmBPu0mMEY4wIdAF8A==} + engines: {node: '>= 0.6.0'} + + proper-lockfile@4.1.2: + resolution: {integrity: sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA==} + + properties-reader@3.0.1: + resolution: {integrity: sha512-WPn+h9RGEExOKdu4bsF4HksG/uzd3cFq3MFtq8PsFeExPse5Ha/VOjQNyHhjboBFwGXGev6muJYTSPAOkROq2g==} + engines: {node: '>=18'} + property-information@7.1.0: resolution: {integrity: sha512-TwEZ+X+yCJmYfL7TPUOcvBZ4QfoT5YenQiJuX//0th53DE6w0xxLEtfK3iyryQFddXuvkIk51EEgrJQ0WJkOmQ==} + protobufjs@7.5.4: + resolution: {integrity: sha512-CvexbZtbov6jW2eXAvLukXjXUW1TzFaivC46BpWc/3BpcCysb5Vffu+B3XHMm8lVEuy2Mm4XGex8hBSg1yapPg==} + engines: {node: '>=12.0.0'} + proxy-addr@2.0.7: resolution: {integrity: sha512-llQsMLSUDUPT44jdrU/O37qlnifitDP+ZwrmmZcoSKyLKvtZxpyV0n2/bD/N4tBAAZ/gJEdZU7KMraoK1+XYAg==} engines: {node: '>= 0.10'} + pump@3.0.4: + resolution: {integrity: sha512-VS7sjc6KR7e1ukRFhQSY5LM2uBWAUPiOPa/A3mkKmiMwSmRFUITt0xuj+/lesgnCv+dPIEYlkzrcyXgquIHMcA==} + punycode@2.3.1: resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} engines: {node: '>=6'} @@ -5568,6 +5952,20 @@ packages: resolution: {integrity: sha512-qpt8EwugBWDw2cgE2W+/3oxC+KTez2uSVR8JU9Q36TXPAGCaozfQUs59v4j4GFpWTaw0i6hAZSvOmu1J0uOEUg==} engines: {node: ^18.17.0 || >=20.5.0} + readable-stream@2.3.8: + resolution: {integrity: sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==} + + readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + + readable-stream@4.7.0: + resolution: {integrity: 
sha512-oIGGmcpTLwPga8Bn6/Z75SVaH1z5dUut2ibSyAMVhmUggWpmDn2dapB0n7f8nwaSiRtepAsfJyfXIO5DCVAODg==} + engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} + + readdir-glob@1.1.3: + resolution: {integrity: sha512-v05I2k7xN8zXvPD9N+z/uhXPaj0sUFCe2rcWZIpBsqxfP7xXFQ0tipAd/wjj1YxWyWtUS5IDJpOG82JKt2EAVA==} + readdirp@3.6.0: resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} @@ -5688,6 +6086,10 @@ packages: resolution: {integrity: sha512-U7XpAktpbSgHTRSNRrjKSrjYkZKuhUukfoBlXWXUExCAqhzh1TU3BDRAfJmarcl5voKS+pbKU9MvyLWKZ4UEEg==} engines: {node: '>= 0.8.0'} + require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + require-from-string@2.0.2: resolution: {integrity: sha512-Xf0nWe6RseziFMu+Ap9biiUbmplq6S9/p+7w7YXP/JBHhrUDDUhwa+vANyubuqfZWTveU//DYVGsDG7RKL/vEw==} engines: {node: '>=0.10.0'} @@ -5714,6 +6116,10 @@ packages: retext@9.0.0: resolution: {integrity: sha512-sbMDcpHCNjvlheSgMfEcVrZko3cDzdbe1x/e7G66dFp0Ff7Mldvi2uv6JkJQzdRcvLYE8CA8Oe8siQx8ZOgTcA==} + retry@0.12.0: + resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==} + engines: {node: '>= 4'} + reusify@1.1.0: resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -5763,6 +6169,12 @@ packages: rw@1.3.3: resolution: {integrity: sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==} + safe-buffer@5.1.2: + resolution: {integrity: sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==} + + safe-buffer@5.2.1: + resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} + safer-buffer@2.1.2: 
resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} @@ -5852,6 +6264,9 @@ packages: siginfo@2.0.0: resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} + signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + signal-exit@4.1.0: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} @@ -5895,6 +6310,16 @@ packages: resolution: {integrity: sha512-SBMgkuJYvP4F62daRfBNwYC2nXTEhNXAfsBZ/BB7Ly85/KnbnjmKM7/45ZrFbH6jIMiAliDUDPSZFUuXDvcg6A==} hasBin: true + split-ca@1.0.1: + resolution: {integrity: sha512-Q5thBSxp5t8WPTTJQS59LrGqOZqOsrhDGDVm8azCqIBjSBd7nd9o2PM+mDulQQkh8h//4U6hFZnc/mul8t5pWQ==} + + ssh-remote-port-forward@1.0.4: + resolution: {integrity: sha512-x0LV1eVDwjf1gmG7TTnfqIzf+3VPRz7vrNIjX6oYLbeCrf/PeVY6hkT68Mg+q02qXxQhrLjB0jfgvhevoCRmLQ==} + + ssh2@1.17.0: + resolution: {integrity: sha512-wPldCk3asibAjQ/kziWQQt1Wh3PgDFpC0XpwclzKcdT1vql6KeYxf5LIt4nlFkUeR8WuphYMKqUA56X4rjbfgQ==} + engines: {node: '>=10.16.0'} + stackback@0.0.2: resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} @@ -5908,9 +6333,34 @@ packages: std-env@4.0.0: resolution: {integrity: sha512-zUMPtQ/HBY3/50VbpkupYHbRroTRZJPRLvreamgErJVys0ceuzMkD44J/QjqhHjOzK42GQ3QZIeFG1OYfOtKqQ==} + streamx@2.25.0: + resolution: {integrity: sha512-0nQuG6jf1w+wddNEEXCF4nTg3LtufWINB5eFEN+5TNZW7KWJp6x87+JFL43vaAUPyCfH1wID+mNVyW6OHtFamg==} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@5.1.2: + resolution: {integrity: 
sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + string_decoder@1.1.1: + resolution: {integrity: sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==} + + string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + stringify-entities@4.0.4: resolution: {integrity: sha512-IwfBptatlO+QCJUo19AqvrPNqlVMpW9YEL2LIVY+Rpv2qsjCGxaDLNRgeGsQWJhfItebuJhsGSLjaBbNSQ+ieg==} + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.2.0: + resolution: {integrity: sha512-yDPMNjp4WyfYBkHnjIRLfca1i6KMyGCtsVgoKe/z1+6vukgaENdgGBZt+ZmKPc4gavvEZ5OgHfHdrazhgNyG7w==} + engines: {node: '>=12'} + strip-final-newline@3.0.0: resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} engines: {node: '>=12'} @@ -5969,6 +6419,28 @@ packages: resolution: {integrity: sha512-1MOpMXuhGzGL5TTCZFItxCc0AARf1EZFQkGqMm7ERKj8+Hgr5oLvJOVFcC+lRmR8hCe2S3jC4T5D7Vg/d7/fhA==} engines: {node: '>=6'} + tar-fs@2.1.4: + resolution: {integrity: sha512-mDAjwmZdh7LTT6pNleZ05Yt65HC3E+NiQzl672vQG38jIrehtJk/J3mNwIg+vShQPcLF/LV7CMnDW6vjj6sfYQ==} + + tar-fs@3.1.2: + resolution: {integrity: sha512-QGxxTxxyleAdyM3kpFs14ymbYmNFrfY+pHj7Z8FgtbZ7w2//VAgLMac7sT6nRpIHjppXO2AwwEOg0bPFVRcmXw==} + + tar-stream@2.2.0: + resolution: {integrity: sha512-ujeqbceABgwMZxEJnk2HDY2DlnUZ+9oEcb1KzTVfYHio0UE6dG71n60d8D2I4qNvleWrrXpmjpt7vZeF1LnMZQ==} + engines: {node: '>=6'} + + tar-stream@3.1.8: + resolution: {integrity: sha512-U6QpVRyCGHva435KoNWy9PRoi2IFYCgtEhq9nmrPPpbRacPs9IH4aJ3gbrFC8dPcXvdSZ4XXfXT5Fshbp2MtlQ==} + + teex@1.0.1: + resolution: {integrity: sha512-eYE6iEI62Ni1H8oIa7KlDU6uQBtqr4Eajni3wX7rpfXD8ysFx8z0+dri+KWEPWpBsxXfxu58x/0jvTVT1ekOSg==} + + 
testcontainers@11.14.0: + resolution: {integrity: sha512-r9pniwv/iwzyHaI7gwAvAm4Y+IvjJg3vBWdjrUCaDMc2AXIr4jKbq7jJO18Mw2ybs73pZy1Aj7p/4RVBGMRWjg==} + + text-decoder@1.2.7: + resolution: {integrity: sha512-vlLytXkeP4xvEq2otHeJfSQIRyWxo/oZGEbXrtEEF9Hnmrdly59sUbzZ/QgyWuLYHctCHxFF4tRQZNQ9k60ExQ==} + tiny-invariant@1.3.3: resolution: {integrity: sha512-+FbBPE1o9QAYvviau/qC5SE3caw21q3xkvWKBtja5vgqOWIHHJ3ioaq1VPfn/Szqctz2bU/oYeKd9/z5BL+PVg==} @@ -5995,6 +6467,10 @@ packages: resolution: {integrity: sha512-xRnPkJx9nvE5MF6LkB5e8QJjE2FW8269wTu/LQdf7zZqBgPly0QJPf/CWAo7srj5so4yXfoLEdCFgurlpi47zg==} hasBin: true + tmp@0.2.5: + resolution: {integrity: sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==} + engines: {node: '>=14.14'} + to-regex-range@5.0.1: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} @@ -6082,6 +6558,9 @@ packages: tw-animate-css@1.4.0: resolution: {integrity: sha512-7bziOlRqH0hJx80h/3mbicLW7o8qLsH5+RaLR2t+OHM3D0JlWGODQKQ4cxbK7WlvmUxpcj6Kgu6EKqjrGFe3QQ==} + tweetnacl@0.14.5: + resolution: {integrity: sha512-KXXFFdAbFXY4geFIwoyNK+f5Z1b7swfXABfL7HXCmoIWMKU3dmS26672A4EeQtDzLKy7SXmfBu51JolvEKwtGA==} + twoslash-protocol@0.3.7: resolution: {integrity: sha512-mwDFdclG7DbFW3aZA/CGATzV2efV2uPai90mmRSblqPbrc1Z1cu+DpI5oKMNciGY4rw8EOXc7QGY8O0iw1hnzg==} @@ -6122,9 +6601,16 @@ packages: unconfig@7.5.0: resolution: {integrity: sha512-oi8Qy2JV4D3UQ0PsopR28CzdQ3S/5A1zwsUwp/rosSbfhJ5z7b90bIyTwi/F7hCLD4SGcZVjDzd4XoUQcEanvA==} + undici-types@5.26.5: + resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + undici-types@7.19.2: resolution: {integrity: sha512-qYVnV5OEm2AW8cJMCpdV20CDyaN3g0AjDlOGf1OW4iaDEx8MwdtChUp4zu4H0VP3nDRF/8RKWH+IPp9uW0YGZg==} + undici@7.24.7: + resolution: {integrity: 
sha512-H/nlJ/h0ggGC+uRL3ovD+G0i4bqhvsDOpbDv7At5eFLlj2b41L8QliGbnl2H7SnDiYhENphh1tQFJZf+MyfLsQ==} + engines: {node: '>=20.18.1'} + unified@11.0.5: resolution: {integrity: sha512-xKvGhPWw3k84Qjh8bI3ZeJjqnyadK+GEFtazSfZv/rKeTkTjOJho6mFqh2SM96iIcZokxiOpg78GazTSg8+KHA==} @@ -6203,6 +6689,10 @@ packages: util-deprecate@1.0.2: resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} + uuid@10.0.0: + resolution: {integrity: sha512-8XkAphELsDnEGrDxUOHB3RGvXz6TeuYSGEZBOjtTtPm2lwhGBjLgOzLHB63IUWfBpNucQjND6d3AOudO+H3RWQ==} + hasBin: true + uuid@11.1.0: resolution: {integrity: sha512-0/A9rDy9P7cJ+8w1c9WD9V//9Wj15Ce2MPz8Ri6032usz+NfePxx5AcN3bN+r6ZL6jEo066/yNYB3tn4pQEx+A==} hasBin: true @@ -6361,6 +6851,14 @@ packages: resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} engines: {node: '>=0.10.0'} + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + wrappy@1.0.2: resolution: {integrity: sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==} @@ -6374,6 +6872,10 @@ packages: xml-reader@2.4.3: resolution: {integrity: sha512-xWldrIxjeAMAu6+HSf9t50ot1uL5M+BtOidRCWHXIeewvSeIpscWCsp4Zxjk8kHHhdqFBrfK8U0EJeCcnyQ/gA==} + y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + yallist@3.1.1: resolution: {integrity: sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==} @@ -6386,10 +6888,22 @@ packages: engines: {node: '>= 14.6'} hasBin: true + yargs-parser@21.1.1: + resolution: {integrity: 
sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + + yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} + zip-stream@6.0.1: + resolution: {integrity: sha512-zK7YHHz4ZXpW89AHXUPbQVGKI7uvkd3hzusTdotCg1UxyaVtg0zFJSTfW/Dq5f7OBBVnq6cZIaC8Ti4hb6dtCA==} + engines: {node: '>= 14'} + zod-to-json-schema@3.25.2: resolution: {integrity: sha512-O/PgfnpT1xKSDeQYSCfRI5Gy3hPf91mKVDuYLUHZJMiDFptvP41MSnWofm8dnCm0256ZNfZIM7DSzuSMAFnjHA==} peerDependencies: @@ -6617,6 +7131,8 @@ snapshots: '@babel/helper-string-parser': 8.0.0-rc.3 '@babel/helper-validator-identifier': 8.0.0-rc.3 + '@balena/dockerignore@1.0.2': {} + '@bcoe/v8-coverage@1.0.2': {} '@braintree/sanitize-url@7.1.2': {} @@ -6868,6 +7384,25 @@ snapshots: dependencies: tslib: 2.8.1 + '@grpc/grpc-js@1.14.3': + dependencies: + '@grpc/proto-loader': 0.8.0 + '@js-sdsl/ordered-map': 4.4.2 + + '@grpc/proto-loader@0.7.15': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.3.2 + protobufjs: 7.5.4 + yargs: 17.7.2 + + '@grpc/proto-loader@0.8.0': + dependencies: + lodash.camelcase: 4.3.0 + long: 5.3.2 + protobufjs: 7.5.4 + yargs: 17.7.2 + '@headlessui/react@2.2.10(react-dom@19.2.5(react@19.2.5))(react@19.2.5)': dependencies: '@floating-ui/react': 0.26.28(react-dom@19.2.5(react@19.2.5))(react@19.2.5) @@ -7117,6 +7652,15 @@ snapshots: optionalDependencies: '@types/node': 25.6.0 + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.2.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + '@jridgewell/gen-mapping@0.3.13': dependencies: '@jridgewell/sourcemap-codec': 1.5.5 @@ -7136,6 +7680,14 @@ 
snapshots: '@jridgewell/resolve-uri': 3.1.2 '@jridgewell/sourcemap-codec': 1.5.5 + '@js-sdsl/ordered-map@4.4.2': {} + + '@kwsites/file-exists@1.1.1': + dependencies: + debug: 4.4.3 + transitivePeerDependencies: + - supports-color + '@mdx-js/mdx@3.1.1': dependencies: '@types/estree': 1.0.8 @@ -7782,8 +8334,34 @@ snapshots: '@parcel/watcher-win32-x64': 2.5.6 optional: true + '@pkgjs/parseargs@0.11.0': + optional: true + '@pkgr/core@0.2.9': {} + '@protobufjs/aspromise@1.1.2': {} + + '@protobufjs/base64@1.1.2': {} + + '@protobufjs/codegen@2.0.4': {} + + '@protobufjs/eventemitter@1.1.0': {} + + '@protobufjs/fetch@1.1.0': + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/inquire': 1.1.0 + + '@protobufjs/float@1.0.2': {} + + '@protobufjs/inquire@1.1.0': {} + + '@protobufjs/path@1.1.2': {} + + '@protobufjs/pool@1.1.0': {} + + '@protobufjs/utf8@1.1.0': {} + '@quansync/fs@1.0.0': dependencies: quansync: 1.0.0 @@ -8414,6 +8992,17 @@ snapshots: '@types/deep-eql@4.0.2': {} + '@types/docker-modem@3.0.6': + dependencies: + '@types/node': 25.6.0 + '@types/ssh2': 1.15.5 + + '@types/dockerode@4.0.1': + dependencies: + '@types/docker-modem': 3.0.6 + '@types/node': 25.6.0 + '@types/ssh2': 1.15.5 + '@types/esrecurse@4.3.1': {} '@types/estree-jsx@1.0.5': @@ -8455,6 +9044,10 @@ snapshots: dependencies: '@types/unist': 3.0.3 + '@types/node@18.19.130': + dependencies: + undici-types: 5.26.5 + '@types/node@25.6.0': dependencies: undici-types: 7.19.2 @@ -8469,6 +9062,19 @@ snapshots: dependencies: csstype: 3.2.3 + '@types/ssh2-streams@0.1.13': + dependencies: + '@types/node': 25.6.0 + + '@types/ssh2@0.5.52': + dependencies: + '@types/node': 25.6.0 + '@types/ssh2-streams': 0.1.13 + + '@types/ssh2@1.15.5': + dependencies: + '@types/node': 18.19.130 + '@types/trusted-types@2.0.7': optional: true @@ -8823,6 +9429,10 @@ snapshots: '@xmldom/xmldom@0.9.9': {} + abort-controller@3.0.0: + dependencies: + event-target-shim: 5.0.1 + accepts@2.0.0: dependencies: mime-types: 3.0.2 @@ 
-8852,6 +9462,14 @@ snapshots: json-schema-traverse: 1.0.0 require-from-string: 2.0.2 + ansi-regex@5.0.1: {} + + ansi-regex@6.2.2: {} + + ansi-styles@4.3.0: + dependencies: + color-convert: 2.0.1 + ansi-styles@6.2.3: {} ansis@4.2.0: {} @@ -8861,6 +9479,30 @@ snapshots: normalize-path: 3.0.0 picomatch: 2.3.2 + archiver-utils@5.0.2: + dependencies: + glob: 10.5.0 + graceful-fs: 4.2.11 + is-stream: 2.0.1 + lazystream: 1.0.1 + lodash: 4.18.1 + normalize-path: 3.0.0 + readable-stream: 4.7.0 + + archiver@7.0.1: + dependencies: + archiver-utils: 5.0.2 + async: 3.2.6 + buffer-crc32: 1.0.0 + readable-stream: 4.7.0 + readdir-glob: 1.1.3 + tar-stream: 3.1.8 + zip-stream: 6.0.1 + transitivePeerDependencies: + - bare-abort-controller + - bare-buffer + - react-native-b4a + are-docs-informative@0.0.2: {} arg@5.0.2: {} @@ -8869,6 +9511,10 @@ snapshots: array-iterate@2.0.1: {} + asn1@0.2.6: + dependencies: + safer-buffer: 2.1.2 + assertion-error@2.0.1: {} ast-kit@3.0.0-beta.1: @@ -8889,6 +9535,12 @@ snapshots: astring@1.9.0: {} + async-lock@1.4.1: {} + + async@3.2.6: {} + + b4a@1.8.0: {} + babel-dead-code-elimination@1.0.12: dependencies: '@babel/core': 7.29.0 @@ -8900,10 +9552,50 @@ snapshots: bail@2.0.2: {} + balanced-match@1.0.2: {} + balanced-match@4.0.4: {} + bare-events@2.8.2: {} + + bare-fs@4.7.0: + dependencies: + bare-events: 2.8.2 + bare-path: 3.0.0 + bare-stream: 2.13.0(bare-events@2.8.2) + bare-url: 2.4.0 + fast-fifo: 1.3.2 + transitivePeerDependencies: + - bare-abort-controller + - react-native-b4a + + bare-os@3.8.7: {} + + bare-path@3.0.0: + dependencies: + bare-os: 3.8.7 + + bare-stream@2.13.0(bare-events@2.8.2): + dependencies: + streamx: 2.25.0 + teex: 1.0.1 + optionalDependencies: + bare-events: 2.8.2 + transitivePeerDependencies: + - react-native-b4a + + bare-url@2.4.0: + dependencies: + bare-path: 3.0.0 + + base64-js@1.5.1: {} + baseline-browser-mapping@2.10.17: {} + bcrypt-pbkdf@1.0.2: + dependencies: + tweetnacl: 0.14.5 + before-after-hook@4.0.0: {} 
better-react-mathjax@2.3.0(react@19.2.5): @@ -8915,6 +9607,12 @@ snapshots: birpc@4.0.0: {} + bl@4.1.0: + dependencies: + buffer: 5.7.1 + inherits: 2.0.4 + readable-stream: 3.6.2 + body-parser@2.2.2: dependencies: bytes: 3.1.2 @@ -8931,6 +9629,10 @@ snapshots: boolbase@1.0.0: {} + brace-expansion@2.1.0: + dependencies: + balanced-match: 1.0.2 + brace-expansion@5.0.5: dependencies: balanced-match: 4.0.4 @@ -8947,8 +9649,25 @@ snapshots: node-releases: 2.0.37 update-browserslist-db: 1.2.3(browserslist@4.28.2) + buffer-crc32@1.0.0: {} + + buffer@5.7.1: + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + + buffer@6.0.3: + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + + buildcheck@0.0.7: + optional: true + builtin-modules@5.0.0: {} + byline@5.0.0: {} + bytes@3.1.2: {} cac@7.0.0: {} @@ -9012,6 +9731,8 @@ snapshots: dependencies: readdirp: 4.1.2 + chownr@1.1.4: {} + chroma-js@3.2.0: {} ci-info@4.4.0: {} @@ -9038,12 +9759,24 @@ snapshots: is-wsl: 3.1.1 is64bit: 2.0.0 + cliui@8.0.1: + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + clsx@2.1.1: {} code-block-writer@13.0.3: {} collapse-white-space@2.1.0: {} + color-convert@2.0.1: + dependencies: + color-name: 1.1.4 + + color-name@1.1.4: {} + colorette@2.0.20: {} comma-separated-tokens@2.0.3: {} @@ -9058,6 +9791,14 @@ snapshots: comment-parser@1.4.6: {} + compress-commons@6.0.2: + dependencies: + crc-32: 1.2.2 + crc32-stream: 6.0.0 + is-stream: 2.0.1 + normalize-path: 3.0.0 + readable-stream: 4.7.0 + compute-scroll-into-view@3.1.1: {} confbox@0.1.8: {} @@ -9082,6 +9823,8 @@ snapshots: dependencies: browserslist: 4.28.2 + core-util-is@1.0.3: {} + cors@2.8.6: dependencies: object-assign: 4.1.1 @@ -9095,6 +9838,19 @@ snapshots: dependencies: layout-base: 2.0.1 + cpu-features@0.0.10: + dependencies: + buildcheck: 0.0.7 + nan: 2.26.2 + optional: true + + crc-32@1.2.2: {} + + crc32-stream@6.0.0: + dependencies: + crc-32: 1.2.2 + readable-stream: 4.7.0 + cross-spawn@7.0.6: dependencies: path-key: 
3.1.1 @@ -9328,6 +10084,31 @@ snapshots: diff@8.0.4: {} + docker-compose@1.4.2: + dependencies: + yaml: 2.8.3 + + docker-modem@5.0.7: + dependencies: + debug: 4.4.3 + readable-stream: 3.6.2 + split-ca: 1.0.1 + ssh2: 1.17.0 + transitivePeerDependencies: + - supports-color + + dockerode@4.0.10: + dependencies: + '@balena/dockerignore': 1.0.2 + '@grpc/grpc-js': 1.14.3 + '@grpc/proto-loader': 0.7.15 + docker-modem: 5.0.7 + protobufjs: 7.5.4 + tar-fs: 2.1.4 + uuid: 10.0.0 + transitivePeerDependencies: + - supports-color + dompurify@3.2.7: optionalDependencies: '@types/trusted-types': 2.0.7 @@ -9344,6 +10125,8 @@ snapshots: es-errors: 1.3.0 gopd: 1.2.0 + eastasianwidth@0.2.0: {} + ee-first@1.1.1: {} electron-to-chromium@1.5.334: {} @@ -9352,10 +10135,18 @@ snapshots: optionalDependencies: node-addon-api: 7.1.1 + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + empathic@2.0.0: {} encodeurl@2.0.0: {} + end-of-stream@1.4.5: + dependencies: + once: 1.4.0 + enhanced-resolve@5.20.1: dependencies: graceful-fs: 4.2.11 @@ -9790,10 +10581,18 @@ snapshots: etag@1.8.1: {} + event-target-shim@5.0.1: {} + eventemitter3@2.0.3: {} eventemitter3@5.0.4: {} + events-universal@1.0.1: + dependencies: + bare-events: 2.8.2 + transitivePeerDependencies: + - bare-abort-controller + events@3.3.0: {} eventsource-parser@3.0.6: {} @@ -9868,6 +10667,8 @@ snapshots: fast-diff@1.3.0: {} + fast-fifo@1.3.2: {} + fast-glob@3.3.1: dependencies: '@nodelib/fs.stat': 2.0.5 @@ -9955,12 +10756,19 @@ snapshots: flatted@3.4.2: {} + foreground-child@3.3.1: + dependencies: + cross-spawn: 7.0.6 + signal-exit: 4.1.0 + format@0.2.2: {} forwarded@0.2.0: {} fresh@2.0.0: {} + fs-constants@1.0.0: {} + fs-extra@11.3.4: dependencies: graceful-fs: 4.2.11 @@ -9974,6 +10782,8 @@ snapshots: gensync@1.0.0-beta.2: {} + get-caller-file@2.0.5: {} + get-intrinsic@1.3.0: dependencies: call-bind-apply-helpers: 1.0.2 @@ -9987,6 +10797,8 @@ snapshots: hasown: 2.0.2 math-intrinsics: 1.1.0 + get-port@7.2.0: {} + get-proto@1.0.1: 
dependencies: dunder-proto: 1.0.1 @@ -10008,6 +10820,15 @@ snapshots: dependencies: is-glob: 4.0.3 + glob@10.5.0: + dependencies: + foreground-child: 3.3.1 + jackspeak: 3.4.3 + minimatch: 9.0.9 + minipass: 7.1.3 + package-json-from-dist: 1.0.1 + path-scurry: 1.11.1 + globals@15.15.0: {} globals@17.4.0: {} @@ -10201,6 +11022,8 @@ snapshots: dependencies: safer-buffer: 2.1.2 + ieee754@1.2.1: {} + ignore@5.3.2: {} ignore@7.0.5: {} @@ -10252,6 +11075,8 @@ snapshots: is-extglob@2.1.1: {} + is-fullwidth-code-point@3.0.0: {} + is-glob@4.0.3: dependencies: is-extglob: 2.1.1 @@ -10268,6 +11093,8 @@ snapshots: is-promise@4.0.0: {} + is-stream@2.0.1: {} + is-stream@3.0.0: {} is-wsl@3.1.1: @@ -10278,6 +11105,8 @@ snapshots: dependencies: system-architecture: 0.1.0 + isarray@1.0.0: {} + isbot@5.1.37: {} isexe@2.0.0: {} @@ -10297,6 +11126,12 @@ snapshots: html-escaper: 2.0.2 istanbul-lib-report: 3.0.1 + jackspeak@3.4.3: + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + jiti@2.6.1: {} jose@6.2.2: {} @@ -10370,6 +11205,10 @@ snapshots: layout-base@2.0.1: {} + lazystream@1.0.1: + dependencies: + readable-stream: 2.3.8 + levn@0.4.1: dependencies: prelude-ls: 1.2.1 @@ -10436,10 +11275,18 @@ snapshots: lodash-es@4.18.1: {} + lodash.camelcase@4.3.0: {} + lodash.merge@4.6.2: {} + lodash@4.18.1: {} + + long@5.3.2: {} + longest-streak@3.1.0: {} + lru-cache@10.4.3: {} + lru-cache@5.1.1: dependencies: yallist: 3.1.1 @@ -11006,8 +11853,22 @@ snapshots: dependencies: brace-expansion: 5.0.5 + minimatch@5.1.9: + dependencies: + brace-expansion: 2.1.0 + + minimatch@9.0.9: + dependencies: + brace-expansion: 2.1.0 + + minipass@7.1.3: {} + mj-context-menu@0.6.1: {} + mkdirp-classic@0.5.3: {} + + mkdirp@3.0.1: {} + mlly@1.8.2: dependencies: acorn: 8.16.0 @@ -11026,6 +11887,9 @@ snapshots: mute-stream@3.0.0: {} + nan@2.26.2: + optional: true + nanoid@3.3.11: {} natural-compare@1.4.0: {} @@ -11235,6 +12099,8 @@ snapshots: dependencies: p-limit: 3.1.0 + 
package-json-from-dist@1.0.1: {} + package-manager-detector@1.6.0: {} pagefind@1.5.0: @@ -11292,6 +12158,11 @@ snapshots: path-key@4.0.0: {} + path-scurry@1.11.1: + dependencies: + lru-cache: 10.4.3 + minipass: 7.1.3 + path-to-regexp@8.4.2: {} pathe@2.0.3: {} @@ -11356,13 +12227,50 @@ snapshots: prettier@3.8.2: {} + process-nextick-args@2.0.1: {} + + process@0.11.10: {} + + proper-lockfile@4.1.2: + dependencies: + graceful-fs: 4.2.11 + retry: 0.12.0 + signal-exit: 3.0.7 + + properties-reader@3.0.1: + dependencies: + '@kwsites/file-exists': 1.1.1 + mkdirp: 3.0.1 + transitivePeerDependencies: + - supports-color + property-information@7.1.0: {} + protobufjs@7.5.4: + dependencies: + '@protobufjs/aspromise': 1.1.2 + '@protobufjs/base64': 1.1.2 + '@protobufjs/codegen': 2.0.4 + '@protobufjs/eventemitter': 1.1.0 + '@protobufjs/fetch': 1.1.0 + '@protobufjs/float': 1.0.2 + '@protobufjs/inquire': 1.1.0 + '@protobufjs/path': 1.1.2 + '@protobufjs/pool': 1.1.0 + '@protobufjs/utf8': 1.1.0 + '@types/node': 25.6.0 + long: 5.3.2 + proxy-addr@2.0.7: dependencies: forwarded: 0.2.0 ipaddr.js: 1.9.1 + pump@3.0.4: + dependencies: + end-of-stream: 1.4.5 + once: 1.4.0 + punycode@2.3.1: {} pure-rand@8.4.0: {} @@ -11418,6 +12326,34 @@ snapshots: json-parse-even-better-errors: 4.0.0 npm-normalize-package-bin: 4.0.0 + readable-stream@2.3.8: + dependencies: + core-util-is: 1.0.3 + inherits: 2.0.4 + isarray: 1.0.0 + process-nextick-args: 2.0.1 + safe-buffer: 5.1.2 + string_decoder: 1.1.1 + util-deprecate: 1.0.2 + + readable-stream@3.6.2: + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + + readable-stream@4.7.0: + dependencies: + abort-controller: 3.0.0 + buffer: 6.0.3 + events: 3.3.0 + process: 0.11.10 + string_decoder: 1.3.0 + + readdir-glob@1.1.3: + dependencies: + minimatch: 5.1.9 + readdirp@3.6.0: dependencies: picomatch: 2.3.2 @@ -11629,6 +12565,8 @@ snapshots: rename-keys@1.2.0: {} + require-directory@2.1.1: {} + require-from-string@2.0.2: {} 
reselect@5.1.1: {} @@ -11662,6 +12600,8 @@ snapshots: retext-stringify: 4.0.0 unified: 11.0.5 + retry@0.12.0: {} + reusify@1.1.0: {} robust-predicates@3.0.3: {} @@ -11752,6 +12692,10 @@ snapshots: rw@1.3.3: {} + safe-buffer@5.1.2: {} + + safe-buffer@5.2.1: {} + safer-buffer@2.1.2: {} sass@1.99.0: @@ -11894,6 +12838,8 @@ snapshots: siginfo@2.0.0: {} + signal-exit@3.0.7: {} + signal-exit@4.1.0: {} simple-git-hooks@2.13.1: {} @@ -11925,6 +12871,21 @@ snapshots: commander: 13.1.0 wicked-good-xpath: 1.3.0 + split-ca@1.0.1: {} + + ssh-remote-port-forward@1.0.4: + dependencies: + '@types/ssh2': 0.5.52 + ssh2: 1.17.0 + + ssh2@1.17.0: + dependencies: + asn1: 0.2.6 + bcrypt-pbkdf: 1.0.2 + optionalDependencies: + cpu-features: 0.0.10 + nan: 2.26.2 + stackback@0.0.2: {} state-local@1.0.7: {} @@ -11933,11 +12894,48 @@ snapshots: std-env@4.0.0: {} + streamx@2.25.0: + dependencies: + events-universal: 1.0.1 + fast-fifo: 1.3.2 + text-decoder: 1.2.7 + transitivePeerDependencies: + - bare-abort-controller + - react-native-b4a + + string-width@4.2.3: + dependencies: + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 + + string-width@5.1.2: + dependencies: + eastasianwidth: 0.2.0 + emoji-regex: 9.2.2 + strip-ansi: 7.2.0 + + string_decoder@1.1.1: + dependencies: + safe-buffer: 5.1.2 + + string_decoder@1.3.0: + dependencies: + safe-buffer: 5.2.1 + stringify-entities@4.0.4: dependencies: character-entities-html4: 2.1.0 character-entities-legacy: 3.0.0 + strip-ansi@6.0.1: + dependencies: + ansi-regex: 5.0.1 + + strip-ansi@7.2.0: + dependencies: + ansi-regex: 6.2.2 + strip-final-newline@3.0.0: {} strip-indent@4.1.1: {} @@ -11980,6 +12978,80 @@ snapshots: tapable@2.3.2: {} + tar-fs@2.1.4: + dependencies: + chownr: 1.1.4 + mkdirp-classic: 0.5.3 + pump: 3.0.4 + tar-stream: 2.2.0 + + tar-fs@3.1.2: + dependencies: + pump: 3.0.4 + tar-stream: 3.1.8 + optionalDependencies: + bare-fs: 4.7.0 + bare-path: 3.0.0 + transitivePeerDependencies: + - bare-abort-controller + - 
bare-buffer + - react-native-b4a + + tar-stream@2.2.0: + dependencies: + bl: 4.1.0 + end-of-stream: 1.4.5 + fs-constants: 1.0.0 + inherits: 2.0.4 + readable-stream: 3.6.2 + + tar-stream@3.1.8: + dependencies: + b4a: 1.8.0 + bare-fs: 4.7.0 + fast-fifo: 1.3.2 + streamx: 2.25.0 + transitivePeerDependencies: + - bare-abort-controller + - bare-buffer + - react-native-b4a + + teex@1.0.1: + dependencies: + streamx: 2.25.0 + transitivePeerDependencies: + - bare-abort-controller + - react-native-b4a + + testcontainers@11.14.0: + dependencies: + '@balena/dockerignore': 1.0.2 + '@types/dockerode': 4.0.1 + archiver: 7.0.1 + async-lock: 1.4.1 + byline: 5.0.0 + debug: 4.4.3 + docker-compose: 1.4.2 + dockerode: 4.0.10 + get-port: 7.2.0 + proper-lockfile: 4.1.2 + properties-reader: 3.0.1 + ssh-remote-port-forward: 1.0.4 + tar-fs: 3.1.2 + tmp: 0.2.5 + undici: 7.24.7 + transitivePeerDependencies: + - bare-abort-controller + - bare-buffer + - react-native-b4a + - supports-color + + text-decoder@1.2.7: + dependencies: + b4a: 1.8.0 + transitivePeerDependencies: + - react-native-b4a + tiny-invariant@1.3.3: {} tinybench@2.9.0: {} @@ -12001,6 +13073,8 @@ snapshots: chalk: 5.6.2 clipboardy: 4.0.0 + tmp@0.2.5: {} + to-regex-range@5.0.1: dependencies: is-number: 7.0.0 @@ -12087,6 +13161,8 @@ snapshots: tw-animate-css@1.4.0: {} + tweetnacl@0.14.5: {} + twoslash-protocol@0.3.7: {} twoslash@0.3.7(typescript@6.0.2): @@ -12137,8 +13213,12 @@ snapshots: quansync: 1.0.0 unconfig-core: 7.5.0 + undici-types@5.26.5: {} + undici-types@7.19.2: {} + undici@7.24.7: {} + unified@11.0.5: dependencies: '@types/unist': 3.0.3 @@ -12239,6 +13319,8 @@ snapshots: util-deprecate@1.0.2: {} + uuid@10.0.0: {} + uuid@11.1.0: {} vary@1.1.2: {} @@ -12369,6 +13451,18 @@ snapshots: word-wrap@1.2.5: {} + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.3 + string-width: 5.1.2 + strip-ansi: 7.2.0 + wrappy@1.0.2: {} 
xml-lexer@0.2.2: @@ -12382,6 +13476,8 @@ snapshots: eventemitter3: 2.0.3 xml-lexer: 0.2.2 + y18n@5.0.8: {} + yallist@3.1.1: {} yaml-eslint-parser@2.0.0: @@ -12391,8 +13487,26 @@ snapshots: yaml@2.8.3: {} + yargs-parser@21.1.1: {} + + yargs@17.7.2: + dependencies: + cliui: 8.0.1 + escalade: 3.2.0 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + yocto-queue@0.1.0: {} + zip-stream@6.0.1: + dependencies: + archiver-utils: 5.0.2 + compress-commons: 6.0.2 + readable-stream: 4.7.0 + zod-to-json-schema@3.25.2(zod@4.3.6): dependencies: zod: 4.3.6 diff --git a/pnpm-workspace.yaml b/pnpm-workspace.yaml index 9d830f9f..9d6b5b59 100644 --- a/pnpm-workspace.yaml +++ b/pnpm-workspace.yaml @@ -1,6 +1,7 @@ packages: - sdk - cli + - cli-integration-test - cli/npm/* - mcp - libraries/* @@ -77,6 +78,7 @@ catalog: remark-stringify: ^11.0.0 tailwind-merge: ^3.5.0 tailwindcss: ^4.2.2 + testcontainers: ^11.14.0 tsdown: ^0.21.7 tsx: ^4.21.0 turbo: ^2.9.6 diff --git a/sdk/package.json b/sdk/package.json index 1a144941..1c60d01c 100644 --- a/sdk/package.json +++ b/sdk/package.json @@ -1,7 +1,7 @@ { "name": "@truenine/memory-sync-sdk", "type": "module", - "version": "2026.10411.10132", + "version": "2026.10412.11551", "private": true, "description": "TrueNine Memory Synchronization SDK", "author": "TrueNine", diff --git a/sdk/src/index.ts b/sdk/src/index.ts index 175da1f0..395a2ef7 100644 --- a/sdk/src/index.ts +++ b/sdk/src/index.ts @@ -1,8 +1,12 @@ import type {MergedConfigResult} from './ConfigLoader' import type {MemorySyncAdaptorInfo, MemorySyncCommandResult, MemorySyncSdkBinding} from './internal/sdk-binding' +import {existsSync} from 'node:fs' +import process from 'node:process' +import {fileURLToPath} from 'node:url' import {getNativeBinding} from './core/native-binding' type JsonResult = T | Promise +const INTERNAL_COMMAND_BRIDGE_ENV = 'TNMSC_INTERNAL_COMMAND_BRIDGE' interface NativeJsonCommandBinding { readonly 
loadConfig?: (cwd?: string) => JsonResult @@ -78,13 +82,42 @@ async function parseJsonResult(value: JsonResult): Promise { return JSON.parse(raw) as T } +function ensureInternalCommandBridgeEnv(): void { + if ((process.env[INTERNAL_COMMAND_BRIDGE_ENV] ?? '').length > 0) return + + const bridgeCandidates = [ + fileURLToPath(new URL('./internal/native-command-bridge.mjs', import.meta.url)), + fileURLToPath(new URL('./native-command-bridge.mjs', import.meta.url)) + ] + + const bridgePath = bridgeCandidates.find(candidate => existsSync(candidate)) + if (bridgePath == null) return + + process.env[INTERNAL_COMMAND_BRIDGE_ENV] = bridgePath +} + function createHybridBinding(nativeBinding: Required): MemorySyncSdkBinding { const listAdaptors = getNativeListAdaptors(nativeBinding) return { loadConfig: async cwd => parseJsonResult(nativeBinding.loadConfig(cwd)), - install: async options => parseJsonResult(nativeBinding.install(options == null ? void 0 : JSON.stringify(options))), - dryRun: async options => parseJsonResult(nativeBinding.dryRun(options == null ? void 0 : JSON.stringify(options))), - clean: async options => parseJsonResult(nativeBinding.clean(options == null ? void 0 : JSON.stringify(options))), + install: async options => { + ensureInternalCommandBridgeEnv() + return parseJsonResult( + nativeBinding.install(options == null ? void 0 : JSON.stringify(options)) + ) + }, + dryRun: async options => { + ensureInternalCommandBridgeEnv() + return parseJsonResult( + nativeBinding.dryRun(options == null ? void 0 : JSON.stringify(options)) + ) + }, + clean: async options => { + ensureInternalCommandBridgeEnv() + return parseJsonResult( + nativeBinding.clean(options == null ? void 0 : JSON.stringify(options)) + ) + }, listAdaptors: async () => parseJsonResult(listAdaptors()), listPrompts: async options => parseJsonResult(nativeBinding.listPrompts(options == null ? 
void 0 : JSON.stringify(options))), getPrompt: async (promptId, options) => parseJsonResult(nativeBinding.getPrompt(promptId, options == null ? void 0 : JSON.stringify(options))), From 18b004779642cd8e69396a863e8f5151317c8c7c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E8=B5=B5=E6=97=A5=E5=A4=A9?= Date: Sun, 12 Apr 2026 16:46:50 +0800 Subject: [PATCH 3/3] Add rustfmt and docs lint fix support --- .github/actions/setup-rust/action.yml | 1 + .github/actions/setup-tauri/action.yml | 1 + doc/content/sdk/_meta.ts | 6 +++--- doc/package.json | 3 ++- 4 files changed, 7 insertions(+), 4 deletions(-) diff --git a/.github/actions/setup-rust/action.yml b/.github/actions/setup-rust/action.yml index 67630fb6..b178482a 100644 --- a/.github/actions/setup-rust/action.yml +++ b/.github/actions/setup-rust/action.yml @@ -26,6 +26,7 @@ runs: uses: dtolnay/rust-toolchain@stable with: toolchain: ${{ inputs.rust-version }} + components: rustfmt targets: ${{ inputs.targets }} - name: Cache cargo diff --git a/.github/actions/setup-tauri/action.yml b/.github/actions/setup-tauri/action.yml index 7b8e678e..cb70c705 100644 --- a/.github/actions/setup-tauri/action.yml +++ b/.github/actions/setup-tauri/action.yml @@ -53,6 +53,7 @@ runs: uses: dtolnay/rust-toolchain@stable with: toolchain: ${{ inputs.rust-version }} + components: rustfmt targets: ${{ inputs.rust-targets }} - name: Normalize Tauri signing key diff --git a/doc/content/sdk/_meta.ts b/doc/content/sdk/_meta.ts index 806ed94e..a94076b6 100644 --- a/doc/content/sdk/_meta.ts +++ b/doc/content/sdk/_meta.ts @@ -1,7 +1,7 @@ export default { - index: '概览', - architecture: '架构总览', - logger: 'Logger 日志库', + 'index': '概览', + 'architecture': '架构总览', + 'logger': 'Logger 日志库', 'md-compiler': 'MDX-Compiler 编译器', 'script-runtime': 'Script-Runtime 运行时' } diff --git a/doc/package.json b/doc/package.json index 0141a9ee..910fcdc7 100644 --- a/doc/package.json +++ b/doc/package.json @@ -14,7 +14,8 @@ "validate:content": "tsx scripts/validate-content.ts", 
"check:type": "tsx scripts/run-next.ts typegen && tsc --project tsconfig.typecheck.json --noEmit --incremental false", "start": "tsx scripts/run-next.ts start", - "lint": "tsx scripts/validate-content.ts && eslint --cache --cache-location .eslintcache ." + "lint": "tsx scripts/validate-content.ts && eslint --cache --cache-location .eslintcache .", + "lint:fix": "tsx scripts/validate-content.ts && eslint --fix --cache --cache-location .eslintcache ." }, "dependencies": { "@theguild/remark-mermaid": "catalog:",