From b3ee21476a919ff5bd1def4be5f7aae85134020f Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 15:37:32 -0400 Subject: [PATCH 01/36] docs(specs): add design for `path share` command Brainstormed design for a single-shot `path share` that aggregates sessions across installed agent harnesses, ranks current-project sessions first, and uploads the picked session to Pathbase. Captures the surface, aggregation/picker model, derive+upload pipeline, error handling, and test plan ahead of the implementation plan. --- .../2026-05-07-path-share-command-design.md | 265 ++++++++++++++++++ 1 file changed, 265 insertions(+) create mode 100644 docs/superpowers/specs/2026-05-07-path-share-command-design.md diff --git a/docs/superpowers/specs/2026-05-07-path-share-command-design.md b/docs/superpowers/specs/2026-05-07-path-share-command-design.md new file mode 100644 index 0000000..ead0f58 --- /dev/null +++ b/docs/superpowers/specs/2026-05-07-path-share-command-design.md @@ -0,0 +1,265 @@ +# `path share` — interactive Pathbase upload + +**Status:** Design accepted, awaiting implementation plan. +**Date:** 2026-05-07 + +## Goal + +Collapse the existing two-step "derive a session, upload it" workflow +(`path import ` then `path export pathbase --input `) into a +single command that's optimised for the most common case: an +interactive user wants to share *one* agent session from the project +they're currently sitting in. + +Today this requires two commands and the user has to know which +harness ran the conversation. `path share` removes both of those. + +## Non-goals + +- Sharing git branches or GitHub PRs. Those flows already exist on + `path import` / `path export pathbase` and the user explicitly + scoped this command to "agent harnesses". +- Multi-session bundling. Multi-select is not exposed; one share, one + URL. +- Streaming uploads. The document is materialized in memory (and, by + default, in the cache) before posting. 
- A `--include-thinking` flag for Gemini. Out of scope for v1.

## Surface

```
path share [--url <url>]
           [--harness <name>]
           [--session <id>]
           [--project <path>]
           [--anon] [--repo <owner/name>] [--slug <slug>] [--public]
           [--force] [--no-cache]
```

| Flag | Behavior |
| --------------------- | -------- |
| (no flags) | Unified picker over all detected harnesses, current-project rows ranked first. |
| `--harness X` | Pre-filter the picker to one harness. |
| `--harness X --session Y` | Skip the picker. `--project` required when X ∈ {claude, gemini, pi}. |
| `--session` alone | Error: ambiguous without `--harness`. |
| `--project P` | Filter the picker to sessions tied to that project (across all harnesses). |
| `--no-cache` | Skip writing `~/.toolpath/documents/<id>.json`; derive in-memory only. |
| `--force` | Allow overwriting an existing cache entry. Same semantics as `import --force`. |
| `--url` | Override Pathbase server URL. Falls back to stored session, then `$PATHBASE_URL`, then `https://pathbase.dev`. |
| `--anon` | Force anonymous endpoint; conflicts with `--repo`, `--public`. |
| `--repo`, `--slug`, `--public` | Same semantics as `path export pathbase`. |

When the user is logged out and passes none of `--anon` / `--repo` /
`--public` / `--slug`, the upload falls through to the anonymous
endpoint with a stderr notice — same default as `export pathbase`
today.

## Internal architecture

### New module: `cmd_share.rs`

Lives next to the other `cmd_*.rs` files in `crates/path-cli/src/`.
Wired into `lib.rs` as a new `Commands::Share { args: cmd_share::ShareArgs }`
arm.
### Session aggregation

```rust
struct SessionRow {
    harness: Harness,        // Claude | Gemini | Codex | Opencode | Pi
    project: Option<String>, // project path for keyed providers; None for codex/opencode
    cwd: Option<String>,     // recorded cwd from the session (codex/opencode only)
    session_id: String,
    title: String,           // first_user_message or "(no prompt)"
    last_activity: Option<DateTime<Utc>>,
    message_count: usize,
    matches_cwd: bool,       // computed during aggregation
}

fn gather_sessions(
    cwd: &Path,
    harness_filter: Option<Harness>,
    project_filter: Option<&Path>,
) -> Vec<SessionRow>;
```

**Detection-by-probing.** No explicit "is X installed" config. For
each of the five harnesses, instantiate `*Convo::new()` and attempt
the listing API. Skip silently when:
- `home_dir()` resolves to None,
- the harness's base directory does not exist,
- listing returns Err with `io::ErrorKind::NotFound`,
- listing returns an empty `Vec`.

Any other error emits a single `warning: <harness> aggregation failed: <err>`
to stderr and aggregation continues with the remaining harnesses.

**Per-harness rules:**

- **claude / gemini / pi** (project-keyed): `list_projects()` →
  `list_conversation_metadata(p)` for each. `matches_cwd =
  canonical(p) == canonical(cwd)`. Title from `first_user_message`.
- **codex / opencode** (session-keyed): `list_sessions()`. Codex
  stores `cwd` in rollout meta; opencode stores `directory`.
  `matches_cwd = canonical(stored_cwd) == canonical(cwd)`. No
  `project` field.

`canonicalize` failure on either side falls back to byte-equal string
compare; mismatch only affects ranking, never correctness.
### Picker

When `--session` is absent and stdin+stderr are TTYs and `fzf` is on
PATH, the rows are formatted into a TSV stream and fed to `fzf`:

```
col 1: harness (hidden, parser key)
col 2: project_or_cwd (hidden, derive arg)
col 3: session_id (hidden, derive arg)
col 4: harness symbol ("claude " / "gemini " / "codex " / "opencode" / "pi ")
col 5: when ("YYYY-MM-DD HH:MM" or "—")
col 6: msgs ("12 msgs")
col 7: scope ("·" for cwd-match, " " otherwise)
col 8: project_short (last two path segments)
col 9: title (truncated to 120 chars)
```

`fzf` shows columns 4..; preview command runs
`path show [--project {2}] --session {3}`. Single-select
only (no `--multi`). Header line: `share an agent session (Enter = upload to <server>)`.

**Sort order before piping to fzf:**
1. Rows with `matches_cwd = true`, descending by `last_activity`.
2. Rows with `matches_cwd = false`, descending by `last_activity`.

### Non-interactive paths

- `fzf` missing or non-TTY: print a generic recipe (use `path import
  <harness>` then `path export pathbase`) and exit 1. **No
  most-recent fallback** — sharing is consequential enough to require
  an explicit choice.
- `--harness X --session Y` (and `--project P` for keyed providers):
  skip aggregation entirely; derive directly.
- `--harness X` alone: still uses the unified aggregator pre-filtered
  to one harness; same fzf code path.
- Esc / Ctrl-C in fzf: exit 130, print nothing.
### Derive

Three small `pub(crate)` cuts in `cmd_import.rs`:

```rust
pub(crate) struct DerivedDoc { pub cache_id: String, pub doc: Graph }

pub(crate) fn derive_claude_pair(project: &str, session: &str) -> Result<DerivedDoc>;
pub(crate) fn derive_gemini_pair(project: &str, session: &str, include_thinking: bool) -> Result<DerivedDoc>;
pub(crate) fn derive_pi_pair(project: &str, session: &str, base: Option<PathBuf>) -> Result<DerivedDoc>;
pub(crate) fn derive_codex_one(session: &str) -> Result<DerivedDoc>;
pub(crate) fn derive_opencode_one(session: &str, no_snapshot_diffs: bool) -> Result<DerivedDoc>;
```

These extract the single-pair branches from the existing
`derive_claude` / `derive_gemini` / etc. dispatch functions in
`cmd_import.rs`. The existing dispatch keeps calling them — pure
mechanical refactor, no behavior change.

### Cache

Default behavior: write the derived `Graph` to
`~/.toolpath/documents/<id>.json` via the existing
`write_cached(&id, &doc, force)`. Same `<harness>-<id>` cache id
format as `path import` — a `share`-produced entry is
indistinguishable from an `import`-produced one and can be re-uploaded
later with `export pathbase --input <id>`. `--no-cache` skips the
write. `--force` allows overwrite.

### Upload

`cmd_export::run_pathbase` is split:

```rust
pub(crate) fn run_pathbase_inner(args: PathbaseExportArgs, body: &str) -> Result<UploadResult>;
```

`run_pathbase` becomes a thin wrapper that reads the cache file then
calls the inner. `cmd_share` calls the inner directly with the
in-memory body (`doc.to_json()`). Same `--anon` / `--repo` / `--slug`
/ `--public` / `--url` semantics inherited from `export pathbase`.

`UploadResult` carries the share URL and a short summary string for
stderr.

## Output contract

- **stdout**: the share URL, exactly one line. Scriptable.
- **stderr**: progress messages —
  ```
  Picked claude session "Add share command"
  Imported claude session → claude-abc (omitted with --no-cache)
  Uploaded → alex/pathstash/<slug> (secret path, 12 KB)
  ```
- The cache id is **not** echoed to stdout (unlike `path import`)
  because the share URL is the primary product. The cache id appears
  in the stderr "Imported …" line, which is enough to find it via
  `cache ls`.

**Exit codes.** 0 success; 130 user cancelled fzf; 1 anything else.

## Error handling

| Situation | Behavior |
| --- | --- |
| `home_dir()` None / harness base dir missing | Skip silently. |
| Per-file metadata read fails inside a harness | Underlying provider already handles this per-file; we don't second-guess. |
| Whole-harness listing returns Err other than NotFound | Single `warning: ...` to stderr; continue with other harnesses. |
| No sessions found anywhere | Print probe summary (one line per harness, with path and count or "not found"); exit 1. |
| No sessions match `--project P` | Print message naming the project; suggest running without `--project`; exit 1. |
| `--session` without `--harness` | Clap-level error (clap `requires = "harness"`). |
| `--anon` with `--repo`/`--public` | Clap-level conflict (copy from `export pathbase`). |
| `--harness <claude/gemini/pi>` + `--session` without `--project` | Runtime error: `"--project required when --harness is claude/gemini/pi and --session is set"`. |
| Logged out, no `--anon`, no auth-requiring flags | Anonymous upload with stderr notice (matches `export pathbase`). |
| Logged out, `--repo`/`--public`/`--slug` set | Error: "log in first" (inherited from `export pathbase`). |
| Logged in, `--url` host differs from stored session host | stderr warning, attempt anyway (inherited from `export pathbase`). |
| Server applies different `is_public` than requested | stderr note; share URL form follows what was actually applied (inherited). |

## Testing

### Unit tests in `cmd_share.rs`

1.
`gather_sessions` produces rows in the right order (cwd-match first, then by recency) — fixture builds tempdir layouts for two or three harnesses. +2. `gather_sessions` skips harnesses whose home dir is missing (no panic, no warning). +3. `gather_sessions` honors `--harness` and `--project` filters. +4. `parse_picker_row` round-trips `(harness, project, session_id)` through the TSV format. +5. `matches_cwd` uses canonicalized paths (test via temp-dir symlink that both forms match). + +Reuses the existing `setup_claude_manager` / `setup_claude_manager_with_two_sessions` helpers from `cmd_import` tests; adds a `setup_multi_harness` helper that wires two or three fake home dirs at once. + +### Integration tests in `crates/path-cli/tests/` + +1. `share_explicit_args.rs` — `path share --harness claude --project /tmp/x --session abc --no-cache --anon --url http://127.0.0.1:` against the existing `MockServer`. Asserts a single URL on stdout and that the request body is the derived Graph. +2. `share_no_harness_no_tty.rs` — non-TTY, no flags → exits 1 with the recipe message; nothing on stdout. +3. `share_filters_by_project.rs` — explicit `--project P` with no matches → exits 1 with the per-project not-found message. +4. `share_logged_out_anon_default.rs` — no credentials, no `--anon` → uploads via anon endpoint; stderr carries the "not logged in — uploading anonymously" notice. +5. `share_writes_cache_by_default.rs` — default behavior, explicit args → a file appears in the test config dir's `documents/` matching the derived cache id. +6. `share_no_cache_skips_write.rs` — same with `--no-cache` → no file appears. + +### Out of scope for tests + +- The fzf-driven path. Not exercised in CI (matches the existing import tests). The aggregator — the genuinely new logic — is fully unit-tested. + +### Documentation + +- A one-line entry in `CLAUDE.md`'s "Things to know" pointing at + `path share` and the unified-picker behavior, alongside the existing + fzf picker docs. 
+- A short paragraph in the CLI usage block at the top of `CLAUDE.md`. +- A `path share` section in any place README/CLI docs enumerate + commands. + +## Open questions + +None blocking. Future: +- `--include-thinking` could be added if Gemini sharing is common. +- Multi-select bundling could be added later if a user pattern emerges. +- A `--web` flag (or `path share --open`) that opens the resulting + URL via `open` / `xdg-open` is a small future addition. From f8bb2c4aa78c5bc37d1e966a655e6f57c8e6165c Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 15:58:44 -0400 Subject: [PATCH 02/36] docs(plans): add implementation plan for `path share` Nine tasks covering: refactors to lift derive helpers and split run_pathbase, scaffold for cmd_share, types, gather_sessions for all five harnesses, explicit-args path, picker + probe summary, and docs. Each task is TDD where reasonable (tests first when meaningful) with concrete code blocks per step. --- .../plans/2026-05-07-path-share-command.md | 2120 +++++++++++++++++ 1 file changed, 2120 insertions(+) create mode 100644 docs/superpowers/plans/2026-05-07-path-share-command.md diff --git a/docs/superpowers/plans/2026-05-07-path-share-command.md b/docs/superpowers/plans/2026-05-07-path-share-command.md new file mode 100644 index 0000000..db202cd --- /dev/null +++ b/docs/superpowers/plans/2026-05-07-path-share-command.md @@ -0,0 +1,2120 @@ +# `path share` Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. + +**Goal:** Add a `path share` command that aggregates agent sessions across installed harnesses, ranks current-project sessions first in a single fzf picker, and uploads the picked session to Pathbase in one shot. + +**Architecture:** New `cmd_share.rs` module in `crates/path-cli/src/`. 
Reuses derive helpers from `cmd_import.rs` (lifted to `pub(crate)` as single-pair functions) and the upload helper from `cmd_export.rs` (refactored into a body-taking `run_pathbase_inner`). Aggregation, picker, and CLI dispatch live in the new module. + +**Tech Stack:** Rust 2024, clap (CLI), reqwest+tokio (HTTP via shared `cmd_pathbase` helpers), `fzf` (interactive picker), the existing `toolpath-{claude,gemini,codex,opencode,pi}` provider crates. + +**Spec:** `docs/superpowers/specs/2026-05-07-path-share-command-design.md` (commit `b3ee214`). + +--- + +## File map + +- **Modify** `crates/path-cli/src/cmd_import.rs` — lift `DerivedDoc` to `pub(crate)`; extract single-pair derive helpers as `pub(crate) fn`s. +- **Modify** `crates/path-cli/src/cmd_export.rs` — split `run_pathbase` into `run_pathbase_inner(args, body)` + thin wrapper; add `pub(crate) struct PathbaseUploadArgs`. +- **Create** `crates/path-cli/src/cmd_share.rs` — module: types (`Harness`, `SessionRow`, `HarnessBundle`), aggregation (`gather_sessions`), picker, dispatch (`run`). +- **Modify** `crates/path-cli/src/lib.rs` — add `mod cmd_share;` and `Commands::Share { args }` enum arm. +- **Modify** `crates/path-cli/tests/integration.rs` — add `share_*` integration tests. +- **Modify** `CLAUDE.md` — add a `path share` line to the CLI usage block, and one item to "Things to know" describing the unified picker. + +--- + +## Task 1: Refactor `cmd_import.rs` — lift visibility, extract single-pair derive helpers + +Mechanical refactor; no behavior change. The new `pub(crate)` helpers each derive a `DerivedDoc` for one explicit `(project, session)` or `session` pair, so `cmd_share` can call them after its own picker resolves a row. 
+ +**Files:** +- Modify: `crates/path-cli/src/cmd_import.rs` + +- [ ] **Step 1.1: Lift `DerivedDoc` to `pub(crate)`** + +In `crates/path-cli/src/cmd_import.rs` around line 174, change: + +```rust +struct DerivedDoc { + cache_id: String, + doc: Graph, +} +``` + +to: + +```rust +pub(crate) struct DerivedDoc { + pub(crate) cache_id: String, + pub(crate) doc: Graph, +} +``` + +- [ ] **Step 1.2: Add `derive_claude_pair`** + +Add this function next to `derive_claude_with_manager` (around line 369): + +```rust +/// Derive a single Claude conversation given an explicit project + session. +/// Used by `cmd_share` after its picker has resolved the pair; mirrors the +/// `(Some(p), Some(s), _)` arm in [`derive_claude_with_manager`]. +pub(crate) fn derive_claude_pair(project: &str, session: &str) -> Result { + let manager = toolpath_claude::ClaudeConvo::new(); + let cfg = toolpath_claude::derive::DeriveConfig { + project_path: Some(project.to_string()), + include_thinking: false, + }; + let convo = manager + .read_conversation(project, session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = toolpath_claude::derive::derive_path(&convo, &cfg); + let cache_id = make_id("claude", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} +``` + +- [ ] **Step 1.3: Add `derive_gemini_pair`** + +Add this function next to `derive_gemini_with_manager` (around line 562): + +```rust +/// Derive a single Gemini conversation given an explicit project + session. 
+pub(crate) fn derive_gemini_pair( + project: &str, + session: &str, + include_thinking: bool, +) -> Result { + let manager = toolpath_gemini::GeminiConvo::new(); + let cfg = toolpath_gemini::derive::DeriveConfig { + project_path: Some(project.to_string()), + include_thinking, + }; + let convo = manager + .read_conversation(project, session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = toolpath_gemini::derive::derive_path(&convo, &cfg); + let cache_id = make_id("gemini", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} +``` + +- [ ] **Step 1.4: Add `derive_pi_pair`** + +Add this function next to `derive_pi_with_manager` (around line 995): + +```rust +/// Derive a single Pi session given an explicit project + session. +pub(crate) fn derive_pi_pair( + project: &str, + session: &str, + base: Option, +) -> Result { + let manager = if let Some(path) = base { + let resolver = toolpath_pi::PathResolver::new().with_sessions_dir(&path); + toolpath_pi::PiConvo::with_resolver(resolver) + } else { + toolpath_pi::PiConvo::new() + }; + let config = toolpath_pi::DeriveConfig::default(); + let session = manager + .read_session(project, session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let doc = Graph::from_path(toolpath_pi::derive::derive_path(&session, &config)); + let cache_id = make_id("pi", &doc_inner_id(&doc)); + Ok(DerivedDoc { cache_id, doc }) +} +``` + +- [ ] **Step 1.5: Add `derive_codex_one`** + +Add this function next to `derive_codex` (around line 738): + +```rust +/// Derive a single Codex session given an explicit session id. 
+pub(crate) fn derive_codex_one(session: &str) -> Result { + let manager = toolpath_codex::CodexConvo::new(); + let config = toolpath_codex::derive::DeriveConfig { project_path: None }; + let s = manager + .read_session(session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = toolpath_codex::derive::derive_path(&s, &config); + let cache_id = make_id("codex", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} +``` + +- [ ] **Step 1.6: Add `derive_opencode_one`** + +Add this function next to `derive_opencode` (around line 848). Wrap in the same `cfg(not(target_os = "emscripten"))` gate the rest of opencode uses: + +```rust +/// Derive a single opencode session given an explicit session id. +#[cfg(not(target_os = "emscripten"))] +pub(crate) fn derive_opencode_one( + session: &str, + no_snapshot_diffs: bool, +) -> Result { + let manager = toolpath_opencode::OpencodeConvo::new(); + let config = toolpath_opencode::derive::DeriveConfig { + no_snapshot_diffs, + ..Default::default() + }; + let s = manager + .read_session(session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = + toolpath_opencode::derive::derive_path_with_resolver(&s, &config, manager.resolver()); + let cache_id = make_id("opencode", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} +``` + +- [ ] **Step 1.7: Verify the workspace still builds and tests pass** + +```bash +cargo build -p path-cli +cargo test -p path-cli --lib +``` + +Expected: build succeeds, all existing tests pass (this was a pure addition — no call sites rewritten). + +- [ ] **Step 1.8: Commit** + +```bash +git add crates/path-cli/src/cmd_import.rs +git commit -m "refactor(path-cli): extract single-pair derive helpers + +Lifts DerivedDoc to pub(crate) and adds derive_{claude,gemini,pi}_pair +and derive_{codex,opencode}_one. 
These are the explicit-args paths
already exercised by the (Some(p), Some(s), _) arm of each existing
dispatch — extracted so cmd_share can reuse them without re-implementing
the per-harness wiring."
```

---

## Task 2: Refactor `cmd_export.rs` — split `run_pathbase` so the body can come from memory

Today `run_pathbase` reads from a cache file. `cmd_share` has the derived `Graph` in memory; we want to upload without writing-then-reading. Extract a `run_pathbase_inner(args, body)` and have the existing wrapper read the file then call the inner.

**Files:**
- Modify: `crates/path-cli/src/cmd_export.rs`

- [ ] **Step 2.1: Add `pub(crate) struct PathbaseUploadArgs`**

Add this near the existing `struct PathbaseExportArgs` (around line 219):

```rust
/// Pathbase upload knobs that don't depend on where the body came from.
/// Identical to [`PathbaseExportArgs`] minus the `input` field — the body
/// is supplied by the caller (read from cache, derived in memory, …).
#[derive(Debug)]
pub(crate) struct PathbaseUploadArgs {
    pub(crate) url: Option<String>,
    pub(crate) anon: bool,
    pub(crate) repo: Option<RepoSpec>,
    pub(crate) slug: Option<String>,
    pub(crate) public: bool,
}
```

- [ ] **Step 2.2: Lift `RepoSpec` and `parse_repo_spec` to `pub(crate)`**

In the same file, change:

```rust
#[derive(Debug, Clone)]
pub struct RepoSpec {
    pub owner: String,
    pub name: String,
}

fn parse_repo_spec(s: &str) -> std::result::Result<RepoSpec, String> {
```

so both `pub` items become `pub(crate)` (already `pub` for `RepoSpec`; convert for `parse_repo_spec`):

```rust
#[derive(Debug, Clone)]
pub(crate) struct RepoSpec {
    pub(crate) owner: String,
    pub(crate) name: String,
}

pub(crate) fn parse_repo_spec(s: &str) -> std::result::Result<RepoSpec, String> {
```

- [ ] **Step 2.3: Extract `run_pathbase_inner`**

Replace the body of `run_pathbase` (lines 1202–1329 — the `#[cfg(not(target_os = "emscripten"))]` arm) so that it reads the file then calls a new inner.
The new shape: + +```rust +fn run_pathbase(args: PathbaseExportArgs) -> Result<()> { + #[cfg(target_os = "emscripten")] + { + let _ = args; + anyhow::bail!("'path export pathbase' requires a native environment with network access"); + } + + #[cfg(not(target_os = "emscripten"))] + { + let file = cache_ref(&args.input)?; + let body = std::fs::read_to_string(&file) + .with_context(|| format!("Failed to read {}", file.display()))?; + let upload = PathbaseUploadArgs { + url: args.url, + anon: args.anon, + repo: args.repo, + slug: args.slug, + public: args.public, + }; + let summary_source = file.display().to_string(); + run_pathbase_inner(upload, &body, &summary_source) + } +} + +#[cfg(not(target_os = "emscripten"))] +pub(crate) fn run_pathbase_inner( + args: PathbaseUploadArgs, + body: &str, + summary_source: &str, +) -> Result<()> { + use crate::cmd_pathbase::{ + anon_paths_post, api_me, credentials_path, load_session, paths_post, repos_post, + resolve_url, + }; + + // Validate locally so we give a clean error rather than relying on + // the server to reject malformed payloads. + let doc = toolpath::v1::Graph::from_json(body) + .map_err(|e| anyhow::anyhow!("Invalid toolpath document: {}", e))?; + + let stored = load_session(&credentials_path()?)?; + let base_url = match (&args.url, &stored) { + (Some(u), _) => resolve_url(Some(u.clone())), + (None, Some(s)) => s.url.clone(), + (None, None) => resolve_url(None), + }; + + let go_anon = args.anon || (stored.is_none() && args.repo.is_none() && args.slug.is_none()); + + if go_anon { + if !args.anon && stored.is_none() { + eprintln!( + "note: not logged in — uploading anonymously (not listable). Run `path auth login --url {base_url}` for a listable upload." 
+ ); + } + let resp = anon_paths_post(&base_url, body)?; + let printable = if resp.url.starts_with("http://") || resp.url.starts_with("https://") { + resp.url.clone() + } else if resp.url.starts_with('/') { + format!("{base_url}{}", resp.url) + } else { + format!("{base_url}/{}", resp.url) + }; + println!("{printable}"); + eprintln!( + "Uploaded {} → anon path {} ({} bytes)", + summary_source, + resp.id, + body.len() + ); + return Ok(()); + } + + let session = stored.ok_or_else(|| { + anyhow::anyhow!("Not logged in. Run `path auth login` or pass `--anon`.") + })?; + if host_of(&base_url) != host_of(&session.url) { + eprintln!( + "warning: uploading to {} with a token issued by {}; expect 401 unless this is the same deployment", + base_url, session.url + ); + } + + let (owner, repo) = match args.repo { + Some(spec) => (spec.owner, spec.name), + None => { + let user = api_me(&base_url, &session.token)?; + repos_post(&base_url, &session.token, "pathstash")?; + (user.username, "pathstash".to_string()) + } + }; + + let slug = args.slug.unwrap_or_else(|| derive_slug(&doc)); + let created = paths_post( + &base_url, + &session.token, + &owner, + &repo, + &slug, + body, + args.public, + )?; + + if created.is_public != args.public { + eprintln!( + "note: requested is_public={} but server applied is_public={}", + args.public, created.is_public + ); + } + let visibility = if created.is_public { "public" } else { "secret" }; + let url = pathbase_share_url( + &base_url, + &owner, + &repo, + &created.slug, + &created.id, + created.is_public, + ); + println!("{url}"); + eprintln!( + "Uploaded {} → {}/{}/{} ({} path, {} bytes)", + summary_source, + owner, + repo, + created.slug, + visibility, + body.len() + ); + Ok(()) +} +``` + +`summary_source` is the human-readable label used in the stderr "Uploaded …" line — `cache_ref` path for `export pathbase`, and a synthesized " session " string for `cmd_share`. Keeps the inner free of cache-vs-memory branching. 
+ +- [ ] **Step 2.4: Verify the workspace still builds and tests pass** + +```bash +cargo build -p path-cli +cargo test -p path-cli +``` + +Expected: existing `pathbase_*` tests in `cmd_pathbase.rs` and `export_pathbase_repo_flag_requires_login` integration test still pass — refactor preserved behavior. + +- [ ] **Step 2.5: Commit** + +```bash +git add crates/path-cli/src/cmd_export.rs +git commit -m "refactor(path-cli): split run_pathbase into wrapper + inner + +run_pathbase_inner takes a body string and a summary_source label, so +callers with an in-memory toolpath document (cmd_share) can upload +without round-tripping through the cache." +``` + +--- + +## Task 3: Scaffold `cmd_share.rs` and wire into `lib.rs` + +Empty module with the CLI surface and a `run()` stub that errors. This is the smallest change that lets `path share --help` print and `path share` produce a recognisable "not implemented yet" failure, so subsequent tasks can be tested incrementally. + +**Files:** +- Create: `crates/path-cli/src/cmd_share.rs` +- Modify: `crates/path-cli/src/lib.rs` + +- [ ] **Step 3.1: Write the failing test for the help output** + +Append to `crates/path-cli/tests/integration.rs`: + +```rust +#[test] +fn share_help_lists_unified_picker_flags() { + cmd() + .args(["share", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("--harness")) + .stdout(predicate::str::contains("--session")) + .stdout(predicate::str::contains("--project")) + .stdout(predicate::str::contains("--anon")); +} +``` + +- [ ] **Step 3.2: Run the test to confirm it fails** + +```bash +cargo test -p path-cli --test integration share_help_lists_unified_picker_flags +``` + +Expected: FAIL — `path share` is not yet a recognised subcommand. + +- [ ] **Step 3.3: Create `cmd_share.rs`** + +```rust +//! `path share` — interactive Pathbase upload across installed agent +//! harnesses. See `docs/superpowers/specs/2026-05-07-path-share-command-design.md`. 
#![cfg(not(target_os = "emscripten"))]

use anyhow::Result;
use clap::{Args, ValueEnum};
use std::path::PathBuf;

use crate::cmd_export::RepoSpec;

#[derive(Copy, Clone, Debug, PartialEq, Eq, ValueEnum)]
#[value(rename_all = "lower")]
pub enum HarnessArg {
    Claude,
    Gemini,
    Codex,
    Opencode,
    Pi,
}

#[derive(Args, Debug)]
pub struct ShareArgs {
    /// Pathbase server URL (defaults to the stored session's server)
    #[arg(long)]
    pub url: Option<String>,

    /// Force the anonymous endpoint, ignoring any stored credentials
    #[arg(long, conflicts_with_all = ["repo", "public"])]
    pub anon: bool,

    /// Target a specific repo as `owner/name` instead of `<user>/pathstash`
    #[arg(long, value_parser = crate::cmd_export::parse_repo_spec)]
    pub repo: Option<RepoSpec>,

    /// Override the auto-derived slug (defaults to the toolpath document id)
    #[arg(long)]
    pub slug: Option<String>,

    /// Make the uploaded path publicly listable (default: secret/unlisted)
    #[arg(long)]
    pub public: bool,

    /// Narrow the picker to one harness, or skip the picker entirely
    /// when used with --session.
    #[arg(long, value_enum)]
    pub harness: Option<HarnessArg>,

    /// Skip the picker. Requires --harness; requires --project for
    /// claude/gemini/pi.
    #[arg(long, requires = "harness")]
    pub session: Option<String>,

    /// Override cwd-as-project. Filters the picker to sessions tied to
    /// this project across all harnesses.
+ #[arg(long)] + pub project: Option, + + /// Overwrite the cache entry if it already exists + #[arg(long)] + pub force: bool, + + /// Skip writing the cache; derive in-memory only + #[arg(long)] + pub no_cache: bool, +} + +pub fn run(args: ShareArgs) -> Result<()> { + let _ = args; + anyhow::bail!("`path share` is not yet implemented") +} +``` + +- [ ] **Step 3.4: Wire it into `lib.rs`** + +In `crates/path-cli/src/lib.rs`, add the module declaration alongside the others: + +```rust +#[cfg(not(target_os = "emscripten"))] +mod cmd_share; +``` + +Add to the `Commands` enum (anywhere among the existing arms; placing it next to `Auth` is natural): + +```rust + /// Share an agent session to Pathbase via an interactive picker + #[cfg(not(target_os = "emscripten"))] + Share { + #[command(flatten)] + args: cmd_share::ShareArgs, + }, +``` + +Add the dispatch arm in `pub fn run`: + +```rust + #[cfg(not(target_os = "emscripten"))] + Commands::Share { args } => cmd_share::run(args), +``` + +- [ ] **Step 3.5: Run the help test to verify it passes** + +```bash +cargo test -p path-cli --test integration share_help_lists_unified_picker_flags +``` + +Expected: PASS. + +- [ ] **Step 3.6: Confirm `path share` runs and bails with the stub error** + +```bash +cargo run -p path-cli -- share 2>&1 | head -3 +``` + +Expected: stderr says `Error: \`path share\` is not yet implemented`. + +- [ ] **Step 3.7: Commit** + +```bash +git add crates/path-cli/src/cmd_share.rs crates/path-cli/src/lib.rs crates/path-cli/tests/integration.rs +git commit -m "feat(path-cli): scaffold \`path share\` command + +Adds the cmd_share module with the full CLI surface (--url, --harness, +--session, --project, --anon, --repo, --slug, --public, --force, +--no-cache) and a stub run() that bails. Wires it into lib.rs as +Commands::Share. Subsequent tasks fill in the body." +``` + +--- + +## Task 4: Add `Harness`, `SessionRow`, and `HarnessBundle` types + +Pure types with small helper methods. 
No aggregation logic yet — that comes in tasks 5 and 6. Splitting it out keeps the test fixtures focused.

**Files:**
- Modify: `crates/path-cli/src/cmd_share.rs`

- [ ] **Step 4.1: Write the failing tests for the type helpers**

Append to `crates/path-cli/src/cmd_share.rs`:

```rust
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn harness_name_and_symbol_are_distinct() {
        let all = [
            Harness::Claude,
            Harness::Gemini,
            Harness::Codex,
            Harness::Opencode,
            Harness::Pi,
        ];
        let names: Vec<&str> = all.iter().map(|h| h.name()).collect();
        let symbols: Vec<&str> = all.iter().map(|h| h.symbol()).collect();
        assert_eq!(names.len(), 5);
        assert_eq!(
            names.iter().collect::<std::collections::HashSet<_>>().len(),
            5,
            "names must be unique"
        );
        assert_eq!(
            symbols
                .iter()
                .collect::<std::collections::HashSet<_>>()
                .len(),
            5,
            "symbols must be unique"
        );
    }

    #[test]
    fn harness_project_keyed_matches_design() {
        assert!(Harness::Claude.project_keyed());
        assert!(Harness::Gemini.project_keyed());
        assert!(Harness::Pi.project_keyed());
        assert!(!Harness::Codex.project_keyed());
        assert!(!Harness::Opencode.project_keyed());
    }

    #[test]
    fn harness_from_arg_roundtrips() {
        for (arg, harness) in [
            (HarnessArg::Claude, Harness::Claude),
            (HarnessArg::Gemini, Harness::Gemini),
            (HarnessArg::Codex, Harness::Codex),
            (HarnessArg::Opencode, Harness::Opencode),
            (HarnessArg::Pi, Harness::Pi),
        ] {
            assert_eq!(Harness::from_arg(arg), harness);
        }
    }
}
```

- [ ] **Step 4.2: Run the tests to confirm they fail**

```bash
cargo test -p path-cli --lib cmd_share
```

Expected: FAIL — `Harness`, `SessionRow`, etc. don't exist yet.

- [ ] **Step 4.3: Add the types**

Insert above the `pub fn run` definition:

```rust
use chrono::{DateTime, Utc};

/// Which agent harness a session was produced by.
+
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) enum Harness {
    Claude,
    Gemini,
    Codex,
    Opencode,
    Pi,
}

impl Harness {
    pub(crate) fn name(&self) -> &'static str {
        match self {
            Harness::Claude => "claude",
            Harness::Gemini => "gemini",
            Harness::Codex => "codex",
            Harness::Opencode => "opencode",
            Harness::Pi => "pi",
        }
    }

    /// Padded so all five symbols line up in the fzf column.
    pub(crate) fn symbol(&self) -> &'static str {
        match self {
            Harness::Claude => "claude  ",
            Harness::Gemini => "gemini  ",
            Harness::Codex => "codex   ",
            Harness::Opencode => "opencode",
            Harness::Pi => "pi      ",
        }
    }

    /// True when the underlying provider keys sessions by project path.
    /// claude/gemini/pi: true. codex/opencode: false (sessions store cwd
    /// per-row, not as a directory key).
    pub(crate) fn project_keyed(&self) -> bool {
        matches!(self, Harness::Claude | Harness::Gemini | Harness::Pi)
    }

    pub(crate) fn from_arg(arg: HarnessArg) -> Self {
        match arg {
            HarnessArg::Claude => Harness::Claude,
            HarnessArg::Gemini => Harness::Gemini,
            HarnessArg::Codex => Harness::Codex,
            HarnessArg::Opencode => Harness::Opencode,
            HarnessArg::Pi => Harness::Pi,
        }
    }

    pub(crate) fn parse(s: &str) -> Option<Harness> {
        match s {
            "claude" => Some(Harness::Claude),
            "gemini" => Some(Harness::Gemini),
            "codex" => Some(Harness::Codex),
            "opencode" => Some(Harness::Opencode),
            "pi" => Some(Harness::Pi),
            _ => None,
        }
    }
}

/// One row in the unified session picker.
#[derive(Debug, Clone)]
pub(crate) struct SessionRow {
    pub(crate) harness: Harness,
    /// Project path for keyed providers; `None` for codex/opencode.
    pub(crate) project: Option<String>,
    /// Recorded cwd from the session (codex/opencode only).
+
    pub(crate) cwd: Option<String>,
    pub(crate) session_id: String,
    pub(crate) title: String,
    pub(crate) last_activity: Option<DateTime<Utc>>,
    pub(crate) message_count: usize,
    pub(crate) matches_cwd: bool,
}

/// Bundle of provider managers used during aggregation. Production code
/// builds this from real `$HOME` via `from_environment`; tests construct
/// it directly with provider-specific resolvers.
#[derive(Default)]
pub(crate) struct HarnessBundle {
    pub(crate) claude: Option<toolpath_claude::ClaudeConvo>,
    pub(crate) gemini: Option<toolpath_gemini::GeminiConvo>,
    pub(crate) codex: Option<toolpath_codex::CodexConvo>,
    pub(crate) opencode: Option<toolpath_opencode::OpencodeConvo>,
    pub(crate) pi: Option<toolpath_pi::PiConvo>,
}

impl HarnessBundle {
    /// Build the production bundle. Each provider is included
    /// unconditionally (its `new()` doesn't fail on a missing home dir);
    /// `gather_sessions` skips the ones whose listing returns empty/NotFound.
    pub(crate) fn from_environment() -> Self {
        Self {
            claude: Some(toolpath_claude::ClaudeConvo::new()),
            gemini: Some(toolpath_gemini::GeminiConvo::new()),
            codex: Some(toolpath_codex::CodexConvo::new()),
            opencode: Some(toolpath_opencode::OpencodeConvo::new()),
            pi: Some(toolpath_pi::PiConvo::new()),
        }
    }
}
```

- [ ] **Step 4.4: Run the tests to verify they pass**

```bash
cargo test -p path-cli --lib cmd_share
```

Expected: PASS.

- [ ] **Step 4.5: Commit**

```bash
git add crates/path-cli/src/cmd_share.rs
git commit -m "feat(path-cli): add Harness, SessionRow, HarnessBundle types

Pure data types plus from_arg/parse helpers and a project_keyed
predicate. HarnessBundle::from_environment instantiates each provider
unconditionally; gather_sessions (next task) skips providers whose
listing returns empty or NotFound."
```

---

## Task 5: Implement `gather_sessions` for project-keyed harnesses (claude, gemini, pi)

Aggregator collects rows from claude/gemini/pi only in this task. Codex/opencode arrive in task 6. Each provider gets one unit test that uses an injectable resolver to point at a tempdir.
+ +**Files:** +- Modify: `crates/path-cli/src/cmd_share.rs` + +- [ ] **Step 5.1: Write the failing tests** + +Append to the `mod tests` block in `cmd_share.rs`: + +```rust + use std::path::Path; + use tempfile::TempDir; + + fn write_claude_session(claude_dir: &Path, project_slug: &str, session: &str, prompt: &str) { + let project_dir = claude_dir.join("projects").join(project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + let user = format!( + r#"{{"type":"user","uuid":"u-{session}","timestamp":"2024-01-02T00:00:00Z","cwd":"/test/project","message":{{"role":"user","content":"{prompt}"}}}}"# + ); + let asst = format!( + r#"{{"type":"assistant","uuid":"a-{session}","timestamp":"2024-01-02T00:00:01Z","message":{{"role":"assistant","content":"hi"}}}}"# + ); + std::fs::write( + project_dir.join(format!("{session}.jsonl")), + format!("{user}\n{asst}\n"), + ) + .unwrap(); + } + + fn claude_only_bundle(home: &Path) -> HarnessBundle { + let claude_dir = home.join(".claude"); + std::fs::create_dir_all(&claude_dir).unwrap(); + let resolver = toolpath_claude::PathResolver::new().with_claude_dir(&claude_dir); + HarnessBundle { + claude: Some(toolpath_claude::ClaudeConvo::with_resolver(resolver)), + ..Default::default() + } + } + + #[test] + fn gather_sessions_includes_claude_rows_for_a_project() { + let temp = TempDir::new().unwrap(); + write_claude_session( + &temp.path().join(".claude"), + "-test-project", + "abc-session-one", + "Add a feature", + ); + let bundle = claude_only_bundle(temp.path()); + let cwd = Path::new("/test/project"); + let rows = gather_sessions(&bundle, cwd, None, None); + + assert_eq!(rows.len(), 1); + assert_eq!(rows[0].harness, Harness::Claude); + assert_eq!(rows[0].session_id, "abc-session-one"); + assert_eq!(rows[0].project.as_deref(), Some("/test/project")); + assert!(rows[0].matches_cwd, "cwd should match the project path"); + } + + #[test] + fn gather_sessions_marks_non_matching_project_rows() { + let temp = TempDir::new().unwrap(); + 
write_claude_session( + &temp.path().join(".claude"), + "-test-project", + "abc-session-one", + "Add a feature", + ); + let bundle = claude_only_bundle(temp.path()); + let cwd = Path::new("/some/other/place"); + let rows = gather_sessions(&bundle, cwd, None, None); + + assert_eq!(rows.len(), 1); + assert!(!rows[0].matches_cwd); + } + + #[test] + fn gather_sessions_skips_harness_with_no_home_dir() { + // Empty bundle => no rows, no panic. + let bundle = HarnessBundle::default(); + let rows = gather_sessions(&bundle, Path::new("/anywhere"), None, None); + assert!(rows.is_empty()); + } + + #[test] + fn gather_sessions_filters_by_harness() { + let temp = TempDir::new().unwrap(); + write_claude_session( + &temp.path().join(".claude"), + "-test-project", + "abc-session-one", + "hi", + ); + let bundle = claude_only_bundle(temp.path()); + let cwd = Path::new("/test/project"); + let rows = gather_sessions(&bundle, cwd, Some(Harness::Codex), None); + assert!(rows.is_empty(), "filter to codex must drop claude rows"); + } +``` + +- [ ] **Step 5.2: Run the tests to confirm they fail** + +```bash +cargo test -p path-cli --lib cmd_share::tests::gather +``` + +Expected: FAIL — `gather_sessions` doesn't exist. + +- [ ] **Step 5.3: Implement `gather_sessions` for the three project-keyed harnesses** + +Add above the `mod tests` block: + +```rust +/// Aggregate sessions across the harnesses in `bundle`, ranked so that +/// rows whose project (or recorded cwd) canonicalizes to `cwd` come +/// first, sorted by descending `last_activity`. +/// +/// Filters: `harness_filter` keeps only rows from one harness; `project_filter` +/// keeps only rows whose project (for keyed) or cwd (for session-keyed) +/// canonicalizes to that path. 
+
pub(crate) fn gather_sessions(
    bundle: &HarnessBundle,
    cwd: &std::path::Path,
    harness_filter: Option<Harness>,
    project_filter: Option<&std::path::Path>,
) -> Vec<SessionRow> {
    let mut rows = Vec::new();
    let canonical_cwd = canonicalize_or_self(cwd);
    let canonical_project = project_filter.map(canonicalize_or_self);

    let want = |h: Harness| harness_filter.is_none_or(|f| f == h);

    if want(Harness::Claude) {
        if let Some(mgr) = &bundle.claude {
            collect_claude(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows);
        }
    }
    if want(Harness::Gemini) {
        if let Some(mgr) = &bundle.gemini {
            collect_gemini(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows);
        }
    }
    if want(Harness::Pi) {
        if let Some(mgr) = &bundle.pi {
            collect_pi(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows);
        }
    }

    rows.sort_by(|a, b| {
        b.matches_cwd
            .cmp(&a.matches_cwd)
            .then_with(|| b.last_activity.cmp(&a.last_activity))
    });
    rows
}

fn canonicalize_or_self(p: &std::path::Path) -> std::path::PathBuf {
    std::fs::canonicalize(p).unwrap_or_else(|_| p.to_path_buf())
}

fn paths_match(a: &std::path::Path, b: &std::path::Path) -> bool {
    canonicalize_or_self(a) == canonicalize_or_self(b)
}

fn collect_claude(
    mgr: &toolpath_claude::ClaudeConvo,
    canonical_cwd: &std::path::Path,
    project_filter: Option<&std::path::Path>,
    out: &mut Vec<SessionRow>,
) {
    let projects = match mgr.list_projects() {
        Ok(ps) if !ps.is_empty() => ps,
        Ok(_) => return,
        Err(e) if is_not_found(&e) => return,
        Err(e) => {
            eprintln!("warning: claude aggregation failed: {e}");
            return;
        }
    };
    for project in projects {
        let project_path = std::path::Path::new(&project);
        if let Some(filter) = project_filter
            && !paths_match(project_path, filter)
        {
            continue;
        }
        let metas = match mgr.list_conversation_metadata(&project) {
            Ok(m) => m,
            Err(e) => {
                eprintln!("warning: claude project {project} failed: {e}");
                continue;
            }
        };
        let matches_cwd = 
paths_match(project_path, canonical_cwd);
        for m in metas {
            out.push(SessionRow {
                harness: Harness::Claude,
                project: Some(m.project_path),
                cwd: None,
                session_id: m.session_id,
                title: m
                    .first_user_message
                    .unwrap_or_else(|| "(no prompt)".to_string()),
                last_activity: m.last_activity,
                message_count: m.message_count,
                matches_cwd,
            });
        }
    }
}

fn collect_gemini(
    mgr: &toolpath_gemini::GeminiConvo,
    canonical_cwd: &std::path::Path,
    project_filter: Option<&std::path::Path>,
    out: &mut Vec<SessionRow>,
) {
    let projects = match mgr.list_projects() {
        Ok(ps) if !ps.is_empty() => ps,
        Ok(_) => return,
        Err(e) if is_not_found(&e) => return,
        Err(e) => {
            eprintln!("warning: gemini aggregation failed: {e}");
            return;
        }
    };
    for project in projects {
        let project_path = std::path::Path::new(&project);
        if let Some(filter) = project_filter
            && !paths_match(project_path, filter)
        {
            continue;
        }
        let metas = match mgr.list_conversation_metadata(&project) {
            Ok(m) => m,
            Err(e) => {
                eprintln!("warning: gemini project {project} failed: {e}");
                continue;
            }
        };
        let matches_cwd = paths_match(project_path, canonical_cwd);
        for m in metas {
            out.push(SessionRow {
                harness: Harness::Gemini,
                project: Some(m.project_path),
                cwd: None,
                session_id: m.session_uuid,
                title: m
                    .first_user_message
                    .unwrap_or_else(|| "(no prompt)".to_string()),
                last_activity: m.last_activity,
                message_count: m.message_count,
                matches_cwd,
            });
        }
    }
}

fn collect_pi(
    mgr: &toolpath_pi::PiConvo,
    canonical_cwd: &std::path::Path,
    project_filter: Option<&std::path::Path>,
    out: &mut Vec<SessionRow>,
) {
    let projects = match mgr.list_projects() {
        Ok(ps) if !ps.is_empty() => ps,
        Ok(_) => return,
        Err(e) if is_not_found_pi(&e) => return,
        Err(e) => {
            eprintln!("warning: pi aggregation failed: {e}");
            return;
        }
    };
    for project in projects {
        let project_path = std::path::Path::new(&project);
        if let Some(filter) = project_filter
&& !paths_match(project_path, filter) + { + continue; + } + let metas = match mgr.list_sessions(&project) { + Ok(m) => m, + Err(e) => { + eprintln!("warning: pi project {project} failed: {e}"); + continue; + } + }; + let matches_cwd = paths_match(project_path, canonical_cwd); + for m in metas { + // SessionMeta.timestamp is a String; parse to DateTime when possible. + let last_activity = chrono::DateTime::parse_from_rfc3339(&m.timestamp) + .ok() + .map(|d| d.with_timezone(&Utc)); + out.push(SessionRow { + harness: Harness::Pi, + project: Some(project.clone()), + cwd: None, + session_id: m.id, + title: m + .first_user_message + .unwrap_or_else(|| "(no prompt)".to_string()), + last_activity, + message_count: m.entry_count, + matches_cwd, + }); + } + } +} + +fn is_not_found(err: &toolpath_claude::ConvoError) -> bool { + use toolpath_claude::ConvoError; + matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) + || matches!(err, ConvoError::ClaudeDirectoryNotFound(_)) +} + +fn is_not_found_pi(err: &toolpath_pi::PiError) -> bool { + use toolpath_pi::PiError; + matches!(err, PiError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, PiError::ProjectNotFound(_)) +} +``` + +Note: claude / gemini / codex / opencode all re-export `ConvoError` with `Io(io::Error)` and `NoHomeDirectory` variants. Pi re-exports `PiError` (different name) with `Io` and `ProjectNotFound` variants. The helpers above already account for that. Variant names were verified against `crates/toolpath-{claude,gemini,codex,opencode,pi}/src/error.rs` while writing this plan. + +- [ ] **Step 5.4: Run the tests to verify they pass** + +```bash +cargo test -p path-cli --lib cmd_share +``` + +Expected: PASS. If `is_not_found` doesn't compile, inspect the provider's `ConvoError` enum and adjust the match arms; the test set still passes once it compiles because the fixture has a real home. 
+ +- [ ] **Step 5.5: Run clippy to catch warning-as-error issues** + +```bash +cargo clippy -p path-cli -- -D warnings +``` + +Expected: clean. + +- [ ] **Step 5.6: Commit** + +```bash +git add crates/path-cli/src/cmd_share.rs +git commit -m "feat(path-cli): implement gather_sessions for claude/gemini/pi + +Aggregates SessionRow values from the three project-keyed providers, +sorts cwd-matching rows first then by recency, and silently skips +harnesses whose listing returns empty or NotFound. Codex and opencode +land in the next commit." +``` + +--- + +## Task 6: Extend `gather_sessions` to codex and opencode + add ranking/filter coverage + +Codex and opencode address sessions by id; their `cwd` lives inside the session metadata, so the matching logic differs slightly from the project-keyed harnesses. + +**Files:** +- Modify: `crates/path-cli/src/cmd_share.rs` + +- [ ] **Step 6.1: Write the failing tests** + +Append to the `mod tests` block in `cmd_share.rs`: + +```rust + fn codex_only_bundle(home: &Path) -> HarnessBundle { + let codex_dir = home.join(".codex"); + std::fs::create_dir_all(&codex_dir).unwrap(); + let resolver = toolpath_codex::PathResolver::new().with_codex_dir(&codex_dir); + HarnessBundle { + codex: Some(toolpath_codex::CodexConvo::with_resolver(resolver)), + ..Default::default() + } + } + + fn write_codex_session(codex_dir: &Path, id: &str, cwd: &str) { + // Date-bucketed layout: ~/.codex/sessions/YYYY/MM/DD/rollout-*-.jsonl + let dir = codex_dir.join("sessions/2026/05/07"); + std::fs::create_dir_all(&dir).unwrap(); + let file = dir.join(format!("rollout-2026-05-07T00-00-00-{id}.jsonl")); + let meta = format!( + r#"{{"timestamp":"2026-05-07T00:00:00Z","type":"session_meta","payload":{{"id":"{id}","timestamp":"2026-05-07T00:00:00Z","cwd":"{cwd}","originator":"codex-tui","cli_version":"test","source":"cli","model_provider":"openai"}}}}"# + ); + let user = format!( + 
r#"{{"timestamp":"2026-05-07T00:00:01Z","type":"response_item","payload":{{"type":"message","role":"user","content":[{{"type":"input_text","text":"hi"}}]}}}}"# + ); + std::fs::write(file, format!("{meta}\n{user}\n")).unwrap(); + } + + #[test] + fn gather_sessions_includes_codex_rows_with_cwd_match() { + let temp = TempDir::new().unwrap(); + write_codex_session( + &temp.path().join(".codex"), + "00000000-0000-0000-0000-0000000000aa", + "/work/proj", + ); + let bundle = codex_only_bundle(temp.path()); + let rows = gather_sessions(&bundle, Path::new("/work/proj"), None, None); + assert_eq!(rows.len(), 1); + assert_eq!(rows[0].harness, Harness::Codex); + assert_eq!(rows[0].cwd.as_deref(), Some("/work/proj")); + assert!(rows[0].matches_cwd); + } + + #[test] + fn gather_sessions_ranks_cwd_matches_first() { + // Two claude sessions: one in cwd (older), one elsewhere (newer). + // Despite the elsewhere row being newer, the cwd-match must come first. + let temp = TempDir::new().unwrap(); + let claude_dir = temp.path().join(".claude"); + write_claude_session(&claude_dir, "-cwd-project", "in-cwd-session", "hi"); + // Bump activity on the not-in-cwd session by writing a later timestamp. 
+
        let not_dir = claude_dir.join("projects").join("-other-project");
        std::fs::create_dir_all(&not_dir).unwrap();
        std::fs::write(
            not_dir.join("not-in-cwd-session.jsonl"),
            r#"{"type":"user","uuid":"u-x","timestamp":"2030-01-01T00:00:00Z","cwd":"/other/project","message":{"role":"user","content":"later"}}"#.to_string()
                + "\n",
        )
        .unwrap();
        let bundle = claude_only_bundle(temp.path());
        let rows = gather_sessions(&bundle, Path::new("/cwd/project"), None, None);

        assert_eq!(rows.len(), 2);
        assert_eq!(rows[0].session_id, "in-cwd-session");
        assert!(rows[0].matches_cwd);
        assert!(!rows[1].matches_cwd);
    }
```

- [ ] **Step 6.2: Run the tests to confirm they fail**

```bash
cargo test -p path-cli --lib cmd_share::tests::gather_sessions_includes_codex
cargo test -p path-cli --lib cmd_share::tests::gather_sessions_ranks
```

Expected: FAIL — codex collection isn't implemented.

- [ ] **Step 6.3: Add `collect_codex` and `collect_opencode` and dispatch them**

Inside `gather_sessions`, add the two extra blocks after the pi block:

```rust
    if want(Harness::Codex) {
        if let Some(mgr) = &bundle.codex {
            collect_codex(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows);
        }
    }
    if want(Harness::Opencode) {
        if let Some(mgr) = &bundle.opencode {
            collect_opencode(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows);
        }
    }
```

Add the two new collector functions next to the existing ones:

```rust
fn collect_codex(
    mgr: &toolpath_codex::CodexConvo,
    canonical_cwd: &std::path::Path,
    project_filter: Option<&std::path::Path>,
    out: &mut Vec<SessionRow>,
) {
    let metas = match mgr.list_sessions() {
        Ok(m) if !m.is_empty() => m,
        Ok(_) => return,
        Err(e) if is_not_found_codex(&e) => return,
        Err(e) => {
            eprintln!("warning: codex aggregation failed: {e}");
            return;
        }
    };
    for m in metas {
        let cwd_str = m.cwd.as_ref().map(|p| p.to_string_lossy().into_owned());
        if let Some(filter) = project_filter {
            let 
stored = match cwd_str.as_deref() {
                Some(s) => std::path::PathBuf::from(s),
                None => continue,
            };
            if !paths_match(&stored, filter) {
                continue;
            }
        }
        let matches_cwd = m
            .cwd
            .as_deref()
            .map(|p| paths_match(p, canonical_cwd))
            .unwrap_or(false);
        out.push(SessionRow {
            harness: Harness::Codex,
            project: None,
            cwd: cwd_str,
            session_id: m.id,
            title: m
                .first_user_message
                .unwrap_or_else(|| "(no prompt)".to_string()),
            last_activity: m.last_activity,
            message_count: m.line_count,
            matches_cwd,
        });
    }
}

fn collect_opencode(
    mgr: &toolpath_opencode::OpencodeConvo,
    canonical_cwd: &std::path::Path,
    project_filter: Option<&std::path::Path>,
    out: &mut Vec<SessionRow>,
) {
    let metas = match mgr.io().list_session_metadata(None) {
        Ok(m) if !m.is_empty() => m,
        Ok(_) => return,
        Err(e) if is_not_found_opencode(&e) => return,
        Err(e) => {
            eprintln!("warning: opencode aggregation failed: {e}");
            return;
        }
    };
    for m in metas {
        if let Some(filter) = project_filter
            && !paths_match(&m.directory, filter)
        {
            continue;
        }
        let matches_cwd = paths_match(&m.directory, canonical_cwd);
        let cwd_str = m.directory.to_string_lossy().into_owned();
        let title = match (&m.first_user_message, m.title.is_empty()) {
            (Some(s), _) if !s.is_empty() => s.clone(),
            (_, false) => m.title.clone(),
            _ => "(no prompt)".to_string(),
        };
        out.push(SessionRow {
            harness: Harness::Opencode,
            project: None,
            cwd: Some(cwd_str),
            session_id: m.id,
            title,
            last_activity: m.last_activity,
            message_count: m.message_count,
            matches_cwd,
        });
    }
}

fn is_not_found_codex(err: &toolpath_codex::ConvoError) -> bool {
    use toolpath_codex::ConvoError;
    matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound)
        || matches!(err, ConvoError::NoHomeDirectory)
}

fn is_not_found_opencode(err: &toolpath_opencode::ConvoError) -> bool {
    use toolpath_opencode::ConvoError;
    matches!(err, ConvoError::Io(e) if e.kind() == 
std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) +} +``` + +(Both `is_not_found_codex` and `is_not_found_opencode` use `ConvoError` since both providers re-export that name. Variant names verified against `crates/toolpath-{codex,opencode}/src/error.rs`.) + +- [ ] **Step 6.4: Run the tests to verify they pass** + +```bash +cargo test -p path-cli --lib cmd_share +cargo clippy -p path-cli -- -D warnings +``` + +Expected: PASS, clippy clean. + +- [ ] **Step 6.5: Commit** + +```bash +git add crates/path-cli/src/cmd_share.rs +git commit -m "feat(path-cli): cover codex+opencode in gather_sessions + +Adds collect_codex/collect_opencode and the matching ranking/filter +tests. Session-keyed providers compare canonical(stored_cwd) to +canonical(cwd) for matches_cwd; project_filter applies to the same +recorded cwd." +``` + +--- + +## Task 7: Implement explicit-args path (skip picker, derive, upload) + +This makes `path share --harness X --session Y [--project P] [--anon] ...` end-to-end functional. The picker path lands in task 8. + +**Files:** +- Modify: `crates/path-cli/src/cmd_share.rs` +- Modify: `crates/path-cli/tests/integration.rs` + +- [ ] **Step 7.1: Write the failing integration test** + +Append to `crates/path-cli/tests/integration.rs`: + +```rust +#[test] +fn share_explicit_args_uploads_via_anon() { + use std::io::Write; + use std::net::TcpListener; + + // Stand up a one-shot mock that returns a valid AnonUploadResponse. + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let port = listener.local_addr().unwrap().port(); + let server = std::thread::spawn(move || { + let (mut stream, _) = listener.accept().unwrap(); + // Drain the request just enough to keep the OS happy. 
+ use std::io::Read; + let mut buf = [0u8; 4096]; + let _ = stream.read(&mut buf); + let body = r#"{"id":"abc-123","url":"https://example.test/anon/abc-123"}"#; + let resp = format!( + "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", + body.len(), + body + ); + let _ = stream.write_all(resp.as_bytes()); + }); + + // Build a claude fixture so the explicit-args path has something to derive. + let temp = tempfile::tempdir().unwrap(); + let project = temp.path().join("proj"); + std::fs::create_dir_all(&project).unwrap(); + let claude_dir = temp.path().join(".claude"); + let project_slug = "-".to_string() + + &project.to_string_lossy().replace(std::path::MAIN_SEPARATOR, "-"); + let project_dir = claude_dir.join("projects").join(&project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + std::fs::write( + project_dir.join("session-abc.jsonl"), + format!( + r#"{{"type":"user","uuid":"u-1","timestamp":"2024-01-01T00:00:00Z","cwd":"{cwd}","message":{{"role":"user","content":"hi"}}}} +{{"type":"assistant","uuid":"a-1","timestamp":"2024-01-01T00:00:01Z","message":{{"role":"assistant","content":"hello"}}}} +"#, + cwd = project.display() + ), + ) + .unwrap(); + + let cfg = tempfile::tempdir().unwrap(); + cmd() + .env("HOME", temp.path()) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--anon", "--no-cache", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success() + .stdout(predicate::str::contains("https://example.test/anon/abc-123")) + .stderr(predicate::str::contains("Uploaded")); + + server.join().unwrap(); +} +``` + +- [ ] **Step 7.2: Run the test to confirm it fails** + +```bash +cargo test -p path-cli --test integration share_explicit_args_uploads_via_anon +``` + +Expected: FAIL — `path share` still bails with "not yet implemented". 
+ +- [ ] **Step 7.3: Implement the explicit-args path** + +In `cmd_share.rs`, replace the stub `pub fn run` with: + +```rust +pub fn run(args: ShareArgs) -> Result<()> { + let harness = args.harness.map(Harness::from_arg); + + if let (Some(h), Some(session)) = (harness, &args.session) { + return share_explicit(h, session.as_str(), &args); + } + + if args.session.is_some() && harness.is_none() { + anyhow::bail!("--session requires --harness"); + } + + // Picker path lands in the next task. + anyhow::bail!("interactive `path share` is not yet implemented") +} + +fn share_explicit(harness: Harness, session: &str, args: &ShareArgs) -> Result<()> { + let project = match (harness.project_keyed(), args.project.as_ref()) { + (true, Some(p)) => Some(p.to_string_lossy().into_owned()), + (true, None) => anyhow::bail!( + "--project required when --harness is {} and --session is set", + harness.name() + ), + (false, _) => None, + }; + + let derived = derive_one(harness, project.as_deref(), session)?; + let summary = format!( + "{} session {}", + harness.name(), + derived.cache_id + ); + + if !args.no_cache { + let path = crate::cmd_cache::write_cached(&derived.cache_id, &derived.doc, args.force)?; + eprintln!( + "Imported {} session → {} ({})", + harness.name(), + derived.cache_id, + path.display() + ); + } + + let body = derived.doc.to_json()?; + let upload = crate::cmd_export::PathbaseUploadArgs { + url: args.url.clone(), + anon: args.anon, + repo: args.repo.clone(), + slug: args.slug.clone(), + public: args.public, + }; + crate::cmd_export::run_pathbase_inner(upload, &body, &summary) +} + +fn derive_one( + harness: Harness, + project: Option<&str>, + session: &str, +) -> Result { + match harness { + Harness::Claude => { + crate::cmd_import::derive_claude_pair(project.expect("project_keyed"), session) + } + Harness::Gemini => crate::cmd_import::derive_gemini_pair( + project.expect("project_keyed"), + session, + false, + ), + Harness::Pi => { + 
crate::cmd_import::derive_pi_pair(project.expect("project_keyed"), session, None) + } + Harness::Codex => crate::cmd_import::derive_codex_one(session), + Harness::Opencode => crate::cmd_import::derive_opencode_one(session, false), + } +} +``` + +`RepoSpec` is `Clone`-able via the existing `#[derive(Debug, Clone)]` on the struct in `cmd_export`, so `args.repo.clone()` works. + +- [ ] **Step 7.4: Run the test to verify it passes** + +```bash +cargo test -p path-cli --test integration share_explicit_args_uploads_via_anon +``` + +Expected: PASS. + +- [ ] **Step 7.5: Add cache-behavior integration tests** + +Append to `crates/path-cli/tests/integration.rs`: + +```rust +/// Helper for the cache tests. Spawns a one-shot mock anon-upload server +/// on a free port and returns (port, server-thread-handle, fixture-temp, +/// project-path, $HOME-path). +fn share_anon_fixture() -> (u16, std::thread::JoinHandle<()>, tempfile::TempDir, PathBuf, PathBuf) +{ + use std::io::{Read, Write}; + use std::net::TcpListener; + + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let port = listener.local_addr().unwrap().port(); + let server = std::thread::spawn(move || { + let (mut stream, _) = listener.accept().unwrap(); + let mut buf = [0u8; 4096]; + let _ = stream.read(&mut buf); + let body = r#"{"id":"abc","url":"https://example.test/anon/abc"}"#; + let resp = format!( + "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", + body.len(), + body + ); + let _ = stream.write_all(resp.as_bytes()); + }); + + let temp = tempfile::tempdir().unwrap(); + let project = temp.path().join("proj"); + std::fs::create_dir_all(&project).unwrap(); + let claude_dir = temp.path().join(".claude"); + let project_slug = "-".to_string() + + &project.to_string_lossy().replace(std::path::MAIN_SEPARATOR, "-"); + let project_dir = claude_dir.join("projects").join(&project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + std::fs::write( + 
project_dir.join("session-abc.jsonl"), + format!( + r#"{{"type":"user","uuid":"u-1","timestamp":"2024-01-01T00:00:00Z","cwd":"{cwd}","message":{{"role":"user","content":"hi"}}}} +{{"type":"assistant","uuid":"a-1","timestamp":"2024-01-01T00:00:01Z","message":{{"role":"assistant","content":"hello"}}}} +"#, + cwd = project.display() + ), + ) + .unwrap(); + + let home = temp.path().to_path_buf(); + (port, server, temp, project, home) +} + +#[test] +fn share_writes_cache_by_default() { + let (port, server, _temp, project, home) = share_anon_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("HOME", &home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--anon", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success(); + + let docs = cfg.path().join("documents"); + let entries: Vec<_> = std::fs::read_dir(&docs) + .unwrap() + .filter_map(|e| e.ok()) + .collect(); + assert_eq!( + entries.len(), + 1, + "expected exactly one cache entry, got {entries:?}" + ); + let name = entries[0].file_name().to_string_lossy().into_owned(); + assert!( + name.starts_with("claude-"), + "expected claude-* cache id, got {name}" + ); + + server.join().unwrap(); +} + +#[test] +fn share_no_cache_skips_write() { + let (port, server, _temp, project, home) = share_anon_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("HOME", &home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--anon", "--no-cache", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success(); + + let docs = cfg.path().join("documents"); + if docs.exists() { + let entries: Vec<_> = std::fs::read_dir(&docs) + .unwrap() + .filter_map(|e| e.ok()) + .collect(); + assert!( + entries.is_empty(), + "expected no cache 
entries with --no-cache, got {entries:?}" + ); + } + + server.join().unwrap(); +} +``` + +- [ ] **Step 7.6: Run the new tests** + +```bash +cargo test -p path-cli --test integration share_writes_cache_by_default share_no_cache_skips_write +``` + +Expected: PASS. + +- [ ] **Step 7.7: Run the full test suite + clippy** + +```bash +cargo test -p path-cli +cargo clippy -p path-cli -- -D warnings +``` + +Expected: green. + +- [ ] **Step 7.8: Commit** + +```bash +git add crates/path-cli/src/cmd_share.rs crates/path-cli/tests/integration.rs +git commit -m "feat(path-cli): implement \`path share\` explicit-args path + +When --harness and --session are both set, share derives the session +via cmd_import's pair helpers, optionally writes the cache, then +uploads via cmd_export::run_pathbase_inner. Picker path follows." +``` + +--- + +## Task 8: Implement the picker, non-TTY recipe, and empty-result probe summary + +Adds the unified fzf picker, the recipe message when fzf isn't available, and the probe-summary error when no sessions exist anywhere. 
+ +**Files:** +- Modify: `crates/path-cli/src/cmd_share.rs` +- Modify: `crates/path-cli/tests/integration.rs` + +- [ ] **Step 8.1: Write the failing tests** + +Append to `crates/path-cli/tests/integration.rs`: + +```rust +#[test] +fn share_filters_by_project_with_no_matches_errors() { + let cfg = tempfile::tempdir().unwrap(); + let home = tempfile::tempdir().unwrap(); + let nonexistent = home.path().join("never"); + + cmd() + .env("HOME", home.path()) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["share", "--project"]) + .arg(&nonexistent) + .assert() + .failure() + .stderr(predicate::str::contains("No agent sessions found in project")); +} +``` + +Append to `crates/path-cli/src/cmd_share.rs` `mod tests`: + +```rust + #[test] + fn parse_picker_row_roundtrips_keyed() { + let row = SessionRow { + harness: Harness::Claude, + project: Some("/tmp/proj".to_string()), + cwd: None, + session_id: "sess-abc".to_string(), + title: "Hello\tworld".to_string(), + last_activity: None, + message_count: 3, + matches_cwd: true, + }; + let line = format_picker_row(&row); + let (harness, key, session) = parse_picker_row(&line).unwrap(); + assert_eq!(harness, Harness::Claude); + assert_eq!(key, "/tmp/proj"); + assert_eq!(session, "sess-abc"); + } + + #[test] + fn parse_picker_row_roundtrips_session_keyed() { + let row = SessionRow { + harness: Harness::Codex, + project: None, + cwd: Some("/work/proj".to_string()), + session_id: "0190abcd".to_string(), + title: "(no prompt)".to_string(), + last_activity: None, + message_count: 0, + matches_cwd: false, + }; + let line = format_picker_row(&row); + let (harness, key, session) = parse_picker_row(&line).unwrap(); + assert_eq!(harness, Harness::Codex); + assert_eq!(key, "/work/proj"); // codex has no project; cwd carried as the keyed slot + assert_eq!(session, "0190abcd"); + } +``` + +Append to `crates/path-cli/tests/integration.rs`: + +```rust +#[test] +fn share_no_harness_non_tty_prints_recipe() { + let cfg = 
/// Build the TSV line fed to fzf. Cols 1–3 are hidden (harness/key/session,
/// used as parser keys); cols 4..9 are visible to the user.
+fn parse_picker_row(line: &str) -> Option<(Harness, String, String)> { + let mut parts = line.split('\t'); + let h = Harness::parse(parts.next()?)?; + let key = parts.next()?.to_string(); + let session = parts.next()?.to_string(); + if session.is_empty() { + return None; + } + Some((h, key, session)) +} + +fn tab_safe(s: &str) -> String { + s.replace(['\t', '\n', '\r'], " ") +} + +fn fzf_title(s: &str) -> String { + const MAX: usize = 120; + let safe = tab_safe(s); + if safe.chars().count() > MAX { + let head: String = safe.chars().take(MAX - 1).collect(); + format!("{head}…") + } else { + safe + } +} + +fn project_short(p: &str) -> String { + let trimmed = p.trim_end_matches('/'); + let parts: Vec<&str> = trimmed.rsplit('/').take(2).collect(); + if parts.is_empty() { + return p.to_string(); + } + let mut out: Vec<&str> = parts.into_iter().collect(); + out.reverse(); + out.join("/") +} +``` + +- [ ] **Step 8.4: Wire the picker into `run`** + +Replace the second `anyhow::bail!` in `pub fn run` with the picker dispatch: + +```rust +pub fn run(args: ShareArgs) -> Result<()> { + let harness = args.harness.map(Harness::from_arg); + + if let (Some(h), Some(session)) = (harness, &args.session) { + return share_explicit(h, session.as_str(), &args); + } + if args.session.is_some() && harness.is_none() { + anyhow::bail!("--session requires --harness"); + } + + let cwd = std::env::current_dir()?; + let bundle = HarnessBundle::from_environment(); + let project_filter = args.project.as_deref(); + let rows = gather_sessions(&bundle, &cwd, harness, project_filter); + + if rows.is_empty() { + return bail_no_sessions(&bundle, project_filter); + } + + if !crate::fzf::available() { + eprintln!( + "Interactive `path share` needs `fzf` on PATH and a TTY.\n\ + \n\ + Manual recipe:\n \ + path import # writes a cache entry, prints its id\n \ + path export pathbase --input " + ); + anyhow::bail!("fzf unavailable; run `path import ` then `path export pathbase`"); + } + + let lines: Vec = 
rows.iter().map(format_picker_row).collect(); + let host = pathbase_host_for_picker(&args); + let header = format!("share an agent session (Enter = upload to {host})"); + let opts = crate::fzf::PickOptions { + with_nth: "4..", + prompt: "share> ", + preview: Some("path show {1} --project {2} --session {3}"), + header: Some(&header), + tiebreak: "index", + multi: false, + }; + let selected = crate::fzf::pick(&lines, &opts)?; + let line = match selected.into_iter().next() { + Some(l) => l, + None => return Ok(()), // user cancelled + }; + let (h, key, session) = parse_picker_row(&line) + .ok_or_else(|| anyhow::anyhow!("internal: failed to parse picker row"))?; + + let mut explicit = ShareArgs { + url: args.url.clone(), + anon: args.anon, + repo: args.repo.clone(), + slug: args.slug.clone(), + public: args.public, + harness: Some(harness_to_arg(h)), + session: Some(session.clone()), + project: if h.project_keyed() { + Some(PathBuf::from(&key)) + } else { + None + }, + force: args.force, + no_cache: args.no_cache, + }; + eprintln!( + "Picked {} session {}", + h.name(), + explicit.session.as_deref().unwrap_or("?") + ); + let session_id = explicit.session.take().unwrap(); + share_explicit(h, &session_id, &explicit) +} + +fn harness_to_arg(h: Harness) -> HarnessArg { + match h { + Harness::Claude => HarnessArg::Claude, + Harness::Gemini => HarnessArg::Gemini, + Harness::Codex => HarnessArg::Codex, + Harness::Opencode => HarnessArg::Opencode, + Harness::Pi => HarnessArg::Pi, + } +} + +fn pathbase_host_for_picker(args: &ShareArgs) -> String { + use crate::cmd_pathbase::resolve_url; + if let Some(u) = &args.url { + return resolve_url(Some(u.clone())); + } + // Best-effort: if there's a stored session, surface its URL; otherwise fall back to default. 
+ let path = match crate::cmd_pathbase::credentials_path() { + Ok(p) => p, + Err(_) => return resolve_url(None), + }; + match crate::cmd_pathbase::load_session(&path) { + Ok(Some(s)) => s.url, + _ => resolve_url(None), + } +} + +fn bail_no_sessions(bundle: &HarnessBundle, project_filter: Option<&std::path::Path>) -> Result<()> { + if let Some(p) = project_filter { + anyhow::bail!( + "No agent sessions found in project {}. Run without --project to see sessions across all projects.", + p.display() + ); + } + + let mut summary = String::from("No agent sessions found.\n"); + summary.push_str(&probe_summary_line("claude", bundle.claude.is_some())); + summary.push_str(&probe_summary_line("gemini", bundle.gemini.is_some())); + summary.push_str(&probe_summary_line("codex", bundle.codex.is_some())); + summary.push_str(&probe_summary_line("opencode", bundle.opencode.is_some())); + summary.push_str(&probe_summary_line("pi", bundle.pi.is_some())); + eprint!("{summary}"); + anyhow::bail!("no shareable sessions"); +} + +fn probe_summary_line(name: &str, present: bool) -> String { + if present { + format!(" {name}: 0 sessions\n") + } else { + format!(" {name}: not configured\n") + } +} +``` + +In `cmd_pathbase.rs`, the `credentials_path` and `load_session` helpers are already `pub(crate)` — no change needed. + +`crate::cmd_pathbase` and `crate::cmd_cache` and `crate::cmd_export` and `crate::cmd_import` are all in scope by virtue of being sibling modules under `path_cli::`. Add `use` statements at the top of `cmd_share.rs` if rust-analyzer prefers — the qualified paths above also work. + +The `pick` call's `preview` template substitutes col 1 (harness) into the `path show` invocation. `path show` already supports each harness as a subcommand. For codex/opencode the `--project {2}` arg becomes `--project /work/proj` even though those subcommands don't accept `--project`; if a future version of `path show` errors on that, swap to per-harness preview templates. 
Today they accept `--session` regardless, and unknown args print to stderr (preview pane) without aborting the picker. + +If `path show codex --project foo --session bar` errors, drop the `--project` from the preview template entirely; the design allows that simplification. + +- [ ] **Step 8.5: Run the tests to verify they pass** + +```bash +cargo test -p path-cli --lib cmd_share +cargo test -p path-cli --test integration share_no_harness_non_tty_prints_recipe +cargo test -p path-cli --test integration share_explicit_args_uploads_via_anon +cargo clippy -p path-cli -- -D warnings +``` + +Expected: all green. + +- [ ] **Step 8.6: Manual smoke test of the picker (locally only — not CI)** + +```bash +cargo build -p path-cli +./target/debug/path share --url http://127.0.0.1:1 +``` + +Expected on a machine with installed harnesses and fzf: an fzf list opens; cwd-matching sessions appear at the top; selecting one fails the upload (port 1) but proves the picker → derive → upload wiring. Press Esc to cancel — exit code should be 0 with nothing on stdout. + +- [ ] **Step 8.7: Commit** + +```bash +git add crates/path-cli/src/cmd_share.rs crates/path-cli/tests/integration.rs +git commit -m "feat(path-cli): wire the unified \`path share\` picker + +Aggregates SessionRow values across installed harnesses, ranks +cwd-matches first, and pipes them through fzf. Falls back to a +manual-recipe message when fzf isn't available, and prints a probe +summary when no harness has any sessions to share." 
+``` + +--- + +## Task 9: Documentation — `CLAUDE.md` + +**Files:** +- Modify: `CLAUDE.md` + +- [ ] **Step 9.1: Add a `path share` line to the CLI usage block** + +In `CLAUDE.md`, under the `## CLI usage` section, after the `path import` group of examples and before the `# Export toolpath documents…` block, insert: + +```markdown +# Share an agent session to Pathbase (interactive picker, single-shot) +cargo run -p path-cli -- share +cargo run -p path-cli -- share --harness claude --session --project /path/to/project +cargo run -p path-cli -- share --url https://my-pathbase.example +``` + +- [ ] **Step 9.2: Add a "Things to know" entry** + +In the `## Things to know` bullet list, append: + +```markdown +- `path share` is the one-shot equivalent of `path import | path export pathbase`. It probes installed agent harnesses (claude/gemini/codex/opencode/pi), aggregates their sessions into a single fzf picker, and ranks rows whose project (claude/gemini/pi) or recorded cwd (codex/opencode) canonicalizes to the current directory at the top. `--harness` narrows the picker to one provider; `--harness X --session Y` (and `--project P` for keyed providers) skips the picker entirely. Pathbase flags (`--url`, `--anon`, `--repo`, `--slug`, `--public`) match `path export pathbase`. By default the derived doc is written to the cache like `import` does; pass `--no-cache` to skip. +``` + +- [ ] **Step 9.3: Build the workspace once more as a sanity check** + +```bash +cargo build --workspace +cargo test --workspace +cargo clippy --workspace -- -D warnings +``` + +Expected: clean. + +- [ ] **Step 9.4: Commit** + +```bash +git add CLAUDE.md +git commit -m "docs: document \`path share\` in CLAUDE.md" +``` + +--- + +## Done criteria + +- `path share --help` lists all flags from the design. +- `path share --harness X --session Y [--project P]` derives + uploads in one shot, with the share URL on stdout. 
+- `path share` (no flags, fzf available) opens a unified picker with cwd-matching rows ranked first. +- `path share` (no flags, no fzf) prints the manual recipe and exits 1. +- `path share --project P` filters to that project; if no rows match, exits 1 with a focused error message. +- All existing tests still pass; `cargo clippy --workspace -- -D warnings` is clean. +- The `CLAUDE.md` CLI block and Things-to-know list reflect the new command. From 3959146fe4d14bebce708c645dd8c948757ceb32 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 16:12:33 -0400 Subject: [PATCH 03/36] refactor(path-cli): extract single-pair derive helpers MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Lifts DerivedDoc to pub(crate) and adds derive_{claude,gemini,pi}_pair and derive_{codex,opencode}_one. These are the explicit-args paths already exercised by the (Some(p), Some(s), _) arm of each existing dispatch — extracted so cmd_share can reuse them without re-implementing the per-harness wiring. --- crates/path-cli/src/cmd_import.rs | 112 +++++++++++++++++++++++++++++- 1 file changed, 109 insertions(+), 3 deletions(-) diff --git a/crates/path-cli/src/cmd_import.rs b/crates/path-cli/src/cmd_import.rs index 75ae52e..a1929ea 100644 --- a/crates/path-cli/src/cmd_import.rs +++ b/crates/path-cli/src/cmd_import.rs @@ -171,9 +171,9 @@ pub fn run(args: ImportArgs, pretty: bool) -> Result<()> { emit(&docs, args.force, args.no_cache, pretty) } -struct DerivedDoc { - cache_id: String, - doc: Graph, +pub(crate) struct DerivedDoc { + pub(crate) cache_id: String, + pub(crate) doc: Graph, } fn emit(docs: &[DerivedDoc], force: bool, no_cache: bool, pretty: bool) -> Result<()> { @@ -447,6 +447,27 @@ fn derive_claude_with_manager( wrap_paths_claude(paths) } +/// Derive a single Claude conversation given an explicit project + session. 
+/// Used by `cmd_share` after its picker has resolved the pair; mirrors the +/// `(Some(p), Some(s), _)` arm in [`derive_claude_with_manager`]. +#[allow(dead_code)] // wired up by cmd_share in a follow-up task +pub(crate) fn derive_claude_pair(project: &str, session: &str) -> Result { + let manager = toolpath_claude::ClaudeConvo::new(); + let cfg = toolpath_claude::derive::DeriveConfig { + project_path: Some(project.to_string()), + include_thinking: false, + }; + let convo = manager + .read_conversation(project, session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = toolpath_claude::derive::derive_path(&convo, &cfg); + let cache_id = make_id("claude", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} + fn wrap_paths_claude(paths: Vec) -> Result> { Ok(paths .into_iter() @@ -637,6 +658,29 @@ fn derive_gemini_with_manager( wrap_paths_gemini(paths) } +/// Derive a single Gemini conversation given an explicit project + session. +#[allow(dead_code)] // wired up by cmd_share in a follow-up task +pub(crate) fn derive_gemini_pair( + project: &str, + session: &str, + include_thinking: bool, +) -> Result { + let manager = toolpath_gemini::GeminiConvo::new(); + let cfg = toolpath_gemini::derive::DeriveConfig { + project_path: Some(project.to_string()), + include_thinking, + }; + let convo = manager + .read_conversation(project, session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = toolpath_gemini::derive::derive_path(&convo, &cfg); + let cache_id = make_id("gemini", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} + fn wrap_paths_gemini(paths: Vec) -> Result> { Ok(paths .into_iter() @@ -791,6 +835,22 @@ fn derive_codex(session: Option, all: bool) -> Result> { wrap_paths_codex(paths) } +/// Derive a single Codex session given an explicit session id. 
+#[allow(dead_code)] // wired up by cmd_share in a follow-up task +pub(crate) fn derive_codex_one(session: &str) -> Result { + let manager = toolpath_codex::CodexConvo::new(); + let config = toolpath_codex::derive::DeriveConfig { project_path: None }; + let s = manager + .read_session(session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = toolpath_codex::derive::derive_path(&s, &config); + let cache_id = make_id("codex", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} + fn wrap_paths_codex(paths: Vec) -> Result> { Ok(paths .into_iter() @@ -919,6 +979,30 @@ fn derive_opencode( } } +/// Derive a single opencode session given an explicit session id. +#[cfg(not(target_os = "emscripten"))] +#[allow(dead_code)] // wired up by cmd_share in a follow-up task +pub(crate) fn derive_opencode_one( + session: &str, + no_snapshot_diffs: bool, +) -> Result { + let manager = toolpath_opencode::OpencodeConvo::new(); + let config = toolpath_opencode::derive::DeriveConfig { + no_snapshot_diffs, + ..Default::default() + }; + let s = manager + .read_session(session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = + toolpath_opencode::derive::derive_path_with_resolver(&s, &config, manager.resolver()); + let cache_id = make_id("opencode", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} + fn wrap_paths_opencode(paths: Vec) -> Result> { Ok(paths .into_iter() @@ -1071,6 +1155,28 @@ fn derive_pi_with_manager( Ok(docs) } +/// Derive a single Pi session given an explicit project + session. 
+#[allow(dead_code)] // wired up by cmd_share in a follow-up task +pub(crate) fn derive_pi_pair( + project: &str, + session: &str, + base: Option, +) -> Result { + let manager = if let Some(path) = base { + let resolver = toolpath_pi::PathResolver::new().with_sessions_dir(&path); + toolpath_pi::PiConvo::with_resolver(resolver) + } else { + toolpath_pi::PiConvo::new() + }; + let config = toolpath_pi::DeriveConfig::default(); + let session = manager + .read_session(project, session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let doc = Graph::from_path(toolpath_pi::derive::derive_path(&session, &config)); + let cache_id = make_id("pi", &doc_inner_id(&doc)); + Ok(DerivedDoc { cache_id, doc }) +} + #[cfg(not(target_os = "emscripten"))] fn pick_pi_in_project( manager: &toolpath_pi::PiConvo, From 58609d0ec0cab79771076842a6dd4cb5e666f43c Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 16:25:39 -0400 Subject: [PATCH 04/36] refactor(path-cli): split run_pathbase into wrapper + inner run_pathbase_inner takes a body string and a summary_source label, so callers with an in-memory toolpath document (cmd_share) can upload without round-tripping through the cache. --- crates/path-cli/src/cmd_export.rs | 237 +++++++++++++++++------------- 1 file changed, 133 insertions(+), 104 deletions(-) diff --git a/crates/path-cli/src/cmd_export.rs b/crates/path-cli/src/cmd_export.rs index a6f6bd1..61b0acd 100644 --- a/crates/path-cli/src/cmd_export.rs +++ b/crates/path-cli/src/cmd_export.rs @@ -153,12 +153,12 @@ pub enum ExportTarget { /// `owner/name` pair for `--repo`. 
#[derive(Debug, Clone)] -pub struct RepoSpec { - pub owner: String, - pub name: String, +pub(crate) struct RepoSpec { + pub(crate) owner: String, + pub(crate) name: String, } -fn parse_repo_spec(s: &str) -> std::result::Result { +pub(crate) fn parse_repo_spec(s: &str) -> std::result::Result { let (owner, name) = s .split_once('/') .ok_or_else(|| format!("expected owner/name, got `{s}`"))?; @@ -226,6 +226,18 @@ struct PathbaseExportArgs { public: bool, } +/// Pathbase upload knobs that don't depend on where the body came from. +/// Identical to [`PathbaseExportArgs`] minus the `input` field — the body +/// is supplied by the caller (read from cache, derived in memory, …). +#[derive(Debug)] +pub(crate) struct PathbaseUploadArgs { + pub(crate) url: Option, + pub(crate) anon: bool, + pub(crate) repo: Option, + pub(crate) slug: Option, + pub(crate) public: bool, +} + fn run_claude(input: String, project: Option, output: Option) -> Result<()> { #[cfg(target_os = "emscripten")] { @@ -1208,124 +1220,141 @@ fn run_pathbase(args: PathbaseExportArgs) -> Result<()> { #[cfg(not(target_os = "emscripten"))] { - use crate::cmd_pathbase::{ - anon_paths_post, api_me, credentials_path, load_session, paths_post, repos_post, - resolve_url, - }; - let file = cache_ref(&args.input)?; let body = std::fs::read_to_string(&file) .with_context(|| format!("Failed to read {}", file.display()))?; - // Validate locally so we give a clean error rather than relying on - // the server to reject malformed payloads. 
- let doc = toolpath::v1::Graph::from_json(&body) - .map_err(|e| anyhow::anyhow!("Invalid toolpath document: {}", e))?; - - let stored = load_session(&credentials_path()?)?; - let base_url = match (&args.url, &stored) { - (Some(u), _) => resolve_url(Some(u.clone())), - (None, Some(s)) => s.url.clone(), - (None, None) => resolve_url(None), + let upload = PathbaseUploadArgs { + url: args.url, + anon: args.anon, + repo: args.repo, + slug: args.slug, + public: args.public, }; + let summary_source = file.display().to_string(); + run_pathbase_inner(upload, &body, &summary_source) + } +} - // Anonymous mode: explicit --anon, or no credentials at all and no - // override flags steering us toward an authed endpoint. - let go_anon = args.anon || (stored.is_none() && args.repo.is_none() && args.slug.is_none()); +#[cfg(not(target_os = "emscripten"))] +pub(crate) fn run_pathbase_inner( + args: PathbaseUploadArgs, + body: &str, + summary_source: &str, +) -> Result<()> { + use crate::cmd_pathbase::{ + anon_paths_post, api_me, credentials_path, load_session, paths_post, repos_post, + resolve_url, + }; - if go_anon { - if !args.anon && stored.is_none() { - eprintln!( - "note: not logged in — uploading anonymously (not listable). Run `path auth login --url {base_url}` for a listable upload." - ); - } - let resp = anon_paths_post(&base_url, &body)?; - // Server returns either a full URL or a path-only string; in the - // latter case prefix the base so the user gets a clickable link. - let printable = if resp.url.starts_with("http://") || resp.url.starts_with("https://") { - resp.url.clone() - } else if resp.url.starts_with('/') { - format!("{base_url}{}", resp.url) - } else { - format!("{base_url}/{}", resp.url) - }; - println!("{printable}"); - eprintln!( - "Uploaded {} → anon path {} ({} bytes)", - file.display(), - resp.id, - body.len() - ); - return Ok(()); - } + // Validate locally so we give a clean error rather than relying on + // the server to reject malformed payloads. 
+ let doc = toolpath::v1::Graph::from_json(body) + .map_err(|e| anyhow::anyhow!("Invalid toolpath document: {}", e))?; - let session = stored.ok_or_else(|| { - anyhow::anyhow!("Not logged in. Run `path auth login` or pass `--anon`.") - })?; - if host_of(&base_url) != host_of(&session.url) { - eprintln!( - "warning: uploading to {} with a token issued by {}; expect 401 unless this is the same deployment", - base_url, session.url - ); - } + let stored = load_session(&credentials_path()?)?; + let base_url = match (&args.url, &stored) { + (Some(u), _) => resolve_url(Some(u.clone())), + (None, Some(s)) => s.url.clone(), + (None, None) => resolve_url(None), + }; - let (owner, repo) = match args.repo { - Some(spec) => (spec.owner, spec.name), - None => { - // Pathstash default: own the repo "pathstash" under our username, - // creating it on demand. api_me is the source of truth for the - // username (display name in stored.user can drift). - let user = api_me(&base_url, &session.token)?; - repos_post(&base_url, &session.token, "pathstash")?; - (user.username, "pathstash".to_string()) - } - }; + // Anonymous mode: explicit --anon, or no credentials at all and no + // override flags steering us toward an authed endpoint. + let go_anon = args.anon || (stored.is_none() && args.repo.is_none() && args.slug.is_none()); - let slug = args.slug.unwrap_or_else(|| derive_slug(&doc)); - let created = paths_post( - &base_url, - &session.token, - &owner, - &repo, - &slug, - &body, - args.public, - )?; - - // The visibility we surface is what the server actually applied, - // not what we requested. If a server-side policy ever clamps - // `is_public` (rate limits, account flags, future feature flags), - // we render the URL form the path can actually be reached at. 
- if created.is_public != args.public { + if go_anon { + if !args.anon && stored.is_none() { eprintln!( - "note: requested is_public={} but server applied is_public={}", - args.public, created.is_public + "note: not logged in — uploading anonymously (not listable). Run `path auth login --url {base_url}` for a listable upload." ); } - let visibility = if created.is_public { - "public" + let resp = anon_paths_post(&base_url, body)?; + // Server returns either a full URL or a path-only string; in the + // latter case prefix the base so the user gets a clickable link. + let printable = if resp.url.starts_with("http://") || resp.url.starts_with("https://") { + resp.url.clone() + } else if resp.url.starts_with('/') { + format!("{base_url}{}", resp.url) } else { - "secret" + format!("{base_url}/{}", resp.url) }; - let url = pathbase_share_url( - &base_url, - &owner, - &repo, - &created.slug, - &created.id, - created.is_public, - ); - println!("{url}"); + println!("{printable}"); eprintln!( - "Uploaded {} → {}/{}/{} ({} path, {} bytes)", - file.display(), - owner, - repo, - created.slug, - visibility, + "Uploaded {} → anon path {} ({} bytes)", + summary_source, + resp.id, body.len() ); - Ok(()) + return Ok(()); + } + + let session = stored.ok_or_else(|| { + anyhow::anyhow!("Not logged in. Run `path auth login` or pass `--anon`.") + })?; + if host_of(&base_url) != host_of(&session.url) { + eprintln!( + "warning: uploading to {} with a token issued by {}; expect 401 unless this is the same deployment", + base_url, session.url + ); + } + + let (owner, repo) = match args.repo { + Some(spec) => (spec.owner, spec.name), + None => { + // Pathstash default: own the repo "pathstash" under our username, + // creating it on demand. api_me is the source of truth for the + // username (display name in stored.user can drift). 
+ let user = api_me(&base_url, &session.token)?; + repos_post(&base_url, &session.token, "pathstash")?; + (user.username, "pathstash".to_string()) + } + }; + + let slug = args.slug.unwrap_or_else(|| derive_slug(&doc)); + let created = paths_post( + &base_url, + &session.token, + &owner, + &repo, + &slug, + body, + args.public, + )?; + + // The visibility we surface is what the server actually applied, + // not what we requested. If a server-side policy ever clamps + // `is_public` (rate limits, account flags, future feature flags), + // we render the URL form the path can actually be reached at. + if created.is_public != args.public { + eprintln!( + "note: requested is_public={} but server applied is_public={}", + args.public, created.is_public + ); } + let visibility = if created.is_public { + "public" + } else { + "secret" + }; + let url = pathbase_share_url( + &base_url, + &owner, + &repo, + &created.slug, + &created.id, + created.is_public, + ); + println!("{url}"); + eprintln!( + "Uploaded {} → {}/{}/{} ({} path, {} bytes)", + summary_source, + owner, + repo, + created.slug, + visibility, + body.len() + ); + Ok(()) } /// Pick the canonical share URL for a path uploaded via `export pathbase`. From de264c05401b8186623633a000077ea3c9d2c58a Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 16:58:33 -0400 Subject: [PATCH 05/36] fix(path-cli): gate PathbaseUploadArgs to native target Code review flagged that the struct was defined unconditionally but only constructed from a #[cfg(not(target_os = "emscripten"))] block, producing a dead-code warning on wasm. Match the gate to its only caller. 
--- crates/path-cli/src/cmd_export.rs | 1 + 1 file changed, 1 insertion(+) diff --git a/crates/path-cli/src/cmd_export.rs b/crates/path-cli/src/cmd_export.rs index 61b0acd..e747ce0 100644 --- a/crates/path-cli/src/cmd_export.rs +++ b/crates/path-cli/src/cmd_export.rs @@ -229,6 +229,7 @@ struct PathbaseExportArgs { /// Pathbase upload knobs that don't depend on where the body came from. /// Identical to [`PathbaseExportArgs`] minus the `input` field — the body /// is supplied by the caller (read from cache, derived in memory, …). +#[cfg(not(target_os = "emscripten"))] #[derive(Debug)] pub(crate) struct PathbaseUploadArgs { pub(crate) url: Option, From 12d14c65fec5a97ab3ad2cf62cdd04600280628f Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 17:02:43 -0400 Subject: [PATCH 06/36] feat(path-cli): scaffold `path share` command Adds the cmd_share module with the full CLI surface (--url, --harness, --session, --project, --anon, --repo, --slug, --public, --force, --no-cache) and a stub run() that bails. Wires it into lib.rs as Commands::Share. Subsequent tasks fill in the body. --- crates/path-cli/src/cmd_share.rs | 71 ++++++++++++++++++++++++++++ crates/path-cli/src/lib.rs | 10 ++++ crates/path-cli/tests/integration.rs | 12 +++++ 3 files changed, 93 insertions(+) create mode 100644 crates/path-cli/src/cmd_share.rs diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs new file mode 100644 index 0000000..ec234a6 --- /dev/null +++ b/crates/path-cli/src/cmd_share.rs @@ -0,0 +1,71 @@ +//! `path share` — interactive Pathbase upload across installed agent +//! harnesses. See `docs/superpowers/specs/2026-05-07-path-share-command-design.md`. 
+ +#![cfg(not(target_os = "emscripten"))] + +use anyhow::Result; +use clap::{Args, ValueEnum}; +use std::path::PathBuf; + +use crate::cmd_export::RepoSpec; + +#[derive(Copy, Clone, Debug, PartialEq, Eq, ValueEnum)] +#[value(rename_all = "lower")] +pub enum HarnessArg { + Claude, + Gemini, + Codex, + Opencode, + Pi, +} + +#[derive(Args, Debug)] +pub struct ShareArgs { + /// Pathbase server URL (defaults to the stored session's server) + #[arg(long)] + pub url: Option, + + /// Force the anonymous endpoint, ignoring any stored credentials + #[arg(long, conflicts_with_all = ["repo", "public"])] + pub anon: bool, + + /// Target a specific repo as `owner/name` instead of `/pathstash` + #[arg(long, value_parser = crate::cmd_export::parse_repo_spec)] + pub repo: Option, + + /// Override the auto-derived slug (defaults to the toolpath document id) + #[arg(long)] + pub slug: Option, + + /// Make the uploaded path publicly listable (default: secret/unlisted) + #[arg(long)] + pub public: bool, + + /// Narrow the picker to one harness, or skip the picker entirely + /// when used with --session. + #[arg(long, value_enum)] + pub harness: Option, + + /// Skip the picker. Requires --harness; requires --project for + /// claude/gemini/pi. + #[arg(long, requires = "harness")] + pub session: Option, + + /// Override cwd-as-project. Filters the picker to sessions tied to + /// this project across all harnesses. 
+ #[arg(long)] + pub project: Option, + + /// Overwrite the cache entry if it already exists + #[arg(long)] + pub force: bool, + + /// Skip writing the cache; derive in-memory only + #[arg(long)] + pub no_cache: bool, +} + +pub fn run(args: ShareArgs) -> Result<()> { + let _ = args; + anyhow::bail!("`path share` is not yet implemented") +} diff --git a/crates/path-cli/src/lib.rs b/crates/path-cli/src/lib.rs index 5381588..c7f53af 100644 --- a/crates/path-cli/src/lib.rs +++ b/crates/path-cli/src/lib.rs @@ -14,6 +14,8 @@ mod cmd_project; mod cmd_query; mod cmd_render; #[cfg(not(target_os = "emscripten"))] +mod cmd_share; +#[cfg(not(target_os = "emscripten"))] mod cmd_show; mod cmd_track; mod cmd_validate; @@ -114,6 +116,12 @@ enum Commands { #[command(subcommand)] op: cmd_auth::AuthOp, }, + /// Share an agent session to Pathbase via an interactive picker + #[cfg(not(target_os = "emscripten"))] + Share { + #[command(flatten)] + args: cmd_share::ShareArgs, + }, // ── Deprecated aliases ──────────────────────────────────────────── #[command(hide = true, about = "[deprecated] Use `path import`")] @@ -158,6 +166,8 @@ pub fn run() -> Result<()> { } #[cfg(not(target_os = "emscripten"))] Commands::Auth { op } => cmd_auth::run(op), + #[cfg(not(target_os = "emscripten"))] + Commands::Share { args } => cmd_share::run(args), Commands::Derive { source } => cmd_derive::run(source, cli.pretty), Commands::Incept { args } => cmd_incept::run(args), diff --git a/crates/path-cli/tests/integration.rs b/crates/path-cli/tests/integration.rs index 2f26c3a..54402c7 100644 --- a/crates/path-cli/tests/integration.rs +++ b/crates/path-cli/tests/integration.rs @@ -663,3 +663,15 @@ fn derive_alias_still_works_with_warning() { .stdout(predicate::str::contains("\"paths\":")) .stderr(predicate::str::contains("deprecated")); } + +#[test] +fn share_help_lists_unified_picker_flags() { + cmd() + .args(["share", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("--harness")) + 
.stdout(predicate::str::contains("--session")) + .stdout(predicate::str::contains("--project")) + .stdout(predicate::str::contains("--anon")); +} From 09b16540869b50c3d43d1927c87601f09cb8e723 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 17:57:50 -0400 Subject: [PATCH 07/36] feat(path-cli): add Harness, SessionRow, HarnessBundle types Pure data types plus from_arg/parse helpers and a project_keyed predicate. HarnessBundle::from_environment instantiates each provider unconditionally; gather_sessions (next task) skips providers whose listing returns empty or NotFound. --- crates/path-cli/src/cmd_share.rs | 162 +++++++++++++++++++++++++++++++ 1 file changed, 162 insertions(+) diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index ec234a6..7dd0b49 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -4,6 +4,7 @@ #![cfg(not(target_os = "emscripten"))] use anyhow::Result; +use chrono::{DateTime, Utc}; use clap::{Args, ValueEnum}; use std::path::PathBuf; @@ -65,7 +66,168 @@ pub struct ShareArgs { pub no_cache: bool, } +/// Which agent harness a session was produced by. +#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub(crate) enum Harness { + Claude, + Gemini, + Codex, + Opencode, + Pi, +} + +#[allow(dead_code)] // wired up by gather_sessions in a follow-up task +impl Harness { + pub(crate) fn name(&self) -> &'static str { + match self { + Harness::Claude => "claude", + Harness::Gemini => "gemini", + Harness::Codex => "codex", + Harness::Opencode => "opencode", + Harness::Pi => "pi", + } + } + + /// Padded so all five symbols line up in the fzf column. + pub(crate) fn symbol(&self) -> &'static str { + match self { + Harness::Claude => "claude ", + Harness::Gemini => "gemini ", + Harness::Codex => "codex ", + Harness::Opencode => "opencode", + Harness::Pi => "pi ", + } + } + + /// True when the underlying provider keys sessions by project path. + /// claude/gemini/pi: true. 
codex/opencode: false (sessions store cwd + /// per-row, not as a directory key). + pub(crate) fn project_keyed(&self) -> bool { + matches!(self, Harness::Claude | Harness::Gemini | Harness::Pi) + } + + pub(crate) fn from_arg(arg: HarnessArg) -> Self { + match arg { + HarnessArg::Claude => Harness::Claude, + HarnessArg::Gemini => Harness::Gemini, + HarnessArg::Codex => Harness::Codex, + HarnessArg::Opencode => Harness::Opencode, + HarnessArg::Pi => Harness::Pi, + } + } + + pub(crate) fn parse(s: &str) -> Option { + match s { + "claude" => Some(Harness::Claude), + "gemini" => Some(Harness::Gemini), + "codex" => Some(Harness::Codex), + "opencode" => Some(Harness::Opencode), + "pi" => Some(Harness::Pi), + _ => None, + } + } +} + +/// One row in the unified session picker. +#[allow(dead_code)] // wired up by gather_sessions in a follow-up task +#[derive(Debug, Clone)] +pub(crate) struct SessionRow { + pub(crate) harness: Harness, + /// Project path for keyed providers; `None` for codex/opencode. + pub(crate) project: Option, + /// Recorded cwd from the session (codex/opencode only). + pub(crate) cwd: Option, + pub(crate) session_id: String, + pub(crate) title: String, + pub(crate) last_activity: Option>, + pub(crate) message_count: usize, + pub(crate) matches_cwd: bool, +} + +/// Bundle of provider managers used during aggregation. Production code +/// builds this from real `$HOME` via `from_environment`; tests construct +/// it directly with provider-specific resolvers. +#[allow(dead_code)] // wired up by gather_sessions in a follow-up task +#[derive(Default)] +pub(crate) struct HarnessBundle { + pub(crate) claude: Option, + pub(crate) gemini: Option, + pub(crate) codex: Option, + pub(crate) opencode: Option, + pub(crate) pi: Option, +} + +impl HarnessBundle { + /// Build the production bundle. Each provider is included + /// unconditionally (its `new()` doesn't fail on a missing home dir); + /// `gather_sessions` skips the ones whose listing returns empty/NotFound. 
+ #[allow(dead_code)] // wired up by gather_sessions in a follow-up task + pub(crate) fn from_environment() -> Self { + Self { + claude: Some(toolpath_claude::ClaudeConvo::new()), + gemini: Some(toolpath_gemini::GeminiConvo::new()), + codex: Some(toolpath_codex::CodexConvo::new()), + opencode: Some(toolpath_opencode::OpencodeConvo::new()), + pi: Some(toolpath_pi::PiConvo::new()), + } + } +} + pub fn run(args: ShareArgs) -> Result<()> { let _ = args; anyhow::bail!("`path share` is not yet implemented") } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn harness_name_and_symbol_are_distinct() { + let all = [ + Harness::Claude, + Harness::Gemini, + Harness::Codex, + Harness::Opencode, + Harness::Pi, + ]; + let names: Vec<&str> = all.iter().map(|h| h.name()).collect(); + let symbols: Vec<&str> = all.iter().map(|h| h.symbol()).collect(); + assert_eq!(names.len(), 5); + assert_eq!( + names.iter().collect::>().len(), + 5, + "names must be unique" + ); + assert_eq!( + symbols + .iter() + .collect::>() + .len(), + 5, + "symbols must be unique" + ); + } + + #[test] + fn harness_project_keyed_matches_design() { + assert!(Harness::Claude.project_keyed()); + assert!(Harness::Gemini.project_keyed()); + assert!(Harness::Pi.project_keyed()); + assert!(!Harness::Codex.project_keyed()); + assert!(!Harness::Opencode.project_keyed()); + } + + #[test] + fn harness_from_arg_roundtrips() { + for (arg, harness) in [ + (HarnessArg::Claude, Harness::Claude), + (HarnessArg::Gemini, Harness::Gemini), + (HarnessArg::Codex, Harness::Codex), + (HarnessArg::Opencode, Harness::Opencode), + (HarnessArg::Pi, Harness::Pi), + ] { + assert_eq!(Harness::from_arg(arg), harness); + } + } +} From f654f96443b081b230cdeb821ed1e5dee6b37a11 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 19:46:04 -0400 Subject: [PATCH 08/36] feat(path-cli): implement gather_sessions for claude/gemini/pi Aggregates SessionRow values from the three project-keyed providers, sorts cwd-matching rows 
first then by recency, and silently skips harnesses whose listing returns empty or NotFound. Codex and opencode land in the next commit. --- crates/path-cli/src/cmd_share.rs | 306 +++++++++++++++++++++++++++++++ 1 file changed, 306 insertions(+) diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index 7dd0b49..c5a4176 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -173,6 +173,223 @@ impl HarnessBundle { } } +/// Aggregate sessions across the harnesses in `bundle`, ranked so that +/// rows whose project (or recorded cwd) canonicalizes to `cwd` come +/// first, sorted by descending `last_activity`. +/// +/// Filters: `harness_filter` keeps only rows from one harness; `project_filter` +/// keeps only rows whose project (for keyed) or cwd (for session-keyed) +/// canonicalizes to that path. +#[allow(dead_code)] // call sites land in Tasks 7-8 +pub(crate) fn gather_sessions( + bundle: &HarnessBundle, + cwd: &std::path::Path, + harness_filter: Option, + project_filter: Option<&std::path::Path>, +) -> Vec { + let mut rows = Vec::new(); + let canonical_cwd = canonicalize_or_self(cwd); + let canonical_project = project_filter.map(canonicalize_or_self); + + let want = |h: Harness| harness_filter.is_none_or(|f| f == h); + + if want(Harness::Claude) + && let Some(mgr) = &bundle.claude + { + collect_claude(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows); + } + if want(Harness::Gemini) + && let Some(mgr) = &bundle.gemini + { + collect_gemini(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows); + } + if want(Harness::Pi) + && let Some(mgr) = &bundle.pi + { + collect_pi(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows); + } + + rows.sort_by(|a, b| { + b.matches_cwd + .cmp(&a.matches_cwd) + .then_with(|| b.last_activity.cmp(&a.last_activity)) + }); + rows +} + +fn canonicalize_or_self(p: &std::path::Path) -> std::path::PathBuf { + std::fs::canonicalize(p).unwrap_or_else(|_| 
p.to_path_buf()) +} + +fn paths_match(a: &std::path::Path, b: &std::path::Path) -> bool { + canonicalize_or_self(a) == canonicalize_or_self(b) +} + +fn collect_claude( + mgr: &toolpath_claude::ClaudeConvo, + canonical_cwd: &std::path::Path, + project_filter: Option<&std::path::Path>, + out: &mut Vec, +) { + let projects = match mgr.list_projects() { + Ok(ps) if !ps.is_empty() => ps, + Ok(_) => return, + Err(e) if is_not_found_claude(&e) => return, + Err(e) => { + eprintln!("warning: claude aggregation failed: {e}"); + return; + } + }; + for project in projects { + let project_path = std::path::Path::new(&project); + if let Some(filter) = project_filter + && !paths_match(project_path, filter) + { + continue; + } + let metas = match mgr.list_conversation_metadata(&project) { + Ok(m) => m, + Err(e) => { + eprintln!("warning: claude project {project} failed: {e}"); + continue; + } + }; + let matches_cwd = paths_match(project_path, canonical_cwd); + for m in metas { + out.push(SessionRow { + harness: Harness::Claude, + project: Some(m.project_path), + cwd: None, + session_id: m.session_id, + title: m + .first_user_message + .unwrap_or_else(|| "(no prompt)".to_string()), + last_activity: m.last_activity, + message_count: m.message_count, + matches_cwd, + }); + } + } +} + +fn collect_gemini( + mgr: &toolpath_gemini::GeminiConvo, + canonical_cwd: &std::path::Path, + project_filter: Option<&std::path::Path>, + out: &mut Vec, +) { + let projects = match mgr.list_projects() { + Ok(ps) if !ps.is_empty() => ps, + Ok(_) => return, + Err(e) if is_not_found_gemini(&e) => return, + Err(e) => { + eprintln!("warning: gemini aggregation failed: {e}"); + return; + } + }; + for project in projects { + let project_path = std::path::Path::new(&project); + if let Some(filter) = project_filter + && !paths_match(project_path, filter) + { + continue; + } + let metas = match mgr.list_conversation_metadata(&project) { + Ok(m) => m, + Err(e) => { + eprintln!("warning: gemini project {project} 
failed: {e}"); + continue; + } + }; + let matches_cwd = paths_match(project_path, canonical_cwd); + for m in metas { + out.push(SessionRow { + harness: Harness::Gemini, + project: Some(m.project_path), + cwd: None, + session_id: m.session_uuid, + title: m + .first_user_message + .unwrap_or_else(|| "(no prompt)".to_string()), + last_activity: m.last_activity, + message_count: m.message_count, + matches_cwd, + }); + } + } +} + +fn collect_pi( + mgr: &toolpath_pi::PiConvo, + canonical_cwd: &std::path::Path, + project_filter: Option<&std::path::Path>, + out: &mut Vec, +) { + let projects = match mgr.list_projects() { + Ok(ps) if !ps.is_empty() => ps, + Ok(_) => return, + Err(e) if is_not_found_pi(&e) => return, + Err(e) => { + eprintln!("warning: pi aggregation failed: {e}"); + return; + } + }; + for project in projects { + let project_path = std::path::Path::new(&project); + if let Some(filter) = project_filter + && !paths_match(project_path, filter) + { + continue; + } + let metas = match mgr.list_sessions(&project) { + Ok(m) => m, + Err(e) => { + eprintln!("warning: pi project {project} failed: {e}"); + continue; + } + }; + let matches_cwd = paths_match(project_path, canonical_cwd); + for m in metas { + // SessionMeta.timestamp is a String; parse to DateTime when possible. 
+ let last_activity = chrono::DateTime::parse_from_rfc3339(&m.timestamp) + .ok() + .map(|d| d.with_timezone(&Utc)); + out.push(SessionRow { + harness: Harness::Pi, + project: Some(project.clone()), + cwd: None, + session_id: m.id, + title: m + .first_user_message + .unwrap_or_else(|| "(no prompt)".to_string()), + last_activity, + message_count: m.entry_count, + matches_cwd, + }); + } + } +} + +fn is_not_found_claude(err: &toolpath_claude::ConvoError) -> bool { + use toolpath_claude::ConvoError; + matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) + || matches!(err, ConvoError::ClaudeDirectoryNotFound(_)) +} + +fn is_not_found_gemini(err: &toolpath_gemini::ConvoError) -> bool { + use toolpath_gemini::ConvoError; + matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) + || matches!(err, ConvoError::GeminiDirectoryNotFound(_)) +} + +fn is_not_found_pi(err: &toolpath_pi::PiError) -> bool { + use toolpath_pi::PiError; + matches!(err, PiError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, PiError::ProjectNotFound(_)) +} + pub fn run(args: ShareArgs) -> Result<()> { let _ = args; anyhow::bail!("`path share` is not yet implemented") @@ -230,4 +447,93 @@ mod tests { assert_eq!(Harness::from_arg(arg), harness); } } + + use std::path::Path; + use tempfile::TempDir; + + fn write_claude_session(claude_dir: &Path, project_slug: &str, session: &str, prompt: &str) { + let project_dir = claude_dir.join("projects").join(project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + let user = format!( + r#"{{"type":"user","uuid":"u-{session}","timestamp":"2024-01-02T00:00:00Z","cwd":"/test/project","message":{{"role":"user","content":"{prompt}"}}}}"# + ); + let asst = format!( + r#"{{"type":"assistant","uuid":"a-{session}","timestamp":"2024-01-02T00:00:01Z","message":{{"role":"assistant","content":"hi"}}}}"# + ); 
+ std::fs::write( + project_dir.join(format!("{session}.jsonl")), + format!("{user}\n{asst}\n"), + ) + .unwrap(); + } + + fn claude_only_bundle(home: &Path) -> HarnessBundle { + let claude_dir = home.join(".claude"); + std::fs::create_dir_all(&claude_dir).unwrap(); + let resolver = toolpath_claude::PathResolver::new().with_claude_dir(&claude_dir); + HarnessBundle { + claude: Some(toolpath_claude::ClaudeConvo::with_resolver(resolver)), + ..Default::default() + } + } + + #[test] + fn gather_sessions_includes_claude_rows_for_a_project() { + let temp = TempDir::new().unwrap(); + write_claude_session( + &temp.path().join(".claude"), + "-test-project", + "abc-session-one", + "Add a feature", + ); + let bundle = claude_only_bundle(temp.path()); + let cwd = Path::new("/test/project"); + let rows = gather_sessions(&bundle, cwd, None, None); + + assert_eq!(rows.len(), 1); + assert_eq!(rows[0].harness, Harness::Claude); + assert_eq!(rows[0].session_id, "abc-session-one"); + assert_eq!(rows[0].project.as_deref(), Some("/test/project")); + assert!(rows[0].matches_cwd, "cwd should match the project path"); + } + + #[test] + fn gather_sessions_marks_non_matching_project_rows() { + let temp = TempDir::new().unwrap(); + write_claude_session( + &temp.path().join(".claude"), + "-test-project", + "abc-session-one", + "Add a feature", + ); + let bundle = claude_only_bundle(temp.path()); + let cwd = Path::new("/some/other/place"); + let rows = gather_sessions(&bundle, cwd, None, None); + + assert_eq!(rows.len(), 1); + assert!(!rows[0].matches_cwd); + } + + #[test] + fn gather_sessions_skips_harness_with_no_home_dir() { + // Empty bundle => no rows, no panic. 
+ let bundle = HarnessBundle::default(); + let rows = gather_sessions(&bundle, Path::new("/anywhere"), None, None); + assert!(rows.is_empty()); + } + + #[test] + fn gather_sessions_filters_by_harness() { + let temp = TempDir::new().unwrap(); + write_claude_session( + &temp.path().join(".claude"), + "-test-project", + "abc-session-one", + "hi", + ); + let bundle = claude_only_bundle(temp.path()); + let cwd = Path::new("/test/project"); + let rows = gather_sessions(&bundle, cwd, Some(Harness::Codex), None); + assert!(rows.is_empty(), "filter to codex must drop claude rows"); + } } From 8f347681e21f481359a2c85b38ff6c79e0d29aed Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 21:31:56 -0400 Subject: [PATCH 09/36] feat(path-cli): cover codex+opencode in gather_sessions Adds collect_codex/collect_opencode and the matching ranking/filter tests. Session-keyed providers compare canonical(stored_cwd) to canonical(cwd) for matches_cwd; project_filter applies to the same recorded cwd. 
--- crates/path-cli/src/cmd_share.rs | 177 +++++++++++++++++++++++++++++++ 1 file changed, 177 insertions(+) diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index c5a4176..7f54fee 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -208,6 +208,16 @@ pub(crate) fn gather_sessions( { collect_pi(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows); } + if want(Harness::Codex) + && let Some(mgr) = &bundle.codex + { + collect_codex(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows); + } + if want(Harness::Opencode) + && let Some(mgr) = &bundle.opencode + { + collect_opencode(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows); + } rows.sort_by(|a, b| { b.matches_cwd @@ -370,6 +380,93 @@ fn collect_pi( } } +fn collect_codex( + mgr: &toolpath_codex::CodexConvo, + canonical_cwd: &std::path::Path, + project_filter: Option<&std::path::Path>, + out: &mut Vec, +) { + let metas = match mgr.list_sessions() { + Ok(m) if !m.is_empty() => m, + Ok(_) => return, + Err(e) if is_not_found_codex(&e) => return, + Err(e) => { + eprintln!("warning: codex aggregation failed: {e}"); + return; + } + }; + for m in metas { + let cwd_str = m.cwd.as_ref().map(|p| p.to_string_lossy().into_owned()); + if let Some(filter) = project_filter { + let stored = match cwd_str.as_deref() { + Some(s) => std::path::PathBuf::from(s), + None => continue, + }; + if !paths_match(&stored, filter) { + continue; + } + } + let matches_cwd = m + .cwd + .as_deref() + .map(|p| paths_match(p, canonical_cwd)) + .unwrap_or(false); + out.push(SessionRow { + harness: Harness::Codex, + project: None, + cwd: cwd_str, + session_id: m.id, + title: m + .first_user_message + .unwrap_or_else(|| "(no prompt)".to_string()), + last_activity: m.last_activity, + message_count: m.line_count, + matches_cwd, + }); + } +} + +fn collect_opencode( + mgr: &toolpath_opencode::OpencodeConvo, + canonical_cwd: &std::path::Path, + project_filter: 
Option<&std::path::Path>, + out: &mut Vec, +) { + let metas = match mgr.io().list_session_metadata(None) { + Ok(m) if !m.is_empty() => m, + Ok(_) => return, + Err(e) if is_not_found_opencode(&e) => return, + Err(e) => { + eprintln!("warning: opencode aggregation failed: {e}"); + return; + } + }; + for m in metas { + if let Some(filter) = project_filter + && !paths_match(&m.directory, filter) + { + continue; + } + let matches_cwd = paths_match(&m.directory, canonical_cwd); + let cwd_str = m.directory.to_string_lossy().into_owned(); + let title = match (&m.first_user_message, m.title.is_empty()) { + (Some(s), _) if !s.is_empty() => s.clone(), + (_, false) => m.title.clone(), + _ => "(no prompt)".to_string(), + }; + out.push(SessionRow { + harness: Harness::Opencode, + project: None, + cwd: Some(cwd_str), + session_id: m.id, + title, + last_activity: m.last_activity, + message_count: m.message_count, + matches_cwd, + }); + } +} + fn is_not_found_claude(err: &toolpath_claude::ConvoError) -> bool { use toolpath_claude::ConvoError; matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) @@ -390,6 +487,21 @@ fn is_not_found_pi(err: &toolpath_pi::PiError) -> bool { || matches!(err, PiError::ProjectNotFound(_)) } +fn is_not_found_codex(err: &toolpath_codex::ConvoError) -> bool { + use toolpath_codex::ConvoError; + matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) + || matches!(err, ConvoError::CodexDirectoryNotFound(_)) +} + +fn is_not_found_opencode(err: &toolpath_opencode::ConvoError) -> bool { + use toolpath_opencode::ConvoError; + matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) + || matches!(err, ConvoError::OpencodeDirectoryNotFound(_)) + || matches!(err, ConvoError::DatabaseNotFound(_)) +} + pub fn run(args: ShareArgs) -> Result<()> { let _ = args; anyhow::bail!("`path share` is not yet 
implemented") @@ -536,4 +648,69 @@ mod tests { let rows = gather_sessions(&bundle, cwd, Some(Harness::Codex), None); assert!(rows.is_empty(), "filter to codex must drop claude rows"); } + + fn codex_only_bundle(home: &Path) -> HarnessBundle { + let codex_dir = home.join(".codex"); + std::fs::create_dir_all(&codex_dir).unwrap(); + let resolver = toolpath_codex::PathResolver::new().with_codex_dir(&codex_dir); + HarnessBundle { + codex: Some(toolpath_codex::CodexConvo::with_resolver(resolver)), + ..Default::default() + } + } + + fn write_codex_session(codex_dir: &Path, id: &str, cwd: &str) { + // Date-bucketed layout: ~/.codex/sessions/YYYY/MM/DD/rollout-*-.jsonl + let dir = codex_dir.join("sessions/2026/05/07"); + std::fs::create_dir_all(&dir).unwrap(); + let file = dir.join(format!("rollout-2026-05-07T00-00-00-{id}.jsonl")); + let meta = format!( + r#"{{"timestamp":"2026-05-07T00:00:00Z","type":"session_meta","payload":{{"id":"{id}","timestamp":"2026-05-07T00:00:00Z","cwd":"{cwd}","originator":"codex-tui","cli_version":"test","source":"cli","model_provider":"openai"}}}}"# + ); + let user = format!( + r#"{{"timestamp":"2026-05-07T00:00:01Z","type":"response_item","payload":{{"type":"message","role":"user","content":[{{"type":"input_text","text":"hi"}}]}}}}"# + ); + std::fs::write(file, format!("{meta}\n{user}\n")).unwrap(); + } + + #[test] + fn gather_sessions_includes_codex_rows_with_cwd_match() { + let temp = TempDir::new().unwrap(); + write_codex_session( + &temp.path().join(".codex"), + "00000000-0000-0000-0000-0000000000aa", + "/work/proj", + ); + let bundle = codex_only_bundle(temp.path()); + let rows = gather_sessions(&bundle, Path::new("/work/proj"), None, None); + assert_eq!(rows.len(), 1); + assert_eq!(rows[0].harness, Harness::Codex); + assert_eq!(rows[0].cwd.as_deref(), Some("/work/proj")); + assert!(rows[0].matches_cwd); + } + + #[test] + fn gather_sessions_ranks_cwd_matches_first() { + // Two claude sessions: one in cwd (older), one elsewhere (newer). 
+ // Despite the elsewhere row being newer, the cwd-match must come first. + let temp = TempDir::new().unwrap(); + let claude_dir = temp.path().join(".claude"); + write_claude_session(&claude_dir, "-cwd-project", "in-cwd-session", "hi"); + // Bump activity on the not-in-cwd session by writing a later timestamp. + let not_dir = claude_dir.join("projects").join("-other-project"); + std::fs::create_dir_all(¬_dir).unwrap(); + std::fs::write( + not_dir.join("not-in-cwd-session.jsonl"), + r#"{"type":"user","uuid":"u-x","timestamp":"2030-01-01T00:00:00Z","cwd":"/other/project","message":{"role":"user","content":"later"}}"#.to_string() + + "\n", + ) + .unwrap(); + let bundle = claude_only_bundle(temp.path()); + let rows = gather_sessions(&bundle, Path::new("/cwd/project"), None, None); + + assert_eq!(rows.len(), 2); + assert_eq!(rows[0].session_id, "in-cwd-session"); + assert!(rows[0].matches_cwd); + assert!(!rows[1].matches_cwd); + } } From 6bffafc8f8724d97ec364b53fb7f7896209e1146 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 21:43:31 -0400 Subject: [PATCH 10/36] feat(path-cli): implement `path share` explicit-args path When --harness and --session are both set, share derives the session via cmd_import's pair helpers, optionally writes the cache, then uploads via cmd_export::run_pathbase_inner. Picker path follows. --- crates/path-cli/src/cmd_import.rs | 5 - crates/path-cli/src/cmd_share.rs | 73 +++++++++- crates/path-cli/tests/integration.rs | 198 +++++++++++++++++++++++++++ 3 files changed, 268 insertions(+), 8 deletions(-) diff --git a/crates/path-cli/src/cmd_import.rs b/crates/path-cli/src/cmd_import.rs index a1929ea..d0fb47d 100644 --- a/crates/path-cli/src/cmd_import.rs +++ b/crates/path-cli/src/cmd_import.rs @@ -450,7 +450,6 @@ fn derive_claude_with_manager( /// Derive a single Claude conversation given an explicit project + session. 
/// Used by `cmd_share` after its picker has resolved the pair; mirrors the /// `(Some(p), Some(s), _)` arm in [`derive_claude_with_manager`]. -#[allow(dead_code)] // wired up by cmd_share in a follow-up task pub(crate) fn derive_claude_pair(project: &str, session: &str) -> Result { let manager = toolpath_claude::ClaudeConvo::new(); let cfg = toolpath_claude::derive::DeriveConfig { @@ -659,7 +658,6 @@ fn derive_gemini_with_manager( } /// Derive a single Gemini conversation given an explicit project + session. -#[allow(dead_code)] // wired up by cmd_share in a follow-up task pub(crate) fn derive_gemini_pair( project: &str, session: &str, @@ -836,7 +834,6 @@ fn derive_codex(session: Option, all: bool) -> Result> { } /// Derive a single Codex session given an explicit session id. -#[allow(dead_code)] // wired up by cmd_share in a follow-up task pub(crate) fn derive_codex_one(session: &str) -> Result { let manager = toolpath_codex::CodexConvo::new(); let config = toolpath_codex::derive::DeriveConfig { project_path: None }; @@ -981,7 +978,6 @@ fn derive_opencode( /// Derive a single opencode session given an explicit session id. #[cfg(not(target_os = "emscripten"))] -#[allow(dead_code)] // wired up by cmd_share in a follow-up task pub(crate) fn derive_opencode_one( session: &str, no_snapshot_diffs: bool, @@ -1156,7 +1152,6 @@ fn derive_pi_with_manager( } /// Derive a single Pi session given an explicit project + session. 
-#[allow(dead_code)] // wired up by cmd_share in a follow-up task pub(crate) fn derive_pi_pair( project: &str, session: &str, diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index 7f54fee..5874f06 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -76,7 +76,6 @@ pub(crate) enum Harness { Pi, } -#[allow(dead_code)] // wired up by gather_sessions in a follow-up task impl Harness { pub(crate) fn name(&self) -> &'static str { match self { @@ -89,6 +88,7 @@ impl Harness { } /// Padded so all five symbols line up in the fzf column. + #[allow(dead_code)] // wired up by the picker in a follow-up task pub(crate) fn symbol(&self) -> &'static str { match self { Harness::Claude => "claude ", @@ -116,6 +116,7 @@ impl Harness { } } + #[allow(dead_code)] // wired up by the picker in a follow-up task pub(crate) fn parse(s: &str) -> Option { match s { "claude" => Some(Harness::Claude), @@ -503,8 +504,74 @@ fn is_not_found_opencode(err: &toolpath_opencode::ConvoError) -> bool { } pub fn run(args: ShareArgs) -> Result<()> { - let _ = args; - anyhow::bail!("`path share` is not yet implemented") + let harness = args.harness.map(Harness::from_arg); + + if let (Some(h), Some(session)) = (harness, &args.session) { + return share_explicit(h, session.as_str(), &args); + } + + if args.session.is_some() && harness.is_none() { + anyhow::bail!("--session requires --harness"); + } + + // Picker path lands in the next task. 
+ anyhow::bail!("interactive `path share` is not yet implemented") +} + +fn share_explicit(harness: Harness, session: &str, args: &ShareArgs) -> Result<()> { + let project = match (harness.project_keyed(), args.project.as_ref()) { + (true, Some(p)) => Some(p.to_string_lossy().into_owned()), + (true, None) => anyhow::bail!( + "--project required when --harness is {} and --session is set", + harness.name() + ), + (false, _) => None, + }; + + let derived = derive_one(harness, project.as_deref(), session)?; + let summary = format!("{} session {}", harness.name(), derived.cache_id); + + if !args.no_cache { + let path = crate::cmd_cache::write_cached(&derived.cache_id, &derived.doc, args.force)?; + eprintln!( + "Imported {} session → {} ({})", + harness.name(), + derived.cache_id, + path.display() + ); + } + + let body = derived.doc.to_json()?; + let upload = crate::cmd_export::PathbaseUploadArgs { + url: args.url.clone(), + anon: args.anon, + repo: args.repo.clone(), + slug: args.slug.clone(), + public: args.public, + }; + crate::cmd_export::run_pathbase_inner(upload, &body, &summary) +} + +fn derive_one( + harness: Harness, + project: Option<&str>, + session: &str, +) -> Result { + match harness { + Harness::Claude => { + crate::cmd_import::derive_claude_pair(project.expect("project_keyed"), session) + } + Harness::Gemini => crate::cmd_import::derive_gemini_pair( + project.expect("project_keyed"), + session, + false, + ), + Harness::Pi => { + crate::cmd_import::derive_pi_pair(project.expect("project_keyed"), session, None) + } + Harness::Codex => crate::cmd_import::derive_codex_one(session), + Harness::Opencode => crate::cmd_import::derive_opencode_one(session, false), + } } #[cfg(test)] diff --git a/crates/path-cli/tests/integration.rs b/crates/path-cli/tests/integration.rs index 54402c7..af8254c 100644 --- a/crates/path-cli/tests/integration.rs +++ b/crates/path-cli/tests/integration.rs @@ -675,3 +675,201 @@ fn share_help_lists_unified_picker_flags() { 
.stdout(predicate::str::contains("--project")) .stdout(predicate::str::contains("--anon")); } + +#[test] +fn share_explicit_args_uploads_via_anon() { + use std::io::Write; + use std::net::TcpListener; + + // Stand up a one-shot mock that returns a valid AnonUploadResponse. + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let port = listener.local_addr().unwrap().port(); + let server = std::thread::spawn(move || { + let (mut stream, _) = listener.accept().unwrap(); + // Drain the request just enough to keep the OS happy. + use std::io::Read; + let mut buf = [0u8; 4096]; + let _ = stream.read(&mut buf); + let body = r#"{"id":"abc-123","url":"https://example.test/anon/abc-123"}"#; + let resp = format!( + "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", + body.len(), + body + ); + let _ = stream.write_all(resp.as_bytes()); + }); + + // Build a claude fixture so the explicit-args path has something to derive. + let temp = tempfile::tempdir().unwrap(); + let project = temp.path().join("proj"); + std::fs::create_dir_all(&project).unwrap(); + let claude_dir = temp.path().join(".claude"); + // toolpath-claude maps '/', '_', and '.' to '-' when sanitizing project + // paths into directory slugs — mirror that here so the fixture lands + // where the resolver looks for it. 
+ let project_slug = project + .to_string_lossy() + .replace([std::path::MAIN_SEPARATOR, '_', '.'], "-"); + let project_dir = claude_dir.join("projects").join(&project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + std::fs::write( + project_dir.join("session-abc.jsonl"), + format!( + r#"{{"type":"user","uuid":"u-1","timestamp":"2024-01-01T00:00:00Z","cwd":"{cwd}","message":{{"role":"user","content":"hi"}}}} +{{"type":"assistant","uuid":"a-1","timestamp":"2024-01-01T00:00:01Z","message":{{"role":"assistant","content":"hello"}}}} +"#, + cwd = project.display() + ), + ) + .unwrap(); + + let cfg = tempfile::tempdir().unwrap(); + cmd() + .env("HOME", temp.path()) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--anon", "--no-cache", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success() + .stdout(predicate::str::contains("https://example.test/anon/abc-123")) + .stderr(predicate::str::contains("Uploaded")); + + server.join().unwrap(); +} + +/// Helper for the cache tests. Spawns a one-shot mock anon-upload server +/// on a free port and returns (port, server-thread-handle, fixture-temp, +/// project-path, $HOME-path). 
+fn share_anon_fixture() -> (u16, std::thread::JoinHandle<()>, tempfile::TempDir, PathBuf, PathBuf) +{ + use std::io::{Read, Write}; + use std::net::TcpListener; + + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let port = listener.local_addr().unwrap().port(); + let server = std::thread::spawn(move || { + let (mut stream, _) = listener.accept().unwrap(); + let mut buf = [0u8; 4096]; + let _ = stream.read(&mut buf); + let body = r#"{"id":"abc","url":"https://example.test/anon/abc"}"#; + let resp = format!( + "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", + body.len(), + body + ); + let _ = stream.write_all(resp.as_bytes()); + }); + + let temp = tempfile::tempdir().unwrap(); + let project = temp.path().join("proj"); + std::fs::create_dir_all(&project).unwrap(); + let claude_dir = temp.path().join(".claude"); + // toolpath-claude maps '/', '_', and '.' to '-' when sanitizing project + // paths into directory slugs — mirror that here so the fixture lands + // where the resolver looks for it. 
+ let project_slug = project + .to_string_lossy() + .replace([std::path::MAIN_SEPARATOR, '_', '.'], "-"); + let project_dir = claude_dir.join("projects").join(&project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + std::fs::write( + project_dir.join("session-abc.jsonl"), + format!( + r#"{{"type":"user","uuid":"u-1","timestamp":"2024-01-01T00:00:00Z","cwd":"{cwd}","message":{{"role":"user","content":"hi"}}}} +{{"type":"assistant","uuid":"a-1","timestamp":"2024-01-01T00:00:01Z","message":{{"role":"assistant","content":"hello"}}}} +"#, + cwd = project.display() + ), + ) + .unwrap(); + + let home = temp.path().to_path_buf(); + (port, server, temp, project, home) +} + +#[test] +fn share_writes_cache_by_default() { + let (port, server, _temp, project, home) = share_anon_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("HOME", &home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--anon", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success(); + + let docs = cfg.path().join("documents"); + let entries: Vec<_> = std::fs::read_dir(&docs) + .unwrap() + .filter_map(|e| e.ok()) + .collect(); + assert_eq!( + entries.len(), + 1, + "expected exactly one cache entry, got {entries:?}" + ); + let name = entries[0].file_name().to_string_lossy().into_owned(); + assert!( + name.starts_with("claude-"), + "expected claude-* cache id, got {name}" + ); + + server.join().unwrap(); +} + +#[test] +fn share_no_cache_skips_write() { + let (port, server, _temp, project, home) = share_anon_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("HOME", &home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--anon", "--no-cache", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + 
.assert() + .success(); + + let docs = cfg.path().join("documents"); + if docs.exists() { + let entries: Vec<_> = std::fs::read_dir(&docs) + .unwrap() + .filter_map(|e| e.ok()) + .collect(); + assert!( + entries.is_empty(), + "expected no cache entries with --no-cache, got {entries:?}" + ); + } + + server.join().unwrap(); +} From cc6941a6c9b7a5804cfa0e74a34d97b9471d0f5b Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 22:06:29 -0400 Subject: [PATCH 11/36] chore(path-cli): drop useless format! in codex test fixture --- crates/path-cli/src/cmd_share.rs | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index 5874f06..e01480d 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -734,9 +734,7 @@ mod tests { let meta = format!( r#"{{"timestamp":"2026-05-07T00:00:00Z","type":"session_meta","payload":{{"id":"{id}","timestamp":"2026-05-07T00:00:00Z","cwd":"{cwd}","originator":"codex-tui","cli_version":"test","source":"cli","model_provider":"openai"}}}}"# ); - let user = format!( - r#"{{"timestamp":"2026-05-07T00:00:01Z","type":"response_item","payload":{{"type":"message","role":"user","content":[{{"type":"input_text","text":"hi"}}]}}}}"# - ); + let user = r#"{"timestamp":"2026-05-07T00:00:01Z","type":"response_item","payload":{"type":"message","role":"user","content":[{"type":"input_text","text":"hi"}]}}"#; std::fs::write(file, format!("{meta}\n{user}\n")).unwrap(); } From 5a727c0b67a73f021239f68f852c9f7e5313913e Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 22:25:03 -0400 Subject: [PATCH 12/36] feat(path-cli): wire the unified `path share` picker Aggregates SessionRow values across installed harnesses, ranks cwd-matches first, and pipes them through fzf. Falls back to a manual-recipe message when fzf isn't available, and prints a probe summary when no harness has any sessions to share. 
--- crates/path-cli/src/cmd_share.rs | 229 +++++++++++++++++++++++++-- crates/path-cli/tests/integration.rs | 28 ++++ 2 files changed, 248 insertions(+), 9 deletions(-) diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index e01480d..fcf0dca 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -88,7 +88,6 @@ impl Harness { } /// Padded so all five symbols line up in the fzf column. - #[allow(dead_code)] // wired up by the picker in a follow-up task pub(crate) fn symbol(&self) -> &'static str { match self { Harness::Claude => "claude ", @@ -116,7 +115,6 @@ impl Harness { } } - #[allow(dead_code)] // wired up by the picker in a follow-up task pub(crate) fn parse(s: &str) -> Option { match s { "claude" => Some(Harness::Claude), @@ -130,7 +128,6 @@ impl Harness { } /// One row in the unified session picker. -#[allow(dead_code)] // wired up by gather_sessions in a follow-up task #[derive(Debug, Clone)] pub(crate) struct SessionRow { pub(crate) harness: Harness, @@ -148,7 +145,6 @@ pub(crate) struct SessionRow { /// Bundle of provider managers used during aggregation. Production code /// builds this from real `$HOME` via `from_environment`; tests construct /// it directly with provider-specific resolvers. -#[allow(dead_code)] // wired up by gather_sessions in a follow-up task #[derive(Default)] pub(crate) struct HarnessBundle { pub(crate) claude: Option, @@ -162,7 +158,6 @@ impl HarnessBundle { /// Build the production bundle. Each provider is included /// unconditionally (its `new()` doesn't fail on a missing home dir); /// `gather_sessions` skips the ones whose listing returns empty/NotFound. 
- #[allow(dead_code)] // wired up by gather_sessions in a follow-up task pub(crate) fn from_environment() -> Self { Self { claude: Some(toolpath_claude::ClaudeConvo::new()), @@ -181,7 +176,6 @@ impl HarnessBundle { /// Filters: `harness_filter` keeps only rows from one harness; `project_filter` /// keeps only rows whose project (for keyed) or cwd (for session-keyed) /// canonicalizes to that path. -#[allow(dead_code)] // call sites land in Tasks 7-8 pub(crate) fn gather_sessions( bundle: &HarnessBundle, cwd: &std::path::Path, @@ -509,13 +503,124 @@ pub fn run(args: ShareArgs) -> Result<()> { if let (Some(h), Some(session)) = (harness, &args.session) { return share_explicit(h, session.as_str(), &args); } - if args.session.is_some() && harness.is_none() { anyhow::bail!("--session requires --harness"); } - // Picker path lands in the next task. - anyhow::bail!("interactive `path share` is not yet implemented") + let cwd = std::env::current_dir()?; + let bundle = HarnessBundle::from_environment(); + let project_filter = args.project.as_deref(); + let rows = gather_sessions(&bundle, &cwd, harness, project_filter); + + if rows.is_empty() { + return bail_no_sessions(&bundle, project_filter); + } + + if !crate::fzf::available() { + eprintln!( + "Interactive `path share` needs `fzf` on PATH and a TTY.\n\ + \n\ + Manual recipe:\n \ + path import # writes a cache entry, prints its id\n \ + path export pathbase --input " + ); + anyhow::bail!("fzf unavailable; run `path import ` then `path export pathbase`"); + } + + let lines: Vec = rows.iter().map(format_picker_row).collect(); + let host = pathbase_host_for_picker(&args); + let header = format!("share an agent session (Enter = upload to {host})"); + let opts = crate::fzf::PickOptions { + with_nth: "4..", + prompt: "share> ", + preview: Some("path show {1} --project {2} --session {3}"), + header: Some(&header), + tiebreak: "index", + multi: false, + }; + let selected = crate::fzf::pick(&lines, &opts)?; + let line = match 
selected.into_iter().next() { + Some(l) => l, + None => return Ok(()), // user cancelled + }; + let (h, key, session) = parse_picker_row(&line) + .ok_or_else(|| anyhow::anyhow!("internal: failed to parse picker row"))?; + + let mut explicit = ShareArgs { + url: args.url.clone(), + anon: args.anon, + repo: args.repo.clone(), + slug: args.slug.clone(), + public: args.public, + harness: Some(harness_to_arg(h)), + session: Some(session.clone()), + project: if h.project_keyed() { + Some(PathBuf::from(&key)) + } else { + None + }, + force: args.force, + no_cache: args.no_cache, + }; + eprintln!( + "Picked {} session {}", + h.name(), + explicit.session.as_deref().unwrap_or("?") + ); + let session_id = explicit.session.take().unwrap(); + share_explicit(h, &session_id, &explicit) +} + +fn harness_to_arg(h: Harness) -> HarnessArg { + match h { + Harness::Claude => HarnessArg::Claude, + Harness::Gemini => HarnessArg::Gemini, + Harness::Codex => HarnessArg::Codex, + Harness::Opencode => HarnessArg::Opencode, + Harness::Pi => HarnessArg::Pi, + } +} + +fn pathbase_host_for_picker(args: &ShareArgs) -> String { + use crate::cmd_pathbase::resolve_url; + if let Some(u) = &args.url { + return resolve_url(Some(u.clone())); + } + // Best-effort: if there's a stored session, surface its URL; otherwise fall back to default. + let path = match crate::cmd_pathbase::credentials_path() { + Ok(p) => p, + Err(_) => return resolve_url(None), + }; + match crate::cmd_pathbase::load_session(&path) { + Ok(Some(s)) => s.url, + _ => resolve_url(None), + } +} + +fn bail_no_sessions(bundle: &HarnessBundle, project_filter: Option<&std::path::Path>) -> Result<()> { + if let Some(p) = project_filter { + anyhow::bail!( + "No agent sessions found in project {}. 
Run without --project to see sessions across all projects.", + p.display() + ); + } + + let mut summary = String::from("No agent sessions found.\n"); + summary.push_str(&probe_summary_line("claude", bundle.claude.is_some())); + summary.push_str(&probe_summary_line("gemini", bundle.gemini.is_some())); + summary.push_str(&probe_summary_line("codex", bundle.codex.is_some())); + summary.push_str(&probe_summary_line("opencode", bundle.opencode.is_some())); + summary.push_str(&probe_summary_line("pi", bundle.pi.is_some())); + eprint!("{summary}"); + anyhow::bail!("no shareable sessions"); +} + +fn probe_summary_line(name: &str, present: bool) -> String { + if present { + format!(" {name}: 0 sessions\n") + } else { + format!(" {name}: not configured\n") + } } fn share_explicit(harness: Harness, session: &str, args: &ShareArgs) -> Result<()> { @@ -552,6 +657,74 @@ fn share_explicit(harness: Harness, session: &str, args: &ShareArgs) -> Result<( crate::cmd_export::run_pathbase_inner(upload, &body, &summary) } +/// Build the TSV line fed to fzf. Cols 1–3 are hidden (harness/key/session, +/// used as parser keys); cols 4..8 are visible to the user. +fn format_picker_row(row: &SessionRow) -> String { + let key = row + .project + .clone() + .or_else(|| row.cwd.clone()) + .unwrap_or_default(); + let when = row + .last_activity + .map(|t| t.format("%Y-%m-%d %H:%M").to_string()) + .unwrap_or_else(|| " — ".to_string()); + let scope = if row.matches_cwd { "·" } else { " " }; + let project_short = project_short(&key); + let title = fzf_title(&row.title); + format!( + "{}\t{}\t{}\t{}\t{}\t{} msgs\t{}\t{}\t{}", + row.harness.name(), + tab_safe(&key), + tab_safe(&row.session_id), + row.harness.symbol(), + when, + row.message_count, + scope, + tab_safe(&project_short), + title, + ) +} + +/// Inverse of [`format_picker_row`] — pulls (harness, key, session) back +/// out of the line fzf returned. Returns `None` if the line is malformed. 
+fn parse_picker_row(line: &str) -> Option<(Harness, String, String)> { + let mut parts = line.split('\t'); + let h = Harness::parse(parts.next()?)?; + let key = parts.next()?.to_string(); + let session = parts.next()?.to_string(); + if session.is_empty() { + return None; + } + Some((h, key, session)) +} + +fn tab_safe(s: &str) -> String { + s.replace(['\t', '\n', '\r'], " ") +} + +fn fzf_title(s: &str) -> String { + const MAX: usize = 120; + let safe = tab_safe(s); + if safe.chars().count() > MAX { + let head: String = safe.chars().take(MAX - 1).collect(); + format!("{head}…") + } else { + safe + } +} + +fn project_short(p: &str) -> String { + let trimmed = p.trim_end_matches('/'); + let parts: Vec<&str> = trimmed.rsplit('/').take(2).collect(); + if parts.is_empty() { + return p.to_string(); + } + let mut out: Vec<&str> = parts.into_iter().collect(); + out.reverse(); + out.join("/") +} + fn derive_one( harness: Harness, project: Option<&str>, @@ -778,4 +951,42 @@ mod tests { assert!(rows[0].matches_cwd); assert!(!rows[1].matches_cwd); } + + #[test] + fn parse_picker_row_roundtrips_keyed() { + let row = SessionRow { + harness: Harness::Claude, + project: Some("/tmp/proj".to_string()), + cwd: None, + session_id: "sess-abc".to_string(), + title: "Hello\tworld".to_string(), + last_activity: None, + message_count: 3, + matches_cwd: true, + }; + let line = format_picker_row(&row); + let (harness, key, session) = parse_picker_row(&line).unwrap(); + assert_eq!(harness, Harness::Claude); + assert_eq!(key, "/tmp/proj"); + assert_eq!(session, "sess-abc"); + } + + #[test] + fn parse_picker_row_roundtrips_session_keyed() { + let row = SessionRow { + harness: Harness::Codex, + project: None, + cwd: Some("/work/proj".to_string()), + session_id: "0190abcd".to_string(), + title: "(no prompt)".to_string(), + last_activity: None, + message_count: 0, + matches_cwd: false, + }; + let line = format_picker_row(&row); + let (harness, key, session) = parse_picker_row(&line).unwrap(); + 
assert_eq!(harness, Harness::Codex); + assert_eq!(key, "/work/proj"); // codex has no project; cwd carried as the keyed slot + assert_eq!(session, "0190abcd"); + } } diff --git a/crates/path-cli/tests/integration.rs b/crates/path-cli/tests/integration.rs index af8254c..9c4e898 100644 --- a/crates/path-cli/tests/integration.rs +++ b/crates/path-cli/tests/integration.rs @@ -873,3 +873,31 @@ fn share_no_cache_skips_write() { server.join().unwrap(); } + +#[test] +fn share_filters_by_project_with_no_matches_errors() { + let cfg = tempfile::tempdir().unwrap(); + let home = tempfile::tempdir().unwrap(); + let nonexistent = home.path().join("never"); + + cmd() + .env("HOME", home.path()) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["share", "--project"]) + .arg(&nonexistent) + .assert() + .failure() + .stderr(predicate::str::contains("No agent sessions found in project")); +} + +#[test] +fn share_no_harness_non_tty_prints_recipe() { + let cfg = tempfile::tempdir().unwrap(); + cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["share"]) + .assert() + .failure() + .stderr(predicate::str::contains("path import")) + .stderr(predicate::str::contains("path export pathbase")); +} From 2d4b25a58325492e39f9b05fd8ed7a420fa0f0d8 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 22:33:02 -0400 Subject: [PATCH 13/36] test(path-cli): make share-recipe test independent of $HOME Without a fixture under $HOME, gather_sessions returns an empty Vec and share() bails through bail_no_sessions before the fzf-unavailable recipe path. Fixture builds a minimal claude session in a tempdir so the recipe path fires regardless of host environment. 
--- crates/path-cli/tests/integration.rs | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/crates/path-cli/tests/integration.rs b/crates/path-cli/tests/integration.rs index 9c4e898..352461c 100644 --- a/crates/path-cli/tests/integration.rs +++ b/crates/path-cli/tests/integration.rs @@ -892,8 +892,33 @@ fn share_filters_by_project_with_no_matches_errors() { #[test] fn share_no_harness_non_tty_prints_recipe() { + // Build a minimal claude fixture in a tempdir, point HOME at it, so + // gather_sessions returns a non-empty Vec. Without this, an environment + // with no agent harnesses configured (e.g. CI) would hit bail_no_sessions + // before the fzf-unavailable recipe path. We want the recipe path here. + let temp = tempfile::tempdir().unwrap(); + let project = temp.path().join("proj"); + std::fs::create_dir_all(&project).unwrap(); + let claude_dir = temp.path().join(".claude"); + let project_slug = project + .to_string_lossy() + .replace([std::path::MAIN_SEPARATOR, '_', '.'], "-"); + let project_dir = claude_dir.join("projects").join(&project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + std::fs::write( + project_dir.join("session-recipe.jsonl"), + format!( + r#"{{"type":"user","uuid":"u-1","timestamp":"2024-01-01T00:00:00Z","cwd":"{cwd}","message":{{"role":"user","content":"hi"}}}} +{{"type":"assistant","uuid":"a-1","timestamp":"2024-01-01T00:00:01Z","message":{{"role":"assistant","content":"hello"}}}} +"#, + cwd = project.display() + ), + ) + .unwrap(); + let cfg = tempfile::tempdir().unwrap(); cmd() + .env("HOME", temp.path()) .env("TOOLPATH_CONFIG_DIR", cfg.path()) .args(["share"]) .assert() From 51c40387969cf2899edf50bf45d4d2a31cf3cf61 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 23:19:01 -0400 Subject: [PATCH 14/36] fix(path-cli): accept hidden --project on `show codex` and `show opencode` The `path share` picker uses a unified preview template `path show {harness} --project {key} --session {id}` for 
all five supported harnesses. Codex and opencode previously rejected --project with a clap error, breaking the preview pane for those rows. Accept --project as a no-op (hidden from --help) so the unified template renders correctly. --- crates/path-cli/src/cmd_show.rs | 18 ++++++++++++++++-- 1 file changed, 16 insertions(+), 2 deletions(-) diff --git a/crates/path-cli/src/cmd_show.rs b/crates/path-cli/src/cmd_show.rs index 0e1d37e..d301c43 100644 --- a/crates/path-cli/src/cmd_show.rs +++ b/crates/path-cli/src/cmd_show.rs @@ -38,12 +38,20 @@ pub enum ShowSource { /// Session id, UUID, or filename stem #[arg(short, long)] session: String, + + /// Compatibility shim for the unified `path share` preview template; ignored. + #[arg(long, hide = true)] + project: Option, }, /// Show an opencode session as a markdown summary Opencode { /// Session id (`ses_…`) #[arg(short, long)] session: String, + + /// Compatibility shim for the unified `path share` preview template; ignored. + #[arg(long, hide = true)] + project: Option, }, /// Show a Pi (pi.dev) session as a markdown summary Pi { @@ -96,7 +104,10 @@ fn derive_one(source: ShowSource) -> Result { }; Ok(toolpath_gemini::derive::derive_path(&convo, &cfg)) } - ShowSource::Codex { session } => { + ShowSource::Codex { + session, + project: _, + } => { let manager = toolpath_codex::CodexConvo::new(); let s = manager .read_session(&session) @@ -104,7 +115,10 @@ fn derive_one(source: ShowSource) -> Result { let cfg = toolpath_codex::derive::DeriveConfig { project_path: None }; Ok(toolpath_codex::derive::derive_path(&s, &cfg)) } - ShowSource::Opencode { session } => { + ShowSource::Opencode { + session, + project: _, + } => { let manager = toolpath_opencode::OpencodeConvo::new(); let s = manager .read_session(&session) From 67a67c775fe73606fd66449fff5e2fd7f0e31af8 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 23:19:08 -0400 Subject: [PATCH 15/36] refactor(path-cli): drop synthetic session field in picker 
dispatch share_explicit takes session as a separate parameter, so the synthetic ShareArgs we built from the picker selection set the field then took it back out via .take().unwrap(). Just pass the destructured session String directly and leave the synthetic args's session field as None. --- crates/path-cli/src/cmd_share.rs | 13 ++++--------- 1 file changed, 4 insertions(+), 9 deletions(-) diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index fcf0dca..d90b934 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -546,14 +546,14 @@ pub fn run(args: ShareArgs) -> Result<()> { let (h, key, session) = parse_picker_row(&line) .ok_or_else(|| anyhow::anyhow!("internal: failed to parse picker row"))?; - let mut explicit = ShareArgs { + let explicit = ShareArgs { url: args.url.clone(), anon: args.anon, repo: args.repo.clone(), slug: args.slug.clone(), public: args.public, harness: Some(harness_to_arg(h)), - session: Some(session.clone()), + session: None, // unused by share_explicit project: if h.project_keyed() { Some(PathBuf::from(&key)) } else { @@ -562,13 +562,8 @@ pub fn run(args: ShareArgs) -> Result<()> { force: args.force, no_cache: args.no_cache, }; - eprintln!( - "Picked {} session {}", - h.name(), - explicit.session.as_deref().unwrap_or("?") - ); - let session_id = explicit.session.take().unwrap(); - share_explicit(h, &session_id, &explicit) + eprintln!("Picked {} session {}", h.name(), session); + share_explicit(h, &session, &explicit) } fn harness_to_arg(h: Harness) -> HarnessArg { From 26227cc15c27094b93ebfbd9f234ada6c0199f70 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 23:32:20 -0400 Subject: [PATCH 16/36] docs: document `path share` in CLAUDE.md --- CLAUDE.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/CLAUDE.md b/CLAUDE.md index 30062c5..010d558 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -84,6 +84,11 @@ cargo run -p path-cli -- import pi --project 
/path/to/project cargo run -p path-cli -- import pathbase cargo run -p path-cli -- import claude --project . --no-cache | path render md --input - +# Share an agent session to Pathbase (interactive picker, single-shot) +cargo run -p path-cli -- share +cargo run -p path-cli -- share --harness claude --session --project /path/to/project +cargo run -p path-cli -- share --url https://my-pathbase.example + # Export toolpath documents into external formats. is a cache id or a file path. cargo run -p path-cli -- export claude --input --project /tmp/sandbox cargo run -p path-cli -- export claude --input --output conv.jsonl @@ -218,3 +223,4 @@ Build the site after changes: `cd site && pnpm run build` (should produce 7 page - Format references for the agent on-disk formats we derive from live at `docs/agents/formats/`. The Claude Code format (`~/.claude/projects/…` JSONL) gets the deepest treatment — twelve focused docs at `docs/agents/formats/claude-code/` covering envelope, entry types, tools, session chains, compaction, writing-compatible JSONL, a linear walkthrough, and a version-keyed changelog. Sibling single-file references: `codex.md`, `gemini.md`, `opencode.md`. Keep them in sync with their derive crates when fields or behaviors change. - Interactive session selection: `path import ` (claude / gemini / pi / codex / opencode) auto-launches `fzf` when stdin and stderr are TTYs, `fzf` is on `$PATH`, and no `--session` was given. Multi-select (TAB) produces a `Graph` document; single-select produces a `Path`. The picker uses `path show --…` as its `--preview` command. When fzf isn't available, it falls back to most-recent (with `--project`) or prints the manual recipe (without). `path list --format tsv` is the documented machine-readable surface — column 1 is the project (for claude/gemini/pi) or session id (for codex/opencode), and the trailing column carries `first_user_message` so consumers can fuzzy-match by topic. 
- Conversation metadata title field: `toolpath-claude::ConversationMetadata`, `toolpath-gemini::ConversationMetadata`, and `toolpath-pi::SessionMeta` all expose `first_user_message: Option` — the first non-empty user-prompt text. Populated cheaply during the metadata pass (single-pass for Claude/Gemini; one extra short read for Pi). Used by the picker UI but useful for any "list sessions by topic" surface. +- `path share` is the one-shot equivalent of `path import | path export pathbase`. It probes installed agent harnesses (claude/gemini/codex/opencode/pi), aggregates their sessions into a single fzf picker, and ranks rows whose project (claude/gemini/pi) or recorded cwd (codex/opencode) canonicalizes to the current directory at the top. `--harness` narrows the picker to one provider; `--harness X --session Y` (and `--project P` for keyed providers) skips the picker entirely. Pathbase flags (`--url`, `--anon`, `--repo`, `--slug`, `--public`) match `path export pathbase`. By default the derived doc is written to the cache like `import` does; pass `--no-cache` to skip. From 464d3d0a03cce1b5f6e7e04fa6a127a3a85f5fbc Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 23:53:40 -0400 Subject: [PATCH 17/36] feat(path-cli): exit 130 on `path share` fzf cancel MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit fzf::pick now returns PickResult { Selected, NoMatch, Cancelled } so callers can distinguish a deliberate user cancel from no-match. cmd_share exits 130 on Cancelled to match the spec; cmd_import preserves its existing 'empty pairs → no documents produced' bail. 
--- crates/path-cli/src/cmd_import.rs | 40 ++++++++++++++++++++++++------- crates/path-cli/src/cmd_share.rs | 17 +++++++++---- crates/path-cli/src/fzf.rs | 29 ++++++++++++++++++---- 3 files changed, 69 insertions(+), 17 deletions(-) diff --git a/crates/path-cli/src/cmd_import.rs b/crates/path-cli/src/cmd_import.rs index d0fb47d..80d2f14 100644 --- a/crates/path-cli/src/cmd_import.rs +++ b/crates/path-cli/src/cmd_import.rs @@ -519,7 +519,10 @@ fn pick_claude_in_project( tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_project_session(&selected))) } @@ -565,7 +568,10 @@ fn pick_claude_global( tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_project_session(&selected))) } @@ -727,7 +733,10 @@ fn pick_gemini_in_project( tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_project_session(&selected))) } @@ -773,7 +782,10 @@ fn pick_gemini_global( tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_project_session(&selected))) } @@ -898,7 +910,10 @@ fn pick_codex(manager: &toolpath_codex::CodexConvo) -> Result tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? 
{ + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_single_id(&selected))) } @@ -1053,7 +1068,10 @@ fn pick_opencode( tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_single_id(&selected))) } @@ -1207,7 +1225,10 @@ fn pick_pi_in_project( tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_project_session(&selected))) } @@ -1253,7 +1274,10 @@ fn pick_pi_global(manager: &toolpath_pi::PiConvo) -> Result v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_project_session(&selected))) } diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index d90b934..c2b460b 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -538,10 +538,19 @@ pub fn run(args: ShareArgs) -> Result<()> { tiebreak: "index", multi: false, }; - let selected = crate::fzf::pick(&lines, &opts)?; - let line = match selected.into_iter().next() { - Some(l) => l, - None => return Ok(()), // user cancelled + let line = match crate::fzf::pick(&lines, &opts)? { + crate::fzf::PickResult::Selected(v) => match v.into_iter().next() { + Some(l) => l, + // Selected with an empty payload should not happen (fzf exits 0 + // only when at least one row was confirmed), but treat it like + // no-match for safety. + None => return Ok(()), + }, + // No row matched the query — exit 0, same as today, no extra noise. + crate::fzf::PickResult::NoMatch => return Ok(()), + // Esc / Ctrl-C: deliberate user cancel. 
Signal to the shell with + // exit 130 so it's distinguishable from a successful share. + crate::fzf::PickResult::Cancelled => std::process::exit(130), }; let (h, key, session) = parse_picker_row(&line) .ok_or_else(|| anyhow::anyhow!("internal: failed to parse picker row"))?; diff --git a/crates/path-cli/src/fzf.rs b/crates/path-cli/src/fzf.rs index 500c2ee..efa3feb 100644 --- a/crates/path-cli/src/fzf.rs +++ b/crates/path-cli/src/fzf.rs @@ -37,9 +37,25 @@ fn which(cmd: &str) -> Option { None } -/// Run fzf with the supplied lines on stdin. Returns the selected lines, or -/// an empty vec if the user cancelled (Esc / Ctrl-C / no match). -pub fn pick(lines: &[String], opts: &PickOptions<'_>) -> Result> { +/// Outcome of an fzf invocation. +/// +/// Distinguishes a deliberate user cancel (Esc / Ctrl-C, fzf exit 130) from +/// the no-match case (fzf exit 1). Callers that want to surface a non-zero +/// exit on cancel can match on `Cancelled`; callers that just want the picked +/// lines treat both `NoMatch` and `Cancelled` as "empty selection". +pub enum PickResult { + /// fzf exited 0 with at least one selected line. + Selected(Vec), + /// fzf exited 1: input was non-empty but nothing matched the query. + NoMatch, + /// fzf exited 130: the user pressed Esc / Ctrl-C / Ctrl-D. + Cancelled, +} + +/// Run fzf with the supplied lines on stdin. Returns a `PickResult` so the +/// caller can distinguish a successful selection from no-match vs. an +/// explicit user cancel (which some callers map to a non-zero exit). 
+pub fn pick(lines: &[String], opts: &PickOptions<'_>) -> Result { let mut args: Vec = vec![ "--delimiter=\t".into(), format!("--with-nth={}", opts.with_nth), @@ -85,9 +101,12 @@ pub fn pick(lines: &[String], opts: &PickOptions<'_>) -> Result> { match output.status.code() { Some(0) => { let text = String::from_utf8_lossy(&output.stdout); - Ok(text.lines().map(|s| s.to_string()).collect()) + Ok(PickResult::Selected( + text.lines().map(|s| s.to_string()).collect(), + )) } - Some(1) | Some(130) => Ok(Vec::new()), + Some(1) => Ok(PickResult::NoMatch), + Some(130) => Ok(PickResult::Cancelled), _ => anyhow::bail!("fzf exited with status {:?}", output.status), } } From 8bda7d0668e4f521ca253f164a5c4e49caf1a8d2 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 23:56:44 -0400 Subject: [PATCH 18/36] feat(path-cli): probe harness paths in no-sessions message bail_no_sessions now reports each harness's base directory and whether it exists, instead of an unreachable 'not configured' branch that always read '0 sessions'. Helps users diagnose 'I have claude installed but path share says nothing'. 
--- crates/path-cli/src/cmd_share.rs | 252 +++++++++++++++++++++++++++++-- 1 file changed, 242 insertions(+), 10 deletions(-) diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index c2b460b..369ce68 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -610,23 +610,143 @@ fn bail_no_sessions(bundle: &HarnessBundle, project_filter: Option<&std::path::P } let mut summary = String::from("No agent sessions found.\n"); - summary.push_str(&probe_summary_line("claude", bundle.claude.is_some())); - summary.push_str(&probe_summary_line("gemini", bundle.gemini.is_some())); - summary.push_str(&probe_summary_line("codex", bundle.codex.is_some())); - summary.push_str(&probe_summary_line("opencode", bundle.opencode.is_some())); - summary.push_str(&probe_summary_line("pi", bundle.pi.is_some())); + // Pad harness names so the path column lines up: "opencode:" is the + // longest at 9 chars (8 + colon). + let home = home_dir(); + summary.push_str(&format_status_line("claude", &harness_status_claude(bundle, home.as_deref()))); + summary.push_str(&format_status_line("gemini", &harness_status_gemini(bundle, home.as_deref()))); + summary.push_str(&format_status_line("codex", &harness_status_codex(bundle, home.as_deref()))); + summary.push_str(&format_status_line( + "opencode", + &harness_status_opencode(bundle, home.as_deref()), + )); + summary.push_str(&format_status_line("pi", &harness_status_pi(bundle, home.as_deref()))); eprint!("{summary}"); anyhow::bail!("no shareable sessions"); } -fn probe_summary_line(name: &str, present: bool) -> String { - if present { - format!(" {name}: 0 sessions\n") - } else { - format!(" {name}: not configured\n") +/// Cross-platform `$HOME` lookup matching the providers' internal helpers. +/// Returns `None` only when neither `$HOME` nor `$USERPROFILE` is set. 
+fn home_dir() -> Option { + std::env::var_os("HOME") + .or_else(|| std::env::var_os("USERPROFILE")) + .map(std::path::PathBuf::from) +} + +/// Human-readable status of a harness's on-disk store: either the (possibly +/// home-relative) path with a "(0 sessions)" hint, or the path with a +/// "not found" hint when the directory/database is absent. +#[derive(Debug, PartialEq, Eq)] +struct HarnessStatus { + /// Display path (tilde-prefixed when under `$HOME`). + path: String, + /// True when the path exists on disk. + exists: bool, +} + +impl HarnessStatus { + fn render(&self) -> String { + if self.exists { + format!("{} (0 sessions)", self.path) + } else { + format!("{} not found", self.path) + } + } + + /// Status when the resolver itself failed (e.g. no $HOME). + fn unresolved() -> Self { + Self { + path: "".to_string(), + exists: false, + } + } +} + +/// Format a single status line, padding the harness name so that the path +/// column lines up across all five rows. The longest name is "opencode" (8). 
+fn format_status_line(name: &str, status: &HarnessStatus) -> String { + format!(" {:<9} {}\n", format!("{name}:"), status.render()) +} + +fn harness_status_claude(bundle: &HarnessBundle, home: Option<&std::path::Path>) -> HarnessStatus { + let Some(mgr) = &bundle.claude else { + return HarnessStatus::unresolved(); + }; + match mgr.resolver().projects_dir() { + Ok(p) => HarnessStatus { + path: home_relative(&p, home), + exists: p.exists(), + }, + Err(_) => HarnessStatus::unresolved(), + } +} + +fn harness_status_gemini(bundle: &HarnessBundle, home: Option<&std::path::Path>) -> HarnessStatus { + let Some(mgr) = &bundle.gemini else { + return HarnessStatus::unresolved(); + }; + match mgr.resolver().tmp_dir() { + Ok(p) => HarnessStatus { + path: home_relative(&p, home), + exists: p.exists(), + }, + Err(_) => HarnessStatus::unresolved(), } } +fn harness_status_codex(bundle: &HarnessBundle, home: Option<&std::path::Path>) -> HarnessStatus { + let Some(mgr) = &bundle.codex else { + return HarnessStatus::unresolved(); + }; + match mgr.resolver().sessions_root() { + Ok(p) => HarnessStatus { + path: home_relative(&p, home), + exists: p.exists(), + }, + Err(_) => HarnessStatus::unresolved(), + } +} + +fn harness_status_opencode(bundle: &HarnessBundle, home: Option<&std::path::Path>) -> HarnessStatus { + let Some(mgr) = &bundle.opencode else { + return HarnessStatus::unresolved(); + }; + match mgr.resolver().db_path() { + Ok(p) => HarnessStatus { + path: home_relative(&p, home), + exists: p.exists(), + }, + Err(_) => HarnessStatus::unresolved(), + } +} + +fn harness_status_pi(bundle: &HarnessBundle, home: Option<&std::path::Path>) -> HarnessStatus { + let Some(mgr) = &bundle.pi else { + return HarnessStatus::unresolved(); + }; + let p = mgr.resolver().sessions_dir().to_path_buf(); + HarnessStatus { + path: home_relative(&p, home), + exists: p.exists(), + } +} + +/// Display `path` as `~/relative/part` when it's under `home`, otherwise +/// return its absolute lossy form. 
Pure helper — does no filesystem I/O. +fn home_relative(path: &std::path::Path, home: Option<&std::path::Path>) -> String { + if let Some(home) = home + && let Ok(rest) = path.strip_prefix(home) + { + // strip_prefix returns the empty path when path == home; treat that + // as plain "~". + if rest.as_os_str().is_empty() { + return "~".to_string(); + } + return format!("~/{}", rest.display()); + } + path.display().to_string() +} + fn share_explicit(harness: Harness, session: &str, args: &ShareArgs) -> Result<()> { let project = match (harness.project_keyed(), args.project.as_ref()) { (true, Some(p)) => Some(p.to_string_lossy().into_owned()), @@ -993,4 +1113,116 @@ mod tests { assert_eq!(key, "/work/proj"); // codex has no project; cwd carried as the keyed slot assert_eq!(session, "0190abcd"); } + + #[test] + fn home_relative_strips_home_prefix() { + let home = Path::new("/Users/alex"); + assert_eq!( + home_relative(Path::new("/Users/alex/.claude/projects"), Some(home)), + "~/.claude/projects" + ); + } + + #[test] + fn home_relative_returns_tilde_for_home_itself() { + let home = Path::new("/Users/alex"); + assert_eq!(home_relative(home, Some(home)), "~"); + } + + #[test] + fn home_relative_passes_through_paths_outside_home() { + let home = Path::new("/Users/alex"); + assert_eq!(home_relative(Path::new("/tmp/elsewhere"), Some(home)), "/tmp/elsewhere"); + } + + #[test] + fn home_relative_passes_through_when_no_home() { + assert_eq!(home_relative(Path::new("/foo/bar"), None), "/foo/bar"); + } + + #[test] + fn harness_status_renders_existing_path_with_zero_sessions() { + let s = HarnessStatus { + path: "~/.claude/projects".to_string(), + exists: true, + }; + assert_eq!(s.render(), "~/.claude/projects (0 sessions)"); + } + + #[test] + fn harness_status_renders_missing_path_as_not_found() { + let s = HarnessStatus { + path: "~/.gemini/tmp".to_string(), + exists: false, + }; + assert_eq!(s.render(), "~/.gemini/tmp not found"); + } + + #[test] + fn 
format_status_line_pads_for_alignment() { + let s = HarnessStatus { + path: "~/.codex/sessions".to_string(), + exists: true, + }; + // "claude:" (7) needs 2 trailing spaces; "opencode:" (9) needs 0; + // "pi:" (3) needs 6. The visible-path column should always start at + // the same offset. + let claude_line = format_status_line("claude", &s); + let opencode_line = format_status_line("opencode", &s); + let pi_line = format_status_line("pi", &s); + let offset = |line: &str| line.find('~').unwrap(); + assert_eq!(offset(&claude_line), offset(&opencode_line)); + assert_eq!(offset(&claude_line), offset(&pi_line)); + } + + #[test] + fn harness_status_for_missing_claude_dir_reports_not_found() { + // Bundle whose claude resolver points at a directory that doesn't + // exist on disk; the status should still resolve a path and report + // it as missing rather than going through the `unresolved` branch. + let temp = TempDir::new().unwrap(); + let claude_dir = temp.path().join(".claude"); // never created + let resolver = toolpath_claude::PathResolver::new().with_claude_dir(&claude_dir); + let bundle = HarnessBundle { + claude: Some(toolpath_claude::ClaudeConvo::with_resolver(resolver)), + ..Default::default() + }; + let status = harness_status_claude(&bundle, None); + assert!(!status.exists, "missing dir must report exists=false"); + assert!( + status.path.contains("projects"), + "path must include the projects subdir (got {:?})", + status.path + ); + } + + #[test] + fn harness_status_for_present_claude_dir_reports_existence() { + let temp = TempDir::new().unwrap(); + let claude_dir = temp.path().join(".claude"); + std::fs::create_dir_all(claude_dir.join("projects")).unwrap(); + let resolver = toolpath_claude::PathResolver::new().with_claude_dir(&claude_dir); + let bundle = HarnessBundle { + claude: Some(toolpath_claude::ClaudeConvo::with_resolver(resolver)), + ..Default::default() + }; + let status = harness_status_claude(&bundle, None); + assert!(status.exists); + } + + 
#[test] + fn harness_status_for_empty_bundle_is_unresolved() { + let bundle = HarnessBundle::default(); + // Every harness slot is None, so each status hits the unresolved branch. + for status in [ + harness_status_claude(&bundle, None), + harness_status_gemini(&bundle, None), + harness_status_codex(&bundle, None), + harness_status_opencode(&bundle, None), + harness_status_pi(&bundle, None), + ] { + assert_eq!(status, HarnessStatus::unresolved()); + assert!(!status.exists); + } + } } From 1655b86fda02c56c33b9057b21d82ea1ac2c0025 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Thu, 7 May 2026 23:58:12 -0400 Subject: [PATCH 19/36] feat(path-cli): show session title in picker confirmation The 'Picked session ' line now prints the conversation title instead of the opaque session id, matching the spec. parse_picker_row returns the title alongside (harness, key, session_id). --- crates/path-cli/src/cmd_share.rs | 45 ++++++++++++++++++++++++++------ 1 file changed, 37 insertions(+), 8 deletions(-) diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index 369ce68..70fad02 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -552,7 +552,7 @@ pub fn run(args: ShareArgs) -> Result<()> { // exit 130 so it's distinguishable from a successful share. crate::fzf::PickResult::Cancelled => std::process::exit(130), }; - let (h, key, session) = parse_picker_row(&line) + let (h, key, session, title) = parse_picker_row(&line) .ok_or_else(|| anyhow::anyhow!("internal: failed to parse picker row"))?; let explicit = ShareArgs { @@ -571,7 +571,10 @@ pub fn run(args: ShareArgs) -> Result<()> { force: args.force, no_cache: args.no_cache, }; - eprintln!("Picked {} session {}", h.name(), session); + // Show the conversation title in the confirmation line; the session id + // is opaque and doesn't help the user verify they picked the right + // thing. `{:?}` adds the surrounding quotes per the spec. 
+ eprintln!("Picked {} session {:?}", h.name(), title); share_explicit(h, &session, &explicit) } @@ -810,9 +813,11 @@ fn format_picker_row(row: &SessionRow) -> String { ) } -/// Inverse of [`format_picker_row`] — pulls (harness, key, session) back -/// out of the line fzf returned. Returns `None` if the line is malformed. -fn parse_picker_row(line: &str) -> Option<(Harness, String, String)> { +/// Inverse of [`format_picker_row`] — pulls (harness, key, session, title) +/// back out of the line fzf returned. Returns `None` if the line is +/// malformed. The title is column 9 of the TSV; it lives in the visible +/// portion so it round-trips through fzf unchanged. +fn parse_picker_row(line: &str) -> Option<(Harness, String, String, String)> { let mut parts = line.split('\t'); let h = Harness::parse(parts.next()?)?; let key = parts.next()?.to_string(); @@ -820,7 +825,10 @@ fn parse_picker_row(line: &str) -> Option<(Harness, String, String)> { if session.is_empty() { return None; } - Some((h, key, session)) + // Skip cols 4..8 (symbol, when, msgs, scope, project_short) to reach + // the title at col 9. + let title = parts.nth(5).unwrap_or("").to_string(); + Some((h, key, session, title)) } fn tab_safe(s: &str) -> String { @@ -1089,10 +1097,13 @@ mod tests { matches_cwd: true, }; let line = format_picker_row(&row); - let (harness, key, session) = parse_picker_row(&line).unwrap(); + let (harness, key, session, title) = parse_picker_row(&line).unwrap(); assert_eq!(harness, Harness::Claude); assert_eq!(key, "/tmp/proj"); assert_eq!(session, "sess-abc"); + // tab_safe replaces the tab with a space, but the title content + // otherwise round-trips. 
+ assert_eq!(title, "Hello world"); } #[test] @@ -1108,10 +1119,28 @@ mod tests { matches_cwd: false, }; let line = format_picker_row(&row); - let (harness, key, session) = parse_picker_row(&line).unwrap(); + let (harness, key, session, title) = parse_picker_row(&line).unwrap(); assert_eq!(harness, Harness::Codex); assert_eq!(key, "/work/proj"); // codex has no project; cwd carried as the keyed slot assert_eq!(session, "0190abcd"); + assert_eq!(title, "(no prompt)"); + } + + #[test] + fn parse_picker_row_carries_title_with_unicode() { + let row = SessionRow { + harness: Harness::Gemini, + project: Some("/work/proj".to_string()), + cwd: None, + session_id: "11111111-2222-3333-4444-555555555555".to_string(), + title: "Add the share command — finally".to_string(), + last_activity: None, + message_count: 42, + matches_cwd: true, + }; + let line = format_picker_row(&row); + let (_, _, _, title) = parse_picker_row(&line).unwrap(); + assert_eq!(title, "Add the share command — finally"); } #[test] From 7f29f165e14fb14e8bd4e58437e679d62381b4b5 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 00:10:37 -0400 Subject: [PATCH 20/36] test(path-cli): cover logged-out anon-default share path MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Asserts the 'not logged in — uploading anonymously' stderr notice when share() falls through to anon without an explicit --anon flag. Closes a coverage gap from final review. 
--- crates/path-cli/tests/integration.rs | 31 ++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/crates/path-cli/tests/integration.rs b/crates/path-cli/tests/integration.rs index 352461c..d810cd4 100644 --- a/crates/path-cli/tests/integration.rs +++ b/crates/path-cli/tests/integration.rs @@ -874,6 +874,37 @@ fn share_no_cache_skips_write() { server.join().unwrap(); } +#[test] +fn share_logged_out_anon_default() { + // No --anon flag and no credentials file => share() falls through to the + // anonymous endpoint and emits a "not logged in — uploading anonymously" + // notice on stderr. This covers the logged-out branch in + // cmd_export::run_pathbase_inner that the explicit --anon tests skip. + let (port, server, _temp, project, home) = share_anon_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("HOME", &home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--no-cache", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success() + .stderr(predicate::str::contains("not logged in")) + .stderr(predicate::str::contains("uploading anonymously")); + + server.join().unwrap(); +} + #[test] fn share_filters_by_project_with_no_matches_errors() { let cfg = tempfile::tempdir().unwrap(); From a177fdeea6429727479f25da0796dc1388845303 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 00:10:44 -0400 Subject: [PATCH 21/36] test(path-cli): verify matches_cwd through a symlink paths_match uses canonicalize_or_self for both arguments. A symlink pointing at a project directory and the directory itself should both canonicalize to the same path, so cwd-ranking works regardless of which form the user navigated through. 
--- crates/path-cli/src/cmd_share.rs | 42 ++++++++++++++++++++++++++++++++ 1 file changed, 42 insertions(+) diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index 70fad02..2b670e4 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -1084,6 +1084,48 @@ mod tests { assert!(!rows[1].matches_cwd); } + #[test] + #[cfg(unix)] + fn paths_match_canonicalizes_through_symlink() { + // `paths_match` is the function that produces `SessionRow.matches_cwd` + // (collect_* all delegate to it). Without canonicalization, a user who + // navigated to a project via a symlink would see their cwd-row sink + // in the picker because the symlink path string ≠ the project path + // string. Verify both arguments are canonicalized. + // + // Note: we test `paths_match` directly rather than going through + // `gather_sessions` because Claude's project-dir slug encoding is + // lossy (sanitize_project_path: '/', '_', '.' → '-'; unsanitize: only + // '-' → '/'). On macOS, tempdir paths contain '.' and end up under + // /private/var/..., so the unsanitized slug never round-trips back to + // the real on-disk path. This direct test covers the canonicalization + // bug regardless of platform-specific tempdir layouts. + let temp = TempDir::new().unwrap(); + let real_project = temp.path().join("real-project"); + std::fs::create_dir_all(&real_project).unwrap(); + let symlink_path = temp.path().join("symlink-to-project"); + std::os::unix::fs::symlink(&real_project, &symlink_path).unwrap(); + + // Sanity-check the setup: the symlink and its target are different + // string-paths but resolve to the same canonical path. + assert_ne!(real_project, symlink_path); + assert_eq!( + std::fs::canonicalize(&real_project).unwrap(), + std::fs::canonicalize(&symlink_path).unwrap(), + ); + + // The actual property under test. 
+ assert!( + paths_match(&real_project, &symlink_path), + "paths_match must canonicalize both sides so symlink == target" + ); + // And symmetric. + assert!( + paths_match(&symlink_path, &real_project), + "paths_match must be symmetric across the symlink" + ); + } + #[test] fn parse_picker_row_roundtrips_keyed() { let row = SessionRow { From 052b1740cda19d4178916fb0c37736785ba01ef1 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 00:20:23 -0400 Subject: [PATCH 22/36] feat(path-cli): stack `path share` picker preview above the list The default side-by-side preview pane gets cramped on narrow terminals. Add a preview_window field to fzf::PickOptions and switch the share picker to up:60%:wrap so the session preview gets the full terminal width. Existing import pickers keep right:60%:wrap (explicit at each call site). --- crates/path-cli/src/cmd_export.rs | 5 ++- crates/path-cli/src/cmd_import.rs | 13 +++++--- crates/path-cli/src/cmd_share.rs | 47 +++++++++++++++++++++------- crates/path-cli/src/fzf.rs | 6 +++- crates/path-cli/tests/integration.rs | 17 +++++++--- 5 files changed, 64 insertions(+), 24 deletions(-) diff --git a/crates/path-cli/src/cmd_export.rs b/crates/path-cli/src/cmd_export.rs index e747ce0..db340dc 100644 --- a/crates/path-cli/src/cmd_export.rs +++ b/crates/path-cli/src/cmd_export.rs @@ -1289,9 +1289,8 @@ pub(crate) fn run_pathbase_inner( return Ok(()); } - let session = stored.ok_or_else(|| { - anyhow::anyhow!("Not logged in. Run `path auth login` or pass `--anon`.") - })?; + let session = stored + .ok_or_else(|| anyhow::anyhow!("Not logged in. 
Run `path auth login` or pass `--anon`."))?; if host_of(&base_url) != host_of(&session.url) { eprintln!( "warning: uploading to {} with a token issued by {}; expect 401 unless this is the same deployment", diff --git a/crates/path-cli/src/cmd_import.rs b/crates/path-cli/src/cmd_import.rs index 80d2f14..f25467e 100644 --- a/crates/path-cli/src/cmd_import.rs +++ b/crates/path-cli/src/cmd_import.rs @@ -516,6 +516,7 @@ fn pick_claude_in_project( prompt: "claude session> ", preview: Some("path show claude --project {1} --session {2}"), header: Some("pick a Claude session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; @@ -565,6 +566,7 @@ fn pick_claude_global( prompt: "claude session> ", preview: Some("path show claude --project {1} --session {2}"), header: Some("pick a Claude session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; @@ -730,6 +732,7 @@ fn pick_gemini_in_project( prompt: "gemini session> ", preview: Some("path show gemini --project {1} --session {2}"), header: Some("pick a Gemini session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; @@ -779,6 +782,7 @@ fn pick_gemini_global( prompt: "gemini session> ", preview: Some("path show gemini --project {1} --session {2}"), header: Some("pick a Gemini session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; @@ -907,6 +911,7 @@ fn pick_codex(manager: &toolpath_codex::CodexConvo) -> Result prompt: "codex session> ", preview: Some("path show codex --session {1}"), header: Some("pick a Codex session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; @@ -993,10 +998,7 @@ fn derive_opencode( /// Derive a single opencode session given an explicit session id. 
#[cfg(not(target_os = "emscripten"))] -pub(crate) fn derive_opencode_one( - session: &str, - no_snapshot_diffs: bool, -) -> Result { +pub(crate) fn derive_opencode_one(session: &str, no_snapshot_diffs: bool) -> Result { let manager = toolpath_opencode::OpencodeConvo::new(); let config = toolpath_opencode::derive::DeriveConfig { no_snapshot_diffs, @@ -1065,6 +1067,7 @@ fn pick_opencode( prompt: "opencode session> ", preview: Some("path show opencode --session {1}"), header: Some("pick an opencode session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; @@ -1222,6 +1225,7 @@ fn pick_pi_in_project( prompt: "pi session> ", preview: Some("path show pi --project {1} --session {2}"), header: Some("pick a Pi session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; @@ -1271,6 +1275,7 @@ fn pick_pi_global(manager: &toolpath_pi::PiConvo) -> Result ", preview: Some("path show pi --project {1} --session {2}"), header: Some("pick a Pi session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index 2b670e4..860f8a4 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -534,6 +534,10 @@ pub fn run(args: ShareArgs) -> Result<()> { with_nth: "4..", prompt: "share> ", preview: Some("path show {1} --project {2} --session {3}"), + // Stacked layout: preview above the list, list below. Fits narrow + // terminals better than the default side-by-side and gives the + // session preview the full terminal width to render `path show`. 
+ preview_window: "up:60%:wrap", header: Some(&header), tiebreak: "index", multi: false, @@ -604,7 +608,10 @@ fn pathbase_host_for_picker(args: &ShareArgs) -> String { } } -fn bail_no_sessions(bundle: &HarnessBundle, project_filter: Option<&std::path::Path>) -> Result<()> { +fn bail_no_sessions( + bundle: &HarnessBundle, + project_filter: Option<&std::path::Path>, +) -> Result<()> { if let Some(p) = project_filter { anyhow::bail!( "No agent sessions found in project {}. Run without --project to see sessions across all projects.", @@ -616,14 +623,26 @@ fn bail_no_sessions(bundle: &HarnessBundle, project_filter: Option<&std::path::P // Pad harness names so the path column lines up: "opencode:" is the // longest at 9 chars (8 + colon). let home = home_dir(); - summary.push_str(&format_status_line("claude", &harness_status_claude(bundle, home.as_deref()))); - summary.push_str(&format_status_line("gemini", &harness_status_gemini(bundle, home.as_deref()))); - summary.push_str(&format_status_line("codex", &harness_status_codex(bundle, home.as_deref()))); + summary.push_str(&format_status_line( + "claude", + &harness_status_claude(bundle, home.as_deref()), + )); + summary.push_str(&format_status_line( + "gemini", + &harness_status_gemini(bundle, home.as_deref()), + )); + summary.push_str(&format_status_line( + "codex", + &harness_status_codex(bundle, home.as_deref()), + )); summary.push_str(&format_status_line( "opencode", &harness_status_opencode(bundle, home.as_deref()), )); - summary.push_str(&format_status_line("pi", &harness_status_pi(bundle, home.as_deref()))); + summary.push_str(&format_status_line( + "pi", + &harness_status_pi(bundle, home.as_deref()), + )); eprint!("{summary}"); anyhow::bail!("no shareable sessions"); } @@ -710,7 +729,10 @@ fn harness_status_codex(bundle: &HarnessBundle, home: Option<&std::path::Path>) } } -fn harness_status_opencode(bundle: &HarnessBundle, home: Option<&std::path::Path>) -> HarnessStatus { +fn harness_status_opencode( + bundle: 
&HarnessBundle, + home: Option<&std::path::Path>, +) -> HarnessStatus { let Some(mgr) = &bundle.opencode else { return HarnessStatus::unresolved(); }; @@ -866,11 +888,9 @@ fn derive_one( Harness::Claude => { crate::cmd_import::derive_claude_pair(project.expect("project_keyed"), session) } - Harness::Gemini => crate::cmd_import::derive_gemini_pair( - project.expect("project_keyed"), - session, - false, - ), + Harness::Gemini => { + crate::cmd_import::derive_gemini_pair(project.expect("project_keyed"), session, false) + } Harness::Pi => { crate::cmd_import::derive_pi_pair(project.expect("project_keyed"), session, None) } @@ -1203,7 +1223,10 @@ mod tests { #[test] fn home_relative_passes_through_paths_outside_home() { let home = Path::new("/Users/alex"); - assert_eq!(home_relative(Path::new("/tmp/elsewhere"), Some(home)), "/tmp/elsewhere"); + assert_eq!( + home_relative(Path::new("/tmp/elsewhere"), Some(home)), + "/tmp/elsewhere" + ); } #[test] diff --git a/crates/path-cli/src/fzf.rs b/crates/path-cli/src/fzf.rs index efa3feb..056df6d 100644 --- a/crates/path-cli/src/fzf.rs +++ b/crates/path-cli/src/fzf.rs @@ -69,7 +69,7 @@ pub fn pick(lines: &[String], opts: &PickOptions<'_>) -> Result { if let Some(preview) = opts.preview { args.push(format!("--preview={}", preview)); - args.push("--preview-window=right:60%:wrap".into()); + args.push(format!("--preview-window={}", opts.preview_window)); } if let Some(header) = opts.header { @@ -119,6 +119,9 @@ pub struct PickOptions<'a> { pub prompt: &'a str, /// Optional `--preview` command. Use `{1}`, `{2}` ... to substitute fields. pub preview: Option<&'a str>, + /// `--preview-window` placement. Defaults to `right:60%:wrap` (side-by-side); + /// pass `up:60%:wrap` for a stacked layout that fits narrow terminals. + pub preview_window: &'a str, /// Optional header line shown above the list. pub header: Option<&'a str>, /// Tiebreak ordering — `index` preserves input order. 
@@ -133,6 +136,7 @@ impl Default for PickOptions<'_> { with_nth: "2..", prompt: "> ", preview: None, + preview_window: "right:60%:wrap", header: None, tiebreak: "index", multi: false, diff --git a/crates/path-cli/tests/integration.rs b/crates/path-cli/tests/integration.rs index d810cd4..7bfb3c9 100644 --- a/crates/path-cli/tests/integration.rs +++ b/crates/path-cli/tests/integration.rs @@ -740,7 +740,9 @@ fn share_explicit_args_uploads_via_anon() { .arg(format!("http://127.0.0.1:{port}")) .assert() .success() - .stdout(predicate::str::contains("https://example.test/anon/abc-123")) + .stdout(predicate::str::contains( + "https://example.test/anon/abc-123", + )) .stderr(predicate::str::contains("Uploaded")); server.join().unwrap(); @@ -749,8 +751,13 @@ fn share_explicit_args_uploads_via_anon() { /// Helper for the cache tests. Spawns a one-shot mock anon-upload server /// on a free port and returns (port, server-thread-handle, fixture-temp, /// project-path, $HOME-path). -fn share_anon_fixture() -> (u16, std::thread::JoinHandle<()>, tempfile::TempDir, PathBuf, PathBuf) -{ +fn share_anon_fixture() -> ( + u16, + std::thread::JoinHandle<()>, + tempfile::TempDir, + PathBuf, + PathBuf, +) { use std::io::{Read, Write}; use std::net::TcpListener; @@ -918,7 +925,9 @@ fn share_filters_by_project_with_no_matches_errors() { .arg(&nonexistent) .assert() .failure() - .stderr(predicate::str::contains("No agent sessions found in project")); + .stderr(predicate::str::contains( + "No agent sessions found in project", + )); } #[test] From 3684b2c70d4ebb434d99373f7e89043dfdb77b34 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 09:31:30 -0400 Subject: [PATCH 23/36] feat(path-cli): clearer auth-failure errors on `path share` Three small improvements after live-testing `path share --url `: - api_me now reads the response body up-front, distinguishes 401/403 from generic non-success and from non-JSON bodies, and names the URL + re-auth command in every branch. 
Replaces the opaque "parsing /auth/me response: column 1" that hits when the URL points at something that isn't a Pathbase deployment. - 401s from paths_post and repos_post now carry the same shape: name the URL, point at `path auth login --url `, and surface `--anon` as the no-auth fallback. - The host-mismatch warning in run_pathbase_inner is now actionable: it tells the user how to fix the situation instead of just predicting failure. Adds three short_body unit tests and updates the existing paths_post_401 assertion to the new contract (URL + re-auth hint + --anon hint). --- crates/path-cli/src/cmd_export.rs | 6 +- crates/path-cli/src/cmd_pathbase.rs | 85 +++++++++++++++++++++++++---- 2 files changed, 79 insertions(+), 12 deletions(-) diff --git a/crates/path-cli/src/cmd_export.rs b/crates/path-cli/src/cmd_export.rs index db340dc..1dc99a0 100644 --- a/crates/path-cli/src/cmd_export.rs +++ b/crates/path-cli/src/cmd_export.rs @@ -1293,8 +1293,10 @@ pub(crate) fn run_pathbase_inner( .ok_or_else(|| anyhow::anyhow!("Not logged in. 
Run `path auth login` or pass `--anon`."))?; if host_of(&base_url) != host_of(&session.url) { eprintln!( - "warning: uploading to {} with a token issued by {}; expect 401 unless this is the same deployment", - base_url, session.url + "warning: stored credentials are for {}, but you're uploading to {}.\n\ + If this fails, pass `--anon` to upload anonymously, or run\n\ + `path auth login --url {}` to authenticate against this server.", + session.url, base_url, base_url ); } diff --git a/crates/path-cli/src/cmd_pathbase.rs b/crates/path-cli/src/cmd_pathbase.rs index 94b7320..b93539b 100644 --- a/crates/path-cli/src/cmd_pathbase.rs +++ b/crates/path-cli/src/cmd_pathbase.rs @@ -149,14 +149,46 @@ pub(crate) fn api_me(base_url: &str, token: &str) -> Result { .send() .with_context(|| format!("connect to {base_url}"))?; - if resp.status() == reqwest::StatusCode::UNAUTHORIZED { - bail!("stored session is no longer valid — run `path auth login` again"); + let status = resp.status(); + let body = resp.text().unwrap_or_default(); + + if status == reqwest::StatusCode::UNAUTHORIZED || status == reqwest::StatusCode::FORBIDDEN { + bail!( + "{base_url} rejected your stored credentials ({status}). \ + Run `path auth login --url {base_url}` to authenticate against this server, \ + or pass `--anon` to upload anonymously." + ); } - if !resp.status().is_success() { - bail!("server returned {}", resp.status()); + if !status.is_success() { + bail!( + "GET {base_url}/api/v1/auth/me returned {status}: {}", + short_body(&body) + ); + } + serde_json::from_str(&body).map_err(|e| { + anyhow!( + "{base_url} returned a non-JSON response from /api/v1/auth/me ({status}): {} \ + ({e}). The URL may not be a Pathbase deployment.", + short_body(&body) + ) + }) +} + +/// Trim a response body to a single-line snippet for error messages. +/// Replaces newlines, collapses long bodies down to ~200 chars with an ellipsis. 
+fn short_body(body: &str) -> String { + const MAX: usize = 200; + let cleaned: String = body.replace(['\n', '\r'], " "); + let trimmed = cleaned.trim(); + if trimmed.is_empty() { + return "".to_string(); + } + if trimmed.chars().count() > MAX { + let head: String = trimmed.chars().take(MAX - 1).collect(); + format!("{head}…") + } else { + trimmed.to_string() } - let user: User = resp.json().context("parsing /auth/me response")?; - Ok(user) } // ── pathbase-client bridge ───────────────────────────────────────────── @@ -276,7 +308,11 @@ pub(crate) fn paths_post( }) } Err(pathbase_client::Error::ErrorResponse(resp)) => match resp.status().as_u16() { - 401 => bail!("stored session is no longer valid — run `path auth login` again"), + 401 => bail!( + "{base_url} rejected your stored credentials (HTTP 401). \ + Run `path auth login --url {base_url}` to authenticate against this server, \ + or pass `--anon` to upload anonymously." + ), code => bail!("upload to {owner}/{repo} failed (HTTP {code})"), }, Err(pathbase_client::Error::UnexpectedResponse(resp)) => { @@ -311,7 +347,11 @@ pub(crate) fn repos_post(base_url: &str, token: &str, name: &str) -> Result<()> match block_on(client.create_repo(&body)) { Ok(_) => Ok(()), Err(pathbase_client::Error::ErrorResponse(resp)) => match resp.status().as_u16() { - 401 => bail!("stored session is no longer valid — run `path auth login` again"), + 401 => bail!( + "{base_url} rejected your stored credentials (HTTP 401). \ + Run `path auth login --url {base_url}` to authenticate against this server, \ + or pass `--anon` to upload anonymously." 
+ ), 409 => Ok(()), code => bail!("creating repo {name} failed (HTTP {code})"), }, @@ -447,6 +487,25 @@ mod tests { assert_eq!(got, "https://example.com"); } + #[test] + fn short_body_handles_empty_and_whitespace() { + assert_eq!(short_body(""), ""); + assert_eq!(short_body(" \n\t "), ""); + } + + #[test] + fn short_body_collapses_newlines_to_spaces() { + assert_eq!(short_body("line1\nline2\r\nline3"), "line1 line2 line3"); + } + + #[test] + fn short_body_truncates_long_input_with_ellipsis() { + let long = "x".repeat(500); + let s = short_body(&long); + assert_eq!(s.chars().count(), 200); + assert!(s.ends_with('…')); + } + #[test] fn store_then_load_roundtrips_on_disk() { let dir = tempfile::tempdir().unwrap(); @@ -621,9 +680,15 @@ mod tests { #[test] fn paths_post_401_surfaces_relogin_message() { let server = MockServer::start("HTTP/1.1 401 Unauthorized", r#"{"error":"bad"}"#); + let base = server.base(); let err = - paths_post(&server.base(), "tok", "alex", "pathstash", "s", "{}", false).unwrap_err(); - assert!(err.to_string().contains("run `path auth login`")); + paths_post(&base, "tok", "alex", "pathstash", "s", "{}", false).unwrap_err(); + let msg = err.to_string(); + // Should name the URL the credentials are being rejected by, point at + // `path auth login --url`, and offer `--anon` as the bypass. + assert!(msg.contains(&base), "expected base URL in error: {msg}"); + assert!(msg.contains("path auth login --url"), "expected re-auth hint: {msg}"); + assert!(msg.contains("--anon"), "expected --anon hint: {msg}"); } #[test] From cafa1bb2422231e36a4eb1e133d493a41c5067bc Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 09:47:40 -0400 Subject: [PATCH 24/36] feat(path-cli): pre-flight Pathbase auth and fall back to anon on failure MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Live testing turned up two related UX problems on `path share`: 1. 
The credentials check ran *after* the picker, so picking a session against a `--url <url>` whose credentials don't apply meant doing a bunch of work just to fail at upload time. 2. A 401 was a hard error even when the user had no `--repo`/`--public`/ `--slug` flags forcing authed mode — anonymous would have worked just fine. Refactor: - Add `cmd_pathbase::AuthMode { Anon, Authed }` and `preflight_auth` that resolves credentials up front with a real `api_me` probe. On a probe failure with no auth-requiring flags it falls back to `Anon` with a stderr notice; with auth flags it propagates the error so the user knows their explicit request can't be satisfied. - `cmd_share::run` now runs preflight before the picker. On `--url <url>` with mismatched creds, the failure now happens before any session is picked, derived, or cached. - `cmd_export::run_pathbase_inner` now takes a pre-resolved AuthMode + base_url and stops doing its own credentials probing — so cmd_share and cmd_export share the same authed-vs-anon decision logic. - `host_of` and the supporting test moved to cmd_pathbase next to the rest of the URL/auth helpers; cmd_share's `pathbase_host_for_picker` collapses into the new `resolve_upload_base_url`. Adds 5 preflight unit tests (+5, total 233 lib + 38 integration). 
--- crates/path-cli/src/cmd_export.rs | 145 ++++++----------- crates/path-cli/src/cmd_pathbase.rs | 237 ++++++++++++++++++++++++++++ crates/path-cli/src/cmd_share.rs | 60 ++++--- 3 files changed, 324 insertions(+), 118 deletions(-) diff --git a/crates/path-cli/src/cmd_export.rs b/crates/path-cli/src/cmd_export.rs index 1dc99a0..2fecfe5 100644 --- a/crates/path-cli/src/cmd_export.rs +++ b/crates/path-cli/src/cmd_export.rs @@ -1221,6 +1221,8 @@ fn run_pathbase(args: PathbaseExportArgs) -> Result<()> { #[cfg(not(target_os = "emscripten"))] { + use crate::cmd_pathbase::preflight_auth; + let file = cache_ref(&args.input)?; let body = std::fs::read_to_string(&file) .with_context(|| format!("Failed to read {}", file.display()))?; @@ -1231,97 +1233,82 @@ fn run_pathbase(args: PathbaseExportArgs) -> Result<()> { slug: args.slug, public: args.public, }; + let base_url = resolve_upload_base_url(&upload); + let needs_auth = upload.repo.is_some() || upload.public || upload.slug.is_some(); + let auth = preflight_auth(&base_url, upload.anon, needs_auth)?; let summary_source = file.display().to_string(); - run_pathbase_inner(upload, &body, &summary_source) + run_pathbase_inner(auth, base_url, upload, &body, &summary_source) + } +} + +/// Resolve the upload target URL from the CLI flag, the stored session, +/// or the default. Mirrors the order used inside `run_pathbase_inner` so +/// `cmd_share`'s pre-flight resolution agrees with the eventual upload. 
+#[cfg(not(target_os = "emscripten"))] +pub(crate) fn resolve_upload_base_url(args: &PathbaseUploadArgs) -> String { + use crate::cmd_pathbase::{credentials_path, load_session, resolve_url}; + + if let Some(u) = &args.url { + return resolve_url(Some(u.clone())); + } + if let Ok(path) = credentials_path() + && let Ok(Some(s)) = load_session(&path) + { + return s.url; } + resolve_url(None) } #[cfg(not(target_os = "emscripten"))] pub(crate) fn run_pathbase_inner( + auth: crate::cmd_pathbase::AuthMode, + base_url: String, args: PathbaseUploadArgs, body: &str, summary_source: &str, ) -> Result<()> { - use crate::cmd_pathbase::{ - anon_paths_post, api_me, credentials_path, load_session, paths_post, repos_post, - resolve_url, - }; + use crate::cmd_pathbase::{AuthMode, anon_paths_post, paths_post, repos_post}; // Validate locally so we give a clean error rather than relying on // the server to reject malformed payloads. let doc = toolpath::v1::Graph::from_json(body) .map_err(|e| anyhow::anyhow!("Invalid toolpath document: {}", e))?; - let stored = load_session(&credentials_path()?)?; - let base_url = match (&args.url, &stored) { - (Some(u), _) => resolve_url(Some(u.clone())), - (None, Some(s)) => s.url.clone(), - (None, None) => resolve_url(None), - }; - - // Anonymous mode: explicit --anon, or no credentials at all and no - // override flags steering us toward an authed endpoint. - let go_anon = args.anon || (stored.is_none() && args.repo.is_none() && args.slug.is_none()); - - if go_anon { - if !args.anon && stored.is_none() { + let (token, username) = match auth { + AuthMode::Anon => { + let resp = anon_paths_post(&base_url, body)?; + let printable = if resp.url.starts_with("http://") || resp.url.starts_with("https://") + { + resp.url.clone() + } else if resp.url.starts_with('/') { + format!("{base_url}{}", resp.url) + } else { + format!("{base_url}/{}", resp.url) + }; + println!("{printable}"); eprintln!( - "note: not logged in — uploading anonymously (not listable). 
Run `path auth login --url {base_url}` for a listable upload." + "Uploaded {} → anon path {} ({} bytes)", + summary_source, + resp.id, + body.len() ); + return Ok(()); } - let resp = anon_paths_post(&base_url, body)?; - // Server returns either a full URL or a path-only string; in the - // latter case prefix the base so the user gets a clickable link. - let printable = if resp.url.starts_with("http://") || resp.url.starts_with("https://") { - resp.url.clone() - } else if resp.url.starts_with('/') { - format!("{base_url}{}", resp.url) - } else { - format!("{base_url}/{}", resp.url) - }; - println!("{printable}"); - eprintln!( - "Uploaded {} → anon path {} ({} bytes)", - summary_source, - resp.id, - body.len() - ); - return Ok(()); - } - - let session = stored - .ok_or_else(|| anyhow::anyhow!("Not logged in. Run `path auth login` or pass `--anon`."))?; - if host_of(&base_url) != host_of(&session.url) { - eprintln!( - "warning: stored credentials are for {}, but you're uploading to {}.\n\ - If this fails, pass `--anon` to upload anonymously, or run\n\ - `path auth login --url {}` to authenticate against this server.", - session.url, base_url, base_url - ); - } + AuthMode::Authed { token, username } => (token, username), + }; let (owner, repo) = match args.repo { Some(spec) => (spec.owner, spec.name), None => { - // Pathstash default: own the repo "pathstash" under our username, - // creating it on demand. api_me is the source of truth for the - // username (display name in stored.user can drift). - let user = api_me(&base_url, &session.token)?; - repos_post(&base_url, &session.token, "pathstash")?; - (user.username, "pathstash".to_string()) + // Pathstash default: own the repo "pathstash" under the username + // we resolved during preflight. Create it on demand. 
+ repos_post(&base_url, &token, "pathstash")?; + (username, "pathstash".to_string()) } }; let slug = args.slug.unwrap_or_else(|| derive_slug(&doc)); - let created = paths_post( - &base_url, - &session.token, - &owner, - &repo, - &slug, - body, - args.public, - )?; + let created = paths_post(&base_url, &token, &owner, &repo, &slug, body, args.public)?; // The visibility we surface is what the server actually applied, // not what we requested. If a server-side policy ever clamps @@ -1423,21 +1410,6 @@ fn derive_slug(doc: &toolpath::v1::Graph) -> String { format!("path-{}", &hex[..12]) } -/// Extract `scheme://host[:port]` from a URL, dropping any path/query. -/// Returns the input unchanged if it doesn't look like a URL. -#[cfg(not(target_os = "emscripten"))] -fn host_of(url: &str) -> &str { - let after_scheme = match url.find("://") { - Some(i) => i + 3, - None => return url, - }; - // Find the next `/` after the scheme://; everything before it is host[:port]. - match url[after_scheme..].find('/') { - Some(off) => &url[..after_scheme + off], - None => url, - } -} - #[cfg(all(test, not(target_os = "emscripten")))] mod tests { use super::*; @@ -1581,21 +1553,6 @@ mod tests { assert!(err.to_string().contains("parse") || err.to_string().contains("Failed")); } - #[test] - fn host_of_strips_path() { - assert_eq!(host_of("https://pathbase.dev"), "https://pathbase.dev"); - assert_eq!(host_of("https://pathbase.dev/"), "https://pathbase.dev"); - assert_eq!( - host_of("https://pathbase.dev/api/v1/traces"), - "https://pathbase.dev" - ); - assert_eq!( - host_of("http://127.0.0.1:9000/foo"), - "http://127.0.0.1:9000" - ); - assert_eq!(host_of("not-a-url"), "not-a-url"); - } - #[test] fn gemini_writes_resume_ready_layout() { // End-to-end: a path doc whose conversation.append carries a diff --git a/crates/path-cli/src/cmd_pathbase.rs b/crates/path-cli/src/cmd_pathbase.rs index b93539b..634a805 100644 --- a/crates/path-cli/src/cmd_pathbase.rs +++ 
b/crates/path-cli/src/cmd_pathbase.rs @@ -71,6 +71,21 @@ pub(crate) fn resolve_url(cli_url: Option) -> String { raw.trim_end_matches('/').to_string() } +/// Extract `scheme://host[:port]` from a URL, dropping any path/query. +/// Returns the input unchanged if it doesn't look like a URL. Used to +/// compare a stored session's host against the upload target so we can +/// warn / fall back when the two don't agree. +pub(crate) fn host_of(url: &str) -> &str { + let after_scheme = match url.find("://") { + Some(i) => i + 3, + None => return url, + }; + match url[after_scheme..].find('/') { + Some(off) => &url[..after_scheme + off], + None => url, + } +} + pub(crate) fn prompt_line(prompt: &str) -> Result { use std::io::{BufRead, Write}; let mut stdout = std::io::stdout(); @@ -174,6 +189,85 @@ pub(crate) fn api_me(base_url: &str, token: &str) -> Result { }) } +/// Pre-resolved upload mode. Produced by [`preflight_auth`] before any +/// expensive work (session pickers, cache writes, derive passes) so that +/// callers can fail fast or fall back to anonymous mode without making +/// the user select a session and *then* discover the credentials are bad. +#[derive(Debug)] +pub(crate) enum AuthMode { + /// Use the public anonymous endpoint. No credentials required; + /// 5 MB cap and rate-limited. + Anon, + /// Use the authenticated endpoint. Credentials have already been + /// validated against the target server via `api_me`. + Authed { token: String, username: String }, +} + +/// Probe credentials and decide whether the upload should go authed or +/// anonymous, *before* any picker/derive/cache work. Behavior: +/// +/// - `--anon` → `Anon`, no credentials check. +/// - No stored credentials and no auth-requiring flags → `Anon` with the +/// "not logged in — uploading anonymously" notice. +/// - Stored credentials present → call `api_me` against the target URL. +/// - On success → `Authed { token, username }`. 
+/// - On failure with no auth-requiring flags (`--repo`/`--public`/`--slug`) +/// → fall back to `Anon` with a stderr notice explaining why. +/// - On failure with auth-requiring flags → propagate the error so the +/// user knows their explicit request can't be satisfied. +/// +/// `host_of(base_url) != host_of(stored.url)` triggers an advisory warning +/// before the credentials probe so the user sees the mismatch even if +/// `api_me` happens to succeed. +pub(crate) fn preflight_auth( + base_url: &str, + anon: bool, + needs_auth: bool, +) -> Result { + if anon { + return Ok(AuthMode::Anon); + } + let stored = load_session(&credentials_path()?)?; + + let go_anon = stored.is_none() && !needs_auth; + if go_anon { + eprintln!( + "note: not logged in — uploading anonymously (not listable). \ + Run `path auth login --url {base_url}` for a listable upload." + ); + return Ok(AuthMode::Anon); + } + + let session = match stored { + Some(s) => s, + None => bail!("Not logged in. Run `path auth login` or pass `--anon`."), + }; + + if host_of(base_url) != host_of(&session.url) { + eprintln!( + "warning: stored credentials are for {}, but you're uploading to {}.", + session.url, base_url + ); + } + + match api_me(base_url, &session.token) { + Ok(user) => Ok(AuthMode::Authed { + token: session.token, + username: user.username, + }), + Err(e) if needs_auth => Err(e.context( + "--repo / --public / --slug require an authenticated upload, so falling back \ + to anonymous wasn't an option. Drop those flags to upload anonymously.", + )), + Err(e) => { + eprintln!( + "note: authenticated upload not available — falling back to anonymous.\n reason: {e}" + ); + Ok(AuthMode::Anon) + } + } +} + /// Trim a response body to a single-line snippet for error messages. /// Replaces newlines, collapses long bodies down to ~200 chars with an ellipsis. 
fn short_body(body: &str) -> String { @@ -487,6 +581,21 @@ mod tests { assert_eq!(got, "https://example.com"); } + #[test] + fn host_of_strips_path() { + assert_eq!(host_of("https://pathbase.dev"), "https://pathbase.dev"); + assert_eq!(host_of("https://pathbase.dev/"), "https://pathbase.dev"); + assert_eq!( + host_of("https://pathbase.dev/api/v1/traces"), + "https://pathbase.dev" + ); + assert_eq!( + host_of("http://127.0.0.1:9000/foo"), + "http://127.0.0.1:9000" + ); + assert_eq!(host_of("not-a-url"), "not-a-url"); + } + #[test] fn short_body_handles_empty_and_whitespace() { assert_eq!(short_body(""), ""); @@ -766,4 +875,132 @@ mod tests { .unwrap_err(); assert!(err.to_string().contains("not found")); } + + // ── preflight_auth ──────────────────────────────────────────────── + // + // The preflight is the gate that decides authed-vs-anon BEFORE the + // share picker runs, so a credential rejection shouldn't make the + // user pick a session and *then* fail. These tests use + // TOOLPATH_CONFIG_DIR + a tempdir-credentials file to drive the + // logged-in path through the same MockServer used elsewhere. + + fn write_credentials(dir: &std::path::Path, url: &str) { + let creds = StoredSession { + url: url.to_string(), + token: "tok".into(), + user: User { + id: "u1".into(), + username: "alice".into(), + email: None, + display_name: None, + avatar_url: None, + }, + }; + store_session(&dir.join(CREDENTIALS_FILE), &creds).unwrap(); + } + + fn me_response_body(username: &str) -> String { + format!(r#"{{"id":"u1","username":"{username}"}}"#) + } + + /// Cleared TOOLPATH_CONFIG_DIR + no `--anon` + no auth-requiring flags + /// → preflight returns Anon with the "not logged in" notice. 
+ #[test] + fn preflight_anon_when_logged_out_and_no_auth_flags() { + let cfg = tempfile::tempdir().unwrap(); + let _g = EnvGuard::set("TOOLPATH_CONFIG_DIR", cfg.path().to_str().unwrap()); + let mode = preflight_auth("https://pathbase.dev", false, false).unwrap(); + assert!(matches!(mode, AuthMode::Anon)); + } + + /// Stored credentials AND host matches AND api_me succeeds → Authed. + #[test] + fn preflight_authed_when_credentials_validate() { + let server = MockServer::start("HTTP/1.1 200 OK", Box::leak(me_response_body("alice").into_boxed_str())); + let cfg = tempfile::tempdir().unwrap(); + let _g = EnvGuard::set("TOOLPATH_CONFIG_DIR", cfg.path().to_str().unwrap()); + write_credentials(cfg.path(), &server.base()); + let base = server.base(); + let mode = preflight_auth(&base, false, false).unwrap(); + match mode { + AuthMode::Authed { username, .. } => assert_eq!(username, "alice"), + AuthMode::Anon => panic!("expected Authed, got Anon"), + } + } + + /// Stored credentials but api_me rejects with 401 + no auth-requiring + /// flags → fall back to Anon (don't error). + #[test] + fn preflight_falls_back_to_anon_on_401_without_auth_flags() { + let server = MockServer::start("HTTP/1.1 401 Unauthorized", "{}"); + let cfg = tempfile::tempdir().unwrap(); + let _g = EnvGuard::set("TOOLPATH_CONFIG_DIR", cfg.path().to_str().unwrap()); + write_credentials(cfg.path(), &server.base()); + let base = server.base(); + let mode = preflight_auth(&base, false, false).unwrap(); + assert!(matches!(mode, AuthMode::Anon)); + } + + /// Stored credentials but api_me rejects + needs_auth=true → propagate + /// the error so the user knows --repo/--public/--slug can't be honored. 
+ #[test] + fn preflight_propagates_401_when_auth_required() { + let server = MockServer::start("HTTP/1.1 401 Unauthorized", "{}"); + let cfg = tempfile::tempdir().unwrap(); + let _g = EnvGuard::set("TOOLPATH_CONFIG_DIR", cfg.path().to_str().unwrap()); + write_credentials(cfg.path(), &server.base()); + let base = server.base(); + let err = preflight_auth(&base, false, true).unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("--repo"), "expected mention of --repo: {msg}"); + } + + /// `--anon` short-circuits past every check. + #[test] + fn preflight_anon_flag_skips_credentials_check() { + // Even with valid credentials in place, --anon returns Anon without + // calling api_me (no MockServer needed — would 404). + let cfg = tempfile::tempdir().unwrap(); + let _g = EnvGuard::set("TOOLPATH_CONFIG_DIR", cfg.path().to_str().unwrap()); + write_credentials(cfg.path(), "https://pathbase.dev"); + let mode = preflight_auth("https://pathbase.dev", true, false).unwrap(); + assert!(matches!(mode, AuthMode::Anon)); + } + + /// Test-helper guard for `std::env::set_var`. Restores the prior value + /// on drop so tests don't leak state. Tests touching env vars run + /// serially because `cargo test` shares process env across the suite; + /// the existing pathbase tests don't depend on TOOLPATH_CONFIG_DIR so + /// this guard's blast radius is just the preflight tests above. + struct EnvGuard { + key: String, + prior: Option, + } + impl EnvGuard { + fn set(key: &str, val: &str) -> Self { + let prior = std::env::var_os(key); + // SAFETY: tests are single-threaded with respect to each other + // for the env vars these guards control; the cargo test harness + // runs them concurrently across env vars but the only env var + // these tests touch is TOOLPATH_CONFIG_DIR, and no other tests + // in this crate touch it. 
+ unsafe { + std::env::set_var(key, val); + } + Self { + key: key.to_string(), + prior, + } + } + } + impl Drop for EnvGuard { + fn drop(&mut self) { + unsafe { + match &self.prior { + Some(v) => std::env::set_var(&self.key, v), + None => std::env::remove_var(&self.key), + } + } + } + } } diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index 860f8a4..9c76757 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -500,13 +500,30 @@ fn is_not_found_opencode(err: &toolpath_opencode::ConvoError) -> bool { pub fn run(args: ShareArgs) -> Result<()> { let harness = args.harness.map(Harness::from_arg); - if let (Some(h), Some(session)) = (harness, &args.session) { - return share_explicit(h, session.as_str(), &args); - } if args.session.is_some() && harness.is_none() { anyhow::bail!("--session requires --harness"); } + // Build upload args + base URL once and reuse for both the explicit + // path and the picker path. `needs_auth` decides whether preflight + // can fall back to anon on credential failure. + let upload_args = crate::cmd_export::PathbaseUploadArgs { + url: args.url.clone(), + anon: args.anon, + repo: args.repo.clone(), + slug: args.slug.clone(), + public: args.public, + }; + let base_url = crate::cmd_export::resolve_upload_base_url(&upload_args); + let needs_auth = upload_args.repo.is_some() || upload_args.public || upload_args.slug.is_some(); + + if let (Some(h), Some(session)) = (harness, &args.session) { + // Explicit-args: validate creds before derive so a credential + // failure doesn't waste the derive/cache work. 
+ let auth = crate::cmd_pathbase::preflight_auth(&base_url, upload_args.anon, needs_auth)?; + return share_explicit(h, session.as_str(), &args, auth, base_url); + } + let cwd = std::env::current_dir()?; let bundle = HarnessBundle::from_environment(); let project_filter = args.project.as_deref(); @@ -527,9 +544,14 @@ pub fn run(args: ShareArgs) -> Result<()> { anyhow::bail!("fzf unavailable; run `path import ` then `path export pathbase`"); } + // We have rows AND fzf available — now validate credentials before + // making the user pick a session. If preflight returns Anon (either + // explicit --anon, no creds + no auth flags, or auth probe failed + // and fell back), the picker still fires with that knowledge baked in. + let auth = crate::cmd_pathbase::preflight_auth(&base_url, upload_args.anon, needs_auth)?; + let lines: Vec = rows.iter().map(format_picker_row).collect(); - let host = pathbase_host_for_picker(&args); - let header = format!("share an agent session (Enter = upload to {host})"); + let header = format!("share an agent session (Enter = upload to {base_url})"); let opts = crate::fzf::PickOptions { with_nth: "4..", prompt: "share> ", @@ -579,7 +601,7 @@ pub fn run(args: ShareArgs) -> Result<()> { // is opaque and doesn't help the user verify they picked the right // thing. `{:?}` adds the surrounding quotes per the spec. eprintln!("Picked {} session {:?}", h.name(), title); - share_explicit(h, &session, &explicit) + share_explicit(h, &session, &explicit, auth, base_url) } fn harness_to_arg(h: Harness) -> HarnessArg { @@ -592,22 +614,6 @@ fn harness_to_arg(h: Harness) -> HarnessArg { } } -fn pathbase_host_for_picker(args: &ShareArgs) -> String { - use crate::cmd_pathbase::resolve_url; - if let Some(u) = &args.url { - return resolve_url(Some(u.clone())); - } - // Best-effort: if there's a stored session, surface its URL; otherwise fall back to default. 
- let path = match crate::cmd_pathbase::credentials_path() { - Ok(p) => p, - Err(_) => return resolve_url(None), - }; - match crate::cmd_pathbase::load_session(&path) { - Ok(Some(s)) => s.url, - _ => resolve_url(None), - } -} - fn bail_no_sessions( bundle: &HarnessBundle, project_filter: Option<&std::path::Path>, @@ -772,7 +778,13 @@ fn home_relative(path: &std::path::Path, home: Option<&std::path::Path>) -> Stri path.display().to_string() } -fn share_explicit(harness: Harness, session: &str, args: &ShareArgs) -> Result<()> { +fn share_explicit( + harness: Harness, + session: &str, + args: &ShareArgs, + auth: crate::cmd_pathbase::AuthMode, + base_url: String, +) -> Result<()> { let project = match (harness.project_keyed(), args.project.as_ref()) { (true, Some(p)) => Some(p.to_string_lossy().into_owned()), (true, None) => anyhow::bail!( @@ -803,7 +815,7 @@ fn share_explicit(harness: Harness, session: &str, args: &ShareArgs) -> Result<( slug: args.slug.clone(), public: args.public, }; - crate::cmd_export::run_pathbase_inner(upload, &body, &summary) + crate::cmd_export::run_pathbase_inner(auth, base_url, upload, &body, &summary) } /// Build the TSV line fed to fzf. Cols 1–3 are hidden (harness/key/session, From 4004eeaafe66c088396fb9bedfa02e5fd6de5fed Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 10:25:37 -0400 Subject: [PATCH 25/36] fix(path-cli): reuse existing cache entry on `path share` re-run MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit `path import` errors on cache-id collision because the user told it to import. `path share` is different — the cache write is incidental (upload uses the in-memory body), so colliding with a prior aborted share shouldn't block the upload that triggered the second run. When the cache entry for the derived id already exists and `--force` isn't set, share now logs "Reusing cached session → " and proceeds to upload from memory. 
`--force` still overwrites; explicit `--no-cache` still skips the cache entirely. Caught live: an earlier share against pathbase-dev had aborted mid-upload, leaving a cache entry that then blocked the retry with "cache entry … already exists; pass --force to overwrite" — even though the retry's whole purpose was to send the same data. --- crates/path-cli/src/cmd_share.rs | 32 +++++++++++++++++++++++++------- 1 file changed, 25 insertions(+), 7 deletions(-) diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index 9c76757..9a10498 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -798,13 +798,31 @@ fn share_explicit( let summary = format!("{} session {}", harness.name(), derived.cache_id); if !args.no_cache { - let path = crate::cmd_cache::write_cached(&derived.cache_id, &derived.doc, args.force)?; - eprintln!( - "Imported {} session → {} ({})", - harness.name(), - derived.cache_id, - path.display() - ); + // Cache write is incidental for share — the upload uses the + // in-memory body. If an entry with this id already exists from + // a prior run, reuse it (with a notice) instead of hard-failing + // the way `path import` does. `--force` still overwrites. 
+ let existing = crate::cmd_cache::cache_path(&derived.cache_id) + .ok() + .filter(|p| p.exists()); + match (existing, args.force) { + (Some(path), false) => eprintln!( + "Reusing cached {} session → {} ({}); pass --force to overwrite", + harness.name(), + derived.cache_id, + path.display() + ), + _ => { + let path = + crate::cmd_cache::write_cached(&derived.cache_id, &derived.doc, args.force)?; + eprintln!( + "Imported {} session → {} ({})", + harness.name(), + derived.cache_id, + path.display() + ); + } + } } let body = derived.doc.to_json()?; From 15c3e2415922fa267d96d5baf1c172ca89bdc073 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 10:28:40 -0400 Subject: [PATCH 26/36] fix(path-cli): accept `share_url` / `path` from anon-upload response MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit pathbase-dev returns `{"id", "share_url", "path"}` for successful anon uploads while the OpenAPI spec the generated `pathbase-client` was built against says `{"id", "url"}`. progenitor's strict response decode rejects the deployment's response with Error: anon upload failed: Invalid Response Payload: missing field `url` — even though the upload itself succeeded. Bypass the generated client for this endpoint (same approach as `paths_download`) and accept any of `share_url` / `url` / `path` as the location field. Adds three regression tests: share_url-only, path-only, and a body-in-error check on 5xx. 
--- crates/path-cli/src/cmd_pathbase.rs | 145 +++++++++++++++++++++------- 1 file changed, 111 insertions(+), 34 deletions(-) diff --git a/crates/path-cli/src/cmd_pathbase.rs b/crates/path-cli/src/cmd_pathbase.rs index 634a805..407d9d9 100644 --- a/crates/path-cli/src/cmd_pathbase.rs +++ b/crates/path-cli/src/cmd_pathbase.rs @@ -219,11 +219,7 @@ pub(crate) enum AuthMode { /// `host_of(base_url) != host_of(stored.url)` triggers an advisory warning /// before the credentials probe so the user sees the mismatch even if /// `api_me` happens to succeed. -pub(crate) fn preflight_auth( - base_url: &str, - anon: bool, - needs_auth: bool, -) -> Result { +pub(crate) fn preflight_auth(base_url: &str, anon: bool, needs_auth: bool) -> Result { if anon { return Ok(AuthMode::Anon); } @@ -342,33 +338,70 @@ fn parse_document(json: &str) -> Result Result { - let body = pathbase_client::types::AnonUploadBody { - document: parse_document(document_json)?, - }; - let client = pathbase_client(base_url, None)?; - match block_on(client.create_anon_path(&body)) { - Ok(resp) => { - let inner = resp.into_inner(); - Ok(AnonUploadResponse { - id: inner.id, - url: inner.url, - }) - } - Err(pathbase_client::Error::ErrorResponse(resp)) => match resp.status().as_u16() { - 413 => bail!( - "anon upload exceeds the 5 MB cap — log in (`path auth login`) for a listable upload without that limit" - ), - 429 => bail!("anon upload rate-limited; retry shortly or log in"), - code => bail!("anon upload failed (HTTP {code})"), - }, - Err(pathbase_client::Error::UnexpectedResponse(resp)) => { - bail!( - "anon upload returned unexpected status: HTTP {}", - resp.status() + let body = serde_json::json!({ + "document": parse_document(document_json)?, + }); + let client = http_client()?; + let resp = client + .post(format!("{base_url}/api/v1/anon/paths")) + .json(&body) + .send() + .with_context(|| format!("connect to {base_url}"))?; + + let status = resp.status(); + let text = resp.text().unwrap_or_default(); + + 
if status.is_success() { + let v: serde_json::Value = serde_json::from_str(&text).map_err(|e| { + anyhow!( + "anon upload returned non-JSON ({status}): {} ({e})", + short_body(&text) ) - } - Err(e) => Err(anyhow!("anon upload failed: {e}")), + })?; + let id = v + .get("id") + .and_then(|x| x.as_str()) + .map(String::from) + .ok_or_else(|| { + anyhow!( + "anon upload response missing `id`: {}", + short_body(&text) + ) + })?; + // Server-shape compat: production currently returns `url`, but + // pathbase-dev returns `share_url` + `path`. Accept any. + let url = v + .get("share_url") + .or_else(|| v.get("url")) + .or_else(|| v.get("path")) + .and_then(|x| x.as_str()) + .map(String::from) + .ok_or_else(|| { + anyhow!( + "anon upload response missing `share_url` / `url` / `path`: {}", + short_body(&text) + ) + })?; + return Ok(AnonUploadResponse { id, url }); + } + + match status.as_u16() { + 413 => bail!( + "anon upload exceeds the 5 MB cap — log in (`path auth login`) for a listable upload without that limit" + ), + 429 => bail!("anon upload rate-limited; retry shortly or log in"), + code => bail!("anon upload failed (HTTP {code}): {}", short_body(&text)), } } @@ -790,13 +823,15 @@ mod tests { fn paths_post_401_surfaces_relogin_message() { let server = MockServer::start("HTTP/1.1 401 Unauthorized", r#"{"error":"bad"}"#); let base = server.base(); - let err = - paths_post(&base, "tok", "alex", "pathstash", "s", "{}", false).unwrap_err(); + let err = paths_post(&base, "tok", "alex", "pathstash", "s", "{}", false).unwrap_err(); let msg = err.to_string(); // Should name the URL the credentials are being rejected by, point at // `path auth login --url`, and offer `--anon` as the bypass. 
assert!(msg.contains(&base), "expected base URL in error: {msg}"); - assert!(msg.contains("path auth login --url"), "expected re-auth hint: {msg}"); + assert!( + msg.contains("path auth login --url"), + "expected re-auth hint: {msg}" + ); assert!(msg.contains("--anon"), "expected --anon hint: {msg}"); } @@ -838,6 +873,45 @@ mod tests { assert!(err.to_string().contains("path auth login"), "{err}"); } + /// Server-shape compat: pathbase-dev returns `share_url` + `path` + /// (no `url` key) for anon uploads. Production / older deployments + /// may still return `url`. Accept all three so the CLI works against + /// both surfaces. Regression: a strict response decode here used to + /// fail successful anon uploads with "missing field `url`". + #[test] + fn anon_paths_post_accepts_share_url_field() { + let server = MockServer::start( + "HTTP/1.1 201 Created", + r#"{"id":"xyz","share_url":"https://pathbase-dev.example/anon/xyz","path":"/anon/pathstash/paths/xyz"}"#, + ); + let resp = anon_paths_post(&server.base(), r#"{"Step":{}}"#).unwrap(); + assert_eq!(resp.id, "xyz"); + assert_eq!(resp.url, "https://pathbase-dev.example/anon/xyz"); + } + + #[test] + fn anon_paths_post_accepts_path_only_when_url_fields_missing() { + let server = MockServer::start( + "HTTP/1.1 201 Created", + r#"{"id":"abc","path":"/anon/pathstash/paths/abc"}"#, + ); + let resp = anon_paths_post(&server.base(), r#"{"Step":{}}"#).unwrap(); + assert_eq!(resp.id, "abc"); + assert_eq!(resp.url, "/anon/pathstash/paths/abc"); + } + + #[test] + fn anon_paths_post_includes_body_in_5xx_error() { + let server = MockServer::start( + "HTTP/1.1 503 Service Unavailable", + r#"{"error":"db down"}"#, + ); + let err = anon_paths_post(&server.base(), r#"{"Step":{}}"#).unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("503"), "expected status code: {msg}"); + assert!(msg.contains("db down"), "expected body in error: {msg}"); + } + #[test] fn repos_post_treats_409_as_success() { let server = 
MockServer::start("HTTP/1.1 409 Conflict", r#"{"error":"already exists"}"#); @@ -916,7 +990,10 @@ mod tests { /// Stored credentials AND host matches AND api_me succeeds → Authed. #[test] fn preflight_authed_when_credentials_validate() { - let server = MockServer::start("HTTP/1.1 200 OK", Box::leak(me_response_body("alice").into_boxed_str())); + let server = MockServer::start( + "HTTP/1.1 200 OK", + Box::leak(me_response_body("alice").into_boxed_str()), + ); let cfg = tempfile::tempdir().unwrap(); let _g = EnvGuard::set("TOOLPATH_CONFIG_DIR", cfg.path().to_str().unwrap()); write_credentials(cfg.path(), &server.base()); From 54d5d73b9507bb18b92778e3c185ebb8af83afe4 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 10:34:15 -0400 Subject: [PATCH 27/36] fix(path-cli): always rewrite share cache so it matches the upload MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The previous "reuse existing cache entry" behavior was wrong: the upload always uses the freshly-derived in-memory body, so when a conversation has grown since the prior share, the upload would contain the new turns while the cache file would still hold the older version. Cache and upload would silently disagree. Right semantics for `path share`: the cache reflects what was just uploaded. The user invoking share is asking to ship the current state of the session, and the cache is incidental disk persistence of that exact payload — so always overwrite. Drops the --force flag from share's CLI surface (it's a no-op now; overwriting is the default and only behavior). --no-cache still skips the cache write entirely. Adds an integration test that locks in the contract: derive a 2-turn session, share it, append 2 more turns to the JSONL, share again, assert the cache file now contains the new turn. 
--- crates/path-cli/src/cmd_share.rs | 44 ++++------- crates/path-cli/tests/integration.rs | 110 +++++++++++++++++++++++++++ 2 files changed, 124 insertions(+), 30 deletions(-) diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index 9a10498..c1009e9 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -57,10 +57,6 @@ pub struct ShareArgs { #[arg(long)] pub project: Option, - /// Overwrite the cache entry if it already exists - #[arg(long)] - pub force: bool, - /// Skip writing the cache; derive in-memory only #[arg(long)] pub no_cache: bool, @@ -594,7 +590,6 @@ pub fn run(args: ShareArgs) -> Result<()> { } else { None }, - force: args.force, no_cache: args.no_cache, }; // Show the conversation title in the confirmation line; the session id @@ -798,31 +793,20 @@ fn share_explicit( let summary = format!("{} session {}", harness.name(), derived.cache_id); if !args.no_cache { - // Cache write is incidental for share — the upload uses the - // in-memory body. If an entry with this id already exists from - // a prior run, reuse it (with a notice) instead of hard-failing - // the way `path import` does. `--force` still overwrites. - let existing = crate::cmd_cache::cache_path(&derived.cache_id) - .ok() - .filter(|p| p.exists()); - match (existing, args.force) { - (Some(path), false) => eprintln!( - "Reusing cached {} session → {} ({}); pass --force to overwrite", - harness.name(), - derived.cache_id, - path.display() - ), - _ => { - let path = - crate::cmd_cache::write_cached(&derived.cache_id, &derived.doc, args.force)?; - eprintln!( - "Imported {} session → {} ({})", - harness.name(), - derived.cache_id, - path.display() - ); - } - } + // The cache entry should always reflect what was just uploaded. 
+ // `path share` is "ship the current state of this session"; if + // the conversation has grown since a prior share, the in-memory + // body has the new turns but a stale cache file would not — and + // the upload uses the fresh body, not the cache. Always + // overwrite so cache and upload agree (use `--no-cache` to skip + // the cache write entirely). + let path = crate::cmd_cache::write_cached(&derived.cache_id, &derived.doc, true)?; + eprintln!( + "Cached {} session → {} ({})", + harness.name(), + derived.cache_id, + path.display() + ); } let body = derived.doc.to_json()?; diff --git a/crates/path-cli/tests/integration.rs b/crates/path-cli/tests/integration.rs index 7bfb3c9..d2e3a95 100644 --- a/crates/path-cli/tests/integration.rs +++ b/crates/path-cli/tests/integration.rs @@ -803,6 +803,116 @@ fn share_anon_fixture() -> ( (port, server, temp, project, home) } +/// Spawn a one-shot mock anon-upload server on a free port. Returns the +/// port and the join handle. Used by tests that need multiple sequential +/// uploads (the default fixture builds the claude session too, which we +/// don't want to redo between runs). 
+fn one_shot_anon_server() -> (u16, std::thread::JoinHandle<()>) { + use std::io::{Read, Write}; + use std::net::TcpListener; + + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let port = listener.local_addr().unwrap().port(); + let server = std::thread::spawn(move || { + let (mut stream, _) = listener.accept().unwrap(); + let mut buf = [0u8; 4096]; + let _ = stream.read(&mut buf); + let body = r#"{"id":"abc","url":"https://example.test/anon/abc"}"#; + let resp = format!( + "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", + body.len(), + body + ); + let _ = stream.write_all(resp.as_bytes()); + }); + (port, server) +} + +/// `path share` re-run after a conversation has grown should overwrite +/// the cache file with the fresh derive — otherwise the cache and the +/// uploaded body would disagree (upload uses the in-memory fresh body, +/// cache file would be stale). Lock that contract in. +#[test] +fn share_rewrites_cache_when_session_has_grown() { + let temp = tempfile::tempdir().unwrap(); + let project = temp.path().join("proj"); + std::fs::create_dir_all(&project).unwrap(); + let claude_dir = temp.path().join(".claude"); + let project_slug = project + .to_string_lossy() + .replace([std::path::MAIN_SEPARATOR, '_', '.'], "-"); + let project_dir = claude_dir.join("projects").join(&project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + let session_file = project_dir.join("session-grow.jsonl"); + let cwd_str = project.display().to_string(); + let initial = format!( + r#"{{"type":"user","uuid":"u-1","timestamp":"2024-01-01T00:00:00Z","cwd":"{cwd_str}","message":{{"role":"user","content":"first"}}}} +{{"type":"assistant","uuid":"a-1","timestamp":"2024-01-01T00:00:01Z","message":{{"role":"assistant","content":"reply-1"}}}} +"# + ); + std::fs::write(&session_file, &initial).unwrap(); + + let cfg = tempfile::tempdir().unwrap(); + let home = temp.path(); + + // First share: cache picks up the 2-turn 
conversation. + let (port1, server1) = one_shot_anon_server(); + cmd() + .env("HOME", home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["share", "--harness", "claude", "--session", "session-grow", "--project"]) + .arg(&project) + .args(["--anon", "--url"]) + .arg(format!("http://127.0.0.1:{port1}")) + .assert() + .success(); + server1.join().unwrap(); + + let docs = cfg.path().join("documents"); + let cache_files: Vec<_> = std::fs::read_dir(&docs) + .unwrap() + .filter_map(|e| e.ok()) + .collect(); + assert_eq!(cache_files.len(), 1, "expected one cache entry after first share"); + let cache_path = cache_files[0].path(); + let cache_v1 = std::fs::read_to_string(&cache_path).unwrap(); + assert!(cache_v1.contains("reply-1"), "v1 cache must contain reply-1"); + assert!(!cache_v1.contains("reply-2"), "v1 cache must not contain reply-2 yet"); + + // Conversation continues: append two more turns to the session JSONL. + let mut grown = initial.clone(); + grown.push_str(&format!( + r#"{{"type":"user","uuid":"u-2","timestamp":"2024-01-02T00:00:00Z","cwd":"{cwd_str}","message":{{"role":"user","content":"second"}}}} +{{"type":"assistant","uuid":"a-2","timestamp":"2024-01-02T00:00:01Z","message":{{"role":"assistant","content":"reply-2"}}}} +"# + )); + std::fs::write(&session_file, &grown).unwrap(); + + // Second share: must overwrite the cache file with the grown derive, + // not silently keep the v1 contents while uploading v2. 
+ let (port2, server2) = one_shot_anon_server(); + cmd() + .env("HOME", home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["share", "--harness", "claude", "--session", "session-grow", "--project"]) + .arg(&project) + .args(["--anon", "--url"]) + .arg(format!("http://127.0.0.1:{port2}")) + .assert() + .success(); + server2.join().unwrap(); + + let cache_v2 = std::fs::read_to_string(&cache_path).unwrap(); + assert!( + cache_v2.contains("reply-2"), + "v2 cache should contain the new turn, got: {cache_v2}" + ); + assert_ne!( + cache_v1, cache_v2, + "cache file must be rewritten when the session has grown" + ); +} + #[test] fn share_writes_cache_by_default() { let (port, server, _temp, project, home) = share_anon_fixture(); From 94526cfdb96cc18ae6e90f2635917e293527b1b3 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 10:52:51 -0400 Subject: [PATCH 28/36] fix(path-cli): terse fallback notice on `path share` auth fallthrough MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The "reason:" line that followed "falling back to anonymous" dumped the full api_me error including HTML body snippets and serde diagnostics — readable as a debugger trace, not as a CLI notice. For a graceful fallthrough the user just needs to know what's happening: note: ; falling back to anonymous upload. Refactor: - api_me errors are now terse one-liners ("rejected the stored credentials", "isn't a Pathbase deployment", "returned ") with no actionable hints baked in. - preflight_auth's fallback path uses the terse error directly. - preflight_auth's propagate path (--repo / --public / --slug set) carries the "Run `path auth login --url ` …" hint, where it's actually relevant. Also fixes a parallelism flake: the unit-test EnvGuard was racing on TOOLPATH_CONFIG_DIR across cargo test threads. Add a static mutex held for the guard's lifetime so EnvGuard-using tests run serially against each other. 
--- crates/path-cli/src/cmd_pathbase.rs | 62 ++++++++++++++--------------- 1 file changed, 30 insertions(+), 32 deletions(-) diff --git a/crates/path-cli/src/cmd_pathbase.rs b/crates/path-cli/src/cmd_pathbase.rs index 407d9d9..b9d5654 100644 --- a/crates/path-cli/src/cmd_pathbase.rs +++ b/crates/path-cli/src/cmd_pathbase.rs @@ -156,6 +156,11 @@ pub(crate) fn api_logout(base_url: &str, token: &str) -> Result<()> { Ok(()) } +/// Errors are intentionally terse one-liners — callers compose them +/// into either a fallback notice ("note: ; falling back to +/// anonymous") or a propagated error with actionable next-step hints. +/// Don't bake the hints in here; otherwise the fallback notice gets +/// telephone-pole long. pub(crate) fn api_me(base_url: &str, token: &str) -> Result { let client = http_client()?; let resp = client @@ -168,24 +173,13 @@ pub(crate) fn api_me(base_url: &str, token: &str) -> Result { let body = resp.text().unwrap_or_default(); if status == reqwest::StatusCode::UNAUTHORIZED || status == reqwest::StatusCode::FORBIDDEN { - bail!( - "{base_url} rejected your stored credentials ({status}). \ - Run `path auth login --url {base_url}` to authenticate against this server, \ - or pass `--anon` to upload anonymously." - ); + bail!("{base_url} rejected the stored credentials ({status})"); } if !status.is_success() { - bail!( - "GET {base_url}/api/v1/auth/me returned {status}: {}", - short_body(&body) - ); + bail!("{base_url} returned {status} on /api/v1/auth/me"); } - serde_json::from_str(&body).map_err(|e| { - anyhow!( - "{base_url} returned a non-JSON response from /api/v1/auth/me ({status}): {} \ - ({e}). 
The URL may not be a Pathbase deployment.", - short_body(&body) - ) + serde_json::from_str(&body).map_err(|_| { + anyhow!("{base_url} isn't a Pathbase deployment (non-JSON /api/v1/auth/me response)") }) } @@ -251,14 +245,13 @@ pub(crate) fn preflight_auth(base_url: &str, anon: bool, needs_auth: bool) -> Re token: session.token, username: user.username, }), - Err(e) if needs_auth => Err(e.context( - "--repo / --public / --slug require an authenticated upload, so falling back \ - to anonymous wasn't an option. Drop those flags to upload anonymously.", - )), + Err(e) if needs_auth => Err(e.context(format!( + "--repo / --public / --slug require an authenticated upload. \ + Run `path auth login --url {base_url}` to authenticate against this \ + server, or drop those flags to upload anonymously." + ))), Err(e) => { - eprintln!( - "note: authenticated upload not available — falling back to anonymous.\n reason: {e}" - ); + eprintln!("note: {e}; falling back to anonymous upload."); Ok(AuthMode::Anon) } } @@ -1044,29 +1037,34 @@ mod tests { assert!(matches!(mode, AuthMode::Anon)); } - /// Test-helper guard for `std::env::set_var`. Restores the prior value - /// on drop so tests don't leak state. Tests touching env vars run - /// serially because `cargo test` shares process env across the suite; - /// the existing pathbase tests don't depend on TOOLPATH_CONFIG_DIR so - /// this guard's blast radius is just the preflight tests above. + /// Test-helper guard for `std::env::set_var`. Process env is shared + /// across all `cargo test` threads, so concurrent tests that mutate + /// the same key would race — `EnvGuard` serializes them via a global + /// mutex held for the guard's lifetime. Drop restores the prior value. 
+ static ENV_LOCK: std::sync::Mutex<()> = std::sync::Mutex::new(()); + struct EnvGuard { key: String, prior: Option, + _lock: std::sync::MutexGuard<'static, ()>, } impl EnvGuard { fn set(key: &str, val: &str) -> Self { + // PoisonError on a previously-panicked test still gives us a + // valid lock — recover the inner guard and proceed. + let lock = ENV_LOCK.lock().unwrap_or_else(|e| e.into_inner()); let prior = std::env::var_os(key); - // SAFETY: tests are single-threaded with respect to each other - // for the env vars these guards control; the cargo test harness - // runs them concurrently across env vars but the only env var - // these tests touch is TOOLPATH_CONFIG_DIR, and no other tests - // in this crate touch it. + // SAFETY: ENV_LOCK serializes EnvGuard-using tests against + // each other. The only env var these tests touch is + // TOOLPATH_CONFIG_DIR, and no other tests in this crate + // mutate or read it from the test process. unsafe { std::env::set_var(key, val); } Self { key: key.to_string(), prior, + _lock: lock, } } } From ced5365cf3d696f70b232c86529200167867aa2a Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 10:55:27 -0400 Subject: [PATCH 29/36] fix(path-cli): print share URL last on `path share` upload MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Both the anon and authed branches of run_pathbase_inner used to print the URL on stdout *first* and the "Uploaded …" summary on stderr *second*. In a terminal that puts the summary below the URL, which buries the link the user actually wants to copy. Swap the order: summary on stderr first, URL on stdout last. The share URL is now the final line every time, regardless of whether both streams are merging in the terminal. 
--- crates/path-cli/src/cmd_export.rs | 12 ++++++++---- 1 file changed, 8 insertions(+), 4 deletions(-) diff --git a/crates/path-cli/src/cmd_export.rs b/crates/path-cli/src/cmd_export.rs index 2fecfe5..e8d10aa 100644 --- a/crates/path-cli/src/cmd_export.rs +++ b/crates/path-cli/src/cmd_export.rs @@ -1277,21 +1277,23 @@ pub(crate) fn run_pathbase_inner( let (token, username) = match auth { AuthMode::Anon => { let resp = anon_paths_post(&base_url, body)?; - let printable = if resp.url.starts_with("http://") || resp.url.starts_with("https://") - { + let printable = if resp.url.starts_with("http://") || resp.url.starts_with("https://") { resp.url.clone() } else if resp.url.starts_with('/') { format!("{base_url}{}", resp.url) } else { format!("{base_url}/{}", resp.url) }; - println!("{printable}"); + // Summary first on stderr, then the URL on stdout — the + // share URL is the primary product, so it's the last line + // the user (or a script piping the output) sees. eprintln!( "Uploaded {} → anon path {} ({} bytes)", summary_source, resp.id, body.len() ); + println!("{printable}"); return Ok(()); } AuthMode::Authed { token, username } => (token, username), @@ -1333,7 +1335,8 @@ pub(crate) fn run_pathbase_inner( &created.id, created.is_public, ); - println!("{url}"); + // Summary first on stderr, URL last on stdout — same ordering as + // the anon path so the share URL is consistently the final line. 
eprintln!( "Uploaded {} → {}/{}/{} ({} path, {} bytes)", summary_source, @@ -1343,6 +1346,7 @@ pub(crate) fn run_pathbase_inner( visibility, body.len() ); + println!("{url}"); Ok(()) } From 19260b2a15df38ec76febd78063be85d610fc269 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 14:50:08 -0400 Subject: [PATCH 30/36] rename(path-cli): unify derive helper suffix to '_session' MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit The original split between '_pair' (project-keyed providers — claude, gemini, pi take a project+session pair) and '_one' (session-keyed providers — codex, opencode take just a session id) leaned on internal jargon that no reader could intuit. The asymmetry was already expressed in the parameter list; the suffix was redundant. Rename all five helpers to derive__session so they read uniformly: 'derive one session, given the args needed to identify it.' Also rename the dispatcher in cmd_share from derive_one to derive_session for symmetry. --- crates/path-cli/src/cmd_import.rs | 10 +++++----- crates/path-cli/src/cmd_share.rs | 14 +++++++------- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/crates/path-cli/src/cmd_import.rs b/crates/path-cli/src/cmd_import.rs index f25467e..ad80315 100644 --- a/crates/path-cli/src/cmd_import.rs +++ b/crates/path-cli/src/cmd_import.rs @@ -450,7 +450,7 @@ fn derive_claude_with_manager( /// Derive a single Claude conversation given an explicit project + session. /// Used by `cmd_share` after its picker has resolved the pair; mirrors the /// `(Some(p), Some(s), _)` arm in [`derive_claude_with_manager`]. 
-pub(crate) fn derive_claude_pair(project: &str, session: &str) -> Result { +pub(crate) fn derive_claude_session(project: &str, session: &str) -> Result { let manager = toolpath_claude::ClaudeConvo::new(); let cfg = toolpath_claude::derive::DeriveConfig { project_path: Some(project.to_string()), @@ -666,7 +666,7 @@ fn derive_gemini_with_manager( } /// Derive a single Gemini conversation given an explicit project + session. -pub(crate) fn derive_gemini_pair( +pub(crate) fn derive_gemini_session( project: &str, session: &str, include_thinking: bool, @@ -850,7 +850,7 @@ fn derive_codex(session: Option, all: bool) -> Result> { } /// Derive a single Codex session given an explicit session id. -pub(crate) fn derive_codex_one(session: &str) -> Result { +pub(crate) fn derive_codex_session(session: &str) -> Result { let manager = toolpath_codex::CodexConvo::new(); let config = toolpath_codex::derive::DeriveConfig { project_path: None }; let s = manager @@ -998,7 +998,7 @@ fn derive_opencode( /// Derive a single opencode session given an explicit session id. #[cfg(not(target_os = "emscripten"))] -pub(crate) fn derive_opencode_one(session: &str, no_snapshot_diffs: bool) -> Result { +pub(crate) fn derive_opencode_session(session: &str, no_snapshot_diffs: bool) -> Result { let manager = toolpath_opencode::OpencodeConvo::new(); let config = toolpath_opencode::derive::DeriveConfig { no_snapshot_diffs, @@ -1173,7 +1173,7 @@ fn derive_pi_with_manager( } /// Derive a single Pi session given an explicit project + session. 
-pub(crate) fn derive_pi_pair( +pub(crate) fn derive_pi_session( project: &str, session: &str, base: Option, diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index c1009e9..3143b28 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -789,7 +789,7 @@ fn share_explicit( (false, _) => None, }; - let derived = derive_one(harness, project.as_deref(), session)?; + let derived = derive_session(harness, project.as_deref(), session)?; let summary = format!("{} session {}", harness.name(), derived.cache_id); if !args.no_cache { @@ -893,23 +893,23 @@ fn project_short(p: &str) -> String { out.join("/") } -fn derive_one( +fn derive_session( harness: Harness, project: Option<&str>, session: &str, ) -> Result { match harness { Harness::Claude => { - crate::cmd_import::derive_claude_pair(project.expect("project_keyed"), session) + crate::cmd_import::derive_claude_session(project.expect("project_keyed"), session) } Harness::Gemini => { - crate::cmd_import::derive_gemini_pair(project.expect("project_keyed"), session, false) + crate::cmd_import::derive_gemini_session(project.expect("project_keyed"), session, false) } Harness::Pi => { - crate::cmd_import::derive_pi_pair(project.expect("project_keyed"), session, None) + crate::cmd_import::derive_pi_session(project.expect("project_keyed"), session, None) } - Harness::Codex => crate::cmd_import::derive_codex_one(session), - Harness::Opencode => crate::cmd_import::derive_opencode_one(session, false), + Harness::Codex => crate::cmd_import::derive_codex_session(session), + Harness::Opencode => crate::cmd_import::derive_opencode_session(session, false), } } From a4b5e19523f2ec575becb356ccbf4cbecf22ee49 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 16:32:50 -0400 Subject: [PATCH 31/36] build: down-convert OpenAPI 3.1 to 3.0 in refresh script Pathbase emits OpenAPI 3.1 ('type': ['string', 'null']), but our generator stack (progenitor 0.14 / openapiv3 2.x) 
only understands 3.0 ('type': 'string', 'nullable': true). Refreshing the spec now triggers 'not yet implemented: invalid type: null' at build time. Add a jq down-converter to the refresh script that handles the two 3.1 idioms appearing in the live spec: - Type-as-array nullable: 'type': ['X', 'null'] -> 'type': 'X' + 'nullable': true. Multi-type unions are rejected with a clear error so we notice if the spec ever uses something more exotic. - Nullable refs: 'oneOf': [{'type': 'null'}, {'\$ref': X}] -> 'allOf': [{'\$ref': X}] + 'nullable': true. After this, 'PATHBASE_URL=https://pathbase-dev.fly.dev bash scripts/refresh-pathbase-openapi.sh' produces a spec the build script can consume without panicking. --- scripts/refresh-pathbase-openapi.sh | 47 +++++++++++++++++++++++++++-- 1 file changed, 44 insertions(+), 3 deletions(-) diff --git a/scripts/refresh-pathbase-openapi.sh b/scripts/refresh-pathbase-openapi.sh index 4c5c11b..0ba95c6 100755 --- a/scripts/refresh-pathbase-openapi.sh +++ b/scripts/refresh-pathbase-openapi.sh @@ -19,6 +19,47 @@ _tmp="$(mktemp -t pathbase-openapi.XXXXXX.json)" trap 'rm -f "${_tmp}"' EXIT curl -fsSL "${_url}/api/v1/openapi.json" -o "${_tmp}" -# Pretty-print with jq for stable diffs. -jq . "${_tmp}" > "${_dest}" -echo "refresh: wrote ${_dest} ($(wc -l < "${_dest}") lines)" +# Pathbase emits OpenAPI 3.1 (`"type": ["string", "null"]`) but our +# generator stack (progenitor 0.14 / openapiv3 2.x) only understands +# 3.0 (`"type": "string", "nullable": true`). Down-convert nullable +# unions to 3.0 form so the build doesn't panic on `not yet +# implemented: invalid type: null`. +# +# Only handles the single-non-null + "null" pattern. Multi-type +# unions are rejected explicitly so we notice if the spec ever uses +# something more exotic. +jq ' + def downconvert_type_array: + if type == "object" + and (has("type")) + and (.type | type) == "array" + then + if (.type | any(. == "null")) and (.type | map(select(. 
!= "null")) | length) == 1 + then .type = (.type | map(select(. != "null"))[0]) | .nullable = true + elif (.type | any(. == "null")) + then + error("multi-type nullable union not supported by openapiv3 0.x: \(.type)") + else . + end + else . end; + + # `oneOf: [{type: null}, {$ref: X}]` (or in either order) is OpenAPI 3.1 + # idiom for "nullable ref". Convert to 3.0: `{nullable: true, allOf: [{$ref: X}]}`. + def downconvert_nullable_ref: + if type == "object" + and has("oneOf") + and (.oneOf | type) == "array" + and (.oneOf | length) == 2 + and (.oneOf | any(. == {"type": "null"})) + and (.oneOf | any(has("$ref"))) + then + (.oneOf | map(select(has("$ref"))) | .[0]) as $ref_obj + | del(.oneOf) + | .nullable = true + | .allOf = [{"$ref": $ref_obj["$ref"]}] + | (if $ref_obj.description then .description = $ref_obj.description else . end) + else . end; + + walk(downconvert_type_array | downconvert_nullable_ref) +' "${_tmp}" > "${_dest}" +echo "refresh: wrote ${_dest} ($(wc -l < "${_dest}") lines, OpenAPI 3.0 form)" From 4c21e2e2b916940fd2d376f7737c20b1223638b8 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 16:33:02 -0400 Subject: [PATCH 32/36] chore(pathbase-client): refresh OpenAPI spec from pathbase-dev The previous spec was stale enough that the actual server response shapes had drifted, causing strict-deser failures on the typed client (e.g. AnonUploadResponse: spec said {id, url}, server returns {id, path, share_url}). Refresh from https://pathbase-dev.fly.dev (the canonical live source) through the down-converter in scripts/refresh-pathbase-openapi.sh. 
Notable new shapes the typed client now agrees on: - AnonUploadResponse: {id, path, share_url} - User: required uuid + created_at/updated_at, plus optional email/display_name/avatar_url/bio - Several new endpoints (graphs, profile, health, signups) that the CLI doesn't use yet but now compile-time-typed if it ever does --- crates/pathbase-client/openapi.json | 579 +++++++++++++++++++++++----- 1 file changed, 473 insertions(+), 106 deletions(-) diff --git a/crates/pathbase-client/openapi.json b/crates/pathbase-client/openapi.json index 96e8661..601914b 100644 --- a/crates/pathbase-client/openapi.json +++ b/crates/pathbase-client/openapi.json @@ -40,7 +40,7 @@ "description": "Invalid document / unsupported variant / empty path" }, "413": { - "description": "Request body exceeds 5 MB" + "description": "Request body exceeds the configured anon upload byte limit" }, "429": { "description": "Rate limit exceeded" @@ -61,29 +61,6 @@ } } }, - "/api/v1/auth/me": { - "get": { - "tags": [ - "Auth" - ], - "operationId": "me", - "responses": { - "200": { - "description": "Current authenticated user", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/User" - } - } - } - }, - "401": { - "description": "Not authenticated" - } - } - } - }, "/api/v1/health": { "get": { "tags": [ @@ -305,6 +282,16 @@ "schema": { "type": "string" } + }, + { + "name": "limit", + "in": "query", + "description": "Max items to return. Default and cap are server-configurable;\nout-of-range values are clamped silently.", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } } ], "responses": { @@ -503,6 +490,16 @@ "schema": { "type": "string" } + }, + { + "name": "limit", + "in": "query", + "description": "Max items to return. 
Default and cap are server-configurable;\nout-of-range values are clamped silently.", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } } ], "responses": { @@ -740,6 +737,67 @@ } } }, + "/api/v1/repos/{owner}/{repo}/paths/{slug}/chat": { + "get": { + "tags": [ + "Paths" + ], + "operationId": "get_path_chat", + "parameters": [ + { + "name": "owner", + "in": "path", + "description": "Owner username", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "slug", + "in": "path", + "description": "Path slug", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "include_html", + "in": "query", + "description": "Render `text` and `thinking` fields to sanitized HTML server-side.\nDefaults to true; set false for lighter payloads when the client\nrenders markdown lazily.", + "required": false, + "schema": { + "type": "boolean" + } + } + ], + "responses": { + "200": { + "description": "Chat-shaped projection of the path's HEAD-ancestor chain", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChatProjection" + } + } + } + }, + "404": { + "description": "Not found" + } + } + } + }, "/api/v1/repos/{owner}/{repo}/paths/{slug}/download": { "get": { "tags": [ @@ -788,15 +846,51 @@ } } }, - "/api/v1/settings/profile": { + "/api/v1/signups": { + "post": { + "tags": [ + "Signups" + ], + "operationId": "create_signup", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSignupBody" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "Signup recorded", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSignupResponse" + } + } + } + }, + "400": { + "description": "Invalid email or source" + }, + "429": { + "description": 
"Rate limit exceeded" + } + } + } + }, + "/api/v1/users/me": { "get": { "tags": [ - "Settings" + "Users" ], - "operationId": "get_profile", + "operationId": "get_me", "responses": { "200": { - "description": "Current user profile", + "description": "Current authenticated user", "content": { "application/json": { "schema": { @@ -812,9 +906,9 @@ }, "patch": { "tags": [ - "Settings" + "Users" ], - "operationId": "update_profile", + "operationId": "update_me", "requestBody": { "content": { "application/json": { @@ -827,7 +921,7 @@ }, "responses": { "200": { - "description": "Updated profile", + "description": "Profile updated", "content": { "application/json": { "schema": { @@ -891,6 +985,16 @@ "schema": { "type": "string" } + }, + { + "name": "limit", + "in": "query", + "description": "Max items to return. Default and cap are server-configurable;\nout-of-range values are clamped silently.", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } } ], "responses": { @@ -916,6 +1020,32 @@ }, "components": { "schemas": { + "ActorView": { + "type": "object", + "required": [ + "id", + "kind", + "display" + ], + "properties": { + "display": { + "type": "string", + "description": "Display name resolved against `meta.actors` if present, else the\nsuffix after `:`." + }, + "id": { + "type": "string", + "description": "Canonical actor string (e.g. `\"claude:opus-4-7\"`)." + }, + "kind": { + "type": "string", + "description": "Prefix before `:` — `\"claude\"`, `\"human\"`, etc." + }, + "model": { + "type": "string", + "nullable": true + } + } + }, "AnonUploadBody": { "type": "object", "required": [ @@ -930,18 +1060,179 @@ "AnonUploadResponse": { "type": "object", "required": [ - "url", - "id" + "path", + "id", + "share_url" ], "properties": { "id": { "type": "string" }, - "url": { + "path": { + "type": "string", + "description": "Site-relative path to the uploaded trace's frontend page, e.g.\n`/anon/pathstash/paths/`. Suitable for in-app navigation." 
+ }, + "share_url": { + "type": "string", + "description": "Absolute URL for sharing externally." + } + } + }, + "ChatCursor": { + "type": "object", + "properties": { + "before": { + "type": "string", + "description": "Canonical step id one step older than `turns[0]`. `None` when the\nchain reaches a root step in this projection.", + "nullable": true + } + } + }, + "ChatProjection": { + "type": "object", + "required": [ + "path_id", + "actors", + "turns", + "step_ids", + "cursor" + ], + "properties": { + "actors": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ActorView" + } + }, + "cursor": { + "$ref": "#/components/schemas/ChatCursor" + }, + "head": { + "type": "integer", + "format": "int32", + "description": "Index into `turns` — the latest turn along the HEAD chain. `None`\nwhen the document has no head.", + "minimum": 0, + "nullable": true + }, + "path_id": { + "type": "string" + }, + "step_ids": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Canonical step ids parallel to `turns`, for deep-links and\n`/step/{id}` fetches." + }, + "title": { + "type": "string", + "nullable": true + }, + "turns": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ChatTurn" + } + } + } + }, + "ChatTurn": { + "type": "object", + "required": [ + "actor_id", + "is_head", + "kind", + "text_html", + "thinking_html", + "tool_uses", + "invocations" + ], + "properties": { + "actor_id": { + "type": "integer", + "format": "int32", + "description": "Index into `actors`.", + "minimum": 0 + }, + "intent": { + "type": "string", + "nullable": true + }, + "invocations": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ToolInvocation" + }, + "description": "`tool.invoke` siblings of an assistant step, spliced inline." 
+ }, + "is_head": { + "type": "boolean" + }, + "kind": { + "$ref": "#/components/schemas/ChatTurnKind" + }, + "model": { + "type": "string", + "nullable": true + }, + "parent_id": { + "type": "integer", + "format": "int32", + "description": "Index into `turns` — `i - 1` along the HEAD chain, `None` at the root.", + "minimum": 0, + "nullable": true + }, + "text": { + "type": "string", + "nullable": true + }, + "text_html": { + "type": "string", + "description": "Sanitized HTML for `text`. Empty when `include_html` is false or\n`text` is empty." + }, + "thinking": { + "type": "string", + "nullable": true + }, + "thinking_html": { "type": "string" + }, + "timestamp": { + "type": "string", + "nullable": true + }, + "tool_diff": { + "nullable": true, + "allOf": [ + { + "$ref": "#/components/schemas/ToolDiff" + } + ], + "description": "For `kind = \"tool\"` only: the first non-empty `change[k].raw`,\npre-split into lines." + }, + "tool_name": { + "type": "string", + "description": "For `kind = \"tool\"` only: the tool name (`extra.name`).", + "nullable": true + }, + "tool_uses": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Tool names from `extra.tool_uses` (string list)." 
} } }, + "ChatTurnKind": { + "type": "string", + "enum": [ + "user", + "assistant", + "tool", + "system" + ] + }, "CreateRepoBody": { "type": "object", "required": [ @@ -949,16 +1240,40 @@ ], "properties": { "description": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "name": { "type": "string" } } }, + "CreateSignupBody": { + "type": "object", + "required": [ + "email", + "source" + ], + "properties": { + "email": { + "type": "string" + }, + "source": { + "type": "string" + } + } + }, + "CreateSignupResponse": { + "type": "object", + "required": [ + "ok" + ], + "properties": { + "ok": { + "type": "boolean" + } + } + }, "Graph": { "type": "object", "required": [ @@ -966,7 +1281,6 @@ "repo_id", "slug", "toolpath_id", - "document", "created_at", "updated_at" ], @@ -976,7 +1290,14 @@ "format": "date-time" }, "document": { - "type": "object" + "type": "object", + "description": "Reconstructed full Graph document with inline paths. Only populated\nby handlers that explicitly fetch the constituent paths and steps.", + "nullable": true + }, + "header": { + "type": "object", + "description": "Stored graph metadata: `{graph: GraphIdentity, meta?: GraphMeta}` —\nthe toolpath `Graph` minus its `paths`. 
None when never set.", + "nullable": true }, "id": { "type": "string", @@ -990,10 +1311,8 @@ "type": "string" }, "title": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "toolpath_id": { "type": "string" @@ -1011,10 +1330,8 @@ ], "properties": { "reason": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "status": { "type": "string" @@ -1037,10 +1354,8 @@ "format": "date-time" }, "description": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "id": { "type": "string", @@ -1057,10 +1372,8 @@ "format": "uuid" }, "readme": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "updated_at": { "type": "string", @@ -1068,6 +1381,76 @@ } } }, + "ToolDiff": { + "type": "object", + "required": [ + "path", + "lines" + ], + "properties": { + "lines": { + "type": "array", + "items": { + "type": "string" + } + }, + "path": { + "type": "string" + } + } + }, + "ToolInvocation": { + "type": "object", + "required": [ + "step_id", + "actor_id", + "text_html" + ], + "properties": { + "actor_id": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "input": { + "type": "string", + "description": "Tool input args from the structural payload (`extra.input`),\nJSON-stringified when not already a string.", + "nullable": true + }, + "result": { + "type": "string", + "description": "Tool output from the structural payload (`extra.result`),\nJSON-stringified when not already a string.", + "nullable": true + }, + "step_id": { + "type": "string", + "description": "Canonical step id of the tool.invoke step." 
+ }, + "text": { + "type": "string", + "nullable": true + }, + "text_html": { + "type": "string" + }, + "timestamp": { + "type": "string", + "nullable": true + }, + "tool_diff": { + "nullable": true, + "allOf": [ + { + "$ref": "#/components/schemas/ToolDiff" + } + ] + }, + "tool_name": { + "type": "string", + "nullable": true + } + } + }, "TracePath": { "type": "object", "required": [ @@ -1075,7 +1458,6 @@ "repo_id", "slug", "toolpath_id", - "document", "step_count", "is_public", "created_at", @@ -1087,7 +1469,14 @@ "format": "date-time" }, "document": { - "type": "object" + "type": "object", + "description": "Reconstructed full Graph document. Only populated by handlers that\nexplicitly fetch the steps (e.g. single-path GET, download, chat).\nListing endpoints leave this None to avoid N+1 step fetches.", + "nullable": true + }, + "header": { + "type": "object", + "description": "Stored path metadata: `{path: PathIdentity, meta?: PathMeta}` — the\ntoolpath `Path` minus its `steps`. None when never set.", + "nullable": true }, "id": { "type": "string", @@ -1108,10 +1497,8 @@ "format": "int32" }, "title": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "toolpath_id": { "type": "string" @@ -1126,10 +1513,8 @@ "type": "object", "properties": { "is_public": { - "type": [ - "boolean", - "null" - ] + "type": "boolean", + "nullable": true } } }, @@ -1137,16 +1522,12 @@ "type": "object", "properties": { "bio": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "display_name": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true } } }, @@ -1154,22 +1535,16 @@ "type": "object", "properties": { "description": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "name": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "readme": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true } } }, @@ -1217,32 +1592,24 
@@ ], "properties": { "avatar_url": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "bio": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "created_at": { "type": "string", "format": "date-time" }, "display_name": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "email": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "id": { "type": "string", @@ -1284,13 +1651,13 @@ "name": "Graphs", "description": "Computation graphs" }, - { - "name": "Settings", - "description": "User settings" - }, { "name": "Anon", "description": "Anonymous uploads" + }, + { + "name": "Signups", + "description": "Marketing-page email capture" } ] } From 4324c8b94cca74de8a6317a4546211785c967f3a Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 16:33:23 -0400 Subject: [PATCH 33/36] refactor(path-cli): route the four hand-rolled endpoints through pathbase-client MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit api_me, api_logout, anon_paths_post, and paths_download were each hand-rolled via blocking reqwest with assorted reasons (endpoint not in spec, response-shape drift, byte-fidelity). With the spec refreshed from pathbase-dev, those reasons no longer apply for these four — convert them to the generated typed client so the only remaining hand-rolled HTTP is api_redeem (which still isn't in the spec, see the comment in cmd_pathbase.rs). Behavioral changes: - api_me now hits /api/v1/users/me (renamed from /api/v1/auth/me in the spec). Maps the wire User into the lean local User used for credentials persistence; no on-disk format change. - api_logout still posts to /api/v1/auth/logout, just typed now. - anon_paths_post returns AnonUploadResponse {id, share_url} from the server's {id, path, share_url}. The previous "accept any of url / share_url / path" fallback is gone — the typed client enforces the spec. 
Still maps 413 to the 'log in for a listable upload' notice and 429 to rate-limit advice. - paths_download decodes through serde_json::Map and re-serializes; byte-fidelity isn't a real requirement because the consumer parses to Graph and re-serializes regardless. The downstream cache writer writes pretty-printed JSON anyway. host_of moves to cmd_pathbase, where it's already used by preflight_auth. cmd_export imports it from there. Three test mocks (two in integration.rs, one in cmd_pathbase.rs) update from {id, url} to the new {id, path, share_url} shape so the typed client can decode them. The fragile 'accepts share_url / path / url' tolerance tests are gone — the spec is now the single source of truth. --- crates/path-cli/src/cmd_pathbase.rs | 289 +++++++++++---------------- crates/path-cli/tests/integration.rs | 6 +- 2 files changed, 118 insertions(+), 177 deletions(-) diff --git a/crates/path-cli/src/cmd_pathbase.rs b/crates/path-cli/src/cmd_pathbase.rs index b9d5654..55df9b0 100644 --- a/crates/path-cli/src/cmd_pathbase.rs +++ b/crates/path-cli/src/cmd_pathbase.rs @@ -144,16 +144,17 @@ pub(crate) fn api_redeem(base_url: &str, code: &str) -> Result<(String, User)> { } pub(crate) fn api_logout(base_url: &str, token: &str) -> Result<()> { - let client = http_client()?; - let resp = client - .post(format!("{base_url}/api/v1/auth/logout")) - .bearer_auth(token) - .send() - .with_context(|| format!("connect to {base_url}"))?; - if !resp.status().is_success() && resp.status() != reqwest::StatusCode::NO_CONTENT { - bail!("server returned {}", resp.status()); + let client = pathbase_client(base_url, Some(token))?; + match block_on(client.logout()) { + Ok(_) => Ok(()), + Err(pathbase_client::Error::ErrorResponse(resp)) => { + bail!("server returned {}", resp.status()) + } + Err(pathbase_client::Error::UnexpectedResponse(resp)) => { + bail!("server returned {}", resp.status()) + } + Err(e) => Err(anyhow!("connect to {base_url}: {e}")), } - Ok(()) } /// Errors are 
intentionally terse one-liners — callers compose them @@ -162,25 +163,36 @@ pub(crate) fn api_logout(base_url: &str, token: &str) -> Result<()> { /// Don't bake the hints in here; otherwise the fallback notice gets /// telephone-pole long. pub(crate) fn api_me(base_url: &str, token: &str) -> Result { - let client = http_client()?; - let resp = client - .get(format!("{base_url}/api/v1/auth/me")) - .bearer_auth(token) - .send() - .with_context(|| format!("connect to {base_url}"))?; - - let status = resp.status(); - let body = resp.text().unwrap_or_default(); - - if status == reqwest::StatusCode::UNAUTHORIZED || status == reqwest::StatusCode::FORBIDDEN { - bail!("{base_url} rejected the stored credentials ({status})"); - } - if !status.is_success() { - bail!("{base_url} returned {status} on /api/v1/auth/me"); + let client = pathbase_client(base_url, Some(token))?; + match block_on(client.get_me()) { + Ok(resp) => { + let u = resp.into_inner(); + Ok(User { + id: u.id.to_string(), + username: u.username, + email: u.email, + display_name: u.display_name, + avatar_url: u.avatar_url, + }) + } + Err(pathbase_client::Error::ErrorResponse(resp)) => { + let status = resp.status(); + if status == reqwest::StatusCode::UNAUTHORIZED + || status == reqwest::StatusCode::FORBIDDEN + { + bail!("{base_url} rejected the stored credentials ({status})") + } else { + bail!("{base_url} returned {status} on /api/v1/users/me") + } + } + Err(pathbase_client::Error::UnexpectedResponse(resp)) => { + bail!("{base_url} returned {} on /api/v1/users/me", resp.status()) + } + Err(pathbase_client::Error::InvalidResponsePayload(_, _)) => { + bail!("{base_url} isn't a Pathbase deployment (non-JSON /api/v1/users/me response)") + } + Err(e) => Err(anyhow!("connect to {base_url}: {e}")), } - serde_json::from_str(&body).map_err(|_| { - anyhow!("{base_url} isn't a Pathbase deployment (non-JSON /api/v1/auth/me response)") - }) } /// Pre-resolved upload mode. 
Produced by [`preflight_auth`] before any @@ -331,70 +343,37 @@ fn parse_document(json: &str) -> Result Result { - let body = serde_json::json!({ - "document": parse_document(document_json)?, - }); - let client = http_client()?; - let resp = client - .post(format!("{base_url}/api/v1/anon/paths")) - .json(&body) - .send() - .with_context(|| format!("connect to {base_url}"))?; - - let status = resp.status(); - let text = resp.text().unwrap_or_default(); - - if status.is_success() { - let v: serde_json::Value = serde_json::from_str(&text).map_err(|e| { - anyhow!( - "anon upload returned non-JSON ({status}): {} ({e})", - short_body(&text) - ) - })?; - let id = v - .get("id") - .and_then(|x| x.as_str()) - .map(String::from) - .ok_or_else(|| { - anyhow!( - "anon upload response missing `id`: {}", - short_body(&text) - ) - })?; - // Server-shape compat: production currently returns `url`, but - // pathbase-dev returns `share_url` + `path`. Accept any. - let url = v - .get("share_url") - .or_else(|| v.get("url")) - .or_else(|| v.get("path")) - .and_then(|x| x.as_str()) - .map(String::from) - .ok_or_else(|| { - anyhow!( - "anon upload response missing `share_url` / `url` / `path`: {}", - short_body(&text) - ) - })?; - return Ok(AnonUploadResponse { id, url }); - } - - match status.as_u16() { - 413 => bail!( - "anon upload exceeds the 5 MB cap — log in (`path auth login`) for a listable upload without that limit" - ), - 429 => bail!("anon upload rate-limited; retry shortly or log in"), - code => bail!("anon upload failed (HTTP {code}): {}", short_body(&text)), + let body = pathbase_client::types::AnonUploadBody { + document: parse_document(document_json)?, + }; + let client = pathbase_client(base_url, None)?; + match block_on(client.create_anon_path(&body)) { + Ok(resp) => { + let inner = resp.into_inner(); + Ok(AnonUploadResponse { + id: inner.id, + url: inner.share_url, + }) + } + Err(pathbase_client::Error::ErrorResponse(resp)) => match resp.status().as_u16() { + 413 => 
bail!( + "anon upload exceeds the 5 MB cap — log in (`path auth login`) for a listable upload without that limit" + ), + 429 => bail!("anon upload rate-limited; retry shortly or log in"), + code => bail!("anon upload failed (HTTP {code})"), + }, + Err(pathbase_client::Error::UnexpectedResponse(resp)) => { + let status = resp.status(); + let body = block_on(resp.text()).unwrap_or_default(); + let msg = error_message(&body).unwrap_or_else(|| short_body(&body)); + if msg.is_empty() { + bail!("anon upload failed ({status})") + } else { + bail!("anon upload failed ({status}): {msg}") + } + } + Err(e) => Err(anyhow!("anon upload failed: {e}")), } } @@ -491,15 +470,12 @@ pub(crate) fn repos_post(base_url: &str, token: &str, name: &str) -> Result<()> /// paths both download without authentication; only fully private paths /// (gated by an ACL beyond `is_public=false`) require auth. /// -/// **Why this doesn't go through `pathbase-client`.** progenitor's -/// generated client decodes the response body into -/// `serde_json::Map` (per the spec's -/// `application/json` content type) and we'd then re-serialize to get a -/// String back. That's a wasted round-trip — and the BTreeMap-backed -/// `serde_json::Map` reorders keys, so the bytes the caller sees aren't -/// the bytes the server sent. For a "give me back the document I just -/// uploaded" endpoint, byte-fidelity matters. We use blocking reqwest -/// directly and forward the response body verbatim. +/// Returns a serialized JSON string. The generated client decodes into +/// `serde_json::Map`, which we re-serialize on the way out — keys may +/// be reordered relative to the server's bytes, but the consumer parses +/// to `Graph` and re-serializes anyway, so byte-fidelity isn't a real +/// requirement. The downstream `write_cached` writes pretty-printed +/// JSON regardless. 
pub(crate) fn paths_download( base_url: &str, token: Option<&str>, @@ -507,33 +483,29 @@ pub(crate) fn paths_download( repo: &str, slug: &str, ) -> Result { - let client = http_client()?; - let mut req = client.get(format!( - "{base_url}/api/v1/repos/{owner}/{repo}/paths/{slug}/download" - )); - if let Some(t) = token { - req = req.bearer_auth(t); - } - let resp = req - .send() - .with_context(|| format!("connect to {base_url}"))?; - - let status = resp.status(); - let text = resp.text().unwrap_or_default(); - - if status == reqwest::StatusCode::UNAUTHORIZED { - bail!( - "this path is private and requires authentication — run `path auth login --url {base_url}` and retry" - ); - } - if status == reqwest::StatusCode::NOT_FOUND { - bail!("path {owner}/{repo}/{slug} not found on {base_url}"); - } - if !status.is_success() { - let msg = error_message(&text).unwrap_or(text); - bail!("download of {owner}/{repo}/{slug} failed ({status}): {msg}"); + let client = pathbase_client(base_url, token)?; + match block_on(client.download_path(owner, repo, slug)) { + Ok(resp) => { + let map = resp.into_inner(); + serde_json::to_string(&map).context("re-serializing downloaded path") + } + Err(pathbase_client::Error::ErrorResponse(resp)) => match resp.status() { + reqwest::StatusCode::UNAUTHORIZED => bail!( + "this path is private and requires authentication — run `path auth login --url {base_url}` and retry" + ), + reqwest::StatusCode::NOT_FOUND => { + bail!("path {owner}/{repo}/{slug} not found on {base_url}") + } + status => bail!("download of {owner}/{repo}/{slug} failed ({status})"), + }, + Err(pathbase_client::Error::UnexpectedResponse(resp)) => { + let status = resp.status(); + let body = block_on(resp.text()).unwrap_or_default(); + let msg = error_message(&body).unwrap_or_else(|| short_body(&body)); + bail!("download of {owner}/{repo}/{slug} failed ({status}): {msg}") + } + Err(e) => Err(anyhow!("download of {owner}/{repo}/{slug} failed: {e}")), } - Ok(text) } // ── File 
storage ──────────────────────────────────────────────────────── @@ -839,15 +811,17 @@ mod tests { assert!(err.to_string().contains("database is on fire"), "{err}"); } + /// Anon upload returns `{id, path, share_url}` per the OpenAPI spec. + /// We expose `share_url` to callers as the canonical share link. #[test] fn anon_paths_post_wraps_document_and_omits_auth() { let server = MockServer::start( "HTTP/1.1 201 Created", - r#"{"id":"abc","url":"https://pathbase.dev/anon/abc"}"#, + r#"{"id":"abc","path":"/anon/pathstash/paths/abc","share_url":"https://pathbase.dev/anon/pathstash/paths/abc"}"#, ); let resp = anon_paths_post(&server.base(), r#"{"Step":{}}"#).unwrap(); assert_eq!(resp.id, "abc"); - assert_eq!(resp.url, "https://pathbase.dev/anon/abc"); + assert_eq!(resp.url, "https://pathbase.dev/anon/pathstash/paths/abc"); let req = String::from_utf8(server.request()).unwrap(); assert!(req.starts_with("POST /api/v1/anon/paths "), "got: {req}"); @@ -866,63 +840,26 @@ mod tests { assert!(err.to_string().contains("path auth login"), "{err}"); } - /// Server-shape compat: pathbase-dev returns `share_url` + `path` - /// (no `url` key) for anon uploads. Production / older deployments - /// may still return `url`. Accept all three so the CLI works against - /// both surfaces. Regression: a strict response decode here used to - /// fail successful anon uploads with "missing field `url`". 
- #[test] - fn anon_paths_post_accepts_share_url_field() { - let server = MockServer::start( - "HTTP/1.1 201 Created", - r#"{"id":"xyz","share_url":"https://pathbase-dev.example/anon/xyz","path":"/anon/pathstash/paths/xyz"}"#, - ); - let resp = anon_paths_post(&server.base(), r#"{"Step":{}}"#).unwrap(); - assert_eq!(resp.id, "xyz"); - assert_eq!(resp.url, "https://pathbase-dev.example/anon/xyz"); - } - - #[test] - fn anon_paths_post_accepts_path_only_when_url_fields_missing() { - let server = MockServer::start( - "HTTP/1.1 201 Created", - r#"{"id":"abc","path":"/anon/pathstash/paths/abc"}"#, - ); - let resp = anon_paths_post(&server.base(), r#"{"Step":{}}"#).unwrap(); - assert_eq!(resp.id, "abc"); - assert_eq!(resp.url, "/anon/pathstash/paths/abc"); - } - - #[test] - fn anon_paths_post_includes_body_in_5xx_error() { - let server = MockServer::start( - "HTTP/1.1 503 Service Unavailable", - r#"{"error":"db down"}"#, - ); - let err = anon_paths_post(&server.base(), r#"{"Step":{}}"#).unwrap_err(); - let msg = err.to_string(); - assert!(msg.contains("503"), "expected status code: {msg}"); - assert!(msg.contains("db down"), "expected body in error: {msg}"); - } - #[test] fn repos_post_treats_409_as_success() { let server = MockServer::start("HTTP/1.1 409 Conflict", r#"{"error":"already exists"}"#); repos_post(&server.base(), "tok", "pathstash").unwrap(); } + /// Download decodes through `serde_json::Map` and re-serializes, so + /// keys may be reordered relative to the server's bytes. The + /// downstream cache writer (`write_cached`) round-trips through + /// `Graph` and writes pretty-printed JSON anyway, so the only + /// invariant we care about is "the JSON parses to the same value". #[test] - fn paths_download_returns_body_byte_for_byte() { - // Key ordering matters: the server's bytes must come back unmodified. - // With the round-trip removed (raw blocking GET, no Map decode), this - // is a straight string equality. 
If progenitor ever sneaks back in - // for this endpoint, the BTreeMap-backed Map reorders keys and this - // assertion catches it. + fn paths_download_returns_body_as_json() { let body = r#"{"Step":{"step":{"id":"s1","actor":"human:x","timestamp":"2024-01-01T00:00:00Z"},"change":{}}}"#; let server = MockServer::start("HTTP/1.1 200 OK", body); let got = paths_download(&server.base(), Some("tok"), "alex", "pathstash", "my-path").unwrap(); - assert_eq!(got, body); + let got_v: serde_json::Value = serde_json::from_str(&got).unwrap(); + let want_v: serde_json::Value = serde_json::from_str(body).unwrap(); + assert_eq!(got_v, want_v, "downloaded body should parse to the same value"); let req = String::from_utf8(server.request()).unwrap(); assert!( @@ -967,7 +904,11 @@ mod tests { } fn me_response_body(username: &str) -> String { - format!(r#"{{"id":"u1","username":"{username}"}}"#) + // The generated User type requires id (uuid), username, created_at, + // updated_at. Mock the bare minimum that parses cleanly. 
+ format!( + r#"{{"id":"00000000-0000-0000-0000-000000000001","username":"{username}","created_at":"2024-01-01T00:00:00Z","updated_at":"2024-01-01T00:00:00Z"}}"# + ) } /// Cleared TOOLPATH_CONFIG_DIR + no `--anon` + no auth-requiring flags diff --git a/crates/path-cli/tests/integration.rs b/crates/path-cli/tests/integration.rs index d2e3a95..62fd5ab 100644 --- a/crates/path-cli/tests/integration.rs +++ b/crates/path-cli/tests/integration.rs @@ -690,7 +690,7 @@ fn share_explicit_args_uploads_via_anon() { use std::io::Read; let mut buf = [0u8; 4096]; let _ = stream.read(&mut buf); - let body = r#"{"id":"abc-123","url":"https://example.test/anon/abc-123"}"#; + let body = r#"{"id":"abc-123","path":"/anon/x/y/abc-123","share_url":"https://example.test/anon/abc-123"}"#; let resp = format!( "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", body.len(), @@ -767,7 +767,7 @@ fn share_anon_fixture() -> ( let (mut stream, _) = listener.accept().unwrap(); let mut buf = [0u8; 4096]; let _ = stream.read(&mut buf); - let body = r#"{"id":"abc","url":"https://example.test/anon/abc"}"#; + let body = r#"{"id":"abc","path":"/anon/x/y/abc","share_url":"https://example.test/anon/abc"}"#; let resp = format!( "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", body.len(), @@ -817,7 +817,7 @@ fn one_shot_anon_server() -> (u16, std::thread::JoinHandle<()>) { let (mut stream, _) = listener.accept().unwrap(); let mut buf = [0u8; 4096]; let _ = stream.read(&mut buf); - let body = r#"{"id":"abc","url":"https://example.test/anon/abc"}"#; + let body = r#"{"id":"abc","path":"/anon/x/y/abc","share_url":"https://example.test/anon/abc"}"#; let resp = format!( "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", body.len(), From 4283a31a9c4e65aaf6fbe904165ccf14185c8414 Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 16:48:06 -0400 Subject: [PATCH 34/36] fix(path-cli): show 
full error chain on Pathbase upload failure MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit A real upload to pathbase-dev returned Error: upload to alex/pathstash failed: Communication Error: error sending request for url (...) — useless: the user can't tell whether it's a timeout, a TLS issue, a connect refusal, or a body error. progenitor's CommunicationError wraps a reqwest::Error, but the default Display only shows the top level. The actually-useful detail (Connection refused / handshake failed / Timeout etc.) sits two or three levels down in source(). Add two helpers: - full_chain(err): walks std::error::Error::source() and joins each link's Display with ": ". - reqwest_hint(err): classifies the reqwest error via is_timeout / is_connect / is_body / is_decode and returns a short hint, falling back to full_chain when no specific bucket fits. Wire both into the catch-all arms of api_me, api_logout, anon_paths_post, paths_post, repos_post, and paths_download. The fallback `Err(e)` arms now use full_chain instead of plain `{e}`. 
After this: Error: upload to alex/pathstash failed: request timed out after 30s — try again, or shrink the upload Error: upload to alex/pathstash failed: couldn't connect to server: error sending request: dns error: failed to lookup address: nodename nor servname provided --- crates/path-cli/src/cmd_pathbase.rs | 74 ++++++++++++++++++++++++++--- 1 file changed, 67 insertions(+), 7 deletions(-) diff --git a/crates/path-cli/src/cmd_pathbase.rs b/crates/path-cli/src/cmd_pathbase.rs index 55df9b0..93abb5a 100644 --- a/crates/path-cli/src/cmd_pathbase.rs +++ b/crates/path-cli/src/cmd_pathbase.rs @@ -153,7 +153,10 @@ pub(crate) fn api_logout(base_url: &str, token: &str) -> Result<()> { Err(pathbase_client::Error::UnexpectedResponse(resp)) => { bail!("server returned {}", resp.status()) } - Err(e) => Err(anyhow!("connect to {base_url}: {e}")), + Err(pathbase_client::Error::CommunicationError(e)) => { + bail!("connect to {base_url}: {}", reqwest_hint(&e)) + } + Err(e) => Err(anyhow!("connect to {base_url}: {}", full_chain(&e))), } } @@ -191,7 +194,10 @@ pub(crate) fn api_me(base_url: &str, token: &str) -> Result { Err(pathbase_client::Error::InvalidResponsePayload(_, _)) => { bail!("{base_url} isn't a Pathbase deployment (non-JSON /api/v1/users/me response)") } - Err(e) => Err(anyhow!("connect to {base_url}: {e}")), + Err(pathbase_client::Error::CommunicationError(e)) => { + bail!("connect to {base_url}: {}", reqwest_hint(&e)) + } + Err(e) => Err(anyhow!("connect to {base_url}: {}", full_chain(&e))), } } @@ -373,7 +379,10 @@ pub(crate) fn anon_paths_post(base_url: &str, document_json: &str) -> Result Err(anyhow!("anon upload failed: {e}")), + Err(pathbase_client::Error::CommunicationError(e)) => { + bail!("anon upload failed: {}", reqwest_hint(&e)) + } + Err(e) => Err(anyhow!("anon upload failed: {}", full_chain(&e))), } } @@ -424,7 +433,10 @@ pub(crate) fn paths_post( bail!("upload to {owner}/{repo} failed ({status}): {msg}") } } - Err(e) => Err(anyhow!("upload to 
{owner}/{repo} failed: {e}")), + Err(pathbase_client::Error::CommunicationError(e)) => { + bail!("upload to {owner}/{repo} failed: {}", reqwest_hint(&e)) + } + Err(e) => Err(anyhow!("upload to {owner}/{repo} failed: {}", full_chain(&e))), } } @@ -434,6 +446,41 @@ fn error_message(body: &str) -> Option { .and_then(|v| v.get("error").and_then(|e| e.as_str()).map(String::from)) } +/// Walk an error's `source()` chain and join each link's `Display` +/// with `: `. progenitor's `CommunicationError(reqwest::Error)` +/// renders as "error sending request" by default — the actually-useful +/// detail (timeout / connection refused / TLS handshake) sits two or +/// three levels down in `source()`. This surfaces it. +fn full_chain(err: &(dyn std::error::Error + 'static)) -> String { + let mut s = err.to_string(); + let mut cur = err.source(); + while let Some(c) = cur { + s.push_str(": "); + s.push_str(&c.to_string()); + cur = c.source(); + } + s +} + +/// Classify a `reqwest::Error` into a short hint so users can tell +/// "took too long" from "couldn't connect" from "server hung up." Falls +/// back to the full source chain when no specific hint applies. +fn reqwest_hint(err: &reqwest::Error) -> String { + if err.is_timeout() { + return "request timed out after 30s — try again, or shrink the upload".to_string(); + } + if err.is_connect() { + return format!("couldn't connect to server: {}", full_chain(err)); + } + if err.is_body() { + return format!("body error: {}", full_chain(err)); + } + if err.is_decode() { + return format!("response decode error: {}", full_chain(err)); + } + full_chain(err) +} + /// `POST /api/v1/repos` — create a repo owned by the authenticated user. /// Treats 409 (already exists) as success so callers can use this /// idempotently to ensure pathstash exists before uploading to it. 
@@ -461,7 +508,10 @@ pub(crate) fn repos_post(base_url: &str, token: &str, name: &str) -> Result<()> 409 => Ok(()), code => bail!("creating repo {name} returned unexpected status: HTTP {code}"), }, - Err(e) => Err(anyhow!("creating repo {name} failed: {e}")), + Err(pathbase_client::Error::CommunicationError(e)) => { + bail!("creating repo {name} failed: {}", reqwest_hint(&e)) + } + Err(e) => Err(anyhow!("creating repo {name} failed: {}", full_chain(&e))), } } @@ -504,7 +554,14 @@ pub(crate) fn paths_download( let msg = error_message(&body).unwrap_or_else(|| short_body(&body)); bail!("download of {owner}/{repo}/{slug} failed ({status}): {msg}") } - Err(e) => Err(anyhow!("download of {owner}/{repo}/{slug} failed: {e}")), + Err(pathbase_client::Error::CommunicationError(e)) => bail!( + "download of {owner}/{repo}/{slug} failed: {}", + reqwest_hint(&e) + ), + Err(e) => Err(anyhow!( + "download of {owner}/{repo}/{slug} failed: {}", + full_chain(&e) + )), } } @@ -859,7 +916,10 @@ mod tests { paths_download(&server.base(), Some("tok"), "alex", "pathstash", "my-path").unwrap(); let got_v: serde_json::Value = serde_json::from_str(&got).unwrap(); let want_v: serde_json::Value = serde_json::from_str(body).unwrap(); - assert_eq!(got_v, want_v, "downloaded body should parse to the same value"); + assert_eq!( + got_v, want_v, + "downloaded body should parse to the same value" + ); let req = String::from_utf8(server.request()).unwrap(); assert!( From b492274b7c2026f41be2145788e063c984a18dbb Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 17:07:02 -0400 Subject: [PATCH 35/36] refactor(path-cli): refresh spec; route api_redeem through typed client The pathbase-dev spec gained a few things since the last refresh, and one of them was previously the only reason for hand-rolled HTTP in this module: /api/v1/auth/cli/redeem now appears in the OpenAPI contract with a documented RedeemBody / RedeemResponse pair. 
Changes: * scripts/refresh-pathbase-openapi.sh: drop operations whose request/response bodies use content types other than application/json. The new spec includes a NDJSON-streamed /paths/.../steps endpoint; progenitor 0.14 panics on it ('UnexpectedFormat: unexpected content type: application/x-ndjson'). The CLI doesn't use that surface, so strip it during refresh. * crates/pathbase-client/openapi.json: refresh from https://pathbase-dev.fly.dev. New endpoints (sessions, dev auth, github callback, signups) come along for the ride; the only ones the CLI cares about kept their shapes. * api_redeem now goes through client.cli_redeem; http_client and the local RedeemResponse struct are gone. With this change every Pathbase HTTP call goes through the typed client. * Doc-tighten: paths_post mentions that is_public defaults to true on the server, so we always pass Some(false) explicitly to keep share's default behavior secret. paths_download notes that private paths return 404 (not 401) per the new spec, with the error pointing at "try the UUID, or path auth login". cmd_pathbase's module-level doc no longer claims to host an HTTP client. cargo test -p path-cli: 233 lib + 39 integration green. cargo clippy --workspace -- -D warnings: clean. --- crates/path-cli/src/cmd_import.rs | 5 +- crates/path-cli/src/cmd_pathbase.rs | 121 +++--- crates/path-cli/src/cmd_share.rs | 8 +- crates/path-cli/tests/integration.rs | 40 +- crates/pathbase-client/openapi.json | 626 ++++++++++++++++++++++++++- scripts/refresh-pathbase-openapi.sh | 24 +- 6 files changed, 734 insertions(+), 90 deletions(-) diff --git a/crates/path-cli/src/cmd_import.rs b/crates/path-cli/src/cmd_import.rs index ad80315..217b3b0 100644 --- a/crates/path-cli/src/cmd_import.rs +++ b/crates/path-cli/src/cmd_import.rs @@ -998,7 +998,10 @@ fn derive_opencode( /// Derive a single opencode session given an explicit session id. 
#[cfg(not(target_os = "emscripten"))] -pub(crate) fn derive_opencode_session(session: &str, no_snapshot_diffs: bool) -> Result { +pub(crate) fn derive_opencode_session( + session: &str, + no_snapshot_diffs: bool, +) -> Result { let manager = toolpath_opencode::OpencodeConvo::new(); let config = toolpath_opencode::derive::DeriveConfig { no_snapshot_diffs, diff --git a/crates/path-cli/src/cmd_pathbase.rs b/crates/path-cli/src/cmd_pathbase.rs index 93abb5a..313d815 100644 --- a/crates/path-cli/src/cmd_pathbase.rs +++ b/crates/path-cli/src/cmd_pathbase.rs @@ -1,9 +1,13 @@ //! Shared Pathbase client helpers. //! -//! Hosts the HTTP client and session-storage logic used by `cmd_auth`, -//! `cmd_import`, and `cmd_export`. Config-dir resolution lives in the -//! sibling `config` module so `cmd_cache` (which doesn't depend on -//! reqwest and must build on emscripten) can reuse it. +//! Wraps the typed `pathbase-client` (generated from +//! `crates/pathbase-client/openapi.json` — refresh via +//! `scripts/refresh-pathbase-openapi.sh`) plus session-storage logic +//! used by `cmd_auth`, `cmd_import`, `cmd_export`, and `cmd_share`. +//! Every Pathbase HTTP call now goes through the typed client; no +//! hand-rolled reqwest left in this module. Config-dir resolution lives +//! in the sibling `config` module so `cmd_cache` (which doesn't depend +//! on reqwest and must build on emscripten) can reuse it. 
use anyhow::{Context, Result, anyhow, bail}; use serde::{Deserialize, Serialize}; @@ -99,48 +103,46 @@ pub(crate) fn prompt_line(prompt: &str) -> Result { // ── HTTP layer ────────────────────────────────────────────────────────── -pub(crate) fn http_client() -> Result { - reqwest::blocking::Client::builder() - .user_agent(concat!("path-cli/", env!("CARGO_PKG_VERSION"))) - .timeout(std::time::Duration::from_secs(30)) - .build() - .context("failed to build HTTP client") -} - -#[derive(Deserialize)] -pub(crate) struct RedeemResponse { - pub token: String, - pub user: User, -} - pub(crate) fn api_redeem(base_url: &str, code: &str) -> Result<(String, User)> { - let client = http_client()?; - let resp = client - .post(format!("{base_url}/api/v1/auth/cli/redeem")) - .json(&serde_json::json!({ "code": code })) - .send() - .with_context(|| format!("connect to {base_url}"))?; - - let status = resp.status(); - let body = resp.text().unwrap_or_default(); - - if !status.is_success() { - if status == reqwest::StatusCode::UNAUTHORIZED { - bail!("code is invalid, already used, or expired — generate a new one"); + let body = pathbase_client::types::RedeemBody { + code: code.to_string(), + }; + let client = pathbase_client(base_url, None)?; + match block_on(client.cli_redeem(&body)) { + Ok(resp) => { + let inner = resp.into_inner(); + let u = inner.user; + Ok(( + inner.token, + User { + id: u.id.to_string(), + username: u.username, + email: u.email, + display_name: u.display_name, + avatar_url: u.avatar_url, + }, + )) } - if status == reqwest::StatusCode::BAD_REQUEST { - let msg = serde_json::from_str::(&body) - .ok() - .and_then(|v| v.get("error").and_then(|e| e.as_str()).map(String::from)) - .unwrap_or_else(|| body.clone()); - bail!("{msg}"); + Err(pathbase_client::Error::ErrorResponse(resp)) => match resp.status().as_u16() { + 401 => bail!("code is invalid, already used, or expired — generate a new one"), + 400 => bail!("invalid code format"), + code => bail!("redeem failed 
(HTTP {code})"), + }, + Err(pathbase_client::Error::UnexpectedResponse(resp)) => { + let status = resp.status(); + let body = block_on(resp.text()).unwrap_or_default(); + let msg = error_message(&body).unwrap_or_else(|| short_body(&body)); + if msg.is_empty() { + bail!("redeem failed ({status})") + } else { + bail!("redeem failed ({status}): {msg}") + } } - bail!("redeem failed ({status}): {body}"); + Err(pathbase_client::Error::CommunicationError(e)) => { + bail!("connect to {base_url}: {}", reqwest_hint(&e)) + } + Err(e) => Err(anyhow!("redeem failed: {}", full_chain(&e))), } - - let parsed: RedeemResponse = - serde_json::from_str(&body).with_context(|| format!("parsing redeem response: {body}"))?; - Ok((parsed.token, parsed.user)) } pub(crate) fn api_logout(base_url: &str, token: &str) -> Result<()> { @@ -387,10 +389,16 @@ pub(crate) fn anon_paths_post(base_url: &str, document_json: &str) -> Result//paths/`) as the unguessable share-by-link +/// form, but won't appear in any user's listing. pub(crate) fn paths_post( base_url: &str, token: &str, @@ -516,16 +524,18 @@ pub(crate) fn repos_post(base_url: &str, token: &str, name: &str) -> Result<()> } /// `GET /api/v1/repos/{owner}/{repo}/paths/{slug}/download` — fetch the -/// raw toolpath JSON for a path. Public paths and unlisted-but-shared -/// paths both download without authentication; only fully private paths -/// (gated by an ACL beyond `is_public=false`) require auth. +/// reconstructed Graph document for a path. +/// +/// Per the spec: private paths return 404 unless the caller is +/// owner-authenticated *or* addresses the path by its UUID +/// (the unguessable share-by-link form). The 404 message therefore +/// hints at both possibilities — "not found, or you're not the owner." /// /// Returns a serialized JSON string. 
The generated client decodes into /// `serde_json::Map`, which we re-serialize on the way out — keys may /// be reordered relative to the server's bytes, but the consumer parses /// to `Graph` and re-serializes anyway, so byte-fidelity isn't a real -/// requirement. The downstream `write_cached` writes pretty-printed -/// JSON regardless. +/// requirement. pub(crate) fn paths_download( base_url: &str, token: Option<&str>, @@ -540,12 +550,11 @@ pub(crate) fn paths_download( serde_json::to_string(&map).context("re-serializing downloaded path") } Err(pathbase_client::Error::ErrorResponse(resp)) => match resp.status() { - reqwest::StatusCode::UNAUTHORIZED => bail!( - "this path is private and requires authentication — run `path auth login --url {base_url}` and retry" + reqwest::StatusCode::NOT_FOUND => bail!( + "{owner}/{repo}/{slug} not found on {base_url} (or it's a private path \ + and you're not the owner — try the path's UUID instead, or \ + `path auth login --url {base_url}`)" ), - reqwest::StatusCode::NOT_FOUND => { - bail!("path {owner}/{repo}/{slug} not found on {base_url}") - } status => bail!("download of {owner}/{repo}/{slug} failed ({status})"), }, Err(pathbase_client::Error::UnexpectedResponse(resp)) => { diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs index 3143b28..b9ef8a9 100644 --- a/crates/path-cli/src/cmd_share.rs +++ b/crates/path-cli/src/cmd_share.rs @@ -902,9 +902,11 @@ fn derive_session( Harness::Claude => { crate::cmd_import::derive_claude_session(project.expect("project_keyed"), session) } - Harness::Gemini => { - crate::cmd_import::derive_gemini_session(project.expect("project_keyed"), session, false) - } + Harness::Gemini => crate::cmd_import::derive_gemini_session( + project.expect("project_keyed"), + session, + false, + ), Harness::Pi => { crate::cmd_import::derive_pi_session(project.expect("project_keyed"), session, None) } diff --git a/crates/path-cli/tests/integration.rs 
b/crates/path-cli/tests/integration.rs index 62fd5ab..b75506b 100644 --- a/crates/path-cli/tests/integration.rs +++ b/crates/path-cli/tests/integration.rs @@ -767,7 +767,8 @@ fn share_anon_fixture() -> ( let (mut stream, _) = listener.accept().unwrap(); let mut buf = [0u8; 4096]; let _ = stream.read(&mut buf); - let body = r#"{"id":"abc","path":"/anon/x/y/abc","share_url":"https://example.test/anon/abc"}"#; + let body = + r#"{"id":"abc","path":"/anon/x/y/abc","share_url":"https://example.test/anon/abc"}"#; let resp = format!( "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", body.len(), @@ -817,7 +818,8 @@ fn one_shot_anon_server() -> (u16, std::thread::JoinHandle<()>) { let (mut stream, _) = listener.accept().unwrap(); let mut buf = [0u8; 4096]; let _ = stream.read(&mut buf); - let body = r#"{"id":"abc","path":"/anon/x/y/abc","share_url":"https://example.test/anon/abc"}"#; + let body = + r#"{"id":"abc","path":"/anon/x/y/abc","share_url":"https://example.test/anon/abc"}"#; let resp = format!( "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", body.len(), @@ -860,7 +862,14 @@ fn share_rewrites_cache_when_session_has_grown() { cmd() .env("HOME", home) .env("TOOLPATH_CONFIG_DIR", cfg.path()) - .args(["share", "--harness", "claude", "--session", "session-grow", "--project"]) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-grow", + "--project", + ]) .arg(&project) .args(["--anon", "--url"]) .arg(format!("http://127.0.0.1:{port1}")) @@ -873,11 +882,21 @@ fn share_rewrites_cache_when_session_has_grown() { .unwrap() .filter_map(|e| e.ok()) .collect(); - assert_eq!(cache_files.len(), 1, "expected one cache entry after first share"); + assert_eq!( + cache_files.len(), + 1, + "expected one cache entry after first share" + ); let cache_path = cache_files[0].path(); let cache_v1 = std::fs::read_to_string(&cache_path).unwrap(); - assert!(cache_v1.contains("reply-1"), "v1 
cache must contain reply-1"); - assert!(!cache_v1.contains("reply-2"), "v1 cache must not contain reply-2 yet"); + assert!( + cache_v1.contains("reply-1"), + "v1 cache must contain reply-1" + ); + assert!( + !cache_v1.contains("reply-2"), + "v1 cache must not contain reply-2 yet" + ); // Conversation continues: append two more turns to the session JSONL. let mut grown = initial.clone(); @@ -894,7 +913,14 @@ fn share_rewrites_cache_when_session_has_grown() { cmd() .env("HOME", home) .env("TOOLPATH_CONFIG_DIR", cfg.path()) - .args(["share", "--harness", "claude", "--session", "session-grow", "--project"]) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-grow", + "--project", + ]) .arg(&project) .args(["--anon", "--url"]) .arg(format!("http://127.0.0.1:{port2}")) diff --git a/crates/pathbase-client/openapi.json b/crates/pathbase-client/openapi.json index 601914b..9565231 100644 --- a/crates/pathbase-client/openapi.json +++ b/crates/pathbase-client/openapi.json @@ -2,11 +2,11 @@ "openapi": "3.1.0", "info": { "title": "Pathbase API", - "description": "", + "description": "HTTP API for Pathbase — repositories, agent trace paths, computation graphs, and anonymous share links.\n\n**Stability.** v1 endpoints are stable in shape; additive changes (new fields, new endpoints, broader query params) ship without a version bump. Breaking changes get a new prefix (`/api/v2/...`) and a deprecation window.\n\n**Spec format.** OpenAPI 3.1, served live at `/api/v1/openapi.json`. The `x-pathbase-version` response header on every call carries the running build's `+` so consumers can pin or correlate.\n\n**Auth.** Most endpoints require a Pathbase session cookie or a `pat_…` bearer token (see the `bearerAuth` scheme). Unauthenticated endpoints — the `Anon` and `Signups` namespaces, public profile reads — are clearly tagged. 
Per-operation `security` annotations reflect the actual gate enforced by the handler.", "license": { "name": "" }, - "version": "1.0.0" + "version": "1.1.0" }, "paths": { "/api/v1/anon/paths": { @@ -14,6 +14,7 @@ "tags": [ "Anon" ], + "summary": "Upload a single-path Graph document anonymously. Stored under the\nshared `anon` user; the response carries a UUID-based `share_url`\nthe uploader can hand out. Per-IP rate-limited and capped by\n`body_limits.anon_upload_bytes`.", "operationId": "create_anon_path", "requestBody": { "content": { @@ -48,11 +49,244 @@ } } }, + "/api/v1/anon/paths/{id}": { + "get": { + "tags": [ + "Anon" + ], + "summary": "Read an anon-uploaded trace by its UUID. No auth, no rate limit.", + "description": "The handler explicitly overrides the global `Cache-Control` and\n`Vary` defaults: anon responses don't depend on the `Authorization`\nheader (the endpoint ignores it), so the auth-keyed cache defaults\nwould needlessly fragment any CDN/proxy cache.", + "operationId": "get_anon_path", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Anon path UUID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Reconstructed toolpath document", + "content": { + "application/json": { + "schema": {} + } + } + }, + "404": { + "description": "No anon path with that ID" + } + } + } + }, + "/api/v1/auth/cli/redeem": { + "post": { + "tags": [ + "Auth" + ], + "summary": "Second half of the CLI browser-login flow. Redeems a one-time code\nfor a long-lived `pat_…` bearer token bound to a `cli` session. 
No\nauth required — the code is the credential.", + "operationId": "cli_redeem", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RedeemBody" + }, + "example": { + "code": "BCDF23GH" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Bearer token + user", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RedeemResponse" + } + } + } + }, + "400": { + "description": "Invalid code format" + }, + "401": { + "description": "Code expired or unknown" + } + } + } + }, + "/api/v1/auth/cli/request-grant": { + "post": { + "tags": [ + "Auth" + ], + "summary": "First half of the CLI browser-login flow. The signed-in browser\nasks for a short, human-readable code that the user types into a\nCLI (`pathbase login`); the CLI then redeems it for a long-lived\nbearer token via `/auth/cli/redeem`.", + "operationId": "cli_request_grant", + "responses": { + "200": { + "description": "Short-lived grant code", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CliGrantResponse" + }, + "example": { + "code": "BCDF23GH", + "expires_in": 600 + } + } + } + }, + "401": { + "description": "Not authenticated" + } + }, + "security": [ + { + "bearerAuth": [] + } + ] + } + }, + "/api/v1/auth/dev": { + "get": { + "tags": [ + "Auth" + ], + "summary": "Local-development sign-in shortcut — creates / signs in as `dev`.\nDisabled (400) when GitHub OAuth is configured, so it can never\nfire in production.", + "operationId": "dev_login", + "parameters": [ + { + "name": "redirect", + "in": "query", + "description": "Same-origin relative path to land on after sign-in.", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "302": { + "description": "Redirect after signing in as the local `dev` user" + }, + "400": { + "description": "Disabled — GitHub OAuth is configured" + } + } + } + }, + "/api/v1/auth/github": { + "get": 
{ + "tags": [ + "Auth" + ], + "summary": "Start the GitHub OAuth handshake — issue a 302 to GitHub's\n`authorize` URL with `state` carrying the validated `redirect`.\nFalls back to `/api/v1/auth/dev` when GitHub OAuth is unconfigured.", + "operationId": "github_redirect", + "parameters": [ + { + "name": "redirect", + "in": "query", + "description": "Same-origin relative path to land on after sign-in. Bad values are silently dropped (open-redirect defense).", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "302": { + "description": "Redirect to GitHub authorize URL (or to `/api/v1/auth/dev` when GitHub OAuth is unconfigured)" + } + } + } + }, + "/api/v1/auth/github/callback": { + "get": { + "tags": [ + "Auth" + ], + "summary": "GitHub OAuth callback — exchange the `code` for a token, look up or\ncreate the matching Pathbase user, and redirect to the validated\n`state` target (or `/`).", + "operationId": "github_callback", + "parameters": [ + { + "name": "code", + "in": "query", + "description": "GitHub-issued authorization code", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "state", + "in": "query", + "description": "Echoed redirect target from `/api/v1/auth/github`", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "302": { + "description": "Redirect after creating or linking the Pathbase account" + } + } + } + }, + "/api/v1/auth/login": { + "post": { + "tags": [ + "Auth" + ], + "summary": "Sign in with email + password. Sets `pb_session` cookie. 
Returns\nthe user; for the bearer token instead, use the CLI grant flow.", + "operationId": "login", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LoginBody" + }, + "example": { + "email": "alice@example.com", + "password": "correct-horse-battery-staple" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Signed in", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/User" + } + } + } + }, + "401": { + "description": "Invalid credentials" + } + } + } + }, "/api/v1/auth/logout": { "post": { "tags": [ "Auth" ], + "summary": "Invalidate the caller's session (cookie or bearer-token) and clear\nthe `pb_session` cookie. Idempotent — succeeds even with no session.", "operationId": "logout", "responses": { "204": { @@ -61,11 +295,123 @@ } } }, + "/api/v1/auth/register": { + "post": { + "tags": [ + "Auth" + ], + "summary": "Create a new account with username + email + password and sign the\ncaller in. Sets `pb_session` cookie and creates the default\n`pathstash` repo for quick uploads.", + "operationId": "register", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RegisterBody" + }, + "example": { + "email": "alice@example.com", + "password": "correct-horse-battery-staple", + "username": "alice" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "Account created and signed in", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/User" + } + } + } + }, + "400": { + "description": "Invalid username, email, or password" + }, + "409": { + "description": "Username or email already taken" + } + } + } + }, + "/api/v1/auth/sessions": { + "get": { + "tags": [ + "Auth" + ], + "summary": "List the caller's active sessions (web + CLI). 
The session that\nissued the request is flagged `is_current`.", + "operationId": "list_sessions", + "responses": { + "200": { + "description": "Sessions for the authenticated user", + "content": { + "application/json": { + "schema": { + "type": "array", + "items": { + "$ref": "#/components/schemas/SessionSummary" + } + } + } + } + }, + "401": { + "description": "Not authenticated" + } + }, + "security": [ + { + "bearerAuth": [] + } + ] + } + }, + "/api/v1/auth/sessions/{id}": { + "delete": { + "tags": [ + "Auth" + ], + "summary": "Revoke a specific session by ID. 404 (not 401/403) for sessions\nowned by other users — keeps the existence of those sessions\ninvisible to the caller.", + "operationId": "revoke_session", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Session ID to revoke", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "204": { + "description": "Session revoked" + }, + "401": { + "description": "Not authenticated" + }, + "404": { + "description": "Session not found or not owned by the caller" + } + }, + "security": [ + { + "bearerAuth": [] + } + ] + } + }, "/api/v1/health": { "get": { "tags": [ "Health" ], + "summary": "Liveness + readiness probe — confirms the database is reachable.\nReturns 503 (still as JSON) when the round-trip fails so an\norchestrator can distinguish \"process up, dependency down\" from\n\"process down.\"", "operationId": "health", "responses": { "200": { @@ -96,19 +442,24 @@ "tags": [ "Repos" ], + "summary": "Create a new repository owned by the caller. 
Names are unique per\nowner; collisions return 409.", "operationId": "create_repo", "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateRepoBody" + }, + "example": { + "description": "Agent runs from the rebuild week", + "name": "my-traces" } } }, "required": true }, "responses": { - "200": { + "201": { "description": "Created repository", "content": { "application/json": { @@ -120,8 +471,16 @@ }, "401": { "description": "Not authenticated" + }, + "409": { + "description": "A repo with this name already exists for the caller" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/repos/{owner}/{repo}": { @@ -129,6 +488,7 @@ "tags": [ "Repos" ], + "summary": "Fetch a repository by `owner/name`. Public read; the contained\npaths/graphs enforce their own visibility on read.", "operationId": "get_repo", "parameters": [ { @@ -170,6 +530,7 @@ "tags": [ "Repos" ], + "summary": "Delete a repository, cascading to all paths and graphs it contains.\nOwner-only.", "operationId": "delete_repo", "parameters": [ { @@ -201,12 +562,18 @@ "404": { "description": "Not found" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] }, "patch": { "tags": [ "Repos" ], + "summary": "Update a repo's mutable fields. Only the owner may call this;\nnon-owners get 401.", "operationId": "update_repo", "parameters": [ { @@ -233,6 +600,9 @@ "application/json": { "schema": { "$ref": "#/components/schemas/UpdateRepoBody" + }, + "example": { + "description": "Updated tagline" } } }, @@ -255,7 +625,12 @@ "404": { "description": "Not found" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/repos/{owner}/{repo}/graphs": { @@ -263,6 +638,7 @@ "tags": [ "Graphs" ], + "summary": "List graphs in a repo. Public read.", "operationId": "list_graphs", "parameters": [ { @@ -317,6 +693,7 @@ "tags": [ "Graphs" ], + "summary": "Upload a multi-path Graph document. 
Inline paths (`PathOrRef::Path`)\nare persisted as new TracePaths in the same repo, deduped by their\ntoolpath ID. Refs to existing paths are kept as-is. Caller must own\nthe repo.", "operationId": "create_graph", "parameters": [ { @@ -365,7 +742,12 @@ "401": { "description": "Not authorized" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/repos/{owner}/{repo}/graphs/{slug}": { @@ -373,6 +755,7 @@ "tags": [ "Graphs" ], + "summary": "Fetch a graph with its full reconstructed multi-path document.\nPublic read; constituent path visibility is enforced when those\npaths are accessed individually.", "operationId": "get_graph", "parameters": [ { @@ -396,7 +779,7 @@ { "name": "slug", "in": "path", - "description": "Graph slug", + "description": "Graph slug, or the graph's UUID for unguessable share-by-link access", "required": true, "schema": { "type": "string" @@ -423,6 +806,7 @@ "tags": [ "Graphs" ], + "summary": "Delete a graph. Owner only. Detaches but does not delete the\nconstituent TracePaths — those remain reachable as standalone paths.", "operationId": "delete_graph", "parameters": [ { @@ -463,7 +847,12 @@ "404": { "description": "Not found" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/repos/{owner}/{repo}/paths": { @@ -471,6 +860,7 @@ "tags": [ "Paths" ], + "summary": "List paths in a repo. Visibility-filtered: callers see all paths if\nthey own the repo, public paths only otherwise.", "operationId": "list_paths", "parameters": [ { @@ -525,6 +915,7 @@ "tags": [ "Paths" ], + "summary": "Upload a single-path graph as a new path under `{owner}/{repo}`.\nMulti-path graphs go to `POST .../graphs`. 
Caller must own the repo.", "operationId": "create_path", "parameters": [ { @@ -573,7 +964,12 @@ "401": { "description": "Not authorized" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/repos/{owner}/{repo}/paths/{slug}": { @@ -581,6 +977,7 @@ "tags": [ "Paths" ], + "summary": "Fetch a path with its full reconstructed document. Visibility-gated:\nprivate paths return 404 unless the caller owns the repo or\naddressed the path by its UUID (the unguessable share-by-link form).\nConditional via `If-None-Match`.", "operationId": "get_path", "parameters": [ { @@ -604,7 +1001,7 @@ { "name": "slug", "in": "path", - "description": "Path slug", + "description": "Path slug, or the path's UUID for unguessable share-by-link access", "required": true, "schema": { "type": "string" @@ -631,6 +1028,7 @@ "tags": [ "Paths" ], + "summary": "Delete a path. Owner only. Cascades to its step rows.", "operationId": "delete_path", "parameters": [ { @@ -654,7 +1052,7 @@ { "name": "slug", "in": "path", - "description": "Path slug", + "description": "Path slug, or the path's UUID for unguessable share-by-link access", "required": true, "schema": { "type": "string" @@ -671,12 +1069,18 @@ "404": { "description": "Not found" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] }, "patch": { "tags": [ "Paths" ], + "summary": "Patch a path's mutable fields — currently just `is_public`. Owner\nonly. 
Use this to flip a private upload public for sharing.", "operationId": "update_path", "parameters": [ { @@ -700,7 +1104,7 @@ { "name": "slug", "in": "path", - "description": "Path slug", + "description": "Path slug, or the path's UUID for unguessable share-by-link access", "required": true, "schema": { "type": "string" @@ -712,6 +1116,9 @@ "application/json": { "schema": { "$ref": "#/components/schemas/UpdatePathBody" + }, + "example": { + "is_public": true } } }, @@ -734,7 +1141,12 @@ "404": { "description": "Not found" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/repos/{owner}/{repo}/paths/{slug}/chat": { @@ -742,6 +1154,7 @@ "tags": [ "Paths" ], + "summary": "Render the path's HEAD-ancestor chain as a chat-projection — a\ndensely-indexed, pre-classified, optionally pre-rendered transcript.\nVisibility-gated like `GET .../paths/{slug}`.", "operationId": "get_path_chat", "parameters": [ { @@ -765,7 +1178,7 @@ { "name": "slug", "in": "path", - "description": "Path slug", + "description": "Path slug, or the path's UUID for unguessable share-by-link access", "required": true, "schema": { "type": "string" @@ -803,6 +1216,7 @@ "tags": [ "Paths" ], + "summary": "Stream the path's reconstructed Graph document as raw JSON — the\ninverse of `POST .../paths`. Visibility-gated; private paths return\n404 unless owner-authenticated or addressed by UUID.", "operationId": "download_path", "parameters": [ { @@ -826,7 +1240,7 @@ { "name": "slug", "in": "path", - "description": "Path slug", + "description": "Path slug, or the path's UUID for unguessable share-by-link access", "required": true, "schema": { "type": "string" @@ -851,6 +1265,7 @@ "tags": [ "Signups" ], + "summary": "Capture an email + source pair from a marketing form. 
Idempotent\nper (email, source); per-IP rate-limited; the response intentionally\ncarries no detail so a caller can't enumerate which addresses are\nalready on a list.", "operationId": "create_signup", "requestBody": { "content": { @@ -887,6 +1302,7 @@ "tags": [ "Users" ], + "summary": "Return the authenticated caller's full profile, including their\nemail — fields no other endpoint exposes.", "operationId": "get_me", "responses": { "200": { @@ -902,18 +1318,28 @@ "401": { "description": "Not authenticated" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] }, "patch": { "tags": [ "Users" ], + "summary": "Patch the caller's mutable profile fields. Unspecified fields are\nleft untouched; pass `null` to clear an optional field.", "operationId": "update_me", "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/UpdateProfileBody" + }, + "example": { + "bio": "Curious about agents.", + "display_name": "Alice" } } }, @@ -933,7 +1359,12 @@ "401": { "description": "Not authenticated" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/users/{username}": { @@ -941,6 +1372,7 @@ "tags": [ "Users" ], + "summary": "Return another user's public profile. Email is never included for\nusers other than the caller; use `/users/me` for the caller's own\nfull profile.", "operationId": "get_user", "parameters": [ { @@ -975,6 +1407,7 @@ "tags": [ "Users" ], + "summary": "List a user's repositories. Public read; visibility filtering on\nthe contained paths/graphs happens at their respective endpoints,\nnot here.", "operationId": "list_repos", "parameters": [ { @@ -1022,6 +1455,7 @@ "schemas": { "ActorView": { "type": "object", + "description": "Resolved actor identity for a turn. 
Lookup happens once on the\nserver against the document's `meta.actors` block; turns reference\nthe actor by `u32` index into the projection's `actors` array.", "required": [ "id", "kind", @@ -1078,18 +1512,37 @@ } } }, + "AppendStepsResponse": { + "type": "object", + "required": [ + "inserted", + "path" + ], + "properties": { + "inserted": { + "type": "integer", + "description": "Number of steps newly inserted (existing `step_id`s are skipped).", + "minimum": 0 + }, + "path": { + "$ref": "#/components/schemas/TracePath" + } + } + }, "ChatCursor": { "type": "object", + "description": "Pagination cursor for fetching older turns. Reserved for future\n`?before=` requests; currently every projection returns the\nfull chain in one shot.", "properties": { "before": { "type": "string", - "description": "Canonical step id one step older than `turns[0]`. `None` when the\nchain reaches a root step in this projection.", + "description": "Canonical step ID one step older than `turns[0]`. `None` when the\nchain reaches a root step in this projection.", "nullable": true } } }, "ChatProjection": { "type": "object", + "description": "Top-level chat-projection response. Densely-indexed (`u32` IDs into\n`turns` / `actors`) so JSON parsing is cheap and the wire shape\navoids duplicating actor strings on every turn.", "required": [ "path_id", "actors", @@ -1122,7 +1575,7 @@ "items": { "type": "string" }, - "description": "Canonical step ids parallel to `turns`, for deep-links and\n`/step/{id}` fetches." + "description": "Canonical step IDs parallel to `turns`, for deep-links and\n`/step/{id}` fetches." }, "title": { "type": "string", @@ -1138,6 +1591,7 @@ }, "ChatTurn": { "type": "object", + "description": "One linearized turn along the HEAD-ancestor chain. 
Already\nclassified, optionally pre-rendered to HTML, with tool invocations\ninlined — the renderer drops in `text_html` and renders no further.", "required": [ "actor_id", "is_head", @@ -1226,6 +1680,7 @@ }, "ChatTurnKind": { "type": "string", + "description": "Pre-classified turn kind. Server-side classification follows the\nprecedence rules in `packages/frontend/src/classify.ts`; the client\nrenders verbatim without re-deriving.", "enum": [ "user", "assistant", @@ -1233,6 +1688,24 @@ "system" ] }, + "CliGrantResponse": { + "type": "object", + "required": [ + "code", + "expires_in" + ], + "properties": { + "code": { + "type": "string", + "description": "Display code the browser shows; the user types it into the CLI." + }, + "expires_in": { + "type": "integer", + "format": "int64", + "description": "Seconds until the grant expires (10 minutes)." + } + } + }, "CreateRepoBody": { "type": "object", "required": [ @@ -1276,6 +1749,7 @@ }, "Graph": { "type": "object", + "description": "A computation graph — a named ordered collection of paths within a\nrepo. Stored as header metadata plus `(graph_id, path_id, position)`\njunction rows; the full `document` is reconstructed on read.", "required": [ "id", "repo_id", @@ -1338,8 +1812,72 @@ } } }, + "LoginBody": { + "type": "object", + "required": [ + "email", + "password" + ], + "properties": { + "email": { + "type": "string" + }, + "password": { + "type": "string" + } + } + }, + "RedeemBody": { + "type": "object", + "required": [ + "code" + ], + "properties": { + "code": { + "type": "string" + } + } + }, + "RedeemResponse": { + "type": "object", + "required": [ + "token", + "user" + ], + "properties": { + "token": { + "type": "string", + "description": "Long-lived bearer token (`pat_…`). Send as\n`Authorization: Bearer ` from CLI calls." 
+ }, + "user": { + "$ref": "#/components/schemas/User" + } + } + }, + "RegisterBody": { + "type": "object", + "required": [ + "username", + "email", + "password" + ], + "properties": { + "email": { + "type": "string" + }, + "password": { + "type": "string", + "description": "Minimum 8 characters. Stored as an Argon2id hash." + }, + "username": { + "type": "string", + "description": "Lowercase ASCII alphanumerics, hyphens, and underscores. Must\nnot collide with the reserved `me` / `anon` namespaces." + } + } + }, "Repo": { "type": "object", + "description": "A repository — a named bucket of paths and graphs owned by a single\nuser. The `(owner_id, name)` pair is unique; `name` is also the\nURL-segment (\"alice/my-traces\").", "required": [ "id", "owner_id", @@ -1381,8 +1919,42 @@ } } }, + "SessionSummary": { + "type": "object", + "required": [ + "id", + "kind", + "created_at", + "expires_at", + "is_current" + ], + "properties": { + "created_at": { + "type": "string" + }, + "expires_at": { + "type": "string" + }, + "id": { + "type": "string" + }, + "is_current": { + "type": "boolean", + "description": "Marks the session that issued the request; clients should warn\nbefore letting the user revoke it (logs the current device out)." + }, + "kind": { + "type": "string", + "description": "`web` (cookie) or `cli` (bearer token)." + }, + "user_agent": { + "type": "string", + "nullable": true + } + } + }, "ToolDiff": { "type": "object", + "description": "Pre-split diff payload — picked from the first non-empty\n`change[k].raw` on a structural step. Lines are pre-split so the\nrenderer doesn't repeat the work.", "required": [ "path", "lines" @@ -1401,6 +1973,7 @@ }, "ToolInvocation": { "type": "object", + "description": "A `tool.invoke` step spliced inline next to its parent assistant\nturn. 
Saves the client a second pass over the path's step graph.", "required": [ "step_id", "actor_id", @@ -1424,7 +1997,7 @@ }, "step_id": { "type": "string", - "description": "Canonical step id of the tool.invoke step." + "description": "Canonical step ID of the tool.invoke step." }, "text": { "type": "string", @@ -1453,6 +2026,7 @@ }, "TracePath": { "type": "object", + "description": "A single agent trace path within a repo. Stored in three pieces:\nheader metadata (here), step rows (in `path_steps`), and the\nreconstructed full `document` (re-assembled on read for handlers\nthat need it).", "required": [ "id", "repo_id", @@ -1584,6 +2158,7 @@ }, "User": { "type": "object", + "description": "A registered Pathbase account. `email` is optional because GitHub\nOAuth users can have a private email; CLI / API consumers see the\ncaller's own email via `/users/me` but never another user's.", "required": [ "id", "username", @@ -1624,6 +2199,13 @@ } } } + }, + "securitySchemes": { + "bearerAuth": { + "type": "http", + "scheme": "bearer", + "description": "Pathbase personal access token (`pat_…`) obtained via the CLI grant flow (`POST /auth/cli/request-grant` then `POST /auth/cli/redeem`). Send as `Authorization: Bearer `." + } } }, "tags": [ diff --git a/scripts/refresh-pathbase-openapi.sh b/scripts/refresh-pathbase-openapi.sh index 0ba95c6..85fc574 100755 --- a/scripts/refresh-pathbase-openapi.sh +++ b/scripts/refresh-pathbase-openapi.sh @@ -60,6 +60,28 @@ jq ' | (if $ref_obj.description then .description = $ref_obj.description else . end) else . end; - walk(downconvert_type_array | downconvert_nullable_ref) + # Progenitor 0.14 only handles JSON request/response bodies. Drop + # operations that use non-JSON content types (e.g. application/x-ndjson + # for streaming endpoints) so the build doesnt panic on + # `UnexpectedFormat("unexpected content type: ...")`. 
The CLI doesnt + # use these surfaces; if it ever needs them, switch to a hand-rolled + # call (see api_redeem for the pattern). + def has_unsupported_content(op): + ((op.requestBody.content // {}) | keys | any(. != "application/json")) + or ((op.responses // {}) | to_entries | any( + ((.value.content // {}) | keys | any(. != "application/json")) + )); + + def strip_unsupported_operations: + .paths |= with_entries( + .value |= with_entries( + select( + (.key | IN("get", "put", "post", "delete", "patch", "options", "head", "trace") | not) + or (has_unsupported_content(.value) | not) + ) + ) + ) | .paths |= with_entries(select((.value | length) > 0)); + + walk(downconvert_type_array | downconvert_nullable_ref) | strip_unsupported_operations ' "${_tmp}" > "${_dest}" echo "refresh: wrote ${_dest} ($(wc -l < "${_dest}") lines, OpenAPI 3.0 form)" From 5936f745e75dd53dab481c1ef529f0382aa6be0d Mon Sep 17 00:00:00 2001 From: Alex Kesling Date: Fri, 8 May 2026 17:15:50 -0400 Subject: [PATCH 36/36] ci: bump site build Node to 22 pnpm 11.0.8 (latest) requires Node.js >= 22.13. The deploy-site workflow was pinned to Node 20, so `pnpm install` aborted with ERR_UNKNOWN_BUILTIN_MODULE on `node:sqlite`. --- .github/workflows/deploy-site.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/deploy-site.yml b/.github/workflows/deploy-site.yml index af356a7..f4f1d11 100644 --- a/.github/workflows/deploy-site.yml +++ b/.github/workflows/deploy-site.yml @@ -70,7 +70,7 @@ jobs: - uses: actions/setup-node@v4 with: - node-version: 20 + node-version: 22 cache: pnpm cache-dependency-path: site/pnpm-lock.yaml