diff --git a/.github/workflows/deploy-site.yml b/.github/workflows/deploy-site.yml index af356a7..f4f1d11 100644 --- a/.github/workflows/deploy-site.yml +++ b/.github/workflows/deploy-site.yml @@ -70,7 +70,7 @@ jobs: - uses: actions/setup-node@v4 with: - node-version: 20 + node-version: 22 cache: pnpm cache-dependency-path: site/pnpm-lock.yaml diff --git a/CLAUDE.md b/CLAUDE.md index 30062c5..010d558 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -84,6 +84,11 @@ cargo run -p path-cli -- import pi --project /path/to/project cargo run -p path-cli -- import pathbase cargo run -p path-cli -- import claude --project . --no-cache | path render md --input - +# Share an agent session to Pathbase (interactive picker, single-shot) +cargo run -p path-cli -- share +cargo run -p path-cli -- share --harness claude --session --project /path/to/project +cargo run -p path-cli -- share --url https://my-pathbase.example + # Export toolpath documents into external formats. is a cache id or a file path. cargo run -p path-cli -- export claude --input --project /tmp/sandbox cargo run -p path-cli -- export claude --input --output conv.jsonl @@ -218,3 +223,4 @@ Build the site after changes: `cd site && pnpm run build` (should produce 7 page - Format references for the agent on-disk formats we derive from live at `docs/agents/formats/`. The Claude Code format (`~/.claude/projects/…` JSONL) gets the deepest treatment — twelve focused docs at `docs/agents/formats/claude-code/` covering envelope, entry types, tools, session chains, compaction, writing-compatible JSONL, a linear walkthrough, and a version-keyed changelog. Sibling single-file references: `codex.md`, `gemini.md`, `opencode.md`. Keep them in sync with their derive crates when fields or behaviors change. - Interactive session selection: `path import ` (claude / gemini / pi / codex / opencode) auto-launches `fzf` when stdin and stderr are TTYs, `fzf` is on `$PATH`, and no `--session` was given. 
Multi-select (TAB) produces a `Graph` document; single-select produces a `Path`. The picker uses `path show --…` as its `--preview` command. When fzf isn't available, it falls back to most-recent (with `--project`) or prints the manual recipe (without). `path list --format tsv` is the documented machine-readable surface — column 1 is the project (for claude/gemini/pi) or session id (for codex/opencode), and the trailing column carries `first_user_message` so consumers can fuzzy-match by topic. - Conversation metadata title field: `toolpath-claude::ConversationMetadata`, `toolpath-gemini::ConversationMetadata`, and `toolpath-pi::SessionMeta` all expose `first_user_message: Option<String>` — the first non-empty user-prompt text. Populated cheaply during the metadata pass (single-pass for Claude/Gemini; one extra short read for Pi). Used by the picker UI but useful for any "list sessions by topic" surface.
#[derive(Debug, Clone)] -pub struct RepoSpec { - pub owner: String, - pub name: String, +pub(crate) struct RepoSpec { + pub(crate) owner: String, + pub(crate) name: String, } -fn parse_repo_spec(s: &str) -> std::result::Result { +pub(crate) fn parse_repo_spec(s: &str) -> std::result::Result { let (owner, name) = s .split_once('/') .ok_or_else(|| format!("expected owner/name, got `{s}`"))?; @@ -226,6 +226,19 @@ struct PathbaseExportArgs { public: bool, } +/// Pathbase upload knobs that don't depend on where the body came from. +/// Identical to [`PathbaseExportArgs`] minus the `input` field — the body +/// is supplied by the caller (read from cache, derived in memory, …). +#[cfg(not(target_os = "emscripten"))] +#[derive(Debug)] +pub(crate) struct PathbaseUploadArgs { + pub(crate) url: Option, + pub(crate) anon: bool, + pub(crate) repo: Option, + pub(crate) slug: Option, + pub(crate) public: bool, +} + fn run_claude(input: String, project: Option, output: Option) -> Result<()> { #[cfg(target_os = "emscripten")] { @@ -1208,39 +1221,62 @@ fn run_pathbase(args: PathbaseExportArgs) -> Result<()> { #[cfg(not(target_os = "emscripten"))] { - use crate::cmd_pathbase::{ - anon_paths_post, api_me, credentials_path, load_session, paths_post, repos_post, - resolve_url, - }; + use crate::cmd_pathbase::preflight_auth; let file = cache_ref(&args.input)?; let body = std::fs::read_to_string(&file) .with_context(|| format!("Failed to read {}", file.display()))?; - // Validate locally so we give a clean error rather than relying on - // the server to reject malformed payloads. 
- let doc = toolpath::v1::Graph::from_json(&body) - .map_err(|e| anyhow::anyhow!("Invalid toolpath document: {}", e))?; - - let stored = load_session(&credentials_path()?)?; - let base_url = match (&args.url, &stored) { - (Some(u), _) => resolve_url(Some(u.clone())), - (None, Some(s)) => s.url.clone(), - (None, None) => resolve_url(None), + let upload = PathbaseUploadArgs { + url: args.url, + anon: args.anon, + repo: args.repo, + slug: args.slug, + public: args.public, }; + let base_url = resolve_upload_base_url(&upload); + let needs_auth = upload.repo.is_some() || upload.public || upload.slug.is_some(); + let auth = preflight_auth(&base_url, upload.anon, needs_auth)?; + let summary_source = file.display().to_string(); + run_pathbase_inner(auth, base_url, upload, &body, &summary_source) + } +} - // Anonymous mode: explicit --anon, or no credentials at all and no - // override flags steering us toward an authed endpoint. - let go_anon = args.anon || (stored.is_none() && args.repo.is_none() && args.slug.is_none()); +/// Resolve the upload target URL from the CLI flag, the stored session, +/// or the default. Mirrors the order used inside `run_pathbase_inner` so +/// `cmd_share`'s pre-flight resolution agrees with the eventual upload. +#[cfg(not(target_os = "emscripten"))] +pub(crate) fn resolve_upload_base_url(args: &PathbaseUploadArgs) -> String { + use crate::cmd_pathbase::{credentials_path, load_session, resolve_url}; - if go_anon { - if !args.anon && stored.is_none() { - eprintln!( - "note: not logged in — uploading anonymously (not listable). Run `path auth login --url {base_url}` for a listable upload." - ); - } - let resp = anon_paths_post(&base_url, &body)?; - // Server returns either a full URL or a path-only string; in the - // latter case prefix the base so the user gets a clickable link. 
+ if let Some(u) = &args.url { + return resolve_url(Some(u.clone())); + } + if let Ok(path) = credentials_path() + && let Ok(Some(s)) = load_session(&path) + { + return s.url; + } + resolve_url(None) +} + +#[cfg(not(target_os = "emscripten"))] +pub(crate) fn run_pathbase_inner( + auth: crate::cmd_pathbase::AuthMode, + base_url: String, + args: PathbaseUploadArgs, + body: &str, + summary_source: &str, +) -> Result<()> { + use crate::cmd_pathbase::{AuthMode, anon_paths_post, paths_post, repos_post}; + + // Validate locally so we give a clean error rather than relying on + // the server to reject malformed payloads. + let doc = toolpath::v1::Graph::from_json(body) + .map_err(|e| anyhow::anyhow!("Invalid toolpath document: {}", e))?; + + let (token, username) = match auth { + AuthMode::Anon => { + let resp = anon_paths_post(&base_url, body)?; let printable = if resp.url.starts_with("http://") || resp.url.starts_with("https://") { resp.url.clone() } else if resp.url.starts_with('/') { @@ -1248,84 +1284,70 @@ fn run_pathbase(args: PathbaseExportArgs) -> Result<()> { } else { format!("{base_url}/{}", resp.url) }; - println!("{printable}"); + // Summary first on stderr, then the URL on stdout — the + // share URL is the primary product, so it's the last line + // the user (or a script piping the output) sees. eprintln!( "Uploaded {} → anon path {} ({} bytes)", - file.display(), + summary_source, resp.id, body.len() ); + println!("{printable}"); return Ok(()); } + AuthMode::Authed { token, username } => (token, username), + }; - let session = stored.ok_or_else(|| { - anyhow::anyhow!("Not logged in. 
Run `path auth login` or pass `--anon`.") - })?; - if host_of(&base_url) != host_of(&session.url) { - eprintln!( - "warning: uploading to {} with a token issued by {}; expect 401 unless this is the same deployment", - base_url, session.url - ); + let (owner, repo) = match args.repo { + Some(spec) => (spec.owner, spec.name), + None => { + // Pathstash default: own the repo "pathstash" under the username + // we resolved during preflight. Create it on demand. + repos_post(&base_url, &token, "pathstash")?; + (username, "pathstash".to_string()) } + }; - let (owner, repo) = match args.repo { - Some(spec) => (spec.owner, spec.name), - None => { - // Pathstash default: own the repo "pathstash" under our username, - // creating it on demand. api_me is the source of truth for the - // username (display name in stored.user can drift). - let user = api_me(&base_url, &session.token)?; - repos_post(&base_url, &session.token, "pathstash")?; - (user.username, "pathstash".to_string()) - } - }; + let slug = args.slug.unwrap_or_else(|| derive_slug(&doc)); + let created = paths_post(&base_url, &token, &owner, &repo, &slug, body, args.public)?; - let slug = args.slug.unwrap_or_else(|| derive_slug(&doc)); - let created = paths_post( - &base_url, - &session.token, - &owner, - &repo, - &slug, - &body, - args.public, - )?; - - // The visibility we surface is what the server actually applied, - // not what we requested. If a server-side policy ever clamps - // `is_public` (rate limits, account flags, future feature flags), - // we render the URL form the path can actually be reached at. 
- if created.is_public != args.public { - eprintln!( - "note: requested is_public={} but server applied is_public={}", - args.public, created.is_public - ); - } - let visibility = if created.is_public { - "public" - } else { - "secret" - }; - let url = pathbase_share_url( - &base_url, - &owner, - &repo, - &created.slug, - &created.id, - created.is_public, - ); - println!("{url}"); + // The visibility we surface is what the server actually applied, + // not what we requested. If a server-side policy ever clamps + // `is_public` (rate limits, account flags, future feature flags), + // we render the URL form the path can actually be reached at. + if created.is_public != args.public { eprintln!( - "Uploaded {} → {}/{}/{} ({} path, {} bytes)", - file.display(), - owner, - repo, - created.slug, - visibility, - body.len() + "note: requested is_public={} but server applied is_public={}", + args.public, created.is_public ); - Ok(()) } + let visibility = if created.is_public { + "public" + } else { + "secret" + }; + let url = pathbase_share_url( + &base_url, + &owner, + &repo, + &created.slug, + &created.id, + created.is_public, + ); + // Summary first on stderr, URL last on stdout — same ordering as + // the anon path so the share URL is consistently the final line. + eprintln!( + "Uploaded {} → {}/{}/{} ({} path, {} bytes)", + summary_source, + owner, + repo, + created.slug, + visibility, + body.len() + ); + println!("{url}"); + Ok(()) } /// Pick the canonical share URL for a path uploaded via `export pathbase`. @@ -1392,21 +1414,6 @@ fn derive_slug(doc: &toolpath::v1::Graph) -> String { format!("path-{}", &hex[..12]) } -/// Extract `scheme://host[:port]` from a URL, dropping any path/query. -/// Returns the input unchanged if it doesn't look like a URL. 
-#[cfg(not(target_os = "emscripten"))] -fn host_of(url: &str) -> &str { - let after_scheme = match url.find("://") { - Some(i) => i + 3, - None => return url, - }; - // Find the next `/` after the scheme://; everything before it is host[:port]. - match url[after_scheme..].find('/') { - Some(off) => &url[..after_scheme + off], - None => url, - } -} - #[cfg(all(test, not(target_os = "emscripten")))] mod tests { use super::*; @@ -1550,21 +1557,6 @@ mod tests { assert!(err.to_string().contains("parse") || err.to_string().contains("Failed")); } - #[test] - fn host_of_strips_path() { - assert_eq!(host_of("https://pathbase.dev"), "https://pathbase.dev"); - assert_eq!(host_of("https://pathbase.dev/"), "https://pathbase.dev"); - assert_eq!( - host_of("https://pathbase.dev/api/v1/traces"), - "https://pathbase.dev" - ); - assert_eq!( - host_of("http://127.0.0.1:9000/foo"), - "http://127.0.0.1:9000" - ); - assert_eq!(host_of("not-a-url"), "not-a-url"); - } - #[test] fn gemini_writes_resume_ready_layout() { // End-to-end: a path doc whose conversation.append carries a diff --git a/crates/path-cli/src/cmd_import.rs b/crates/path-cli/src/cmd_import.rs index 75ae52e..217b3b0 100644 --- a/crates/path-cli/src/cmd_import.rs +++ b/crates/path-cli/src/cmd_import.rs @@ -171,9 +171,9 @@ pub fn run(args: ImportArgs, pretty: bool) -> Result<()> { emit(&docs, args.force, args.no_cache, pretty) } -struct DerivedDoc { - cache_id: String, - doc: Graph, +pub(crate) struct DerivedDoc { + pub(crate) cache_id: String, + pub(crate) doc: Graph, } fn emit(docs: &[DerivedDoc], force: bool, no_cache: bool, pretty: bool) -> Result<()> { @@ -447,6 +447,26 @@ fn derive_claude_with_manager( wrap_paths_claude(paths) } +/// Derive a single Claude conversation given an explicit project + session. +/// Used by `cmd_share` after its picker has resolved the pair; mirrors the +/// `(Some(p), Some(s), _)` arm in [`derive_claude_with_manager`]. 
+pub(crate) fn derive_claude_session(project: &str, session: &str) -> Result { + let manager = toolpath_claude::ClaudeConvo::new(); + let cfg = toolpath_claude::derive::DeriveConfig { + project_path: Some(project.to_string()), + include_thinking: false, + }; + let convo = manager + .read_conversation(project, session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = toolpath_claude::derive::derive_path(&convo, &cfg); + let cache_id = make_id("claude", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} + fn wrap_paths_claude(paths: Vec) -> Result> { Ok(paths .into_iter() @@ -496,10 +516,14 @@ fn pick_claude_in_project( prompt: "claude session> ", preview: Some("path show claude --project {1} --session {2}"), header: Some("pick a Claude session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_project_session(&selected))) } @@ -542,10 +566,14 @@ fn pick_claude_global( prompt: "claude session> ", preview: Some("path show claude --project {1} --session {2}"), header: Some("pick a Claude session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_project_session(&selected))) } @@ -637,6 +665,28 @@ fn derive_gemini_with_manager( wrap_paths_gemini(paths) } +/// Derive a single Gemini conversation given an explicit project + session. 
+pub(crate) fn derive_gemini_session( + project: &str, + session: &str, + include_thinking: bool, +) -> Result { + let manager = toolpath_gemini::GeminiConvo::new(); + let cfg = toolpath_gemini::derive::DeriveConfig { + project_path: Some(project.to_string()), + include_thinking, + }; + let convo = manager + .read_conversation(project, session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = toolpath_gemini::derive::derive_path(&convo, &cfg); + let cache_id = make_id("gemini", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} + fn wrap_paths_gemini(paths: Vec) -> Result> { Ok(paths .into_iter() @@ -682,10 +732,14 @@ fn pick_gemini_in_project( prompt: "gemini session> ", preview: Some("path show gemini --project {1} --session {2}"), header: Some("pick a Gemini session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_project_session(&selected))) } @@ -728,10 +782,14 @@ fn pick_gemini_global( prompt: "gemini session> ", preview: Some("path show gemini --project {1} --session {2}"), header: Some("pick a Gemini session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_project_session(&selected))) } @@ -791,6 +849,21 @@ fn derive_codex(session: Option, all: bool) -> Result> { wrap_paths_codex(paths) } +/// Derive a single Codex session given an explicit session id. 
+pub(crate) fn derive_codex_session(session: &str) -> Result { + let manager = toolpath_codex::CodexConvo::new(); + let config = toolpath_codex::derive::DeriveConfig { project_path: None }; + let s = manager + .read_session(session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = toolpath_codex::derive::derive_path(&s, &config); + let cache_id = make_id("codex", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} + fn wrap_paths_codex(paths: Vec) -> Result> { Ok(paths .into_iter() @@ -838,10 +911,14 @@ fn pick_codex(manager: &toolpath_codex::CodexConvo) -> Result prompt: "codex session> ", preview: Some("path show codex --session {1}"), header: Some("pick a Codex session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_single_id(&selected))) } @@ -919,6 +996,29 @@ fn derive_opencode( } } +/// Derive a single opencode session given an explicit session id. 
+#[cfg(not(target_os = "emscripten"))] +pub(crate) fn derive_opencode_session( + session: &str, + no_snapshot_diffs: bool, +) -> Result { + let manager = toolpath_opencode::OpencodeConvo::new(); + let config = toolpath_opencode::derive::DeriveConfig { + no_snapshot_diffs, + ..Default::default() + }; + let s = manager + .read_session(session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = + toolpath_opencode::derive::derive_path_with_resolver(&s, &config, manager.resolver()); + let cache_id = make_id("opencode", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} + fn wrap_paths_opencode(paths: Vec) -> Result> { Ok(paths .into_iter() @@ -970,10 +1070,14 @@ fn pick_opencode( prompt: "opencode session> ", preview: Some("path show opencode --session {1}"), header: Some("pick an opencode session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_single_id(&selected))) } @@ -1071,6 +1175,27 @@ fn derive_pi_with_manager( Ok(docs) } +/// Derive a single Pi session given an explicit project + session. 
+pub(crate) fn derive_pi_session( + project: &str, + session: &str, + base: Option, +) -> Result { + let manager = if let Some(path) = base { + let resolver = toolpath_pi::PathResolver::new().with_sessions_dir(&path); + toolpath_pi::PiConvo::with_resolver(resolver) + } else { + toolpath_pi::PiConvo::new() + }; + let config = toolpath_pi::DeriveConfig::default(); + let session = manager + .read_session(project, session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let doc = Graph::from_path(toolpath_pi::derive::derive_path(&session, &config)); + let cache_id = make_id("pi", &doc_inner_id(&doc)); + Ok(DerivedDoc { cache_id, doc }) +} + #[cfg(not(target_os = "emscripten"))] fn pick_pi_in_project( manager: &toolpath_pi::PiConvo, @@ -1103,10 +1228,14 @@ fn pick_pi_in_project( prompt: "pi session> ", preview: Some("path show pi --project {1} --session {2}"), header: Some("pick a Pi session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? { + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_project_session(&selected))) } @@ -1149,10 +1278,14 @@ fn pick_pi_global(manager: &toolpath_pi::PiConvo) -> Result ", preview: Some("path show pi --project {1} --session {2}"), header: Some("pick a Pi session (TAB = multi-select, Enter = confirm)"), + preview_window: "right:60%:wrap", tiebreak: "index", multi: true, }; - let selected = fzf::pick(&lines, &opts)?; + let selected = match fzf::pick(&lines, &opts)? 
{ + fzf::PickResult::Selected(v) => v, + fzf::PickResult::NoMatch | fzf::PickResult::Cancelled => Vec::new(), + }; Ok(Some(parse_project_session(&selected))) } diff --git a/crates/path-cli/src/cmd_pathbase.rs b/crates/path-cli/src/cmd_pathbase.rs index 94b7320..313d815 100644 --- a/crates/path-cli/src/cmd_pathbase.rs +++ b/crates/path-cli/src/cmd_pathbase.rs @@ -1,9 +1,13 @@ //! Shared Pathbase client helpers. //! -//! Hosts the HTTP client and session-storage logic used by `cmd_auth`, -//! `cmd_import`, and `cmd_export`. Config-dir resolution lives in the -//! sibling `config` module so `cmd_cache` (which doesn't depend on -//! reqwest and must build on emscripten) can reuse it. +//! Wraps the typed `pathbase-client` (generated from +//! `crates/pathbase-client/openapi.json` — refresh via +//! `scripts/refresh-pathbase-openapi.sh`) plus session-storage logic +//! used by `cmd_auth`, `cmd_import`, `cmd_export`, and `cmd_share`. +//! Every Pathbase HTTP call now goes through the typed client; no +//! hand-rolled reqwest left in this module. Config-dir resolution lives +//! in the sibling `config` module so `cmd_cache` (which doesn't depend +//! on reqwest and must build on emscripten) can reuse it. use anyhow::{Context, Result, anyhow, bail}; use serde::{Deserialize, Serialize}; @@ -71,6 +75,21 @@ pub(crate) fn resolve_url(cli_url: Option) -> String { raw.trim_end_matches('/').to_string() } +/// Extract `scheme://host[:port]` from a URL, dropping any path/query. +/// Returns the input unchanged if it doesn't look like a URL. Used to +/// compare a stored session's host against the upload target so we can +/// warn / fall back when the two don't agree. 
+pub(crate) fn host_of(url: &str) -> &str { + let after_scheme = match url.find("://") { + Some(i) => i + 3, + None => return url, + }; + match url[after_scheme..].find('/') { + Some(off) => &url[..after_scheme + off], + None => url, + } +} + pub(crate) fn prompt_line(prompt: &str) -> Result { use std::io::{BufRead, Write}; let mut stdout = std::io::stdout(); @@ -84,79 +103,195 @@ pub(crate) fn prompt_line(prompt: &str) -> Result { // ── HTTP layer ────────────────────────────────────────────────────────── -pub(crate) fn http_client() -> Result { - reqwest::blocking::Client::builder() - .user_agent(concat!("path-cli/", env!("CARGO_PKG_VERSION"))) - .timeout(std::time::Duration::from_secs(30)) - .build() - .context("failed to build HTTP client") +pub(crate) fn api_redeem(base_url: &str, code: &str) -> Result<(String, User)> { + let body = pathbase_client::types::RedeemBody { + code: code.to_string(), + }; + let client = pathbase_client(base_url, None)?; + match block_on(client.cli_redeem(&body)) { + Ok(resp) => { + let inner = resp.into_inner(); + let u = inner.user; + Ok(( + inner.token, + User { + id: u.id.to_string(), + username: u.username, + email: u.email, + display_name: u.display_name, + avatar_url: u.avatar_url, + }, + )) + } + Err(pathbase_client::Error::ErrorResponse(resp)) => match resp.status().as_u16() { + 401 => bail!("code is invalid, already used, or expired — generate a new one"), + 400 => bail!("invalid code format"), + code => bail!("redeem failed (HTTP {code})"), + }, + Err(pathbase_client::Error::UnexpectedResponse(resp)) => { + let status = resp.status(); + let body = block_on(resp.text()).unwrap_or_default(); + let msg = error_message(&body).unwrap_or_else(|| short_body(&body)); + if msg.is_empty() { + bail!("redeem failed ({status})") + } else { + bail!("redeem failed ({status}): {msg}") + } + } + Err(pathbase_client::Error::CommunicationError(e)) => { + bail!("connect to {base_url}: {}", reqwest_hint(&e)) + } + Err(e) => Err(anyhow!("redeem 
failed: {}", full_chain(&e))), + } } -#[derive(Deserialize)] -pub(crate) struct RedeemResponse { - pub token: String, - pub user: User, +pub(crate) fn api_logout(base_url: &str, token: &str) -> Result<()> { + let client = pathbase_client(base_url, Some(token))?; + match block_on(client.logout()) { + Ok(_) => Ok(()), + Err(pathbase_client::Error::ErrorResponse(resp)) => { + bail!("server returned {}", resp.status()) + } + Err(pathbase_client::Error::UnexpectedResponse(resp)) => { + bail!("server returned {}", resp.status()) + } + Err(pathbase_client::Error::CommunicationError(e)) => { + bail!("connect to {base_url}: {}", reqwest_hint(&e)) + } + Err(e) => Err(anyhow!("connect to {base_url}: {}", full_chain(&e))), + } } -pub(crate) fn api_redeem(base_url: &str, code: &str) -> Result<(String, User)> { - let client = http_client()?; - let resp = client - .post(format!("{base_url}/api/v1/auth/cli/redeem")) - .json(&serde_json::json!({ "code": code })) - .send() - .with_context(|| format!("connect to {base_url}"))?; - - let status = resp.status(); - let body = resp.text().unwrap_or_default(); - - if !status.is_success() { - if status == reqwest::StatusCode::UNAUTHORIZED { - bail!("code is invalid, already used, or expired — generate a new one"); +/// Errors are intentionally terse one-liners — callers compose them +/// into either a fallback notice ("note: ; falling back to +/// anonymous") or a propagated error with actionable next-step hints. +/// Don't bake the hints in here; otherwise the fallback notice gets +/// telephone-pole long. 
+pub(crate) fn api_me(base_url: &str, token: &str) -> Result { + let client = pathbase_client(base_url, Some(token))?; + match block_on(client.get_me()) { + Ok(resp) => { + let u = resp.into_inner(); + Ok(User { + id: u.id.to_string(), + username: u.username, + email: u.email, + display_name: u.display_name, + avatar_url: u.avatar_url, + }) } - if status == reqwest::StatusCode::BAD_REQUEST { - let msg = serde_json::from_str::(&body) - .ok() - .and_then(|v| v.get("error").and_then(|e| e.as_str()).map(String::from)) - .unwrap_or_else(|| body.clone()); - bail!("{msg}"); + Err(pathbase_client::Error::ErrorResponse(resp)) => { + let status = resp.status(); + if status == reqwest::StatusCode::UNAUTHORIZED + || status == reqwest::StatusCode::FORBIDDEN + { + bail!("{base_url} rejected the stored credentials ({status})") + } else { + bail!("{base_url} returned {status} on /api/v1/users/me") + } } - bail!("redeem failed ({status}): {body}"); + Err(pathbase_client::Error::UnexpectedResponse(resp)) => { + bail!("{base_url} returned {} on /api/v1/users/me", resp.status()) + } + Err(pathbase_client::Error::InvalidResponsePayload(_, _)) => { + bail!("{base_url} isn't a Pathbase deployment (non-JSON /api/v1/users/me response)") + } + Err(pathbase_client::Error::CommunicationError(e)) => { + bail!("connect to {base_url}: {}", reqwest_hint(&e)) + } + Err(e) => Err(anyhow!("connect to {base_url}: {}", full_chain(&e))), } +} - let parsed: RedeemResponse = - serde_json::from_str(&body).with_context(|| format!("parsing redeem response: {body}"))?; - Ok((parsed.token, parsed.user)) +/// Pre-resolved upload mode. Produced by [`preflight_auth`] before any +/// expensive work (session pickers, cache writes, derive passes) so that +/// callers can fail fast or fall back to anonymous mode without making +/// the user select a session and *then* discover the credentials are bad. +#[derive(Debug)] +pub(crate) enum AuthMode { + /// Use the public anonymous endpoint. 
No credentials required; + /// 5 MB cap and rate-limited. + Anon, + /// Use the authenticated endpoint. Credentials have already been + /// validated against the target server via `api_me`. + Authed { token: String, username: String }, } -pub(crate) fn api_logout(base_url: &str, token: &str) -> Result<()> { - let client = http_client()?; - let resp = client - .post(format!("{base_url}/api/v1/auth/logout")) - .bearer_auth(token) - .send() - .with_context(|| format!("connect to {base_url}"))?; - if !resp.status().is_success() && resp.status() != reqwest::StatusCode::NO_CONTENT { - bail!("server returned {}", resp.status()); +/// Probe credentials and decide whether the upload should go authed or +/// anonymous, *before* any picker/derive/cache work. Behavior: +/// +/// - `--anon` → `Anon`, no credentials check. +/// - No stored credentials and no auth-requiring flags → `Anon` with the +/// "not logged in — uploading anonymously" notice. +/// - Stored credentials present → call `api_me` against the target URL. +/// - On success → `Authed { token, username }`. +/// - On failure with no auth-requiring flags (`--repo`/`--public`/`--slug`) +/// → fall back to `Anon` with a stderr notice explaining why. +/// - On failure with auth-requiring flags → propagate the error so the +/// user knows their explicit request can't be satisfied. +/// +/// `host_of(base_url) != host_of(stored.url)` triggers an advisory warning +/// before the credentials probe so the user sees the mismatch even if +/// `api_me` happens to succeed. 
+pub(crate) fn preflight_auth(base_url: &str, anon: bool, needs_auth: bool) -> Result { + if anon { + return Ok(AuthMode::Anon); } - Ok(()) -} + let stored = load_session(&credentials_path()?)?; -pub(crate) fn api_me(base_url: &str, token: &str) -> Result { - let client = http_client()?; - let resp = client - .get(format!("{base_url}/api/v1/auth/me")) - .bearer_auth(token) - .send() - .with_context(|| format!("connect to {base_url}"))?; + let go_anon = stored.is_none() && !needs_auth; + if go_anon { + eprintln!( + "note: not logged in — uploading anonymously (not listable). \ + Run `path auth login --url {base_url}` for a listable upload." + ); + return Ok(AuthMode::Anon); + } + + let session = match stored { + Some(s) => s, + None => bail!("Not logged in. Run `path auth login` or pass `--anon`."), + }; + + if host_of(base_url) != host_of(&session.url) { + eprintln!( + "warning: stored credentials are for {}, but you're uploading to {}.", + session.url, base_url + ); + } + + match api_me(base_url, &session.token) { + Ok(user) => Ok(AuthMode::Authed { + token: session.token, + username: user.username, + }), + Err(e) if needs_auth => Err(e.context(format!( + "--repo / --public / --slug require an authenticated upload. \ + Run `path auth login --url {base_url}` to authenticate against this \ + server, or drop those flags to upload anonymously." + ))), + Err(e) => { + eprintln!("note: {e}; falling back to anonymous upload."); + Ok(AuthMode::Anon) + } + } +} - if resp.status() == reqwest::StatusCode::UNAUTHORIZED { - bail!("stored session is no longer valid — run `path auth login` again"); +/// Trim a response body to a single-line snippet for error messages. +/// Replaces newlines, collapses long bodies down to ~200 chars with an ellipsis. 
+fn short_body(body: &str) -> String { + const MAX: usize = 200; + let cleaned: String = body.replace(['\n', '\r'], " "); + let trimmed = cleaned.trim(); + if trimmed.is_empty() { + return "".to_string(); } - if !resp.status().is_success() { - bail!("server returned {}", resp.status()); + if trimmed.chars().count() > MAX { + let head: String = trimmed.chars().take(MAX - 1).collect(); + format!("{head}…") + } else { + trimmed.to_string() } - let user: User = resp.json().context("parsing /auth/me response")?; - Ok(user) } // ── pathbase-client bridge ───────────────────────────────────────────── @@ -226,7 +361,7 @@ pub(crate) fn anon_paths_post(base_url: &str, document_json: &str) -> Result match resp.status().as_u16() { @@ -237,20 +372,33 @@ pub(crate) fn anon_paths_post(base_url: &str, document_json: &str) -> Result bail!("anon upload failed (HTTP {code})"), }, Err(pathbase_client::Error::UnexpectedResponse(resp)) => { - bail!( - "anon upload returned unexpected status: HTTP {}", - resp.status() - ) + let status = resp.status(); + let body = block_on(resp.text()).unwrap_or_default(); + let msg = error_message(&body).unwrap_or_else(|| short_body(&body)); + if msg.is_empty() { + bail!("anon upload failed ({status})") + } else { + bail!("anon upload failed ({status}): {msg}") + } + } + Err(pathbase_client::Error::CommunicationError(e)) => { + bail!("anon upload failed: {}", reqwest_hint(&e)) } - Err(e) => Err(anyhow!("anon upload failed: {e}")), + Err(e) => Err(anyhow!("anon upload failed: {}", full_chain(&e))), } } /// `POST /api/v1/repos/{owner}/{repo}/paths` — listable upload to a -/// repo the authenticated user owns. `is_public=false` writes a -/// pathstash-style secret/unlisted path; the URL is still publicly -/// addressable (UUIDs are public for both secret and public paths) but -/// won't appear in any user's listing. +/// repo the authenticated user owns. 
Multi-path graphs go to the +/// sibling `/graphs` endpoint, but `path share` only ever uploads +/// single-path graphs. +/// +/// Per the spec, `is_public` defaults to **true** on the server side. +/// We always pass `Some(is_public)` explicitly so the share command's +/// default (`--public` unset → `is_public=false`) reliably writes a +/// secret/unlisted path; the path stays addressable via its UUID +/// (`///paths/`) as the unguessable share-by-link +/// form, but won't appear in any user's listing. pub(crate) fn paths_post( base_url: &str, token: &str, @@ -276,7 +424,11 @@ pub(crate) fn paths_post( }) } Err(pathbase_client::Error::ErrorResponse(resp)) => match resp.status().as_u16() { - 401 => bail!("stored session is no longer valid — run `path auth login` again"), + 401 => bail!( + "{base_url} rejected your stored credentials (HTTP 401). \ + Run `path auth login --url {base_url}` to authenticate against this server, \ + or pass `--anon` to upload anonymously." + ), code => bail!("upload to {owner}/{repo} failed (HTTP {code})"), }, Err(pathbase_client::Error::UnexpectedResponse(resp)) => { @@ -289,7 +441,10 @@ pub(crate) fn paths_post( bail!("upload to {owner}/{repo} failed ({status}): {msg}") } } - Err(e) => Err(anyhow!("upload to {owner}/{repo} failed: {e}")), + Err(pathbase_client::Error::CommunicationError(e)) => { + bail!("upload to {owner}/{repo} failed: {}", reqwest_hint(&e)) + } + Err(e) => Err(anyhow!("upload to {owner}/{repo} failed: {}", full_chain(&e))), } } @@ -299,6 +454,41 @@ fn error_message(body: &str) -> Option { .and_then(|v| v.get("error").and_then(|e| e.as_str()).map(String::from)) } +/// Walk an error's `source()` chain and join each link's `Display` +/// with `: `. progenitor's `CommunicationError(reqwest::Error)` +/// renders as "error sending request" by default — the actually-useful +/// detail (timeout / connection refused / TLS handshake) sits two or +/// three levels down in `source()`. This surfaces it. 
+fn full_chain(err: &(dyn std::error::Error + 'static)) -> String { + let mut s = err.to_string(); + let mut cur = err.source(); + while let Some(c) = cur { + s.push_str(": "); + s.push_str(&c.to_string()); + cur = c.source(); + } + s +} + +/// Classify a `reqwest::Error` into a short hint so users can tell +/// "took too long" from "couldn't connect" from "server hung up." Falls +/// back to the full source chain when no specific hint applies. +fn reqwest_hint(err: &reqwest::Error) -> String { + if err.is_timeout() { + return "request timed out after 30s — try again, or shrink the upload".to_string(); + } + if err.is_connect() { + return format!("couldn't connect to server: {}", full_chain(err)); + } + if err.is_body() { + return format!("body error: {}", full_chain(err)); + } + if err.is_decode() { + return format!("response decode error: {}", full_chain(err)); + } + full_chain(err) +} + /// `POST /api/v1/repos` — create a repo owned by the authenticated user. /// Treats 409 (already exists) as success so callers can use this /// idempotently to ensure pathstash exists before uploading to it. @@ -311,7 +501,11 @@ pub(crate) fn repos_post(base_url: &str, token: &str, name: &str) -> Result<()> match block_on(client.create_repo(&body)) { Ok(_) => Ok(()), Err(pathbase_client::Error::ErrorResponse(resp)) => match resp.status().as_u16() { - 401 => bail!("stored session is no longer valid — run `path auth login` again"), + 401 => bail!( + "{base_url} rejected your stored credentials (HTTP 401). \ + Run `path auth login --url {base_url}` to authenticate against this server, \ + or pass `--anon` to upload anonymously." 
+ ), 409 => Ok(()), code => bail!("creating repo {name} failed (HTTP {code})"), }, @@ -322,24 +516,26 @@ pub(crate) fn repos_post(base_url: &str, token: &str, name: &str) -> Result<()> 409 => Ok(()), code => bail!("creating repo {name} returned unexpected status: HTTP {code}"), }, - Err(e) => Err(anyhow!("creating repo {name} failed: {e}")), + Err(pathbase_client::Error::CommunicationError(e)) => { + bail!("creating repo {name} failed: {}", reqwest_hint(&e)) + } + Err(e) => Err(anyhow!("creating repo {name} failed: {}", full_chain(&e))), } } /// `GET /api/v1/repos/{owner}/{repo}/paths/{slug}/download` — fetch the -/// raw toolpath JSON for a path. Public paths and unlisted-but-shared -/// paths both download without authentication; only fully private paths -/// (gated by an ACL beyond `is_public=false`) require auth. +/// reconstructed Graph document for a path. +/// +/// Per the spec: private paths return 404 unless the caller is +/// owner-authenticated *or* addresses the path by its UUID +/// (the unguessable share-by-link form). The 404 message therefore +/// hints at both possibilities — "not found, or you're not the owner." /// -/// **Why this doesn't go through `pathbase-client`.** progenitor's -/// generated client decodes the response body into -/// `serde_json::Map` (per the spec's -/// `application/json` content type) and we'd then re-serialize to get a -/// String back. That's a wasted round-trip — and the BTreeMap-backed -/// `serde_json::Map` reorders keys, so the bytes the caller sees aren't -/// the bytes the server sent. For a "give me back the document I just -/// uploaded" endpoint, byte-fidelity matters. We use blocking reqwest -/// directly and forward the response body verbatim. +/// Returns a serialized JSON string. 
The generated client decodes into +/// `serde_json::Map`, which we re-serialize on the way out — keys may +/// be reordered relative to the server's bytes, but the consumer parses +/// to `Graph` and re-serializes anyway, so byte-fidelity isn't a real +/// requirement. pub(crate) fn paths_download( base_url: &str, token: Option<&str>, @@ -347,33 +543,35 @@ pub(crate) fn paths_download( repo: &str, slug: &str, ) -> Result { - let client = http_client()?; - let mut req = client.get(format!( - "{base_url}/api/v1/repos/{owner}/{repo}/paths/{slug}/download" - )); - if let Some(t) = token { - req = req.bearer_auth(t); - } - let resp = req - .send() - .with_context(|| format!("connect to {base_url}"))?; - - let status = resp.status(); - let text = resp.text().unwrap_or_default(); - - if status == reqwest::StatusCode::UNAUTHORIZED { - bail!( - "this path is private and requires authentication — run `path auth login --url {base_url}` and retry" - ); - } - if status == reqwest::StatusCode::NOT_FOUND { - bail!("path {owner}/{repo}/{slug} not found on {base_url}"); - } - if !status.is_success() { - let msg = error_message(&text).unwrap_or(text); - bail!("download of {owner}/{repo}/{slug} failed ({status}): {msg}"); + let client = pathbase_client(base_url, token)?; + match block_on(client.download_path(owner, repo, slug)) { + Ok(resp) => { + let map = resp.into_inner(); + serde_json::to_string(&map).context("re-serializing downloaded path") + } + Err(pathbase_client::Error::ErrorResponse(resp)) => match resp.status() { + reqwest::StatusCode::NOT_FOUND => bail!( + "{owner}/{repo}/{slug} not found on {base_url} (or it's a private path \ + and you're not the owner — try the path's UUID instead, or \ + `path auth login --url {base_url}`)" + ), + status => bail!("download of {owner}/{repo}/{slug} failed ({status})"), + }, + Err(pathbase_client::Error::UnexpectedResponse(resp)) => { + let status = resp.status(); + let body = block_on(resp.text()).unwrap_or_default(); + let msg = 
error_message(&body).unwrap_or_else(|| short_body(&body)); + bail!("download of {owner}/{repo}/{slug} failed ({status}): {msg}") + } + Err(pathbase_client::Error::CommunicationError(e)) => bail!( + "download of {owner}/{repo}/{slug} failed: {}", + reqwest_hint(&e) + ), + Err(e) => Err(anyhow!( + "download of {owner}/{repo}/{slug} failed: {}", + full_chain(&e) + )), } - Ok(text) } // ── File storage ──────────────────────────────────────────────────────── @@ -447,6 +645,40 @@ mod tests { assert_eq!(got, "https://example.com"); } + #[test] + fn host_of_strips_path() { + assert_eq!(host_of("https://pathbase.dev"), "https://pathbase.dev"); + assert_eq!(host_of("https://pathbase.dev/"), "https://pathbase.dev"); + assert_eq!( + host_of("https://pathbase.dev/api/v1/traces"), + "https://pathbase.dev" + ); + assert_eq!( + host_of("http://127.0.0.1:9000/foo"), + "http://127.0.0.1:9000" + ); + assert_eq!(host_of("not-a-url"), "not-a-url"); + } + + #[test] + fn short_body_handles_empty_and_whitespace() { + assert_eq!(short_body(""), ""); + assert_eq!(short_body(" \n\t "), ""); + } + + #[test] + fn short_body_collapses_newlines_to_spaces() { + assert_eq!(short_body("line1\nline2\r\nline3"), "line1 line2 line3"); + } + + #[test] + fn short_body_truncates_long_input_with_ellipsis() { + let long = "x".repeat(500); + let s = short_body(&long); + assert_eq!(s.chars().count(), 200); + assert!(s.ends_with('…')); + } + #[test] fn store_then_load_roundtrips_on_disk() { let dir = tempfile::tempdir().unwrap(); @@ -621,9 +853,17 @@ mod tests { #[test] fn paths_post_401_surfaces_relogin_message() { let server = MockServer::start("HTTP/1.1 401 Unauthorized", r#"{"error":"bad"}"#); - let err = - paths_post(&server.base(), "tok", "alex", "pathstash", "s", "{}", false).unwrap_err(); - assert!(err.to_string().contains("run `path auth login`")); + let base = server.base(); + let err = paths_post(&base, "tok", "alex", "pathstash", "s", "{}", false).unwrap_err(); + let msg = err.to_string(); + // 
Should name the URL the credentials are being rejected by, point at + // `path auth login --url`, and offer `--anon` as the bypass. + assert!(msg.contains(&base), "expected base URL in error: {msg}"); + assert!( + msg.contains("path auth login --url"), + "expected re-auth hint: {msg}" + ); + assert!(msg.contains("--anon"), "expected --anon hint: {msg}"); } #[test] @@ -637,15 +877,17 @@ mod tests { assert!(err.to_string().contains("database is on fire"), "{err}"); } + /// Anon upload returns `{id, path, share_url}` per the OpenAPI spec. + /// We expose `share_url` to callers as the canonical share link. #[test] fn anon_paths_post_wraps_document_and_omits_auth() { let server = MockServer::start( "HTTP/1.1 201 Created", - r#"{"id":"abc","url":"https://pathbase.dev/anon/abc"}"#, + r#"{"id":"abc","path":"/anon/pathstash/paths/abc","share_url":"https://pathbase.dev/anon/pathstash/paths/abc"}"#, ); let resp = anon_paths_post(&server.base(), r#"{"Step":{}}"#).unwrap(); assert_eq!(resp.id, "abc"); - assert_eq!(resp.url, "https://pathbase.dev/anon/abc"); + assert_eq!(resp.url, "https://pathbase.dev/anon/pathstash/paths/abc"); let req = String::from_utf8(server.request()).unwrap(); assert!(req.starts_with("POST /api/v1/anon/paths "), "got: {req}"); @@ -670,18 +912,23 @@ mod tests { repos_post(&server.base(), "tok", "pathstash").unwrap(); } + /// Download decodes through `serde_json::Map` and re-serializes, so + /// keys may be reordered relative to the server's bytes. The + /// downstream cache writer (`write_cached`) round-trips through + /// `Graph` and writes pretty-printed JSON anyway, so the only + /// invariant we care about is "the JSON parses to the same value". #[test] - fn paths_download_returns_body_byte_for_byte() { - // Key ordering matters: the server's bytes must come back unmodified. - // With the round-trip removed (raw blocking GET, no Map decode), this - // is a straight string equality. 
If progenitor ever sneaks back in - // for this endpoint, the BTreeMap-backed Map reorders keys and this - // assertion catches it. + fn paths_download_returns_body_as_json() { let body = r#"{"Step":{"step":{"id":"s1","actor":"human:x","timestamp":"2024-01-01T00:00:00Z"},"change":{}}}"#; let server = MockServer::start("HTTP/1.1 200 OK", body); let got = paths_download(&server.base(), Some("tok"), "alex", "pathstash", "my-path").unwrap(); - assert_eq!(got, body); + let got_v: serde_json::Value = serde_json::from_str(&got).unwrap(); + let want_v: serde_json::Value = serde_json::from_str(body).unwrap(); + assert_eq!( + got_v, want_v, + "downloaded body should parse to the same value" + ); let req = String::from_utf8(server.request()).unwrap(); assert!( @@ -701,4 +948,144 @@ mod tests { .unwrap_err(); assert!(err.to_string().contains("not found")); } + + // ── preflight_auth ──────────────────────────────────────────────── + // + // The preflight is the gate that decides authed-vs-anon BEFORE the + // share picker runs, so a credential rejection shouldn't make the + // user pick a session and *then* fail. These tests use + // TOOLPATH_CONFIG_DIR + a tempdir-credentials file to drive the + // logged-in path through the same MockServer used elsewhere. + + fn write_credentials(dir: &std::path::Path, url: &str) { + let creds = StoredSession { + url: url.to_string(), + token: "tok".into(), + user: User { + id: "u1".into(), + username: "alice".into(), + email: None, + display_name: None, + avatar_url: None, + }, + }; + store_session(&dir.join(CREDENTIALS_FILE), &creds).unwrap(); + } + + fn me_response_body(username: &str) -> String { + // The generated User type requires id (uuid), username, created_at, + // updated_at. Mock the bare minimum that parses cleanly. 
+ format!( + r#"{{"id":"00000000-0000-0000-0000-000000000001","username":"{username}","created_at":"2024-01-01T00:00:00Z","updated_at":"2024-01-01T00:00:00Z"}}"# + ) + } + + /// Cleared TOOLPATH_CONFIG_DIR + no `--anon` + no auth-requiring flags + /// → preflight returns Anon with the "not logged in" notice. + #[test] + fn preflight_anon_when_logged_out_and_no_auth_flags() { + let cfg = tempfile::tempdir().unwrap(); + let _g = EnvGuard::set("TOOLPATH_CONFIG_DIR", cfg.path().to_str().unwrap()); + let mode = preflight_auth("https://pathbase.dev", false, false).unwrap(); + assert!(matches!(mode, AuthMode::Anon)); + } + + /// Stored credentials AND host matches AND api_me succeeds → Authed. + #[test] + fn preflight_authed_when_credentials_validate() { + let server = MockServer::start( + "HTTP/1.1 200 OK", + Box::leak(me_response_body("alice").into_boxed_str()), + ); + let cfg = tempfile::tempdir().unwrap(); + let _g = EnvGuard::set("TOOLPATH_CONFIG_DIR", cfg.path().to_str().unwrap()); + write_credentials(cfg.path(), &server.base()); + let base = server.base(); + let mode = preflight_auth(&base, false, false).unwrap(); + match mode { + AuthMode::Authed { username, .. } => assert_eq!(username, "alice"), + AuthMode::Anon => panic!("expected Authed, got Anon"), + } + } + + /// Stored credentials but api_me rejects with 401 + no auth-requiring + /// flags → fall back to Anon (don't error). 
+ #[test] + fn preflight_falls_back_to_anon_on_401_without_auth_flags() { + let server = MockServer::start("HTTP/1.1 401 Unauthorized", "{}"); + let cfg = tempfile::tempdir().unwrap(); + let _g = EnvGuard::set("TOOLPATH_CONFIG_DIR", cfg.path().to_str().unwrap()); + write_credentials(cfg.path(), &server.base()); + let base = server.base(); + let mode = preflight_auth(&base, false, false).unwrap(); + assert!(matches!(mode, AuthMode::Anon)); + } + + /// Stored credentials but api_me rejects + needs_auth=true → propagate + /// the error so the user knows --repo/--public/--slug can't be honored. + #[test] + fn preflight_propagates_401_when_auth_required() { + let server = MockServer::start("HTTP/1.1 401 Unauthorized", "{}"); + let cfg = tempfile::tempdir().unwrap(); + let _g = EnvGuard::set("TOOLPATH_CONFIG_DIR", cfg.path().to_str().unwrap()); + write_credentials(cfg.path(), &server.base()); + let base = server.base(); + let err = preflight_auth(&base, false, true).unwrap_err(); + let msg = err.to_string(); + assert!(msg.contains("--repo"), "expected mention of --repo: {msg}"); + } + + /// `--anon` short-circuits past every check. + #[test] + fn preflight_anon_flag_skips_credentials_check() { + // Even with valid credentials in place, --anon returns Anon without + // calling api_me (no MockServer needed — would 404). + let cfg = tempfile::tempdir().unwrap(); + let _g = EnvGuard::set("TOOLPATH_CONFIG_DIR", cfg.path().to_str().unwrap()); + write_credentials(cfg.path(), "https://pathbase.dev"); + let mode = preflight_auth("https://pathbase.dev", true, false).unwrap(); + assert!(matches!(mode, AuthMode::Anon)); + } + + /// Test-helper guard for `std::env::set_var`. Process env is shared + /// across all `cargo test` threads, so concurrent tests that mutate + /// the same key would race — `EnvGuard` serializes them via a global + /// mutex held for the guard's lifetime. Drop restores the prior value. 
+ static ENV_LOCK: std::sync::Mutex<()> = std::sync::Mutex::new(()); + + struct EnvGuard { + key: String, + prior: Option, + _lock: std::sync::MutexGuard<'static, ()>, + } + impl EnvGuard { + fn set(key: &str, val: &str) -> Self { + // PoisonError on a previously-panicked test still gives us a + // valid lock — recover the inner guard and proceed. + let lock = ENV_LOCK.lock().unwrap_or_else(|e| e.into_inner()); + let prior = std::env::var_os(key); + // SAFETY: ENV_LOCK serializes EnvGuard-using tests against + // each other. The only env var these tests touch is + // TOOLPATH_CONFIG_DIR, and no other tests in this crate + // mutate or read it from the test process. + unsafe { + std::env::set_var(key, val); + } + Self { + key: key.to_string(), + prior, + _lock: lock, + } + } + } + impl Drop for EnvGuard { + fn drop(&mut self) { + unsafe { + match &self.prior { + Some(v) => std::env::set_var(&self.key, v), + None => std::env::remove_var(&self.key), + } + } + } + } } diff --git a/crates/path-cli/src/cmd_share.rs b/crates/path-cli/src/cmd_share.rs new file mode 100644 index 0000000..b9ef8a9 --- /dev/null +++ b/crates/path-cli/src/cmd_share.rs @@ -0,0 +1,1338 @@ +//! `path share` — interactive Pathbase upload across installed agent +//! harnesses. See `docs/superpowers/specs/2026-05-07-path-share-command-design.md`. 
+ +#![cfg(not(target_os = "emscripten"))] + +use anyhow::Result; +use chrono::{DateTime, Utc}; +use clap::{Args, ValueEnum}; +use std::path::PathBuf; + +use crate::cmd_export::RepoSpec; + +#[derive(Copy, Clone, Debug, PartialEq, Eq, ValueEnum)] +#[value(rename_all = "lower")] +pub enum HarnessArg { + Claude, + Gemini, + Codex, + Opencode, + Pi, +} + +#[derive(Args, Debug)] +pub struct ShareArgs { + /// Pathbase server URL (defaults to the stored session's server) + #[arg(long)] + pub url: Option, + + /// Force the anonymous endpoint, ignoring any stored credentials + #[arg(long, conflicts_with_all = ["repo", "public"])] + pub anon: bool, + + /// Target a specific repo as `owner/name` instead of `/pathstash` + #[arg(long, value_parser = crate::cmd_export::parse_repo_spec)] + pub repo: Option, + + /// Override the auto-derived slug (defaults to the toolpath document id) + #[arg(long)] + pub slug: Option, + + /// Make the uploaded path publicly listable (default: secret/unlisted) + #[arg(long)] + pub public: bool, + + /// Narrow the picker to one harness, or skip the picker entirely + /// when used with --session. + #[arg(long, value_enum)] + pub harness: Option, + + /// Skip the picker. Requires --harness; requires --project for + /// claude/gemini/pi. + #[arg(long, requires = "harness")] + pub session: Option, + + /// Override cwd-as-project. Filters the picker to sessions tied to + /// this project across all harnesses. + #[arg(long)] + pub project: Option, + + /// Skip writing the cache; derive in-memory only + #[arg(long)] + pub no_cache: bool, +} + +/// Which agent harness a session was produced by. 
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)] +pub(crate) enum Harness { + Claude, + Gemini, + Codex, + Opencode, + Pi, +} + +impl Harness { + pub(crate) fn name(&self) -> &'static str { + match self { + Harness::Claude => "claude", + Harness::Gemini => "gemini", + Harness::Codex => "codex", + Harness::Opencode => "opencode", + Harness::Pi => "pi", + } + } + + /// Padded so all five symbols line up in the fzf column. + pub(crate) fn symbol(&self) -> &'static str { + match self { + Harness::Claude => "claude ", + Harness::Gemini => "gemini ", + Harness::Codex => "codex ", + Harness::Opencode => "opencode", + Harness::Pi => "pi ", + } + } + + /// True when the underlying provider keys sessions by project path. + /// claude/gemini/pi: true. codex/opencode: false (sessions store cwd + /// per-row, not as a directory key). + pub(crate) fn project_keyed(&self) -> bool { + matches!(self, Harness::Claude | Harness::Gemini | Harness::Pi) + } + + pub(crate) fn from_arg(arg: HarnessArg) -> Self { + match arg { + HarnessArg::Claude => Harness::Claude, + HarnessArg::Gemini => Harness::Gemini, + HarnessArg::Codex => Harness::Codex, + HarnessArg::Opencode => Harness::Opencode, + HarnessArg::Pi => Harness::Pi, + } + } + + pub(crate) fn parse(s: &str) -> Option { + match s { + "claude" => Some(Harness::Claude), + "gemini" => Some(Harness::Gemini), + "codex" => Some(Harness::Codex), + "opencode" => Some(Harness::Opencode), + "pi" => Some(Harness::Pi), + _ => None, + } + } +} + +/// One row in the unified session picker. +#[derive(Debug, Clone)] +pub(crate) struct SessionRow { + pub(crate) harness: Harness, + /// Project path for keyed providers; `None` for codex/opencode. + pub(crate) project: Option, + /// Recorded cwd from the session (codex/opencode only). 
+ pub(crate) cwd: Option, + pub(crate) session_id: String, + pub(crate) title: String, + pub(crate) last_activity: Option>, + pub(crate) message_count: usize, + pub(crate) matches_cwd: bool, +} + +/// Bundle of provider managers used during aggregation. Production code +/// builds this from real `$HOME` via `from_environment`; tests construct +/// it directly with provider-specific resolvers. +#[derive(Default)] +pub(crate) struct HarnessBundle { + pub(crate) claude: Option, + pub(crate) gemini: Option, + pub(crate) codex: Option, + pub(crate) opencode: Option, + pub(crate) pi: Option, +} + +impl HarnessBundle { + /// Build the production bundle. Each provider is included + /// unconditionally (its `new()` doesn't fail on a missing home dir); + /// `gather_sessions` skips the ones whose listing returns empty/NotFound. + pub(crate) fn from_environment() -> Self { + Self { + claude: Some(toolpath_claude::ClaudeConvo::new()), + gemini: Some(toolpath_gemini::GeminiConvo::new()), + codex: Some(toolpath_codex::CodexConvo::new()), + opencode: Some(toolpath_opencode::OpencodeConvo::new()), + pi: Some(toolpath_pi::PiConvo::new()), + } + } +} + +/// Aggregate sessions across the harnesses in `bundle`, ranked so that +/// rows whose project (or recorded cwd) canonicalizes to `cwd` come +/// first, sorted by descending `last_activity`. +/// +/// Filters: `harness_filter` keeps only rows from one harness; `project_filter` +/// keeps only rows whose project (for keyed) or cwd (for session-keyed) +/// canonicalizes to that path. 
+pub(crate) fn gather_sessions( + bundle: &HarnessBundle, + cwd: &std::path::Path, + harness_filter: Option, + project_filter: Option<&std::path::Path>, +) -> Vec { + let mut rows = Vec::new(); + let canonical_cwd = canonicalize_or_self(cwd); + let canonical_project = project_filter.map(canonicalize_or_self); + + let want = |h: Harness| harness_filter.is_none_or(|f| f == h); + + if want(Harness::Claude) + && let Some(mgr) = &bundle.claude + { + collect_claude(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows); + } + if want(Harness::Gemini) + && let Some(mgr) = &bundle.gemini + { + collect_gemini(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows); + } + if want(Harness::Pi) + && let Some(mgr) = &bundle.pi + { + collect_pi(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows); + } + if want(Harness::Codex) + && let Some(mgr) = &bundle.codex + { + collect_codex(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows); + } + if want(Harness::Opencode) + && let Some(mgr) = &bundle.opencode + { + collect_opencode(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows); + } + + rows.sort_by(|a, b| { + b.matches_cwd + .cmp(&a.matches_cwd) + .then_with(|| b.last_activity.cmp(&a.last_activity)) + }); + rows +} + +fn canonicalize_or_self(p: &std::path::Path) -> std::path::PathBuf { + std::fs::canonicalize(p).unwrap_or_else(|_| p.to_path_buf()) +} + +fn paths_match(a: &std::path::Path, b: &std::path::Path) -> bool { + canonicalize_or_self(a) == canonicalize_or_self(b) +} + +fn collect_claude( + mgr: &toolpath_claude::ClaudeConvo, + canonical_cwd: &std::path::Path, + project_filter: Option<&std::path::Path>, + out: &mut Vec, +) { + let projects = match mgr.list_projects() { + Ok(ps) if !ps.is_empty() => ps, + Ok(_) => return, + Err(e) if is_not_found_claude(&e) => return, + Err(e) => { + eprintln!("warning: claude aggregation failed: {e}"); + return; + } + }; + for project in projects { + let project_path = 
std::path::Path::new(&project); + if let Some(filter) = project_filter + && !paths_match(project_path, filter) + { + continue; + } + let metas = match mgr.list_conversation_metadata(&project) { + Ok(m) => m, + Err(e) => { + eprintln!("warning: claude project {project} failed: {e}"); + continue; + } + }; + let matches_cwd = paths_match(project_path, canonical_cwd); + for m in metas { + out.push(SessionRow { + harness: Harness::Claude, + project: Some(m.project_path), + cwd: None, + session_id: m.session_id, + title: m + .first_user_message + .unwrap_or_else(|| "(no prompt)".to_string()), + last_activity: m.last_activity, + message_count: m.message_count, + matches_cwd, + }); + } + } +} + +fn collect_gemini( + mgr: &toolpath_gemini::GeminiConvo, + canonical_cwd: &std::path::Path, + project_filter: Option<&std::path::Path>, + out: &mut Vec, +) { + let projects = match mgr.list_projects() { + Ok(ps) if !ps.is_empty() => ps, + Ok(_) => return, + Err(e) if is_not_found_gemini(&e) => return, + Err(e) => { + eprintln!("warning: gemini aggregation failed: {e}"); + return; + } + }; + for project in projects { + let project_path = std::path::Path::new(&project); + if let Some(filter) = project_filter + && !paths_match(project_path, filter) + { + continue; + } + let metas = match mgr.list_conversation_metadata(&project) { + Ok(m) => m, + Err(e) => { + eprintln!("warning: gemini project {project} failed: {e}"); + continue; + } + }; + let matches_cwd = paths_match(project_path, canonical_cwd); + for m in metas { + out.push(SessionRow { + harness: Harness::Gemini, + project: Some(m.project_path), + cwd: None, + session_id: m.session_uuid, + title: m + .first_user_message + .unwrap_or_else(|| "(no prompt)".to_string()), + last_activity: m.last_activity, + message_count: m.message_count, + matches_cwd, + }); + } + } +} + +fn collect_pi( + mgr: &toolpath_pi::PiConvo, + canonical_cwd: &std::path::Path, + project_filter: Option<&std::path::Path>, + out: &mut Vec, +) { + let projects = 
match mgr.list_projects() { + Ok(ps) if !ps.is_empty() => ps, + Ok(_) => return, + Err(e) if is_not_found_pi(&e) => return, + Err(e) => { + eprintln!("warning: pi aggregation failed: {e}"); + return; + } + }; + for project in projects { + let project_path = std::path::Path::new(&project); + if let Some(filter) = project_filter + && !paths_match(project_path, filter) + { + continue; + } + let metas = match mgr.list_sessions(&project) { + Ok(m) => m, + Err(e) => { + eprintln!("warning: pi project {project} failed: {e}"); + continue; + } + }; + let matches_cwd = paths_match(project_path, canonical_cwd); + for m in metas { + // SessionMeta.timestamp is a String; parse to DateTime when possible. + let last_activity = chrono::DateTime::parse_from_rfc3339(&m.timestamp) + .ok() + .map(|d| d.with_timezone(&Utc)); + out.push(SessionRow { + harness: Harness::Pi, + project: Some(project.clone()), + cwd: None, + session_id: m.id, + title: m + .first_user_message + .unwrap_or_else(|| "(no prompt)".to_string()), + last_activity, + message_count: m.entry_count, + matches_cwd, + }); + } + } +} + +fn collect_codex( + mgr: &toolpath_codex::CodexConvo, + canonical_cwd: &std::path::Path, + project_filter: Option<&std::path::Path>, + out: &mut Vec, +) { + let metas = match mgr.list_sessions() { + Ok(m) if !m.is_empty() => m, + Ok(_) => return, + Err(e) if is_not_found_codex(&e) => return, + Err(e) => { + eprintln!("warning: codex aggregation failed: {e}"); + return; + } + }; + for m in metas { + let cwd_str = m.cwd.as_ref().map(|p| p.to_string_lossy().into_owned()); + if let Some(filter) = project_filter { + let stored = match cwd_str.as_deref() { + Some(s) => std::path::PathBuf::from(s), + None => continue, + }; + if !paths_match(&stored, filter) { + continue; + } + } + let matches_cwd = m + .cwd + .as_deref() + .map(|p| paths_match(p, canonical_cwd)) + .unwrap_or(false); + out.push(SessionRow { + harness: Harness::Codex, + project: None, + cwd: cwd_str, + session_id: m.id, + title: m 
+ .first_user_message + .unwrap_or_else(|| "(no prompt)".to_string()), + last_activity: m.last_activity, + message_count: m.line_count, + matches_cwd, + }); + } +} + +fn collect_opencode( + mgr: &toolpath_opencode::OpencodeConvo, + canonical_cwd: &std::path::Path, + project_filter: Option<&std::path::Path>, + out: &mut Vec, +) { + let metas = match mgr.io().list_session_metadata(None) { + Ok(m) if !m.is_empty() => m, + Ok(_) => return, + Err(e) if is_not_found_opencode(&e) => return, + Err(e) => { + eprintln!("warning: opencode aggregation failed: {e}"); + return; + } + }; + for m in metas { + if let Some(filter) = project_filter + && !paths_match(&m.directory, filter) + { + continue; + } + let matches_cwd = paths_match(&m.directory, canonical_cwd); + let cwd_str = m.directory.to_string_lossy().into_owned(); + let title = match (&m.first_user_message, m.title.is_empty()) { + (Some(s), _) if !s.is_empty() => s.clone(), + (_, false) => m.title.clone(), + _ => "(no prompt)".to_string(), + }; + out.push(SessionRow { + harness: Harness::Opencode, + project: None, + cwd: Some(cwd_str), + session_id: m.id, + title, + last_activity: m.last_activity, + message_count: m.message_count, + matches_cwd, + }); + } +} + +fn is_not_found_claude(err: &toolpath_claude::ConvoError) -> bool { + use toolpath_claude::ConvoError; + matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) + || matches!(err, ConvoError::ClaudeDirectoryNotFound(_)) +} + +fn is_not_found_gemini(err: &toolpath_gemini::ConvoError) -> bool { + use toolpath_gemini::ConvoError; + matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) + || matches!(err, ConvoError::GeminiDirectoryNotFound(_)) +} + +fn is_not_found_pi(err: &toolpath_pi::PiError) -> bool { + use toolpath_pi::PiError; + matches!(err, PiError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, 
PiError::ProjectNotFound(_)) +} + +fn is_not_found_codex(err: &toolpath_codex::ConvoError) -> bool { + use toolpath_codex::ConvoError; + matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) + || matches!(err, ConvoError::CodexDirectoryNotFound(_)) +} + +fn is_not_found_opencode(err: &toolpath_opencode::ConvoError) -> bool { + use toolpath_opencode::ConvoError; + matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) + || matches!(err, ConvoError::OpencodeDirectoryNotFound(_)) + || matches!(err, ConvoError::DatabaseNotFound(_)) +} + +pub fn run(args: ShareArgs) -> Result<()> { + let harness = args.harness.map(Harness::from_arg); + + if args.session.is_some() && harness.is_none() { + anyhow::bail!("--session requires --harness"); + } + + // Build upload args + base URL once and reuse for both the explicit + // path and the picker path. `needs_auth` decides whether preflight + // can fall back to anon on credential failure. + let upload_args = crate::cmd_export::PathbaseUploadArgs { + url: args.url.clone(), + anon: args.anon, + repo: args.repo.clone(), + slug: args.slug.clone(), + public: args.public, + }; + let base_url = crate::cmd_export::resolve_upload_base_url(&upload_args); + let needs_auth = upload_args.repo.is_some() || upload_args.public || upload_args.slug.is_some(); + + if let (Some(h), Some(session)) = (harness, &args.session) { + // Explicit-args: validate creds before derive so a credential + // failure doesn't waste the derive/cache work. 
+ let auth = crate::cmd_pathbase::preflight_auth(&base_url, upload_args.anon, needs_auth)?; + return share_explicit(h, session.as_str(), &args, auth, base_url); + } + + let cwd = std::env::current_dir()?; + let bundle = HarnessBundle::from_environment(); + let project_filter = args.project.as_deref(); + let rows = gather_sessions(&bundle, &cwd, harness, project_filter); + + if rows.is_empty() { + return bail_no_sessions(&bundle, project_filter); + } + + if !crate::fzf::available() { + eprintln!( + "Interactive `path share` needs `fzf` on PATH and a TTY.\n\ + \n\ + Manual recipe:\n \ + path import <harness> # writes a cache entry, prints its id\n \ + path export pathbase --input <id>" + ); + anyhow::bail!("fzf unavailable; run `path import <harness>` then `path export pathbase`"); + } + + // We have rows AND fzf available — now validate credentials before + // making the user pick a session. If preflight returns Anon (either + // explicit --anon, no creds + no auth flags, or auth probe failed + // and fell back), the picker still fires with that knowledge baked in. + let auth = crate::cmd_pathbase::preflight_auth(&base_url, upload_args.anon, needs_auth)?; + + let lines: Vec<String> = rows.iter().map(format_picker_row).collect(); + let header = format!("share an agent session (Enter = upload to {base_url})"); + let opts = crate::fzf::PickOptions { + with_nth: "4..", + prompt: "share> ", + preview: Some("path show {1} --project {2} --session {3}"), + // Stacked layout: preview above the list, list below. Fits narrow + // terminals better than the default side-by-side and gives the + // session preview the full terminal width to render `path show`. + preview_window: "up:60%:wrap", + header: Some(&header), + tiebreak: "index", + multi: false, + }; + let line = match crate::fzf::pick(&lines, &opts)?
{ + crate::fzf::PickResult::Selected(v) => match v.into_iter().next() { + Some(l) => l, + // Selected with an empty payload should not happen (fzf exits 0 + // only when at least one row was confirmed), but treat it like + // no-match for safety. + None => return Ok(()), + }, + // No row matched the query — exit 0, same as today, no extra noise. + crate::fzf::PickResult::NoMatch => return Ok(()), + // Esc / Ctrl-C: deliberate user cancel. Signal to the shell with + // exit 130 so it's distinguishable from a successful share. + crate::fzf::PickResult::Cancelled => std::process::exit(130), + }; + let (h, key, session, title) = parse_picker_row(&line) + .ok_or_else(|| anyhow::anyhow!("internal: failed to parse picker row"))?; + + let explicit = ShareArgs { + url: args.url.clone(), + anon: args.anon, + repo: args.repo.clone(), + slug: args.slug.clone(), + public: args.public, + harness: Some(harness_to_arg(h)), + session: None, // unused by share_explicit + project: if h.project_keyed() { + Some(PathBuf::from(&key)) + } else { + None + }, + no_cache: args.no_cache, + }; + // Show the conversation title in the confirmation line; the session id + // is opaque and doesn't help the user verify they picked the right + // thing. `{:?}` adds the surrounding quotes per the spec. + eprintln!("Picked {} session {:?}", h.name(), title); + share_explicit(h, &session, &explicit, auth, base_url) +} + +fn harness_to_arg(h: Harness) -> HarnessArg { + match h { + Harness::Claude => HarnessArg::Claude, + Harness::Gemini => HarnessArg::Gemini, + Harness::Codex => HarnessArg::Codex, + Harness::Opencode => HarnessArg::Opencode, + Harness::Pi => HarnessArg::Pi, + } +} + +fn bail_no_sessions( + bundle: &HarnessBundle, + project_filter: Option<&std::path::Path>, +) -> Result<()> { + if let Some(p) = project_filter { + anyhow::bail!( + "No agent sessions found in project {}. 
Run without --project to see sessions across all projects.", + p.display() + ); + } + + let mut summary = String::from("No agent sessions found.\n"); + // Pad harness names so the path column lines up: "opencode:" is the + // longest at 9 chars (8 + colon). + let home = home_dir(); + summary.push_str(&format_status_line( + "claude", + &harness_status_claude(bundle, home.as_deref()), + )); + summary.push_str(&format_status_line( + "gemini", + &harness_status_gemini(bundle, home.as_deref()), + )); + summary.push_str(&format_status_line( + "codex", + &harness_status_codex(bundle, home.as_deref()), + )); + summary.push_str(&format_status_line( + "opencode", + &harness_status_opencode(bundle, home.as_deref()), + )); + summary.push_str(&format_status_line( + "pi", + &harness_status_pi(bundle, home.as_deref()), + )); + eprint!("{summary}"); + anyhow::bail!("no shareable sessions"); + } + + /// Cross-platform `$HOME` lookup matching the providers' internal helpers. + /// Returns `None` only when neither `$HOME` nor `$USERPROFILE` is set. + fn home_dir() -> Option<std::path::PathBuf> { + std::env::var_os("HOME") + .or_else(|| std::env::var_os("USERPROFILE")) + .map(std::path::PathBuf::from) + } + + /// Human-readable status of a harness's on-disk store: either the (possibly + /// home-relative) path with a "(0 sessions)" hint, or the path with a + /// "not found" hint when the directory/database is absent. + #[derive(Debug, PartialEq, Eq)] + struct HarnessStatus { + /// Display path (tilde-prefixed when under `$HOME`). + path: String, + /// True when the path exists on disk. + exists: bool, + } + + impl HarnessStatus { + fn render(&self) -> String { + if self.exists { + format!("{} (0 sessions)", self.path) + } else { + format!("{} not found", self.path) + } + } + + /// Status when the resolver itself failed (e.g. no $HOME).
+ fn unresolved() -> Self { + Self { + path: "".to_string(), + exists: false, + } + } +} + +/// Format a single status line, padding the harness name so that the path +/// column lines up across all five rows. The longest name is "opencode" (8). +fn format_status_line(name: &str, status: &HarnessStatus) -> String { + format!(" {:<9} {}\n", format!("{name}:"), status.render()) +} + +fn harness_status_claude(bundle: &HarnessBundle, home: Option<&std::path::Path>) -> HarnessStatus { + let Some(mgr) = &bundle.claude else { + return HarnessStatus::unresolved(); + }; + match mgr.resolver().projects_dir() { + Ok(p) => HarnessStatus { + path: home_relative(&p, home), + exists: p.exists(), + }, + Err(_) => HarnessStatus::unresolved(), + } +} + +fn harness_status_gemini(bundle: &HarnessBundle, home: Option<&std::path::Path>) -> HarnessStatus { + let Some(mgr) = &bundle.gemini else { + return HarnessStatus::unresolved(); + }; + match mgr.resolver().tmp_dir() { + Ok(p) => HarnessStatus { + path: home_relative(&p, home), + exists: p.exists(), + }, + Err(_) => HarnessStatus::unresolved(), + } +} + +fn harness_status_codex(bundle: &HarnessBundle, home: Option<&std::path::Path>) -> HarnessStatus { + let Some(mgr) = &bundle.codex else { + return HarnessStatus::unresolved(); + }; + match mgr.resolver().sessions_root() { + Ok(p) => HarnessStatus { + path: home_relative(&p, home), + exists: p.exists(), + }, + Err(_) => HarnessStatus::unresolved(), + } +} + +fn harness_status_opencode( + bundle: &HarnessBundle, + home: Option<&std::path::Path>, +) -> HarnessStatus { + let Some(mgr) = &bundle.opencode else { + return HarnessStatus::unresolved(); + }; + match mgr.resolver().db_path() { + Ok(p) => HarnessStatus { + path: home_relative(&p, home), + exists: p.exists(), + }, + Err(_) => HarnessStatus::unresolved(), + } +} + +fn harness_status_pi(bundle: &HarnessBundle, home: Option<&std::path::Path>) -> HarnessStatus { + let Some(mgr) = &bundle.pi else { + return HarnessStatus::unresolved(); 
+ }; + let p = mgr.resolver().sessions_dir().to_path_buf(); + HarnessStatus { + path: home_relative(&p, home), + exists: p.exists(), + } +} + +/// Display `path` as `~/relative/part` when it's under `home`, otherwise +/// return its absolute lossy form. Pure helper — does no filesystem I/O. +fn home_relative(path: &std::path::Path, home: Option<&std::path::Path>) -> String { + if let Some(home) = home + && let Ok(rest) = path.strip_prefix(home) + { + // strip_prefix returns the empty path when path == home; treat that + // as plain "~". + if rest.as_os_str().is_empty() { + return "~".to_string(); + } + return format!("~/{}", rest.display()); + } + path.display().to_string() +} + +fn share_explicit( + harness: Harness, + session: &str, + args: &ShareArgs, + auth: crate::cmd_pathbase::AuthMode, + base_url: String, +) -> Result<()> { + let project = match (harness.project_keyed(), args.project.as_ref()) { + (true, Some(p)) => Some(p.to_string_lossy().into_owned()), + (true, None) => anyhow::bail!( + "--project required when --harness is {} and --session is set", + harness.name() + ), + (false, _) => None, + }; + + let derived = derive_session(harness, project.as_deref(), session)?; + let summary = format!("{} session {}", harness.name(), derived.cache_id); + + if !args.no_cache { + // The cache entry should always reflect what was just uploaded. + // `path share` is "ship the current state of this session"; if + // the conversation has grown since a prior share, the in-memory + // body has the new turns but a stale cache file would not — and + // the upload uses the fresh body, not the cache. Always + // overwrite so cache and upload agree (use `--no-cache` to skip + // the cache write entirely). 
+ let path = crate::cmd_cache::write_cached(&derived.cache_id, &derived.doc, true)?; + eprintln!( + "Cached {} session → {} ({})", + harness.name(), + derived.cache_id, + path.display() + ); + } + + let body = derived.doc.to_json()?; + let upload = crate::cmd_export::PathbaseUploadArgs { + url: args.url.clone(), + anon: args.anon, + repo: args.repo.clone(), + slug: args.slug.clone(), + public: args.public, + }; + crate::cmd_export::run_pathbase_inner(auth, base_url, upload, &body, &summary) +} + +/// Build the TSV line fed to fzf. Cols 1–3 are hidden (harness/key/session, +/// used as parser keys); cols 4..8 are visible to the user. +fn format_picker_row(row: &SessionRow) -> String { + let key = row + .project + .clone() + .or_else(|| row.cwd.clone()) + .unwrap_or_default(); + let when = row + .last_activity + .map(|t| t.format("%Y-%m-%d %H:%M").to_string()) + .unwrap_or_else(|| " — ".to_string()); + let scope = if row.matches_cwd { "·" } else { " " }; + let project_short = project_short(&key); + let title = fzf_title(&row.title); + format!( + "{}\t{}\t{}\t{}\t{}\t{} msgs\t{}\t{}\t{}", + row.harness.name(), + tab_safe(&key), + tab_safe(&row.session_id), + row.harness.symbol(), + when, + row.message_count, + scope, + tab_safe(&project_short), + title, + ) +} + +/// Inverse of [`format_picker_row`] — pulls (harness, key, session, title) +/// back out of the line fzf returned. Returns `None` if the line is +/// malformed. The title is column 9 of the TSV; it lives in the visible +/// portion so it round-trips through fzf unchanged. +fn parse_picker_row(line: &str) -> Option<(Harness, String, String, String)> { + let mut parts = line.split('\t'); + let h = Harness::parse(parts.next()?)?; + let key = parts.next()?.to_string(); + let session = parts.next()?.to_string(); + if session.is_empty() { + return None; + } + // Skip cols 4..8 (symbol, when, msgs, scope, project_short) to reach + // the title at col 9. 
+ let title = parts.nth(5).unwrap_or("").to_string(); + Some((h, key, session, title)) +} + +fn tab_safe(s: &str) -> String { + s.replace(['\t', '\n', '\r'], " ") +} + +fn fzf_title(s: &str) -> String { + const MAX: usize = 120; + let safe = tab_safe(s); + if safe.chars().count() > MAX { + let head: String = safe.chars().take(MAX - 1).collect(); + format!("{head}…") + } else { + safe + } +} + +fn project_short(p: &str) -> String { + let trimmed = p.trim_end_matches('/'); + let parts: Vec<&str> = trimmed.rsplit('/').take(2).collect(); + if parts.is_empty() { + return p.to_string(); + } + let mut out: Vec<&str> = parts.into_iter().collect(); + out.reverse(); + out.join("/") +} + +fn derive_session( + harness: Harness, + project: Option<&str>, + session: &str, +) -> Result { + match harness { + Harness::Claude => { + crate::cmd_import::derive_claude_session(project.expect("project_keyed"), session) + } + Harness::Gemini => crate::cmd_import::derive_gemini_session( + project.expect("project_keyed"), + session, + false, + ), + Harness::Pi => { + crate::cmd_import::derive_pi_session(project.expect("project_keyed"), session, None) + } + Harness::Codex => crate::cmd_import::derive_codex_session(session), + Harness::Opencode => crate::cmd_import::derive_opencode_session(session, false), + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn harness_name_and_symbol_are_distinct() { + let all = [ + Harness::Claude, + Harness::Gemini, + Harness::Codex, + Harness::Opencode, + Harness::Pi, + ]; + let names: Vec<&str> = all.iter().map(|h| h.name()).collect(); + let symbols: Vec<&str> = all.iter().map(|h| h.symbol()).collect(); + assert_eq!(names.len(), 5); + assert_eq!( + names.iter().collect::>().len(), + 5, + "names must be unique" + ); + assert_eq!( + symbols + .iter() + .collect::>() + .len(), + 5, + "symbols must be unique" + ); + } + + #[test] + fn harness_project_keyed_matches_design() { + assert!(Harness::Claude.project_keyed()); + 
assert!(Harness::Gemini.project_keyed()); + assert!(Harness::Pi.project_keyed()); + assert!(!Harness::Codex.project_keyed()); + assert!(!Harness::Opencode.project_keyed()); + } + + #[test] + fn harness_from_arg_roundtrips() { + for (arg, harness) in [ + (HarnessArg::Claude, Harness::Claude), + (HarnessArg::Gemini, Harness::Gemini), + (HarnessArg::Codex, Harness::Codex), + (HarnessArg::Opencode, Harness::Opencode), + (HarnessArg::Pi, Harness::Pi), + ] { + assert_eq!(Harness::from_arg(arg), harness); + } + } + + use std::path::Path; + use tempfile::TempDir; + + fn write_claude_session(claude_dir: &Path, project_slug: &str, session: &str, prompt: &str) { + let project_dir = claude_dir.join("projects").join(project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + let user = format!( + r#"{{"type":"user","uuid":"u-{session}","timestamp":"2024-01-02T00:00:00Z","cwd":"/test/project","message":{{"role":"user","content":"{prompt}"}}}}"# + ); + let asst = format!( + r#"{{"type":"assistant","uuid":"a-{session}","timestamp":"2024-01-02T00:00:01Z","message":{{"role":"assistant","content":"hi"}}}}"# + ); + std::fs::write( + project_dir.join(format!("{session}.jsonl")), + format!("{user}\n{asst}\n"), + ) + .unwrap(); + } + + fn claude_only_bundle(home: &Path) -> HarnessBundle { + let claude_dir = home.join(".claude"); + std::fs::create_dir_all(&claude_dir).unwrap(); + let resolver = toolpath_claude::PathResolver::new().with_claude_dir(&claude_dir); + HarnessBundle { + claude: Some(toolpath_claude::ClaudeConvo::with_resolver(resolver)), + ..Default::default() + } + } + + #[test] + fn gather_sessions_includes_claude_rows_for_a_project() { + let temp = TempDir::new().unwrap(); + write_claude_session( + &temp.path().join(".claude"), + "-test-project", + "abc-session-one", + "Add a feature", + ); + let bundle = claude_only_bundle(temp.path()); + let cwd = Path::new("/test/project"); + let rows = gather_sessions(&bundle, cwd, None, None); + + assert_eq!(rows.len(), 1); + 
assert_eq!(rows[0].harness, Harness::Claude); + assert_eq!(rows[0].session_id, "abc-session-one"); + assert_eq!(rows[0].project.as_deref(), Some("/test/project")); + assert!(rows[0].matches_cwd, "cwd should match the project path"); + } + + #[test] + fn gather_sessions_marks_non_matching_project_rows() { + let temp = TempDir::new().unwrap(); + write_claude_session( + &temp.path().join(".claude"), + "-test-project", + "abc-session-one", + "Add a feature", + ); + let bundle = claude_only_bundle(temp.path()); + let cwd = Path::new("/some/other/place"); + let rows = gather_sessions(&bundle, cwd, None, None); + + assert_eq!(rows.len(), 1); + assert!(!rows[0].matches_cwd); + } + + #[test] + fn gather_sessions_skips_harness_with_no_home_dir() { + // Empty bundle => no rows, no panic. + let bundle = HarnessBundle::default(); + let rows = gather_sessions(&bundle, Path::new("/anywhere"), None, None); + assert!(rows.is_empty()); + } + + #[test] + fn gather_sessions_filters_by_harness() { + let temp = TempDir::new().unwrap(); + write_claude_session( + &temp.path().join(".claude"), + "-test-project", + "abc-session-one", + "hi", + ); + let bundle = claude_only_bundle(temp.path()); + let cwd = Path::new("/test/project"); + let rows = gather_sessions(&bundle, cwd, Some(Harness::Codex), None); + assert!(rows.is_empty(), "filter to codex must drop claude rows"); + } + + fn codex_only_bundle(home: &Path) -> HarnessBundle { + let codex_dir = home.join(".codex"); + std::fs::create_dir_all(&codex_dir).unwrap(); + let resolver = toolpath_codex::PathResolver::new().with_codex_dir(&codex_dir); + HarnessBundle { + codex: Some(toolpath_codex::CodexConvo::with_resolver(resolver)), + ..Default::default() + } + } + + fn write_codex_session(codex_dir: &Path, id: &str, cwd: &str) { + // Date-bucketed layout: ~/.codex/sessions/YYYY/MM/DD/rollout-*-.jsonl + let dir = codex_dir.join("sessions/2026/05/07"); + std::fs::create_dir_all(&dir).unwrap(); + let file = 
dir.join(format!("rollout-2026-05-07T00-00-00-{id}.jsonl")); + let meta = format!( + r#"{{"timestamp":"2026-05-07T00:00:00Z","type":"session_meta","payload":{{"id":"{id}","timestamp":"2026-05-07T00:00:00Z","cwd":"{cwd}","originator":"codex-tui","cli_version":"test","source":"cli","model_provider":"openai"}}}}"# + ); + let user = r#"{"timestamp":"2026-05-07T00:00:01Z","type":"response_item","payload":{"type":"message","role":"user","content":[{"type":"input_text","text":"hi"}]}}"#; + std::fs::write(file, format!("{meta}\n{user}\n")).unwrap(); + } + + #[test] + fn gather_sessions_includes_codex_rows_with_cwd_match() { + let temp = TempDir::new().unwrap(); + write_codex_session( + &temp.path().join(".codex"), + "00000000-0000-0000-0000-0000000000aa", + "/work/proj", + ); + let bundle = codex_only_bundle(temp.path()); + let rows = gather_sessions(&bundle, Path::new("/work/proj"), None, None); + assert_eq!(rows.len(), 1); + assert_eq!(rows[0].harness, Harness::Codex); + assert_eq!(rows[0].cwd.as_deref(), Some("/work/proj")); + assert!(rows[0].matches_cwd); + } + + #[test] + fn gather_sessions_ranks_cwd_matches_first() { + // Two claude sessions: one in cwd (older), one elsewhere (newer). + // Despite the elsewhere row being newer, the cwd-match must come first. + let temp = TempDir::new().unwrap(); + let claude_dir = temp.path().join(".claude"); + write_claude_session(&claude_dir, "-cwd-project", "in-cwd-session", "hi"); + // Bump activity on the not-in-cwd session by writing a later timestamp. 
+ let not_dir = claude_dir.join("projects").join("-other-project"); + std::fs::create_dir_all(&not_dir).unwrap(); + std::fs::write( + not_dir.join("not-in-cwd-session.jsonl"), + r#"{"type":"user","uuid":"u-x","timestamp":"2030-01-01T00:00:00Z","cwd":"/other/project","message":{"role":"user","content":"later"}}"#.to_string() + + "\n", + ) + .unwrap(); + let bundle = claude_only_bundle(temp.path()); + let rows = gather_sessions(&bundle, Path::new("/cwd/project"), None, None); + + assert_eq!(rows.len(), 2); + assert_eq!(rows[0].session_id, "in-cwd-session"); + assert!(rows[0].matches_cwd); + assert!(!rows[1].matches_cwd); + } + + #[test] + #[cfg(unix)] + fn paths_match_canonicalizes_through_symlink() { + // `paths_match` is the function that produces `SessionRow.matches_cwd` + // (collect_* all delegate to it). Without canonicalization, a user who + // navigated to a project via a symlink would see their cwd-row sink + // in the picker because the symlink path string ≠ the project path + // string. Verify both arguments are canonicalized. + // + // Note: we test `paths_match` directly rather than going through + // `gather_sessions` because Claude's project-dir slug encoding is + // lossy (sanitize_project_path: '/', '_', '.' → '-'; unsanitize: only + // '-' → '/'). On macOS, tempdir paths contain '.' and end up under + // /private/var/..., so the unsanitized slug never round-trips back to + // the real on-disk path. This direct test covers the canonicalization + // bug regardless of platform-specific tempdir layouts. + let temp = TempDir::new().unwrap(); + let real_project = temp.path().join("real-project"); + std::fs::create_dir_all(&real_project).unwrap(); + let symlink_path = temp.path().join("symlink-to-project"); + std::os::unix::fs::symlink(&real_project, &symlink_path).unwrap(); + + // Sanity-check the setup: the symlink and its target are different + // string-paths but resolve to the same canonical path.
+ assert_ne!(real_project, symlink_path); + assert_eq!( + std::fs::canonicalize(&real_project).unwrap(), + std::fs::canonicalize(&symlink_path).unwrap(), + ); + + // The actual property under test. + assert!( + paths_match(&real_project, &symlink_path), + "paths_match must canonicalize both sides so symlink == target" + ); + // And symmetric. + assert!( + paths_match(&symlink_path, &real_project), + "paths_match must be symmetric across the symlink" + ); + } + + #[test] + fn parse_picker_row_roundtrips_keyed() { + let row = SessionRow { + harness: Harness::Claude, + project: Some("/tmp/proj".to_string()), + cwd: None, + session_id: "sess-abc".to_string(), + title: "Hello\tworld".to_string(), + last_activity: None, + message_count: 3, + matches_cwd: true, + }; + let line = format_picker_row(&row); + let (harness, key, session, title) = parse_picker_row(&line).unwrap(); + assert_eq!(harness, Harness::Claude); + assert_eq!(key, "/tmp/proj"); + assert_eq!(session, "sess-abc"); + // tab_safe replaces the tab with a space, but the title content + // otherwise round-trips. 
+ assert_eq!(title, "Hello world"); + } + + #[test] + fn parse_picker_row_roundtrips_session_keyed() { + let row = SessionRow { + harness: Harness::Codex, + project: None, + cwd: Some("/work/proj".to_string()), + session_id: "0190abcd".to_string(), + title: "(no prompt)".to_string(), + last_activity: None, + message_count: 0, + matches_cwd: false, + }; + let line = format_picker_row(&row); + let (harness, key, session, title) = parse_picker_row(&line).unwrap(); + assert_eq!(harness, Harness::Codex); + assert_eq!(key, "/work/proj"); // codex has no project; cwd carried as the keyed slot + assert_eq!(session, "0190abcd"); + assert_eq!(title, "(no prompt)"); + } + + #[test] + fn parse_picker_row_carries_title_with_unicode() { + let row = SessionRow { + harness: Harness::Gemini, + project: Some("/work/proj".to_string()), + cwd: None, + session_id: "11111111-2222-3333-4444-555555555555".to_string(), + title: "Add the share command — finally".to_string(), + last_activity: None, + message_count: 42, + matches_cwd: true, + }; + let line = format_picker_row(&row); + let (_, _, _, title) = parse_picker_row(&line).unwrap(); + assert_eq!(title, "Add the share command — finally"); + } + + #[test] + fn home_relative_strips_home_prefix() { + let home = Path::new("/Users/alex"); + assert_eq!( + home_relative(Path::new("/Users/alex/.claude/projects"), Some(home)), + "~/.claude/projects" + ); + } + + #[test] + fn home_relative_returns_tilde_for_home_itself() { + let home = Path::new("/Users/alex"); + assert_eq!(home_relative(home, Some(home)), "~"); + } + + #[test] + fn home_relative_passes_through_paths_outside_home() { + let home = Path::new("/Users/alex"); + assert_eq!( + home_relative(Path::new("/tmp/elsewhere"), Some(home)), + "/tmp/elsewhere" + ); + } + + #[test] + fn home_relative_passes_through_when_no_home() { + assert_eq!(home_relative(Path::new("/foo/bar"), None), "/foo/bar"); + } + + #[test] + fn harness_status_renders_existing_path_with_zero_sessions() { + let s = 
HarnessStatus { + path: "~/.claude/projects".to_string(), + exists: true, + }; + assert_eq!(s.render(), "~/.claude/projects (0 sessions)"); + } + + #[test] + fn harness_status_renders_missing_path_as_not_found() { + let s = HarnessStatus { + path: "~/.gemini/tmp".to_string(), + exists: false, + }; + assert_eq!(s.render(), "~/.gemini/tmp not found"); + } + + #[test] + fn format_status_line_pads_for_alignment() { + let s = HarnessStatus { + path: "~/.codex/sessions".to_string(), + exists: true, + }; + // "claude:" (7) needs 2 trailing spaces; "opencode:" (9) needs 0; + // "pi:" (3) needs 6. The visible-path column should always start at + // the same offset. + let claude_line = format_status_line("claude", &s); + let opencode_line = format_status_line("opencode", &s); + let pi_line = format_status_line("pi", &s); + let offset = |line: &str| line.find('~').unwrap(); + assert_eq!(offset(&claude_line), offset(&opencode_line)); + assert_eq!(offset(&claude_line), offset(&pi_line)); + } + + #[test] + fn harness_status_for_missing_claude_dir_reports_not_found() { + // Bundle whose claude resolver points at a directory that doesn't + // exist on disk; the status should still resolve a path and report + // it as missing rather than going through the `unresolved` branch. 
+ let temp = TempDir::new().unwrap(); + let claude_dir = temp.path().join(".claude"); // never created + let resolver = toolpath_claude::PathResolver::new().with_claude_dir(&claude_dir); + let bundle = HarnessBundle { + claude: Some(toolpath_claude::ClaudeConvo::with_resolver(resolver)), + ..Default::default() + }; + let status = harness_status_claude(&bundle, None); + assert!(!status.exists, "missing dir must report exists=false"); + assert!( + status.path.contains("projects"), + "path must include the projects subdir (got {:?})", + status.path + ); + } + + #[test] + fn harness_status_for_present_claude_dir_reports_existence() { + let temp = TempDir::new().unwrap(); + let claude_dir = temp.path().join(".claude"); + std::fs::create_dir_all(claude_dir.join("projects")).unwrap(); + let resolver = toolpath_claude::PathResolver::new().with_claude_dir(&claude_dir); + let bundle = HarnessBundle { + claude: Some(toolpath_claude::ClaudeConvo::with_resolver(resolver)), + ..Default::default() + }; + let status = harness_status_claude(&bundle, None); + assert!(status.exists); + } + + #[test] + fn harness_status_for_empty_bundle_is_unresolved() { + let bundle = HarnessBundle::default(); + // Every harness slot is None, so each status hits the unresolved branch. + for status in [ + harness_status_claude(&bundle, None), + harness_status_gemini(&bundle, None), + harness_status_codex(&bundle, None), + harness_status_opencode(&bundle, None), + harness_status_pi(&bundle, None), + ] { + assert_eq!(status, HarnessStatus::unresolved()); + assert!(!status.exists); + } + } +} diff --git a/crates/path-cli/src/cmd_show.rs b/crates/path-cli/src/cmd_show.rs index 0e1d37e..d301c43 100644 --- a/crates/path-cli/src/cmd_show.rs +++ b/crates/path-cli/src/cmd_show.rs @@ -38,12 +38,20 @@ pub enum ShowSource { /// Session id, UUID, or filename stem #[arg(short, long)] session: String, + + /// Compatibility shim for the unified `path share` preview template; ignored. 
+ #[arg(long, hide = true)] + project: Option, }, /// Show an opencode session as a markdown summary Opencode { /// Session id (`ses_…`) #[arg(short, long)] session: String, + + /// Compatibility shim for the unified `path share` preview template; ignored. + #[arg(long, hide = true)] + project: Option, }, /// Show a Pi (pi.dev) session as a markdown summary Pi { @@ -96,7 +104,10 @@ fn derive_one(source: ShowSource) -> Result { }; Ok(toolpath_gemini::derive::derive_path(&convo, &cfg)) } - ShowSource::Codex { session } => { + ShowSource::Codex { + session, + project: _, + } => { let manager = toolpath_codex::CodexConvo::new(); let s = manager .read_session(&session) @@ -104,7 +115,10 @@ fn derive_one(source: ShowSource) -> Result { let cfg = toolpath_codex::derive::DeriveConfig { project_path: None }; Ok(toolpath_codex::derive::derive_path(&s, &cfg)) } - ShowSource::Opencode { session } => { + ShowSource::Opencode { + session, + project: _, + } => { let manager = toolpath_opencode::OpencodeConvo::new(); let s = manager .read_session(&session) diff --git a/crates/path-cli/src/fzf.rs b/crates/path-cli/src/fzf.rs index 500c2ee..056df6d 100644 --- a/crates/path-cli/src/fzf.rs +++ b/crates/path-cli/src/fzf.rs @@ -37,9 +37,25 @@ fn which(cmd: &str) -> Option { None } -/// Run fzf with the supplied lines on stdin. Returns the selected lines, or -/// an empty vec if the user cancelled (Esc / Ctrl-C / no match). -pub fn pick(lines: &[String], opts: &PickOptions<'_>) -> Result> { +/// Outcome of an fzf invocation. +/// +/// Distinguishes a deliberate user cancel (Esc / Ctrl-C, fzf exit 130) from +/// the no-match case (fzf exit 1). Callers that want to surface a non-zero +/// exit on cancel can match on `Cancelled`; callers that just want the picked +/// lines treat both `NoMatch` and `Cancelled` as "empty selection". +pub enum PickResult { + /// fzf exited 0 with at least one selected line. 
+ Selected(Vec), + /// fzf exited 1: input was non-empty but nothing matched the query. + NoMatch, + /// fzf exited 130: the user pressed Esc / Ctrl-C / Ctrl-D. + Cancelled, +} + +/// Run fzf with the supplied lines on stdin. Returns a `PickResult` so the +/// caller can distinguish a successful selection from no-match vs. an +/// explicit user cancel (which some callers map to a non-zero exit). +pub fn pick(lines: &[String], opts: &PickOptions<'_>) -> Result { let mut args: Vec = vec![ "--delimiter=\t".into(), format!("--with-nth={}", opts.with_nth), @@ -53,7 +69,7 @@ pub fn pick(lines: &[String], opts: &PickOptions<'_>) -> Result> { if let Some(preview) = opts.preview { args.push(format!("--preview={}", preview)); - args.push("--preview-window=right:60%:wrap".into()); + args.push(format!("--preview-window={}", opts.preview_window)); } if let Some(header) = opts.header { @@ -85,9 +101,12 @@ pub fn pick(lines: &[String], opts: &PickOptions<'_>) -> Result> { match output.status.code() { Some(0) => { let text = String::from_utf8_lossy(&output.stdout); - Ok(text.lines().map(|s| s.to_string()).collect()) + Ok(PickResult::Selected( + text.lines().map(|s| s.to_string()).collect(), + )) } - Some(1) | Some(130) => Ok(Vec::new()), + Some(1) => Ok(PickResult::NoMatch), + Some(130) => Ok(PickResult::Cancelled), _ => anyhow::bail!("fzf exited with status {:?}", output.status), } } @@ -100,6 +119,9 @@ pub struct PickOptions<'a> { pub prompt: &'a str, /// Optional `--preview` command. Use `{1}`, `{2}` ... to substitute fields. pub preview: Option<&'a str>, + /// `--preview-window` placement. Defaults to `right:60%:wrap` (side-by-side); + /// pass `up:60%:wrap` for a stacked layout that fits narrow terminals. + pub preview_window: &'a str, /// Optional header line shown above the list. pub header: Option<&'a str>, /// Tiebreak ordering — `index` preserves input order. 
@@ -114,6 +136,7 @@ impl Default for PickOptions<'_> { with_nth: "2..", prompt: "> ", preview: None, + preview_window: "right:60%:wrap", header: None, tiebreak: "index", multi: false, diff --git a/crates/path-cli/src/lib.rs b/crates/path-cli/src/lib.rs index 5381588..c7f53af 100644 --- a/crates/path-cli/src/lib.rs +++ b/crates/path-cli/src/lib.rs @@ -14,6 +14,8 @@ mod cmd_project; mod cmd_query; mod cmd_render; #[cfg(not(target_os = "emscripten"))] +mod cmd_share; +#[cfg(not(target_os = "emscripten"))] mod cmd_show; mod cmd_track; mod cmd_validate; @@ -114,6 +116,12 @@ enum Commands { #[command(subcommand)] op: cmd_auth::AuthOp, }, + /// Share an agent session to Pathbase via an interactive picker + #[cfg(not(target_os = "emscripten"))] + Share { + #[command(flatten)] + args: cmd_share::ShareArgs, + }, // ── Deprecated aliases ──────────────────────────────────────────── #[command(hide = true, about = "[deprecated] Use `path import`")] @@ -158,6 +166,8 @@ pub fn run() -> Result<()> { } #[cfg(not(target_os = "emscripten"))] Commands::Auth { op } => cmd_auth::run(op), + #[cfg(not(target_os = "emscripten"))] + Commands::Share { args } => cmd_share::run(args), Commands::Derive { source } => cmd_derive::run(source, cli.pretty), Commands::Incept { args } => cmd_incept::run(args), diff --git a/crates/path-cli/tests/integration.rs b/crates/path-cli/tests/integration.rs index 2f26c3a..b75506b 100644 --- a/crates/path-cli/tests/integration.rs +++ b/crates/path-cli/tests/integration.rs @@ -663,3 +663,442 @@ fn derive_alias_still_works_with_warning() { .stdout(predicate::str::contains("\"paths\":")) .stderr(predicate::str::contains("deprecated")); } + +#[test] +fn share_help_lists_unified_picker_flags() { + cmd() + .args(["share", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("--harness")) + .stdout(predicate::str::contains("--session")) + .stdout(predicate::str::contains("--project")) + .stdout(predicate::str::contains("--anon")); +} + +#[test] +fn 
share_explicit_args_uploads_via_anon() { + use std::io::Write; + use std::net::TcpListener; + + // Stand up a one-shot mock that returns a valid AnonUploadResponse. + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let port = listener.local_addr().unwrap().port(); + let server = std::thread::spawn(move || { + let (mut stream, _) = listener.accept().unwrap(); + // Drain the request just enough to keep the OS happy. + use std::io::Read; + let mut buf = [0u8; 4096]; + let _ = stream.read(&mut buf); + let body = r#"{"id":"abc-123","path":"/anon/x/y/abc-123","share_url":"https://example.test/anon/abc-123"}"#; + let resp = format!( + "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", + body.len(), + body + ); + let _ = stream.write_all(resp.as_bytes()); + }); + + // Build a claude fixture so the explicit-args path has something to derive. + let temp = tempfile::tempdir().unwrap(); + let project = temp.path().join("proj"); + std::fs::create_dir_all(&project).unwrap(); + let claude_dir = temp.path().join(".claude"); + // toolpath-claude maps '/', '_', and '.' to '-' when sanitizing project + // paths into directory slugs — mirror that here so the fixture lands + // where the resolver looks for it. 
+ let project_slug = project + .to_string_lossy() + .replace([std::path::MAIN_SEPARATOR, '_', '.'], "-"); + let project_dir = claude_dir.join("projects").join(&project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + std::fs::write( + project_dir.join("session-abc.jsonl"), + format!( + r#"{{"type":"user","uuid":"u-1","timestamp":"2024-01-01T00:00:00Z","cwd":"{cwd}","message":{{"role":"user","content":"hi"}}}} +{{"type":"assistant","uuid":"a-1","timestamp":"2024-01-01T00:00:01Z","message":{{"role":"assistant","content":"hello"}}}} +"#, + cwd = project.display() + ), + ) + .unwrap(); + + let cfg = tempfile::tempdir().unwrap(); + cmd() + .env("HOME", temp.path()) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--anon", "--no-cache", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success() + .stdout(predicate::str::contains( + "https://example.test/anon/abc-123", + )) + .stderr(predicate::str::contains("Uploaded")); + + server.join().unwrap(); +} + +/// Helper for the cache tests. Spawns a one-shot mock anon-upload server +/// on a free port and returns (port, server-thread-handle, fixture-temp, +/// project-path, $HOME-path). 
+fn share_anon_fixture() -> ( + u16, + std::thread::JoinHandle<()>, + tempfile::TempDir, + PathBuf, + PathBuf, +) { + use std::io::{Read, Write}; + use std::net::TcpListener; + + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let port = listener.local_addr().unwrap().port(); + let server = std::thread::spawn(move || { + let (mut stream, _) = listener.accept().unwrap(); + let mut buf = [0u8; 4096]; + let _ = stream.read(&mut buf); + let body = + r#"{"id":"abc","path":"/anon/x/y/abc","share_url":"https://example.test/anon/abc"}"#; + let resp = format!( + "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", + body.len(), + body + ); + let _ = stream.write_all(resp.as_bytes()); + }); + + let temp = tempfile::tempdir().unwrap(); + let project = temp.path().join("proj"); + std::fs::create_dir_all(&project).unwrap(); + let claude_dir = temp.path().join(".claude"); + // toolpath-claude maps '/', '_', and '.' to '-' when sanitizing project + // paths into directory slugs — mirror that here so the fixture lands + // where the resolver looks for it. + let project_slug = project + .to_string_lossy() + .replace([std::path::MAIN_SEPARATOR, '_', '.'], "-"); + let project_dir = claude_dir.join("projects").join(&project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + std::fs::write( + project_dir.join("session-abc.jsonl"), + format!( + r#"{{"type":"user","uuid":"u-1","timestamp":"2024-01-01T00:00:00Z","cwd":"{cwd}","message":{{"role":"user","content":"hi"}}}} +{{"type":"assistant","uuid":"a-1","timestamp":"2024-01-01T00:00:01Z","message":{{"role":"assistant","content":"hello"}}}} +"#, + cwd = project.display() + ), + ) + .unwrap(); + + let home = temp.path().to_path_buf(); + (port, server, temp, project, home) +} + +/// Spawn a one-shot mock anon-upload server on a free port. Returns the +/// port and the join handle. 
Used by tests that need multiple sequential +/// uploads (the default fixture builds the claude session too, which we +/// don't want to redo between runs). +fn one_shot_anon_server() -> (u16, std::thread::JoinHandle<()>) { + use std::io::{Read, Write}; + use std::net::TcpListener; + + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let port = listener.local_addr().unwrap().port(); + let server = std::thread::spawn(move || { + let (mut stream, _) = listener.accept().unwrap(); + let mut buf = [0u8; 4096]; + let _ = stream.read(&mut buf); + let body = + r#"{"id":"abc","path":"/anon/x/y/abc","share_url":"https://example.test/anon/abc"}"#; + let resp = format!( + "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", + body.len(), + body + ); + let _ = stream.write_all(resp.as_bytes()); + }); + (port, server) +} + +/// `path share` re-run after a conversation has grown should overwrite +/// the cache file with the fresh derive — otherwise the cache and the +/// uploaded body would disagree (upload uses the in-memory fresh body, +/// cache file would be stale). Lock that contract in. 
+#[test] +fn share_rewrites_cache_when_session_has_grown() { + let temp = tempfile::tempdir().unwrap(); + let project = temp.path().join("proj"); + std::fs::create_dir_all(&project).unwrap(); + let claude_dir = temp.path().join(".claude"); + let project_slug = project + .to_string_lossy() + .replace([std::path::MAIN_SEPARATOR, '_', '.'], "-"); + let project_dir = claude_dir.join("projects").join(&project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + let session_file = project_dir.join("session-grow.jsonl"); + let cwd_str = project.display().to_string(); + let initial = format!( + r#"{{"type":"user","uuid":"u-1","timestamp":"2024-01-01T00:00:00Z","cwd":"{cwd_str}","message":{{"role":"user","content":"first"}}}} +{{"type":"assistant","uuid":"a-1","timestamp":"2024-01-01T00:00:01Z","message":{{"role":"assistant","content":"reply-1"}}}} +"# + ); + std::fs::write(&session_file, &initial).unwrap(); + + let cfg = tempfile::tempdir().unwrap(); + let home = temp.path(); + + // First share: cache picks up the 2-turn conversation. 
+ let (port1, server1) = one_shot_anon_server(); + cmd() + .env("HOME", home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-grow", + "--project", + ]) + .arg(&project) + .args(["--anon", "--url"]) + .arg(format!("http://127.0.0.1:{port1}")) + .assert() + .success(); + server1.join().unwrap(); + + let docs = cfg.path().join("documents"); + let cache_files: Vec<_> = std::fs::read_dir(&docs) + .unwrap() + .filter_map(|e| e.ok()) + .collect(); + assert_eq!( + cache_files.len(), + 1, + "expected one cache entry after first share" + ); + let cache_path = cache_files[0].path(); + let cache_v1 = std::fs::read_to_string(&cache_path).unwrap(); + assert!( + cache_v1.contains("reply-1"), + "v1 cache must contain reply-1" + ); + assert!( + !cache_v1.contains("reply-2"), + "v1 cache must not contain reply-2 yet" + ); + + // Conversation continues: append two more turns to the session JSONL. + let mut grown = initial.clone(); + grown.push_str(&format!( + r#"{{"type":"user","uuid":"u-2","timestamp":"2024-01-02T00:00:00Z","cwd":"{cwd_str}","message":{{"role":"user","content":"second"}}}} +{{"type":"assistant","uuid":"a-2","timestamp":"2024-01-02T00:00:01Z","message":{{"role":"assistant","content":"reply-2"}}}} +"# + )); + std::fs::write(&session_file, &grown).unwrap(); + + // Second share: must overwrite the cache file with the grown derive, + // not silently keep the v1 contents while uploading v2. 
+ let (port2, server2) = one_shot_anon_server(); + cmd() + .env("HOME", home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-grow", + "--project", + ]) + .arg(&project) + .args(["--anon", "--url"]) + .arg(format!("http://127.0.0.1:{port2}")) + .assert() + .success(); + server2.join().unwrap(); + + let cache_v2 = std::fs::read_to_string(&cache_path).unwrap(); + assert!( + cache_v2.contains("reply-2"), + "v2 cache should contain the new turn, got: {cache_v2}" + ); + assert_ne!( + cache_v1, cache_v2, + "cache file must be rewritten when the session has grown" + ); +} + +#[test] +fn share_writes_cache_by_default() { + let (port, server, _temp, project, home) = share_anon_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("HOME", &home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--anon", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success(); + + let docs = cfg.path().join("documents"); + let entries: Vec<_> = std::fs::read_dir(&docs) + .unwrap() + .filter_map(|e| e.ok()) + .collect(); + assert_eq!( + entries.len(), + 1, + "expected exactly one cache entry, got {entries:?}" + ); + let name = entries[0].file_name().to_string_lossy().into_owned(); + assert!( + name.starts_with("claude-"), + "expected claude-* cache id, got {name}" + ); + + server.join().unwrap(); +} + +#[test] +fn share_no_cache_skips_write() { + let (port, server, _temp, project, home) = share_anon_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("HOME", &home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--anon", "--no-cache", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success(); + + let docs = 
cfg.path().join("documents"); + if docs.exists() { + let entries: Vec<_> = std::fs::read_dir(&docs) + .unwrap() + .filter_map(|e| e.ok()) + .collect(); + assert!( + entries.is_empty(), + "expected no cache entries with --no-cache, got {entries:?}" + ); + } + + server.join().unwrap(); +} + +#[test] +fn share_logged_out_anon_default() { + // No --anon flag and no credentials file => share() falls through to the + // anonymous endpoint and emits a "not logged in — uploading anonymously" + // notice on stderr. This covers the logged-out branch in + // cmd_export::run_pathbase_inner that the explicit --anon tests skip. + let (port, server, _temp, project, home) = share_anon_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("HOME", &home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--no-cache", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success() + .stderr(predicate::str::contains("not logged in")) + .stderr(predicate::str::contains("uploading anonymously")); + + server.join().unwrap(); +} + +#[test] +fn share_filters_by_project_with_no_matches_errors() { + let cfg = tempfile::tempdir().unwrap(); + let home = tempfile::tempdir().unwrap(); + let nonexistent = home.path().join("never"); + + cmd() + .env("HOME", home.path()) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["share", "--project"]) + .arg(&nonexistent) + .assert() + .failure() + .stderr(predicate::str::contains( + "No agent sessions found in project", + )); +} + +#[test] +fn share_no_harness_non_tty_prints_recipe() { + // Build a minimal claude fixture in a tempdir, point HOME at it, so + // gather_sessions returns a non-empty Vec. Without this, an environment + // with no agent harnesses configured (e.g. CI) would hit bail_no_sessions + // before the fzf-unavailable recipe path. We want the recipe path here. 
+ let temp = tempfile::tempdir().unwrap(); + let project = temp.path().join("proj"); + std::fs::create_dir_all(&project).unwrap(); + let claude_dir = temp.path().join(".claude"); + let project_slug = project + .to_string_lossy() + .replace([std::path::MAIN_SEPARATOR, '_', '.'], "-"); + let project_dir = claude_dir.join("projects").join(&project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + std::fs::write( + project_dir.join("session-recipe.jsonl"), + format!( + r#"{{"type":"user","uuid":"u-1","timestamp":"2024-01-01T00:00:00Z","cwd":"{cwd}","message":{{"role":"user","content":"hi"}}}} +{{"type":"assistant","uuid":"a-1","timestamp":"2024-01-01T00:00:01Z","message":{{"role":"assistant","content":"hello"}}}} +"#, + cwd = project.display() + ), + ) + .unwrap(); + + let cfg = tempfile::tempdir().unwrap(); + cmd() + .env("HOME", temp.path()) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["share"]) + .assert() + .failure() + .stderr(predicate::str::contains("path import")) + .stderr(predicate::str::contains("path export pathbase")); +} diff --git a/crates/pathbase-client/openapi.json b/crates/pathbase-client/openapi.json index 96e8661..9565231 100644 --- a/crates/pathbase-client/openapi.json +++ b/crates/pathbase-client/openapi.json @@ -2,11 +2,11 @@ "openapi": "3.1.0", "info": { "title": "Pathbase API", - "description": "", + "description": "HTTP API for Pathbase — repositories, agent trace paths, computation graphs, and anonymous share links.\n\n**Stability.** v1 endpoints are stable in shape; additive changes (new fields, new endpoints, broader query params) ship without a version bump. Breaking changes get a new prefix (`/api/v2/...`) and a deprecation window.\n\n**Spec format.** OpenAPI 3.1, served live at `/api/v1/openapi.json`. 
The `x-pathbase-version` response header on every call carries the running build's `+` so consumers can pin or correlate.\n\n**Auth.** Most endpoints require a Pathbase session cookie or a `pat_…` bearer token (see the `bearerAuth` scheme). Unauthenticated endpoints — the `Anon` and `Signups` namespaces, public profile reads — are clearly tagged. Per-operation `security` annotations reflect the actual gate enforced by the handler.", "license": { "name": "" }, - "version": "1.0.0" + "version": "1.1.0" }, "paths": { "/api/v1/anon/paths": { @@ -14,6 +14,7 @@ "tags": [ "Anon" ], + "summary": "Upload a single-path Graph document anonymously. Stored under the\nshared `anon` user; the response carries a UUID-based `share_url`\nthe uploader can hand out. Per-IP rate-limited and capped by\n`body_limits.anon_upload_bytes`.", "operationId": "create_anon_path", "requestBody": { "content": { @@ -40,7 +41,7 @@ "description": "Invalid document / unsupported variant / empty path" }, "413": { - "description": "Request body exceeds 5 MB" + "description": "Request body exceeds the configured anon upload byte limit" }, "429": { "description": "Rate limit exceeded" @@ -48,11 +49,244 @@ } } }, + "/api/v1/anon/paths/{id}": { + "get": { + "tags": [ + "Anon" + ], + "summary": "Read an anon-uploaded trace by its UUID. 
No auth, no rate limit.", + "description": "The handler explicitly overrides the global `Cache-Control` and\n`Vary` defaults: anon responses don't depend on the `Authorization`\nheader (the endpoint ignores it), so the auth-keyed cache defaults\nwould needlessly fragment any CDN/proxy cache.", + "operationId": "get_anon_path", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Anon path UUID", + "required": true, + "schema": { + "type": "string" + } + } + ], + "responses": { + "200": { + "description": "Reconstructed toolpath document", + "content": { + "application/json": { + "schema": {} + } + } + }, + "404": { + "description": "No anon path with that ID" + } + } + } + }, + "/api/v1/auth/cli/redeem": { + "post": { + "tags": [ + "Auth" + ], + "summary": "Second half of the CLI browser-login flow. Redeems a one-time code\nfor a long-lived `pat_…` bearer token bound to a `cli` session. No\nauth required — the code is the credential.", + "operationId": "cli_redeem", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RedeemBody" + }, + "example": { + "code": "BCDF23GH" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Bearer token + user", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RedeemResponse" + } + } + } + }, + "400": { + "description": "Invalid code format" + }, + "401": { + "description": "Code expired or unknown" + } + } + } + }, + "/api/v1/auth/cli/request-grant": { + "post": { + "tags": [ + "Auth" + ], + "summary": "First half of the CLI browser-login flow. 
The signed-in browser\nasks for a short, human-readable code that the user types into a\nCLI (`pathbase login`); the CLI then redeems it for a long-lived\nbearer token via `/auth/cli/redeem`.", + "operationId": "cli_request_grant", + "responses": { + "200": { + "description": "Short-lived grant code", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CliGrantResponse" + }, + "example": { + "code": "BCDF23GH", + "expires_in": 600 + } + } + } + }, + "401": { + "description": "Not authenticated" + } + }, + "security": [ + { + "bearerAuth": [] + } + ] + } + }, + "/api/v1/auth/dev": { + "get": { + "tags": [ + "Auth" + ], + "summary": "Local-development sign-in shortcut — creates / signs in as `dev`.\nDisabled (400) when GitHub OAuth is configured, so it can never\nfire in production.", + "operationId": "dev_login", + "parameters": [ + { + "name": "redirect", + "in": "query", + "description": "Same-origin relative path to land on after sign-in.", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "302": { + "description": "Redirect after signing in as the local `dev` user" + }, + "400": { + "description": "Disabled — GitHub OAuth is configured" + } + } + } + }, + "/api/v1/auth/github": { + "get": { + "tags": [ + "Auth" + ], + "summary": "Start the GitHub OAuth handshake — issue a 302 to GitHub's\n`authorize` URL with `state` carrying the validated `redirect`.\nFalls back to `/api/v1/auth/dev` when GitHub OAuth is unconfigured.", + "operationId": "github_redirect", + "parameters": [ + { + "name": "redirect", + "in": "query", + "description": "Same-origin relative path to land on after sign-in. 
Bad values are silently dropped (open-redirect defense).", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "302": { + "description": "Redirect to GitHub authorize URL (or to `/api/v1/auth/dev` when GitHub OAuth is unconfigured)" + } + } + } + }, + "/api/v1/auth/github/callback": { + "get": { + "tags": [ + "Auth" + ], + "summary": "GitHub OAuth callback — exchange the `code` for a token, look up or\ncreate the matching Pathbase user, and redirect to the validated\n`state` target (or `/`).", + "operationId": "github_callback", + "parameters": [ + { + "name": "code", + "in": "query", + "description": "GitHub-issued authorization code", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "state", + "in": "query", + "description": "Echoed redirect target from `/api/v1/auth/github`", + "required": false, + "schema": { + "type": "string" + } + } + ], + "responses": { + "302": { + "description": "Redirect after creating or linking the Pathbase account" + } + } + } + }, + "/api/v1/auth/login": { + "post": { + "tags": [ + "Auth" + ], + "summary": "Sign in with email + password. Sets `pb_session` cookie. Returns\nthe user; for the bearer token instead, use the CLI grant flow.", + "operationId": "login", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/LoginBody" + }, + "example": { + "email": "alice@example.com", + "password": "correct-horse-battery-staple" + } + } + }, + "required": true + }, + "responses": { + "200": { + "description": "Signed in", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/User" + } + } + } + }, + "401": { + "description": "Invalid credentials" + } + } + } + }, "/api/v1/auth/logout": { "post": { "tags": [ "Auth" ], + "summary": "Invalidate the caller's session (cookie or bearer-token) and clear\nthe `pb_session` cookie. 
Idempotent — succeeds even with no session.", "operationId": "logout", "responses": { "204": { @@ -61,19 +295,65 @@ } } }, - "/api/v1/auth/me": { + "/api/v1/auth/register": { + "post": { + "tags": [ + "Auth" + ], + "summary": "Create a new account with username + email + password and sign the\ncaller in. Sets `pb_session` cookie and creates the default\n`pathstash` repo for quick uploads.", + "operationId": "register", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/RegisterBody" + }, + "example": { + "email": "alice@example.com", + "password": "correct-horse-battery-staple", + "username": "alice" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "Account created and signed in", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/User" + } + } + } + }, + "400": { + "description": "Invalid username, email, or password" + }, + "409": { + "description": "Username or email already taken" + } + } + } + }, + "/api/v1/auth/sessions": { "get": { "tags": [ "Auth" ], - "operationId": "me", + "summary": "List the caller's active sessions (web + CLI). The session that\nissued the request is flagged `is_current`.", + "operationId": "list_sessions", "responses": { "200": { - "description": "Current authenticated user", + "description": "Sessions for the authenticated user", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/User" + "type": "array", + "items": { + "$ref": "#/components/schemas/SessionSummary" + } } } } @@ -81,7 +361,49 @@ "401": { "description": "Not authenticated" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] + } + }, + "/api/v1/auth/sessions/{id}": { + "delete": { + "tags": [ + "Auth" + ], + "summary": "Revoke a specific session by ID. 
404 (not 401/403) for sessions\nowned by other users — keeps the existence of those sessions\ninvisible to the caller.", + "operationId": "revoke_session", + "parameters": [ + { + "name": "id", + "in": "path", + "description": "Session ID to revoke", + "required": true, + "schema": { + "type": "string", + "format": "uuid" + } + } + ], + "responses": { + "204": { + "description": "Session revoked" + }, + "401": { + "description": "Not authenticated" + }, + "404": { + "description": "Session not found or not owned by the caller" + } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/health": { @@ -89,6 +411,7 @@ "tags": [ "Health" ], + "summary": "Liveness + readiness probe — confirms the database is reachable.\nReturns 503 (still as JSON) when the round-trip fails so an\norchestrator can distinguish \"process up, dependency down\" from\n\"process down.\"", "operationId": "health", "responses": { "200": { @@ -119,19 +442,24 @@ "tags": [ "Repos" ], + "summary": "Create a new repository owned by the caller. Names are unique per\nowner; collisions return 409.", "operationId": "create_repo", "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/CreateRepoBody" + }, + "example": { + "description": "Agent runs from the rebuild week", + "name": "my-traces" } } }, "required": true }, "responses": { - "200": { + "201": { "description": "Created repository", "content": { "application/json": { @@ -143,8 +471,16 @@ }, "401": { "description": "Not authenticated" + }, + "409": { + "description": "A repo with this name already exists for the caller" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/repos/{owner}/{repo}": { @@ -152,6 +488,7 @@ "tags": [ "Repos" ], + "summary": "Fetch a repository by `owner/name`. 
Public read; the contained\npaths/graphs enforce their own visibility on read.", "operationId": "get_repo", "parameters": [ { @@ -193,6 +530,7 @@ "tags": [ "Repos" ], + "summary": "Delete a repository, cascading to all paths and graphs it contains.\nOwner-only.", "operationId": "delete_repo", "parameters": [ { @@ -224,12 +562,18 @@ "404": { "description": "Not found" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] }, "patch": { "tags": [ "Repos" ], + "summary": "Update a repo's mutable fields. Only the owner may call this;\nnon-owners get 401.", "operationId": "update_repo", "parameters": [ { @@ -256,6 +600,9 @@ "application/json": { "schema": { "$ref": "#/components/schemas/UpdateRepoBody" + }, + "example": { + "description": "Updated tagline" } } }, @@ -278,7 +625,12 @@ "404": { "description": "Not found" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/repos/{owner}/{repo}/graphs": { @@ -286,6 +638,7 @@ "tags": [ "Graphs" ], + "summary": "List graphs in a repo. Public read.", "operationId": "list_graphs", "parameters": [ { @@ -305,6 +658,16 @@ "schema": { "type": "string" } + }, + { + "name": "limit", + "in": "query", + "description": "Max items to return. Default and cap are server-configurable;\nout-of-range values are clamped silently.", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } } ], "responses": { @@ -330,6 +693,7 @@ "tags": [ "Graphs" ], + "summary": "Upload a multi-path Graph document. Inline paths (`PathOrRef::Path`)\nare persisted as new TracePaths in the same repo, deduped by their\ntoolpath ID. Refs to existing paths are kept as-is. 
Caller must own\nthe repo.", "operationId": "create_graph", "parameters": [ { @@ -378,7 +742,12 @@ "401": { "description": "Not authorized" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/repos/{owner}/{repo}/graphs/{slug}": { @@ -386,6 +755,7 @@ "tags": [ "Graphs" ], + "summary": "Fetch a graph with its full reconstructed multi-path document.\nPublic read; constituent path visibility is enforced when those\npaths are accessed individually.", "operationId": "get_graph", "parameters": [ { @@ -409,7 +779,7 @@ { "name": "slug", "in": "path", - "description": "Graph slug", + "description": "Graph slug, or the graph's UUID for unguessable share-by-link access", "required": true, "schema": { "type": "string" @@ -436,6 +806,7 @@ "tags": [ "Graphs" ], + "summary": "Delete a graph. Owner only. Detaches but does not delete the\nconstituent TracePaths — those remain reachable as standalone paths.", "operationId": "delete_graph", "parameters": [ { @@ -476,7 +847,12 @@ "404": { "description": "Not found" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/repos/{owner}/{repo}/paths": { @@ -484,6 +860,7 @@ "tags": [ "Paths" ], + "summary": "List paths in a repo. Visibility-filtered: callers see all paths if\nthey own the repo, public paths only otherwise.", "operationId": "list_paths", "parameters": [ { @@ -503,6 +880,16 @@ "schema": { "type": "string" } + }, + { + "name": "limit", + "in": "query", + "description": "Max items to return. Default and cap are server-configurable;\nout-of-range values are clamped silently.", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } } ], "responses": { @@ -528,6 +915,7 @@ "tags": [ "Paths" ], + "summary": "Upload a single-path graph as a new path under `{owner}/{repo}`.\nMulti-path graphs go to `POST .../graphs`. 
Caller must own the repo.", "operationId": "create_path", "parameters": [ { @@ -576,14 +964,20 @@ "401": { "description": "Not authorized" } - } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] + } }, "/api/v1/repos/{owner}/{repo}/paths/{slug}": { "get": { "tags": [ "Paths" ], + "summary": "Fetch a path with its full reconstructed document. Visibility-gated:\nprivate paths return 404 unless the caller owns the repo or\naddressed the path by its UUID (the unguessable share-by-link form).\nConditional via `If-None-Match`.", "operationId": "get_path", "parameters": [ { @@ -607,7 +1001,7 @@ { "name": "slug", "in": "path", - "description": "Path slug", + "description": "Path slug, or the path's UUID for unguessable share-by-link access", "required": true, "schema": { "type": "string" @@ -634,6 +1028,7 @@ "tags": [ "Paths" ], + "summary": "Delete a path. Owner only. Cascades to its step rows.", "operationId": "delete_path", "parameters": [ { @@ -657,7 +1052,7 @@ { "name": "slug", "in": "path", - "description": "Path slug", + "description": "Path slug, or the path's UUID for unguessable share-by-link access", "required": true, "schema": { "type": "string" @@ -674,12 +1069,18 @@ "404": { "description": "Not found" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] }, "patch": { "tags": [ "Paths" ], + "summary": "Patch a path's mutable fields — currently just `is_public`. Owner\nonly. 
Use this to flip a private upload public for sharing.", "operationId": "update_path", "parameters": [ { @@ -703,7 +1104,7 @@ { "name": "slug", "in": "path", - "description": "Path slug", + "description": "Path slug, or the path's UUID for unguessable share-by-link access", "required": true, "schema": { "type": "string" @@ -715,6 +1116,9 @@ "application/json": { "schema": { "$ref": "#/components/schemas/UpdatePathBody" + }, + "example": { + "is_public": true } } }, @@ -737,6 +1141,73 @@ "404": { "description": "Not found" } + }, + "security": [ + { + "bearerAuth": [] + } + ] + } + }, + "/api/v1/repos/{owner}/{repo}/paths/{slug}/chat": { + "get": { + "tags": [ + "Paths" + ], + "summary": "Render the path's HEAD-ancestor chain as a chat-projection — a\ndensely-indexed, pre-classified, optionally pre-rendered transcript.\nVisibility-gated like `GET .../paths/{slug}`.", + "operationId": "get_path_chat", + "parameters": [ + { + "name": "owner", + "in": "path", + "description": "Owner username", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "repo", + "in": "path", + "description": "Repository name", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "slug", + "in": "path", + "description": "Path slug, or the path's UUID for unguessable share-by-link access", + "required": true, + "schema": { + "type": "string" + } + }, + { + "name": "include_html", + "in": "query", + "description": "Render `text` and `thinking` fields to sanitized HTML server-side.\nDefaults to true; set false for lighter payloads when the client\nrenders markdown lazily.", + "required": false, + "schema": { + "type": "boolean" + } + } + ], + "responses": { + "200": { + "description": "Chat-shaped projection of the path's HEAD-ancestor chain", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/ChatProjection" + } + } + } + }, + "404": { + "description": "Not found" + } } } }, @@ -745,6 +1216,7 @@ "tags": [ "Paths" ], 
+ "summary": "Stream the path's reconstructed Graph document as raw JSON — the\ninverse of `POST .../paths`. Visibility-gated; private paths return\n404 unless owner-authenticated or addressed by UUID.", "operationId": "download_path", "parameters": [ { @@ -768,7 +1240,7 @@ { "name": "slug", "in": "path", - "description": "Path slug", + "description": "Path slug, or the path's UUID for unguessable share-by-link access", "required": true, "schema": { "type": "string" @@ -788,15 +1260,53 @@ } } }, - "/api/v1/settings/profile": { + "/api/v1/signups": { + "post": { + "tags": [ + "Signups" + ], + "summary": "Capture an email + source pair from a marketing form. Idempotent\nper (email, source); per-IP rate-limited; the response intentionally\ncarries no detail so a caller can't enumerate which addresses are\nalready on a list.", + "operationId": "create_signup", + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSignupBody" + } + } + }, + "required": true + }, + "responses": { + "201": { + "description": "Signup recorded", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/CreateSignupResponse" + } + } + } + }, + "400": { + "description": "Invalid email or source" + }, + "429": { + "description": "Rate limit exceeded" + } + } + } + }, + "/api/v1/users/me": { "get": { "tags": [ - "Settings" + "Users" ], - "operationId": "get_profile", + "summary": "Return the authenticated caller's full profile, including their\nemail — fields no other endpoint exposes.", + "operationId": "get_me", "responses": { "200": { - "description": "Current user profile", + "description": "Current authenticated user", "content": { "application/json": { "schema": { @@ -808,18 +1318,28 @@ "401": { "description": "Not authenticated" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] }, "patch": { "tags": [ - "Settings" + "Users" ], - "operationId": "update_profile", + "summary": "Patch the caller's 
mutable profile fields. Unspecified fields are\nleft untouched; pass `null` to clear an optional field.", + "operationId": "update_me", "requestBody": { "content": { "application/json": { "schema": { "$ref": "#/components/schemas/UpdateProfileBody" + }, + "example": { + "bio": "Curious about agents.", + "display_name": "Alice" } } }, @@ -827,7 +1347,7 @@ }, "responses": { "200": { - "description": "Updated profile", + "description": "Profile updated", "content": { "application/json": { "schema": { @@ -839,7 +1359,12 @@ "401": { "description": "Not authenticated" } - } + }, + "security": [ + { + "bearerAuth": [] + } + ] } }, "/api/v1/users/{username}": { @@ -847,6 +1372,7 @@ "tags": [ "Users" ], + "summary": "Return another user's public profile. Email is never included for\nusers other than the caller; use `/users/me` for the caller's own\nfull profile.", "operationId": "get_user", "parameters": [ { @@ -881,6 +1407,7 @@ "tags": [ "Users" ], + "summary": "List a user's repositories. Public read; visibility filtering on\nthe contained paths/graphs happens at their respective endpoints,\nnot here.", "operationId": "list_repos", "parameters": [ { @@ -891,6 +1418,16 @@ "schema": { "type": "string" } + }, + { + "name": "limit", + "in": "query", + "description": "Max items to return. Default and cap are server-configurable;\nout-of-range values are clamped silently.", + "required": false, + "schema": { + "type": "integer", + "minimum": 0 + } } ], "responses": { @@ -916,6 +1453,33 @@ }, "components": { "schemas": { + "ActorView": { + "type": "object", + "description": "Resolved actor identity for a turn. Lookup happens once on the\nserver against the document's `meta.actors` block; turns reference\nthe actor by `u32` index into the projection's `actors` array.", + "required": [ + "id", + "kind", + "display" + ], + "properties": { + "display": { + "type": "string", + "description": "Display name resolved against `meta.actors` if present, else the\nsuffix after `:`." 
+ }, + "id": { + "type": "string", + "description": "Canonical actor string (e.g. `\"claude:opus-4-7\"`)." + }, + "kind": { + "type": "string", + "description": "Prefix before `:` — `\"claude\"`, `\"human\"`, etc." + }, + "model": { + "type": "string", + "nullable": true + } + } + }, "AnonUploadBody": { "type": "object", "required": [ @@ -930,15 +1494,215 @@ "AnonUploadResponse": { "type": "object", "required": [ - "url", - "id" + "path", + "id", + "share_url" ], "properties": { "id": { "type": "string" }, - "url": { + "path": { + "type": "string", + "description": "Site-relative path to the uploaded trace's frontend page, e.g.\n`/anon/pathstash/paths/`. Suitable for in-app navigation." + }, + "share_url": { + "type": "string", + "description": "Absolute URL for sharing externally." + } + } + }, + "AppendStepsResponse": { + "type": "object", + "required": [ + "inserted", + "path" + ], + "properties": { + "inserted": { + "type": "integer", + "description": "Number of steps newly inserted (existing `step_id`s are skipped).", + "minimum": 0 + }, + "path": { + "$ref": "#/components/schemas/TracePath" + } + } + }, + "ChatCursor": { + "type": "object", + "description": "Pagination cursor for fetching older turns. Reserved for future\n`?before=` requests; currently every projection returns the\nfull chain in one shot.", + "properties": { + "before": { + "type": "string", + "description": "Canonical step ID one step older than `turns[0]`. `None` when the\nchain reaches a root step in this projection.", + "nullable": true + } + } + }, + "ChatProjection": { + "type": "object", + "description": "Top-level chat-projection response. 
Densely-indexed (`u32` IDs into\n`turns` / `actors`) so JSON parsing is cheap and the wire shape\navoids duplicating actor strings on every turn.", + "required": [ + "path_id", + "actors", + "turns", + "step_ids", + "cursor" + ], + "properties": { + "actors": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ActorView" + } + }, + "cursor": { + "$ref": "#/components/schemas/ChatCursor" + }, + "head": { + "type": "integer", + "format": "int32", + "description": "Index into `turns` — the latest turn along the HEAD chain. `None`\nwhen the document has no head.", + "minimum": 0, + "nullable": true + }, + "path_id": { + "type": "string" + }, + "step_ids": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Canonical step IDs parallel to `turns`, for deep-links and\n`/step/{id}` fetches." + }, + "title": { + "type": "string", + "nullable": true + }, + "turns": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ChatTurn" + } + } + } + }, + "ChatTurn": { + "type": "object", + "description": "One linearized turn along the HEAD-ancestor chain. Already\nclassified, optionally pre-rendered to HTML, with tool invocations\ninlined — the renderer drops in `text_html` and renders no further.", + "required": [ + "actor_id", + "is_head", + "kind", + "text_html", + "thinking_html", + "tool_uses", + "invocations" + ], + "properties": { + "actor_id": { + "type": "integer", + "format": "int32", + "description": "Index into `actors`.", + "minimum": 0 + }, + "intent": { + "type": "string", + "nullable": true + }, + "invocations": { + "type": "array", + "items": { + "$ref": "#/components/schemas/ToolInvocation" + }, + "description": "`tool.invoke` siblings of an assistant step, spliced inline." 
+ }, + "is_head": { + "type": "boolean" + }, + "kind": { + "$ref": "#/components/schemas/ChatTurnKind" + }, + "model": { + "type": "string", + "nullable": true + }, + "parent_id": { + "type": "integer", + "format": "int32", + "description": "Index into `turns` — `i - 1` along the HEAD chain, `None` at the root.", + "minimum": 0, + "nullable": true + }, + "text": { + "type": "string", + "nullable": true + }, + "text_html": { + "type": "string", + "description": "Sanitized HTML for `text`. Empty when `include_html` is false or\n`text` is empty." + }, + "thinking": { + "type": "string", + "nullable": true + }, + "thinking_html": { "type": "string" + }, + "timestamp": { + "type": "string", + "nullable": true + }, + "tool_diff": { + "nullable": true, + "allOf": [ + { + "$ref": "#/components/schemas/ToolDiff" + } + ], + "description": "For `kind = \"tool\"` only: the first non-empty `change[k].raw`,\npre-split into lines." + }, + "tool_name": { + "type": "string", + "description": "For `kind = \"tool\"` only: the tool name (`extra.name`).", + "nullable": true + }, + "tool_uses": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Tool names from `extra.tool_uses` (string list)." + } + } + }, + "ChatTurnKind": { + "type": "string", + "description": "Pre-classified turn kind. Server-side classification follows the\nprecedence rules in `packages/frontend/src/classify.ts`; the client\nrenders verbatim without re-deriving.", + "enum": [ + "user", + "assistant", + "tool", + "system" + ] + }, + "CliGrantResponse": { + "type": "object", + "required": [ + "code", + "expires_in" + ], + "properties": { + "code": { + "type": "string", + "description": "Display code the browser shows; the user types it into the CLI." + }, + "expires_in": { + "type": "integer", + "format": "int64", + "description": "Seconds until the grant expires (10 minutes)." 
} } }, @@ -949,24 +1713,48 @@ ], "properties": { "description": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "name": { "type": "string" } } }, + "CreateSignupBody": { + "type": "object", + "required": [ + "email", + "source" + ], + "properties": { + "email": { + "type": "string" + }, + "source": { + "type": "string" + } + } + }, + "CreateSignupResponse": { + "type": "object", + "required": [ + "ok" + ], + "properties": { + "ok": { + "type": "boolean" + } + } + }, "Graph": { "type": "object", + "description": "A computation graph — a named ordered collection of paths within a\nrepo. Stored as header metadata plus `(graph_id, path_id, position)`\njunction rows; the full `document` is reconstructed on read.", "required": [ "id", "repo_id", "slug", "toolpath_id", - "document", "created_at", "updated_at" ], @@ -976,7 +1764,14 @@ "format": "date-time" }, "document": { - "type": "object" + "type": "object", + "description": "Reconstructed full Graph document with inline paths. Only populated\nby handlers that explicitly fetch the constituent paths and steps.", + "nullable": true + }, + "header": { + "type": "object", + "description": "Stored graph metadata: `{graph: GraphIdentity, meta?: GraphMeta}` —\nthe toolpath `Graph` minus its `paths`. 
None when never set.", + "nullable": true }, "id": { "type": "string", @@ -990,10 +1785,8 @@ "type": "string" }, "title": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "toolpath_id": { "type": "string" @@ -1011,18 +1804,80 @@ ], "properties": { "reason": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "status": { "type": "string" } } }, + "LoginBody": { + "type": "object", + "required": [ + "email", + "password" + ], + "properties": { + "email": { + "type": "string" + }, + "password": { + "type": "string" + } + } + }, + "RedeemBody": { + "type": "object", + "required": [ + "code" + ], + "properties": { + "code": { + "type": "string" + } + } + }, + "RedeemResponse": { + "type": "object", + "required": [ + "token", + "user" + ], + "properties": { + "token": { + "type": "string", + "description": "Long-lived bearer token (`pat_…`). Send as\n`Authorization: Bearer ` from CLI calls." + }, + "user": { + "$ref": "#/components/schemas/User" + } + } + }, + "RegisterBody": { + "type": "object", + "required": [ + "username", + "email", + "password" + ], + "properties": { + "email": { + "type": "string" + }, + "password": { + "type": "string", + "description": "Minimum 8 characters. Stored as an Argon2id hash." + }, + "username": { + "type": "string", + "description": "Lowercase ASCII alphanumerics, hyphens, and underscores. Must\nnot collide with the reserved `me` / `anon` namespaces." + } + } + }, "Repo": { "type": "object", + "description": "A repository — a named bucket of paths and graphs owned by a single\nuser. 
The `(owner_id, name)` pair is unique; `name` is also the\nURL-segment (\"alice/my-traces\").", "required": [ "id", "owner_id", @@ -1037,10 +1892,8 @@ "format": "date-time" }, "description": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "id": { "type": "string", @@ -1057,10 +1910,8 @@ "format": "uuid" }, "readme": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "updated_at": { "type": "string", @@ -1068,14 +1919,119 @@ } } }, + "SessionSummary": { + "type": "object", + "required": [ + "id", + "kind", + "created_at", + "expires_at", + "is_current" + ], + "properties": { + "created_at": { + "type": "string" + }, + "expires_at": { + "type": "string" + }, + "id": { + "type": "string" + }, + "is_current": { + "type": "boolean", + "description": "Marks the session that issued the request; clients should warn\nbefore letting the user revoke it (logs the current device out)." + }, + "kind": { + "type": "string", + "description": "`web` (cookie) or `cli` (bearer token)." + }, + "user_agent": { + "type": "string", + "nullable": true + } + } + }, + "ToolDiff": { + "type": "object", + "description": "Pre-split diff payload — picked from the first non-empty\n`change[k].raw` on a structural step. Lines are pre-split so the\nrenderer doesn't repeat the work.", + "required": [ + "path", + "lines" + ], + "properties": { + "lines": { + "type": "array", + "items": { + "type": "string" + } + }, + "path": { + "type": "string" + } + } + }, + "ToolInvocation": { + "type": "object", + "description": "A `tool.invoke` step spliced inline next to its parent assistant\nturn. 
Saves the client a second pass over the path's step graph.", + "required": [ + "step_id", + "actor_id", + "text_html" + ], + "properties": { + "actor_id": { + "type": "integer", + "format": "int32", + "minimum": 0 + }, + "input": { + "type": "string", + "description": "Tool input args from the structural payload (`extra.input`),\nJSON-stringified when not already a string.", + "nullable": true + }, + "result": { + "type": "string", + "description": "Tool output from the structural payload (`extra.result`),\nJSON-stringified when not already a string.", + "nullable": true + }, + "step_id": { + "type": "string", + "description": "Canonical step ID of the tool.invoke step." + }, + "text": { + "type": "string", + "nullable": true + }, + "text_html": { + "type": "string" + }, + "timestamp": { + "type": "string", + "nullable": true + }, + "tool_diff": { + "nullable": true, + "allOf": [ + { + "$ref": "#/components/schemas/ToolDiff" + } + ] + }, + "tool_name": { + "type": "string", + "nullable": true + } + } + }, "TracePath": { "type": "object", + "description": "A single agent trace path within a repo. Stored in three pieces:\nheader metadata (here), step rows (in `path_steps`), and the\nreconstructed full `document` (re-assembled on read for handlers\nthat need it).", "required": [ "id", "repo_id", "slug", "toolpath_id", - "document", "step_count", "is_public", "created_at", @@ -1087,7 +2043,14 @@ "format": "date-time" }, "document": { - "type": "object" + "type": "object", + "description": "Reconstructed full Graph document. Only populated by handlers that\nexplicitly fetch the steps (e.g. single-path GET, download, chat).\nListing endpoints leave this None to avoid N+1 step fetches.", + "nullable": true + }, + "header": { + "type": "object", + "description": "Stored path metadata: `{path: PathIdentity, meta?: PathMeta}` — the\ntoolpath `Path` minus its `steps`. 
None when never set.", + "nullable": true }, "id": { "type": "string", @@ -1108,10 +2071,8 @@ "format": "int32" }, "title": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "toolpath_id": { "type": "string" @@ -1126,10 +2087,8 @@ "type": "object", "properties": { "is_public": { - "type": [ - "boolean", - "null" - ] + "type": "boolean", + "nullable": true } } }, @@ -1137,16 +2096,12 @@ "type": "object", "properties": { "bio": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "display_name": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true } } }, @@ -1154,22 +2109,16 @@ "type": "object", "properties": { "description": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "name": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "readme": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true } } }, @@ -1209,6 +2158,7 @@ }, "User": { "type": "object", + "description": "A registered Pathbase account. 
`email` is optional because GitHub\nOAuth users can have a private email; CLI / API consumers see the\ncaller's own email via `/users/me` but never another user's.", "required": [ "id", "username", @@ -1217,32 +2167,24 @@ ], "properties": { "avatar_url": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "bio": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "created_at": { "type": "string", "format": "date-time" }, "display_name": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "email": { - "type": [ - "string", - "null" - ] + "type": "string", + "nullable": true }, "id": { "type": "string", @@ -1257,6 +2199,13 @@ } } } + }, + "securitySchemes": { + "bearerAuth": { + "type": "http", + "scheme": "bearer", + "description": "Pathbase personal access token (`pat_…`) obtained via the CLI grant flow (`POST /auth/cli/request-grant` then `POST /auth/cli/redeem`). Send as `Authorization: Bearer `." + } } }, "tags": [ @@ -1284,13 +2233,13 @@ "name": "Graphs", "description": "Computation graphs" }, - { - "name": "Settings", - "description": "User settings" - }, { "name": "Anon", "description": "Anonymous uploads" + }, + { + "name": "Signups", + "description": "Marketing-page email capture" } ] } diff --git a/docs/superpowers/plans/2026-05-07-path-share-command.md b/docs/superpowers/plans/2026-05-07-path-share-command.md new file mode 100644 index 0000000..db202cd --- /dev/null +++ b/docs/superpowers/plans/2026-05-07-path-share-command.md @@ -0,0 +1,2120 @@ +# `path share` Implementation Plan + +> **For agentic workers:** REQUIRED SUB-SKILL: Use superpowers:subagent-driven-development (recommended) or superpowers:executing-plans to implement this plan task-by-task. Steps use checkbox (`- [ ]`) syntax for tracking. 
+ +**Goal:** Add a `path share` command that aggregates agent sessions across installed harnesses, ranks current-project sessions first in a single fzf picker, and uploads the picked session to Pathbase in one shot. + +**Architecture:** New `cmd_share.rs` module in `crates/path-cli/src/`. Reuses derive helpers from `cmd_import.rs` (lifted to `pub(crate)` as single-pair functions) and the upload helper from `cmd_export.rs` (refactored into a body-taking `run_pathbase_inner`). Aggregation, picker, and CLI dispatch live in the new module. + +**Tech Stack:** Rust 2024, clap (CLI), reqwest+tokio (HTTP via shared `cmd_pathbase` helpers), `fzf` (interactive picker), the existing `toolpath-{claude,gemini,codex,opencode,pi}` provider crates. + +**Spec:** `docs/superpowers/specs/2026-05-07-path-share-command-design.md` (commit `b3ee214`). + +--- + +## File map + +- **Modify** `crates/path-cli/src/cmd_import.rs` — lift `DerivedDoc` to `pub(crate)`; extract single-pair derive helpers as `pub(crate) fn`s. +- **Modify** `crates/path-cli/src/cmd_export.rs` — split `run_pathbase` into `run_pathbase_inner(args, body)` + thin wrapper; add `pub(crate) struct PathbaseUploadArgs`. +- **Create** `crates/path-cli/src/cmd_share.rs` — module: types (`Harness`, `SessionRow`, `HarnessBundle`), aggregation (`gather_sessions`), picker, dispatch (`run`). +- **Modify** `crates/path-cli/src/lib.rs` — add `mod cmd_share;` and `Commands::Share { args }` enum arm. +- **Modify** `crates/path-cli/tests/integration.rs` — add `share_*` integration tests. +- **Modify** `CLAUDE.md` — add a `path share` line to the CLI usage block, and one item to "Things to know" describing the unified picker. + +--- + +## Task 1: Refactor `cmd_import.rs` — lift visibility, extract single-pair derive helpers + +Mechanical refactor; no behavior change. 
The new `pub(crate)` helpers each derive a `DerivedDoc` for one explicit `(project, session)` or `session` pair, so `cmd_share` can call them after its own picker resolves a row. + +**Files:** +- Modify: `crates/path-cli/src/cmd_import.rs` + +- [ ] **Step 1.1: Lift `DerivedDoc` to `pub(crate)`** + +In `crates/path-cli/src/cmd_import.rs` around line 174, change: + +```rust +struct DerivedDoc { + cache_id: String, + doc: Graph, +} +``` + +to: + +```rust +pub(crate) struct DerivedDoc { + pub(crate) cache_id: String, + pub(crate) doc: Graph, +} +``` + +- [ ] **Step 1.2: Add `derive_claude_pair`** + +Add this function next to `derive_claude_with_manager` (around line 369): + +```rust +/// Derive a single Claude conversation given an explicit project + session. +/// Used by `cmd_share` after its picker has resolved the pair; mirrors the +/// `(Some(p), Some(s), _)` arm in [`derive_claude_with_manager`]. +pub(crate) fn derive_claude_pair(project: &str, session: &str) -> Result { + let manager = toolpath_claude::ClaudeConvo::new(); + let cfg = toolpath_claude::derive::DeriveConfig { + project_path: Some(project.to_string()), + include_thinking: false, + }; + let convo = manager + .read_conversation(project, session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = toolpath_claude::derive::derive_path(&convo, &cfg); + let cache_id = make_id("claude", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} +``` + +- [ ] **Step 1.3: Add `derive_gemini_pair`** + +Add this function next to `derive_gemini_with_manager` (around line 562): + +```rust +/// Derive a single Gemini conversation given an explicit project + session. 
+pub(crate) fn derive_gemini_pair( + project: &str, + session: &str, + include_thinking: bool, +) -> Result { + let manager = toolpath_gemini::GeminiConvo::new(); + let cfg = toolpath_gemini::derive::DeriveConfig { + project_path: Some(project.to_string()), + include_thinking, + }; + let convo = manager + .read_conversation(project, session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = toolpath_gemini::derive::derive_path(&convo, &cfg); + let cache_id = make_id("gemini", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} +``` + +- [ ] **Step 1.4: Add `derive_pi_pair`** + +Add this function next to `derive_pi_with_manager` (around line 995): + +```rust +/// Derive a single Pi session given an explicit project + session. +pub(crate) fn derive_pi_pair( + project: &str, + session: &str, + base: Option, +) -> Result { + let manager = if let Some(path) = base { + let resolver = toolpath_pi::PathResolver::new().with_sessions_dir(&path); + toolpath_pi::PiConvo::with_resolver(resolver) + } else { + toolpath_pi::PiConvo::new() + }; + let config = toolpath_pi::DeriveConfig::default(); + let session = manager + .read_session(project, session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let doc = Graph::from_path(toolpath_pi::derive::derive_path(&session, &config)); + let cache_id = make_id("pi", &doc_inner_id(&doc)); + Ok(DerivedDoc { cache_id, doc }) +} +``` + +- [ ] **Step 1.5: Add `derive_codex_one`** + +Add this function next to `derive_codex` (around line 738): + +```rust +/// Derive a single Codex session given an explicit session id. 
+pub(crate) fn derive_codex_one(session: &str) -> Result { + let manager = toolpath_codex::CodexConvo::new(); + let config = toolpath_codex::derive::DeriveConfig { project_path: None }; + let s = manager + .read_session(session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = toolpath_codex::derive::derive_path(&s, &config); + let cache_id = make_id("codex", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} +``` + +- [ ] **Step 1.6: Add `derive_opencode_one`** + +Add this function next to `derive_opencode` (around line 848). Wrap in the same `cfg(not(target_os = "emscripten"))` gate the rest of opencode uses: + +```rust +/// Derive a single opencode session given an explicit session id. +#[cfg(not(target_os = "emscripten"))] +pub(crate) fn derive_opencode_one( + session: &str, + no_snapshot_diffs: bool, +) -> Result { + let manager = toolpath_opencode::OpencodeConvo::new(); + let config = toolpath_opencode::derive::DeriveConfig { + no_snapshot_diffs, + ..Default::default() + }; + let s = manager + .read_session(session) + .map_err(|e| anyhow::anyhow!("{}", e))?; + let path = + toolpath_opencode::derive::derive_path_with_resolver(&s, &config, manager.resolver()); + let cache_id = make_id("opencode", &path.path.id); + Ok(DerivedDoc { + cache_id, + doc: Graph::from_path(path), + }) +} +``` + +- [ ] **Step 1.7: Verify the workspace still builds and tests pass** + +```bash +cargo build -p path-cli +cargo test -p path-cli --lib +``` + +Expected: build succeeds, all existing tests pass (this was a pure addition — no call sites rewritten). + +- [ ] **Step 1.8: Commit** + +```bash +git add crates/path-cli/src/cmd_import.rs +git commit -m "refactor(path-cli): extract single-pair derive helpers + +Lifts DerivedDoc to pub(crate) and adds derive_{claude,gemini,pi}_pair +and derive_{codex,opencode}_one. 
These are the explicit-args paths +already exercised by the (Some(p), Some(s), _) arm of each existing +dispatch — extracted so cmd_share can reuse them without re-implementing +the per-harness wiring." +``` + +--- + +## Task 2: Refactor `cmd_export.rs` — split `run_pathbase` so the body can come from memory + +Today `run_pathbase` reads from a cache file. `cmd_share` has the derived `Graph` in memory; we want to upload without writing-then-reading. Extract a `run_pathbase_inner(args, body)` and have the existing wrapper read the file then call the inner. + +**Files:** +- Modify: `crates/path-cli/src/cmd_export.rs` + +- [ ] **Step 2.1: Add `pub(crate) struct PathbaseUploadArgs`** + +Add this near the existing `struct PathbaseExportArgs` (around line 219): + +```rust +/// Pathbase upload knobs that don't depend on where the body came from. +/// Identical to [`PathbaseExportArgs`] minus the `input` field — the body +/// is supplied by the caller (read from cache, derived in memory, …). +#[derive(Debug)] +pub(crate) struct PathbaseUploadArgs { + pub(crate) url: Option, + pub(crate) anon: bool, + pub(crate) repo: Option, + pub(crate) slug: Option, + pub(crate) public: bool, +} +``` + +- [ ] **Step 2.2: Lift `RepoSpec` and `parse_repo_spec` to `pub(crate)`** + +In the same file, change: + +```rust +#[derive(Debug, Clone)] +pub struct RepoSpec { + pub owner: String, + pub name: String, +} + +fn parse_repo_spec(s: &str) -> std::result::Result { +``` + +so both `pub` items become `pub(crate)` (already `pub` for `RepoSpec`; convert for `parse_repo_spec`): + +```rust +#[derive(Debug, Clone)] +pub(crate) struct RepoSpec { + pub(crate) owner: String, + pub(crate) name: String, +} + +pub(crate) fn parse_repo_spec(s: &str) -> std::result::Result { +``` + +- [ ] **Step 2.3: Extract `run_pathbase_inner`** + +Replace the body of `run_pathbase` (lines 1202–1329 — the `#[cfg(not(target_os = "emscripten"))]` arm) so that it reads the file then calls a new inner. 
The new shape: + +```rust +fn run_pathbase(args: PathbaseExportArgs) -> Result<()> { + #[cfg(target_os = "emscripten")] + { + let _ = args; + anyhow::bail!("'path export pathbase' requires a native environment with network access"); + } + + #[cfg(not(target_os = "emscripten"))] + { + let file = cache_ref(&args.input)?; + let body = std::fs::read_to_string(&file) + .with_context(|| format!("Failed to read {}", file.display()))?; + let upload = PathbaseUploadArgs { + url: args.url, + anon: args.anon, + repo: args.repo, + slug: args.slug, + public: args.public, + }; + let summary_source = file.display().to_string(); + run_pathbase_inner(upload, &body, &summary_source) + } +} + +#[cfg(not(target_os = "emscripten"))] +pub(crate) fn run_pathbase_inner( + args: PathbaseUploadArgs, + body: &str, + summary_source: &str, +) -> Result<()> { + use crate::cmd_pathbase::{ + anon_paths_post, api_me, credentials_path, load_session, paths_post, repos_post, + resolve_url, + }; + + // Validate locally so we give a clean error rather than relying on + // the server to reject malformed payloads. + let doc = toolpath::v1::Graph::from_json(body) + .map_err(|e| anyhow::anyhow!("Invalid toolpath document: {}", e))?; + + let stored = load_session(&credentials_path()?)?; + let base_url = match (&args.url, &stored) { + (Some(u), _) => resolve_url(Some(u.clone())), + (None, Some(s)) => s.url.clone(), + (None, None) => resolve_url(None), + }; + + let go_anon = args.anon || (stored.is_none() && args.repo.is_none() && args.slug.is_none()); + + if go_anon { + if !args.anon && stored.is_none() { + eprintln!( + "note: not logged in — uploading anonymously (not listable). Run `path auth login --url {base_url}` for a listable upload." 
+ ); + } + let resp = anon_paths_post(&base_url, body)?; + let printable = if resp.url.starts_with("http://") || resp.url.starts_with("https://") { + resp.url.clone() + } else if resp.url.starts_with('/') { + format!("{base_url}{}", resp.url) + } else { + format!("{base_url}/{}", resp.url) + }; + println!("{printable}"); + eprintln!( + "Uploaded {} → anon path {} ({} bytes)", + summary_source, + resp.id, + body.len() + ); + return Ok(()); + } + + let session = stored.ok_or_else(|| { + anyhow::anyhow!("Not logged in. Run `path auth login` or pass `--anon`.") + })?; + if host_of(&base_url) != host_of(&session.url) { + eprintln!( + "warning: uploading to {} with a token issued by {}; expect 401 unless this is the same deployment", + base_url, session.url + ); + } + + let (owner, repo) = match args.repo { + Some(spec) => (spec.owner, spec.name), + None => { + let user = api_me(&base_url, &session.token)?; + repos_post(&base_url, &session.token, "pathstash")?; + (user.username, "pathstash".to_string()) + } + }; + + let slug = args.slug.unwrap_or_else(|| derive_slug(&doc)); + let created = paths_post( + &base_url, + &session.token, + &owner, + &repo, + &slug, + body, + args.public, + )?; + + if created.is_public != args.public { + eprintln!( + "note: requested is_public={} but server applied is_public={}", + args.public, created.is_public + ); + } + let visibility = if created.is_public { "public" } else { "secret" }; + let url = pathbase_share_url( + &base_url, + &owner, + &repo, + &created.slug, + &created.id, + created.is_public, + ); + println!("{url}"); + eprintln!( + "Uploaded {} → {}/{}/{} ({} path, {} bytes)", + summary_source, + owner, + repo, + created.slug, + visibility, + body.len() + ); + Ok(()) +} +``` + +`summary_source` is the human-readable label used in the stderr "Uploaded …" line — `cache_ref` path for `export pathbase`, and a synthesized " session " string for `cmd_share`. Keeps the inner free of cache-vs-memory branching. 
+ +- [ ] **Step 2.4: Verify the workspace still builds and tests pass** + +```bash +cargo build -p path-cli +cargo test -p path-cli +``` + +Expected: existing `pathbase_*` tests in `cmd_pathbase.rs` and `export_pathbase_repo_flag_requires_login` integration test still pass — refactor preserved behavior. + +- [ ] **Step 2.5: Commit** + +```bash +git add crates/path-cli/src/cmd_export.rs +git commit -m "refactor(path-cli): split run_pathbase into wrapper + inner + +run_pathbase_inner takes a body string and a summary_source label, so +callers with an in-memory toolpath document (cmd_share) can upload +without round-tripping through the cache." +``` + +--- + +## Task 3: Scaffold `cmd_share.rs` and wire into `lib.rs` + +Empty module with the CLI surface and a `run()` stub that errors. This is the smallest change that lets `path share --help` print and `path share` produce a recognisable "not implemented yet" failure, so subsequent tasks can be tested incrementally. + +**Files:** +- Create: `crates/path-cli/src/cmd_share.rs` +- Modify: `crates/path-cli/src/lib.rs` + +- [ ] **Step 3.1: Write the failing test for the help output** + +Append to `crates/path-cli/tests/integration.rs`: + +```rust +#[test] +fn share_help_lists_unified_picker_flags() { + cmd() + .args(["share", "--help"]) + .assert() + .success() + .stdout(predicate::str::contains("--harness")) + .stdout(predicate::str::contains("--session")) + .stdout(predicate::str::contains("--project")) + .stdout(predicate::str::contains("--anon")); +} +``` + +- [ ] **Step 3.2: Run the test to confirm it fails** + +```bash +cargo test -p path-cli --test integration share_help_lists_unified_picker_flags +``` + +Expected: FAIL — `path share` is not yet a recognised subcommand. + +- [ ] **Step 3.3: Create `cmd_share.rs`** + +```rust +//! `path share` — interactive Pathbase upload across installed agent +//! harnesses. See `docs/superpowers/specs/2026-05-07-path-share-command-design.md`. 
+
+#![cfg(not(target_os = "emscripten"))]
+
+use anyhow::Result;
+use clap::{Args, ValueEnum};
+use std::path::PathBuf;
+
+use crate::cmd_export::RepoSpec;
+
+#[derive(Copy, Clone, Debug, PartialEq, Eq, ValueEnum)]
+#[value(rename_all = "lower")]
+pub enum HarnessArg {
+    Claude,
+    Gemini,
+    Codex,
+    Opencode,
+    Pi,
+}
+
+#[derive(Args, Debug)]
+pub struct ShareArgs {
+    /// Pathbase server URL (defaults to the stored session's server)
+    #[arg(long)]
+    pub url: Option<String>,
+
+    /// Force the anonymous endpoint, ignoring any stored credentials
+    #[arg(long, conflicts_with_all = ["repo", "public"])]
+    pub anon: bool,
+
+    /// Target a specific repo as `owner/name` instead of `<username>/pathstash`
+    #[arg(long, value_parser = crate::cmd_export::parse_repo_spec)]
+    pub repo: Option<RepoSpec>,
+
+    /// Override the auto-derived slug (defaults to the toolpath document id)
+    #[arg(long)]
+    pub slug: Option<String>,
+
+    /// Make the uploaded path publicly listable (default: secret/unlisted)
+    #[arg(long)]
+    pub public: bool,
+
+    /// Narrow the picker to one harness, or skip the picker entirely
+    /// when used with --session.
+    #[arg(long, value_enum)]
+    pub harness: Option<HarnessArg>,
+
+    /// Skip the picker. Requires --harness; requires --project for
+    /// claude/gemini/pi.
+    #[arg(long, requires = "harness")]
+    pub session: Option<String>,
+
+    /// Override cwd-as-project. Filters the picker to sessions tied to
+    /// this project across all harnesses.
+    #[arg(long)]
+    pub project: Option<PathBuf>,
+
+    /// Overwrite the cache entry if it already exists
+    #[arg(long)]
+    pub force: bool,
+
+    /// Skip writing the cache; derive in-memory only
+    #[arg(long)]
+    pub no_cache: bool,
+}
+
+pub fn run(args: ShareArgs) -> Result<()> {
+    let _ = args;
+    anyhow::bail!("`path share` is not yet implemented")
+}
+```
+
+- [ ] **Step 3.4: Wire it into `lib.rs`**
+
+In `crates/path-cli/src/lib.rs`, add the module declaration alongside the others:
+
+```rust
+#[cfg(not(target_os = "emscripten"))]
+mod cmd_share;
+```
+
+Add to the `Commands` enum (anywhere among the existing arms; placing it next to `Auth` is natural):
+
+```rust
+    /// Share an agent session to Pathbase via an interactive picker
+    #[cfg(not(target_os = "emscripten"))]
+    Share {
+        #[command(flatten)]
+        args: cmd_share::ShareArgs,
+    },
+```
+
+Add the dispatch arm in `pub fn run`:
+
+```rust
+    #[cfg(not(target_os = "emscripten"))]
+    Commands::Share { args } => cmd_share::run(args),
+```
+
+- [ ] **Step 3.5: Run the help test to verify it passes**
+
+```bash
+cargo test -p path-cli --test integration share_help_lists_unified_picker_flags
+```
+
+Expected: PASS.
+
+- [ ] **Step 3.6: Confirm `path share` runs and bails with the stub error**
+
+```bash
+cargo run -p path-cli -- share 2>&1 | head -3
+```
+
+Expected: stderr says `Error: \`path share\` is not yet implemented`.
+
+- [ ] **Step 3.7: Commit**
+
+```bash
+git add crates/path-cli/src/cmd_share.rs crates/path-cli/src/lib.rs crates/path-cli/tests/integration.rs
+git commit -m "feat(path-cli): scaffold \`path share\` command
+
+Adds the cmd_share module with the full CLI surface (--url, --harness,
+--session, --project, --anon, --repo, --slug, --public, --force,
+--no-cache) and a stub run() that bails. Wires it into lib.rs as
+Commands::Share. Subsequent tasks fill in the body."
+```
+
+---
+
+## Task 4: Add `Harness`, `SessionRow`, and `HarnessBundle` types
+
+Pure types with small helper methods. 
No aggregation logic yet — that comes in tasks 5 and 6. Splitting it out keeps the test fixtures focused.
+
+**Files:**
+- Modify: `crates/path-cli/src/cmd_share.rs`
+
+- [ ] **Step 4.1: Write the failing tests for the type helpers**
+
+Append to `crates/path-cli/src/cmd_share.rs`:
+
+```rust
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn harness_name_and_symbol_are_distinct() {
+        let all = [
+            Harness::Claude,
+            Harness::Gemini,
+            Harness::Codex,
+            Harness::Opencode,
+            Harness::Pi,
+        ];
+        let names: Vec<&str> = all.iter().map(|h| h.name()).collect();
+        let symbols: Vec<&str> = all.iter().map(|h| h.symbol()).collect();
+        assert_eq!(names.len(), 5);
+        assert_eq!(
+            names.iter().collect::<std::collections::HashSet<_>>().len(),
+            5,
+            "names must be unique"
+        );
+        assert_eq!(
+            symbols
+                .iter()
+                .collect::<std::collections::HashSet<_>>()
+                .len(),
+            5,
+            "symbols must be unique"
+        );
+    }
+
+    #[test]
+    fn harness_project_keyed_matches_design() {
+        assert!(Harness::Claude.project_keyed());
+        assert!(Harness::Gemini.project_keyed());
+        assert!(Harness::Pi.project_keyed());
+        assert!(!Harness::Codex.project_keyed());
+        assert!(!Harness::Opencode.project_keyed());
+    }
+
+    #[test]
+    fn harness_from_arg_roundtrips() {
+        for (arg, harness) in [
+            (HarnessArg::Claude, Harness::Claude),
+            (HarnessArg::Gemini, Harness::Gemini),
+            (HarnessArg::Codex, Harness::Codex),
+            (HarnessArg::Opencode, Harness::Opencode),
+            (HarnessArg::Pi, Harness::Pi),
+        ] {
+            assert_eq!(Harness::from_arg(arg), harness);
+        }
+    }
+}
+```
+
+- [ ] **Step 4.2: Run the tests to confirm they fail**
+
+```bash
+cargo test -p path-cli --lib cmd_share
+```
+
+Expected: FAIL — `Harness`, `SessionRow`, etc. don't exist yet.
+
+- [ ] **Step 4.3: Add the types**
+
+Insert above the `pub fn run` definition:
+
+```rust
+use chrono::{DateTime, Utc};
+
+/// Which agent harness a session was produced by. 
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
+pub(crate) enum Harness {
+    Claude,
+    Gemini,
+    Codex,
+    Opencode,
+    Pi,
+}
+
+impl Harness {
+    pub(crate) fn name(&self) -> &'static str {
+        match self {
+            Harness::Claude => "claude",
+            Harness::Gemini => "gemini",
+            Harness::Codex => "codex",
+            Harness::Opencode => "opencode",
+            Harness::Pi => "pi",
+        }
+    }
+
+    /// Padded so all five symbols line up in the fzf column.
+    pub(crate) fn symbol(&self) -> &'static str {
+        match self {
+            Harness::Claude => "claude  ",
+            Harness::Gemini => "gemini  ",
+            Harness::Codex => "codex   ",
+            Harness::Opencode => "opencode",
+            Harness::Pi => "pi      ",
+        }
+    }
+
+    /// True when the underlying provider keys sessions by project path.
+    /// claude/gemini/pi: true. codex/opencode: false (sessions store cwd
+    /// per-row, not as a directory key).
+    pub(crate) fn project_keyed(&self) -> bool {
+        matches!(self, Harness::Claude | Harness::Gemini | Harness::Pi)
+    }
+
+    pub(crate) fn from_arg(arg: HarnessArg) -> Self {
+        match arg {
+            HarnessArg::Claude => Harness::Claude,
+            HarnessArg::Gemini => Harness::Gemini,
+            HarnessArg::Codex => Harness::Codex,
+            HarnessArg::Opencode => Harness::Opencode,
+            HarnessArg::Pi => Harness::Pi,
+        }
+    }
+
+    pub(crate) fn parse(s: &str) -> Option<Self> {
+        match s {
+            "claude" => Some(Harness::Claude),
+            "gemini" => Some(Harness::Gemini),
+            "codex" => Some(Harness::Codex),
+            "opencode" => Some(Harness::Opencode),
+            "pi" => Some(Harness::Pi),
+            _ => None,
+        }
+    }
+}
+
+/// One row in the unified session picker.
+#[derive(Debug, Clone)]
+pub(crate) struct SessionRow {
+    pub(crate) harness: Harness,
+    /// Project path for keyed providers; `None` for codex/opencode.
+    pub(crate) project: Option<String>,
+    /// Recorded cwd from the session (codex/opencode only).
+    pub(crate) cwd: Option<String>,
+    pub(crate) session_id: String,
+    pub(crate) title: String,
+    pub(crate) last_activity: Option<DateTime<Utc>>,
+    pub(crate) message_count: usize,
+    pub(crate) matches_cwd: bool,
+}
+
+/// Bundle of provider managers used during aggregation. Production code
+/// builds this from real `$HOME` via `from_environment`; tests construct
+/// it directly with provider-specific resolvers.
+#[derive(Default)]
+pub(crate) struct HarnessBundle {
+    pub(crate) claude: Option<toolpath_claude::ClaudeConvo>,
+    pub(crate) gemini: Option<toolpath_gemini::GeminiConvo>,
+    pub(crate) codex: Option<toolpath_codex::CodexConvo>,
+    pub(crate) opencode: Option<toolpath_opencode::OpencodeConvo>,
+    pub(crate) pi: Option<toolpath_pi::PiConvo>,
+}
+
+impl HarnessBundle {
+    /// Build the production bundle. Each provider is included
+    /// unconditionally (its `new()` doesn't fail on a missing home dir);
+    /// `gather_sessions` skips the ones whose listing returns empty/NotFound.
+    pub(crate) fn from_environment() -> Self {
+        Self {
+            claude: Some(toolpath_claude::ClaudeConvo::new()),
+            gemini: Some(toolpath_gemini::GeminiConvo::new()),
+            codex: Some(toolpath_codex::CodexConvo::new()),
+            opencode: Some(toolpath_opencode::OpencodeConvo::new()),
+            pi: Some(toolpath_pi::PiConvo::new()),
+        }
+    }
+}
+```
+
+- [ ] **Step 4.4: Run the tests to verify they pass**
+
+```bash
+cargo test -p path-cli --lib cmd_share
+```
+
+Expected: PASS.
+
+- [ ] **Step 4.5: Commit**
+
+```bash
+git add crates/path-cli/src/cmd_share.rs
+git commit -m "feat(path-cli): add Harness, SessionRow, HarnessBundle types
+
+Pure data types plus from_arg/parse helpers and a project_keyed
+predicate. HarnessBundle::from_environment instantiates each provider
+unconditionally; gather_sessions (next task) skips providers whose
+listing returns empty or NotFound."
+```
+
+---
+
+## Task 5: Implement `gather_sessions` for project-keyed harnesses (claude, gemini, pi)
+
+Aggregator collects rows from claude/gemini/pi only in this task. Codex/opencode arrive in task 6. Each provider gets one unit test that uses an injectable resolver to point at a tempdir. 
+ +**Files:** +- Modify: `crates/path-cli/src/cmd_share.rs` + +- [ ] **Step 5.1: Write the failing tests** + +Append to the `mod tests` block in `cmd_share.rs`: + +```rust + use std::path::Path; + use tempfile::TempDir; + + fn write_claude_session(claude_dir: &Path, project_slug: &str, session: &str, prompt: &str) { + let project_dir = claude_dir.join("projects").join(project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + let user = format!( + r#"{{"type":"user","uuid":"u-{session}","timestamp":"2024-01-02T00:00:00Z","cwd":"/test/project","message":{{"role":"user","content":"{prompt}"}}}}"# + ); + let asst = format!( + r#"{{"type":"assistant","uuid":"a-{session}","timestamp":"2024-01-02T00:00:01Z","message":{{"role":"assistant","content":"hi"}}}}"# + ); + std::fs::write( + project_dir.join(format!("{session}.jsonl")), + format!("{user}\n{asst}\n"), + ) + .unwrap(); + } + + fn claude_only_bundle(home: &Path) -> HarnessBundle { + let claude_dir = home.join(".claude"); + std::fs::create_dir_all(&claude_dir).unwrap(); + let resolver = toolpath_claude::PathResolver::new().with_claude_dir(&claude_dir); + HarnessBundle { + claude: Some(toolpath_claude::ClaudeConvo::with_resolver(resolver)), + ..Default::default() + } + } + + #[test] + fn gather_sessions_includes_claude_rows_for_a_project() { + let temp = TempDir::new().unwrap(); + write_claude_session( + &temp.path().join(".claude"), + "-test-project", + "abc-session-one", + "Add a feature", + ); + let bundle = claude_only_bundle(temp.path()); + let cwd = Path::new("/test/project"); + let rows = gather_sessions(&bundle, cwd, None, None); + + assert_eq!(rows.len(), 1); + assert_eq!(rows[0].harness, Harness::Claude); + assert_eq!(rows[0].session_id, "abc-session-one"); + assert_eq!(rows[0].project.as_deref(), Some("/test/project")); + assert!(rows[0].matches_cwd, "cwd should match the project path"); + } + + #[test] + fn gather_sessions_marks_non_matching_project_rows() { + let temp = TempDir::new().unwrap(); + 
write_claude_session( + &temp.path().join(".claude"), + "-test-project", + "abc-session-one", + "Add a feature", + ); + let bundle = claude_only_bundle(temp.path()); + let cwd = Path::new("/some/other/place"); + let rows = gather_sessions(&bundle, cwd, None, None); + + assert_eq!(rows.len(), 1); + assert!(!rows[0].matches_cwd); + } + + #[test] + fn gather_sessions_skips_harness_with_no_home_dir() { + // Empty bundle => no rows, no panic. + let bundle = HarnessBundle::default(); + let rows = gather_sessions(&bundle, Path::new("/anywhere"), None, None); + assert!(rows.is_empty()); + } + + #[test] + fn gather_sessions_filters_by_harness() { + let temp = TempDir::new().unwrap(); + write_claude_session( + &temp.path().join(".claude"), + "-test-project", + "abc-session-one", + "hi", + ); + let bundle = claude_only_bundle(temp.path()); + let cwd = Path::new("/test/project"); + let rows = gather_sessions(&bundle, cwd, Some(Harness::Codex), None); + assert!(rows.is_empty(), "filter to codex must drop claude rows"); + } +``` + +- [ ] **Step 5.2: Run the tests to confirm they fail** + +```bash +cargo test -p path-cli --lib cmd_share::tests::gather +``` + +Expected: FAIL — `gather_sessions` doesn't exist. + +- [ ] **Step 5.3: Implement `gather_sessions` for the three project-keyed harnesses** + +Add above the `mod tests` block: + +```rust +/// Aggregate sessions across the harnesses in `bundle`, ranked so that +/// rows whose project (or recorded cwd) canonicalizes to `cwd` come +/// first, sorted by descending `last_activity`. +/// +/// Filters: `harness_filter` keeps only rows from one harness; `project_filter` +/// keeps only rows whose project (for keyed) or cwd (for session-keyed) +/// canonicalizes to that path. 
+pub(crate) fn gather_sessions(
+    bundle: &HarnessBundle,
+    cwd: &std::path::Path,
+    harness_filter: Option<Harness>,
+    project_filter: Option<&std::path::Path>,
+) -> Vec<SessionRow> {
+    let mut rows = Vec::new();
+    let canonical_cwd = canonicalize_or_self(cwd);
+    let canonical_project = project_filter.map(canonicalize_or_self);
+
+    let want = |h: Harness| harness_filter.is_none_or(|f| f == h);
+
+    if want(Harness::Claude) {
+        if let Some(mgr) = &bundle.claude {
+            collect_claude(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows);
+        }
+    }
+    if want(Harness::Gemini) {
+        if let Some(mgr) = &bundle.gemini {
+            collect_gemini(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows);
+        }
+    }
+    if want(Harness::Pi) {
+        if let Some(mgr) = &bundle.pi {
+            collect_pi(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows);
+        }
+    }
+
+    rows.sort_by(|a, b| {
+        b.matches_cwd
+            .cmp(&a.matches_cwd)
+            .then_with(|| b.last_activity.cmp(&a.last_activity))
+    });
+    rows
+}
+
+fn canonicalize_or_self(p: &std::path::Path) -> std::path::PathBuf {
+    std::fs::canonicalize(p).unwrap_or_else(|_| p.to_path_buf())
+}
+
+fn paths_match(a: &std::path::Path, b: &std::path::Path) -> bool {
+    canonicalize_or_self(a) == canonicalize_or_self(b)
+}
+
+fn collect_claude(
+    mgr: &toolpath_claude::ClaudeConvo,
+    canonical_cwd: &std::path::Path,
+    project_filter: Option<&std::path::Path>,
+    out: &mut Vec<SessionRow>,
+) {
+    let projects = match mgr.list_projects() {
+        Ok(ps) if !ps.is_empty() => ps,
+        Ok(_) => return,
+        Err(e) if is_not_found(&e) => return,
+        Err(e) => {
+            eprintln!("warning: claude aggregation failed: {e}");
+            return;
+        }
+    };
+    for project in projects {
+        let project_path = std::path::Path::new(&project);
+        if let Some(filter) = project_filter
+            && !paths_match(project_path, filter)
+        {
+            continue;
+        }
+        let metas = match mgr.list_conversation_metadata(&project) {
+            Ok(m) => m,
+            Err(e) => {
+                eprintln!("warning: claude project {project} failed: {e}");
+                continue;
+            }
+        };
+        let matches_cwd =
paths_match(project_path, canonical_cwd);
+        for m in metas {
+            out.push(SessionRow {
+                harness: Harness::Claude,
+                project: Some(m.project_path),
+                cwd: None,
+                session_id: m.session_id,
+                title: m
+                    .first_user_message
+                    .unwrap_or_else(|| "(no prompt)".to_string()),
+                last_activity: m.last_activity,
+                message_count: m.message_count,
+                matches_cwd,
+            });
+        }
+    }
+}
+
+fn collect_gemini(
+    mgr: &toolpath_gemini::GeminiConvo,
+    canonical_cwd: &std::path::Path,
+    project_filter: Option<&std::path::Path>,
+    out: &mut Vec<SessionRow>,
+) {
+    let projects = match mgr.list_projects() {
+        Ok(ps) if !ps.is_empty() => ps,
+        Ok(_) => return,
+        Err(e) if is_not_found(&e) => return,
+        Err(e) => {
+            eprintln!("warning: gemini aggregation failed: {e}");
+            return;
+        }
+    };
+    for project in projects {
+        let project_path = std::path::Path::new(&project);
+        if let Some(filter) = project_filter
+            && !paths_match(project_path, filter)
+        {
+            continue;
+        }
+        let metas = match mgr.list_conversation_metadata(&project) {
+            Ok(m) => m,
+            Err(e) => {
+                eprintln!("warning: gemini project {project} failed: {e}");
+                continue;
+            }
+        };
+        let matches_cwd = paths_match(project_path, canonical_cwd);
+        for m in metas {
+            out.push(SessionRow {
+                harness: Harness::Gemini,
+                project: Some(m.project_path),
+                cwd: None,
+                session_id: m.session_uuid,
+                title: m
+                    .first_user_message
+                    .unwrap_or_else(|| "(no prompt)".to_string()),
+                last_activity: m.last_activity,
+                message_count: m.message_count,
+                matches_cwd,
+            });
+        }
+    }
+}
+
+fn collect_pi(
+    mgr: &toolpath_pi::PiConvo,
+    canonical_cwd: &std::path::Path,
+    project_filter: Option<&std::path::Path>,
+    out: &mut Vec<SessionRow>,
+) {
+    let projects = match mgr.list_projects() {
+        Ok(ps) if !ps.is_empty() => ps,
+        Ok(_) => return,
+        Err(e) if is_not_found_pi(&e) => return,
+        Err(e) => {
+            eprintln!("warning: pi aggregation failed: {e}");
+            return;
+        }
+    };
+    for project in projects {
+        let project_path = std::path::Path::new(&project);
+        if let Some(filter) = project_filter
&& !paths_match(project_path, filter) + { + continue; + } + let metas = match mgr.list_sessions(&project) { + Ok(m) => m, + Err(e) => { + eprintln!("warning: pi project {project} failed: {e}"); + continue; + } + }; + let matches_cwd = paths_match(project_path, canonical_cwd); + for m in metas { + // SessionMeta.timestamp is a String; parse to DateTime when possible. + let last_activity = chrono::DateTime::parse_from_rfc3339(&m.timestamp) + .ok() + .map(|d| d.with_timezone(&Utc)); + out.push(SessionRow { + harness: Harness::Pi, + project: Some(project.clone()), + cwd: None, + session_id: m.id, + title: m + .first_user_message + .unwrap_or_else(|| "(no prompt)".to_string()), + last_activity, + message_count: m.entry_count, + matches_cwd, + }); + } + } +} + +fn is_not_found(err: &toolpath_claude::ConvoError) -> bool { + use toolpath_claude::ConvoError; + matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) + || matches!(err, ConvoError::ClaudeDirectoryNotFound(_)) +} + +fn is_not_found_pi(err: &toolpath_pi::PiError) -> bool { + use toolpath_pi::PiError; + matches!(err, PiError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, PiError::ProjectNotFound(_)) +} +``` + +Note: claude / gemini / codex / opencode all re-export `ConvoError` with `Io(io::Error)` and `NoHomeDirectory` variants. Pi re-exports `PiError` (different name) with `Io` and `ProjectNotFound` variants. The helpers above already account for that. Variant names were verified against `crates/toolpath-{claude,gemini,codex,opencode,pi}/src/error.rs` while writing this plan. + +- [ ] **Step 5.4: Run the tests to verify they pass** + +```bash +cargo test -p path-cli --lib cmd_share +``` + +Expected: PASS. If `is_not_found` doesn't compile, inspect the provider's `ConvoError` enum and adjust the match arms; the test set still passes once it compiles because the fixture has a real home. 
+ +- [ ] **Step 5.5: Run clippy to catch warning-as-error issues** + +```bash +cargo clippy -p path-cli -- -D warnings +``` + +Expected: clean. + +- [ ] **Step 5.6: Commit** + +```bash +git add crates/path-cli/src/cmd_share.rs +git commit -m "feat(path-cli): implement gather_sessions for claude/gemini/pi + +Aggregates SessionRow values from the three project-keyed providers, +sorts cwd-matching rows first then by recency, and silently skips +harnesses whose listing returns empty or NotFound. Codex and opencode +land in the next commit." +``` + +--- + +## Task 6: Extend `gather_sessions` to codex and opencode + add ranking/filter coverage + +Codex and opencode address sessions by id; their `cwd` lives inside the session metadata, so the matching logic differs slightly from the project-keyed harnesses. + +**Files:** +- Modify: `crates/path-cli/src/cmd_share.rs` + +- [ ] **Step 6.1: Write the failing tests** + +Append to the `mod tests` block in `cmd_share.rs`: + +```rust + fn codex_only_bundle(home: &Path) -> HarnessBundle { + let codex_dir = home.join(".codex"); + std::fs::create_dir_all(&codex_dir).unwrap(); + let resolver = toolpath_codex::PathResolver::new().with_codex_dir(&codex_dir); + HarnessBundle { + codex: Some(toolpath_codex::CodexConvo::with_resolver(resolver)), + ..Default::default() + } + } + + fn write_codex_session(codex_dir: &Path, id: &str, cwd: &str) { + // Date-bucketed layout: ~/.codex/sessions/YYYY/MM/DD/rollout-*-.jsonl + let dir = codex_dir.join("sessions/2026/05/07"); + std::fs::create_dir_all(&dir).unwrap(); + let file = dir.join(format!("rollout-2026-05-07T00-00-00-{id}.jsonl")); + let meta = format!( + r#"{{"timestamp":"2026-05-07T00:00:00Z","type":"session_meta","payload":{{"id":"{id}","timestamp":"2026-05-07T00:00:00Z","cwd":"{cwd}","originator":"codex-tui","cli_version":"test","source":"cli","model_provider":"openai"}}}}"# + ); + let user = format!( + 
r#"{{"timestamp":"2026-05-07T00:00:01Z","type":"response_item","payload":{{"type":"message","role":"user","content":[{{"type":"input_text","text":"hi"}}]}}}}"# + ); + std::fs::write(file, format!("{meta}\n{user}\n")).unwrap(); + } + + #[test] + fn gather_sessions_includes_codex_rows_with_cwd_match() { + let temp = TempDir::new().unwrap(); + write_codex_session( + &temp.path().join(".codex"), + "00000000-0000-0000-0000-0000000000aa", + "/work/proj", + ); + let bundle = codex_only_bundle(temp.path()); + let rows = gather_sessions(&bundle, Path::new("/work/proj"), None, None); + assert_eq!(rows.len(), 1); + assert_eq!(rows[0].harness, Harness::Codex); + assert_eq!(rows[0].cwd.as_deref(), Some("/work/proj")); + assert!(rows[0].matches_cwd); + } + + #[test] + fn gather_sessions_ranks_cwd_matches_first() { + // Two claude sessions: one in cwd (older), one elsewhere (newer). + // Despite the elsewhere row being newer, the cwd-match must come first. + let temp = TempDir::new().unwrap(); + let claude_dir = temp.path().join(".claude"); + write_claude_session(&claude_dir, "-cwd-project", "in-cwd-session", "hi"); + // Bump activity on the not-in-cwd session by writing a later timestamp. 
+
+        let not_dir = claude_dir.join("projects").join("-other-project");
+        std::fs::create_dir_all(&not_dir).unwrap();
+        std::fs::write(
+            not_dir.join("not-in-cwd-session.jsonl"),
+            r#"{"type":"user","uuid":"u-x","timestamp":"2030-01-01T00:00:00Z","cwd":"/other/project","message":{"role":"user","content":"later"}}"#.to_string()
+                + "\n",
+        )
+        .unwrap();
+        let bundle = claude_only_bundle(temp.path());
+        let rows = gather_sessions(&bundle, Path::new("/cwd/project"), None, None);
+
+        assert_eq!(rows.len(), 2);
+        assert_eq!(rows[0].session_id, "in-cwd-session");
+        assert!(rows[0].matches_cwd);
+        assert!(!rows[1].matches_cwd);
+    }
+```
+
+- [ ] **Step 6.2: Run the tests to confirm they fail**
+
+```bash
+cargo test -p path-cli --lib cmd_share::tests::gather_sessions_includes_codex
+cargo test -p path-cli --lib cmd_share::tests::gather_sessions_ranks
+```
+
+Expected: FAIL — codex collection isn't implemented.
+
+- [ ] **Step 6.3: Add `collect_codex` and `collect_opencode` and dispatch them**
+
+Inside `gather_sessions`, add the two extra blocks after the pi block:
+
+```rust
+    if want(Harness::Codex) {
+        if let Some(mgr) = &bundle.codex {
+            collect_codex(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows);
+        }
+    }
+    if want(Harness::Opencode) {
+        if let Some(mgr) = &bundle.opencode {
+            collect_opencode(mgr, &canonical_cwd, canonical_project.as_deref(), &mut rows);
+        }
+    }
+```
+
+Add the two new collector functions next to the existing ones:
+
+```rust
+fn collect_codex(
+    mgr: &toolpath_codex::CodexConvo,
+    canonical_cwd: &std::path::Path,
+    project_filter: Option<&std::path::Path>,
+    out: &mut Vec<SessionRow>,
+) {
+    let metas = match mgr.list_sessions() {
+        Ok(m) if !m.is_empty() => m,
+        Ok(_) => return,
+        Err(e) if is_not_found_codex(&e) => return,
+        Err(e) => {
+            eprintln!("warning: codex aggregation failed: {e}");
+            return;
+        }
+    };
+    for m in metas {
+        let cwd_str = m.cwd.as_ref().map(|p| p.to_string_lossy().into_owned());
+        if let Some(filter) = project_filter {
+            let
stored = match cwd_str.as_deref() { + Some(s) => std::path::PathBuf::from(s), + None => continue, + }; + if !paths_match(&stored, filter) { + continue; + } + } + let matches_cwd = m + .cwd + .as_deref() + .map(|p| paths_match(p, canonical_cwd)) + .unwrap_or(false); + out.push(SessionRow { + harness: Harness::Codex, + project: None, + cwd: cwd_str, + session_id: m.id, + title: m + .first_user_message + .unwrap_or_else(|| "(no prompt)".to_string()), + last_activity: m.last_activity, + message_count: m.line_count, + matches_cwd, + }); + } +} + +fn collect_opencode( + mgr: &toolpath_opencode::OpencodeConvo, + canonical_cwd: &std::path::Path, + project_filter: Option<&std::path::Path>, + out: &mut Vec, +) { + let metas = match mgr.io().list_session_metadata(None) { + Ok(m) if !m.is_empty() => m, + Ok(_) => return, + Err(e) if is_not_found_opencode(&e) => return, + Err(e) => { + eprintln!("warning: opencode aggregation failed: {e}"); + return; + } + }; + for m in metas { + if let Some(filter) = project_filter + && !paths_match(&m.directory, filter) + { + continue; + } + let matches_cwd = paths_match(&m.directory, canonical_cwd); + let cwd_str = m.directory.to_string_lossy().into_owned(); + let title = match (&m.first_user_message, m.title.is_empty()) { + (Some(s), _) if !s.is_empty() => s.clone(), + (_, false) => m.title.clone(), + _ => "(no prompt)".to_string(), + }; + out.push(SessionRow { + harness: Harness::Opencode, + project: None, + cwd: Some(cwd_str), + session_id: m.id, + title, + last_activity: m.last_activity, + message_count: m.message_count, + matches_cwd, + }); + } +} + +fn is_not_found_codex(err: &toolpath_codex::ConvoError) -> bool { + use toolpath_codex::ConvoError; + matches!(err, ConvoError::Io(e) if e.kind() == std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) +} + +fn is_not_found_opencode(err: &toolpath_opencode::ConvoError) -> bool { + use toolpath_opencode::ConvoError; + matches!(err, ConvoError::Io(e) if e.kind() == 
std::io::ErrorKind::NotFound) + || matches!(err, ConvoError::NoHomeDirectory) +} +``` + +(Both `is_not_found_codex` and `is_not_found_opencode` use `ConvoError` since both providers re-export that name. Variant names verified against `crates/toolpath-{codex,opencode}/src/error.rs`.) + +- [ ] **Step 6.4: Run the tests to verify they pass** + +```bash +cargo test -p path-cli --lib cmd_share +cargo clippy -p path-cli -- -D warnings +``` + +Expected: PASS, clippy clean. + +- [ ] **Step 6.5: Commit** + +```bash +git add crates/path-cli/src/cmd_share.rs +git commit -m "feat(path-cli): cover codex+opencode in gather_sessions + +Adds collect_codex/collect_opencode and the matching ranking/filter +tests. Session-keyed providers compare canonical(stored_cwd) to +canonical(cwd) for matches_cwd; project_filter applies to the same +recorded cwd." +``` + +--- + +## Task 7: Implement explicit-args path (skip picker, derive, upload) + +This makes `path share --harness X --session Y [--project P] [--anon] ...` end-to-end functional. The picker path lands in task 8. + +**Files:** +- Modify: `crates/path-cli/src/cmd_share.rs` +- Modify: `crates/path-cli/tests/integration.rs` + +- [ ] **Step 7.1: Write the failing integration test** + +Append to `crates/path-cli/tests/integration.rs`: + +```rust +#[test] +fn share_explicit_args_uploads_via_anon() { + use std::io::Write; + use std::net::TcpListener; + + // Stand up a one-shot mock that returns a valid AnonUploadResponse. + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let port = listener.local_addr().unwrap().port(); + let server = std::thread::spawn(move || { + let (mut stream, _) = listener.accept().unwrap(); + // Drain the request just enough to keep the OS happy. 
+ use std::io::Read; + let mut buf = [0u8; 4096]; + let _ = stream.read(&mut buf); + let body = r#"{"id":"abc-123","url":"https://example.test/anon/abc-123"}"#; + let resp = format!( + "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", + body.len(), + body + ); + let _ = stream.write_all(resp.as_bytes()); + }); + + // Build a claude fixture so the explicit-args path has something to derive. + let temp = tempfile::tempdir().unwrap(); + let project = temp.path().join("proj"); + std::fs::create_dir_all(&project).unwrap(); + let claude_dir = temp.path().join(".claude"); + let project_slug = "-".to_string() + + &project.to_string_lossy().replace(std::path::MAIN_SEPARATOR, "-"); + let project_dir = claude_dir.join("projects").join(&project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + std::fs::write( + project_dir.join("session-abc.jsonl"), + format!( + r#"{{"type":"user","uuid":"u-1","timestamp":"2024-01-01T00:00:00Z","cwd":"{cwd}","message":{{"role":"user","content":"hi"}}}} +{{"type":"assistant","uuid":"a-1","timestamp":"2024-01-01T00:00:01Z","message":{{"role":"assistant","content":"hello"}}}} +"#, + cwd = project.display() + ), + ) + .unwrap(); + + let cfg = tempfile::tempdir().unwrap(); + cmd() + .env("HOME", temp.path()) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--anon", "--no-cache", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success() + .stdout(predicate::str::contains("https://example.test/anon/abc-123")) + .stderr(predicate::str::contains("Uploaded")); + + server.join().unwrap(); +} +``` + +- [ ] **Step 7.2: Run the test to confirm it fails** + +```bash +cargo test -p path-cli --test integration share_explicit_args_uploads_via_anon +``` + +Expected: FAIL — `path share` still bails with "not yet implemented". 
+ +- [ ] **Step 7.3: Implement the explicit-args path** + +In `cmd_share.rs`, replace the stub `pub fn run` with: + +```rust +pub fn run(args: ShareArgs) -> Result<()> { + let harness = args.harness.map(Harness::from_arg); + + if let (Some(h), Some(session)) = (harness, &args.session) { + return share_explicit(h, session.as_str(), &args); + } + + if args.session.is_some() && harness.is_none() { + anyhow::bail!("--session requires --harness"); + } + + // Picker path lands in the next task. + anyhow::bail!("interactive `path share` is not yet implemented") +} + +fn share_explicit(harness: Harness, session: &str, args: &ShareArgs) -> Result<()> { + let project = match (harness.project_keyed(), args.project.as_ref()) { + (true, Some(p)) => Some(p.to_string_lossy().into_owned()), + (true, None) => anyhow::bail!( + "--project required when --harness is {} and --session is set", + harness.name() + ), + (false, _) => None, + }; + + let derived = derive_one(harness, project.as_deref(), session)?; + let summary = format!( + "{} session {}", + harness.name(), + derived.cache_id + ); + + if !args.no_cache { + let path = crate::cmd_cache::write_cached(&derived.cache_id, &derived.doc, args.force)?; + eprintln!( + "Imported {} session → {} ({})", + harness.name(), + derived.cache_id, + path.display() + ); + } + + let body = derived.doc.to_json()?; + let upload = crate::cmd_export::PathbaseUploadArgs { + url: args.url.clone(), + anon: args.anon, + repo: args.repo.clone(), + slug: args.slug.clone(), + public: args.public, + }; + crate::cmd_export::run_pathbase_inner(upload, &body, &summary) +} + +fn derive_one( + harness: Harness, + project: Option<&str>, + session: &str, +) -> Result { + match harness { + Harness::Claude => { + crate::cmd_import::derive_claude_pair(project.expect("project_keyed"), session) + } + Harness::Gemini => crate::cmd_import::derive_gemini_pair( + project.expect("project_keyed"), + session, + false, + ), + Harness::Pi => { + 
crate::cmd_import::derive_pi_pair(project.expect("project_keyed"), session, None) + } + Harness::Codex => crate::cmd_import::derive_codex_one(session), + Harness::Opencode => crate::cmd_import::derive_opencode_one(session, false), + } +} +``` + +`RepoSpec` is `Clone`-able via the existing `#[derive(Debug, Clone)]` on the struct in `cmd_export`, so `args.repo.clone()` works. + +- [ ] **Step 7.4: Run the test to verify it passes** + +```bash +cargo test -p path-cli --test integration share_explicit_args_uploads_via_anon +``` + +Expected: PASS. + +- [ ] **Step 7.5: Add cache-behavior integration tests** + +Append to `crates/path-cli/tests/integration.rs`: + +```rust +/// Helper for the cache tests. Spawns a one-shot mock anon-upload server +/// on a free port and returns (port, server-thread-handle, fixture-temp, +/// project-path, $HOME-path). +fn share_anon_fixture() -> (u16, std::thread::JoinHandle<()>, tempfile::TempDir, PathBuf, PathBuf) +{ + use std::io::{Read, Write}; + use std::net::TcpListener; + + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let port = listener.local_addr().unwrap().port(); + let server = std::thread::spawn(move || { + let (mut stream, _) = listener.accept().unwrap(); + let mut buf = [0u8; 4096]; + let _ = stream.read(&mut buf); + let body = r#"{"id":"abc","url":"https://example.test/anon/abc"}"#; + let resp = format!( + "HTTP/1.1 201 Created\r\nContent-Length: {}\r\nContent-Type: application/json\r\n\r\n{}", + body.len(), + body + ); + let _ = stream.write_all(resp.as_bytes()); + }); + + let temp = tempfile::tempdir().unwrap(); + let project = temp.path().join("proj"); + std::fs::create_dir_all(&project).unwrap(); + let claude_dir = temp.path().join(".claude"); + let project_slug = "-".to_string() + + &project.to_string_lossy().replace(std::path::MAIN_SEPARATOR, "-"); + let project_dir = claude_dir.join("projects").join(&project_slug); + std::fs::create_dir_all(&project_dir).unwrap(); + std::fs::write( + 
project_dir.join("session-abc.jsonl"), + format!( + r#"{{"type":"user","uuid":"u-1","timestamp":"2024-01-01T00:00:00Z","cwd":"{cwd}","message":{{"role":"user","content":"hi"}}}} +{{"type":"assistant","uuid":"a-1","timestamp":"2024-01-01T00:00:01Z","message":{{"role":"assistant","content":"hello"}}}} +"#, + cwd = project.display() + ), + ) + .unwrap(); + + let home = temp.path().to_path_buf(); + (port, server, temp, project, home) +} + +#[test] +fn share_writes_cache_by_default() { + let (port, server, _temp, project, home) = share_anon_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("HOME", &home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--anon", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success(); + + let docs = cfg.path().join("documents"); + let entries: Vec<_> = std::fs::read_dir(&docs) + .unwrap() + .filter_map(|e| e.ok()) + .collect(); + assert_eq!( + entries.len(), + 1, + "expected exactly one cache entry, got {entries:?}" + ); + let name = entries[0].file_name().to_string_lossy().into_owned(); + assert!( + name.starts_with("claude-"), + "expected claude-* cache id, got {name}" + ); + + server.join().unwrap(); +} + +#[test] +fn share_no_cache_skips_write() { + let (port, server, _temp, project, home) = share_anon_fixture(); + let cfg = tempfile::tempdir().unwrap(); + + cmd() + .env("HOME", &home) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args([ + "share", + "--harness", + "claude", + "--session", + "session-abc", + "--project", + ]) + .arg(&project) + .args(["--anon", "--no-cache", "--url"]) + .arg(format!("http://127.0.0.1:{port}")) + .assert() + .success(); + + let docs = cfg.path().join("documents"); + if docs.exists() { + let entries: Vec<_> = std::fs::read_dir(&docs) + .unwrap() + .filter_map(|e| e.ok()) + .collect(); + assert!( + entries.is_empty(), + "expected no cache 
entries with --no-cache, got {entries:?}" + ); + } + + server.join().unwrap(); +} +``` + +- [ ] **Step 7.6: Run the new tests** + +```bash +cargo test -p path-cli --test integration share_writes_cache_by_default share_no_cache_skips_write +``` + +Expected: PASS. + +- [ ] **Step 7.7: Run the full test suite + clippy** + +```bash +cargo test -p path-cli +cargo clippy -p path-cli -- -D warnings +``` + +Expected: green. + +- [ ] **Step 7.8: Commit** + +```bash +git add crates/path-cli/src/cmd_share.rs crates/path-cli/tests/integration.rs +git commit -m "feat(path-cli): implement \`path share\` explicit-args path + +When --harness and --session are both set, share derives the session +via cmd_import's pair helpers, optionally writes the cache, then +uploads via cmd_export::run_pathbase_inner. Picker path follows." +``` + +--- + +## Task 8: Implement the picker, non-TTY recipe, and empty-result probe summary + +Adds the unified fzf picker, the recipe message when fzf isn't available, and the probe-summary error when no sessions exist anywhere. 
+ +**Files:** +- Modify: `crates/path-cli/src/cmd_share.rs` +- Modify: `crates/path-cli/tests/integration.rs` + +- [ ] **Step 8.1: Write the failing tests** + +Append to `crates/path-cli/tests/integration.rs`: + +```rust +#[test] +fn share_filters_by_project_with_no_matches_errors() { + let cfg = tempfile::tempdir().unwrap(); + let home = tempfile::tempdir().unwrap(); + let nonexistent = home.path().join("never"); + + cmd() + .env("HOME", home.path()) + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["share", "--project"]) + .arg(&nonexistent) + .assert() + .failure() + .stderr(predicate::str::contains("No agent sessions found in project")); +} +``` + +Append to `crates/path-cli/src/cmd_share.rs` `mod tests`: + +```rust + #[test] + fn parse_picker_row_roundtrips_keyed() { + let row = SessionRow { + harness: Harness::Claude, + project: Some("/tmp/proj".to_string()), + cwd: None, + session_id: "sess-abc".to_string(), + title: "Hello\tworld".to_string(), + last_activity: None, + message_count: 3, + matches_cwd: true, + }; + let line = format_picker_row(&row); + let (harness, key, session) = parse_picker_row(&line).unwrap(); + assert_eq!(harness, Harness::Claude); + assert_eq!(key, "/tmp/proj"); + assert_eq!(session, "sess-abc"); + } + + #[test] + fn parse_picker_row_roundtrips_session_keyed() { + let row = SessionRow { + harness: Harness::Codex, + project: None, + cwd: Some("/work/proj".to_string()), + session_id: "0190abcd".to_string(), + title: "(no prompt)".to_string(), + last_activity: None, + message_count: 0, + matches_cwd: false, + }; + let line = format_picker_row(&row); + let (harness, key, session) = parse_picker_row(&line).unwrap(); + assert_eq!(harness, Harness::Codex); + assert_eq!(key, "/work/proj"); // codex has no project; cwd carried as the keyed slot + assert_eq!(session, "0190abcd"); + } +``` + +Append to `crates/path-cli/tests/integration.rs`: + +```rust +#[test] +fn share_no_harness_non_tty_prints_recipe() { + let cfg = 
tempfile::tempdir().unwrap(); + cmd() + .env("TOOLPATH_CONFIG_DIR", cfg.path()) + .args(["share"]) + .assert() + .failure() + .stderr(predicate::str::contains("path import")) + .stderr(predicate::str::contains("path export pathbase")); +} +``` + +- [ ] **Step 8.2: Run the tests to confirm they fail** + +```bash +cargo test -p path-cli --lib cmd_share::tests::parse_picker_row +cargo test -p path-cli --test integration share_no_harness_non_tty_prints_recipe +cargo test -p path-cli --test integration share_filters_by_project_with_no_matches_errors +``` + +Expected: FAIL — picker functions, non-TTY message, and probe-summary path don't exist. + +- [ ] **Step 8.3: Add picker formatting + dispatch** + +Append to `cmd_share.rs`: + +```rust +/// Build the TSV line fed to fzf. Cols 1–3 are hidden (harness/key/session, +/// used as parser keys); cols 4..8 are visible to the user. +fn format_picker_row(row: &SessionRow) -> String { + let key = row + .project + .clone() + .or_else(|| row.cwd.clone()) + .unwrap_or_default(); + let when = row + .last_activity + .map(|t| t.format("%Y-%m-%d %H:%M").to_string()) + .unwrap_or_else(|| " — ".to_string()); + let scope = if row.matches_cwd { "·" } else { " " }; + let project_short = project_short(&key); + let title = fzf_title(&row.title); + format!( + "{}\t{}\t{}\t{}\t{}\t{} msgs\t{}\t{}\t{}", + row.harness.name(), + tab_safe(&key), + tab_safe(&row.session_id), + row.harness.symbol(), + when, + row.message_count, + scope, + tab_safe(&project_short), + title, + ) +} + +/// Inverse of [`format_picker_row`] — pulls (harness, key, session) back +/// out of the line fzf returned. Returns `None` if the line is malformed. 
+fn parse_picker_row(line: &str) -> Option<(Harness, String, String)> {
+    let mut parts = line.split('\t');
+    let h = Harness::parse(parts.next()?)?;
+    let key = parts.next()?.to_string();
+    let session = parts.next()?.to_string();
+    if session.is_empty() {
+        return None;
+    }
+    Some((h, key, session))
+}
+
+fn tab_safe(s: &str) -> String {
+    s.replace(['\t', '\n', '\r'], " ")
+}
+
+fn fzf_title(s: &str) -> String {
+    const MAX: usize = 120;
+    let safe = tab_safe(s);
+    if safe.chars().count() > MAX {
+        let head: String = safe.chars().take(MAX - 1).collect();
+        format!("{head}…")
+    } else {
+        safe
+    }
+}
+
+fn project_short(p: &str) -> String {
+    let trimmed = p.trim_end_matches('/');
+    let parts: Vec<&str> = trimmed.rsplit('/').take(2).collect();
+    if parts.is_empty() {
+        return p.to_string();
+    }
+    let mut out: Vec<&str> = parts.into_iter().collect();
+    out.reverse();
+    out.join("/")
+}
+```
+
+- [ ] **Step 8.4: Wire the picker into `run`**
+
+Replace the second `anyhow::bail!` in `pub fn run` with the picker dispatch:
+
+```rust
+pub fn run(args: ShareArgs) -> Result<()> {
+    let harness = args.harness.map(Harness::from_arg);
+
+    if let (Some(h), Some(session)) = (harness, &args.session) {
+        return share_explicit(h, session.as_str(), &args);
+    }
+    if args.session.is_some() && harness.is_none() {
+        anyhow::bail!("--session requires --harness");
+    }
+
+    let cwd = std::env::current_dir()?;
+    let bundle = HarnessBundle::from_environment();
+    let project_filter = args.project.as_deref();
+    let rows = gather_sessions(&bundle, &cwd, harness, project_filter);
+
+    if rows.is_empty() {
+        return bail_no_sessions(&bundle, project_filter);
+    }
+
+    if !crate::fzf::available() {
+        eprintln!(
+            "Interactive `path share` needs `fzf` on PATH and a TTY.\n\
+             \n\
+             Manual recipe:\n  \
+               path import <harness>   # writes a cache entry, prints its id\n  \
+               path export pathbase --input <id>"
+        );
+        anyhow::bail!("fzf unavailable; run `path import <harness>` then `path export pathbase`");
+    }
+
+    let lines: Vec<String> = 
rows.iter().map(format_picker_row).collect(); + let host = pathbase_host_for_picker(&args); + let header = format!("share an agent session (Enter = upload to {host})"); + let opts = crate::fzf::PickOptions { + with_nth: "4..", + prompt: "share> ", + preview: Some("path show {1} --project {2} --session {3}"), + header: Some(&header), + tiebreak: "index", + multi: false, + }; + let selected = crate::fzf::pick(&lines, &opts)?; + let line = match selected.into_iter().next() { + Some(l) => l, + None => return Ok(()), // user cancelled + }; + let (h, key, session) = parse_picker_row(&line) + .ok_or_else(|| anyhow::anyhow!("internal: failed to parse picker row"))?; + + let mut explicit = ShareArgs { + url: args.url.clone(), + anon: args.anon, + repo: args.repo.clone(), + slug: args.slug.clone(), + public: args.public, + harness: Some(harness_to_arg(h)), + session: Some(session.clone()), + project: if h.project_keyed() { + Some(PathBuf::from(&key)) + } else { + None + }, + force: args.force, + no_cache: args.no_cache, + }; + eprintln!( + "Picked {} session {}", + h.name(), + explicit.session.as_deref().unwrap_or("?") + ); + let session_id = explicit.session.take().unwrap(); + share_explicit(h, &session_id, &explicit) +} + +fn harness_to_arg(h: Harness) -> HarnessArg { + match h { + Harness::Claude => HarnessArg::Claude, + Harness::Gemini => HarnessArg::Gemini, + Harness::Codex => HarnessArg::Codex, + Harness::Opencode => HarnessArg::Opencode, + Harness::Pi => HarnessArg::Pi, + } +} + +fn pathbase_host_for_picker(args: &ShareArgs) -> String { + use crate::cmd_pathbase::resolve_url; + if let Some(u) = &args.url { + return resolve_url(Some(u.clone())); + } + // Best-effort: if there's a stored session, surface its URL; otherwise fall back to default. 
+ let path = match crate::cmd_pathbase::credentials_path() { + Ok(p) => p, + Err(_) => return resolve_url(None), + }; + match crate::cmd_pathbase::load_session(&path) { + Ok(Some(s)) => s.url, + _ => resolve_url(None), + } +} + +fn bail_no_sessions(bundle: &HarnessBundle, project_filter: Option<&std::path::Path>) -> Result<()> { + if let Some(p) = project_filter { + anyhow::bail!( + "No agent sessions found in project {}. Run without --project to see sessions across all projects.", + p.display() + ); + } + + let mut summary = String::from("No agent sessions found.\n"); + summary.push_str(&probe_summary_line("claude", bundle.claude.is_some())); + summary.push_str(&probe_summary_line("gemini", bundle.gemini.is_some())); + summary.push_str(&probe_summary_line("codex", bundle.codex.is_some())); + summary.push_str(&probe_summary_line("opencode", bundle.opencode.is_some())); + summary.push_str(&probe_summary_line("pi", bundle.pi.is_some())); + eprint!("{summary}"); + anyhow::bail!("no shareable sessions"); +} + +fn probe_summary_line(name: &str, present: bool) -> String { + if present { + format!(" {name}: 0 sessions\n") + } else { + format!(" {name}: not configured\n") + } +} +``` + +In `cmd_pathbase.rs`, the `credentials_path` and `load_session` helpers are already `pub(crate)` — no change needed. + +`crate::cmd_pathbase` and `crate::cmd_cache` and `crate::cmd_export` and `crate::cmd_import` are all in scope by virtue of being sibling modules under `path_cli::`. Add `use` statements at the top of `cmd_share.rs` if rust-analyzer prefers — the qualified paths above also work. + +The `pick` call's `preview` template substitutes col 1 (harness) into the `path show` invocation. `path show` already supports each harness as a subcommand. For codex/opencode the `--project {2}` arg becomes `--project /work/proj` even though those subcommands don't accept `--project`; if a future version of `path show` errors on that, swap to per-harness preview templates. 
Today they accept `--session` regardless, and unknown args print to stderr (preview pane) without aborting the picker. + +If `path show codex --project foo --session bar` errors, drop the `--project` from the preview template entirely; the design allows that simplification. + +- [ ] **Step 8.5: Run the tests to verify they pass** + +```bash +cargo test -p path-cli --lib cmd_share +cargo test -p path-cli --test integration share_no_harness_non_tty_prints_recipe +cargo test -p path-cli --test integration share_explicit_args_uploads_via_anon +cargo clippy -p path-cli -- -D warnings +``` + +Expected: all green. + +- [ ] **Step 8.6: Manual smoke test of the picker (locally only — not CI)** + +```bash +cargo build -p path-cli +./target/debug/path share --url http://127.0.0.1:1 +``` + +Expected on a machine with installed harnesses and fzf: an fzf list opens; cwd-matching sessions appear at the top; selecting one fails the upload (port 1) but proves the picker → derive → upload wiring. Press Esc to cancel — exit code should be 0 with nothing on stdout. + +- [ ] **Step 8.7: Commit** + +```bash +git add crates/path-cli/src/cmd_share.rs crates/path-cli/tests/integration.rs +git commit -m "feat(path-cli): wire the unified \`path share\` picker + +Aggregates SessionRow values across installed harnesses, ranks +cwd-matches first, and pipes them through fzf. Falls back to a +manual-recipe message when fzf isn't available, and prints a probe +summary when no harness has any sessions to share." 
+``` + +--- + +## Task 9: Documentation — `CLAUDE.md` + +**Files:** +- Modify: `CLAUDE.md` + +- [ ] **Step 9.1: Add a `path share` line to the CLI usage block** + +In `CLAUDE.md`, under the `## CLI usage` section, after the `path import` group of examples and before the `# Export toolpath documents…` block, insert: + +```markdown +# Share an agent session to Pathbase (interactive picker, single-shot) +cargo run -p path-cli -- share +cargo run -p path-cli -- share --harness claude --session --project /path/to/project +cargo run -p path-cli -- share --url https://my-pathbase.example +``` + +- [ ] **Step 9.2: Add a "Things to know" entry** + +In the `## Things to know` bullet list, append: + +```markdown +- `path share` is the one-shot equivalent of `path import | path export pathbase`. It probes installed agent harnesses (claude/gemini/codex/opencode/pi), aggregates their sessions into a single fzf picker, and ranks rows whose project (claude/gemini/pi) or recorded cwd (codex/opencode) canonicalizes to the current directory at the top. `--harness` narrows the picker to one provider; `--harness X --session Y` (and `--project P` for keyed providers) skips the picker entirely. Pathbase flags (`--url`, `--anon`, `--repo`, `--slug`, `--public`) match `path export pathbase`. By default the derived doc is written to the cache like `import` does; pass `--no-cache` to skip. +``` + +- [ ] **Step 9.3: Build the workspace once more as a sanity check** + +```bash +cargo build --workspace +cargo test --workspace +cargo clippy --workspace -- -D warnings +``` + +Expected: clean. + +- [ ] **Step 9.4: Commit** + +```bash +git add CLAUDE.md +git commit -m "docs: document \`path share\` in CLAUDE.md" +``` + +--- + +## Done criteria + +- `path share --help` lists all flags from the design. +- `path share --harness X --session Y [--project P]` derives + uploads in one shot, with the share URL on stdout. 
+- `path share` (no flags, fzf available) opens a unified picker with cwd-matching rows ranked first. +- `path share` (no flags, no fzf) prints the manual recipe and exits 1. +- `path share --project P` filters to that project; if no rows match, exits 1 with a focused error message. +- All existing tests still pass; `cargo clippy --workspace -- -D warnings` is clean. +- The `CLAUDE.md` CLI block and Things-to-know list reflect the new command. diff --git a/docs/superpowers/specs/2026-05-07-path-share-command-design.md b/docs/superpowers/specs/2026-05-07-path-share-command-design.md new file mode 100644 index 0000000..ead0f58 --- /dev/null +++ b/docs/superpowers/specs/2026-05-07-path-share-command-design.md @@ -0,0 +1,265 @@ +# `path share` — interactive Pathbase upload + +**Status:** Design accepted, awaiting implementation plan. +**Date:** 2026-05-07 + +## Goal + +Collapse the existing two-step "derive a session, upload it" workflow +(`path import ` then `path export pathbase --input `) into a +single command that's optimised for the most common case: an +interactive user wants to share *one* agent session from the project +they're currently sitting in. + +Today this requires two commands and the user has to know which +harness ran the conversation. `path share` removes both of those. + +## Non-goals + +- Sharing git branches or GitHub PRs. Those flows already exist on + `path import` / `path export pathbase` and the user explicitly + scoped this command to "agent harnesses". +- Multi-session bundling. Multi-select is not exposed; one share, one + URL. +- Streaming uploads. The document is materialized in memory (and, by + default, in the cache) before posting. +- A `--include-thinking` flag for Gemini. Out of scope for v1. 
+
+## Surface
+
+```
+path share [--url <url>]
+           [--harness <harness>]
+           [--session <session-id>]
+           [--project <path>]
+           [--anon] [--repo <owner/name>] [--slug <slug>] [--public]
+           [--force] [--no-cache]
+```
+
+| Flag | Behavior |
+| --------------------- | -------- |
+| (no flags) | Unified picker over all detected harnesses, current-project rows ranked first. |
+| `--harness X` | Pre-filter the picker to one harness. |
+| `--harness X --session Y` | Skip the picker. `--project` required when X ∈ {claude, gemini, pi}. |
+| `--session` alone | Error: ambiguous without `--harness`. |
+| `--project P` | Filter the picker to sessions tied to that project (across all harnesses). |
+| `--no-cache` | Skip writing `~/.toolpath/documents/<id>.json`; derive in-memory only. |
+| `--force` | Allow overwriting an existing cache entry. Same semantics as `import --force`. |
+| `--url` | Override Pathbase server URL. Falls back to stored session, then `$PATHBASE_URL`, then `https://pathbase.dev`. |
+| `--anon` | Force anonymous endpoint; conflicts with `--repo`, `--public`. |
+| `--repo`, `--slug`, `--public` | Same semantics as `path export pathbase`. |
+
+When the user is logged out and passes none of `--anon` / `--repo` /
+`--public` / `--slug`, the upload falls through to the anonymous
+endpoint with a stderr notice — same default as `export pathbase`
+today.
+
+## Internal architecture
+
+### New module: `cmd_share.rs`
+
+Lives next to the other `cmd_*.rs` files in `crates/path-cli/src/`.
+Wired into `lib.rs` as a new `Commands::Share { args: cmd_share::ShareArgs }`
+arm.
+
+### Session aggregation
+
+```rust
+struct SessionRow {
+    harness: Harness,            // Claude | Gemini | Codex | Opencode | Pi
+    project: Option<String>,     // project path for keyed providers; None for codex/opencode
+    cwd: Option<String>,         // recorded cwd from the session (codex/opencode only)
+    session_id: String,
+    title: String,               // first_user_message or "(no prompt)"
+    last_activity: Option<DateTime<Utc>>,
+    message_count: usize,
+    matches_cwd: bool,           // computed during aggregation
+}
+
+fn gather_sessions(
+    cwd: &Path,
+    harness_filter: Option<Harness>,
+    project_filter: Option<&Path>,
+) -> Vec<SessionRow>;
+```
+
+**Detection-by-probing.** No explicit "is X installed" config. For
+each of the five harnesses, instantiate `*Convo::new()` and attempt
+the listing API. Skip silently when:
+- `home_dir()` resolves to None,
+- the harness's base directory does not exist,
+- listing returns Err with `io::ErrorKind::NotFound`,
+- listing returns an empty `Vec`.
+
+Any other error emits a single `warning: aggregation failed: <err>`
+to stderr and aggregation continues with the remaining harnesses.
+
+**Per-harness rules:**
+
+- **claude / gemini / pi** (project-keyed): `list_projects()` →
+  `list_conversation_metadata(p)` for each. `matches_cwd =
+  canonical(p) == canonical(cwd)`. Title from `first_user_message`.
+- **codex / opencode** (session-keyed): `list_sessions()`. Codex
+  stores `cwd` in rollout meta; opencode stores `directory`.
+  `matches_cwd = canonical(stored_cwd) == canonical(cwd)`. No
+  `project` field.
+
+`canonicalize` failure on either side falls back to byte-equal string
+compare; mismatch only affects ranking, never correctness.
+
+### Picker
+
+When `--session` is absent and stdin+stderr are TTYs and `fzf` is on
+PATH, the rows are formatted into a TSV stream and fed to `fzf`:
+
+```
+col 1: harness        (hidden, parser key)
+col 2: project_or_cwd (hidden, derive arg)
+col 3: session_id     (hidden, derive arg)
+col 4: harness symbol ("claude  " / "gemini  " / "codex   " / "opencode" / "pi      ")
+col 5: when           ("YYYY-MM-DD HH:MM" or "—")
+col 6: msgs           ("12 msgs")
+col 7: scope          ("·" for cwd-match, " " otherwise)
+col 8: project_short  (last two path segments)
+col 9: title          (truncated to 120 chars)
+```
+
+`fzf` shows columns 4..; preview command runs
+`path show <harness> [--project {2}] --session {3}`. Single-select
+only (no `--multi`). Header line: `share an agent session (Enter = upload to <host>)`.
+
+**Sort order before piping to fzf:**
+1. Rows with `matches_cwd = true`, descending by `last_activity`.
+2. Rows with `matches_cwd = false`, descending by `last_activity`.
+
+### Non-interactive paths
+
+- `fzf` missing or non-TTY: print a generic recipe (use `path import
+  <harness>` then `path export pathbase`) and exit 1. **No
+  most-recent fallback** — sharing is consequential enough to require
+  an explicit choice.
+- `--harness X --session Y` (and `--project P` for keyed providers):
+  skip aggregation entirely; derive directly.
+- `--harness X` alone: still uses the unified aggregator pre-filtered
+  to one harness; same fzf code path.
+- Esc / Ctrl-C in fzf: exit 130, print nothing.
+
+### Derive
+
+Three small `pub(crate)` cuts in `cmd_import.rs`:
+
+```rust
+pub(crate) struct DerivedDoc { pub cache_id: String, pub doc: Graph }
+
+pub(crate) fn derive_claude_pair(project: &str, session: &str) -> Result<DerivedDoc>;
+pub(crate) fn derive_gemini_pair(project: &str, session: &str, include_thinking: bool) -> Result<DerivedDoc>;
+pub(crate) fn derive_pi_pair(project: &str, session: &str, base: Option<PathBuf>) -> Result<DerivedDoc>;
+pub(crate) fn derive_codex_one(session: &str) -> Result<DerivedDoc>;
+pub(crate) fn derive_opencode_one(session: &str, no_snapshot_diffs: bool) -> Result<DerivedDoc>;
+```
+
+These extract the single-pair branches from the existing
+`derive_claude` / `derive_gemini` / etc. dispatch functions in
+`cmd_import.rs`. The existing dispatch keeps calling them — pure
+mechanical refactor, no behavior change.
+
+### Cache
+
+Default behavior: write the derived `Graph` to
+`~/.toolpath/documents/<id>.json` via the existing
+`write_cached(&id, &doc, force)`. Same `<harness>-<hash>` cache id
+format as `path import` — a `share`-produced entry is
+indistinguishable from an `import`-produced one and can be re-uploaded
+later with `export pathbase --input <id>`. `--no-cache` skips the
+write. `--force` allows overwrite.
+
+### Upload
+
+`cmd_export::run_pathbase` is split:
+
+```rust
+pub(crate) fn run_pathbase_inner(args: PathbaseExportArgs, body: &str) -> Result<UploadResult>;
+```
+
+`run_pathbase` becomes a thin wrapper that reads the cache file then
+calls the inner. `cmd_share` calls the inner directly with the
+in-memory body (`doc.to_json()`). Same `--anon` / `--repo` / `--slug`
+/ `--public` / `--url` semantics inherited from `export pathbase`.
+
+`UploadResult` carries the share URL and a short summary string for
+stderr.
+
+## Output contract
+
+- **stdout**: the share URL, exactly one line. Scriptable.
+- **stderr**: progress messages — + ``` + Picked claude session "Add share command" + Imported claude session → claude-abc (omitted with --no-cache) + Uploaded → alex/pathstash/ (secret path, 12 KB) + ``` +- The cache id is **not** echoed to stdout (unlike `path import`) + because the share URL is the primary product. The cache id appears + in the stderr "Imported …" line, which is enough to find it via + `cache ls`. + +**Exit codes.** 0 success; 130 user cancelled fzf; 1 anything else. + +## Error handling + +| Situation | Behavior | +| --- | --- | +| `home_dir()` None / harness base dir missing | Skip silently. | +| Per-file metadata read fails inside a harness | Underlying provider already handles this per-file; we don't second-guess. | +| Whole-harness listing returns Err other than NotFound | Single `warning: ...` to stderr; continue with other harnesses. | +| No sessions found anywhere | Print probe summary (one line per harness, with path and count or "not found"); exit 1. | +| No sessions match `--project P` | Print message naming the project; suggest running without `--project`; exit 1. | +| `--session` without `--harness` | Clap-level error (clap `requires = "harness"`). | +| `--anon` with `--repo`/`--public` | Clap-level conflict (copy from `export pathbase`). | +| `--harness ` + `--session` without `--project` | Runtime error: `"--project required when --harness is claude/gemini/pi and --session is set"`. | +| Logged out, no `--anon`, no auth-requiring flags | Anonymous upload with stderr notice (matches `export pathbase`). | +| Logged out, `--repo`/`--public`/`--slug` set | Error: "log in first" (inherited from `export pathbase`). | +| Logged in, `--url` host differs from stored session host | stderr warning, attempt anyway (inherited from `export pathbase`). | +| Server applies different `is_public` than requested | stderr note; share URL form follows what was actually applied (inherited). | + +## Testing + +### Unit tests in `cmd_share.rs` + +1. 
`gather_sessions` produces rows in the right order (cwd-match first, then by recency) — fixture builds tempdir layouts for two or three harnesses. +2. `gather_sessions` skips harnesses whose home dir is missing (no panic, no warning). +3. `gather_sessions` honors `--harness` and `--project` filters. +4. `parse_picker_row` round-trips `(harness, project, session_id)` through the TSV format. +5. `matches_cwd` uses canonicalized paths (test via temp-dir symlink that both forms match). + +Reuses the existing `setup_claude_manager` / `setup_claude_manager_with_two_sessions` helpers from `cmd_import` tests; adds a `setup_multi_harness` helper that wires two or three fake home dirs at once. + +### Integration tests in `crates/path-cli/tests/` + +1. `share_explicit_args.rs` — `path share --harness claude --project /tmp/x --session abc --no-cache --anon --url http://127.0.0.1:` against the existing `MockServer`. Asserts a single URL on stdout and that the request body is the derived Graph. +2. `share_no_harness_no_tty.rs` — non-TTY, no flags → exits 1 with the recipe message; nothing on stdout. +3. `share_filters_by_project.rs` — explicit `--project P` with no matches → exits 1 with the per-project not-found message. +4. `share_logged_out_anon_default.rs` — no credentials, no `--anon` → uploads via anon endpoint; stderr carries the "not logged in — uploading anonymously" notice. +5. `share_writes_cache_by_default.rs` — default behavior, explicit args → a file appears in the test config dir's `documents/` matching the derived cache id. +6. `share_no_cache_skips_write.rs` — same with `--no-cache` → no file appears. + +### Out of scope for tests + +- The fzf-driven path. Not exercised in CI (matches the existing import tests). The aggregator — the genuinely new logic — is fully unit-tested. + +### Documentation + +- A one-line entry in `CLAUDE.md`'s "Things to know" pointing at + `path share` and the unified-picker behavior, alongside the existing + fzf picker docs. 
+- A short paragraph in the CLI usage block at the top of `CLAUDE.md`. +- A `path share` section in any place README/CLI docs enumerate + commands. + +## Open questions + +None blocking. Future: +- `--include-thinking` could be added if Gemini sharing is common. +- Multi-select bundling could be added later if a user pattern emerges. +- A `--web` flag (or `path share --open`) that opens the resulting + URL via `open` / `xdg-open` is a small future addition. diff --git a/scripts/refresh-pathbase-openapi.sh b/scripts/refresh-pathbase-openapi.sh index 4c5c11b..85fc574 100755 --- a/scripts/refresh-pathbase-openapi.sh +++ b/scripts/refresh-pathbase-openapi.sh @@ -19,6 +19,69 @@ _tmp="$(mktemp -t pathbase-openapi.XXXXXX.json)" trap 'rm -f "${_tmp}"' EXIT curl -fsSL "${_url}/api/v1/openapi.json" -o "${_tmp}" -# Pretty-print with jq for stable diffs. -jq . "${_tmp}" > "${_dest}" -echo "refresh: wrote ${_dest} ($(wc -l < "${_dest}") lines)" +# Pathbase emits OpenAPI 3.1 (`"type": ["string", "null"]`) but our +# generator stack (progenitor 0.14 / openapiv3 2.x) only understands +# 3.0 (`"type": "string", "nullable": true`). Down-convert nullable +# unions to 3.0 form so the build doesn't panic on `not yet +# implemented: invalid type: null`. +# +# Only handles the single-non-null + "null" pattern. Multi-type +# unions are rejected explicitly so we notice if the spec ever uses +# something more exotic. +jq ' + def downconvert_type_array: + if type == "object" + and (has("type")) + and (.type | type) == "array" + then + if (.type | any(. == "null")) and (.type | map(select(. != "null")) | length) == 1 + then .type = (.type | map(select(. != "null"))[0]) | .nullable = true + elif (.type | any(. == "null")) + then + error("multi-type nullable union not supported by openapiv3 0.x: \(.type)") + else . + end + else . end; + + # `oneOf: [{type: null}, {$ref: X}]` (or in either order) is OpenAPI 3.1 + # idiom for "nullable ref". Convert to 3.0: `{nullable: true, allOf: [{$ref: X}]}`. 
+ def downconvert_nullable_ref: + if type == "object" + and has("oneOf") + and (.oneOf | type) == "array" + and (.oneOf | length) == 2 + and (.oneOf | any(. == {"type": "null"})) + and (.oneOf | any(has("$ref"))) + then + (.oneOf | map(select(has("$ref"))) | .[0]) as $ref_obj + | del(.oneOf) + | .nullable = true + | .allOf = [{"$ref": $ref_obj["$ref"]}] + | (if $ref_obj.description then .description = $ref_obj.description else . end) + else . end; + + # Progenitor 0.14 only handles JSON request/response bodies. Drop + # operations that use non-JSON content types (e.g. application/x-ndjson + # for streaming endpoints) so the build doesnt panic on + # `UnexpectedFormat("unexpected content type: ...")`. The CLI doesnt + # use these surfaces; if it ever needs them, switch to a hand-rolled + # call (see api_redeem for the pattern). + def has_unsupported_content(op): + ((op.requestBody.content // {}) | keys | any(. != "application/json")) + or ((op.responses // {}) | to_entries | any( + ((.value.content // {}) | keys | any(. != "application/json")) + )); + + def strip_unsupported_operations: + .paths |= with_entries( + .value |= with_entries( + select( + (.key | IN("get", "put", "post", "delete", "patch", "options", "head", "trace") | not) + or (has_unsupported_content(.value) | not) + ) + ) + ) | .paths |= with_entries(select((.value | length) > 0)); + + walk(downconvert_type_array | downconvert_nullable_ref) | strip_unsupported_operations +' "${_tmp}" > "${_dest}" +echo "refresh: wrote ${_dest} ($(wc -l < "${_dest}") lines, OpenAPI 3.0 form)"