From 23aa9760039314d7099b4940f15bea565918bb22 Mon Sep 17 00:00:00 2001 From: Melioo Date: Thu, 29 Jan 2026 16:31:19 +0100 Subject: [PATCH 01/39] feat: WIP PR Message Add PR_MESSAGE markdown file (WIP) to store Pull Request message back to Weston's origin. --- PR_MESSAGE.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 PR_MESSAGE.md diff --git a/PR_MESSAGE.md b/PR_MESSAGE.md new file mode 100644 index 0000000..0cf3285 --- /dev/null +++ b/PR_MESSAGE.md @@ -0,0 +1,18 @@ +# Hey Weston πŸ‘‹ + +First, thank you for building and maintaining this repository β€” it’s been genuinely useful for distributing rules and skills across multiple projects based on different needs. + +While using the tool, I found a couple of areas where additional functionality would be extremely valuable: + +## πŸͺ Hooks (Cursor, Claude, ...) + +The Hooks documentations for the designated IDEs enforce the scripts running the Hooks to be marked as executable with `chmod +x`. + +To complete this functionality, I used already existing pipeline and introduced new **Asset Types**: + +- `cursor_hooks_root` +- `claude_hooks_root` + +## πŸ€– Copilot Rule convert + +... 
From dfc2f461ce7f79e9ee6c62310b87b846081eb434 Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 11:58:14 +0100 Subject: [PATCH 02/39] feat: Introduce Cursor and Claude hooks as new asset kinds --- README.md | 2 ++ docs/architecture.md | 3 +++ docs/catalog-search-spec.md | 2 ++ src/error.rs | 2 +- src/manifest.rs | 8 ++++++++ 5 files changed, 16 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index e4920cf..fb76c6e 100644 --- a/README.md +++ b/README.md @@ -208,7 +208,9 @@ entries: | `agents_md` | Single AGENTS.md file | `./AGENTS.md` | | `composite_agents_md` | Merge multiple markdown files into one | `./AGENTS.md` | | `cursor_rules` | Directory of Cursor rules | `./.cursor/rules/` | +| `cursor_hooks` | Directory of Cursor hooks | `./.cursor/hooks/` | | `cursor_skills_root` | Directory with skill subdirs | `./.cursor/skills/` | +| `claude_hooks` | Directory of Claude hooks | `./.claude/hooks/` | | `agent_skill` | Claude agent skill directory | `./.claude/skills/` | ### Source Types diff --git a/docs/architecture.md b/docs/architecture.md index 8cd2830..1344f03 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -195,9 +195,12 @@ pub struct Entry { pub enum AssetKind { CursorRules, + CursorHooks, CursorSkillsRoot, + ClaudeHooks, AgentsMd, AgentSkill, + CompositeAgentsMd, } ``` diff --git a/docs/catalog-search-spec.md b/docs/catalog-search-spec.md index 9cd46c0..6940c69 100644 --- a/docs/catalog-search-spec.md +++ b/docs/catalog-search-spec.md @@ -33,7 +33,9 @@ entries: **AssetKind** values: - `cursor_rules` - Individual `.mdc` rule files +- `cursor_hooks` - Individual hook scripts - `cursor_skills_root` - Skill folders for Cursor +- `claude_hooks` - Individual hook scripts - `agents_md` - AGENTS.md files - `agent_skill` - Agent skill folders (per agentskills.io spec) diff --git a/src/error.rs b/src/error.rs index 1110d2a..e59a845 100644 --- a/src/error.rs +++ b/src/error.rs @@ -31,7 +31,7 @@ pub enum ApsError { #[error("Invalid 
asset kind: {kind}")] #[diagnostic( code(aps::manifest::invalid_kind), - help("Valid kinds are: cursor_rules, cursor_skills_root, agents_md") + help("Valid kinds are: cursor_rules, cursor_hooks, cursor_skills_root, claude_hooks, agents_md, composite_agents_md, agent_skill") )] InvalidAssetKind { kind: String }, diff --git a/src/manifest.rs b/src/manifest.rs index 966e1a2..9fe9599 100644 --- a/src/manifest.rs +++ b/src/manifest.rs @@ -91,8 +91,12 @@ impl Entry { pub enum AssetKind { /// Cursor rules directory CursorRules, + /// Cursor hooks directory + CursorHooks, /// Cursor skills root directory CursorSkillsRoot, + /// Claude hooks directory + ClaudeHooks, /// AGENTS.md file AgentsMd, /// Agent skill directory (per agentskills.io spec) @@ -106,7 +110,9 @@ impl AssetKind { pub fn default_dest(&self) -> PathBuf { match self { AssetKind::CursorRules => PathBuf::from(".cursor/rules"), + AssetKind::CursorHooks => PathBuf::from(".cursor/hooks"), AssetKind::CursorSkillsRoot => PathBuf::from(".cursor/skills"), + AssetKind::ClaudeHooks => PathBuf::from(".claude/hooks"), AssetKind::AgentsMd => PathBuf::from("AGENTS.md"), AssetKind::AgentSkill => PathBuf::from(".claude/skills"), AssetKind::CompositeAgentsMd => PathBuf::from("AGENTS.md"), @@ -118,7 +124,9 @@ impl AssetKind { pub fn from_str(s: &str) -> Result { match s { "cursor_rules" => Ok(AssetKind::CursorRules), + "cursor_hooks" => Ok(AssetKind::CursorHooks), "cursor_skills_root" => Ok(AssetKind::CursorSkillsRoot), + "claude_hooks" => Ok(AssetKind::ClaudeHooks), "agents_md" => Ok(AssetKind::AgentsMd), "agent_skill" => Ok(AssetKind::AgentSkill), "composite_agents_md" => Ok(AssetKind::CompositeAgentsMd), From 8c3098994e6007f0348d0b0226aa0cbaab440ac4 Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 11:58:15 +0100 Subject: [PATCH 03/39] feat: Implement hooks validation module --- src/error.rs | 20 ++++++ src/hooks.rs | 189 +++++++++++++++++++++++++++++++++++++++++++++++++++ src/main.rs | 1 + 3 files changed, 210 
insertions(+) create mode 100644 src/hooks.rs diff --git a/src/error.rs b/src/error.rs index e59a845..81f4be6 100644 --- a/src/error.rs +++ b/src/error.rs @@ -140,6 +140,26 @@ pub enum ApsError { #[error("Failed to compose markdown files: {message}")] #[diagnostic(code(aps::compose::error))] ComposeError { message: String }, + + #[error("Hooks directory should be named 'hooks': {path}")] + #[diagnostic(code(aps::hooks::invalid_directory))] + InvalidHooksDirectory { path: PathBuf }, + + #[error("Hooks config not found at {path}")] + #[diagnostic(code(aps::hooks::config_missing))] + MissingHooksConfig { path: PathBuf }, + + #[error("Invalid hooks config at {path}: {message}")] + #[diagnostic(code(aps::hooks::config_invalid))] + InvalidHooksConfig { path: PathBuf, message: String }, + + #[error("Hooks config at {path} is missing a 'hooks' section")] + #[diagnostic(code(aps::hooks::missing_section))] + MissingHooksSection { path: PathBuf }, + + #[error("Hook script not found: {path}")] + #[diagnostic(code(aps::hooks::script_not_found))] + HookScriptNotFound { path: PathBuf }, } impl ApsError { diff --git a/src/hooks.rs b/src/hooks.rs new file mode 100644 index 0000000..1b6c6d9 --- /dev/null +++ b/src/hooks.rs @@ -0,0 +1,189 @@ +use crate::error::{ApsError, Result}; +use serde_yaml::Value; +use std::collections::HashSet; +use std::path::{Path, PathBuf}; + +#[derive(Debug, Clone, Copy)] +pub enum HookKind { + Cursor, + Claude, +} + +pub fn validate_cursor_hooks(hooks_dir: &Path, strict: bool) -> Result> { + validate_hooks(HookKind::Cursor, hooks_dir, strict) +} + +pub fn validate_claude_hooks(hooks_dir: &Path, strict: bool) -> Result> { + validate_hooks(HookKind::Claude, hooks_dir, strict) +} + +fn validate_hooks(kind: HookKind, hooks_dir: &Path, strict: bool) -> Result> { + let mut warnings = Vec::new(); + + let config_path = hooks_dir.join(config_filename(kind)); + if !config_path.exists() { + warn_or_error( + &mut warnings, + strict, + ApsError::MissingHooksConfig { + 
path: config_path.clone(), + }, + )?; + return Ok(warnings); + } + + let config_value = match read_hooks_config(&config_path) { + Ok(value) => value, + Err(err) => { + warn_or_error(&mut warnings, strict, err)?; + return Ok(warnings); + } + }; + + let hooks_section = match get_hooks_section(&config_value) { + Some(section) => section, + None => { + warn_or_error( + &mut warnings, + strict, + ApsError::MissingHooksSection { + path: config_path.clone(), + }, + )?; + return Ok(warnings); + } + }; + + let commands = collect_hook_commands(hooks_section); + let referenced_scripts = collect_hook_script_paths(&commands, kind); + + for rel_path in referenced_scripts { + let script_path = hooks_dir.join(rel_path); + if !script_path.exists() { + warn_or_error( + &mut warnings, + strict, + ApsError::HookScriptNotFound { path: script_path }, + )?; + } + } + + Ok(warnings) +} + +fn config_filename(kind: HookKind) -> &'static str { + match kind { + HookKind::Cursor => "hooks.json", + HookKind::Claude => "settings.json", + } +} + +fn read_hooks_config(path: &Path) -> Result { + let content = std::fs::read_to_string(path) + .map_err(|e| ApsError::io(e, "Failed to read hooks config"))?; + + serde_yaml::from_str(&content).map_err(|e| ApsError::InvalidHooksConfig { + path: path.to_path_buf(), + message: e.to_string(), + }) +} + +fn get_hooks_section(config: &Value) -> Option<&Value> { + let map = match config { + Value::Mapping(map) => map, + _ => return None, + }; + + map.get(Value::String("hooks".to_string())) +} + +fn collect_hook_commands(section: &Value) -> Vec { + let mut commands = Vec::new(); + collect_command_values(section, &mut commands); + commands +} + +fn collect_command_values(value: &Value, commands: &mut Vec) { + match value { + Value::Mapping(map) => { + for (key, val) in map { + if matches!(key, Value::String(k) if k == "command") { + if let Value::String(command) = val { + commands.push(command.clone()); + continue; + } + } + collect_command_values(val, commands); 
+ } + } + Value::Sequence(seq) => { + for val in seq { + collect_command_values(val, commands); + } + } + _ => {} + } +} + +fn collect_hook_script_paths(commands: &[String], kind: HookKind) -> HashSet { + let mut scripts = HashSet::new(); + let prefixes = match kind { + HookKind::Cursor => vec![ + ".cursor/hooks/", + "./.cursor/hooks/", + "hooks/", + "./hooks/", + ".cursor\\hooks\\", + ".\\.cursor\\hooks\\", + "hooks\\", + ".\\hooks\\", + ], + HookKind::Claude => vec![ + ".claude/hooks/", + "./.claude/hooks/", + "$CLAUDE_PROJECT_DIR/.claude/hooks/", + "${CLAUDE_PROJECT_DIR}/.claude/hooks/", + ".claude\\hooks\\", + ".\\.claude\\hooks\\", + "$CLAUDE_PROJECT_DIR\\.claude\\hooks\\", + "${CLAUDE_PROJECT_DIR}\\.claude\\hooks\\", + ], + }; + + for command in commands { + for token in command.split_whitespace() { + let token = trim_token(token); + for prefix in &prefixes { + if let Some(rel_path) = extract_relative_path(token, prefix) { + scripts.insert(PathBuf::from(rel_path)); + } + } + } + } + + scripts +} + +fn extract_relative_path(token: &str, prefix: &str) -> Option { + let position = token.find(prefix)?; + let mut rel = &token[position + prefix.len()..]; + rel = rel.trim_matches(|c: char| matches!(c, '"' | '\'' | ';' | ')' | '(' | ',')); + if rel.is_empty() { + None + } else { + Some(rel.to_string()) + } +} + +fn trim_token(token: &str) -> &str { + token.trim_matches(|c: char| matches!(c, '"' | '\'' | ';' | ')' | '(' | ',')) +} + +fn warn_or_error(warnings: &mut Vec, strict: bool, error: ApsError) -> Result<()> { + if strict { + return Err(error); + } + + warnings.push(error.to_string()); + Ok(()) +} diff --git a/src/main.rs b/src/main.rs index 2fe502b..30d41f9 100644 --- a/src/main.rs +++ b/src/main.rs @@ -5,6 +5,7 @@ mod cli; mod commands; mod compose; mod error; +mod hooks; mod install; mod lockfile; mod manifest; From 6ea84965ea90f0e88952764a872c5e044aedf9d1 Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 11:58:15 +0100 Subject: [PATCH 04/39] feat: 
Implement hooks installation helpers and conflict handling --- src/install.rs | 304 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 304 insertions(+) diff --git a/src/install.rs b/src/install.rs index 3c4b7d0..be56344 100644 --- a/src/install.rs +++ b/src/install.rs @@ -11,6 +11,7 @@ use dialoguer::Confirm; use std::io::IsTerminal; use std::path::{Path, PathBuf}; use tracing::{debug, info}; +use walkdir::WalkDir; /// Normalize a path by removing trailing slashes /// This prevents issues with path operations like parent() @@ -77,6 +78,55 @@ fn handle_conflict( Ok(true) } +/// Handle conflict detection and resolution for a set of specific paths. +fn handle_partial_conflict( + dest_path: &Path, + conflict_paths: &[PathBuf], + manifest_dir: &Path, + options: &InstallOptions, +) -> Result { + if conflict_paths.is_empty() { + return Ok(true); + } + + if options.dry_run { + println!( + "[dry-run] Would overwrite {} item(s) under {:?}", + conflict_paths.len(), + dest_path + ); + return Ok(false); + } + + let should_overwrite = if options.yes { + true + } else if std::io::stdin().is_terminal() { + Confirm::new() + .with_prompt(format!( + "Overwrite {} existing item(s) under {:?}?", + conflict_paths.len(), + dest_path + )) + .default(false) + .interact() + .map_err(|_| ApsError::Cancelled)? + } else { + return Err(ApsError::RequiresYesFlag); + }; + + if !should_overwrite { + info!("User declined to overwrite content under {:?}", dest_path); + return Err(ApsError::Cancelled); + } + + for path in conflict_paths { + let backup_path = create_backup(manifest_dir, path)?; + println!("Created backup at: {:?}", backup_path); + } + + Ok(true) +} + /// Result of an install operation pub struct InstallResult { pub id: String, @@ -821,3 +871,257 @@ fn copy_directory(src: &Path, dst: &Path) -> Result<()> { debug!("Copied directory {:?} to {:?}", src, dst); Ok(()) } + +/// Recursively copy a directory without deleting existing destination content. 
+fn copy_directory_merge(src: &Path, dst: &Path) -> Result<()> { + // Normalize paths to handle trailing slashes + let src = normalize_path(src); + let dst = normalize_path(dst); + + if !dst.exists() { + std::fs::create_dir_all(&dst) + .map_err(|e| ApsError::io(e, format!("Failed to create directory {:?}", dst)))?; + } + + for entry in WalkDir::new(&src) { + let entry = entry.map_err(|e| { + ApsError::io( + std::io::Error::new(std::io::ErrorKind::Other, e), + "Failed to traverse source directory", + ) + })?; + let path = entry.path(); + let rel = path.strip_prefix(&src).map_err(|e| { + ApsError::io( + std::io::Error::new(std::io::ErrorKind::Other, e.to_string()), + format!("Failed to compute relative path: {}", e), + ) + })?; + if rel.as_os_str().is_empty() { + continue; + } + let dest_path = dst.join(rel); + + if entry.file_type().is_dir() { + if dest_path.exists() { + let meta = dest_path.symlink_metadata().map_err(|e| { + ApsError::io(e, format!("Failed to read metadata for {:?}", dest_path)) + })?; + if meta.file_type().is_symlink() || dest_path.is_file() { + if dest_path.is_dir() { + std::fs::remove_dir_all(&dest_path).map_err(|e| { + ApsError::io(e, format!("Failed to remove directory {:?}", dest_path)) + })?; + } else { + std::fs::remove_file(&dest_path).map_err(|e| { + ApsError::io(e, format!("Failed to remove file {:?}", dest_path)) + })?; + } + } + } + std::fs::create_dir_all(&dest_path).map_err(|e| { + ApsError::io(e, format!("Failed to create directory {:?}", dest_path)) + })?; + } else { + if let Some(parent) = dest_path.parent() { + if !parent.exists() { + std::fs::create_dir_all(parent).map_err(|e| { + ApsError::io(e, format!("Failed to create directory {:?}", parent)) + })?; + } + } + if dest_path.exists() { + let meta = dest_path.symlink_metadata().map_err(|e| { + ApsError::io(e, format!("Failed to read metadata for {:?}", dest_path)) + })?; + if meta.file_type().is_symlink() { + std::fs::remove_file(&dest_path).map_err(|e| { + ApsError::io(e, 
format!("Failed to remove file {:?}", dest_path)) + })?; + } else if dest_path.is_dir() { + std::fs::remove_dir_all(&dest_path).map_err(|e| { + ApsError::io(e, format!("Failed to remove directory {:?}", dest_path)) + })?; + } + } + std::fs::copy(path, &dest_path) + .map_err(|e| ApsError::io(e, format!("Failed to copy {:?}", path)))?; + } + } + + debug!("Merged directory {:?} into {:?}", src, dst); + Ok(()) +} + +/// Make all .sh scripts under a directory executable (recursive). +fn make_shell_scripts_executable(dir: &Path) -> Result<()> { + if !dir.exists() { + return Ok(()); + } + + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + use walkdir::WalkDir; + for entry in WalkDir::new(dir) { + let entry = entry.map_err(|e| { + ApsError::io( + std::io::Error::new(std::io::ErrorKind::Other, e), + "Failed to traverse hooks directory", + ) + })?; + if !entry.file_type().is_file() { + continue; + } + if entry.path().extension().and_then(|ext| ext.to_str()) != Some("sh") { + continue; + } + + let metadata = entry.path().metadata().map_err(|e| { + ApsError::io(e, format!("Failed to read metadata for {:?}", entry.path())) + })?; + let mut permissions = metadata.permissions(); + let mode = permissions.mode(); + let new_mode = mode | 0o100 | 0o010; + if new_mode != mode { + permissions.set_mode(new_mode); + std::fs::set_permissions(entry.path(), permissions).map_err(|e| { + ApsError::io( + e, + format!("Failed to set permissions for {:?}", entry.path()), + ) + })?; + } + } + } + + #[cfg(windows)] + { + let _ = dir; + } + + Ok(()) +} + +fn hooks_config_paths( + kind: &AssetKind, + source_hooks_dir: &Path, + dest_hooks_dir: &Path, +) -> Result> { + let filename = match kind { + AssetKind::CursorHooks => "hooks.json", + AssetKind::ClaudeHooks => "settings.json", + _ => return Ok(None), + }; + + let source_parent = + source_hooks_dir + .parent() + .ok_or_else(|| ApsError::InvalidHooksDirectory { + path: source_hooks_dir.to_path_buf(), + })?; + let dest_parent = 
dest_hooks_dir + .parent() + .ok_or_else(|| ApsError::InvalidHooksDirectory { + path: dest_hooks_dir.to_path_buf(), + })?; + + Ok(Some(( + source_parent.join(filename), + dest_parent.join(filename), + ))) +} + +fn sync_hooks_config( + kind: &AssetKind, + source_hooks_dir: &Path, + dest_hooks_dir: &Path, + use_symlink: bool, +) -> Result<()> { + let Some((source_config, dest_config)) = + hooks_config_paths(kind, source_hooks_dir, dest_hooks_dir)? + else { + return Ok(()); + }; + + if !source_config.exists() { + return Ok(()); + } + + if let Some(parent) = dest_config.parent() { + if !parent.exists() { + std::fs::create_dir_all(parent) + .map_err(|e| ApsError::io(e, format!("Failed to create directory {:?}", parent)))?; + } + } + + if use_symlink { + create_symlink(&source_config, &dest_config)?; + return Ok(()); + } + + if dest_config.exists() { + let meta = dest_config.symlink_metadata().map_err(|e| { + ApsError::io(e, format!("Failed to read metadata for {:?}", dest_config)) + })?; + if meta.file_type().is_symlink() { + std::fs::remove_file(&dest_config) + .map_err(|e| ApsError::io(e, format!("Failed to remove file {:?}", dest_config)))?; + } else if dest_config.is_dir() { + std::fs::remove_dir_all(&dest_config).map_err(|e| { + ApsError::io(e, format!("Failed to remove directory {:?}", dest_config)) + })?; + } + } + + std::fs::copy(&source_config, &dest_config).map_err(|e| { + ApsError::io( + e, + format!("Failed to copy {:?} to {:?}", source_config, dest_config), + ) + })?; + + Ok(()) +} + +fn collect_hook_conflicts(source: &Path, dest: &Path) -> Result> { + let mut conflicts = Vec::new(); + + for entry in WalkDir::new(source) { + let entry = entry.map_err(|e| { + ApsError::io( + std::io::Error::new(std::io::ErrorKind::Other, e), + "Failed to traverse source directory", + ) + })?; + let path = entry.path(); + let rel = path.strip_prefix(source).map_err(|e| { + ApsError::io( + std::io::Error::new(std::io::ErrorKind::Other, e.to_string()), + format!("Failed to 
compute relative path: {}", e), + ) + })?; + if rel.as_os_str().is_empty() { + continue; + } + let dest_path = dest.join(rel); + if !dest_path.exists() { + continue; + } + let meta = dest_path + .symlink_metadata() + .map_err(|e| ApsError::io(e, format!("Failed to read metadata for {:?}", dest_path)))?; + if meta.file_type().is_symlink() { + continue; + } + if entry.file_type().is_dir() { + if dest_path.is_file() { + conflicts.push(dest_path); + } + } else if dest_path.is_file() || dest_path.is_dir() { + conflicts.push(dest_path); + } + } + + Ok(conflicts) +} From 9b1c1c93e52812bb230af7ed089799758bfa7a4d Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 11:58:15 +0100 Subject: [PATCH 05/39] feat: Integrate hooks validation and installation into core logic --- src/commands.rs | 17 ++++++++ src/install.rs | 113 +++++++++++++++++++++++++++++++++++++++--------- 2 files changed, 110 insertions(+), 20 deletions(-) diff --git a/src/commands.rs b/src/commands.rs index fb5655b..e9719c2 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -3,6 +3,7 @@ use crate::cli::{ CatalogGenerateArgs, InitArgs, ManifestFormat, StatusArgs, SyncArgs, ValidateArgs, }; use crate::error::{ApsError, Result}; +use crate::hooks::{validate_claude_hooks, validate_cursor_hooks}; use crate::install::{install_composite_entry, install_entry, InstallOptions, InstallResult}; use crate::lockfile::{display_status, Lockfile}; use crate::manifest::{ @@ -397,6 +398,22 @@ pub fn cmd_validate(args: ValidateArgs) -> Result<()> { )?; warnings.extend(skill_warnings); } + if entry.kind == AssetKind::CursorHooks { + let hook_warnings = + validate_cursor_hooks(&resolved.source_path, args.strict)?; + for warning in &hook_warnings { + println!(" Warning: {}", warning); + } + warnings.extend(hook_warnings); + } + if entry.kind == AssetKind::ClaudeHooks { + let hook_warnings = + validate_claude_hooks(&resolved.source_path, args.strict)?; + for warning in &hook_warnings { + println!(" Warning: {}", warning); 
+ } + warnings.extend(hook_warnings); + } // Format output based on source type if let Some(git_info) = &resolved.git_info { diff --git a/src/install.rs b/src/install.rs index be56344..a1e7dd2 100644 --- a/src/install.rs +++ b/src/install.rs @@ -4,6 +4,7 @@ use crate::compose::{ compose_markdown, read_source_file, write_composed_file, ComposeOptions, ComposedSource, }; use crate::error::{ApsError, Result}; +use crate::hooks::{validate_claude_hooks, validate_cursor_hooks}; use crate::lockfile::{LockedEntry, Lockfile}; use crate::manifest::{AssetKind, Entry}; use crate::sources::{clone_at_commit, get_remote_commit_sha, GitInfo, ResolvedSource}; @@ -377,7 +378,11 @@ pub fn install_entry( let should_check_conflict = match entry.kind { AssetKind::AgentsMd => true, // Single file - always check AssetKind::CompositeAgentsMd => true, // Composite file - always check - AssetKind::CursorRules | AssetKind::CursorSkillsRoot | AssetKind::AgentSkill => { + AssetKind::CursorRules + | AssetKind::CursorHooks + | AssetKind::CursorSkillsRoot + | AssetKind::ClaudeHooks + | AssetKind::AgentSkill => { // For directory assets with symlinks, we add files to the directory // without backing up existing content from other sources !resolved.use_symlink @@ -385,19 +390,55 @@ pub fn install_entry( }; if should_check_conflict { - let should_proceed = handle_conflict(&dest_path, manifest_dir, options)?; - if !should_proceed { - // dry-run mode, skip actual installation but continue + if matches!(entry.kind, AssetKind::CursorHooks | AssetKind::ClaudeHooks) { + let mut conflicts = collect_hook_conflicts(&resolved.source_path, &dest_path)?; + if let Some((source_config, dest_config)) = + hooks_config_paths(&entry.kind, &resolved.source_path, &dest_path)? 
+ { + if source_config.exists() + && dest_config.exists() + && !dest_config + .symlink_metadata() + .map(|m| m.file_type().is_symlink()) + .unwrap_or(false) + { + conflicts.push(dest_config); + } + } + conflicts.sort(); + conflicts.dedup(); + let should_proceed = + handle_partial_conflict(&dest_path, &conflicts, manifest_dir, options)?; + if !should_proceed { + // dry-run mode, skip actual installation but continue + } + } else { + let should_proceed = handle_conflict(&dest_path, manifest_dir, options)?; + if !should_proceed { + // dry-run mode, skip actual installation but continue + } } } // Validate skills if this is a skills root let mut warnings = Vec::new(); if entry.kind == AssetKind::CursorSkillsRoot { - warnings = validate_skills_root(&resolved.source_path, options.strict)?; - for warning in &warnings { - println!("Warning: {}", warning); - } + warnings.extend(validate_skills_root(&resolved.source_path, options.strict)?); + } + if entry.kind == AssetKind::CursorHooks { + warnings.extend(validate_cursor_hooks( + &resolved.source_path, + options.strict, + )?); + } + if entry.kind == AssetKind::ClaudeHooks { + warnings.extend(validate_claude_hooks( + &resolved.source_path, + options.strict, + )?); + } + for warning in &warnings { + println!("Warning: {}", warning); } // Perform the install @@ -413,6 +454,18 @@ pub fn install_entry( )? 
}; + if !options.dry_run && matches!(entry.kind, AssetKind::CursorHooks | AssetKind::ClaudeHooks) { + sync_hooks_config( + &entry.kind, + &resolved.source_path, + &dest_path, + resolved.use_symlink, + )?; + if !resolved.use_symlink { + make_shell_scripts_executable(&dest_path)?; + } + } + // Create locked entry from resolved source let locked_entry = resolved.to_locked_entry(&dest_path, checksum, symlinked_items); @@ -568,7 +621,11 @@ fn install_asset( message: "Composite entries should use install_composite_entry".to_string(), }); } - AssetKind::CursorRules | AssetKind::CursorSkillsRoot | AssetKind::AgentSkill => { + AssetKind::CursorRules + | AssetKind::CursorHooks + | AssetKind::CursorSkillsRoot + | AssetKind::ClaudeHooks + | AssetKind::AgentSkill => { if use_symlink { if include.is_empty() { // Symlink individual files (not the directory itself) @@ -605,23 +662,35 @@ fn install_asset( } else { // Copy behavior if include.is_empty() { - copy_directory(source, dest)?; + if matches!(kind, AssetKind::CursorHooks | AssetKind::ClaudeHooks) { + copy_directory_merge(source, dest)?; + } else { + copy_directory(source, dest)?; + } } else { // Filter and copy individual items let items = filter_by_prefix(source, include)?; // Ensure dest exists - if dest.exists() { - std::fs::remove_dir_all(dest).map_err(|e| { - ApsError::io( - e, - format!("Failed to remove existing directory {:?}", dest), - ) + if matches!(kind, AssetKind::CursorHooks | AssetKind::ClaudeHooks) { + if !dest.exists() { + std::fs::create_dir_all(dest).map_err(|e| { + ApsError::io(e, format!("Failed to create directory {:?}", dest)) + })?; + } + } else { + if dest.exists() { + std::fs::remove_dir_all(dest).map_err(|e| { + ApsError::io( + e, + format!("Failed to remove existing directory {:?}", dest), + ) + })?; + } + std::fs::create_dir_all(dest).map_err(|e| { + ApsError::io(e, format!("Failed to create directory {:?}", dest)) })?; } - std::fs::create_dir_all(dest).map_err(|e| { - ApsError::io(e, 
format!("Failed to create directory {:?}", dest)) - })?; for item in items { let item_name = item.file_name().ok_or_else(|| { @@ -635,7 +704,11 @@ fn install_asset( })?; let item_dest = dest.join(item_name); if item.is_dir() { - copy_directory(&item, &item_dest)?; + if matches!(kind, AssetKind::CursorHooks | AssetKind::ClaudeHooks) { + copy_directory_merge(&item, &item_dest)?; + } else { + copy_directory(&item, &item_dest)?; + } } else { std::fs::copy(&item, &item_dest).map_err(|e| { ApsError::io(e, format!("Failed to copy {:?}", item)) From 91cb7254f86fc98141f33878fda21a663d7cc6bb Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 11:58:15 +0100 Subject: [PATCH 06/39] feat: Enable catalog generation for hook scripts --- src/catalog.rs | 25 +++++++++++++++++++++++++ 1 file changed, 25 insertions(+) diff --git a/src/catalog.rs b/src/catalog.rs index 9ffe9e7..5e92550 100644 --- a/src/catalog.rs +++ b/src/catalog.rs @@ -4,7 +4,9 @@ //! that are synced via the manifest. Each asset kind is enumerated: //! - agents_md: One entry per file //! - cursor_rules: One entry per individual rule file +//! - cursor_hooks: One entry per hook script //! - cursor_skills_root: One entry per skill folder +//! - claude_hooks: One entry per hook script //! 
- agent_skill: One entry per skill folder use crate::error::{ApsError, Result}; @@ -215,6 +217,29 @@ fn enumerate_entry_assets(entry: &Entry, manifest_dir: &Path) -> Result { + let files = enumerate_files(&resolved.source_path, &entry.include)?; + for file_path in files { + let name = file_path + .file_name() + .map(|n| n.to_string_lossy().to_string()) + .unwrap_or_default(); + + if name.is_empty() { + continue; + } + + let dest_path = base_dest.join(&name); + + catalog_entries.push(CatalogEntry { + id: format!("{}:{}", entry.id, name), + name, + kind: entry.kind.clone(), + destination: format!("./{}", dest_path.display()), + short_description: None, + }); + } + } AssetKind::CursorSkillsRoot => { // Enumerate each skill folder in the directory let folders = enumerate_folders(&resolved.source_path, &entry.include)?; From 8d4a7e7cb3a032b96f9955387cdb9d8f37a46e96 Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 11:58:15 +0100 Subject: [PATCH 07/39] test: Add CLI tests for Cursor and Claude hooks --- tests/cli.rs | 327 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 327 insertions(+) diff --git a/tests/cli.rs b/tests/cli.rs index b7e7cab..033ede0 100644 --- a/tests/cli.rs +++ b/tests/cli.rs @@ -360,6 +360,333 @@ fn sync_with_symlink_creates_symlink() { } } +// ============================================================================ +// Hooks Tests +// ============================================================================ + +#[test] +fn sync_cursor_hooks_copies_directory_and_sets_exec() { + let temp = assert_fs::TempDir::new().unwrap(); + + let source = temp.child("source"); + source.create_dir_all().unwrap(); + source.child(".cursor").create_dir_all().unwrap(); + source + .child(".cursor/scripts/hello.sh") + .write_str("echo hello\n") + .unwrap(); + source + .child(".cursor/scripts/nested") + .create_dir_all() + .unwrap(); + source + .child(".cursor/scripts/nested/inner.sh") + .write_str("echo inner\n") + .unwrap(); + source + 
.child(".cursor/hooks.json") + .write_str( + r#"{ + "hooks": { + "onStart": [ + { "command": "bash .cursor/scripts/hello.sh" }, + { "command": "bash .cursor/scripts/nested/inner.sh" } + ] + } +}"#, + ) + .unwrap(); + + let project = temp.child("project"); + project.create_dir_all().unwrap(); + + let manifest = format!( + r#"entries: + - id: cursor-hooks + kind: cursor_hooks + source: + type: filesystem + root: {} + path: .cursor + symlink: false + dest: ./.cursor +"#, + source.path().display() + ); + + project.child("aps.yaml").write_str(&manifest).unwrap(); + + aps().arg("sync").current_dir(&project).assert().success(); + + project + .child(".cursor/scripts/hello.sh") + .assert(predicate::path::exists()); + project + .child(".cursor/scripts/nested/inner.sh") + .assert(predicate::path::exists()); + // Verify config is also synced to parent dir + project + .child(".cursor/hooks.json") + .assert(predicate::path::exists()); + + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let mode = std::fs::metadata(project.path().join(".cursor/scripts/hello.sh")) + .unwrap() + .permissions() + .mode(); + assert_ne!(mode & 0o100, 0); + let nested_mode = std::fs::metadata(project.path().join(".cursor/scripts/nested/inner.sh")) + .unwrap() + .permissions() + .mode(); + assert_ne!(nested_mode & 0o100, 0); + } +} + +#[test] +fn sync_claude_hooks_copies_directory_and_sets_exec() { + let temp = assert_fs::TempDir::new().unwrap(); + + let source = temp.child("source"); + source.create_dir_all().unwrap(); + source.child(".claude").create_dir_all().unwrap(); + source + .child(".claude/scripts/start.sh") + .write_str("echo start\n") + .unwrap(); + source + .child(".claude/settings.json") + .write_str( + r#"{ + "hooks": { + "onSessionStart": [ + { "command": "bash $CLAUDE_PROJECT_DIR/.claude/scripts/start.sh" } + ] + } +}"#, + ) + .unwrap(); + + let project = temp.child("project"); + project.create_dir_all().unwrap(); + + let manifest = format!( + r#"entries: + - id: claude-hooks 
+ kind: claude_hooks + source: + type: filesystem + root: {} + path: .claude + symlink: false + dest: ./.claude +"#, + source.path().display() + ); + + project.child("aps.yaml").write_str(&manifest).unwrap(); + + aps().arg("sync").current_dir(&project).assert().success(); + + project + .child(".claude/scripts/start.sh") + .assert(predicate::path::exists()); + // Verify config is also synced to parent dir + project + .child(".claude/settings.json") + .assert(predicate::path::exists()); + + #[cfg(unix)] + { + use std::os::unix::fs::PermissionsExt; + let mode = std::fs::metadata(project.path().join(".claude/scripts/start.sh")) + .unwrap() + .permissions() + .mode(); + assert_ne!(mode & 0o100, 0); + } +} + +#[test] +fn validate_cursor_hooks_strict_rejects_missing_config() { + let temp = assert_fs::TempDir::new().unwrap(); + + let source = temp.child("source"); + source.create_dir_all().unwrap(); + source.child(".cursor").create_dir_all().unwrap(); + source + .child(".cursor/scripts/hello.sh") + .write_str("echo hello\n") + .unwrap(); + + let project = temp.child("project"); + project.create_dir_all().unwrap(); + + let manifest = format!( + r#"entries: + - id: cursor-hooks + kind: cursor_hooks + source: + type: filesystem + root: {} + path: .cursor + symlink: false + dest: ./.cursor +"#, + source.path().display() + ); + + project.child("aps.yaml").write_str(&manifest).unwrap(); + + aps() + .args(["validate", "--strict"]) + .current_dir(&project) + .assert() + .failure() + .stderr(predicate::str::contains("hooks.json")); +} + +#[test] +fn validate_claude_hooks_strict_rejects_missing_config() { + let temp = assert_fs::TempDir::new().unwrap(); + + let source = temp.child("source"); + source.create_dir_all().unwrap(); + source.child(".claude").create_dir_all().unwrap(); + source + .child(".claude/scripts/start.sh") + .write_str("echo start\n") + .unwrap(); + + let project = temp.child("project"); + project.create_dir_all().unwrap(); + + let manifest = format!( + r#"entries: 
+ - id: claude-hooks + kind: claude_hooks + source: + type: filesystem + root: {} + path: .claude + symlink: false + dest: ./.claude +"#, + source.path().display() + ); + + project.child("aps.yaml").write_str(&manifest).unwrap(); + + aps() + .args(["validate", "--strict"]) + .current_dir(&project) + .assert() + .failure() + .stderr(predicate::str::contains("settings.json")); +} + +#[test] +fn validate_cursor_hooks_strict_accepts_valid() { + let temp = assert_fs::TempDir::new().unwrap(); + + let source = temp.child("source"); + source.create_dir_all().unwrap(); + source.child(".cursor").create_dir_all().unwrap(); + source + .child(".cursor/scripts/hello.sh") + .write_str("echo hello\n") + .unwrap(); + source + .child(".cursor/hooks.json") + .write_str( + r#"{ + "hooks": { + "onStart": [ + { "command": "bash .cursor/scripts/hello.sh" } + ] + } +}"#, + ) + .unwrap(); + + let project = temp.child("project"); + project.create_dir_all().unwrap(); + + let manifest = format!( + r#"entries: + - id: cursor-hooks + kind: cursor_hooks + source: + type: filesystem + root: {} + path: .cursor + symlink: false + dest: ./.cursor +"#, + source.path().display() + ); + + project.child("aps.yaml").write_str(&manifest).unwrap(); + + aps() + .args(["validate", "--strict"]) + .current_dir(&project) + .assert() + .success(); +} + +#[test] +fn validate_claude_hooks_strict_accepts_valid() { + let temp = assert_fs::TempDir::new().unwrap(); + + let source = temp.child("source"); + source.create_dir_all().unwrap(); + source.child(".claude").create_dir_all().unwrap(); + source + .child(".claude/scripts/start.sh") + .write_str("echo start\n") + .unwrap(); + source + .child(".claude/settings.json") + .write_str( + r#"{ + "hooks": { + "onSessionStart": [ + { "command": "bash .claude/scripts/start.sh" } + ] + } +}"#, + ) + .unwrap(); + + let project = temp.child("project"); + project.create_dir_all().unwrap(); + + let manifest = format!( + r#"entries: + - id: claude-hooks + kind: claude_hooks + 
source: + type: filesystem + root: {} + path: .claude + symlink: false + dest: ./.claude +"#, + source.path().display() + ); + + project.child("aps.yaml").write_str(&manifest).unwrap(); + + aps() + .args(["validate", "--strict"]) + .current_dir(&project) + .assert() + .success(); +} + // ============================================================================ // Verbose Flag Tests // ============================================================================ From 62d5879a2e270fb19b586325e91f3b578b93c158 Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 11:58:15 +0100 Subject: [PATCH 08/39] docs: Update PR message for hooks feature --- PR_MESSAGE.md | 19 +++++++++++++++---- 1 file changed, 15 insertions(+), 4 deletions(-) diff --git a/PR_MESSAGE.md b/PR_MESSAGE.md index 0cf3285..5f36dbf 100644 --- a/PR_MESSAGE.md +++ b/PR_MESSAGE.md @@ -1,6 +1,8 @@ # Hey Weston πŸ‘‹ -First, thank you for building and maintaining this repository β€” it’s been genuinely useful for distributing rules and skills across multiple projects based on different needs. +> [!NOTE] +> First, thank you for building and maintaining this repository β€” it’s been genuinely useful for distributing rules and skills across multiple projects based on different needs. +> Second, I am not a Rust expert, so please bear with me if I made any mistakes. I tried to follow the existing code style and conventions. While using the tool, I found a couple of areas where additional functionality would be extremely valuable: @@ -8,10 +10,19 @@ While using the tool, I found a couple of areas where additional functionality w The Hooks documentations for the designated IDEs enforce the scripts running the Hooks to be marked as executable with `chmod +x`. 
-To complete this functionality, I used already existing pipeline and introduced new **Asset Types**: +I extended the already existing pipeline to support this functionality with new **Asset Types**: -- `cursor_hooks_root` -- `claude_hooks_root` +- `cursor_hooks` +- `claude_hooks` + +**Implementation Details:** + +- Added `cursor_hooks` and `claude_hooks` as first-class `AssetKind`s, mirroring the directory-copy behavior of `cursor_rules`. +- Implemented a post-installation step that recursively applies `chmod +x` to all `.sh` files in the destination directory, ensuring hooks work immediately out of the box. +- Added a validation layer (`src/hooks.rs`) that verifies the expected project structure: + - For Cursor: Checks for `.cursor/hooks.json` and ensures referenced scripts exist. + - For Claude: Checks for `.claude/settings.json`, parses the `hooks` section, and validates script paths. +- Updated the catalog generation to enumerate individual hook scripts. ## πŸ€– Copilot Rule convert From b23f3bd8d64f2d3b43c5a16d8978f7fc814f7b05 Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 11:58:15 +0100 Subject: [PATCH 09/39] chore: Bump version to 0.1.8 --- .release-please-manifest.json | 2 +- Cargo.lock | 2 +- Cargo.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 55c86c8..a46f218 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.7" + ".": "0.1.8" } diff --git a/Cargo.lock b/Cargo.lock index 01040a2..1007c02 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -87,7 +87,7 @@ dependencies = [ [[package]] name = "aps" -version = "0.1.7" +version = "0.1.8" dependencies = [ "assert_cmd", "assert_fs", diff --git a/Cargo.toml b/Cargo.toml index 098a5d1..b48a2a5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "aps" -version = "0.1.7" +version = "0.1.8" edition = "2021" description = "Manifest-driven 
CLI for syncing agentic assets" authors = ["APS Contributors"] From 53f60624da7f464e3c79edf810482bcae75918ce Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 12:01:16 +0100 Subject: [PATCH 10/39] docs: Update PR message for hooks feature pt.2 --- PR_MESSAGE.md | 11 +++++------ 1 file changed, 5 insertions(+), 6 deletions(-) diff --git a/PR_MESSAGE.md b/PR_MESSAGE.md index 5f36dbf..f5d5d11 100644 --- a/PR_MESSAGE.md +++ b/PR_MESSAGE.md @@ -17,12 +17,11 @@ I extended the already existing pipeline to support this functionality with new **Implementation Details:** -- Added `cursor_hooks` and `claude_hooks` as first-class `AssetKind`s, mirroring the directory-copy behavior of `cursor_rules`. -- Implemented a post-installation step that recursively applies `chmod +x` to all `.sh` files in the destination directory, ensuring hooks work immediately out of the box. -- Added a validation layer (`src/hooks.rs`) that verifies the expected project structure: - - For Cursor: Checks for `.cursor/hooks.json` and ensures referenced scripts exist. - - For Claude: Checks for `.claude/settings.json`, parses the `hooks` section, and validates script paths. -- Updated the catalog generation to enumerate individual hook scripts. +- **Smart Syncing**: Hooks are installed using a merge strategy that preserves existing files in `.cursor/` or `.claude/` directories (like extensions or other configs), only overwriting the specific hook scripts and configuration. +- **Config Management**: Automatically syncs the associated configuration file (`.cursor/hooks.json` or `.claude/settings.json`) from the source alongside the hooks directory. +- **Executable Permissions**: Post-installation step recursively applies `chmod +x` to all `.sh` files, ensuring hooks work immediately. +- **Validation**: Enforces correct structure where configuration files reside in the parent directory of the hooks folder, and verifies referenced scripts exist. 
+- **Catalog**: Updated generation to enumerate individual hook scripts. ## πŸ€– Copilot Rule convert From 652e13e7188244651390f75d20896a1e4fb184f2 Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 12:09:49 +0100 Subject: [PATCH 11/39] feat: Remove PR Message --- PR_MESSAGE.md | 28 ---------------------------- 1 file changed, 28 deletions(-) delete mode 100644 PR_MESSAGE.md diff --git a/PR_MESSAGE.md b/PR_MESSAGE.md deleted file mode 100644 index f5d5d11..0000000 --- a/PR_MESSAGE.md +++ /dev/null @@ -1,28 +0,0 @@ -# Hey Weston πŸ‘‹ - -> [!NOTE] -> First, thank you for building and maintaining this repository β€” it’s been genuinely useful for distributing rules and skills across multiple projects based on different needs. -> Second, I am not a Rust expert, so please bear with me if I made any mistakes. I tried to follow the existing code style and conventions. - -While using the tool, I found a couple of areas where additional functionality would be extremely valuable: - -## πŸͺ Hooks (Cursor, Claude, ...) - -The Hooks documentations for the designated IDEs enforce the scripts running the Hooks to be marked as executable with `chmod +x`. - -I extended the already existing pipeline to support this functionality with new **Asset Types**: - -- `cursor_hooks` -- `claude_hooks` - -**Implementation Details:** - -- **Smart Syncing**: Hooks are installed using a merge strategy that preserves existing files in `.cursor/` or `.claude/` directories (like extensions or other configs), only overwriting the specific hook scripts and configuration. -- **Config Management**: Automatically syncs the associated configuration file (`.cursor/hooks.json` or `.claude/settings.json`) from the source alongside the hooks directory. -- **Executable Permissions**: Post-installation step recursively applies `chmod +x` to all `.sh` files, ensuring hooks work immediately. 
-- **Validation**: Enforces correct structure where configuration files reside in the parent directory of the hooks folder, and verifies referenced scripts exist. -- **Catalog**: Updated generation to enumerate individual hook scripts. - -## πŸ€– Copilot Rule convert - -... From d6a560f3ce2ec42affd0a2872380b48211f2bc3d Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 16:25:11 +0100 Subject: [PATCH 12/39] feat: Implement per-run Git clone caching mechanism --- src/manifest.rs | 8 +++ src/sources/git.rs | 135 +++++++++++++++++++++++++++++++++++++++++++++ src/sources/mod.rs | 5 +- 3 files changed, 147 insertions(+), 1 deletion(-) diff --git a/src/manifest.rs b/src/manifest.rs index 9fe9599..0039913 100644 --- a/src/manifest.rs +++ b/src/manifest.rs @@ -212,6 +212,14 @@ impl Source { } } + /// Get shallow clone setting for git sources + pub fn git_shallow(&self) -> Option { + match self { + Source::Git { shallow, .. } => Some(*shallow), + Source::Filesystem { .. } => None, + } + } + /// Get the path within a git source (for cloning at specific commits) pub fn git_path(&self) -> Option<&str> { match self { diff --git a/src/sources/git.rs b/src/sources/git.rs index 2e5a78b..33ad8dc 100644 --- a/src/sources/git.rs +++ b/src/sources/git.rs @@ -2,8 +2,10 @@ use super::{expand_path, GitInfo, ResolvedSource, SourceAdapter}; use crate::error::{ApsError, Result}; +use std::collections::HashMap; use std::path::{Path, PathBuf}; use std::process::Command; +use std::sync::Arc; use tempfile::TempDir; use tracing::{debug, info}; @@ -89,6 +91,139 @@ pub struct ResolvedGitSource { pub commit_sha: String, } +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +struct GitCloneKey { + repo: String, + git_ref: String, + shallow: bool, +} + +#[derive(Debug, Clone, PartialEq, Eq, Hash)] +struct GitCommitKey { + repo: String, + commit_sha: String, + resolved_ref: String, +} + +/// Cache for git clones within a single sync run +pub struct GitCloneCache { + latest: HashMap>, + commits: 
HashMap>, +} + +impl GitCloneCache { + pub fn new() -> Self { + Self { + latest: HashMap::new(), + commits: HashMap::new(), + } + } + + pub fn resolve_latest( + &mut self, + repo: &str, + git_ref: &str, + shallow: bool, + ) -> Result> { + let key = GitCloneKey { + repo: repo.to_string(), + git_ref: git_ref.to_string(), + shallow, + }; + if let Some(cached) = self.latest.get(&key) { + info!("Reusing cached clone for {} @ {}", repo, git_ref); + return Ok(Arc::clone(cached)); + } + + let resolved = Arc::new(clone_and_resolve(repo, git_ref, shallow)?); + self.latest.insert(key, Arc::clone(&resolved)); + Ok(resolved) + } + + pub fn resolve_commit( + &mut self, + repo: &str, + commit_sha: &str, + resolved_ref: &str, + ) -> Result> { + let key = GitCommitKey { + repo: repo.to_string(), + commit_sha: commit_sha.to_string(), + resolved_ref: resolved_ref.to_string(), + }; + if let Some(cached) = self.commits.get(&key) { + info!( + "Reusing cached clone for {} @ {}", + repo, + &commit_sha[..8.min(commit_sha.len())] + ); + return Ok(Arc::clone(cached)); + } + + let resolved = Arc::new(clone_at_commit(repo, commit_sha, resolved_ref)?); + self.commits.insert(key, Arc::clone(&resolved)); + Ok(resolved) + } +} + +/// Resolve a git source using the per-run clone cache. +pub fn resolve_git_source_with_cache( + repo: &str, + git_ref: &str, + shallow: bool, + path: Option<&str>, + cache: &mut GitCloneCache, +) -> Result { + let resolved_git = cache.resolve_latest(repo, git_ref, shallow)?; + let path = expand_path(path.unwrap_or(".")); + let source_path = if path == "." { + resolved_git.repo_path.clone() + } else { + resolved_git.repo_path.join(&path) + }; + + let git_info = GitInfo { + resolved_ref: resolved_git.resolved_ref.clone(), + commit_sha: resolved_git.commit_sha.clone(), + }; + + Ok(ResolvedSource::git( + source_path, + repo.to_string(), + git_info, + Arc::clone(&resolved_git), + )) +} + +/// Resolve a git source at a specific commit using the per-run clone cache. 
+pub fn resolve_git_source_at_commit_with_cache( + repo: &str, + commit_sha: &str, + resolved_ref: &str, + path: Option<&str>, + cache: &mut GitCloneCache, +) -> Result { + let resolved_git = cache.resolve_commit(repo, commit_sha, resolved_ref)?; + let path = expand_path(path.unwrap_or(".")); + let source_path = if path == "." { + resolved_git.repo_path.clone() + } else { + resolved_git.repo_path.join(&path) + }; + + let git_info = GitInfo { + resolved_ref: resolved_git.resolved_ref.clone(), + commit_sha: resolved_git.commit_sha.clone(), + }; + + Ok(ResolvedSource::git( + source_path, + repo.to_string(), + git_info, + Arc::clone(&resolved_git), + )) +} + /// Clone a git repository and resolve the ref using the git CLI. /// This inherits the user's existing git configuration (SSH, credentials, etc.) pub fn clone_and_resolve(url: &str, git_ref: &str, shallow: bool) -> Result { diff --git a/src/sources/mod.rs b/src/sources/mod.rs index 4408f4c..7a4e56f 100644 --- a/src/sources/mod.rs +++ b/src/sources/mod.rs @@ -7,7 +7,10 @@ mod filesystem; mod git; pub use filesystem::FilesystemSource; -pub use git::{clone_at_commit, get_remote_commit_sha, GitSource}; +pub use git::{ + get_remote_commit_sha, resolve_git_source_at_commit_with_cache, resolve_git_source_with_cache, + GitCloneCache, GitSource, +}; use crate::error::Result; use crate::lockfile::LockedEntry; From 2ab83d3f68acac6fc5a120fcc8aa38faaa5fba9d Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 16:25:11 +0100 Subject: [PATCH 13/39] feat: Integrate Git clone cache into sync command execution --- src/commands.rs | 6 ++++-- src/install.rs | 46 ++++++++++++++++++++++------------------------ 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/src/commands.rs b/src/commands.rs index e9719c2..567c5a4 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -10,6 +10,7 @@ use crate::manifest::{ discover_manifest, manifest_dir, validate_manifest, AssetKind, Manifest, DEFAULT_MANIFEST_NAME, }; use 
crate::orphan::{detect_orphaned_paths, prompt_and_cleanup_orphans}; +use crate::sources::GitCloneCache; use crate::sync_output::{print_sync_results, print_sync_summary, SyncDisplayItem, SyncStatus}; use std::fs; use std::io::Write; @@ -150,6 +151,7 @@ pub fn cmd_sync(args: SyncArgs) -> Result<()> { strict: args.strict, upgrade: args.upgrade, }; + let mut git_cache = GitCloneCache::new(); // Detect orphaned paths (destinations that changed) let orphans = detect_orphaned_paths(&entries_to_install, &lockfile, &base_dir); @@ -159,9 +161,9 @@ pub fn cmd_sync(args: SyncArgs) -> Result<()> { for entry in &entries_to_install { // Use composite install for composite entries, regular install otherwise let result = if entry.is_composite() { - install_composite_entry(entry, &base_dir, &lockfile, &options)? + install_composite_entry(entry, &base_dir, &lockfile, &options, &mut git_cache)? } else { - install_entry(entry, &base_dir, &lockfile, &options)? + install_entry(entry, &base_dir, &lockfile, &options, &mut git_cache)? 
}; results.push(result); } diff --git a/src/install.rs b/src/install.rs index a1e7dd2..29fef54 100644 --- a/src/install.rs +++ b/src/install.rs @@ -7,7 +7,10 @@ use crate::error::{ApsError, Result}; use crate::hooks::{validate_claude_hooks, validate_cursor_hooks}; use crate::lockfile::{LockedEntry, Lockfile}; use crate::manifest::{AssetKind, Entry}; -use crate::sources::{clone_at_commit, get_remote_commit_sha, GitInfo, ResolvedSource}; +use crate::sources::{ + get_remote_commit_sha, resolve_git_source_at_commit_with_cache, resolve_git_source_with_cache, + GitCloneCache, +}; use dialoguer::Confirm; use std::io::IsTerminal; use std::path::{Path, PathBuf}; @@ -155,6 +158,7 @@ pub fn install_entry( manifest_dir: &Path, lockfile: &Lockfile, options: &InstallOptions, + git_cache: &mut GitCloneCache, ) -> Result { info!("Processing entry: {}", entry.id); @@ -170,6 +174,7 @@ pub fn install_entry( let resolved = if let Some((repo, git_ref)) = source.git_info() { let dest_path = manifest_dir.join(entry.destination()); let locked_entry = lockfile.entries.get(&entry.id); + let shallow = source.git_shallow().unwrap_or(true); // Check if we should use the locked commit let use_locked_commit = @@ -223,25 +228,13 @@ pub fn install_entry( entry.id, &locked_commit[..8.min(locked_commit.len())] ); - let resolved_git = clone_at_commit(repo, locked_commit, locked_ref)?; - - // Build the path within the cloned repo - let path = source - .git_path() - .map(|p| p.to_string()) - .unwrap_or_else(|| ".".to_string()); - let source_path = if path == "." { - resolved_git.repo_path.clone() - } else { - resolved_git.repo_path.join(&path) - }; - - let git_info = GitInfo { - resolved_ref: resolved_git.resolved_ref.clone(), - commit_sha: resolved_git.commit_sha.clone(), - }; - - ResolvedSource::git(source_path, repo.to_string(), git_info, resolved_git) + resolve_git_source_at_commit_with_cache( + repo, + locked_commit, + locked_ref, + source.git_path(), + git_cache, + )? 
} else { // Upgrade mode or no locked commit: check remote and clone latest // Fast-path: skip if remote commit matches lockfile and dest exists @@ -278,8 +271,7 @@ pub fn install_entry( } // Clone latest from branch - let adapter = source.to_adapter(); - adapter.resolve(manifest_dir)? + resolve_git_source_with_cache(repo, git_ref, shallow, source.git_path(), git_cache)? } } else { // Non-git source (filesystem): use adapter directly @@ -487,6 +479,7 @@ pub fn install_composite_entry( manifest_dir: &Path, lockfile: &Lockfile, options: &InstallOptions, + git_cache: &mut GitCloneCache, ) -> Result { info!("Processing composite entry: {}", entry.id); @@ -501,8 +494,13 @@ pub fn install_composite_entry( let mut all_checksums: Vec = Vec::new(); for source in &entry.sources { - let adapter = source.to_adapter(); - let resolved = adapter.resolve(manifest_dir)?; + let resolved = if let Some((repo, git_ref)) = source.git_info() { + let shallow = source.git_shallow().unwrap_or(true); + resolve_git_source_with_cache(repo, git_ref, shallow, source.git_path(), git_cache)? + } else { + let adapter = source.to_adapter(); + adapter.resolve(manifest_dir)? 
+ }; if !resolved.source_path.exists() { return Err(ApsError::SourcePathNotFound { From 4c804a5526015738b9ed5b35a3100ec8e2a798d2 Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 16:25:11 +0100 Subject: [PATCH 14/39] test: Add integration tests for Git clone caching --- tests/cli.rs | 139 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 139 insertions(+) diff --git a/tests/cli.rs b/tests/cli.rs index 033ede0..0b24cea 100644 --- a/tests/cli.rs +++ b/tests/cli.rs @@ -969,6 +969,145 @@ fn sync_shows_upgrade_available_status() { ); } +#[test] +fn sync_reuses_git_clone_for_same_repo() { + let temp = assert_fs::TempDir::new().unwrap(); + + let source_repo = temp.child("source-repo"); + source_repo.create_dir_all().unwrap(); + create_git_repo_with_agents_md(source_repo.path(), "# Version 1\nOriginal content\n"); + + let project = temp.child("project"); + project.create_dir_all().unwrap(); + + let manifest = format!( + r#"entries: + - id: agents-one + kind: agents_md + source: + type: git + repo: {} + ref: main + shallow: false + path: AGENTS.md + dest: ./AGENTS-1.md + - id: agents-two + kind: agents_md + source: + type: git + repo: {} + ref: main + shallow: false + path: AGENTS.md + dest: ./AGENTS-2.md +"#, + source_repo.path().display(), + source_repo.path().display() + ); + + project.child("aps.yaml").write_str(&manifest).unwrap(); + + let output = aps() + .args(["--verbose", "sync"]) + .current_dir(&project) + .output() + .expect("Failed to run aps sync"); + + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let combined_output = format!("{stdout}\n{stderr}"); + assert!( + output.status.success(), + "aps sync failed: {combined_output}" + ); + + let clone_count = combined_output.matches("Cloning git repository:").count(); + let reuse_count = combined_output.matches("Reusing cached clone").count(); + assert_eq!( + clone_count, 1, + "expected one clone for 
shared repo, output: {combined_output}" + ); + assert!( + reuse_count >= 1, + "expected cached clone reuse, output: {combined_output}" + ); + + project + .child("AGENTS-1.md") + .assert(predicate::path::exists()); + project + .child("AGENTS-2.md") + .assert(predicate::path::exists()); +} + +#[test] +fn sync_clones_each_distinct_repo_once() { + let temp = assert_fs::TempDir::new().unwrap(); + + let source_repo_a = temp.child("source-repo-a"); + source_repo_a.create_dir_all().unwrap(); + create_git_repo_with_agents_md(source_repo_a.path(), "# Repo A\n"); + + let source_repo_b = temp.child("source-repo-b"); + source_repo_b.create_dir_all().unwrap(); + create_git_repo_with_agents_md(source_repo_b.path(), "# Repo B\n"); + + let project = temp.child("project"); + project.create_dir_all().unwrap(); + + let manifest = format!( + r#"entries: + - id: agents-a + kind: agents_md + source: + type: git + repo: {} + ref: main + shallow: false + path: AGENTS.md + dest: ./AGENTS-A.md + - id: agents-b + kind: agents_md + source: + type: git + repo: {} + ref: main + shallow: false + path: AGENTS.md + dest: ./AGENTS-B.md +"#, + source_repo_a.path().display(), + source_repo_b.path().display() + ); + + project.child("aps.yaml").write_str(&manifest).unwrap(); + + let output = aps() + .args(["--verbose", "sync"]) + .current_dir(&project) + .output() + .expect("Failed to run aps sync"); + + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + let combined_output = format!("{stdout}\n{stderr}"); + assert!( + output.status.success(), + "aps sync failed: {combined_output}" + ); + + let clone_count = combined_output.matches("Cloning git repository:").count(); + let reuse_count = combined_output.matches("Reusing cached clone").count(); + assert_eq!( + clone_count, 2, + "expected one clone per distinct repo, output: {combined_output}" + ); + assert_eq!( + reuse_count, 0, + "did not expect cache reuse across 
distinct repos, output: {combined_output}" + ); +} + // ============================================================================ // Composite Agents MD Tests (Live Git Sources) // ============================================================================ From 81a71ca4bf30972c4f17c9045d3a6866e8394288 Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 16:25:11 +0100 Subject: [PATCH 15/39] docs: Document Git clone caching in architecture.md --- docs/architecture.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/docs/architecture.md b/docs/architecture.md index 1344f03..2973f74 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -120,6 +120,12 @@ pub struct GitSource { 4. If mismatch β†’ clone and install ``` +**Per-run clone reuse:** + +- During `aps sync`, git clones are cached in-memory per run. +- Entries that share the same repo/ref/shallow (or repo/commit in locked mode) reuse the same temporary clone. +- The cache is dropped at the end of the run, so temp directories are cleaned up as usual. 
+ ### Enum-to-Adapter Bridge The manifest uses a `Source` enum for YAML serialization, which bridges to the trait implementations: From 0ed0f50a6e5b1f885a2d1fca5f8f7af229ab9706 Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 30 Jan 2026 16:25:11 +0100 Subject: [PATCH 16/39] chore: Bump version to 0.1.9 --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1007c02..236f249 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -87,7 +87,7 @@ dependencies = [ [[package]] name = "aps" -version = "0.1.8" +version = "0.1.9" dependencies = [ "assert_cmd", "assert_fs", diff --git a/Cargo.toml b/Cargo.toml index b48a2a5..dde480d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "aps" -version = "0.1.8" +version = "0.1.9" edition = "2021" description = "Manifest-driven CLI for syncing agentic assets" authors = ["APS Contributors"] From 642d01b1d0a94631d0422c74a9a8206ef3f70206 Mon Sep 17 00:00:00 2001 From: Melioo Date: Sun, 1 Feb 2026 11:37:49 +0100 Subject: [PATCH 17/39] fix: PR Changes Fixed based on PR #51 issues from CodeRabbit. 
--- aps.yaml | 8 +++-- src/catalog.rs | 98 ++++++++++++++++++++++++++++++++++++++++++++++---- src/hooks.rs | 85 ++++++++++++++++++++++++------------------- 3 files changed, 145 insertions(+), 46 deletions(-) diff --git a/aps.yaml b/aps.yaml index 248216d..e9d0a83 100644 --- a/aps.yaml +++ b/aps.yaml @@ -20,7 +20,7 @@ entries: ref: main path: skills include: - - skill-creation + - skill-creator dest: ./.claude/skills/ - id: address-github-comments @@ -29,5 +29,7 @@ entries: type: git repo: https://github.com/sickn33/antigravity-awesome-skills.git ref: main - path: skills/address-github-comments - dest: ./.claude/skills/address-github-comments/ + path: skills + include: + - address-github-comments + dest: ./.claude/skills/ diff --git a/src/catalog.rs b/src/catalog.rs index 5e92550..c6adb1d 100644 --- a/src/catalog.rs +++ b/src/catalog.rs @@ -218,18 +218,24 @@ fn enumerate_entry_assets(entry: &Entry, manifest_dir: &Path) -> Result { - let files = enumerate_files(&resolved.source_path, &entry.include)?; + let files = enumerate_files_recursive(&resolved.source_path, &entry.include)?; for file_path in files { - let name = file_path - .file_name() - .map(|n| n.to_string_lossy().to_string()) - .unwrap_or_default(); + let relative_path = file_path + .strip_prefix(&resolved.source_path) + .map(PathBuf::from) + .unwrap_or_else(|_| { + file_path + .file_name() + .map(PathBuf::from) + .unwrap_or_default() + }); + let name = relative_path.to_string_lossy().replace('\\', "/"); if name.is_empty() { continue; } - let dest_path = base_dest.join(&name); + let dest_path = base_dest.join(&relative_path); catalog_entries.push(CatalogEntry { id: format!("{}:{}", entry.id, name), @@ -512,6 +518,59 @@ fn enumerate_files(dir: &Path, include: &[String]) -> Result> { Ok(files) } +/// Enumerate all files in a directory recursively, optionally filtering by include prefixes +fn enumerate_files_recursive(dir: &Path, include: &[String]) -> Result> { + let mut files = Vec::new(); + 
enumerate_files_recursive_inner(dir, dir, include, &mut files)?; + + // Sort for deterministic output + files.sort(); + Ok(files) +} + +fn enumerate_files_recursive_inner( + current_dir: &Path, + root_dir: &Path, + include: &[String], + files: &mut Vec, +) -> Result<()> { + for entry in std::fs::read_dir(current_dir) + .map_err(|e| ApsError::io(e, format!("Failed to read directory {:?}", current_dir)))? + { + let entry = entry.map_err(|e| ApsError::io(e, "Failed to read directory entry"))?; + let path = entry.path(); + + if path.is_dir() { + enumerate_files_recursive_inner(&path, root_dir, include, files)?; + continue; + } + + // Only include files (not directories) + if !path.is_file() { + continue; + } + + let name = entry.file_name().to_string_lossy().to_string(); + let relative_path = path.strip_prefix(root_dir).unwrap_or(&path); + let relative_str = relative_path.to_string_lossy().replace('\\', "/"); + + // Apply include filter if specified + if !include.is_empty() { + let matches = include.iter().any(|prefix| { + let normalized_prefix = prefix.replace('\\', "/"); + relative_str.starts_with(&normalized_prefix) || name.starts_with(prefix) + }); + if !matches { + continue; + } + } + + files.push(path); + } + + Ok(()) +} + /// Enumerate all folders in a directory, optionally filtering by include prefixes fn enumerate_folders(dir: &Path, include: &[String]) -> Result> { let mut folders = Vec::new(); @@ -589,6 +648,33 @@ mod tests { Ok(()) } + #[test] + fn test_enumerate_files_recursive() -> Result<()> { + let temp_dir = TempDir::new().unwrap(); + let dir = temp_dir.path(); + + // Create test files in nested directories + std::fs::write(dir.join("hook1.sh"), "content1").unwrap(); + std::fs::create_dir(dir.join("nested")).unwrap(); + std::fs::write(dir.join("nested").join("hook2.sh"), "content2").unwrap(); + std::fs::create_dir(dir.join("nested").join("inner")).unwrap(); + std::fs::write(dir.join("nested").join("inner").join("hook3.sh"), "content3").unwrap(); + + // 
Test without filter + let files = enumerate_files_recursive(dir, &[])?; + assert_eq!(files.len(), 3); + + // Test with filename prefix filter + let files = enumerate_files_recursive(dir, &["hook1".to_string()])?; + assert_eq!(files.len(), 1); + + // Test with nested path prefix filter + let files = enumerate_files_recursive(dir, &["nested/".to_string()])?; + assert_eq!(files.len(), 2); + + Ok(()) + } + #[test] fn test_enumerate_folders() -> Result<()> { let temp_dir = TempDir::new().unwrap(); diff --git a/src/hooks.rs b/src/hooks.rs index 1b6c6d9..980bb41 100644 --- a/src/hooks.rs +++ b/src/hooks.rs @@ -20,7 +20,8 @@ pub fn validate_claude_hooks(hooks_dir: &Path, strict: bool) -> Result Result> { let mut warnings = Vec::new(); - let config_path = hooks_dir.join(config_filename(kind)); + let hooks_root = hooks_root_dir(hooks_dir); + let config_path = hooks_root.join(config_filename(kind)); if !config_path.exists() { warn_or_error( &mut warnings, @@ -58,7 +59,7 @@ fn validate_hooks(kind: HookKind, hooks_dir: &Path, strict: bool) -> Result Result PathBuf { + match hooks_dir.file_name().and_then(|name| name.to_str()) { + Some("hooks") | Some("scripts") => hooks_dir.parent().unwrap_or(hooks_dir).to_path_buf(), + _ => hooks_dir.to_path_buf(), + } +} + fn config_filename(kind: HookKind) -> &'static str { match kind { HookKind::Cursor => "hooks.json", @@ -127,36 +135,11 @@ fn collect_command_values(value: &Value, commands: &mut Vec) { fn collect_hook_script_paths(commands: &[String], kind: HookKind) -> HashSet { let mut scripts = HashSet::new(); - let prefixes = match kind { - HookKind::Cursor => vec![ - ".cursor/hooks/", - "./.cursor/hooks/", - "hooks/", - "./hooks/", - ".cursor\\hooks\\", - ".\\.cursor\\hooks\\", - "hooks\\", - ".\\hooks\\", - ], - HookKind::Claude => vec![ - ".claude/hooks/", - "./.claude/hooks/", - "$CLAUDE_PROJECT_DIR/.claude/hooks/", - "${CLAUDE_PROJECT_DIR}/.claude/hooks/", - ".claude\\hooks\\", - ".\\.claude\\hooks\\", - 
"$CLAUDE_PROJECT_DIR\\.claude\\hooks\\", - "${CLAUDE_PROJECT_DIR}\\.claude\\hooks\\", - ], - }; for command in commands { for token in command.split_whitespace() { - let token = trim_token(token); - for prefix in &prefixes { - if let Some(rel_path) = extract_relative_path(token, prefix) { - scripts.insert(PathBuf::from(rel_path)); - } + if let Some(rel_path) = extract_relative_path(token, kind) { + scripts.insert(PathBuf::from(rel_path)); } } } @@ -164,15 +147,43 @@ fn collect_hook_script_paths(commands: &[String], kind: HookKind) -> HashSet Option { - let position = token.find(prefix)?; - let mut rel = &token[position + prefix.len()..]; - rel = rel.trim_matches(|c: char| matches!(c, '"' | '\'' | ';' | ')' | '(' | ',')); - if rel.is_empty() { - None - } else { - Some(rel.to_string()) +fn extract_relative_path(token: &str, kind: HookKind) -> Option { + let token = trim_token(token); + if token.is_empty() { + return None; + } + + let markers = match kind { + HookKind::Cursor => [".cursor/", ".cursor\\"], + HookKind::Claude => [".claude/", ".claude\\"], + }; + + for marker in markers { + if let Some(position) = token.find(marker) { + let mut rel = &token[position + marker.len()..]; + rel = trim_token(rel); + if !rel.is_empty() { + return Some(rel.to_string()); + } + } + } + + let trimmed = token + .strip_prefix("./") + .or_else(|| token.strip_prefix(".\\")) + .unwrap_or(token); + let rel_prefixes = ["hooks/", "scripts/", "hooks\\", "scripts\\"]; + + for prefix in rel_prefixes { + if trimmed.starts_with(prefix) { + let rel = trim_token(trimmed); + if !rel.is_empty() { + return Some(rel.to_string()); + } + } } + + None } fn trim_token(token: &str) -> &str { From 74fe381dfec9fe4074ce90de6ac774d968f8d51c Mon Sep 17 00:00:00 2001 From: Melioo Date: Sun, 1 Feb 2026 12:40:41 +0100 Subject: [PATCH 18/39] fix: PR Changes pt.2 Fix of Minor issue with `exists` based on CodeRabbit. 
--- src/hooks.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/hooks.rs b/src/hooks.rs index 980bb41..0d0c08b 100644 --- a/src/hooks.rs +++ b/src/hooks.rs @@ -60,7 +60,7 @@ fn validate_hooks(kind: HookKind, hooks_dir: &Path, strict: bool) -> Result Date: Tue, 3 Feb 2026 09:21:22 +0100 Subject: [PATCH 19/39] feat: Scope Down to Cursor Hooks Removed Claude Hooks integration due to problematic approach with `settings.json` and already existing implementation of Claude Settings. --- .release-please-manifest.json | 2 +- Cargo.lock | 2 +- Cargo.toml | 2 +- README.md | 1 - docs/catalog-search-spec.md | 1 - src/catalog.rs | 14 +-- src/commands.rs | 11 +-- src/error.rs | 2 +- src/hooks.rs | 36 ++----- src/install.rs | 32 +++--- src/manifest.rs | 4 - tests/cli.rs | 178 +++++----------------------------- 12 files changed, 56 insertions(+), 229 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index a46f218..2be9c43 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.1.8" + ".": "0.2.0" } diff --git a/Cargo.lock b/Cargo.lock index 1007c02..6cd944a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -87,7 +87,7 @@ dependencies = [ [[package]] name = "aps" -version = "0.1.8" +version = "0.2.0" dependencies = [ "assert_cmd", "assert_fs", diff --git a/Cargo.toml b/Cargo.toml index b48a2a5..c57cbdd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "aps" -version = "0.1.8" +version = "0.2.0" edition = "2021" description = "Manifest-driven CLI for syncing agentic assets" authors = ["APS Contributors"] diff --git a/README.md b/README.md index fb76c6e..b37c7e8 100644 --- a/README.md +++ b/README.md @@ -210,7 +210,6 @@ entries: | `cursor_rules` | Directory of Cursor rules | `./.cursor/rules/` | | `cursor_hooks` | Directory of Cursor hooks | `./.cursor/hooks/` | | `cursor_skills_root` | Directory with skill subdirs | `./.cursor/skills/` | -| `claude_hooks` 
| Directory of Claude hooks | `./.claude/hooks/` | | `agent_skill` | Claude agent skill directory | `./.claude/skills/` | ### Source Types diff --git a/docs/catalog-search-spec.md b/docs/catalog-search-spec.md index 6940c69..3ee3de9 100644 --- a/docs/catalog-search-spec.md +++ b/docs/catalog-search-spec.md @@ -35,7 +35,6 @@ entries: - `cursor_rules` - Individual `.mdc` rule files - `cursor_hooks` - Individual hook scripts - `cursor_skills_root` - Skill folders for Cursor -- `claude_hooks` - Individual hook scripts - `agents_md` - AGENTS.md files - `agent_skill` - Agent skill folders (per agentskills.io spec) diff --git a/src/catalog.rs b/src/catalog.rs index c6adb1d..d383f3f 100644 --- a/src/catalog.rs +++ b/src/catalog.rs @@ -6,7 +6,6 @@ //! - cursor_rules: One entry per individual rule file //! - cursor_hooks: One entry per hook script //! - cursor_skills_root: One entry per skill folder -//! - claude_hooks: One entry per hook script //! - agent_skill: One entry per skill folder use crate::error::{ApsError, Result}; @@ -217,17 +216,14 @@ fn enumerate_entry_assets(entry: &Entry, manifest_dir: &Path) -> Result { + AssetKind::CursorHooks => { let files = enumerate_files_recursive(&resolved.source_path, &entry.include)?; for file_path in files { let relative_path = file_path .strip_prefix(&resolved.source_path) .map(PathBuf::from) .unwrap_or_else(|_| { - file_path - .file_name() - .map(PathBuf::from) - .unwrap_or_default() + file_path.file_name().map(PathBuf::from).unwrap_or_default() }); let name = relative_path.to_string_lossy().replace('\\', "/"); @@ -658,7 +654,11 @@ mod tests { std::fs::create_dir(dir.join("nested")).unwrap(); std::fs::write(dir.join("nested").join("hook2.sh"), "content2").unwrap(); std::fs::create_dir(dir.join("nested").join("inner")).unwrap(); - std::fs::write(dir.join("nested").join("inner").join("hook3.sh"), "content3").unwrap(); + std::fs::write( + dir.join("nested").join("inner").join("hook3.sh"), + "content3", + ) + .unwrap(); // Test 
without filter let files = enumerate_files_recursive(dir, &[])?; diff --git a/src/commands.rs b/src/commands.rs index e9719c2..4fde7f2 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -3,7 +3,7 @@ use crate::cli::{ CatalogGenerateArgs, InitArgs, ManifestFormat, StatusArgs, SyncArgs, ValidateArgs, }; use crate::error::{ApsError, Result}; -use crate::hooks::{validate_claude_hooks, validate_cursor_hooks}; +use crate::hooks::validate_cursor_hooks; use crate::install::{install_composite_entry, install_entry, InstallOptions, InstallResult}; use crate::lockfile::{display_status, Lockfile}; use crate::manifest::{ @@ -406,15 +406,6 @@ pub fn cmd_validate(args: ValidateArgs) -> Result<()> { } warnings.extend(hook_warnings); } - if entry.kind == AssetKind::ClaudeHooks { - let hook_warnings = - validate_claude_hooks(&resolved.source_path, args.strict)?; - for warning in &hook_warnings { - println!(" Warning: {}", warning); - } - warnings.extend(hook_warnings); - } - // Format output based on source type if let Some(git_info) = &resolved.git_info { println!( diff --git a/src/error.rs b/src/error.rs index 81f4be6..0a8f6fc 100644 --- a/src/error.rs +++ b/src/error.rs @@ -31,7 +31,7 @@ pub enum ApsError { #[error("Invalid asset kind: {kind}")] #[diagnostic( code(aps::manifest::invalid_kind), - help("Valid kinds are: cursor_rules, cursor_hooks, cursor_skills_root, claude_hooks, agents_md, composite_agents_md, agent_skill") + help("Valid kinds are: cursor_rules, cursor_hooks, cursor_skills_root, agents_md, composite_agents_md, agent_skill") )] InvalidAssetKind { kind: String }, diff --git a/src/hooks.rs b/src/hooks.rs index 0d0c08b..acdc997 100644 --- a/src/hooks.rs +++ b/src/hooks.rs @@ -3,25 +3,15 @@ use serde_yaml::Value; use std::collections::HashSet; use std::path::{Path, PathBuf}; -#[derive(Debug, Clone, Copy)] -pub enum HookKind { - Cursor, - Claude, -} - pub fn validate_cursor_hooks(hooks_dir: &Path, strict: bool) -> Result> { - validate_hooks(HookKind::Cursor, 
hooks_dir, strict) -} - -pub fn validate_claude_hooks(hooks_dir: &Path, strict: bool) -> Result> { - validate_hooks(HookKind::Claude, hooks_dir, strict) + validate_hooks(hooks_dir, strict) } -fn validate_hooks(kind: HookKind, hooks_dir: &Path, strict: bool) -> Result> { +fn validate_hooks(hooks_dir: &Path, strict: bool) -> Result> { let mut warnings = Vec::new(); let hooks_root = hooks_root_dir(hooks_dir); - let config_path = hooks_root.join(config_filename(kind)); + let config_path = hooks_root.join("hooks.json"); if !config_path.exists() { warn_or_error( &mut warnings, @@ -56,7 +46,7 @@ fn validate_hooks(kind: HookKind, hooks_dir: &Path, strict: bool) -> Result PathBuf { } } -fn config_filename(kind: HookKind) -> &'static str { - match kind { - HookKind::Cursor => "hooks.json", - HookKind::Claude => "settings.json", - } -} - fn read_hooks_config(path: &Path) -> Result { let content = std::fs::read_to_string(path) .map_err(|e| ApsError::io(e, "Failed to read hooks config"))?; @@ -133,12 +116,12 @@ fn collect_command_values(value: &Value, commands: &mut Vec) { } } -fn collect_hook_script_paths(commands: &[String], kind: HookKind) -> HashSet { +fn collect_hook_script_paths(commands: &[String]) -> HashSet { let mut scripts = HashSet::new(); for command in commands { for token in command.split_whitespace() { - if let Some(rel_path) = extract_relative_path(token, kind) { + if let Some(rel_path) = extract_relative_path(token) { scripts.insert(PathBuf::from(rel_path)); } } @@ -147,16 +130,13 @@ fn collect_hook_script_paths(commands: &[String], kind: HookKind) -> HashSet Option { +fn extract_relative_path(token: &str) -> Option { let token = trim_token(token); if token.is_empty() { return None; } - let markers = match kind { - HookKind::Cursor => [".cursor/", ".cursor\\"], - HookKind::Claude => [".claude/", ".claude\\"], - }; + let markers = [".cursor/", ".cursor\\"]; for marker in markers { if let Some(position) = token.find(marker) { diff --git a/src/install.rs 
b/src/install.rs index 0f30133..cccf909 100644 --- a/src/install.rs +++ b/src/install.rs @@ -4,7 +4,7 @@ use crate::compose::{ compose_markdown, read_source_file, write_composed_file, ComposeOptions, ComposedSource, }; use crate::error::{ApsError, Result}; -use crate::hooks::{validate_claude_hooks, validate_cursor_hooks}; +use crate::hooks::validate_cursor_hooks; use crate::lockfile::{LockedEntry, Lockfile}; use crate::manifest::{AssetKind, Entry}; use crate::sources::{clone_at_commit, get_remote_commit_sha, GitInfo, ResolvedSource}; @@ -381,7 +381,6 @@ pub fn install_entry( AssetKind::CursorRules | AssetKind::CursorHooks | AssetKind::CursorSkillsRoot - | AssetKind::ClaudeHooks | AssetKind::AgentSkill => { // For directory assets with symlinks, we add files to the directory // without backing up existing content from other sources @@ -390,7 +389,7 @@ pub fn install_entry( }; if should_check_conflict { - if matches!(entry.kind, AssetKind::CursorHooks | AssetKind::ClaudeHooks) { + if matches!(entry.kind, AssetKind::CursorHooks) { let mut conflicts = collect_hook_conflicts(&resolved.source_path, &dest_path)?; if let Some((source_config, dest_config)) = hooks_config_paths(&entry.kind, &resolved.source_path, &dest_path)? @@ -431,12 +430,6 @@ pub fn install_entry( options.strict, )?); } - if entry.kind == AssetKind::ClaudeHooks { - warnings.extend(validate_claude_hooks( - &resolved.source_path, - options.strict, - )?); - } for warning in &warnings { println!("Warning: {}", warning); } @@ -454,7 +447,7 @@ pub fn install_entry( )? 
}; - if !options.dry_run && matches!(entry.kind, AssetKind::CursorHooks | AssetKind::ClaudeHooks) { + if !options.dry_run && matches!(entry.kind, AssetKind::CursorHooks) { sync_hooks_config( &entry.kind, &resolved.source_path, @@ -628,7 +621,6 @@ fn install_asset( AssetKind::CursorRules | AssetKind::CursorHooks | AssetKind::CursorSkillsRoot - | AssetKind::ClaudeHooks | AssetKind::AgentSkill => { if use_symlink { if include.is_empty() { @@ -666,7 +658,7 @@ fn install_asset( } else { // Copy behavior if include.is_empty() { - if matches!(kind, AssetKind::CursorHooks | AssetKind::ClaudeHooks) { + if matches!(kind, AssetKind::CursorHooks) { copy_directory_merge(source, dest)?; } else { copy_directory(source, dest)?; @@ -676,7 +668,7 @@ fn install_asset( let items = filter_by_prefix(source, include)?; // Ensure dest exists - if matches!(kind, AssetKind::CursorHooks | AssetKind::ClaudeHooks) { + if matches!(kind, AssetKind::CursorHooks) { if !dest.exists() { std::fs::create_dir_all(dest).map_err(|e| { ApsError::io(e, format!("Failed to create directory {:?}", dest)) @@ -708,7 +700,7 @@ fn install_asset( })?; let item_dest = dest.join(item_name); if item.is_dir() { - if matches!(kind, AssetKind::CursorHooks | AssetKind::ClaudeHooks) { + if matches!(kind, AssetKind::CursorHooks) { copy_directory_merge(&item, &item_dest)?; } else { copy_directory(&item, &item_dest)?; @@ -1085,11 +1077,9 @@ fn hooks_config_paths( source_hooks_dir: &Path, dest_hooks_dir: &Path, ) -> Result> { - let filename = match kind { - AssetKind::CursorHooks => "hooks.json", - AssetKind::ClaudeHooks => "settings.json", - _ => return Ok(None), - }; + if !matches!(kind, AssetKind::CursorHooks) { + return Ok(None); + } let source_parent = source_hooks_dir @@ -1104,8 +1094,8 @@ fn hooks_config_paths( })?; Ok(Some(( - source_parent.join(filename), - dest_parent.join(filename), + source_parent.join("hooks.json"), + dest_parent.join("hooks.json"), ))) } diff --git a/src/manifest.rs b/src/manifest.rs index 
9fe9599..691f962 100644 --- a/src/manifest.rs +++ b/src/manifest.rs @@ -95,8 +95,6 @@ pub enum AssetKind { CursorHooks, /// Cursor skills root directory CursorSkillsRoot, - /// Claude hooks directory - ClaudeHooks, /// AGENTS.md file AgentsMd, /// Agent skill directory (per agentskills.io spec) @@ -112,7 +110,6 @@ impl AssetKind { AssetKind::CursorRules => PathBuf::from(".cursor/rules"), AssetKind::CursorHooks => PathBuf::from(".cursor/hooks"), AssetKind::CursorSkillsRoot => PathBuf::from(".cursor/skills"), - AssetKind::ClaudeHooks => PathBuf::from(".claude/hooks"), AssetKind::AgentsMd => PathBuf::from("AGENTS.md"), AssetKind::AgentSkill => PathBuf::from(".claude/skills"), AssetKind::CompositeAgentsMd => PathBuf::from("AGENTS.md"), @@ -126,7 +123,6 @@ impl AssetKind { "cursor_rules" => Ok(AssetKind::CursorRules), "cursor_hooks" => Ok(AssetKind::CursorHooks), "cursor_skills_root" => Ok(AssetKind::CursorSkillsRoot), - "claude_hooks" => Ok(AssetKind::ClaudeHooks), "agents_md" => Ok(AssetKind::AgentsMd), "agent_skill" => Ok(AssetKind::AgentSkill), "composite_agents_md" => Ok(AssetKind::CompositeAgentsMd), diff --git a/tests/cli.rs b/tests/cli.rs index 033ede0..353915d 100644 --- a/tests/cli.rs +++ b/tests/cli.rs @@ -445,70 +445,6 @@ fn sync_cursor_hooks_copies_directory_and_sets_exec() { } } -#[test] -fn sync_claude_hooks_copies_directory_and_sets_exec() { - let temp = assert_fs::TempDir::new().unwrap(); - - let source = temp.child("source"); - source.create_dir_all().unwrap(); - source.child(".claude").create_dir_all().unwrap(); - source - .child(".claude/scripts/start.sh") - .write_str("echo start\n") - .unwrap(); - source - .child(".claude/settings.json") - .write_str( - r#"{ - "hooks": { - "onSessionStart": [ - { "command": "bash $CLAUDE_PROJECT_DIR/.claude/scripts/start.sh" } - ] - } -}"#, - ) - .unwrap(); - - let project = temp.child("project"); - project.create_dir_all().unwrap(); - - let manifest = format!( - r#"entries: - - id: claude-hooks - kind: 
claude_hooks - source: - type: filesystem - root: {} - path: .claude - symlink: false - dest: ./.claude -"#, - source.path().display() - ); - - project.child("aps.yaml").write_str(&manifest).unwrap(); - - aps().arg("sync").current_dir(&project).assert().success(); - - project - .child(".claude/scripts/start.sh") - .assert(predicate::path::exists()); - // Verify config is also synced to parent dir - project - .child(".claude/settings.json") - .assert(predicate::path::exists()); - - #[cfg(unix)] - { - use std::os::unix::fs::PermissionsExt; - let mode = std::fs::metadata(project.path().join(".claude/scripts/start.sh")) - .unwrap() - .permissions() - .mode(); - assert_ne!(mode & 0o100, 0); - } -} - #[test] fn validate_cursor_hooks_strict_rejects_missing_config() { let temp = assert_fs::TempDir::new().unwrap(); @@ -548,45 +484,6 @@ fn validate_cursor_hooks_strict_rejects_missing_config() { .stderr(predicate::str::contains("hooks.json")); } -#[test] -fn validate_claude_hooks_strict_rejects_missing_config() { - let temp = assert_fs::TempDir::new().unwrap(); - - let source = temp.child("source"); - source.create_dir_all().unwrap(); - source.child(".claude").create_dir_all().unwrap(); - source - .child(".claude/scripts/start.sh") - .write_str("echo start\n") - .unwrap(); - - let project = temp.child("project"); - project.create_dir_all().unwrap(); - - let manifest = format!( - r#"entries: - - id: claude-hooks - kind: claude_hooks - source: - type: filesystem - root: {} - path: .claude - symlink: false - dest: ./.claude -"#, - source.path().display() - ); - - project.child("aps.yaml").write_str(&manifest).unwrap(); - - aps() - .args(["validate", "--strict"]) - .current_dir(&project) - .assert() - .failure() - .stderr(predicate::str::contains("settings.json")); -} - #[test] fn validate_cursor_hooks_strict_accepts_valid() { let temp = assert_fs::TempDir::new().unwrap(); @@ -637,56 +534,6 @@ fn validate_cursor_hooks_strict_accepts_valid() { .success(); } -#[test] -fn 
validate_claude_hooks_strict_accepts_valid() { - let temp = assert_fs::TempDir::new().unwrap(); - - let source = temp.child("source"); - source.create_dir_all().unwrap(); - source.child(".claude").create_dir_all().unwrap(); - source - .child(".claude/scripts/start.sh") - .write_str("echo start\n") - .unwrap(); - source - .child(".claude/settings.json") - .write_str( - r#"{ - "hooks": { - "onSessionStart": [ - { "command": "bash .claude/scripts/start.sh" } - ] - } -}"#, - ) - .unwrap(); - - let project = temp.child("project"); - project.create_dir_all().unwrap(); - - let manifest = format!( - r#"entries: - - id: claude-hooks - kind: claude_hooks - source: - type: filesystem - root: {} - path: .claude - symlink: false - dest: ./.claude -"#, - source.path().display() - ); - - project.child("aps.yaml").write_str(&manifest).unwrap(); - - aps() - .args(["validate", "--strict"]) - .current_dir(&project) - .assert() - .success(); -} - // ============================================================================ // Verbose Flag Tests // ============================================================================ @@ -751,6 +598,31 @@ fn duplicate_entry_ids_detected() { .stderr(predicate::str::contains("Duplicate")); } +#[test] +fn manifest_rejects_claude_hooks_kind() { + let temp = assert_fs::TempDir::new().unwrap(); + + let manifest = r#"entries: + - id: legacy-claude-hooks + kind: claude_hooks + source: + type: filesystem + root: /tmp + path: .claude +"#; + + temp.child("aps.yaml").write_str(manifest).unwrap(); + + aps() + .arg("validate") + .current_dir(&temp) + .assert() + .failure() + .stderr(predicate::str::contains("Failed to parse manifest")) + .stderr(predicate::str::contains("claude_hooks")) + .stderr(predicate::str::contains("cursor_hooks")); +} + // ============================================================================ // Upgrade Flag Tests (Lock-Respecting Behavior) // ============================================================================ From 
55c9f66b953c9928adcc6642275d17e84f64e7a3 Mon Sep 17 00:00:00 2001 From: Melioo Date: Tue, 3 Feb 2026 09:29:37 +0100 Subject: [PATCH 20/39] fix: PR Changes Fixed Symlink remove check based on CodeRabbit. --- src/install.rs | 22 ++++++++++++---------- 1 file changed, 12 insertions(+), 10 deletions(-) diff --git a/src/install.rs b/src/install.rs index cccf909..fb81abe 100644 --- a/src/install.rs +++ b/src/install.rs @@ -976,16 +976,18 @@ fn copy_directory_merge(src: &Path, dst: &Path) -> Result<()> { let meta = dest_path.symlink_metadata().map_err(|e| { ApsError::io(e, format!("Failed to read metadata for {:?}", dest_path)) })?; - if meta.file_type().is_symlink() || dest_path.is_file() { - if dest_path.is_dir() { - std::fs::remove_dir_all(&dest_path).map_err(|e| { - ApsError::io(e, format!("Failed to remove directory {:?}", dest_path)) - })?; - } else { - std::fs::remove_file(&dest_path).map_err(|e| { - ApsError::io(e, format!("Failed to remove file {:?}", dest_path)) - })?; - } + if meta.file_type().is_symlink() { + std::fs::remove_file(&dest_path).map_err(|e| { + ApsError::io(e, format!("Failed to remove file {:?}", dest_path)) + })?; + } else if meta.file_type().is_dir() { + std::fs::remove_dir_all(&dest_path).map_err(|e| { + ApsError::io(e, format!("Failed to remove directory {:?}", dest_path)) + })?; + } else { + std::fs::remove_file(&dest_path).map_err(|e| { + ApsError::io(e, format!("Failed to remove file {:?}", dest_path)) + })?; } } std::fs::create_dir_all(&dest_path).map_err(|e| { From 5dd7439076ad403fcfc6545bd1fe6b8475aa5361 Mon Sep 17 00:00:00 2001 From: Melioo Date: Wed, 4 Feb 2026 09:14:46 +0100 Subject: [PATCH 21/39] fix: Remove Version Bump Removed bumped version configuration, as the `release-please` will take care of it. 
--- .release-please-manifest.json | 2 +- Cargo.lock | 2 +- Cargo.toml | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.release-please-manifest.json b/.release-please-manifest.json index 2be9c43..a46f218 100644 --- a/.release-please-manifest.json +++ b/.release-please-manifest.json @@ -1,3 +1,3 @@ { - ".": "0.2.0" + ".": "0.1.8" } diff --git a/Cargo.lock b/Cargo.lock index 6cd944a..1007c02 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -87,7 +87,7 @@ dependencies = [ [[package]] name = "aps" -version = "0.2.0" +version = "0.1.8" dependencies = [ "assert_cmd", "assert_fs", diff --git a/Cargo.toml b/Cargo.toml index c57cbdd..b48a2a5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "aps" -version = "0.2.0" +version = "0.1.8" edition = "2021" description = "Manifest-driven CLI for syncing agentic assets" authors = ["APS Contributors"] From 861fc596fbc1a0dc9ec1b1cc205247ed07120b67 Mon Sep 17 00:00:00 2001 From: Melioo Date: Wed, 4 Feb 2026 09:30:53 +0100 Subject: [PATCH 22/39] feat: Lint issues Finalization of the pull request. Upgraded Trunk packages. Fixed README formatting. --- .trunk/trunk.yaml | 4 +-- README.md | 64 ++++++++++++++++++++++------------------------- src/commands.rs | 2 +- src/main.rs | 2 +- 4 files changed, 34 insertions(+), 38 deletions(-) diff --git a/.trunk/trunk.yaml b/.trunk/trunk.yaml index db52ecc..a65fd09 100644 --- a/.trunk/trunk.yaml +++ b/.trunk/trunk.yaml @@ -23,12 +23,12 @@ lint: - AGENTS.md enabled: - checkov@3.2.497 - - clippy@1.82.0 + - clippy@1.83.0 - git-diff-check - markdownlint@0.47.0 - osv-scanner@2.3.2 - prettier@3.8.0 - - rustfmt@1.82.0 + - rustfmt@1.83.0 - taplo@0.10.0 - trufflehog@3.92.5 - yamllint@1.38.0 diff --git a/README.md b/README.md index aa7970d..c7201a2 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,4 @@ - - -
+
# Agentic Prompt Sync (aps) @@ -74,7 +72,7 @@ cargo build --release To update `aps` to the latest version, use the same method you used to install it. -### Quick Install (macOS/Linux) +### Quick Update (macOS/Linux) Re-run the install script to download and install the latest version: @@ -82,9 +80,7 @@ Re-run the install script to download and install the latest version: curl -fsSL https://raw.githubusercontent.com/westonplatter/agentic-prompt-sync/main/install.sh | sh ``` -### Download Binary - -Download the latest binary for your platform from the [Releases page](https://github.com/westonplatter/agentic-prompt-sync/releases) and replace your existing installation. +Or download the latest binary for your platform from the [Releases page](https://github.com/westonplatter/agentic-prompt-sync/releases) and replace your existing installation. ### Cargo Update @@ -106,48 +102,48 @@ cargo install aps --force 1. **Initialize a manifest** in your project: -```bash -aps init -``` + ```bash + aps init + ``` -This creates a `aps.yaml` manifest file with an example entry. + This creates a `aps.yaml` manifest file with an example entry. 2. 
**Add skills directly from GitHub URLs:** -```bash -# Add a skill from a GitHub URL - automatically syncs the skill -aps add https://github.com/hashicorp/agent-skills/blob/main/terraform/module-generation/skills/refactor-module/SKILL.md + ```bash + # Add a skill from a GitHub URL - automatically syncs the skill + aps add https://github.com/hashicorp/agent-skills/blob/main/terraform/module-generation/skills/refactor-module/SKILL.md -# Or use the folder URL (SKILL.md is auto-detected) -aps add https://github.com/hashicorp/agent-skills/tree/main/terraform/module-generation/skills/refactor-module -``` + # Or use the folder URL (SKILL.md is auto-detected) + aps add https://github.com/hashicorp/agent-skills/tree/main/terraform/module-generation/skills/refactor-module + ``` -This parses the GitHub URL, adds an entry to `aps.yaml`, and syncs **only that skill** immediately (other entries are not affected). + This parses the GitHub URL, adds an entry to `aps.yaml`, and syncs **only that skill** immediately (other entries are not affected). 3. **Or manually edit the manifest** to define your assets: -```yaml -entries: - - id: my-agents - kind: agents_md - source: - type: filesystem - root: $HOME - path: personal-generic-AGENTS.md - dest: ./AGENTS.md -``` + ```yaml + entries: + - id: my-agents + kind: agents_md + source: + type: filesystem + root: $HOME + path: personal-generic-AGENTS.md + dest: ./AGENTS.md + ``` 4. **Sync and install** your assets: -```bash -aps sync -``` + ```bash + aps sync + ``` 5. 
**Check status** of synced assets: -```bash -aps status -``` + ```bash + aps status + ``` ## Commands diff --git a/src/commands.rs b/src/commands.rs index 9718b4c..251c21e 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -4,8 +4,8 @@ use crate::cli::{ ValidateArgs, }; use crate::error::{ApsError, Result}; -use crate::hooks::validate_cursor_hooks; use crate::github_url::parse_github_url; +use crate::hooks::validate_cursor_hooks; use crate::install::{install_composite_entry, install_entry, InstallOptions, InstallResult}; use crate::lockfile::{display_status, Lockfile}; use crate::manifest::{ diff --git a/src/main.rs b/src/main.rs index feb6973..f7ea46d 100644 --- a/src/main.rs +++ b/src/main.rs @@ -5,8 +5,8 @@ mod cli; mod commands; mod compose; mod error; -mod hooks; mod github_url; +mod hooks; mod install; mod lockfile; mod manifest; From 38bdfaae14d959d42ce04e7e496f36b3a2472c4b Mon Sep 17 00:00:00 2001 From: Melioo Date: Thu, 5 Feb 2026 10:34:26 +0100 Subject: [PATCH 23/39] feat: Verification Verified correct implementation of Git caching. Upgraded some Trunk linters. 
--- .trunk/trunk.yaml | 4 ++-- README.md | 3 ++- src/install.rs | 50 +++++++++++++++++++++++++++++++---------------- 3 files changed, 37 insertions(+), 20 deletions(-) diff --git a/.trunk/trunk.yaml b/.trunk/trunk.yaml index a65fd09..535382a 100644 --- a/.trunk/trunk.yaml +++ b/.trunk/trunk.yaml @@ -22,7 +22,7 @@ lint: - CHANGELOG.md # Auto-generated by release-please - AGENTS.md enabled: - - checkov@3.2.497 + - checkov@3.2.500 - clippy@1.83.0 - git-diff-check - markdownlint@0.47.0 @@ -30,7 +30,7 @@ lint: - prettier@3.8.0 - rustfmt@1.83.0 - taplo@0.10.0 - - trufflehog@3.92.5 + - trufflehog@3.93.0 - yamllint@1.38.0 actions: enabled: diff --git a/README.md b/README.md index c7201a2..03ae4fe 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,7 @@ - **Composable AGENTS.md** - Merge multiple AGENTS.md files from local or remote sources into one - **Safe installs** - Automatic conflict detection and backup creation - **Deterministic lockfile** - Idempotent syncs that only update when needed +- **Optimized Git Operations** - Smart caching reuses clones and skips unchanged commits for fast syncs ## Installation @@ -176,7 +177,7 @@ cargo install aps --force When you run `aps sync`: -1. **Entries are synced** - Each entry in `aps.yaml` is installed to its destination +1. **Entries are synced** - Each entry in `aps.yaml` is installed to its destination (git sources reuse cached clones for speed) 2. **Stale entries are cleaned** - Entries in the lockfile that no longer exist in `aps.yaml` are automatically removed 3. 
**Lockfile is saved** - The updated lockfile is written to disk diff --git a/src/install.rs b/src/install.rs index 16202bf..e70789a 100644 --- a/src/install.rs +++ b/src/install.rs @@ -202,24 +202,40 @@ pub fn install_entry( _ => None, }; - // If destination exists and commit matches, we're up to date + // If destination exists, verify it matches the locked checksum before skipping if dest_path.exists() { - info!( - "Entry {} is up to date (using locked commit {})", - entry.id, - &locked_commit[..8.min(locked_commit.len())] - ); - let was_symlink = locked.is_symlink; - return Ok(InstallResult { - id: entry.id.clone(), - installed: false, - skipped_no_change: true, - locked_entry: None, - warnings: Vec::new(), - dest_path: dest_path.clone(), - was_symlink, - upgrade_available, - }); + match compute_source_checksum(&dest_path) { + Ok(dest_checksum) if dest_checksum == locked.checksum => { + info!( + "Entry {} is up to date (using locked commit {})", + entry.id, + &locked_commit[..8.min(locked_commit.len())] + ); + let was_symlink = locked.is_symlink; + return Ok(InstallResult { + id: entry.id.clone(), + installed: false, + skipped_no_change: true, + locked_entry: None, + warnings: Vec::new(), + dest_path: dest_path.clone(), + was_symlink, + upgrade_available, + }); + } + Ok(_) => { + debug!( + "Destination for {} differs from locked checksum, reinstalling", + entry.id + ); + } + Err(err) => { + debug!( + "Failed to checksum destination for {}: {}, reinstalling", + entry.id, err + ); + } + } } // Clone at the locked commit From 8d7f80515544c686f19fc5ec03571a3e0c1b68fa Mon Sep 17 00:00:00 2001 From: Melioo Date: Thu, 5 Feb 2026 12:42:57 +0100 Subject: [PATCH 24/39] fix: Clippy CI Error Fixed CI error with Clippy and Errors. 
--- src/install.rs | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/src/install.rs b/src/install.rs index fb81abe..28f745d 100644 --- a/src/install.rs +++ b/src/install.rs @@ -955,14 +955,14 @@ fn copy_directory_merge(src: &Path, dst: &Path) -> Result<()> { for entry in WalkDir::new(&src) { let entry = entry.map_err(|e| { ApsError::io( - std::io::Error::new(std::io::ErrorKind::Other, e), + std::io::Error::other(e), "Failed to traverse source directory", ) })?; let path = entry.path(); let rel = path.strip_prefix(&src).map_err(|e| { ApsError::io( - std::io::Error::new(std::io::ErrorKind::Other, e.to_string()), + std::io::Error::other(e.to_string()), format!("Failed to compute relative path: {}", e), ) })?; @@ -1037,7 +1037,7 @@ fn make_shell_scripts_executable(dir: &Path) -> Result<()> { for entry in WalkDir::new(dir) { let entry = entry.map_err(|e| { ApsError::io( - std::io::Error::new(std::io::ErrorKind::Other, e), + std::io::Error::other(e), "Failed to traverse hooks directory", ) })?; @@ -1159,14 +1159,14 @@ fn collect_hook_conflicts(source: &Path, dest: &Path) -> Result> { for entry in WalkDir::new(source) { let entry = entry.map_err(|e| { ApsError::io( - std::io::Error::new(std::io::ErrorKind::Other, e), + std::io::Error::other(e), "Failed to traverse source directory", ) })?; let path = entry.path(); let rel = path.strip_prefix(source).map_err(|e| { ApsError::io( - std::io::Error::new(std::io::ErrorKind::Other, e.to_string()), + std::io::Error::other(e.to_string()), format!("Failed to compute relative path: {}", e), ) })?; From 6bb49d01d5472afcb85b742406c57bed9762fd83 Mon Sep 17 00:00:00 2001 From: Melioo Date: Thu, 5 Feb 2026 12:48:40 +0100 Subject: [PATCH 25/39] fix: Comment on Directory Merge Fixed doc comment per CodeRabbit. 
--- src/install.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/install.rs b/src/install.rs index 28f745d..227bb52 100644 --- a/src/install.rs +++ b/src/install.rs @@ -941,7 +941,10 @@ fn copy_directory(src: &Path, dst: &Path) -> Result<()> { Ok(()) } -/// Recursively copy a directory without deleting existing destination content. +/// Recursively copy a directory as an overlay. +/// +/// Overwrites destination entries that conflict with source entries while +/// preserving other destination content. fn copy_directory_merge(src: &Path, dst: &Path) -> Result<()> { // Normalize paths to handle trailing slashes let src = normalize_path(src); From fa3c80935b738a3c20167dee6ff7974f4bd73e05 Mon Sep 17 00:00:00 2001 From: Melioo Date: Thu, 5 Feb 2026 12:54:35 +0100 Subject: [PATCH 26/39] fix: Directory Merge Issue Fixed implementation per CodeRabbit. Again. --- src/install.rs | 10 +--------- 1 file changed, 1 insertion(+), 9 deletions(-) diff --git a/src/install.rs b/src/install.rs index 227bb52..09625c3 100644 --- a/src/install.rs +++ b/src/install.rs @@ -979,15 +979,7 @@ fn copy_directory_merge(src: &Path, dst: &Path) -> Result<()> { let meta = dest_path.symlink_metadata().map_err(|e| { ApsError::io(e, format!("Failed to read metadata for {:?}", dest_path)) })?; - if meta.file_type().is_symlink() { - std::fs::remove_file(&dest_path).map_err(|e| { - ApsError::io(e, format!("Failed to remove file {:?}", dest_path)) - })?; - } else if meta.file_type().is_dir() { - std::fs::remove_dir_all(&dest_path).map_err(|e| { - ApsError::io(e, format!("Failed to remove directory {:?}", dest_path)) - })?; - } else { + if meta.file_type().is_symlink() || meta.file_type().is_file() { std::fs::remove_file(&dest_path).map_err(|e| { ApsError::io(e, format!("Failed to remove file {:?}", dest_path)) })?; From e980443d32109645d35bb98de15042b4759a49fb Mon Sep 17 00:00:00 2001 From: Melioo Date: Thu, 5 Feb 2026 13:01:46 +0100 Subject: [PATCH 27/39] fix: Directory Merge 
Conflict Detect Fixed handling conflict on Symlink per CodeRabbit. --- src/install.rs | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/install.rs b/src/install.rs index 09625c3..5d76aa0 100644 --- a/src/install.rs +++ b/src/install.rs @@ -404,6 +404,15 @@ pub fn install_entry( conflicts.push(dest_config); } } + if dest_path.exists() { + let is_symlink = dest_path + .symlink_metadata() + .map(|m| m.file_type().is_symlink()) + .unwrap_or(false); + if is_symlink || !dest_path.is_dir() { + conflicts.push(dest_path.clone()); + } + } conflicts.sort(); conflicts.dedup(); let should_proceed = From af8b52890753ae288030cc65c266ac34769b9cdf Mon Sep 17 00:00:00 2001 From: Melioo Date: Thu, 5 Feb 2026 13:11:58 +0100 Subject: [PATCH 28/39] fix: Hook Install Logic Fixed install logic with checking destination per CodeRabbit. --- src/install.rs | 27 ++++++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/src/install.rs b/src/install.rs index 5d76aa0..5729257 100644 --- a/src/install.rs +++ b/src/install.rs @@ -668,6 +668,19 @@ fn install_asset( // Copy behavior if include.is_empty() { if matches!(kind, AssetKind::CursorHooks) { + if dest.exists() { + let meta = dest.symlink_metadata().map_err(|e| { + ApsError::io(e, format!("Failed to read metadata for {:?}", dest)) + })?; + if meta.file_type().is_symlink() || meta.file_type().is_file() { + std::fs::remove_file(dest).map_err(|e| { + ApsError::io(e, format!("Failed to remove file {:?}", dest)) + })?; + } + } + std::fs::create_dir_all(dest).map_err(|e| { + ApsError::io(e, format!("Failed to create directory {:?}", dest)) + })?; copy_directory_merge(source, dest)?; } else { copy_directory(source, dest)?; @@ -678,11 +691,19 @@ fn install_asset( // Ensure dest exists if matches!(kind, AssetKind::CursorHooks) { - if !dest.exists() { - std::fs::create_dir_all(dest).map_err(|e| { - ApsError::io(e, format!("Failed to create directory {:?}", dest)) + if dest.exists() { + let meta = 
dest.symlink_metadata().map_err(|e| { + ApsError::io(e, format!("Failed to read metadata for {:?}", dest)) })?; + if meta.file_type().is_symlink() || meta.file_type().is_file() { + std::fs::remove_file(dest).map_err(|e| { + ApsError::io(e, format!("Failed to remove file {:?}", dest)) + })?; + } } + std::fs::create_dir_all(dest).map_err(|e| { + ApsError::io(e, format!("Failed to create directory {:?}", dest)) + })?; } else { if dest.exists() { std::fs::remove_dir_all(dest).map_err(|e| { From 5cc831bb26f239ccfd8e8723ec34f6fece86d3e9 Mon Sep 17 00:00:00 2001 From: Melioo Date: Thu, 5 Feb 2026 13:22:42 +0100 Subject: [PATCH 29/39] fix: Hooks Dir Walk Fixed walking through Hooks destination with symlinks per CodeRabbit. --- src/install.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/install.rs b/src/install.rs index 5729257..297b92c 100644 --- a/src/install.rs +++ b/src/install.rs @@ -985,7 +985,7 @@ fn copy_directory_merge(src: &Path, dst: &Path) -> Result<()> { .map_err(|e| ApsError::io(e, format!("Failed to create directory {:?}", dst)))?; } - for entry in WalkDir::new(&src) { + for entry in WalkDir::new(&src).follow_links(true) { let entry = entry.map_err(|e| { ApsError::io( std::io::Error::other(e), From a027edfed5a3a2c35a44486a3419a62728c62977 Mon Sep 17 00:00:00 2001 From: Melioo Date: Thu, 5 Feb 2026 13:31:50 +0100 Subject: [PATCH 30/39] fix: Hooks Dir Walk pt.2 Fixed again based on CodeRabbit. 
--- src/install.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/install.rs b/src/install.rs index 297b92c..4f86c91 100644 --- a/src/install.rs +++ b/src/install.rs @@ -1181,7 +1181,7 @@ fn sync_hooks_config( fn collect_hook_conflicts(source: &Path, dest: &Path) -> Result> { let mut conflicts = Vec::new(); - for entry in WalkDir::new(source) { + for entry in WalkDir::new(source).follow_links(true) { let entry = entry.map_err(|e| { ApsError::io( std::io::Error::other(e), From ae0e6637e2681fd783b397ca76f1e71e26c06979 Mon Sep 17 00:00:00 2001 From: Melioo Date: Thu, 5 Feb 2026 13:42:36 +0100 Subject: [PATCH 31/39] fix: Permissions & Pre-copy Cleanup Fixed based on CodeRabbit. --- src/install.rs | 25 ++++++++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/src/install.rs b/src/install.rs index 4f86c91..63fd085 100644 --- a/src/install.rs +++ b/src/install.rs @@ -736,6 +736,29 @@ fn install_asset( copy_directory(&item, &item_dest)?; } } else { + if item_dest.exists() { + let meta = item_dest.symlink_metadata().map_err(|e| { + ApsError::io( + e, + format!("Failed to read metadata for {:?}", item_dest), + ) + })?; + if meta.file_type().is_symlink() { + std::fs::remove_file(&item_dest).map_err(|e| { + ApsError::io( + e, + format!("Failed to remove file {:?}", item_dest), + ) + })?; + } else if item_dest.is_dir() { + std::fs::remove_dir_all(&item_dest).map_err(|e| { + ApsError::io( + e, + format!("Failed to remove directory {:?}", item_dest), + ) + })?; + } + } std::fs::copy(&item, &item_dest).map_err(|e| { ApsError::io(e, format!("Failed to copy {:?}", item)) })?; @@ -1078,7 +1101,7 @@ fn make_shell_scripts_executable(dir: &Path) -> Result<()> { })?; let mut permissions = metadata.permissions(); let mode = permissions.mode(); - let new_mode = mode | 0o100 | 0o010; + let new_mode = mode | 0o111; if new_mode != mode { permissions.set_mode(new_mode); std::fs::set_permissions(entry.path(), permissions).map_err(|e| { From 
d374572466ceab0e6c2bef23eb31b22cf9394dfb Mon Sep 17 00:00:00 2001 From: Weston Platter Date: Fri, 6 Feb 2026 00:05:15 -0700 Subject: [PATCH 32/39] doc: Remove ClaudeHooks from AssetKind enum --- docs/architecture.md | 1 - 1 file changed, 1 deletion(-) diff --git a/docs/architecture.md b/docs/architecture.md index 1344f03..0168fa7 100644 --- a/docs/architecture.md +++ b/docs/architecture.md @@ -197,7 +197,6 @@ pub enum AssetKind { CursorRules, CursorHooks, CursorSkillsRoot, - ClaudeHooks, AgentsMd, AgentSkill, CompositeAgentsMd, From 3ead5bfcb0f71d8f1f5c3feae556b17d55b935ef Mon Sep 17 00:00:00 2001 From: Melioo Date: Fri, 6 Feb 2026 09:12:55 +0100 Subject: [PATCH 33/39] feat: APS Add Improvements Added better integration of `aps add` command with processing SSH urls and more customizability. --- README.md | 22 +++-- src/cli.rs | 39 ++++----- src/commands.rs | 208 ++++++++++++++++++++++++++++++++++++++-------- src/error.rs | 12 +++ src/github_url.rs | 176 +++++++++++++++++++++++++++++++++++++++ tests/cli.rs | 124 ++++++++++++++++++++++++--- 6 files changed, 507 insertions(+), 74 deletions(-) diff --git a/README.md b/README.md index 03ae4fe..6cb6114 100644 --- a/README.md +++ b/README.md @@ -109,17 +109,23 @@ cargo install aps --force This creates a `aps.yaml` manifest file with an example entry. -2. **Add skills directly from GitHub URLs:** +2. 
**Add assets directly from git repositories:** ```bash # Add a skill from a GitHub URL - automatically syncs the skill - aps add https://github.com/hashicorp/agent-skills/blob/main/terraform/module-generation/skills/refactor-module/SKILL.md + aps add agent_skill https://github.com/hashicorp/agent-skills/blob/main/terraform/module-generation/skills/refactor-module/SKILL.md # Or use the folder URL (SKILL.md is auto-detected) - aps add https://github.com/hashicorp/agent-skills/tree/main/terraform/module-generation/skills/refactor-module + aps add agent_skill https://github.com/hashicorp/agent-skills/tree/main/terraform/module-generation/skills/refactor-module + + # Add Cursor rules from a private repo (SSH) + aps add cursor_rules git@github.com:org/private-rules.git + + # Add a private skill with an explicit path + aps add agent_skill git@github.com:org/private-skills.git --path skills/refactor-module ``` - This parses the GitHub URL, adds an entry to `aps.yaml`, and syncs **only that skill** immediately (other entries are not affected). + This parses the repository identifier, adds an entry to `aps.yaml`, and syncs **only that asset** immediately (other entries are not affected). 3. 
**Or manually edit the manifest** to define your assets: @@ -151,7 +157,7 @@ cargo install aps --force | Command | Description | | -------------- | ------------------------------------------------- | | `aps init` | Create a new manifest file and update .gitignore | -| `aps add` | Add a skill from a GitHub URL and sync it | +| `aps add` | Add an asset from a git repo and sync it | | `aps sync` | Sync all entries from manifest and install assets | | `aps validate` | Validate manifest schema and check sources | | `aps status` | Display last sync information from lockfile | @@ -163,8 +169,10 @@ cargo install aps --force ### Add Options -- `--id ` - Custom entry ID (defaults to skill folder name) -- `--kind ` - Asset kind: `agent-skill`, `cursor-rules`, `cursor-skills-root`, `agents-md` (default: `agent-skill`) +- `aps add ` - Asset types: `agent_skill`, `cursor_rules`, `cursor_skills_root`, `agents_md` +- `--id ` - Custom entry ID (defaults to repo or path name) +- `--path ` - Path within the repository (required for `agent_skill` when using SSH/HTTPS repo URLs) +- `--ref ` - Git ref (branch/tag/commit) to use - `--no-sync` - Only add to manifest, don't sync immediately ### Sync Options diff --git a/src/cli.rs b/src/cli.rs index fce0f3a..6e1c797 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -23,7 +23,7 @@ pub enum Commands { /// Initialize a new manifest file Init(InitArgs), - /// Add a skill from a GitHub URL to the manifest + /// Add an asset from a git repository to the manifest Add(AddArgs), /// Sync and install assets from manifest sources @@ -52,18 +52,26 @@ pub struct InitArgs { #[derive(Parser, Debug)] pub struct AddArgs { - /// GitHub URL to a skill folder (e.g., https://github.com/owner/repo/blob/main/path/to/skill) - /// or direct URL to a SKILL.md file - #[arg(value_name = "URL")] - pub url: String, + /// Asset type and repository identifier + /// + /// Asset types: agent_skill, cursor_rules, cursor_skills_root, agents_md + /// + /// New syntax: aps add + /// 
Legacy syntax: aps add (defaults to agent_skill) + #[arg(value_name = "ASSET_TYPE REPO", num_args = 1..=2)] + pub targets: Vec, - /// Custom entry ID (defaults to skill folder name) + /// Custom entry ID (defaults to repo or path name) #[arg(long)] pub id: Option, - /// Asset kind (defaults to agent_skill) - #[arg(long, value_enum, default_value = "agent-skill")] - pub kind: AddAssetKind, + /// Path within the repository (overrides any path from a GitHub URL) + #[arg(long)] + pub path: Option, + + /// Git ref (branch/tag/commit) to use (defaults to auto for repo URLs) + #[arg(long = "ref")] + pub git_ref: Option, /// Path to the manifest file #[arg(long)] @@ -74,19 +82,6 @@ pub struct AddArgs { pub no_sync: bool, } -#[derive(ValueEnum, Clone, Debug, Default)] -pub enum AddAssetKind { - #[default] - #[value(name = "agent-skill")] - AgentSkill, - #[value(name = "cursor-rules")] - CursorRules, - #[value(name = "cursor-skills-root")] - CursorSkillsRoot, - #[value(name = "agents-md")] - AgentsMd, -} - #[derive(ValueEnum, Clone, Debug, Default)] pub enum ManifestFormat { #[default] diff --git a/src/commands.rs b/src/commands.rs index 1fadd9c..cd441bc 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -1,10 +1,9 @@ use crate::catalog::Catalog; use crate::cli::{ - AddArgs, AddAssetKind, CatalogGenerateArgs, InitArgs, ManifestFormat, StatusArgs, SyncArgs, - ValidateArgs, + AddArgs, CatalogGenerateArgs, InitArgs, ManifestFormat, StatusArgs, SyncArgs, ValidateArgs, }; use crate::error::{ApsError, Result}; -use crate::github_url::parse_github_url; +use crate::github_url::parse_repo_identifier; use crate::hooks::validate_cursor_hooks; use crate::install::{install_composite_entry, install_entry, InstallOptions, InstallResult}; use crate::lockfile::{display_status, Lockfile}; @@ -113,47 +112,77 @@ fn update_gitignore(manifest_path: &Path) -> Result<()> { /// Execute the `aps add` command pub fn cmd_add(args: AddArgs) -> Result<()> { - // Parse the GitHub URL - let parsed = 
parse_github_url(&args.url)?; + let (asset_kind, repo_input, legacy_used) = parse_add_targets(&args.targets)?; - // Determine the entry ID - let entry_id = args.id.unwrap_or_else(|| { - parsed - .skill_name() - .map(|s| s.to_string()) - .unwrap_or_else(|| "unnamed-skill".to_string()) - }); + if legacy_used { + eprintln!( + "Deprecated: `aps add ` is now `aps add `.\n\ + Example: aps add agent_skill {}\n", + repo_input + ); + } - // Convert CLI asset kind to manifest asset kind - let asset_kind = match args.kind { - AddAssetKind::AgentSkill => AssetKind::AgentSkill, - AddAssetKind::CursorRules => AssetKind::CursorRules, - AddAssetKind::CursorSkillsRoot => AssetKind::CursorSkillsRoot, - AddAssetKind::AgentsMd => AssetKind::AgentsMd, - }; + let parsed = parse_repo_identifier(&repo_input)?; + + if parsed.git_ref.is_some() && args.git_ref.is_some() { + return Err(ApsError::InvalidAddArguments { + message: "Git ref is already specified in the URL; remove --ref or use a repo URL" + .to_string(), + }); + } + + if parsed.path.is_some() && args.path.is_some() { + return Err(ApsError::InvalidAddArguments { + message: "Path is already specified in the URL; remove --path or use a repo URL" + .to_string(), + }); + } + + let git_ref = args + .git_ref + .or(parsed.git_ref) + .unwrap_or_else(|| "auto".to_string()); + + let mut path = args.path.or(parsed.path); + + if asset_kind == AssetKind::AgentSkill { + let raw_path = path.ok_or_else(|| ApsError::MissingAddPath { + asset_type: "agent_skill".to_string(), + hint: "Use a GitHub blob/tree URL or pass --path to the skill folder".to_string(), + })?; + path = Some(normalize_skill_path(&raw_path)); + } else if path.is_none() { + path = default_path_for_kind(&asset_kind); + } - // Get the skill path (strips SKILL.md if present) - let skill_path = parsed.skill_path().to_string(); + let entry_id = args + .id + .unwrap_or_else(|| derive_entry_id(&asset_kind, &parsed.repo_url, path.as_deref())); + + let dest = if asset_kind == 
AssetKind::AgentSkill { + Some(format!( + "{}/{}/", + asset_kind + .default_dest() + .to_string_lossy() + .trim_end_matches('/'), + entry_id + )) + } else { + None + }; - // Create the new entry let new_entry = Entry { id: entry_id.clone(), kind: asset_kind.clone(), source: Some(Source::Git { repo: parsed.repo_url.clone(), - r#ref: parsed.git_ref.clone(), + r#ref: git_ref, shallow: true, - path: Some(skill_path.clone()), + path, }), sources: Vec::new(), - dest: Some(format!( - "{}/{}/", - asset_kind - .default_dest() - .to_string_lossy() - .trim_end_matches('/'), - entry_id - )), + dest, include: Vec::new(), }; @@ -199,8 +228,10 @@ pub fn cmd_add(args: AddArgs) -> Result<()> { strict: false, upgrade: false, })?; - } else { + } else if asset_kind == AssetKind::AgentSkill { println!("Run `aps sync` to install the skill."); + } else { + println!("Run `aps sync` to install the asset."); } return Ok(()); @@ -248,13 +279,122 @@ pub fn cmd_add(args: AddArgs) -> Result<()> { strict: false, upgrade: false, })?; - } else { + } else if asset_kind == AssetKind::AgentSkill { println!("Run `aps sync` to install the skill."); + } else { + println!("Run `aps sync` to install the asset."); } Ok(()) } +fn parse_add_targets(targets: &[String]) -> Result<(AssetKind, String, bool)> { + match targets.len() { + 1 => { + let normalized = targets[0].trim().replace('-', "_"); + if AssetKind::from_str(&normalized).is_ok() { + return Err(ApsError::InvalidAddArguments { + message: + "Missing repository identifier. 
Usage: aps add " + .to_string(), + }); + } + Ok((AssetKind::AgentSkill, targets[0].clone(), true)) + } + 2 => { + let kind = parse_asset_kind(&targets[0])?; + Ok((kind, targets[1].clone(), false)) + } + _ => Err(ApsError::InvalidAddArguments { + message: "Expected either or " + .to_string(), + }), + } +} + +fn parse_asset_kind(input: &str) -> Result { + let normalized = input.trim().replace('-', "_"); + let kind = AssetKind::from_str(&normalized)?; + if !matches!( + kind, + AssetKind::AgentSkill + | AssetKind::CursorRules + | AssetKind::CursorSkillsRoot + | AssetKind::AgentsMd + ) { + return Err(ApsError::InvalidAddArguments { + message: format!( + "Asset type '{}' is not supported by `aps add` yet. Supported types: agent_skill, cursor_rules, cursor_skills_root, agents_md", + normalized + ), + }); + } + Ok(kind) +} + +fn normalize_skill_path(path: &str) -> String { + let path = path.trim(); + if path.eq_ignore_ascii_case("skill.md") { + return "".to_string(); + } + if let Some(stripped) = path.strip_suffix("/SKILL.md") { + return stripped.to_string(); + } + if let Some(stripped) = path.strip_suffix("/skill.md") { + return stripped.to_string(); + } + path.to_string() +} + +fn default_path_for_kind(kind: &AssetKind) -> Option { + match kind { + AssetKind::CursorRules => Some(".cursor/rules".to_string()), + AssetKind::CursorHooks => Some(".cursor".to_string()), + AssetKind::CursorSkillsRoot => Some(".cursor/skills".to_string()), + AssetKind::AgentsMd => Some("AGENTS.md".to_string()), + AssetKind::AgentSkill | AssetKind::CompositeAgentsMd => None, + } +} + +fn derive_entry_id(kind: &AssetKind, repo_url: &str, path: Option<&str>) -> String { + match kind { + AssetKind::AgentSkill => path + .and_then(|p| p.rsplit('/').next()) + .filter(|p| !p.is_empty()) + .map(|s| s.to_string()) + .or_else(|| repo_basename(repo_url)) + .unwrap_or_else(|| "unnamed-skill".to_string()), + _ => repo_basename(repo_url) + .or_else(|| { + path.and_then(|p| p.rsplit('/').next()) + .filter(|p| 
!p.is_empty()) + .map(|s| s.to_string()) + }) + .unwrap_or_else(|| "unnamed-asset".to_string()), + } +} + +fn repo_basename(repo_url: &str) -> Option { + if let Ok(parsed) = url::Url::parse(repo_url) { + let name = parsed + .path_segments() + .and_then(|segments| segments.filter(|s| !s.is_empty()).last()) + .map(|s| s.to_string())?; + return Some(name.trim_end_matches(".git").to_string()); + } + + if let Some((_, path)) = repo_url.split_once(':') { + let name = path.rsplit('/').next().map(|s| s.to_string())?; + return Some(name.trim_end_matches(".git").to_string()); + } + + let name = std::path::Path::new(repo_url) + .file_name() + .and_then(|s| s.to_str()) + .map(|s| s.to_string())?; + Some(name.trim_end_matches(".git").to_string()) +} + /// Execute the `aps sync` command pub fn cmd_sync(args: SyncArgs) -> Result<()> { // Discover and load manifest diff --git a/src/error.rs b/src/error.rs index 89f41db..13b2158 100644 --- a/src/error.rs +++ b/src/error.rs @@ -164,6 +164,18 @@ pub enum ApsError { #[error("Invalid GitHub URL: {url}")] #[diagnostic(code(aps::add::invalid_github_url), help("{reason}"))] InvalidGitHubUrl { url: String, reason: String }, + + #[error("Invalid repository identifier: {value}")] + #[diagnostic(code(aps::add::invalid_repo), help("{reason}"))] + InvalidRepoSpecifier { value: String, reason: String }, + + #[error("Invalid add arguments")] + #[diagnostic(code(aps::add::invalid_args), help("{message}"))] + InvalidAddArguments { message: String }, + + #[error("Asset type '{asset_type}' requires a path within the repository")] + #[diagnostic(code(aps::add::missing_path), help("{hint}"))] + MissingAddPath { asset_type: String, hint: String }, } impl ApsError { diff --git a/src/github_url.rs b/src/github_url.rs index b2ead06..0138b13 100644 --- a/src/github_url.rs +++ b/src/github_url.rs @@ -1,5 +1,7 @@ //! GitHub URL parsing for the `aps add` command. //! +//! Also includes helpers for parsing git repository identifiers. +//! //! 
Parses GitHub URLs to extract repository, branch/ref, and path information. //! //! Supported URL formats: @@ -8,6 +10,8 @@ //! - `https://github.com/{owner}/{repo}/blob/{ref}/{path}/SKILL.md` - direct skill file use crate::error::{ApsError, Result}; +use std::path::Path; +use url::Url; /// Parsed GitHub URL components #[derive(Debug, Clone, PartialEq)] @@ -22,8 +26,21 @@ pub struct ParsedGitHubUrl { pub is_skill_file: bool, } +/// Parsed repository identifier (git URL or GitHub content URL) +#[derive(Debug, Clone, PartialEq)] +pub struct ParsedRepoIdentifier { + /// Repository URL or path (SSH/HTTPS/local) + pub repo_url: String, + /// Optional git ref (branch, tag, or commit) + pub git_ref: Option, + /// Optional path within the repository + pub path: Option, +} + +#[allow(dead_code)] impl ParsedGitHubUrl { /// Get the skill folder path (strips SKILL.md if present) + #[allow(dead_code)] pub fn skill_path(&self) -> &str { if self.is_skill_file { // Handle root-level SKILL.md files (no leading slash) @@ -41,6 +58,7 @@ impl ParsedGitHubUrl { } /// Get the skill name (last component of the path) + #[allow(dead_code)] pub fn skill_name(&self) -> Option<&str> { let skill_path = self.skill_path(); skill_path.rsplit('/').next().filter(|s| !s.is_empty()) @@ -139,6 +157,140 @@ pub fn parse_github_url(url: &str) -> Result { }) } +/// Parse a repository identifier from the `aps add` command. 
+/// +/// Accepts: +/// - GitHub blob/tree URLs (extracts ref + path) +/// - HTTPS/SSH Git URLs +/// - SCP-style SSH URLs (git@host:owner/repo.git) +/// - Local paths (if they exist on disk) +pub fn parse_repo_identifier(input: &str) -> Result { + let input = input.trim(); + if input.is_empty() { + return Err(ApsError::InvalidRepoSpecifier { + value: input.to_string(), + reason: "Repository identifier cannot be empty".to_string(), + }); + } + + if let Ok(parsed) = Url::parse(input) { + let scheme = parsed.scheme(); + let host = parsed.host_str().unwrap_or_default(); + let path_segments: Vec<&str> = parsed + .path() + .trim_start_matches('/') + .split('/') + .filter(|s| !s.is_empty()) + .collect(); + + if is_github_host(host) && matches!(scheme, "http" | "https") { + if path_segments.len() >= 3 + && (path_segments[2] == "blob" || path_segments[2] == "tree") + { + let parsed = parse_github_url(input)?; + return Ok(ParsedRepoIdentifier { + repo_url: parsed.repo_url, + git_ref: Some(parsed.git_ref), + path: Some(parsed.path), + }); + } + + if path_segments.len() == 2 { + let owner = path_segments[0]; + let repo = path_segments[1].trim_end_matches(".git"); + return Ok(ParsedRepoIdentifier { + repo_url: format!("https://github.com/{}/{}.git", owner, repo), + git_ref: None, + path: None, + }); + } + + return Err(ApsError::InvalidRepoSpecifier { + value: input.to_string(), + reason: "GitHub URL must be a repository URL or a blob/tree URL".to_string(), + }); + } + + if is_github_host(host) && matches!(scheme, "ssh" | "git") { + return Ok(ParsedRepoIdentifier { + repo_url: input.to_string(), + git_ref: None, + path: None, + }); + } + + if matches!(scheme, "http" | "https" | "ssh" | "git" | "file") { + if path_segments.is_empty() { + return Err(ApsError::InvalidRepoSpecifier { + value: input.to_string(), + reason: "Repository URL is missing a path".to_string(), + }); + } + return Ok(ParsedRepoIdentifier { + repo_url: input.to_string(), + git_ref: None, + path: None, + }); + } 
+ + return Err(ApsError::InvalidRepoSpecifier { + value: input.to_string(), + reason: format!("Unsupported URL scheme: {}", scheme), + }); + } + + if is_scp_like_git_url(input) { + return Ok(ParsedRepoIdentifier { + repo_url: input.to_string(), + git_ref: None, + path: None, + }); + } + + if Path::new(input).exists() { + return Ok(ParsedRepoIdentifier { + repo_url: input.to_string(), + git_ref: None, + path: None, + }); + } + + Err(ApsError::InvalidRepoSpecifier { + value: input.to_string(), + reason: "Expected an HTTPS/SSH Git URL, GitHub blob/tree URL, or existing local path" + .to_string(), + }) +} + +fn is_github_host(host: &str) -> bool { + host == "github.com" || host == "www.github.com" +} + +fn is_scp_like_git_url(input: &str) -> bool { + if input.contains("://") { + return false; + } + + let (user_host, path) = match input.split_once(':') { + Some(parts) => parts, + None => return false, + }; + let (user, host) = match user_host.split_once('@') { + Some(parts) => parts, + None => return false, + }; + + if user.is_empty() || host.is_empty() || path.is_empty() { + return false; + } + + if path.contains(' ') { + return false; + } + + path.contains('/') || path.ends_with(".git") +} + #[cfg(test)] mod tests { use super::*; @@ -262,4 +414,28 @@ mod tests { assert_eq!(parsed.skill_path(), ""); assert_eq!(parsed.skill_name(), None); } + + #[test] + fn test_parse_repo_identifier_with_ssh_scp() { + let parsed = parse_repo_identifier("git@github.com:org/repo.git").unwrap(); + assert_eq!(parsed.repo_url, "git@github.com:org/repo.git"); + assert_eq!(parsed.git_ref, None); + assert_eq!(parsed.path, None); + } + + #[test] + fn test_parse_repo_identifier_with_ssh_url() { + let parsed = parse_repo_identifier("ssh://git@github.com/org/repo.git").unwrap(); + assert_eq!(parsed.repo_url, "ssh://git@github.com/org/repo.git"); + assert_eq!(parsed.git_ref, None); + assert_eq!(parsed.path, None); + } + + #[test] + fn test_parse_repo_identifier_with_github_repo_root() { + let 
parsed = parse_repo_identifier("https://github.com/owner/repo").unwrap(); + assert_eq!(parsed.repo_url, "https://github.com/owner/repo.git"); + assert_eq!(parsed.git_ref, None); + assert_eq!(parsed.path, None); + } } diff --git a/tests/cli.rs b/tests/cli.rs index 0e94e52..7e8c8be 100644 --- a/tests/cli.rs +++ b/tests/cli.rs @@ -1174,6 +1174,7 @@ fn add_creates_manifest_entry_with_no_sync() { aps() .args([ "add", + "agent_skill", "https://github.com/hashicorp/agent-skills/blob/main/terraform/module-generation/skills/refactor-module", "--no-sync", ]) @@ -1207,6 +1208,7 @@ fn add_parses_skill_md_url_correctly() { aps() .args([ "add", + "agent_skill", "https://github.com/hashicorp/agent-skills/blob/main/terraform/module-generation/skills/refactor-module/SKILL.md", "--no-sync", ]) @@ -1235,6 +1237,7 @@ fn add_with_custom_id() { aps() .args([ "add", + "agent_skill", "https://github.com/owner/repo/blob/main/path/to/skill", "--id", "my-custom-skill", @@ -1274,6 +1277,7 @@ fn add_to_existing_manifest() { aps() .args([ "add", + "agent_skill", "https://github.com/owner/repo/blob/main/path/to/new-skill", "--no-sync", ]) @@ -1309,6 +1313,7 @@ fn add_duplicate_id_fails() { aps() .args([ "add", + "agent_skill", "https://github.com/owner/repo/blob/main/path/to/duplicate-skill", "--no-sync", ]) @@ -1319,37 +1324,131 @@ fn add_duplicate_id_fails() { } #[test] -fn add_invalid_github_url_fails() { +fn add_invalid_url_format_fails() { let temp = assert_fs::TempDir::new().unwrap(); - // Non-GitHub URL + // URL without blob/tree aps() .args([ "add", - "https://gitlab.com/owner/repo/blob/main/path", + "agent_skill", + "https://github.com/owner/repo/commits/main/path", "--no-sync", ]) .current_dir(&temp) .assert() .failure() - .stderr(predicate::str::contains("github.com")); + .stderr(predicate::str::contains("blob").or(predicate::str::contains("tree"))); } #[test] -fn add_invalid_url_format_fails() { +fn add_invalid_repo_identifier_fails() { + let temp = 
assert_fs::TempDir::new().unwrap(); + + aps() + .args(["add", "agent_skill", "not-a-repo", "--no-sync"]) + .current_dir(&temp) + .assert() + .failure() + .stderr(predicate::str::contains("Expected an HTTPS/SSH Git URL")); +} + +#[test] +fn add_cursor_rules_from_https_repo() { let temp = assert_fs::TempDir::new().unwrap(); - // URL without blob/tree aps() .args([ "add", - "https://github.com/owner/repo/commits/main/path", + "cursor_rules", + "https://github.com/owner/repo", "--no-sync", ]) .current_dir(&temp) .assert() - .failure() - .stderr(predicate::str::contains("blob").or(predicate::str::contains("tree"))); + .success(); + + let manifest = temp.child("aps.yaml"); + manifest.assert(predicate::str::contains("kind: cursor_rules")); + manifest.assert(predicate::str::contains( + "repo: https://github.com/owner/repo.git", + )); + manifest.assert(predicate::str::contains("ref: auto")); + manifest.assert(predicate::str::contains("path: .cursor/rules")); +} + +#[test] +fn add_cursor_rules_from_ssh_repo() { + let temp = assert_fs::TempDir::new().unwrap(); + + aps() + .args([ + "add", + "cursor_rules", + "git@github.com:org/repo.git", + "--no-sync", + ]) + .current_dir(&temp) + .assert() + .success(); + + let manifest = temp.child("aps.yaml"); + manifest.assert(predicate::str::contains("kind: cursor_rules")); + manifest.assert(predicate::str::contains( + "repo: git@github.com:org/repo.git", + )); + manifest.assert(predicate::str::contains("ref: auto")); + manifest.assert(predicate::str::contains("path: .cursor/rules")); +} + +#[test] +fn add_cursor_skills_root_from_ssh_repo() { + let temp = assert_fs::TempDir::new().unwrap(); + + aps() + .args([ + "add", + "cursor_skills_root", + "git@github.com:org/skills.git", + "--no-sync", + ]) + .current_dir(&temp) + .assert() + .success(); + + let manifest = temp.child("aps.yaml"); + manifest.assert(predicate::str::contains("kind: cursor_skills_root")); + manifest.assert(predicate::str::contains( + "repo: 
git@github.com:org/skills.git", + )); + manifest.assert(predicate::str::contains("ref: auto")); + manifest.assert(predicate::str::contains("path: .cursor/skills")); +} + +#[test] +fn add_agent_skill_from_ssh_with_path() { + let temp = assert_fs::TempDir::new().unwrap(); + + aps() + .args([ + "add", + "agent_skill", + "git@github.com:org/private-skills.git", + "--path", + "skills/my-skill", + "--no-sync", + ]) + .current_dir(&temp) + .assert() + .success(); + + let manifest = temp.child("aps.yaml"); + manifest.assert(predicate::str::contains("kind: agent_skill")); + manifest.assert(predicate::str::contains( + "repo: git@github.com:org/private-skills.git", + )); + manifest.assert(predicate::str::contains("path: skills/my-skill")); + manifest.assert(predicate::str::contains("dest: .claude/skills/my-skill/")); } #[test] @@ -1360,6 +1459,7 @@ fn add_with_tree_url() { aps() .args([ "add", + "agent_skill", "https://github.com/owner/repo/tree/main/path/to/skill", "--no-sync", ]) @@ -1379,6 +1479,7 @@ fn add_with_different_ref() { aps() .args([ "add", + "agent_skill", "https://github.com/owner/repo/blob/v1.2.3/path/to/skill", "--no-sync", ]) @@ -1396,8 +1497,9 @@ fn add_help_shows_usage() { .args(["add", "--help"]) .assert() .success() - .stdout(predicate::str::contains("GitHub URL")) + .stdout(predicate::str::contains("ASSET_TYPE")) .stdout(predicate::str::contains("--id")) - .stdout(predicate::str::contains("--kind")) + .stdout(predicate::str::contains("--path")) + .stdout(predicate::str::contains("--ref")) .stdout(predicate::str::contains("--no-sync")); } From 6af90b4c3acfa880ed6c076c1e63cb1385e688cc Mon Sep 17 00:00:00 2001 From: Melioo Date: Tue, 10 Feb 2026 09:46:23 +0100 Subject: [PATCH 34/39] feat: APS Lockfile Changes Changed the Lockfile name to derive from manifest name to allow multiple manifests in the same repository, e.g. having manifest for Cursor and Codex. 
--- src/lockfile.rs | 101 +++++++++++++++++++++++++++++++++--------------- tests/cli.rs | 54 ++++++++++++++++++++++++++ 2 files changed, 124 insertions(+), 31 deletions(-) diff --git a/src/lockfile.rs b/src/lockfile.rs index fcf8530..9e0471c 100644 --- a/src/lockfile.rs +++ b/src/lockfile.rs @@ -6,11 +6,18 @@ use std::fmt; use std::path::{Path, PathBuf}; use tracing::{debug, info}; -/// Default lockfile filename -pub const LOCKFILE_NAME: &str = "aps.lock.yaml"; - -/// Legacy lockfile filename (for backward compatibility) -const LEGACY_LOCKFILE_NAME: &str = "aps.manifest.lock"; +/// Legacy lockfile filenames (for backward compatibility). +/// +/// Historically, APS used fixed lockfile names that were not derived from the +/// manifest filename: +/// - "aps.lock.yaml" (current default for aps.yaml) +/// - "aps.manifest.lock" (older legacy name) +/// +/// Newer versions derive the lockfile name from the manifest filename +/// (e.g. aps-foo.yaml -> aps-foo.lock.yaml). These legacy names are still +/// recognized when loading and are automatically migrated to the new +/// manifest-based name on save. +const LEGACY_LOCKFILE_NAMES: &[&str] = &["aps.lock.yaml", "aps.manifest.lock"]; /// Source types for locked entries - supports both simple strings and composite structures #[derive(Debug, Clone, PartialEq)] @@ -274,19 +281,43 @@ impl Lockfile { } } - /// Get the lockfile path relative to the manifest + /// Get the lockfile path relative to the manifest. + /// + /// The lockfile name is derived from the manifest filename by removing the + /// extension (if any) and appending ".lock.yaml". + /// + /// Examples: + /// - aps.yaml -> aps.lock.yaml + /// - aps-xxx.yaml -> aps-xxx.lock.yaml + /// - custom -> custom.lock.yaml pub fn path_for_manifest(manifest_path: &Path) -> PathBuf { + // Derive the lockfile name from the manifest filename. + let lockfile_name = manifest_path + .file_name() + .and_then(|os_str| os_str.to_str()) + .map(|filename| { + // Split on the last '.' 
to drop the extension, if present. + if let Some((stem, _ext)) = filename.rsplit_once('.') { + format!("{stem}.lock.yaml") + } else { + format!("{filename}.lock.yaml") + } + }) + // Fallback to the historical default if we can't determine a filename. + .unwrap_or_else(|| "aps.lock.yaml".to_string()); + manifest_path .parent() - .map(|p| p.join(LOCKFILE_NAME)) - .unwrap_or_else(|| PathBuf::from(LOCKFILE_NAME)) + .map(|p| p.join(&lockfile_name)) + .unwrap_or_else(|| PathBuf::from(lockfile_name)) } - /// Load a lockfile from disk + /// Load a lockfile from disk. /// - /// Supports backward compatibility with legacy filename (aps.manifest.lock) + /// Supports backward compatibility with legacy filenames + /// (e.g. aps.lock.yaml, aps.manifest.lock). pub fn load(path: &Path) -> Result { - // Try loading from the provided path first (new filename) + // Try loading from the provided path first (canonical, manifest-based name). if path.exists() { let content = std::fs::read_to_string(path) .map_err(|e| ApsError::io(e, format!("Failed to read lockfile at {:?}", path)))?; @@ -300,16 +331,19 @@ impl Lockfile { return Ok(lockfile); } - // Fall back to legacy filename for backward compatibility + // Fall back to legacy filenames for backward compatibility. 
+ let dir = path.parent().unwrap_or_else(|| Path::new(".")); + + for legacy_name in LEGACY_LOCKFILE_NAMES { + let legacy_path = dir.join(legacy_name); + if !legacy_path.exists() { + continue; + } - if legacy_path.exists() { info!( - "Loading legacy lockfile '{}' (will be migrated to '{}' on next save)", - LEGACY_LOCKFILE_NAME, LOCKFILE_NAME + "Loading legacy lockfile '{:?}' (will be migrated to '{:?}' on next save)", + legacy_path.file_name().unwrap_or_default(), + path.file_name().unwrap_or_default() ); let content = std::fs::read_to_string(&legacy_path).map_err(|e| { @@ -331,9 +365,10 @@ impl Lockfile { Err(ApsError::LockfileNotFound) } - /// Save the lockfile to disk + /// Save the lockfile to disk. /// - /// Automatically migrates from legacy filename if it exists + /// Automatically migrates from legacy filenames if they exist in the same + /// directory but do not match the canonical manifest-based name. pub fn save(&self, path: &Path) -> Result<()> { let content = serde_yaml::to_string(self).map_err(|e| ApsError::LockfileReadError { message: format!("Failed to serialize lockfile: {}", e), @@ -344,24 +379,28 @@ impl Lockfile { info!("Saved lockfile to {:?}", path); - // Automatic migration: Remove legacy lockfile if it exists - let legacy_path = path - .parent() - .map(|p| p.join(LEGACY_LOCKFILE_NAME)) - .unwrap_or_else(|| PathBuf::from(LEGACY_LOCKFILE_NAME)); + // Automatic migration: Remove legacy lockfiles in the same directory if + // they exist and are not the canonical path we just wrote. 
+ let dir = path.parent().unwrap_or_else(|| Path::new(".")); + + for legacy_name in LEGACY_LOCKFILE_NAMES { + let legacy_path = dir.join(legacy_name); + if !legacy_path.exists() || legacy_path == path { + continue; + } - if legacy_path.exists() && legacy_path != path { match std::fs::remove_file(&legacy_path) { Ok(_) => { info!( - "Migrated lockfile: removed legacy file '{}'", - LEGACY_LOCKFILE_NAME + "Migrated lockfile: removed legacy file '{:?}'", + legacy_path.file_name().unwrap_or_default() ); } Err(e) => { debug!( - "Could not remove legacy lockfile '{}': {}", - LEGACY_LOCKFILE_NAME, e + "Could not remove legacy lockfile '{:?}': {}", + legacy_path.file_name().unwrap_or_default(), + e ); } } diff --git a/tests/cli.rs b/tests/cli.rs index 7e8c8be..55df3e3 100644 --- a/tests/cli.rs +++ b/tests/cli.rs @@ -119,6 +119,32 @@ fn sync_with_empty_manifest_succeeds() { aps().arg("sync").current_dir(&temp).assert().success(); } +#[test] +fn sync_with_custom_manifest_uses_manifest_based_lockfile_name() { + let temp = assert_fs::TempDir::new().unwrap(); + + // Use a non-default manifest filename. + let manifest_name = "aps-custom.yaml"; + temp.child(manifest_name) + .write_str("entries: []\n") + .unwrap(); + + aps() + .args(["sync", "--manifest", manifest_name]) + .current_dir(&temp) + .assert() + .success(); + + // Lockfile name should be derived from the manifest filename. + temp.child("aps-custom.lock.yaml") + .assert(predicate::path::exists()); + + // The historical default lockfile name should not be created when a custom + // manifest is explicitly used. 
+ temp.child("aps.lock.yaml") + .assert(predicate::path::missing()); +} + #[test] fn sync_dry_run_does_not_create_lockfile() { let temp = assert_fs::TempDir::new().unwrap(); @@ -246,6 +272,34 @@ fn status_works_after_sync() { aps().arg("status").current_dir(&temp).assert().success(); } +#[test] +fn status_with_custom_manifest_uses_matching_lockfile() { + let temp = assert_fs::TempDir::new().unwrap(); + + let manifest_name = "aps-status.yaml"; + temp.child(manifest_name) + .write_str("entries: []\n") + .unwrap(); + + // First sync to create the manifest-based lockfile. + aps() + .args(["sync", "--manifest", manifest_name]) + .current_dir(&temp) + .assert() + .success(); + + // Ensure the derived lockfile exists. + temp.child("aps-status.lock.yaml") + .assert(predicate::path::exists()); + + // Then status should succeed when pointing at the same manifest. + aps() + .args(["status", "--manifest", manifest_name]) + .current_dir(&temp) + .assert() + .success(); +} + // ============================================================================ // Catalog Command Tests // ============================================================================ From 92378de074e5f3a05daf6d880b5421f9a2294269 Mon Sep 17 00:00:00 2001 From: Melioo Date: Tue, 10 Feb 2026 09:53:16 +0100 Subject: [PATCH 35/39] feat: Lockfile README Extension Added documentation about new Lockfile changes. --- README.md | 22 +++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 6cb6114..1171116 100644 --- a/README.md +++ b/README.md @@ -185,9 +185,9 @@ cargo install aps --force When you run `aps sync`: -1. **Entries are synced** - Each entry in `aps.yaml` is installed to its destination (git sources reuse cached clones for speed) -2. **Stale entries are cleaned** - Entries in the lockfile that no longer exist in `aps.yaml` are automatically removed -3. **Lockfile is saved** - The updated lockfile is written to disk +1. 
**Entries are synced** - Each entry in your manifest (by default `aps.yaml`) is installed to its destination (git sources reuse cached clones for speed) +2. **Stale entries are cleaned** - Entries in the lockfile that no longer exist in the manifest are automatically removed +3. **Lockfile is saved** - The updated lockfile is written to disk, next to the manifest Note: Stale entry cleanup only happens during a full sync. When using `--only ` to sync specific entries, other lockfile entries are preserved. @@ -302,9 +302,19 @@ Key features: - **Order preserved**: Files are merged in the order specified in `sources` - **Auto-generated header**: Output includes a comment indicating it was composed by aps -### Lockfile (`aps.lock.yaml`) +### Lockfile (manifest-based) -The lockfile tracks installed assets and is automatically created/updated by `aps sync`. **This file should be committed to version control** to ensure reproducible installations across your team. It stores: +The lockfile tracks installed assets and is automatically created/updated by `aps sync`. **This file should be committed to version control** to ensure reproducible installations across your team. + +The lockfile name is derived from the manifest filename: + +- `aps.yaml` β†’ `aps.lock.yaml` +- `aps-rules.yaml` β†’ `aps-rules.lock.yaml` +- `custom` β†’ `custom.lock.yaml` + +This means you can keep multiple manifests in the same repository, each with its own lockfile, without conflicts. + +Each lockfile stores: - Source information - Destination paths @@ -313,6 +323,8 @@ The lockfile tracks installed assets and is automatically created/updated by `ap **Environment Variables Are Preserved**: Unlike other package managers (npm, uv, bundler) that expand environment variables to concrete paths, `aps` preserves shell variables like `$HOME` in the lockfile. This makes lockfiles portable across different machines and users who have the same relative directory structure. 
+**Legacy lockfiles**: For backward compatibility, `aps` still recognizes historical lockfile names (`aps.lock.yaml`, `aps.manifest.lock`) when loading. On the next successful `aps sync`, these legacy files are migrated to the manifest-based name and the old files are cleaned up. + ## Examples ### Non-interactive sync for CI/CD From a9d47104c2c5623a111c36687c827f5c5c8bf8ee Mon Sep 17 00:00:00 2001 From: Melioo Date: Tue, 10 Feb 2026 10:16:46 +0100 Subject: [PATCH 36/39] feat: Sync Progress Added simple Sync Progress outside the Verbose output. --- README.md | 23 +++++++++ aps.yaml | 12 ++--- src/commands.rs | 121 +++++++++++++++++++++++++++++++++++++++++------- tests/cli.rs | 69 +++++++++++++++++++++++++++ 4 files changed, 201 insertions(+), 24 deletions(-) diff --git a/README.md b/README.md index 1171116..a961fe4 100644 --- a/README.md +++ b/README.md @@ -191,6 +191,29 @@ When you run `aps sync`: Note: Stale entry cleanup only happens during a full sync. When using `--only ` to sync specific entries, other lockfile entries are preserved. +### Example Sync Output + +When syncing a manifest with multiple entries, `aps sync` now shows per-entry progress so you can see work as it happens: + +```bash +$ aps sync + +(1/3) Syncing my-agents from filesystem $HOME/agents-md-partials (AGENTS.md)... +(1/3) Finished my-agents in 0.12s [synced] +(2/3) Syncing composite-agents from 3 composite source(s)... +(2/3) Finished composite-agents in 0.45s [copied] +(3/3) Syncing company-rules from git repo git@github.com:your-username/dotfiles.git... 
+(3/3) Finished company-rules in 1.23s [synced] + +Syncing from aps.yaml + + βœ“ my-agents β†’ ./AGENTS.md [synced] + Β· composite-agents β†’ ./AGENTS.md [current] + βœ“ company-rules β†’ ./.cursor/rules/ [synced] + +2 synced, 1 current +``` + ## Configuration ### Manifest File (`aps.yaml`) diff --git a/aps.yaml b/aps.yaml index e9d0a83..52e1678 100644 --- a/aps.yaml +++ b/aps.yaml @@ -18,10 +18,8 @@ entries: type: git repo: https://github.com/anthropics/skills.git ref: main - path: skills - include: - - skill-creator - dest: ./.claude/skills/ + path: skills/skill-creator + dest: ./.claude/skills/skill-creator - id: address-github-comments kind: agent_skill @@ -29,7 +27,5 @@ entries: type: git repo: https://github.com/sickn33/antigravity-awesome-skills.git ref: main - path: skills - include: - - address-github-comments - dest: ./.claude/skills/ + path: skills/address-github-comments + dest: ./.claude/skills/address-github-comments diff --git a/src/commands.rs b/src/commands.rs index cd441bc..c9cb4ac 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -14,6 +14,7 @@ use crate::manifest::{ use crate::orphan::{detect_orphaned_paths, prompt_and_cleanup_orphans}; use crate::sources::GitCloneCache; use crate::sync_output::{print_sync_results, print_sync_summary, SyncDisplayItem, SyncStatus}; +use console::{style, Style}; use std::fs; use std::io::Write; use std::path::Path; @@ -288,6 +289,56 @@ pub fn cmd_add(args: AddArgs) -> Result<()> { Ok(()) } +fn sync_status_for_result(result: &InstallResult) -> SyncStatus { + if !result.warnings.is_empty() { + SyncStatus::Warning + } else if result.skipped_no_change && result.upgrade_available.is_some() { + SyncStatus::Upgradable + } else if result.skipped_no_change { + SyncStatus::Current + } else if result.was_symlink { + SyncStatus::Synced + } else { + SyncStatus::Copied + } +} + +fn sync_status_label(status: SyncStatus) -> &'static str { + match status { + SyncStatus::Synced => "synced", + SyncStatus::Copied => "copied", + 
SyncStatus::Current => "current", + SyncStatus::Upgradable => "upgrade available", + SyncStatus::Warning => "warning", + SyncStatus::Error => "error", + } +} + +fn progress_style_for_status(status: SyncStatus) -> Style { + match status { + SyncStatus::Synced | SyncStatus::Copied => Style::new().green(), + SyncStatus::Current => Style::new().dim(), + SyncStatus::Upgradable => Style::new().color256(208), + SyncStatus::Warning => Style::new().yellow(), + SyncStatus::Error => Style::new().red(), + } +} + +fn format_entry_source(entry: &Entry) -> String { + if entry.is_composite() { + return format!("{} composite source(s)", entry.sources.len()); + } + + match &entry.source { + Some(Source::Git { repo, .. }) => format!("git repo {}", repo), + Some(Source::Filesystem { root, path, .. }) => match path { + Some(p) => format!("filesystem {} ({})", root, p), + None => format!("filesystem {}", root), + }, + None => "no source configured".to_string(), + } +} + fn parse_add_targets(targets: &[String]) -> Result<(AssetKind, String, bool)> { match targets.len() { 1 => { @@ -443,16 +494,64 @@ pub fn cmd_sync(args: SyncArgs) -> Result<()> { // Detect orphaned paths (destinations that changed) let orphans = detect_orphaned_paths(&entries_to_install, &lockfile, &base_dir); - // Install selected entries + // Install selected entries with progressive feedback + let total = entries_to_install.len(); let mut results: Vec = Vec::new(); - for entry in &entries_to_install { + for (index, entry) in entries_to_install.iter().enumerate() { + let ordinal = index + 1; + let source_desc = format_entry_source(entry); + println!( + "({}/{}) {} {} {}", + ordinal, + total, + style("Syncing").dim(), + style(&entry.id).cyan().bold(), + style(format!("from {}...", source_desc)).dim(), + ); + std::io::stdout().flush().ok(); + + let start = std::time::Instant::now(); + // Use composite install for composite entries, regular install otherwise let result = if entry.is_composite() { - 
install_composite_entry(entry, &base_dir, &lockfile, &options, &mut git_cache)? + install_composite_entry(entry, &base_dir, &lockfile, &options, &mut git_cache) } else { - install_entry(entry, &base_dir, &lockfile, &options, &mut git_cache)? + install_entry(entry, &base_dir, &lockfile, &options, &mut git_cache) }; - results.push(result); + + match result { + Ok(result) => { + let elapsed = start.elapsed(); + let status = sync_status_for_result(&result); + let label = sync_status_label(status); + let status_style = progress_style_for_status(status); + println!( + "({}/{}) {} {} {} {}", + ordinal, + total, + style("Finished").dim(), + style(&entry.id).cyan().bold(), + style(format!("in {:.2?}", elapsed)).dim(), + status_style.apply_to(format!("[{}]", label)), + ); + std::io::stdout().flush().ok(); + results.push(result); + } + Err(err) => { + let elapsed = start.elapsed(); + let error_style = Style::new().red(); + eprintln!( + "({}/{}) {} {} {}: {}", + ordinal, + total, + error_style.apply_to("Failed"), + style(&entry.id).cyan().bold(), + style(format!("after {:.2?}", elapsed)).dim(), + error_style.apply_to(err.to_string()), + ); + return Err(err); + } + } } // Cleanup orphaned paths after successful install @@ -490,17 +589,7 @@ pub fn cmd_sync(args: SyncArgs) -> Result<()> { let display_items: Vec = results .iter() .map(|r| { - let status = if !r.warnings.is_empty() { - SyncStatus::Warning - } else if r.skipped_no_change && r.upgrade_available.is_some() { - SyncStatus::Upgradable - } else if r.skipped_no_change { - SyncStatus::Current - } else if r.was_symlink { - SyncStatus::Synced - } else { - SyncStatus::Copied - }; + let status = sync_status_for_result(r); let mut item = SyncDisplayItem::new( r.id.clone(), diff --git a/tests/cli.rs b/tests/cli.rs index 55df3e3..1d26eab 100644 --- a/tests/cli.rs +++ b/tests/cli.rs @@ -414,6 +414,75 @@ fn sync_with_symlink_creates_symlink() { } } +#[test] +fn sync_shows_progress_per_entry() { + let temp = 
assert_fs::TempDir::new().unwrap(); + + // Create source files + let source_dir = temp.child("source"); + source_dir.create_dir_all().unwrap(); + source_dir + .child("AGENTS-one.md") + .write_str("# One\n") + .unwrap(); + source_dir + .child("AGENTS-two.md") + .write_str("# Two\n") + .unwrap(); + + // Create manifest with two entries so we can see (1/2), (2/2) + let manifest = format!( + r#"entries: + - id: first + kind: agents_md + source: + type: filesystem + root: {root} + path: AGENTS-one.md + dest: ./AGENTS-one.md + - id: second + kind: agents_md + source: + type: filesystem + root: {root} + path: AGENTS-two.md + dest: ./AGENTS-two.md +"#, + root = source_dir.path().display() + ); + + temp.child("aps.yaml").write_str(&manifest).unwrap(); + + let output = aps() + .arg("sync") + .current_dir(&temp) + .output() + .expect("failed to run aps sync"); + + let stdout = String::from_utf8_lossy(&output.stdout).to_string(); + assert!(output.status.success(), "aps sync failed: {}", stdout); + + // Ensure progress markers are present + assert!( + stdout.contains("(1/2)"), + "expected progress for first entry, got: {stdout}" + ); + assert!( + stdout.contains("(2/2)"), + "expected progress for second entry, got: {stdout}" + ); + + // Progress should appear before the final sync summary header + let first_progress = stdout.find("(1/2)").unwrap(); + let header_pos = stdout + .find("Syncing from") + .expect("expected 'Syncing from' header in output"); + assert!( + first_progress < header_pos, + "expected progress lines before summary header, got: {stdout}" + ); +} + // ============================================================================ // Hooks Tests // ============================================================================ From ea094d3eab73561471775e7de9972a646f985a03 Mon Sep 17 00:00:00 2001 From: Melioo Date: Tue, 10 Feb 2026 10:22:56 +0100 Subject: [PATCH 37/39] fix: Clippy Lint Fixed Clippy build in CI Lint. 
--- src/commands.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/commands.rs b/src/commands.rs index c9cb4ac..655baa5 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -429,7 +429,7 @@ fn repo_basename(repo_url: &str) -> Option { if let Ok(parsed) = url::Url::parse(repo_url) { let name = parsed .path_segments() - .and_then(|segments| segments.filter(|s| !s.is_empty()).last()) + .and_then(|mut segments| segments.rfind(|s| !s.is_empty())) .map(|s| s.to_string())?; return Some(name.trim_end_matches(".git").to_string()); } From 0fa7cbd0021ff82b3a0ef73cda8e281fd1aa154f Mon Sep 17 00:00:00 2001 From: Melioo Date: Mon, 16 Feb 2026 09:18:35 +0100 Subject: [PATCH 38/39] fix: Conflicts Resolved conflicts with recent changes. --- .gitignore | 1 + aps.lock.yaml | 24 ++-- src/cli.rs | 10 +- src/commands.rs | 278 ++++++++++++++++++++++----------------------- src/sources/mod.rs | 5 +- tests/cli.rs | 3 - 6 files changed, 156 insertions(+), 165 deletions(-) diff --git a/.gitignore b/.gitignore index 35432e6..42b2d1d 100644 --- a/.gitignore +++ b/.gitignore @@ -5,4 +5,5 @@ aps.catalog.yaml .cursor +.claude _bmad* diff --git a/aps.lock.yaml b/aps.lock.yaml index b1584ee..7e7f4be 100644 --- a/aps.lock.yaml +++ b/aps.lock.yaml @@ -1,6 +1,18 @@ version: 1 aps_version: 0.1.11 entries: + skill-creator: + source: https://github.com/anthropics/skills.git + dest: .claude/skills/skill-creator/ + resolved_ref: main + commit: 1ed29a03dc852d30fa6ef2ca53a67dc2c2c2c563 + checksum: sha256:bdd5389ea8390277b8310b83eaa561a703c2fa08bbcf770e6394d42571f7445f + anthropic-skills: + source: https://github.com/anthropics/skills.git + dest: ./.claude/skills/ + resolved_ref: main + commit: 1ed29a03dc852d30fa6ef2ca53a67dc2c2c2c563 + checksum: sha256:3bd37a1929706db495f2609910316bb1a385a057556deb151f968e877aef39e9 composite-agents-md: source: composite: @@ -8,21 +20,9 @@ entries: - https://github.com/westonplatter/agentically.git:agents-md-partials/AGENTS.pull-requests.md dest: 
./AGENTS.md checksum: sha256:11eedede54fa5217e041f613adc9f692b985972e8487261e720fc553d707bdbf - anthropic-skills: - source: https://github.com/anthropics/skills.git - dest: ./.claude/skills/ - resolved_ref: main - commit: 1ed29a03dc852d30fa6ef2ca53a67dc2c2c2c563 - checksum: sha256:3bd37a1929706db495f2609910316bb1a385a057556deb151f968e877aef39e9 internal-comms: source: https://github.com/anthropics/skills.git dest: .claude/skills/internal-comms/ resolved_ref: main commit: 1ed29a03dc852d30fa6ef2ca53a67dc2c2c2c563 checksum: sha256:b353d2d5223c9b5154f52a1d1b87c65bd0f2ed9eac5e541e88e72b4c4a614743 - skill-creator: - source: https://github.com/anthropics/skills.git - dest: .claude/skills/skill-creator/ - resolved_ref: main - commit: 1ed29a03dc852d30fa6ef2ca53a67dc2c2c2c563 - checksum: sha256:bdd5389ea8390277b8310b83eaa561a703c2fa08bbcf770e6394d42571f7445f diff --git a/src/cli.rs b/src/cli.rs index b6601c0..0d05660 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -52,12 +52,10 @@ pub struct InitArgs { #[derive(Parser, Debug)] pub struct AddArgs { - /// GitHub URL or local filesystem path to a skill folder or repository. - /// Supports: GitHub URLs (https://github.com/owner/repo/...) and local - /// paths ($HOME/skills, ~/skills, ./skills). For repo-level URLs or - /// directories without SKILL.md, discovers skills and prompts for selection. - #[arg(value_name = "URL_OR_PATH")] - pub url: String, + /// Optional asset type (agent_skill, cursor_rules, cursor_skills_root, agents_md). + /// If omitted, defaults to agent_skill. When given, must be followed by URL or path. 
+ #[arg(value_name = "ASSET_TYPE_OR_URL", num_args = 1..=2, required = true)] + pub positionals: Vec, /// Custom entry ID (defaults to repo or path name) #[arg(long)] diff --git a/src/commands.rs b/src/commands.rs index 19580b3..f6967f1 100644 --- a/src/commands.rs +++ b/src/commands.rs @@ -6,7 +6,7 @@ use crate::discover::{ discover_skills_in_local_dir, discover_skills_in_repo, prompt_skill_selection, }; use crate::error::{ApsError, Result}; -use crate::github_url::parse_repo_identifier; +use crate::github_url::{parse_github_url, parse_repo_identifier}; use crate::hooks::validate_cursor_hooks; use crate::install::{install_composite_entry, install_entry, InstallOptions, InstallResult}; use crate::lockfile::{display_status, Lockfile}; @@ -80,7 +80,11 @@ fn is_local_path(input: &str) -> bool { } /// Parse the add target into a typed enum for routing. -fn parse_add_target(url_or_path: &str, all_flag: bool) -> Result { +fn parse_add_target( + url_or_path: &str, + all_flag: bool, + asset_kind: &AssetKind, +) -> Result { if is_local_path(url_or_path) { // Check if it contains a SKILL.md (single-skill) or not (discovery) let expanded = shellexpand::full(url_or_path) @@ -117,19 +121,72 @@ fn parse_add_target(url_or_path: &str, all_flag: bool) -> Result Result Result<()> { Ok(()) } +/// Parse add positionals: 1 arg => (AgentSkill, url), 2 args => (parse first as kind, second as url). 
+fn parse_add_positionals(positionals: &[String]) -> Result<(AssetKind, String)> { + match positionals.len() { + 1 => Ok((AssetKind::AgentSkill, positionals[0].clone())), + 2 => { + let kind = parse_asset_kind(&positionals[0])?; + Ok((kind, positionals[1].clone())) + } + _ => Err(ApsError::InvalidAddArguments { + message: "Expected either or ".to_string(), + }), + } +} + /// Execute the `aps add` command pub fn cmd_add(args: AddArgs) -> Result<()> { - let target = parse_add_target(&args.url, args.all)?; + let (asset_kind, url) = parse_add_positionals(&args.positionals)?; + let mut target = parse_add_target(&url, args.all, &asset_kind)?; + + // --path overrides: treat repo-level as single entry with that path (no discovery/clone on add) + if let ParsedAddTarget::GitHubDiscovery { + repo_url, + git_ref, + search_path, + } = &target + { + if let Some(ref path) = args.path { + target = ParsedAddTarget::GitHubSkill { + repo_url: repo_url.clone(), + git_ref: git_ref.clone(), + skill_path: path.clone(), + skill_name: path + .rsplit('/') + .next() + .filter(|s| !s.is_empty()) + .map(String::from), + }; + } else if search_path.is_empty() && asset_kind == AssetKind::AgentSkill { + // Repo-level agent_skill with no path: keep discovery (will clone to find skills) + } + } match target { ParsedAddTarget::GitHubSkill { @@ -251,32 +345,29 @@ pub fn cmd_add(args: AddArgs) -> Result<()> { git_ref, skill_path, skill_name, - } => cmd_add_single_git(args, &repo_url, &git_ref, &skill_path, skill_name), + } => cmd_add_single_git( + args, + asset_kind, + &repo_url, + &git_ref, + &skill_path, + skill_name, + ), ParsedAddTarget::GitHubDiscovery { repo_url, git_ref, search_path, - } => cmd_add_discover_git(args, &repo_url, &git_ref, &search_path), + } => cmd_add_discover_git(args, asset_kind, &repo_url, &git_ref, &search_path), ParsedAddTarget::FilesystemSkill { original_path, skill_name, - } => cmd_add_single_filesystem(args, &original_path, &skill_name), + } => 
cmd_add_single_filesystem(args, asset_kind, &original_path, &skill_name), ParsedAddTarget::FilesystemDiscovery { original_path } => { - cmd_add_discover_filesystem(args, &original_path) + cmd_add_discover_filesystem(args, asset_kind, &original_path) } } } -/// Convert CLI asset kind to manifest asset kind. -fn resolve_asset_kind(kind: &AddAssetKind) -> AssetKind { - match kind { - AddAssetKind::AgentSkill => AssetKind::AgentSkill, - AddAssetKind::CursorRules => AssetKind::CursorRules, - AddAssetKind::CursorSkillsRoot => AssetKind::CursorSkillsRoot, - AddAssetKind::AgentsMd => AssetKind::AgentsMd, - } -} - /// Compute the destination path for a skill entry. fn skill_dest(asset_kind: &AssetKind, entry_id: &str) -> String { format!( @@ -317,29 +408,7 @@ fn write_entries_to_manifest( ApsError::io(e, format!("Failed to write manifest to {:?}", path)) })?; - println!("Added entry '{}' to manifest\n", entry_id); - - // Sync the new entry unless --no-sync is set - if !args.no_sync { - println!("Syncing...\n"); - cmd_sync(SyncArgs { - manifest: Some(path), - only: vec![entry_id], - yes: true, - ignore_manifest: false, - dry_run: false, - strict: false, - upgrade: false, - })?; - } else if asset_kind == AssetKind::AgentSkill { - println!("Run `aps sync` to install the skill."); - } else { - println!("Run `aps sync` to install the asset."); - } - - return Ok(()); - } - Err(e) => return Err(e), + return Ok((path, entry_ids)); } Err(e) => return Err(e), }, @@ -420,10 +489,8 @@ fn maybe_sync( strict: false, upgrade: false, })?; - } else if asset_kind == AssetKind::AgentSkill { - println!("Run `aps sync` to install the skill."); } else { - println!("Run `aps sync` to install the asset."); + println!("Run `aps sync` to install."); } Ok(()) @@ -436,6 +503,7 @@ fn maybe_sync( /// Add a single skill from a GitHub URL. 
fn cmd_add_single_git( args: AddArgs, + asset_kind: AssetKind, repo_url: &str, git_ref: &str, skill_path: &str, @@ -448,8 +516,6 @@ fn cmd_add_single_git( // For single-skill adds, check for duplicate ID upfront check_duplicate_id(&entry_id, args.manifest.as_deref())?; - let asset_kind = resolve_asset_kind(&args.kind); - let entry = Entry { id: entry_id.clone(), kind: asset_kind.clone(), @@ -481,6 +547,7 @@ fn cmd_add_single_git( /// Discover and add skills from a GitHub repository. fn cmd_add_discover_git( args: AddArgs, + asset_kind: AssetKind, repo_url: &str, git_ref: &str, search_path: &str, @@ -493,7 +560,7 @@ fn cmd_add_discover_git( shallow: true, path: Some(skill.repo_path.clone()), }; - cmd_add_discovered(args, skills, source_builder, repo_url) + cmd_add_discovered(args, skills, source_builder, repo_url, asset_kind) } // ============================================================================ @@ -501,13 +568,16 @@ fn cmd_add_discover_git( // ============================================================================ /// Add a single skill from a local filesystem path. -fn cmd_add_single_filesystem(args: AddArgs, original_path: &str, skill_name: &str) -> Result<()> { +fn cmd_add_single_filesystem( + args: AddArgs, + asset_kind: AssetKind, + original_path: &str, + skill_name: &str, +) -> Result<()> { let entry_id = args.id.unwrap_or_else(|| skill_name.to_string()); check_duplicate_id(&entry_id, args.manifest.as_deref())?; - let asset_kind = resolve_asset_kind(&args.kind); - let entry = Entry { id: entry_id.clone(), kind: asset_kind.clone(), @@ -536,7 +606,11 @@ fn cmd_add_single_filesystem(args: AddArgs, original_path: &str, skill_name: &st } /// Discover and add skills from a local filesystem directory. 
-fn cmd_add_discover_filesystem(args: AddArgs, original_path: &str) -> Result<()> { +fn cmd_add_discover_filesystem( + args: AddArgs, + asset_kind: AssetKind, + original_path: &str, +) -> Result<()> { println!("Searching for skills in {}...\n", original_path); let skills = discover_skills_in_local_dir(original_path)?; let source_builder = |skill: &DiscoveredSkill| Source::Filesystem { @@ -544,7 +618,7 @@ fn cmd_add_discover_filesystem(args: AddArgs, original_path: &str) -> Result<()> symlink: true, path: Some(skill.repo_path.clone()), }; - cmd_add_discovered(args, skills, source_builder, original_path) + cmd_add_discovered(args, skills, source_builder, original_path, asset_kind) } // ============================================================================ @@ -561,6 +635,7 @@ fn cmd_add_discovered( skills: Vec, source_builder: impl Fn(&DiscoveredSkill) -> Source, location: &str, + asset_kind: AssetKind, ) -> Result<()> { if skills.is_empty() { return Err(ApsError::NoSkillsFound { @@ -706,8 +781,6 @@ fn cmd_add_discovered( } }; - let asset_kind = resolve_asset_kind(&args.kind); - let entries: Vec = to_add .iter() .map(|skill| { @@ -899,30 +972,6 @@ fn format_entry_source(entry: &Entry) -> String { } } -fn parse_add_targets(targets: &[String]) -> Result<(AssetKind, String, bool)> { - match targets.len() { - 1 => { - let normalized = targets[0].trim().replace('-', "_"); - if AssetKind::from_str(&normalized).is_ok() { - return Err(ApsError::InvalidAddArguments { - message: - "Missing repository identifier. 
Usage: aps add " - .to_string(), - }); - } - Ok((AssetKind::AgentSkill, targets[0].clone(), true)) - } - 2 => { - let kind = parse_asset_kind(&targets[0])?; - Ok((kind, targets[1].clone(), false)) - } - _ => Err(ApsError::InvalidAddArguments { - message: "Expected either or " - .to_string(), - }), - } -} - fn parse_asset_kind(input: &str) -> Result { let normalized = input.trim().replace('-', "_"); let kind = AssetKind::from_str(&normalized)?; @@ -943,20 +992,6 @@ fn parse_asset_kind(input: &str) -> Result { Ok(kind) } -fn normalize_skill_path(path: &str) -> String { - let path = path.trim(); - if path.eq_ignore_ascii_case("skill.md") { - return "".to_string(); - } - if let Some(stripped) = path.strip_suffix("/SKILL.md") { - return stripped.to_string(); - } - if let Some(stripped) = path.strip_suffix("/skill.md") { - return stripped.to_string(); - } - path.to_string() -} - fn default_path_for_kind(kind: &AssetKind) -> Option { match kind { AssetKind::CursorRules => Some(".cursor/rules".to_string()), @@ -967,45 +1002,6 @@ fn default_path_for_kind(kind: &AssetKind) -> Option { } } -fn derive_entry_id(kind: &AssetKind, repo_url: &str, path: Option<&str>) -> String { - match kind { - AssetKind::AgentSkill => path - .and_then(|p| p.rsplit('/').next()) - .filter(|p| !p.is_empty()) - .map(|s| s.to_string()) - .or_else(|| repo_basename(repo_url)) - .unwrap_or_else(|| "unnamed-skill".to_string()), - _ => repo_basename(repo_url) - .or_else(|| { - path.and_then(|p| p.rsplit('/').next()) - .filter(|p| !p.is_empty()) - .map(|s| s.to_string()) - }) - .unwrap_or_else(|| "unnamed-asset".to_string()), - } -} - -fn repo_basename(repo_url: &str) -> Option { - if let Ok(parsed) = url::Url::parse(repo_url) { - let name = parsed - .path_segments() - .and_then(|mut segments| segments.rfind(|s| !s.is_empty())) - .map(|s| s.to_string())?; - return Some(name.trim_end_matches(".git").to_string()); - } - - if let Some((_, path)) = repo_url.split_once(':') { - let name = 
path.rsplit('/').next().map(|s| s.to_string())?; - return Some(name.trim_end_matches(".git").to_string()); - } - - let name = std::path::Path::new(repo_url) - .file_name() - .and_then(|s| s.to_str()) - .map(|s| s.to_string())?; - Some(name.trim_end_matches(".git").to_string()) -} - /// Execute the `aps sync` command pub fn cmd_sync(args: SyncArgs) -> Result<()> { // Discover and load manifest diff --git a/src/sources/mod.rs b/src/sources/mod.rs index 8d3401e..857a64e 100644 --- a/src/sources/mod.rs +++ b/src/sources/mod.rs @@ -8,9 +8,8 @@ mod git; pub use filesystem::FilesystemSource; pub use git::{ - get_remote_commit_sha, resolve_git_source_at_commit_with_cache, resolve_git_source_with_cache, - clone_and_resolve, clone_at_commit, get_remote_commit_sha, - GitCloneCache, GitSource, + clone_and_resolve, get_remote_commit_sha, resolve_git_source_at_commit_with_cache, + resolve_git_source_with_cache, GitCloneCache, GitSource, }; use crate::error::Result; diff --git a/tests/cli.rs b/tests/cli.rs index 5508254..1e30821 100644 --- a/tests/cli.rs +++ b/tests/cli.rs @@ -1623,12 +1623,9 @@ fn add_help_shows_usage() { .args(["add", "--help"]) .assert() .success() - .stdout(predicate::str::contains("ASSET_TYPE")) .stdout(predicate::str::contains("--id")) .stdout(predicate::str::contains("--path")) .stdout(predicate::str::contains("--ref")) - .stdout(predicate::str::contains("--no-sync")); - .stdout(predicate::str::contains("--kind")) .stdout(predicate::str::contains("--no-sync")) .stdout(predicate::str::contains("--all")); } From f1147ef4fa1938ea42352bbf04a5e3cf6ce1e4ba Mon Sep 17 00:00:00 2001 From: Melioo Date: Tue, 17 Feb 2026 08:45:07 +0100 Subject: [PATCH 39/39] fix: Unused Error Commented unused error. 
--- src/error.rs | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/error.rs b/src/error.rs index f06485e..c6834d9 100644 --- a/src/error.rs +++ b/src/error.rs @@ -173,9 +173,10 @@ pub enum ApsError { #[diagnostic(code(aps::add::invalid_args), help("{message}"))] InvalidAddArguments { message: String }, - #[error("Asset type '{asset_type}' requires a path within the repository")] - #[diagnostic(code(aps::add::missing_path), help("{hint}"))] - MissingAddPath { asset_type: String, hint: String }, + /// Not used at the moment + // #[error("Asset type '{asset_type}' requires a path within the repository")] + // #[diagnostic(code(aps::add::missing_path), help("{hint}"))] + // MissingAddPath { asset_type: String, hint: String }, #[error("No skills found in {location}")] #[diagnostic(