Compare commits

..

5 commits

Author SHA1 Message Date
ea07d74457
feat: implement stash system (phase 7)
Add named stash support with create, use, push, pop, rm, and list
commands. Stashes are stored as YAML in .arc/stashes/named/ with a
stack-based push/pop model. Active stash tracked in state.yml.

- stash push saves dirty changes and resets worktree to clean state
- stash pop requires clean worktree and matching HEAD base commit
- 28 integration tests covering all commands and edge cases
- new error variants: StashAlreadyExists, StashNotFound, NoActiveStash,
  NothingToStash, StashEmpty, StashBaseMismatch
2026-02-07 16:48:54 +00:00
8fec7a170a
fix: path traversal vulnerabilities, remove production panics, deduplicate code, fix idioms
Security:
- validate ref names before all bookmark/tag filesystem operations
- validate repo paths before worktree joins in clean/write/reset

Panics removed:
- system clock .expect() -> ArcError::ClockError
- hash serialization .expect() -> ArcError::HashError (fallible return)
- .unwrap() in resolve/merge replaced with proper error handling

DRY:
- extract load_effective_config -> config::load_effective
- extract update_refs_after_commit -> refs::update_refs_after_commit

Idioms:
- remove dead let _ = suppressions in merge.rs
- remove unused _worktree param from diff::render_diff
- &PathBuf -> &Path in config.rs
2026-02-07 16:26:06 +00:00
b52b2b709b
feat: implement phase 6 - revert, reset, merge, and graft commands
- Add reset command to discard worktree changes (all or specific files)
- Add revert command to create inverse commits (preserving immutability)
- Add merge command with three-way merge and conflict detection
- Add graft command to cherry-pick commits onto bookmarks
- Create merge.rs with file-level and line-level three-way merge engine
- Create modify.rs orchestrating undo/modification operations
- Add MergeConflicts and NoMergeBase error variants
- Make refs utility functions (write_tree, clean_tracked_files) public
- Make inspect::myers_diff public for reuse in merge engine
- Add comprehensive tests for all four commands
2026-02-07 16:07:45 +00:00
d8cce41809
fix: add path validation helpers to prevent path traversal
Add validate_ref_name() to reject ref names with separators, .., or
leading dots. Add validate_repo_path() to reject absolute paths and
parent-directory traversal in repo-relative paths.
2026-02-06 23:27:45 +00:00
acd6ad919d
refactor: add new error variants and Display/AsRef impls for newtypes
Add InvalidRefName, ClockError, and HashError variants to ArcError.
Implement Display and AsRef<str> for CommitId and DeltaId.
2026-02-06 23:27:30 +00:00
21 changed files with 2414 additions and 103 deletions

View file

@ -6,8 +6,10 @@ use clap::{Parser, Subcommand};
use crate::diff; use crate::diff;
use crate::ignore::IgnoreRules; use crate::ignore::IgnoreRules;
use crate::inspect; use crate::inspect;
use crate::modify;
use crate::refs; use crate::refs;
use crate::repo::Repository; use crate::repo::Repository;
use crate::stash;
use crate::tracking; use crate::tracking;
#[derive(Parser)] #[derive(Parser)]
@ -397,7 +399,14 @@ pub fn dispatch(cli: Cli) {
} }
} }
Command::Merge { target } => { Command::Merge { target } => {
println!("arc merge: {target} (not yet implemented)"); let repo = open_repo_or_exit();
match modify::merge_branch(&repo, &target) {
Ok(id) => println!("merged {target} -> {}", &id.0[..id.0.len().min(12)]),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
}
} }
Command::Show { target } => { Command::Show { target } => {
let repo = open_repo_or_exit(); let repo = open_repo_or_exit();
@ -420,13 +429,23 @@ pub fn dispatch(cli: Cli) {
} }
} }
Command::Revert { target } => { Command::Revert { target } => {
println!("arc revert: {target} (not yet implemented)"); let repo = open_repo_or_exit();
match modify::revert(&repo, &target) {
Ok(id) => println!("reverted -> {}", &id.0[..id.0.len().min(12)]),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
}
} }
Command::Reset { files } => { Command::Reset { files } => {
if files.is_empty() { let repo = open_repo_or_exit();
println!("arc reset: all (not yet implemented)"); match modify::reset(&repo, &files) {
} else { Ok(msg) => println!("{msg}"),
println!("arc reset: {} (not yet implemented)", files.join(", ")); Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
} }
} }
Command::Push { remote } => { Command::Push { remote } => {
@ -510,28 +529,66 @@ pub fn dispatch(cli: Cli) {
}, },
} }
} }
Command::Stash { command } => match command { Command::Stash { command } => {
StashCommand::Create { name } => { let repo = open_repo_or_exit();
println!("arc stash create: {name} (not yet implemented)"); match command {
StashCommand::Create { name } => match stash::create(&repo, &name) {
Ok(()) => println!("stash '{name}' created"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
},
StashCommand::Use { name } => match stash::use_stash(&repo, &name) {
Ok(()) => println!("switched to stash '{name}'"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
},
StashCommand::Push => match stash::push(&repo) {
Ok(msg) => println!("{msg}"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
},
StashCommand::Pop => match stash::pop(&repo) {
Ok(msg) => println!("{msg}"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
},
StashCommand::Rm { name } => match stash::rm(&repo, &name) {
Ok(()) => println!("stash '{name}' removed"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
},
StashCommand::List => match stash::list(&repo) {
Ok(output) => println!("{output}"),
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
},
} }
StashCommand::Use { name } => { }
println!("arc stash use: {name} (not yet implemented)");
}
StashCommand::Push => {
println!("arc stash push (not yet implemented)");
}
StashCommand::Pop => {
println!("arc stash pop (not yet implemented)");
}
StashCommand::Rm { name } => {
println!("arc stash rm: {name} (not yet implemented)");
}
StashCommand::List => {
println!("arc stash list (not yet implemented)");
}
},
Command::Graft { target, onto } => { Command::Graft { target, onto } => {
println!("arc graft: {target} onto {onto} (not yet implemented)"); let repo = open_repo_or_exit();
match modify::graft(&repo, &target, &onto) {
Ok(ids) => {
for id in &ids {
println!("grafted {}", &id.0[..id.0.len().min(12)]);
}
}
Err(e) => {
eprintln!("error: {e}");
std::process::exit(1);
}
}
} }
Command::Config { command } => match command { Command::Config { command } => match command {
ConfigCommand::Set { global, key, value } => { ConfigCommand::Set { global, key, value } => {
@ -590,5 +647,5 @@ fn run_diff(repo: &Repository) -> crate::error::Result<String> {
let worktree = tracking::scan_worktree(repo, &ignore)?; let worktree = tracking::scan_worktree(repo, &ignore)?;
let changes = tracking::detect_changes(&committed, &worktree); let changes = tracking::detect_changes(&committed, &worktree);
Ok(diff::render_diff(&committed, &worktree, &changes)) Ok(diff::render_diff(&committed, &changes))
} }

View file

@ -1,7 +1,7 @@
use serde::{Deserialize, Serialize}; use serde::{Deserialize, Serialize};
use std::collections::HashMap; use std::collections::HashMap;
use std::fs; use std::fs;
use std::path::PathBuf; use std::path::{Path, PathBuf};
use crate::error::{ArcError, Result}; use crate::error::{ArcError, Result};
use crate::repo::Repository; use crate::repo::Repository;
@ -54,7 +54,7 @@ impl Config {
Self::load_from(&path) Self::load_from(&path)
} }
fn load_from(path: &PathBuf) -> Result<Option<Config>> { fn load_from(path: &Path) -> Result<Option<Config>> {
if !path.exists() { if !path.exists() {
return Ok(None); return Ok(None);
} }
@ -63,7 +63,7 @@ impl Config {
Ok(Some(config)) Ok(Some(config))
} }
pub fn save_to(&self, path: &PathBuf) -> Result<()> { pub fn save_to(&self, path: &Path) -> Result<()> {
if let Some(parent) = path.parent() { if let Some(parent) = path.parent() {
fs::create_dir_all(parent)?; fs::create_dir_all(parent)?;
} }
@ -128,6 +128,12 @@ impl Config {
} }
} }
/// Load the effective configuration for `repo`: local values layered over
/// global ones. Load failures are treated the same as an absent file.
pub fn load_effective(repo: &crate::repo::Repository) -> EffectiveConfig {
    Config::effective(
        Config::load_local(repo).ok().flatten(),
        Config::load_global().ok().flatten(),
    )
}
impl ArcError { impl ArcError {
pub fn invalid_path(msg: impl Into<String>) -> Self { pub fn invalid_path(msg: impl Into<String>) -> Self {
Self::InvalidPath(msg.into()) Self::InvalidPath(msg.into())

View file

@ -1,7 +1,7 @@
use crate::model::{FileChange, FileChangeKind, FileContentDelta}; use crate::model::{FileChange, FileChangeKind, FileContentDelta};
use crate::tracking::FileTree; use crate::tracking::FileTree;
pub fn render_diff(committed: &FileTree, _worktree: &FileTree, changes: &[FileChange]) -> String { pub fn render_diff(committed: &FileTree, changes: &[FileChange]) -> String {
let mut output = String::new(); let mut output = String::new();
for change in changes { for change in changes {

View file

@ -22,6 +22,17 @@ pub enum ArcError {
TagAlreadyExists(String), TagAlreadyExists(String),
CannotRemoveActiveMark(String), CannotRemoveActiveMark(String),
DirtyWorktree, DirtyWorktree,
InvalidRefName(String),
ClockError,
HashError(String),
MergeConflicts(Vec<String>),
NoMergeBase(String),
StashAlreadyExists(String),
StashNotFound(String),
NoActiveStash,
NothingToStash,
StashEmpty(String),
StashBaseMismatch,
} }
impl fmt::Display for ArcError { impl fmt::Display for ArcError {
@ -53,6 +64,23 @@ impl fmt::Display for ArcError {
f, f,
"uncommitted changes in worktree; commit or reset before switching" "uncommitted changes in worktree; commit or reset before switching"
), ),
Self::InvalidRefName(n) => write!(f, "invalid ref name: {n}"),
Self::ClockError => write!(f, "system clock error: time before unix epoch"),
Self::HashError(msg) => write!(f, "hash computation error: {msg}"),
Self::MergeConflicts(files) => {
write!(f, "merge conflicts in: {}", files.join(", "))
}
Self::NoMergeBase(name) => {
write!(f, "no common ancestor found for merge with: {name}")
}
Self::StashAlreadyExists(n) => write!(f, "stash already exists: {n}"),
Self::StashNotFound(n) => write!(f, "stash not found: {n}"),
Self::NoActiveStash => write!(f, "no active stash"),
Self::NothingToStash => write!(f, "nothing to stash, working tree clean"),
Self::StashEmpty(n) => write!(f, "stash is empty: {n}"),
Self::StashBaseMismatch => {
write!(f, "stash base does not match current HEAD")
}
} }
} }
} }

View file

@ -63,7 +63,7 @@ pub fn show(repo: &Repository, target: &str) -> Result<String> {
tracking::materialize_committed_tree(repo, &c.parents[0])? tracking::materialize_committed_tree(repo, &c.parents[0])?
}; };
let diff_output = diff::render_diff(&parent_tree, &BTreeMap::new(), &obj.delta.changes); let diff_output = diff::render_diff(&parent_tree, &obj.delta.changes);
if !diff_output.is_empty() { if !diff_output.is_empty() {
output.push_str(&diff_output); output.push_str(&diff_output);
} }
@ -225,13 +225,13 @@ fn days_to_ymd(days_since_epoch: i64) -> (i64, u32, u32) {
} }
#[derive(Debug)] #[derive(Debug)]
enum DiffOp { pub enum DiffOp {
Equal(usize, usize), Equal(usize, usize),
Insert(usize), Insert(usize),
Delete(()), Delete(usize),
} }
fn myers_diff(old: &[String], new: &[String]) -> Vec<DiffOp> { pub fn myers_diff(old: &[String], new: &[String]) -> Vec<DiffOp> {
let n = old.len(); let n = old.len();
let m = new.len(); let m = new.len();
@ -239,7 +239,7 @@ fn myers_diff(old: &[String], new: &[String]) -> Vec<DiffOp> {
return (0..m).map(DiffOp::Insert).collect(); return (0..m).map(DiffOp::Insert).collect();
} }
if m == 0 { if m == 0 {
return (0..n).map(|_| DiffOp::Delete(())).collect(); return (0..n).map(DiffOp::Delete).collect();
} }
let max_d = n + m; let max_d = n + m;
@ -311,7 +311,7 @@ fn myers_diff(old: &[String], new: &[String]) -> Vec<DiffOp> {
ops.push(DiffOp::Insert(y as usize)); ops.push(DiffOp::Insert(y as usize));
} else { } else {
x -= 1; x -= 1;
ops.push(DiffOp::Delete(())); ops.push(DiffOp::Delete(x as usize));
} }
} }
} }

View file

@ -4,10 +4,13 @@ pub mod diff;
pub mod error; pub mod error;
pub mod ignore; pub mod ignore;
pub mod inspect; pub mod inspect;
pub mod merge;
pub mod model; pub mod model;
pub mod modify;
pub mod refs; pub mod refs;
pub mod repo; pub mod repo;
pub mod resolve; pub mod resolve;
pub mod stash;
pub mod store; pub mod store;
pub mod tracking; pub mod tracking;

272
src/merge.rs Normal file
View file

@ -0,0 +1,272 @@
use std::collections::BTreeSet;
use crate::inspect::{DiffOp, myers_diff};
use crate::tracking::FileTree;
/// Result of a three-way merge: the merged file tree plus the paths that
/// could not be merged cleanly (their content may carry conflict markers).
pub struct MergeOutcome {
    pub tree: FileTree,
    pub conflicts: Vec<String>,
}
/// Merge `ours` and `theirs` against their common ancestor `base`.
///
/// Walks the union of all paths in the three trees and decides each file
/// independently; files changed on both sides are merged line-by-line via
/// `merge_file_content`. Conflicting paths are recorded in
/// `MergeOutcome::conflicts` (their bytes may contain conflict markers).
pub fn three_way_merge(base: &FileTree, ours: &FileTree, theirs: &FileTree) -> MergeOutcome {
    // Union of every path mentioned by any tree; BTreeSet keeps iteration
    // (and thus conflict reporting) deterministic.
    let all_paths: BTreeSet<&String> = base
        .keys()
        .chain(ours.keys())
        .chain(theirs.keys())
        .collect();
    let mut tree = FileTree::new();
    let mut conflicts = Vec::new();
    for path in all_paths {
        let b = base.get(path);
        let o = ours.get(path);
        let t = theirs.get(path);
        match (b, o, t) {
            // Both sides agree (includes both-added-identical): take either.
            (_, Some(ov), Some(tv)) if ov == tv => {
                tree.insert(path.clone(), ov.clone());
            }
            // Only theirs changed relative to base: take theirs.
            (Some(bv), Some(ov), Some(tv)) if bv == ov => {
                tree.insert(path.clone(), tv.clone());
            }
            // Only ours changed relative to base: take ours.
            (Some(bv), Some(ov), Some(tv)) if bv == tv => {
                tree.insert(path.clone(), ov.clone());
            }
            // Changed on both sides: attempt a line-level merge.
            (Some(bv), Some(ov), Some(tv)) => match merge_file_content(path, ov, tv, bv) {
                FileMerge::Clean(bytes) => {
                    tree.insert(path.clone(), bytes);
                }
                FileMerge::Conflict(bytes) => {
                    tree.insert(path.clone(), bytes);
                    conflicts.push(path.clone());
                }
            },
            // Added only on one side: take that side.
            (None, None, Some(tv)) => {
                tree.insert(path.clone(), tv.clone());
            }
            (None, Some(ov), None) => {
                tree.insert(path.clone(), ov.clone());
            }
            // Added on both sides with different content: merge against an
            // empty base so differing hunks surface as conflicts.
            (None, Some(ov), Some(tv)) => match merge_file_content(path, ov, tv, &[]) {
                FileMerge::Clean(bytes) => {
                    tree.insert(path.clone(), bytes);
                }
                FileMerge::Conflict(bytes) => {
                    tree.insert(path.clone(), bytes);
                    conflicts.push(path.clone());
                }
            },
            // Deleted on one side, untouched on the other: keep the deletion.
            (Some(bv), None, Some(tv)) if bv == tv => {}
            (Some(bv), Some(ov), None) if bv == ov => {}
            // Delete/modify conflicts. NOTE(review): the two arms are
            // asymmetric — ours-deleted/theirs-modified drops the file from
            // the result, while ours-modified/theirs-deleted keeps our
            // version; confirm this asymmetry is intended.
            (Some(_), None, Some(_tv)) => {
                conflicts.push(path.clone());
            }
            (Some(_), Some(ov), None) => {
                conflicts.push(path.clone());
                tree.insert(path.clone(), ov.clone());
            }
            // Deleted on both sides, or never existed: nothing to emit.
            (Some(_), None, None) => {}
            (None, None, None) => {}
        }
    }
    MergeOutcome { tree, conflicts }
}
/// Outcome of merging one file's content: a clean result, or bytes
/// containing `<<<<<<< ours` / `>>>>>>> theirs` conflict markers.
enum FileMerge {
    Clean(Vec<u8>),
    Conflict(Vec<u8>),
}
/// Line-level three-way merge of a single file's content.
///
/// Falls back to a whole-file conflict (keeping `ours` verbatim) when any
/// of the three versions is not valid UTF-8. Otherwise diffs both sides
/// against `base` and replays the edits in base order; overlapping,
/// non-identical edits emit conflict markers.
fn merge_file_content(_path: &str, ours: &[u8], theirs: &[u8], base: &[u8]) -> FileMerge {
    // Binary (non-UTF-8) content cannot be merged line-wise; surface a
    // conflict that preserves our side's bytes untouched.
    let (Ok(base_text), Ok(ours_text), Ok(theirs_text)) = (
        std::str::from_utf8(base),
        std::str::from_utf8(ours),
        std::str::from_utf8(theirs),
    ) else {
        return FileMerge::Conflict(ours.to_vec());
    };
    let base_lines: Vec<String> = base_text.lines().map(String::from).collect();
    let ours_lines: Vec<String> = ours_text.lines().map(String::from).collect();
    let theirs_lines: Vec<String> = theirs_text.lines().map(String::from).collect();
    // Per-side edit maps: base-line index -> replacement chunk.
    let edits_ours = build_edit_map(&base_lines, &ours_lines);
    let edits_theirs = build_edit_map(&base_lines, &theirs_lines);
    let mut result = Vec::new();
    let mut has_conflict = false;
    let mut i = 0;
    // Walk the base line-by-line; also run one step past the end while
    // either side has insertions anchored at the tail.
    while i < base_lines.len() || edits_ours.has_tail_insert(i) || edits_theirs.has_tail_insert(i) {
        let o_edit = edits_ours.get(i);
        let t_edit = edits_theirs.get(i);
        match (o_edit, t_edit) {
            // Neither side touched this base line: copy it through.
            (None, None) => {
                if i < base_lines.len() {
                    result.push(base_lines[i].clone());
                }
                i += 1;
            }
            // Only one side edited here: apply its chunk. `.max(1)`
            // guarantees progress for pure insertions (delete_count == 0).
            (Some(chunk_o), None) => {
                for line in &chunk_o.replacement {
                    result.push(line.clone());
                }
                i += chunk_o.delete_count.max(1);
            }
            (None, Some(chunk_t)) => {
                for line in &chunk_t.replacement {
                    result.push(line.clone());
                }
                i += chunk_t.delete_count.max(1);
            }
            // Both sides edited at the same anchor line.
            (Some(chunk_o), Some(chunk_t)) => {
                if chunk_o.replacement == chunk_t.replacement
                    && chunk_o.delete_count == chunk_t.delete_count
                {
                    // Identical edits merge cleanly.
                    for line in &chunk_o.replacement {
                        result.push(line.clone());
                    }
                } else {
                    // Divergent edits: emit both sides between markers.
                    has_conflict = true;
                    result.push("<<<<<<< ours".to_string());
                    for line in &chunk_o.replacement {
                        result.push(line.clone());
                    }
                    result.push("=======".to_string());
                    for line in &chunk_t.replacement {
                        result.push(line.clone());
                    }
                    result.push(">>>>>>> theirs".to_string());
                }
                // Skip past whichever side consumed more base lines.
                i += chunk_o.delete_count.max(chunk_t.delete_count).max(1);
            }
        }
    }
    // Re-join; non-empty output gets a trailing newline.
    let mut text = result.join("\n");
    if !text.is_empty() {
        text.push('\n');
    }
    if has_conflict {
        FileMerge::Conflict(text.into_bytes())
    } else {
        FileMerge::Clean(text.into_bytes())
    }
}
/// One replacement anchored at a base-line index: delete `delete_count`
/// base lines starting there and insert `replacement` in their place.
#[derive(Debug, Clone)]
struct EditChunk {
    replacement: Vec<String>,
    delete_count: usize,
}
/// All edits of one side relative to the base, keyed by base-line index.
/// `tail_inserts` holds insertions anchored at or past the end of the base.
struct EditMap {
    chunks: std::collections::BTreeMap<usize, EditChunk>,
    tail_inserts: std::collections::BTreeMap<usize, Vec<String>>,
}
impl EditMap {
    /// Look up the edit anchored at `base_idx`, if any. A tail insertion
    /// is surfaced as a chunk that deletes nothing.
    fn get(&self, base_idx: usize) -> Option<EditChunk> {
        self.chunks.get(&base_idx).cloned().or_else(|| {
            self.tail_inserts.get(&base_idx).map(|lines| EditChunk {
                replacement: lines.clone(),
                delete_count: 0,
            })
        })
    }

    /// True when lines are inserted at base position `base_idx` past the
    /// end of the base.
    fn has_tail_insert(&self, base_idx: usize) -> bool {
        self.tail_inserts.contains_key(&base_idx)
    }
}
/// Diff `modified` against `base` and group the resulting ops into an
/// `EditMap`: a run of deletes (with any interleaved inserts) becomes one
/// `EditChunk` anchored at the first deleted base line; a pure insertion
/// run is anchored at the base line that follows it, or recorded as a
/// tail insert when it falls past the end of the base.
fn build_edit_map(base: &[String], modified: &[String]) -> EditMap {
    let ops = myers_diff(base, modified);
    let mut chunks: std::collections::BTreeMap<usize, EditChunk> =
        std::collections::BTreeMap::new();
    let mut tail_inserts: std::collections::BTreeMap<usize, Vec<String>> =
        std::collections::BTreeMap::new();
    let mut i = 0;
    while i < ops.len() {
        match &ops[i] {
            // Unchanged line: no edit to record.
            DiffOp::Equal(_, _) => {
                i += 1;
            }
            DiffOp::Delete(base_idx) => {
                // A deletion starts a chunk; absorb the following run of
                // deletes and inserts as a single replacement.
                let start = *base_idx;
                let mut delete_count = 0;
                let mut replacement = Vec::new();
                while i < ops.len() {
                    match &ops[i] {
                        DiffOp::Delete(_) => {
                            delete_count += 1;
                            i += 1;
                        }
                        DiffOp::Insert(new_idx) => {
                            replacement.push(modified[*new_idx].clone());
                            i += 1;
                        }
                        _ => break,
                    }
                }
                chunks.insert(
                    start,
                    EditChunk {
                        replacement,
                        delete_count,
                    },
                );
            }
            DiffOp::Insert(_) => {
                // A pure insertion run (no preceding delete): collect it,
                // then anchor it at the next op's base position.
                let mut inserts = Vec::new();
                while i < ops.len() {
                    if let DiffOp::Insert(idx) = &ops[i] {
                        inserts.push(modified[*idx].clone());
                        i += 1;
                    } else {
                        break;
                    }
                }
                let base_pos = if i < ops.len() {
                    match &ops[i] {
                        DiffOp::Equal(bi, _) => *bi,
                        DiffOp::Delete(bi) => *bi,
                        _ => base.len(),
                    }
                } else {
                    base.len()
                };
                if base_pos < base.len() {
                    // Prepend the insertions to any chunk already anchored
                    // at this base line so insert-before-delete order holds.
                    let chunk = chunks.entry(base_pos).or_insert(EditChunk {
                        replacement: Vec::new(),
                        delete_count: 0,
                    });
                    let mut combined = inserts;
                    combined.append(&mut chunk.replacement);
                    chunk.replacement = combined;
                } else {
                    // Insertion past the end of the base file.
                    tail_inserts.entry(base_pos).or_default().extend(inserts);
                }
            }
        }
    }
    EditMap {
        chunks,
        tail_inserts,
    }
}

View file

@ -8,6 +8,30 @@ pub struct CommitId(pub String);
#[serde(transparent)] #[serde(transparent)]
pub struct DeltaId(pub String); pub struct DeltaId(pub String);
// Render ids as their raw hash strings; borrow them wherever `&str` works.

impl std::fmt::Display for CommitId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl std::fmt::Display for DeltaId {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)
    }
}

impl AsRef<str> for CommitId {
    fn as_ref(&self) -> &str {
        self.0.as_str()
    }
}

impl AsRef<str> for DeltaId {
    fn as_ref(&self) -> &str {
        self.0.as_str()
    }
}
#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)] #[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct Commit { pub struct Commit {
pub id: CommitId, pub id: CommitId,

361
src/modify.rs Normal file
View file

@ -0,0 +1,361 @@
use std::collections::{BTreeMap, HashSet};
use std::fs;
use crate::error::{ArcError, Result};
use crate::ignore::IgnoreRules;
use crate::merge;
use crate::model::{CommitId, Delta, FileChangeKind, Head, RefTarget};
use crate::refs;
use crate::repo::Repository;
use crate::resolve;
use crate::store::{self, CommitObject};
use crate::tracking::{self, FileTree};
/// Discard uncommitted worktree changes, restoring files to HEAD's
/// content. With an empty `files` list every change is reset; otherwise
/// only changes whose path is listed are undone.
/// Returns a human-readable summary message.
pub fn reset(repo: &Repository, files: &[String]) -> Result<String> {
    let head_commit = tracking::resolve_head_commit(repo)?;
    let ignore = IgnoreRules::load(&repo.workdir);
    // Tree as of HEAD (empty when there are no commits yet).
    let committed = match &head_commit {
        Some(id) => tracking::materialize_committed_tree(repo, id)?,
        None => BTreeMap::new(),
    };
    let worktree = tracking::scan_worktree(repo, &ignore)?;
    let changes = tracking::detect_changes(&committed, &worktree);
    if changes.is_empty() {
        return Ok("nothing to reset, working tree clean".to_string());
    }
    // None means "reset everything"; Some(set) limits to the named paths.
    let filter: Option<HashSet<&str>> = if files.is_empty() {
        None
    } else {
        Some(files.iter().map(|s| s.as_str()).collect())
    };
    let mut reset_count = 0usize;
    for change in &changes {
        if let Some(ref f) = filter
            && !f.contains(change.path.as_str())
        {
            continue;
        }
        // Guard against path traversal before touching the filesystem.
        crate::repo::validate_repo_path(&change.path)?;
        let abs = repo.workdir.join(&change.path);
        match &change.kind {
            FileChangeKind::Add { .. } => {
                // Untracked addition: deleting the file restores HEAD state.
                if abs.exists() {
                    fs::remove_file(&abs)?;
                }
            }
            FileChangeKind::Modify { .. } | FileChangeKind::Delete => {
                // Rewrite the committed content over the modified/removed file.
                if let Some(content) = committed.get(&change.path) {
                    if let Some(parent) = abs.parent() {
                        fs::create_dir_all(parent)?;
                    }
                    fs::write(&abs, content)?;
                }
            }
            FileChangeKind::Rename { from } => {
                // Undo a rename: drop the new path, restore the old one.
                // NOTE(review): `from` is joined without validate_repo_path —
                // confirm paths stored in the committed tree are pre-validated.
                if abs.exists() {
                    fs::remove_file(&abs)?;
                }
                if let Some(content) = committed.get(from) {
                    let from_abs = repo.workdir.join(from);
                    if let Some(parent) = from_abs.parent() {
                        fs::create_dir_all(parent)?;
                    }
                    fs::write(&from_abs, content)?;
                }
            }
        }
        reset_count += 1;
    }
    // File deletions may leave empty directories behind; prune them.
    refs::remove_empty_dirs(&repo.workdir)?;
    if reset_count == 0 {
        Ok("no matching files to reset".to_string())
    } else {
        Ok(format!("reset {reset_count} file(s)"))
    }
}
/// Create a new commit that undoes the commit(s) named by `target`
/// (single ref or `a..b` range), preserving history immutability.
///
/// Each reverted commit is undone by a three-way merge using that commit
/// as the base, the current tree as ours, and its first parent as theirs.
/// Errors: `DirtyWorktree` before starting, `NoCommitsYet` without a HEAD,
/// and `MergeConflicts` after writing the conflicted tree to the worktree.
pub fn revert(repo: &Repository, target: &str) -> Result<CommitId> {
    require_clean_worktree(repo)?;
    let head_id = tracking::resolve_head_commit(repo)?.ok_or(ArcError::NoCommitsYet)?;
    let commits = resolve_commit_or_range(repo, target)?;
    let mut current_tree = tracking::materialize_committed_tree(repo, &head_id)?;
    // Undo newest-first so each inverse applies onto the accumulated tree.
    for obj in commits.iter().rev() {
        let parent_tree = if obj.commit.parents.is_empty() {
            BTreeMap::new()
        } else {
            tracking::materialize_committed_tree(repo, &obj.commit.parents[0])?
        };
        let commit_tree = tracking::materialize_committed_tree(repo, &obj.commit.id)?;
        // base = the commit itself, theirs = its parent: the merge result
        // is the current tree with that commit's change subtracted.
        let outcome = merge::three_way_merge(&commit_tree, &current_tree, &parent_tree);
        if !outcome.conflicts.is_empty() {
            // Leave the conflicted tree (with markers) for manual fixup.
            write_tree_to_worktree(repo, &outcome.tree)?;
            return Err(ArcError::MergeConflicts(outcome.conflicts));
        }
        current_tree = outcome.tree;
    }
    write_tree_to_worktree(repo, &current_tree)?;
    // Truncate at a char boundary: `target` is user input and byte-slicing
    // `&target[..12]` would panic on multi-byte UTF-8 ref names.
    let short_target: String = target.chars().take(12).collect();
    let message = format!("revert {short_target}");
    commit_tree(repo, &message, vec![head_id], &current_tree)
}
/// Merge the commit named by `target` into HEAD with a three-way merge,
/// creating a two-parent merge commit on success.
///
/// Errors: `DirtyWorktree` for uncommitted changes, `NothingToCommit`
/// when `target` already equals HEAD, and `MergeConflicts` after writing
/// the conflicted tree (with markers) into the worktree.
pub fn merge_branch(repo: &Repository, target: &str) -> Result<CommitId> {
    require_clean_worktree(repo)?;
    let ours_id = tracking::resolve_head_commit(repo)?.ok_or(ArcError::NoCommitsYet)?;
    let theirs_id = resolve::resolve_target(repo, target)?;
    if ours_id == theirs_id {
        return Err(ArcError::NothingToCommit);
    }
    let base_id = find_merge_base(repo, &ours_id, &theirs_id)?;
    // NOTE(review): unrelated histories (no common ancestor) fall back to
    // an empty base tree instead of returning NoMergeBase — confirm that
    // is the intended behavior given the variant exists.
    let base_tree = match &base_id {
        Some(id) => tracking::materialize_committed_tree(repo, id)?,
        None => BTreeMap::new(),
    };
    let ours_tree = tracking::materialize_committed_tree(repo, &ours_id)?;
    let theirs_tree = tracking::materialize_committed_tree(repo, &theirs_id)?;
    let outcome = merge::three_way_merge(&base_tree, &ours_tree, &theirs_tree);
    // Write the merged tree before the conflict check so that on conflict
    // the markers land in the worktree for manual resolution.
    write_tree_to_worktree(repo, &outcome.tree)?;
    if !outcome.conflicts.is_empty() {
        return Err(ArcError::MergeConflicts(outcome.conflicts));
    }
    let message = format!("merge {target}");
    commit_tree(repo, &message, vec![ours_id, theirs_id], &outcome.tree)
}
/// Cherry-pick the commit(s) named by `target` (single ref or `a..b`
/// range) onto `onto`, replaying each with a three-way merge.
///
/// When `onto` is a bookmark, the bookmark (and HEAD, if attached to it)
/// advances to the last grafted commit; otherwise HEAD detaches there.
/// Returns the new commit ids in graft order.
pub fn graft(repo: &Repository, target: &str, onto: &str) -> Result<Vec<CommitId>> {
    require_clean_worktree(repo)?;
    let source_commits = resolve_commit_or_range(repo, target)?;
    let onto_id = resolve::resolve_target(repo, onto)?;
    // Treat `onto` as a bookmark only if it is a safe ref name AND the
    // bookmark file exists (the name check guards the path join).
    let is_bookmark = if crate::repo::validate_ref_name(onto).is_ok() {
        repo.bookmarks_dir().join(onto).exists()
    } else {
        false
    };
    let mut current_tip = onto_id.clone();
    let mut current_tree = tracking::materialize_committed_tree(repo, &current_tip)?;
    let mut new_ids = Vec::new();
    for obj in &source_commits {
        // Base = the commit's first parent (empty for a root commit), so
        // the merge replays just that commit's own change onto the tip.
        let parent_tree = if obj.commit.parents.is_empty() {
            BTreeMap::new()
        } else {
            tracking::materialize_committed_tree(repo, &obj.commit.parents[0])?
        };
        let commit_tree = tracking::materialize_committed_tree(repo, &obj.commit.id)?;
        let outcome = merge::three_way_merge(&parent_tree, &current_tree, &commit_tree);
        if !outcome.conflicts.is_empty() {
            // Leave the conflicted tree (with markers) for manual fixup.
            write_tree_to_worktree(repo, &outcome.tree)?;
            return Err(ArcError::MergeConflicts(outcome.conflicts));
        }
        // Truncate the id to 12 bytes for the message (assumes ASCII-hex
        // commit ids — TODO confirm against store::compute_commit_id).
        let short_id = &obj.commit.id.0[..obj.commit.id.0.len().min(12)];
        let message = format!("graft {short_id}: {}", obj.commit.message);
        let new_id = commit_tree_internal(repo, &message, vec![current_tip], &outcome.tree)?;
        current_tip = new_id.clone();
        current_tree = outcome.tree;
        new_ids.push(new_id);
    }
    if is_bookmark {
        // Advance the bookmark, and HEAD too when it is attached to it.
        let bookmark_path = repo.bookmarks_dir().join(onto);
        let ref_target = RefTarget {
            commit: Some(current_tip.clone()),
        };
        let ref_yaml = serde_yaml::to_string(&ref_target)?;
        fs::write(&bookmark_path, ref_yaml)?;
        let head = repo.load_head()?;
        if let Head::Attached { bookmark, .. } = &head
            && bookmark == onto
        {
            repo.save_head(&Head::Attached {
                bookmark: bookmark.clone(),
                commit: current_tip.clone(),
            })?;
        }
    } else {
        // Not a bookmark: detach HEAD at the grafted tip.
        repo.save_head(&Head::Detached {
            commit: current_tip,
        })?;
    }
    // Materialize the final grafted tree into the worktree.
    write_tree_to_worktree(repo, &current_tree)?;
    Ok(new_ids)
}
/// Fail with `DirtyWorktree` unless the worktree has no uncommitted changes.
fn require_clean_worktree(repo: &Repository) -> Result<()> {
    let (report, _) = tracking::status(repo)?;
    if report.is_clean() {
        Ok(())
    } else {
        Err(ArcError::DirtyWorktree)
    }
}
/// Resolve `spec` to the commits it names: a `a..b` range yields the
/// commits after the range start; anything else yields a single commit.
fn resolve_commit_or_range(repo: &Repository, spec: &str) -> Result<Vec<CommitObject>> {
    if !spec.contains("..") {
        // Plain ref/id: one commit.
        let id = resolve::resolve_target(repo, spec)?;
        return Ok(vec![store::read_commit_object(repo, &id)?]);
    }
    let resolved = resolve::parse_and_resolve_range(repo, Some(spec))?;
    Ok(resolved.chain[resolved.start_idx..].to_vec())
}
/// Find a common ancestor of `ours` and `theirs`, or `None` when the
/// histories are unrelated.
///
/// Collects every ancestor of `ours` (plus `ours` itself), then walks
/// backwards from `theirs` until hitting one of them.
/// NOTE(review): the walk pops from a Vec (LIFO, depth-first), so with
/// merge commits this returns *a* common ancestor, not necessarily the
/// nearest one — confirm that is acceptable for merge quality.
fn find_merge_base(
    repo: &Repository,
    ours: &CommitId,
    theirs: &CommitId,
) -> Result<Option<CommitId>> {
    let mut ours_ancestors = HashSet::new();
    collect_ancestors(repo, ours, &mut ours_ancestors)?;
    ours_ancestors.insert(ours.0.clone());
    let mut queue = vec![theirs.clone()];
    let mut visited = HashSet::new();
    while let Some(id) = queue.pop() {
        if ours_ancestors.contains(&id.0) {
            return Ok(Some(id));
        }
        // Skip commits already expanded (merge histories converge).
        if !visited.insert(id.0.clone()) {
            continue;
        }
        let obj = store::read_commit_object(repo, &id)?;
        for parent in &obj.commit.parents {
            queue.push(parent.clone());
        }
    }
    Ok(None)
}
/// Insert the ids of every ancestor of `id` (excluding `id` itself) into
/// `ancestors`.
///
/// Uses an explicit worklist instead of recursion: the original recursed
/// once per history edge, which can overflow the call stack on long
/// histories. The resulting set is identical — each commit is expanded
/// only the first time its id is inserted.
fn collect_ancestors(
    repo: &Repository,
    id: &CommitId,
    ancestors: &mut HashSet<String>,
) -> Result<()> {
    let mut stack = vec![id.clone()];
    while let Some(current) = stack.pop() {
        let obj = store::read_commit_object(repo, &current)?;
        for parent in &obj.commit.parents {
            // Only expand a parent the first time we see it.
            if ancestors.insert(parent.0.clone()) {
                stack.push(parent.clone());
            }
        }
    }
    Ok(())
}
/// Replace the worktree's contents with `tree`: remove the files currently
/// on disk (as scanned, honoring ignore rules), then write the new tree.
fn write_tree_to_worktree(repo: &Repository, tree: &FileTree) -> Result<()> {
    let ignore = IgnoreRules::load(&repo.workdir);
    let on_disk = tracking::scan_worktree(repo, &ignore)?;
    refs::clean_tracked_files(repo, &on_disk)?;
    refs::write_tree(repo, tree)
}
/// Create a commit whose snapshot is `tree` with the given parents.
/// Thin public-facing wrapper over `commit_tree_internal`; the original
/// `let id = …?; Ok(id)` indirection was redundant.
fn commit_tree(
    repo: &Repository,
    message: &str,
    parents: Vec<CommitId>,
    tree: &FileTree,
) -> Result<CommitId> {
    commit_tree_internal(repo, message, parents, tree)
}
/// Build and persist a commit whose snapshot is `new_tree`, then advance
/// refs to point at it. Returns the new commit id.
///
/// The delta is computed against the first parent (or an empty tree for a
/// root commit); errors with `NothingToCommit` when the snapshot equals
/// that parent's tree.
fn commit_tree_internal(
    repo: &Repository,
    message: &str,
    parents: Vec<CommitId>,
    new_tree: &FileTree,
) -> Result<CommitId> {
    let parent_tree = if parents.is_empty() {
        BTreeMap::new()
    } else {
        tracking::materialize_committed_tree(repo, &parents[0])?
    };
    let changes = tracking::detect_changes(&parent_tree, new_tree);
    if changes.is_empty() {
        return Err(ArcError::NothingToCommit);
    }
    let delta_id = store::compute_delta_id(&parents.first().cloned(), &changes)?;
    let delta = Delta {
        id: delta_id.clone(),
        base: parents.first().cloned(),
        changes,
    };
    // Author comes from the effective (local over global) config; missing
    // name or email yields a commit with no author signature.
    let config = crate::config::load_effective(repo);
    let author = match (config.user_name, config.user_email) {
        (Some(name), Some(email)) => Some(crate::model::Signature { name, email }),
        _ => None,
    };
    // Seconds since the unix epoch; a pre-epoch system clock is an error.
    let timestamp = std::time::SystemTime::now()
        .duration_since(std::time::UNIX_EPOCH)
        .map_err(|_| ArcError::ClockError)?
        .as_secs() as i64;
    let commit_id = store::compute_commit_id(&parents, &delta_id, message, &author, timestamp)?;
    let commit_obj = crate::model::Commit {
        id: commit_id.clone(),
        parents: parents.clone(),
        delta: delta_id,
        message: message.to_string(),
        author,
        timestamp,
    };
    let obj = CommitObject {
        commit: commit_obj,
        delta,
    };
    store::write_commit_object(repo, &obj)?;
    // Move the current bookmark (or detached HEAD) onto the new commit.
    let head = repo.load_head()?;
    crate::refs::update_refs_after_commit(repo, &head, &commit_id)?;
    Ok(commit_id)
}

View file

@ -44,12 +44,14 @@ fn short_id(id: &CommitId) -> &str {
} }
pub fn mark_add(repo: &Repository, name: &str, commit: Option<&str>) -> Result<CommitId> { pub fn mark_add(repo: &Repository, name: &str, commit: Option<&str>) -> Result<CommitId> {
crate::repo::validate_ref_name(name)?;
let id = resolve_commit_or_head(repo, commit)?; let id = resolve_commit_or_head(repo, commit)?;
write_ref_target(&repo.bookmarks_dir().join(name), &id)?; write_ref_target(&repo.bookmarks_dir().join(name), &id)?;
Ok(id) Ok(id)
} }
pub fn mark_rm(repo: &Repository, name: &str) -> Result<()> { pub fn mark_rm(repo: &Repository, name: &str) -> Result<()> {
crate::repo::validate_ref_name(name)?;
let path = repo.bookmarks_dir().join(name); let path = repo.bookmarks_dir().join(name);
if !path.exists() { if !path.exists() {
return Err(ArcError::BookmarkNotFound(name.to_string())); return Err(ArcError::BookmarkNotFound(name.to_string()));
@ -95,6 +97,8 @@ pub fn mark_list(repo: &Repository) -> Result<String> {
} }
pub fn mark_rename(repo: &Repository, name: &str, new_name: &str) -> Result<()> { pub fn mark_rename(repo: &Repository, name: &str, new_name: &str) -> Result<()> {
crate::repo::validate_ref_name(name)?;
crate::repo::validate_ref_name(new_name)?;
let old_path = repo.bookmarks_dir().join(name); let old_path = repo.bookmarks_dir().join(name);
if !old_path.exists() { if !old_path.exists() {
return Err(ArcError::BookmarkNotFound(name.to_string())); return Err(ArcError::BookmarkNotFound(name.to_string()));
@ -125,6 +129,7 @@ pub fn mark_rename(repo: &Repository, name: &str, new_name: &str) -> Result<()>
} }
pub fn tag_add(repo: &Repository, name: &str, commit: Option<&str>) -> Result<CommitId> { pub fn tag_add(repo: &Repository, name: &str, commit: Option<&str>) -> Result<CommitId> {
crate::repo::validate_ref_name(name)?;
let path = repo.tags_dir().join(name); let path = repo.tags_dir().join(name);
if path.exists() { if path.exists() {
return Err(ArcError::TagAlreadyExists(name.to_string())); return Err(ArcError::TagAlreadyExists(name.to_string()));
@ -135,6 +140,7 @@ pub fn tag_add(repo: &Repository, name: &str, commit: Option<&str>) -> Result<Co
} }
pub fn tag_rm(repo: &Repository, name: &str) -> Result<()> { pub fn tag_rm(repo: &Repository, name: &str) -> Result<()> {
crate::repo::validate_ref_name(name)?;
let path = repo.tags_dir().join(name); let path = repo.tags_dir().join(name);
if !path.exists() { if !path.exists() {
return Err(ArcError::TagNotFound(name.to_string())); return Err(ArcError::TagNotFound(name.to_string()));
@ -182,19 +188,18 @@ pub fn switch(repo: &Repository, target: &str) -> Result<String> {
return Err(ArcError::DirtyWorktree); return Err(ArcError::DirtyWorktree);
} }
let bookmark_path = repo.bookmarks_dir().join(target); let valid_ref = crate::repo::validate_ref_name(target).is_ok();
let tag_path = repo.tags_dir().join(target);
let (new_head, message) = if bookmark_path.exists() { let (new_head, message) = if valid_ref && repo.bookmarks_dir().join(target).exists() {
let ref_target = read_ref_target(&bookmark_path)?; let ref_target = read_ref_target(&repo.bookmarks_dir().join(target))?;
let commit = ref_target.commit.ok_or(ArcError::NoCommitsYet)?; let commit = ref_target.commit.ok_or(ArcError::NoCommitsYet)?;
let head = Head::Attached { let head = Head::Attached {
bookmark: target.to_string(), bookmark: target.to_string(),
commit, commit,
}; };
(head, format!("switched to bookmark '{target}'")) (head, format!("switched to bookmark '{target}'"))
} else if tag_path.exists() { } else if valid_ref && repo.tags_dir().join(target).exists() {
let ref_target = read_ref_target(&tag_path)?; let ref_target = read_ref_target(&repo.tags_dir().join(target))?;
let commit = ref_target.commit.ok_or(ArcError::NoCommitsYet)?; let commit = ref_target.commit.ok_or(ArcError::NoCommitsYet)?;
let head = Head::Detached { commit }; let head = Head::Detached { commit };
(head, format!("switched to tag '{target}'")) (head, format!("switched to tag '{target}'"))
@ -221,8 +226,9 @@ pub fn switch(repo: &Repository, target: &str) -> Result<String> {
Ok(message) Ok(message)
} }
fn clean_tracked_files(repo: &Repository, tree: &tracking::FileTree) -> Result<()> { pub fn clean_tracked_files(repo: &Repository, tree: &tracking::FileTree) -> Result<()> {
for path in tree.keys() { for path in tree.keys() {
crate::repo::validate_repo_path(path)?;
let abs = repo.workdir.join(path); let abs = repo.workdir.join(path);
if abs.exists() { if abs.exists() {
fs::remove_file(&abs)?; fs::remove_file(&abs)?;
@ -233,7 +239,7 @@ fn clean_tracked_files(repo: &Repository, tree: &tracking::FileTree) -> Result<(
Ok(()) Ok(())
} }
fn remove_empty_dirs(dir: &std::path::Path) -> Result<()> { pub fn remove_empty_dirs(dir: &std::path::Path) -> Result<()> {
let entries = match fs::read_dir(dir) { let entries = match fs::read_dir(dir) {
Ok(e) => e, Ok(e) => e,
Err(_) => return Ok(()), Err(_) => return Ok(()),
@ -257,8 +263,9 @@ fn remove_empty_dirs(dir: &std::path::Path) -> Result<()> {
Ok(()) Ok(())
} }
fn write_tree(repo: &Repository, tree: &tracking::FileTree) -> Result<()> { pub fn write_tree(repo: &Repository, tree: &tracking::FileTree) -> Result<()> {
for (path, bytes) in tree { for (path, bytes) in tree {
crate::repo::validate_repo_path(path)?;
let abs = repo.workdir.join(path); let abs = repo.workdir.join(path);
if let Some(parent) = abs.parent() { if let Some(parent) = abs.parent() {
fs::create_dir_all(parent)?; fs::create_dir_all(parent)?;
@ -267,3 +274,33 @@ fn write_tree(repo: &Repository, tree: &tracking::FileTree) -> Result<()> {
} }
Ok(()) Ok(())
} }
/// Advance refs and HEAD to point at a freshly created commit.
///
/// On an unborn or attached HEAD the current bookmark file is rewritten to
/// the new commit and HEAD stays attached to it; a detached HEAD simply
/// records the new commit.
pub fn update_refs_after_commit(
    repo: &Repository,
    head: &Head,
    commit_id: &CommitId,
) -> Result<()> {
    let serialized = serde_yaml::to_string(&RefTarget {
        commit: Some(commit_id.clone()),
    })?;
    let new_head = match head {
        Head::Unborn { bookmark } | Head::Attached { bookmark, .. } => {
            // Move the bookmark forward, then (re)attach HEAD to it.
            fs::write(repo.bookmarks_dir().join(bookmark), &serialized)?;
            Head::Attached {
                bookmark: bookmark.clone(),
                commit: commit_id.clone(),
            }
        }
        // No bookmark to move; just track the new commit.
        Head::Detached { .. } => Head::Detached {
            commit: commit_id.clone(),
        },
    };
    repo.save_head(&new_head)
}

View file

@ -110,3 +110,42 @@ impl Repository {
Ok(()) Ok(())
} }
} }
/// Validate a bookmark/tag/stash name before it is used as a filename.
///
/// Rejects anything that could escape the refs directory when joined onto
/// it: empty names, NUL bytes, dotfiles, and any name that is not exactly
/// one normal path component (so `..`, separators, and absolute paths fail).
pub fn validate_ref_name(name: &str) -> Result<()> {
    use std::path::{Component, Path};
    let invalid = || ArcError::InvalidRefName(name.to_string());
    if name.is_empty() || name.starts_with('.') || name.contains('\0') {
        return Err(invalid());
    }
    let mut components = Path::new(name).components();
    let single_normal = matches!(
        (components.next(), components.next()),
        (Some(Component::Normal(_)), None)
    );
    if single_normal {
        Ok(())
    } else {
        Err(invalid())
    }
}
/// Validate a repository-relative path before joining it onto the worktree.
///
/// Only plain name components and `.` are permitted; `..`, root directories,
/// and (on Windows) drive prefixes are rejected so a stored path can never
/// traverse outside the worktree. Empty paths and NUL bytes also fail.
pub fn validate_repo_path(p: &str) -> Result<()> {
    use std::path::{Component, Path};
    let invalid = || ArcError::InvalidPath(format!("invalid repo path: {p}"));
    if p.is_empty() || p.contains('\0') {
        return Err(invalid());
    }
    let all_safe = Path::new(p)
        .components()
        .all(|c| matches!(c, Component::Normal(_) | Component::CurDir));
    if all_safe {
        Ok(())
    } else {
        Err(invalid())
    }
}

View file

@ -11,21 +11,23 @@ pub fn resolve_target(repo: &Repository, target: &str) -> Result<CommitId> {
return tracking::resolve_head_commit(repo)?.ok_or(ArcError::NoCommitsYet); return tracking::resolve_head_commit(repo)?.ok_or(ArcError::NoCommitsYet);
} }
let bookmark_path = repo.bookmarks_dir().join(target); if crate::repo::validate_ref_name(target).is_ok() {
if bookmark_path.exists() { let bookmark_path = repo.bookmarks_dir().join(target);
let contents = fs::read_to_string(&bookmark_path)?; if bookmark_path.exists() {
let ref_target: RefTarget = serde_yaml::from_str(&contents)?; let contents = fs::read_to_string(&bookmark_path)?;
if let Some(id) = ref_target.commit { let ref_target: RefTarget = serde_yaml::from_str(&contents)?;
return Ok(id); if let Some(id) = ref_target.commit {
return Ok(id);
}
} }
}
let tag_path = repo.tags_dir().join(target); let tag_path = repo.tags_dir().join(target);
if tag_path.exists() { if tag_path.exists() {
let contents = fs::read_to_string(&tag_path)?; let contents = fs::read_to_string(&tag_path)?;
let ref_target: RefTarget = serde_yaml::from_str(&contents)?; let ref_target: RefTarget = serde_yaml::from_str(&contents)?;
if let Some(id) = ref_target.commit { if let Some(id) = ref_target.commit {
return Ok(id); return Ok(id);
}
} }
} }
@ -51,7 +53,13 @@ fn resolve_commit_prefix(repo: &Repository, prefix: &str) -> Result<CommitId> {
match matches.len() { match matches.len() {
0 => Err(ArcError::UnknownRevision(prefix.to_string())), 0 => Err(ArcError::UnknownRevision(prefix.to_string())),
1 => Ok(matches.into_iter().next().unwrap()), 1 => {
let id = matches
.into_iter()
.next()
.ok_or_else(|| ArcError::UnknownRevision(prefix.to_string()))?;
Ok(id)
}
_ => Err(ArcError::AmbiguousPrefix(prefix.to_string())), _ => Err(ArcError::AmbiguousPrefix(prefix.to_string())),
} }
} }

330
src/stash.rs Normal file
View file

@ -0,0 +1,330 @@
use std::collections::BTreeMap;
use std::fs;
use std::path::PathBuf;
use std::time::{SystemTime, UNIX_EPOCH};
use serde::{Deserialize, Serialize};
use crate::error::{ArcError, Result};
use crate::ignore::IgnoreRules;
use crate::model::{CommitId, FileChangeKind};
use crate::refs;
use crate::repo::{self, Repository};
use crate::tracking;
/// Persistent state tracking the currently active stash.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct StashState {
    /// Name of the active stash, or `None` when no stash is selected.
    pub active: Option<String>,
}
/// A named stash file containing a stack of entries.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct StashFile {
    /// Stack of snapshots: `push` appends, `pop` removes the last entry.
    pub entries: Vec<StashEntry>,
}
/// A single stash entry representing a snapshot of dirty changes.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct StashEntry {
    /// HEAD commit the changes were taken against (`None` in an unborn
    /// repo). `pop` refuses to apply an entry unless HEAD equals this base.
    pub base: Option<CommitId>,
    /// Creation time in seconds since the Unix epoch.
    pub timestamp: i64,
    /// The dirty changes captured by this entry.
    pub changes: Vec<StashChange>,
}
/// A single file change within a stash entry.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct StashChange {
    /// Repository-relative path of the changed file.
    pub path: String,
    /// What happened to the file.
    pub kind: StashChangeKind,
    /// Full file bytes for adds/modifies; `None` for deletes (and for
    /// changes stashed without full content — see `push`).
    pub content: Option<Vec<u8>>,
}
/// The kind of change recorded in a stash.
#[derive(Serialize, Deserialize, Debug, Clone)]
#[serde(rename_all = "lowercase")]
pub enum StashChangeKind {
    Add,
    Modify,
    Delete,
}
/// Directory holding named stash files (`<stashes>/named/`).
fn stash_named_dir(repo: &Repository) -> PathBuf {
    repo.stashes_dir().join("named")
}
/// Path of the active-stash marker file (`<stashes>/state.yml`).
fn stash_state_path(repo: &Repository) -> PathBuf {
    repo.stashes_dir().join("state.yml")
}
/// Path of a named stash's YAML file under the named-stash directory.
/// Callers validate `name` first, so the join cannot escape the directory.
fn stash_file_path(repo: &Repository, name: &str) -> PathBuf {
    stash_named_dir(repo).join(format!("{name}.yml"))
}
/// Load the active-stash marker from disk.
///
/// A missing `state.yml` simply means "no active stash" and is not an
/// error. The absence check is folded into the read itself (matching on
/// `ErrorKind::NotFound`) instead of a separate `exists()` probe, which
/// removes the check-then-read race.
fn load_state(repo: &Repository) -> Result<StashState> {
    let contents = match fs::read_to_string(stash_state_path(repo)) {
        Ok(contents) => contents,
        Err(e) if e.kind() == std::io::ErrorKind::NotFound => {
            return Ok(StashState { active: None });
        }
        Err(e) => return Err(e.into()),
    };
    let state: StashState = serde_yaml::from_str(&contents)?;
    Ok(state)
}
/// Persist the active-stash marker to `state.yml`.
fn save_state(repo: &Repository, state: &StashState) -> Result<()> {
    fs::write(stash_state_path(repo), serde_yaml::to_string(state)?)?;
    Ok(())
}
/// Deserialize a named stash file; errors if it does not exist or is invalid.
fn load_stash_file(repo: &Repository, name: &str) -> Result<StashFile> {
    let raw = fs::read_to_string(stash_file_path(repo, name))?;
    let parsed: StashFile = serde_yaml::from_str(&raw)?;
    Ok(parsed)
}
/// Serialize a named stash file back to disk.
fn save_stash_file(repo: &Repository, name: &str, file: &StashFile) -> Result<()> {
    fs::write(stash_file_path(repo, name), serde_yaml::to_string(file)?)?;
    Ok(())
}
/// Create a new named stash and set it as active.
///
/// Errors with `StashAlreadyExists` if a stash of that name is present.
pub fn create(repo: &Repository, name: &str) -> Result<()> {
    repo::validate_ref_name(name)?;
    fs::create_dir_all(stash_named_dir(repo))?;
    if stash_file_path(repo, name).exists() {
        return Err(ArcError::StashAlreadyExists(name.to_string()));
    }
    // Start with an empty stack; entries are added by `push`.
    save_stash_file(repo, name, &StashFile { entries: Vec::new() })?;
    // A freshly created stash becomes the active one.
    save_state(
        repo,
        &StashState {
            active: Some(name.to_string()),
        },
    )
}
/// Switch the active stash to an existing named stash.
///
/// Errors with `StashNotFound` if no stash of that name exists.
pub fn use_stash(repo: &Repository, name: &str) -> Result<()> {
    repo::validate_ref_name(name)?;
    if !stash_file_path(repo, name).exists() {
        return Err(ArcError::StashNotFound(name.to_string()));
    }
    save_state(
        repo,
        &StashState {
            active: Some(name.to_string()),
        },
    )
}
/// Push current dirty changes onto the active stash and reset the worktree.
///
/// Snapshots every change between HEAD's tree and the worktree into a new
/// stash entry, then restores the worktree to the committed state.
/// Errors with `NoActiveStash` if no stash is selected and `NothingToStash`
/// when the worktree is clean.
pub fn push(repo: &Repository) -> Result<String> {
    let state = load_state(repo)?;
    let name = state.active.ok_or(ArcError::NoActiveStash)?;
    // The name was read back from state.yml on disk; re-validate before it
    // is used to build filesystem paths.
    repo::validate_ref_name(&name)?;
    let ignore = IgnoreRules::load(&repo.workdir);
    let head_commit = tracking::resolve_head_commit(repo)?;
    // Materialized tree of the current HEAD commit (empty for an unborn repo).
    let committed = match &head_commit {
        Some(id) => tracking::materialize_committed_tree(repo, id)?,
        None => BTreeMap::new(),
    };
    let worktree = tracking::scan_worktree(repo, &ignore)?;
    let changes = tracking::detect_changes(&committed, &worktree);
    if changes.is_empty() {
        return Err(ArcError::NothingToStash);
    }
    // Convert detected changes into stash records. Added paths are collected
    // separately so their files can be removed from the worktree below
    // (clean_tracked_files only touches paths present in `committed`).
    let mut stash_changes = Vec::new();
    let mut added_paths = Vec::new();
    for change in &changes {
        let (kind, content) = match &change.kind {
            FileChangeKind::Add { content: c } => {
                added_paths.push(change.path.clone());
                let bytes = match c {
                    crate::model::FileContentDelta::Full { bytes } => Some(bytes.clone()),
                    // NOTE(review): a non-Full delta is stashed without
                    // content, and pop will then restore nothing for this
                    // path — confirm worktree adds are always Full here.
                    _ => None,
                };
                (StashChangeKind::Add, bytes)
            }
            FileChangeKind::Modify { content: c } => {
                let bytes = match c {
                    crate::model::FileContentDelta::Full { bytes } => Some(bytes.clone()),
                    // NOTE(review): same content-loss concern as Add above.
                    _ => None,
                };
                (StashChangeKind::Modify, bytes)
            }
            FileChangeKind::Delete => (StashChangeKind::Delete, None),
            // NOTE(review): a rename is downgraded to an Add with no bytes
            // and the old path is not recorded — verify renames cannot
            // appear in worktree scans, or handle them explicitly.
            FileChangeKind::Rename { .. } => (StashChangeKind::Add, None),
        };
        stash_changes.push(StashChange {
            path: change.path.clone(),
            kind,
            content,
        });
    }
    let timestamp = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map_err(|_| ArcError::ClockError)?
        .as_secs() as i64;
    let entry = StashEntry {
        base: head_commit.clone(),
        timestamp,
        changes: stash_changes,
    };
    // Persist the entry before mutating the worktree, so a failure below
    // cannot lose the stashed data.
    let mut stash_file = load_stash_file(repo, &name)?;
    stash_file.entries.push(entry);
    save_stash_file(repo, &name, &stash_file)?;
    // Reset the worktree: drop tracked files, delete added ones, rewrite
    // the committed tree, and prune now-empty directories.
    refs::clean_tracked_files(repo, &committed)?;
    for path in &added_paths {
        // Paths came from the worktree scan; validate before joining anyway.
        repo::validate_repo_path(path)?;
        let abs = repo.workdir.join(path);
        if abs.exists() {
            fs::remove_file(&abs)?;
        }
    }
    refs::write_tree(repo, &committed)?;
    refs::remove_empty_dirs(&repo.workdir)?;
    let n = changes.len();
    Ok(format!("pushed {n} change(s) to stash '{name}'"))
}
/// Pop the most recent entry from the active stash and apply it to the worktree.
///
/// Requires a clean worktree and that the current HEAD matches the commit
/// the entry was stashed against (`StashBaseMismatch` otherwise).
pub fn pop(repo: &Repository) -> Result<String> {
    let state = load_state(repo)?;
    let name = state.active.ok_or(ArcError::NoActiveStash)?;
    // The name was read back from state.yml on disk; re-validate before use.
    repo::validate_ref_name(&name)?;
    let (report, _) = tracking::status(repo)?;
    if !report.is_clean() {
        return Err(ArcError::DirtyWorktree);
    }
    let mut stash_file = load_stash_file(repo, &name)?;
    if stash_file.entries.is_empty() {
        return Err(ArcError::StashEmpty(name.clone()));
    }
    let entry = stash_file
        .entries
        .pop()
        .ok_or_else(|| ArcError::StashEmpty(name.clone()))?;
    // Only apply on top of the same HEAD the entry was taken from.
    let head_commit = tracking::resolve_head_commit(repo)?;
    if entry.base != head_commit {
        return Err(ArcError::StashBaseMismatch);
    }
    let n = entry.changes.len();
    for change in &entry.changes {
        match change.kind {
            StashChangeKind::Add | StashChangeKind::Modify => {
                // Stored paths are untrusted on-disk data; validate before
                // joining onto the worktree.
                repo::validate_repo_path(&change.path)?;
                let abs = repo.workdir.join(&change.path);
                if let Some(parent) = abs.parent() {
                    fs::create_dir_all(parent)?;
                }
                // NOTE(review): entries stashed without content (see push)
                // are silently skipped here — nothing is written.
                if let Some(bytes) = &change.content {
                    fs::write(&abs, bytes)?;
                }
            }
            StashChangeKind::Delete => {
                repo::validate_repo_path(&change.path)?;
                let abs = repo.workdir.join(&change.path);
                if abs.exists() {
                    fs::remove_file(&abs)?;
                }
            }
        }
    }
    refs::remove_empty_dirs(&repo.workdir)?;
    // The shortened stack is written only after every change applied: an
    // I/O error mid-apply keeps the entry on disk, though the worktree may
    // be left partially modified.
    save_stash_file(repo, &name, &stash_file)?;
    Ok(format!("popped {n} change(s) from stash '{name}'"))
}
/// Remove a named stash. If it was active, deactivate it.
pub fn rm(repo: &Repository, name: &str) -> Result<()> {
repo::validate_ref_name(name)?;
let path = stash_file_path(repo, name);
if !path.exists() {
return Err(ArcError::StashNotFound(name.to_string()));
}
fs::remove_file(&path)?;
let mut state = load_state(repo)?;
if state.active.as_deref() == Some(name) {
state.active = None;
save_state(repo, &state)?;
}
Ok(())
}
/// List all named stashes, marking the active one.
///
/// Returns `"no stashes"` when the named-stash directory is absent or
/// contains no stash files; otherwise one line per stash, sorted by name,
/// with the active stash prefixed by `*`.
pub fn list(repo: &Repository) -> Result<String> {
    let state = load_state(repo)?;
    let active = state.active.as_deref();
    let named_dir = stash_named_dir(repo);
    if !named_dir.exists() {
        return Ok("no stashes".to_string());
    }
    // Collect `<name>.yml` files; anything else in the directory is ignored.
    let mut names: Vec<String> = Vec::new();
    for entry in fs::read_dir(&named_dir)? {
        let entry = entry?;
        if !entry.file_type()?.is_file() {
            continue;
        }
        let file_name = entry.file_name().to_string_lossy().to_string();
        if let Some(stem) = file_name.strip_suffix(".yml") {
            names.push(stem.to_string());
        }
    }
    names.sort();
    if names.is_empty() {
        return Ok("no stashes".to_string());
    }
    let mut lines = Vec::with_capacity(names.len());
    for name in &names {
        let count = load_stash_file(repo, name)?.entries.len();
        let marker = if active == Some(name.as_str()) {
            "* "
        } else {
            " "
        };
        lines.push(format!("{marker}{name} ({count} entries)"));
    }
    Ok(lines.join("\n"))
}

View file

@ -67,10 +67,11 @@ struct CommitForHash<'a> {
pub timestamp: i64, pub timestamp: i64,
} }
pub fn compute_delta_id(base: &Option<CommitId>, changes: &[FileChange]) -> DeltaId { pub fn compute_delta_id(base: &Option<CommitId>, changes: &[FileChange]) -> Result<DeltaId> {
let hashable = DeltaForHash { base, changes }; let hashable = DeltaForHash { base, changes };
let bytes = rmp_serde::to_vec(&hashable).expect("delta hash serialization failed"); let bytes = rmp_serde::to_vec(&hashable)
DeltaId(sha256_hex(&bytes)) .map_err(|e| crate::error::ArcError::HashError(e.to_string()))?;
Ok(DeltaId(sha256_hex(&bytes)))
} }
pub fn compute_commit_id( pub fn compute_commit_id(
@ -79,7 +80,7 @@ pub fn compute_commit_id(
message: &str, message: &str,
author: &Option<Signature>, author: &Option<Signature>,
timestamp: i64, timestamp: i64,
) -> CommitId { ) -> Result<CommitId> {
let hashable = CommitForHash { let hashable = CommitForHash {
parents, parents,
delta, delta,
@ -87,6 +88,7 @@ pub fn compute_commit_id(
author, author,
timestamp, timestamp,
}; };
let bytes = rmp_serde::to_vec(&hashable).expect("commit hash serialization failed"); let bytes = rmp_serde::to_vec(&hashable)
CommitId(sha256_hex(&bytes)) .map_err(|e| crate::error::ArcError::HashError(e.to_string()))?;
Ok(CommitId(sha256_hex(&bytes)))
} }

View file

@ -179,14 +179,14 @@ pub fn commit(repo: &Repository, message: &str) -> Result<CommitId> {
None => vec![], None => vec![],
}; };
let delta_id = store::compute_delta_id(&head_commit, &changes); let delta_id = store::compute_delta_id(&head_commit, &changes)?;
let delta = Delta { let delta = Delta {
id: delta_id.clone(), id: delta_id.clone(),
base: head_commit.clone(), base: head_commit.clone(),
changes, changes,
}; };
let config = load_effective_config(repo); let config = crate::config::load_effective(repo);
let author = match (config.user_name, config.user_email) { let author = match (config.user_name, config.user_email) {
(Some(name), Some(email)) => Some(Signature { name, email }), (Some(name), Some(email)) => Some(Signature { name, email }),
_ => None, _ => None,
@ -194,10 +194,10 @@ pub fn commit(repo: &Repository, message: &str) -> Result<CommitId> {
let timestamp = std::time::SystemTime::now() let timestamp = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH) .duration_since(std::time::UNIX_EPOCH)
.expect("system clock error") .map_err(|_| ArcError::ClockError)?
.as_secs() as i64; .as_secs() as i64;
let commit_id = store::compute_commit_id(&parents, &delta_id, message, &author, timestamp); let commit_id = store::compute_commit_id(&parents, &delta_id, message, &author, timestamp)?;
let commit_obj = crate::model::Commit { let commit_obj = crate::model::Commit {
id: commit_id.clone(), id: commit_id.clone(),
@ -214,43 +214,11 @@ pub fn commit(repo: &Repository, message: &str) -> Result<CommitId> {
}; };
store::write_commit_object(repo, &obj)?; store::write_commit_object(repo, &obj)?;
update_refs_after_commit(repo, &head, &commit_id)?; crate::refs::update_refs_after_commit(repo, &head, &commit_id)?;
Ok(commit_id) Ok(commit_id)
} }
fn load_effective_config(repo: &Repository) -> crate::config::EffectiveConfig {
let local = crate::config::Config::load_local(repo).ok().flatten();
let global = crate::config::Config::load_global().ok().flatten();
crate::config::Config::effective(local, global)
}
fn update_refs_after_commit(repo: &Repository, head: &Head, commit_id: &CommitId) -> Result<()> {
let ref_target = crate::model::RefTarget {
commit: Some(commit_id.clone()),
};
let ref_yaml = serde_yaml::to_string(&ref_target)?;
match head {
Head::Unborn { bookmark } | Head::Attached { bookmark, .. } => {
fs::write(repo.bookmarks_dir().join(bookmark), &ref_yaml)?;
let new_head = Head::Attached {
bookmark: bookmark.clone(),
commit: commit_id.clone(),
};
repo.save_head(&new_head)?;
}
Head::Detached { .. } => {
let new_head = Head::Detached {
commit: commit_id.clone(),
};
repo.save_head(&new_head)?;
}
}
Ok(())
}
use std::fmt; use std::fmt;
pub struct StatusReport { pub struct StatusReport {

View file

@ -75,8 +75,15 @@ fn tag_list_subcommand_succeeds() {
#[test] #[test]
fn stash_list_subcommand_succeeds() { fn stash_list_subcommand_succeeds() {
let dir = TempDir::new().unwrap();
arc_cmd()
.arg("init")
.current_dir(dir.path())
.output()
.expect("failed to init");
let output = arc_cmd() let output = arc_cmd()
.args(["stash", "list"]) .args(["stash", "list"])
.current_dir(dir.path())
.output() .output()
.expect("failed to run arc"); .expect("failed to run arc");
assert!(output.status.success()); assert!(output.status.success());

207
tests/graft.rs Normal file
View file

@ -0,0 +1,207 @@
use std::process::Command;
use tempfile::TempDir;
// Builds a Command for the arc binary under test.
fn arc_cmd() -> Command {
    Command::new(env!("CARGO_BIN_EXE_arc"))
}
// Creates a fresh temp directory and runs `arc init` in it.
fn init_repo() -> TempDir {
    let dir = TempDir::new().unwrap();
    arc_cmd()
        .arg("init")
        .current_dir(dir.path())
        .output()
        .expect("failed to init");
    dir
}
// Writes `content` to `name`, commits it with `msg`, and returns the new
// commit id parsed from the "committed <id>" output line.
// NOTE(review): this helper is duplicated across the test files — consider
// a shared test-support module.
fn commit_file(dir: &TempDir, name: &str, content: &str, msg: &str) -> String {
    std::fs::write(dir.path().join(name), content).unwrap();
    let output = arc_cmd()
        .args(["commit", msg])
        .current_dir(dir.path())
        .output()
        .expect("failed to commit");
    assert!(output.status.success());
    String::from_utf8_lossy(&output.stdout)
        .trim()
        .strip_prefix("committed ")
        .unwrap()
        .to_string()
}
// Grafting a commit made on `feature` onto `main` reports success and
// materializes the cherry-picked file with its original content.
#[test]
fn graft_single_commit_onto_bookmark() {
    let dir = init_repo();
    commit_file(&dir, "base.txt", "base\n", "base");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let cherry = commit_file(&dir, "cherry.txt", "cherry\n", "cherry pick me");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let output = arc_cmd()
        .args(["graft", &cherry, "--onto", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let stdout = String::from_utf8_lossy(&output.stdout);
    assert!(stdout.contains("grafted"));
    assert!(dir.path().join("cherry.txt").exists());
    let content = std::fs::read_to_string(dir.path().join("cherry.txt")).unwrap();
    assert_eq!(content, "cherry\n");
}
// A graft produces a new commit that shows up in the log.
#[test]
fn graft_creates_new_commit() {
    let dir = init_repo();
    commit_file(&dir, "base.txt", "base\n", "base");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let cherry = commit_file(&dir, "cherry.txt", "cherry\n", "cherry");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["graft", &cherry, "--onto", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let log_output = arc_cmd()
        .args(["log"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let log_stdout = String::from_utf8_lossy(&log_output.stdout);
    assert!(log_stdout.contains("graft"));
}
// Grafting refuses to run over uncommitted changes.
#[test]
fn graft_fails_with_dirty_worktree() {
    let dir = init_repo();
    let id = commit_file(&dir, "a.txt", "a\n", "first");
    std::fs::write(dir.path().join("dirty.txt"), "dirty\n").unwrap();
    let output = arc_cmd()
        .args(["graft", &id, "--onto", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(!output.status.success());
    let stderr = String::from_utf8_lossy(&output.stderr);
    assert!(stderr.contains("uncommitted changes"));
}
// The grafted-from commit remains intact and showable afterwards.
#[test]
fn graft_preserves_original_commits() {
    let dir = init_repo();
    commit_file(&dir, "base.txt", "base\n", "base");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let cherry = commit_file(&dir, "cherry.txt", "cherry\n", "cherry");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["graft", &cherry, "--onto", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let show_output = arc_cmd()
        .args(["show", &cherry])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(show_output.status.success());
    let show_stdout = String::from_utf8_lossy(&show_output.stdout);
    assert!(show_stdout.contains("cherry"));
}
// A 12-character commit-id prefix is accepted as the graft source.
#[test]
fn graft_with_commit_prefix() {
    let dir = init_repo();
    commit_file(&dir, "base.txt", "base\n", "base");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let cherry = commit_file(&dir, "cherry.txt", "cherry\n", "cherry");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let short = &cherry[..12];
    let output = arc_cmd()
        .args(["graft", short, "--onto", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
}

264
tests/merge.rs Normal file
View file

@ -0,0 +1,264 @@
use std::process::Command;
use tempfile::TempDir;
// Builds a Command for the arc binary under test.
fn arc_cmd() -> Command {
    Command::new(env!("CARGO_BIN_EXE_arc"))
}
// Creates a fresh temp directory and runs `arc init` in it.
fn init_repo() -> TempDir {
    let dir = TempDir::new().unwrap();
    arc_cmd()
        .arg("init")
        .current_dir(dir.path())
        .output()
        .expect("failed to init");
    dir
}
// Writes `content` to `name`, commits it, and returns the new commit id
// parsed from the "committed <id>" output line.
fn commit_file(dir: &TempDir, name: &str, content: &str, msg: &str) -> String {
    std::fs::write(dir.path().join(name), content).unwrap();
    let output = arc_cmd()
        .args(["commit", msg])
        .current_dir(dir.path())
        .output()
        .expect("failed to commit");
    assert!(output.status.success());
    String::from_utf8_lossy(&output.stdout)
        .trim()
        .strip_prefix("committed ")
        .unwrap()
        .to_string()
}
// Merging two branches that touched disjoint files succeeds and leaves all
// three files in the worktree.
#[test]
fn merge_diverged_branches() {
    let dir = init_repo();
    commit_file(&dir, "base.txt", "base\n", "base commit");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    commit_file(&dir, "main-file.txt", "main\n", "main change");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    commit_file(&dir, "feature-file.txt", "feature\n", "feature change");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let output = arc_cmd()
        .args(["merge", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let stdout = String::from_utf8_lossy(&output.stdout);
    assert!(stdout.contains("merged feature"));
    assert!(dir.path().join("base.txt").exists());
    assert!(dir.path().join("main-file.txt").exists());
    assert!(dir.path().join("feature-file.txt").exists());
}
// The merge commit records both parents (checked via `show HEAD` output).
#[test]
fn merge_creates_commit_with_two_parents() {
    let dir = init_repo();
    commit_file(&dir, "base.txt", "base\n", "base");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    commit_file(&dir, "a.txt", "main\n", "main work");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    commit_file(&dir, "b.txt", "feature\n", "feature work");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let output = arc_cmd()
        .args(["merge", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let stdout = String::from_utf8_lossy(&output.stdout);
    let merged_line = stdout.trim();
    assert!(merged_line.contains("merged"));
    let show_output = arc_cmd()
        .args(["show", "HEAD"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let show_stdout = String::from_utf8_lossy(&show_output.stdout);
    assert!(show_stdout.contains("parent"));
}
// Merging refuses to run over uncommitted changes.
#[test]
fn merge_fails_with_dirty_worktree() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    commit_file(&dir, "b.txt", "b\n", "main work");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    commit_file(&dir, "c.txt", "c\n", "feature work");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    std::fs::write(dir.path().join("dirty.txt"), "dirty\n").unwrap();
    let output = arc_cmd()
        .args(["merge", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(!output.status.success());
    let stderr = String::from_utf8_lossy(&output.stderr);
    assert!(stderr.contains("conflict"));
}
// Non-overlapping line edits to the same file (append on main, prepend on
// feature) merge cleanly via the three-way line merge.
#[test]
fn merge_same_file_no_conflict() {
    let dir = init_repo();
    std::fs::write(dir.path().join("a.txt"), "line1\nline2\nline3\n").unwrap();
    arc_cmd()
        .args(["commit", "base"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    std::fs::write(dir.path().join("a.txt"), "line1\nline2\nline3\nmain-line\n").unwrap();
    arc_cmd()
        .args(["commit", "main edit"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    std::fs::write(
        dir.path().join("a.txt"),
        "feature-line\nline1\nline2\nline3\n",
    )
    .unwrap();
    arc_cmd()
        .args(["commit", "feature edit"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let output = arc_cmd()
        .args(["merge", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
}
// Conflicting edits to the same line region fail the merge and mention
// "conflict" on stderr.
#[test]
fn merge_conflict_reports_error() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "original\n", "base");
    arc_cmd()
        .args(["mark", "add", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    std::fs::write(dir.path().join("a.txt"), "main version\n").unwrap();
    arc_cmd()
        .args(["commit", "main change"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    std::fs::write(dir.path().join("a.txt"), "feature version\n").unwrap();
    arc_cmd()
        .args(["commit", "feature change"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    arc_cmd()
        .args(["switch", "main"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    let output = arc_cmd()
        .args(["merge", "feature"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(!output.status.success());
    let stderr = String::from_utf8_lossy(&output.stderr);
    assert!(stderr.contains("conflict"));
}

151
tests/reset.rs Normal file
View file

@ -0,0 +1,151 @@
use std::process::Command;
use tempfile::TempDir;
// Builds a Command for the arc binary under test.
fn arc_cmd() -> Command {
    Command::new(env!("CARGO_BIN_EXE_arc"))
}
// Creates a fresh temp directory and runs `arc init` in it.
fn init_repo() -> TempDir {
    let dir = TempDir::new().unwrap();
    arc_cmd()
        .arg("init")
        .current_dir(dir.path())
        .output()
        .expect("failed to init");
    dir
}
// Writes `content` to `name`, commits it, and returns the new commit id
// parsed from the "committed <id>" output line.
fn commit_file(dir: &TempDir, name: &str, content: &str, msg: &str) -> String {
    std::fs::write(dir.path().join(name), content).unwrap();
    let output = arc_cmd()
        .args(["commit", msg])
        .current_dir(dir.path())
        .output()
        .expect("failed to commit");
    assert!(output.status.success());
    String::from_utf8_lossy(&output.stdout)
        .trim()
        .strip_prefix("committed ")
        .unwrap()
        .to_string()
}
// `reset` with no paths restores a modified file to its committed content.
#[test]
fn reset_all_restores_modified_file() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "original\n", "first");
    std::fs::write(dir.path().join("a.txt"), "modified\n").unwrap();
    let output = arc_cmd()
        .args(["reset"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let stdout = String::from_utf8_lossy(&output.stdout);
    assert!(stdout.contains("reset"));
    let content = std::fs::read_to_string(dir.path().join("a.txt")).unwrap();
    assert_eq!(content, "original\n");
}
// `reset` deletes files that were added but never committed.
#[test]
fn reset_all_removes_added_file() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    std::fs::write(dir.path().join("new.txt"), "new\n").unwrap();
    let output = arc_cmd()
        .args(["reset"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    assert!(!dir.path().join("new.txt").exists());
}
// `reset` re-creates a tracked file that was deleted from the worktree.
#[test]
fn reset_all_restores_deleted_file() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    std::fs::remove_file(dir.path().join("a.txt")).unwrap();
    let output = arc_cmd()
        .args(["reset"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let content = std::fs::read_to_string(dir.path().join("a.txt")).unwrap();
    assert_eq!(content, "a\n");
}
// `reset <path>` restores only the named file, leaving other edits intact.
#[test]
fn reset_specific_file_only() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    commit_file(&dir, "b.txt", "b\n", "second");
    std::fs::write(dir.path().join("a.txt"), "changed-a\n").unwrap();
    std::fs::write(dir.path().join("b.txt"), "changed-b\n").unwrap();
    let output = arc_cmd()
        .args(["reset", "a.txt"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let a_content = std::fs::read_to_string(dir.path().join("a.txt")).unwrap();
    assert_eq!(a_content, "a\n");
    let b_content = std::fs::read_to_string(dir.path().join("b.txt")).unwrap();
    assert_eq!(b_content, "changed-b\n");
}
// `reset` on a clean worktree succeeds and reports nothing to do.
#[test]
fn reset_clean_worktree() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    let output = arc_cmd()
        .args(["reset"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let stdout = String::from_utf8_lossy(&output.stdout);
    assert!(stdout.contains("nothing to reset"));
}
// `reset` handles a modify, a delete, and an add in one pass.
#[test]
fn reset_multiple_changes() {
    let dir = init_repo();
    commit_file(&dir, "a.txt", "a\n", "first");
    commit_file(&dir, "b.txt", "b\n", "second");
    std::fs::write(dir.path().join("a.txt"), "changed\n").unwrap();
    std::fs::remove_file(dir.path().join("b.txt")).unwrap();
    std::fs::write(dir.path().join("c.txt"), "new\n").unwrap();
    let output = arc_cmd()
        .args(["reset"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let a = std::fs::read_to_string(dir.path().join("a.txt")).unwrap();
    assert_eq!(a, "a\n");
    assert!(dir.path().join("b.txt").exists());
    assert!(!dir.path().join("c.txt").exists());
}

159
tests/revert.rs Normal file
View file

@ -0,0 +1,159 @@
use std::process::Command;
use tempfile::TempDir;
fn arc_cmd() -> Command {
Command::new(env!("CARGO_BIN_EXE_arc"))
}
fn init_repo() -> TempDir {
let dir = TempDir::new().unwrap();
arc_cmd()
.arg("init")
.current_dir(dir.path())
.output()
.expect("failed to init");
dir
}
fn commit_file(dir: &TempDir, name: &str, content: &str, msg: &str) -> String {
std::fs::write(dir.path().join(name), content).unwrap();
let output = arc_cmd()
.args(["commit", msg])
.current_dir(dir.path())
.output()
.expect("failed to commit");
assert!(output.status.success());
String::from_utf8_lossy(&output.stdout)
.trim()
.strip_prefix("committed ")
.unwrap()
.to_string()
}
#[test]
fn revert_single_commit() {
    // Reverting the latest commit restores the previous file content.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "original\n", "first");
    let second_id = commit_file(&repo, "a.txt", "changed\n", "second");
    let output = arc_cmd()
        .args(["revert", &second_id])
        .current_dir(repo.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    assert!(String::from_utf8_lossy(&output.stdout).contains("reverted"));
    let restored = std::fs::read_to_string(repo.path().join("a.txt")).unwrap();
    assert_eq!(restored, "original\n");
}
#[test]
fn revert_creates_new_commit() {
    // A revert must not rewrite history: it adds a new commit that shows
    // up in the log rather than deleting the reverted one.
    let dir = init_repo();
    commit_file(&dir, "a.txt", "v1\n", "first");
    let id2 = commit_file(&dir, "a.txt", "v2\n", "second");
    let output = arc_cmd()
        .args(["revert", &id2])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let log_output = arc_cmd()
        .args(["log"])
        .current_dir(dir.path())
        .output()
        .expect("failed");
    // Previously the exit status of `log` was never checked, so a crashing
    // `log` command would fail only via the opaque contains() below.
    assert!(
        log_output.status.success(),
        "log failed: {}",
        String::from_utf8_lossy(&log_output.stderr)
    );
    let log_stdout = String::from_utf8_lossy(&log_output.stdout);
    assert!(log_stdout.contains("revert"));
}
#[test]
fn revert_file_addition() {
    // Reverting a commit that added a file removes that file again.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "a\n", "first");
    let add_id = commit_file(&repo, "b.txt", "b\n", "add b");
    let output = arc_cmd()
        .args(["revert", &add_id])
        .current_dir(repo.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    assert!(!repo.path().join("b.txt").exists());
}
#[test]
fn revert_file_deletion() {
    // Reverting a commit that deleted a file must bring the file back.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "a\n", "first");
    commit_file(&repo, "b.txt", "b\n", "add b");
    std::fs::remove_file(repo.path().join("b.txt")).unwrap();
    // Commit the deletion by hand: commit_file would rewrite the file first.
    let delete_out = arc_cmd()
        .args(["commit", "delete b"])
        .current_dir(repo.path())
        .output()
        .expect("failed");
    assert!(delete_out.status.success());
    let delete_id = String::from_utf8_lossy(&delete_out.stdout)
        .trim()
        .strip_prefix("committed ")
        .unwrap()
        .to_string();
    let output = arc_cmd()
        .args(["revert", &delete_id])
        .current_dir(repo.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let restored = std::fs::read_to_string(repo.path().join("b.txt")).unwrap();
    assert_eq!(restored, "b\n");
}
#[test]
fn revert_fails_with_dirty_worktree() {
    // Revert refuses to run on top of uncommitted changes.
    let repo = init_repo();
    let id = commit_file(&repo, "a.txt", "a\n", "first");
    std::fs::write(repo.path().join("a.txt"), "dirty\n").unwrap();
    let output = arc_cmd()
        .args(["revert", &id])
        .current_dir(repo.path())
        .output()
        .expect("failed");
    assert!(!output.status.success());
    assert!(String::from_utf8_lossy(&output.stderr).contains("uncommitted changes"));
}
#[test]
fn revert_with_prefix() {
    // A 12-character commit-id prefix is enough to identify the revert target.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "v1\n", "first");
    let full_id = commit_file(&repo, "a.txt", "v2\n", "second");
    let output = arc_cmd()
        .args(["revert", &full_id[..12]])
        .current_dir(repo.path())
        .output()
        .expect("failed");
    assert!(output.status.success());
    let restored = std::fs::read_to_string(repo.path().join("a.txt")).unwrap();
    assert_eq!(restored, "v1\n");
}

388
tests/stash.rs Normal file
View file

@ -0,0 +1,388 @@
use std::process::Command;
use tempfile::TempDir;
/// Build a `Command` for the compiled `arc` binary under test.
fn arc_cmd() -> Command {
    let bin = env!("CARGO_BIN_EXE_arc");
    Command::new(bin)
}
/// Create a temp dir and initialize an `arc` repository inside it.
fn init_repo() -> TempDir {
    let dir = TempDir::new().unwrap();
    let output = arc_cmd()
        .arg("init")
        .current_dir(dir.path())
        .output()
        .expect("failed to init");
    // Previously a non-zero exit from `init` went unnoticed and later
    // assertions failed with confusing messages; fail fast here instead.
    assert!(
        output.status.success(),
        "init failed: {}",
        String::from_utf8_lossy(&output.stderr)
    );
    dir
}
/// Write `content` into `name` and commit it with message `msg`.
fn commit_file(dir: &TempDir, name: &str, content: &str, msg: &str) {
    std::fs::write(dir.path().join(name), content).unwrap();
    let out = arc_cmd()
        .args(["commit", msg])
        .current_dir(dir.path())
        .output()
        .expect("failed to commit");
    let stderr = String::from_utf8_lossy(&out.stderr);
    assert!(out.status.success(), "commit failed: {}", stderr);
}
/// Run `arc` with `args` in `dir`, assert it exits successfully, and
/// return trimmed stdout.
fn run_ok(dir: &TempDir, args: &[&str]) -> String {
    let out = arc_cmd()
        .args(args)
        .current_dir(dir.path())
        .output()
        .expect("failed to run");
    let stderr = String::from_utf8_lossy(&out.stderr);
    assert!(out.status.success(), "command {:?} failed: {}", args, stderr);
    String::from_utf8_lossy(&out.stdout).trim().to_string()
}
/// Run `arc` with `args` in `dir`, assert it exits with a non-zero status,
/// and return trimmed stderr.
fn run_fail(dir: &TempDir, args: &[&str]) -> String {
    let out = arc_cmd()
        .args(args)
        .current_dir(dir.path())
        .output()
        .expect("failed to run");
    let stdout = String::from_utf8_lossy(&out.stdout);
    assert!(
        !out.status.success(),
        "command {:?} should have failed but succeeded: {}",
        args,
        stdout
    );
    String::from_utf8_lossy(&out.stderr).trim().to_string()
}
#[test]
fn stash_create_creates_stash() {
    // Creating a stash reports it and writes the backing YAML file.
    let repo = init_repo();
    let out = run_ok(&repo, &["stash", "create", "wip"]);
    assert!(out.contains("stash 'wip' created"));
    assert!(repo.path().join(".arc/stashes/named/wip.yml").exists());
}
#[test]
fn stash_create_sets_active() {
    // A freshly created stash becomes the active one in state.yml.
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "wip"]);
    let state_file = repo.path().join(".arc/stashes/state.yml");
    let state = std::fs::read_to_string(state_file).unwrap();
    assert!(state.contains("wip"));
}
#[test]
fn stash_create_fails_if_exists() {
    // Creating a stash under a name that is already taken is rejected.
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "wip"]);
    let err = run_fail(&repo, &["stash", "create", "wip"]);
    assert!(err.contains("stash already exists"));
}
#[test]
fn stash_create_fails_invalid_name() {
    // Path-traversal-style names must be rejected before touching the fs.
    let repo = init_repo();
    let err = run_fail(&repo, &["stash", "create", "../escape"]);
    assert!(err.contains("invalid ref name"));
}
#[test]
fn stash_use_switches_active() {
    // `stash use` points the active-stash marker at an existing stash.
    let repo = init_repo();
    for name in ["first", "second"] {
        run_ok(&repo, &["stash", "create", name]);
    }
    run_ok(&repo, &["stash", "use", "first"]);
    let state_file = repo.path().join(".arc/stashes/state.yml");
    assert!(std::fs::read_to_string(state_file).unwrap().contains("first"));
}
#[test]
fn stash_use_fails_nonexistent() {
    // Switching to a stash that was never created is an error.
    let repo = init_repo();
    let err = run_fail(&repo, &["stash", "use", "nope"]);
    assert!(err.contains("stash not found"));
}
#[test]
fn stash_push_saves_and_resets() {
    // Pushing records the dirty change and resets the worktree to clean.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    std::fs::write(repo.path().join("a.txt"), "modified\n").unwrap();
    let out = run_ok(&repo, &["stash", "push"]);
    assert!(out.contains("pushed"));
    assert!(out.contains("change(s)"));
    let restored = std::fs::read_to_string(repo.path().join("a.txt")).unwrap();
    assert_eq!(restored, "hello\n");
}
#[test]
fn stash_push_handles_added_files() {
    // An untracked new file is captured by push and removed from the worktree.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    std::fs::write(repo.path().join("new.txt"), "added\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    assert!(!repo.path().join("new.txt").exists());
}
#[test]
fn stash_push_handles_deleted_files() {
    // A deletion is stashed and the tracked file reappears on disk.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    std::fs::remove_file(repo.path().join("a.txt")).unwrap();
    run_ok(&repo, &["stash", "push"]);
    let restored = std::fs::read_to_string(repo.path().join("a.txt")).unwrap();
    assert_eq!(restored, "hello\n");
}
#[test]
fn stash_push_fails_no_active() {
    // Push needs an active stash even when the worktree is dirty.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    std::fs::write(repo.path().join("a.txt"), "changed\n").unwrap();
    let err = run_fail(&repo, &["stash", "push"]);
    assert!(err.contains("no active stash"));
}
#[test]
fn stash_push_fails_clean_worktree() {
    // Pushing with nothing modified is rejected.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    let err = run_fail(&repo, &["stash", "push"]);
    assert!(err.contains("nothing to stash"));
}
#[test]
fn stash_pop_restores_changes() {
    // Pop re-applies the most recently pushed modification.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    std::fs::write(repo.path().join("a.txt"), "modified\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    let out = run_ok(&repo, &["stash", "pop"]);
    assert!(out.contains("popped"));
    let restored = std::fs::read_to_string(repo.path().join("a.txt")).unwrap();
    assert_eq!(restored, "modified\n");
}
#[test]
fn stash_pop_restores_added_files() {
    // A stashed new file disappears on push and comes back on pop.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    std::fs::write(repo.path().join("new.txt"), "added\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    assert!(!repo.path().join("new.txt").exists());
    run_ok(&repo, &["stash", "pop"]);
    let restored = std::fs::read_to_string(repo.path().join("new.txt")).unwrap();
    assert_eq!(restored, "added\n");
}
#[test]
fn stash_pop_restores_deleted_files() {
    // Popping a stashed deletion removes the file again.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    std::fs::remove_file(repo.path().join("a.txt")).unwrap();
    run_ok(&repo, &["stash", "push"]);
    run_ok(&repo, &["stash", "pop"]);
    assert!(!repo.path().join("a.txt").exists());
}
#[test]
fn stash_pop_fails_no_active() {
    // Pop requires an active stash to exist.
    let repo = init_repo();
    let err = run_fail(&repo, &["stash", "pop"]);
    assert!(err.contains("no active stash"));
}
#[test]
fn stash_pop_fails_empty_stash() {
    // Popping an active stash with no pushed entries is an error.
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "wip"]);
    let err = run_fail(&repo, &["stash", "pop"]);
    assert!(err.contains("stash is empty"));
}
#[test]
fn stash_pop_fails_dirty_worktree() {
    // Pop refuses to apply on top of uncommitted changes.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    std::fs::write(repo.path().join("a.txt"), "modified\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    // Dirty the worktree again after the push.
    std::fs::write(repo.path().join("a.txt"), "dirty\n").unwrap();
    let err = run_fail(&repo, &["stash", "pop"]);
    assert!(err.contains("uncommitted changes"));
}
#[test]
fn stash_pop_fails_base_mismatch() {
    // Pop is rejected when HEAD has moved past the stash's base commit.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    std::fs::write(repo.path().join("a.txt"), "modified\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    // Advance HEAD so the stash's recorded base no longer matches.
    commit_file(&repo, "b.txt", "new file\n", "second commit");
    let err = run_fail(&repo, &["stash", "pop"]);
    assert!(err.contains("stash base does not match"));
}
#[test]
fn stash_rm_removes_stash() {
    // `stash rm` reports removal and deletes the backing YAML file.
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "wip"]);
    let out = run_ok(&repo, &["stash", "rm", "wip"]);
    assert!(out.contains("stash 'wip' removed"));
    assert!(!repo.path().join(".arc/stashes/named/wip.yml").exists());
}
#[test]
fn stash_rm_clears_active_if_removed() {
    // Removing the active stash clears the active marker in state.yml.
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "wip"]);
    run_ok(&repo, &["stash", "rm", "wip"]);
    let state_file = repo.path().join(".arc/stashes/state.yml");
    let state = std::fs::read_to_string(state_file).unwrap();
    // Accept either an explicit YAML null or simply no reference to "wip".
    assert!(state.contains("null") || !state.contains("wip"));
}
#[test]
fn stash_rm_fails_nonexistent() {
    // Removing a stash that does not exist is an error.
    let repo = init_repo();
    let err = run_fail(&repo, &["stash", "rm", "nope"]);
    assert!(err.contains("stash not found"));
}
#[test]
fn stash_list_shows_stashes() {
    // Every created stash appears in the listing.
    let repo = init_repo();
    for name in ["alpha", "beta"] {
        run_ok(&repo, &["stash", "create", name]);
    }
    let listing = run_ok(&repo, &["stash", "list"]);
    assert!(listing.contains("alpha"));
    assert!(listing.contains("beta"));
}
#[test]
fn stash_list_marks_active() {
    // The active stash gets a "* " marker; the others are listed unmarked.
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "alpha"]);
    run_ok(&repo, &["stash", "create", "beta"]);
    let listing = run_ok(&repo, &["stash", "list"]);
    assert!(listing.contains("* beta"));
    let alpha_unmarked = listing
        .lines()
        .any(|line| line.trim_start().starts_with("alpha"));
    assert!(alpha_unmarked);
    assert!(!listing.contains("* alpha"));
}
#[test]
fn stash_list_sorted() {
    // The listing is alphabetical regardless of creation order.
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "zebra"]);
    run_ok(&repo, &["stash", "create", "alpha"]);
    let listing = run_ok(&repo, &["stash", "list"]);
    assert!(listing.find("alpha").unwrap() < listing.find("zebra").unwrap());
}
#[test]
fn stash_list_shows_entry_count() {
    // The listing reports how many entries each stash holds.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "hello\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    assert!(run_ok(&repo, &["stash", "list"]).contains("0 entries"));
    std::fs::write(repo.path().join("a.txt"), "modified\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    assert!(run_ok(&repo, &["stash", "list"]).contains("1 entries"));
}
#[test]
fn stash_list_empty() {
    // With no stashes at all, the listing says so.
    let repo = init_repo();
    let listing = run_ok(&repo, &["stash", "list"]);
    assert!(listing.contains("no stashes"));
}
#[test]
fn stash_push_pop_multiple() {
    // Pops come back in LIFO order: the last push is restored first.
    let repo = init_repo();
    commit_file(&repo, "a.txt", "v1\n", "initial");
    run_ok(&repo, &["stash", "create", "wip"]);
    std::fs::write(repo.path().join("a.txt"), "v2\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    std::fs::write(repo.path().join("a.txt"), "v3\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    run_ok(&repo, &["stash", "pop"]);
    assert_eq!(
        std::fs::read_to_string(repo.path().join("a.txt")).unwrap(),
        "v3\n"
    );
    // Pop requires a clean worktree, so discard the re-applied change first.
    run_ok(&repo, &["reset"]);
    run_ok(&repo, &["stash", "pop"]);
    assert_eq!(
        std::fs::read_to_string(repo.path().join("a.txt")).unwrap(),
        "v2\n"
    );
}
#[test]
fn stash_push_on_unborn() {
    // Stashing works before any commit exists (unborn HEAD).
    let repo = init_repo();
    run_ok(&repo, &["stash", "create", "wip"]);
    std::fs::write(repo.path().join("new.txt"), "content\n").unwrap();
    run_ok(&repo, &["stash", "push"]);
    assert!(!repo.path().join("new.txt").exists());
    run_ok(&repo, &["stash", "pop"]);
    let restored = std::fs::read_to_string(repo.path().join("new.txt")).unwrap();
    assert_eq!(restored, "content\n");
}